Mirror of https://github.com/yt-dlp/yt-dlp.git, synced 2025-06-21 09:10:59 +02:00.
Compare commits: 2023.10.13...master (1201 commits)
.github/ISSUE_TEMPLATE/1_broken_site.yml (vendored, 38 lines changed)

@@ -2,13 +2,11 @@ name: Broken site support
 description: Report issue with yt-dlp on a supported site
 labels: [triage, site-bug]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:

@@ -18,15 +16,13 @@ body:
       options:
         - label: I'm reporting that yt-dlp is broken on a **supported** site
           required: true
-        - label: I've verified that I'm running yt-dlp version **2023.10.13** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
         - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input

@@ -47,6 +43,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true

@@ -61,19 +59,19 @@ body:
       description: |
         It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'test:youtube']
-        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2023.10.13 [9d339c4] (win32_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] Checking exe version: ffmpeg -bsfs
-        [debug] Checking exe version: ffprobe -bsfs
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2023.10.13, Current version: 2023.10.13
-        yt-dlp is up to date (2023.10.13)
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
+        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
       render: shell
     validations:
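Read together, the added lines above swap the old mandatory-field checkbox for a plain markdown notice. Assembled from the `+` lines of the first hunk (indentation reconstructed here, so treat this as a sketch rather than the verbatim file), the intro block that each template now opens with looks like this:

```yaml
body:
  - type: markdown
    attributes:
      value: |
        > [!IMPORTANT]
        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
```

The same substitution is repeated in every issue template touched by this changeset.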
Site support request template:

@@ -2,13 +2,11 @@ name: Site support request
 description: Request support for a new site
 labels: [triage, site-request]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:

@@ -18,15 +16,13 @@ body:
       options:
         - label: I'm reporting a new site support request
           required: true
-        - label: I've verified that I'm running yt-dlp version **2023.10.13** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
         - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
   - type: input

@@ -59,6 +55,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true

@@ -73,19 +71,19 @@ body:
       description: |
         It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'test:youtube']
-        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2023.10.13 [9d339c4] (win32_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] Checking exe version: ffmpeg -bsfs
-        [debug] Checking exe version: ffprobe -bsfs
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2023.10.13, Current version: 2023.10.13
-        yt-dlp is up to date (2023.10.13)
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
+        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
       render: shell
     validations:
Site feature request template:

@@ -1,14 +1,12 @@
 name: Site feature request
-description: Request a new functionality for a supported site
+description: Request new functionality for a site supported by yt-dlp
 labels: [triage, site-enhancement]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:

@@ -18,13 +16,11 @@ body:
       options:
         - label: I'm requesting a site-specific feature
           required: true
-        - label: I've verified that I'm running yt-dlp version **2023.10.13** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
         - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
           required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
           required: true
         - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
   - type: input

@@ -55,6 +51,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true

@@ -69,19 +67,19 @@ body:
       description: |
         It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'test:youtube']
-        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2023.10.13 [9d339c4] (win32_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] Checking exe version: ffmpeg -bsfs
-        [debug] Checking exe version: ffprobe -bsfs
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2023.10.13, Current version: 2023.10.13
-        yt-dlp is up to date (2023.10.13)
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
+        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
       render: shell
     validations:
.github/ISSUE_TEMPLATE/4_bug_report.yml (vendored, 42 lines changed)

@@ -2,13 +2,11 @@ name: Core bug report
 description: Report a bug unrelated to any particular site or extractor
 labels: [triage, bug]
 body:
-  - type: checkboxes
+  - type: markdown
     attributes:
-      label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
-      description: Fill all fields even if you think it is irrelevant for the issue
-      options:
-        - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
-          required: true
+      value: |
+        > [!IMPORTANT]
+        > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
   - type: checkboxes
     id: checklist
     attributes:

@@ -18,15 +16,9 @@ body:
       options:
         - label: I'm reporting a bug unrelated to a specific site
           required: true
-        - label: I've verified that I'm running yt-dlp version **2023.10.13** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+        - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
           required: true
-        - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
-          required: true
-        - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
-          required: true
-        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
-          required: true
-        - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
           required: true
   - type: textarea
     id: description

@@ -40,6 +32,8 @@ body:
     id: verbose
     attributes:
       label: Provide verbose output that clearly demonstrates the problem
+      description: |
+        This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
       options:
         - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
           required: true

@@ -54,19 +48,19 @@ body:
       description: |
         It should start like this:
       placeholder: |
-        [debug] Command-line config: ['-vU', 'test:youtube']
-        [debug] Portable config "yt-dlp.conf": ['-i']
+        [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
         [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
-        [debug] yt-dlp version 2023.10.13 [9d339c4] (win32_exe)
-        [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
-        [debug] Checking exe version: ffmpeg -bsfs
-        [debug] Checking exe version: ffprobe -bsfs
-        [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
-        [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+        [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+        [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+        [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+        [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
         [debug] Proxy map: {}
+        [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+        [debug] Loaded 1838 extractors
         [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
-        Latest version: 2023.10.13, Current version: 2023.10.13
-        yt-dlp is up to date (2023.10.13)
+        Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
+        yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+        [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
         <more lines>
       render: shell
     validations:
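For reference, here is how the trimmed bug-report checklist reads after the change, assembled from the context and `+` lines of the hunk above; indentation is reconstructed, and the `label`/`description` fields of the checkbox group that the hunk does not show are omitted:

```yaml
  - type: checkboxes
    id: checklist
    attributes:
      options:
        - label: I'm reporting a bug unrelated to a specific site
          required: true
        - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
          required: true
        - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar issues **including closed ones**. DO NOT post duplicates
          required: true
```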
40
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
40
.github/ISSUE_TEMPLATE/5_feature_request.yml
vendored
@ -1,14 +1,12 @@
|
|||||||
name: Feature request
|
name: Feature request
|
||||||
description: Request a new functionality unrelated to any particular site or extractor
|
description: Request a new feature unrelated to any particular site or extractor
|
||||||
labels: [triage, enhancement]
|
labels: [triage, enhancement]
|
||||||
body:
|
body:
|
||||||
- type: checkboxes
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
value: |
|
||||||
description: Fill all fields even if you think it is irrelevant for the issue
|
> [!IMPORTANT]
|
||||||
options:
|
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
|
|
||||||
required: true
|
|
||||||
- type: checkboxes
|
- type: checkboxes
|
||||||
id: checklist
|
id: checklist
|
||||||
attributes:
|
attributes:
|
||||||
@ -20,11 +18,9 @@ body:
|
|||||||
required: true
|
required: true
|
||||||
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
|
- label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
|
||||||
required: true
|
required: true
|
||||||
- label: I've verified that I'm running yt-dlp version **2023.10.13** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
|
- label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
|
||||||
required: true
|
required: true
|
||||||
- label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
|
- label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar requests **including closed ones**. DO NOT post duplicates
|
||||||
required: true
|
|
||||||
- label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
|
|
||||||
required: true
|
required: true
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: description
|
id: description
|
||||||
@ -38,6 +34,8 @@ body:
|
|||||||
id: verbose
|
id: verbose
|
||||||
attributes:
|
attributes:
|
||||||
label: Provide verbose output that clearly demonstrates the problem
|
label: Provide verbose output that clearly demonstrates the problem
|
||||||
|
description: |
|
||||||
|
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||||
options:
|
options:
|
||||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||||
- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
|
- label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
|
||||||
@ -50,18 +48,18 @@ body:
|
|||||||
description: |
|
description: |
|
||||||
It should start like this:
|
It should start like this:
|
||||||
placeholder: |
|
placeholder: |
|
||||||
[debug] Command-line config: ['-vU', 'test:youtube']
|
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||||
[debug] Portable config "yt-dlp.conf": ['-i']
|
|
||||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||||
[debug] yt-dlp version 2023.10.13 [9d339c4] (win32_exe)
|
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||||
[debug] Checking exe version: ffmpeg -bsfs
|
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||||
[debug] Checking exe version: ffprobe -bsfs
|
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
|
||||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
|
||||||
[debug] Proxy map: {}
|
[debug] Proxy map: {}
|
||||||
|
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||||
|
[debug] Loaded 1838 extractors
|
||||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||||
Latest version: 2023.10.13, Current version: 2023.10.13
|
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||||
yt-dlp is up to date (2023.10.13)
|
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||||
|
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||||
<more lines>
|
<more lines>
|
||||||
render: shell
|
render: shell
|
||||||
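For reference, the `'verbose': True` checkbox above maps onto yt-dlp's embedding API. A minimal sketch of the API counterpart of running `yt-dlp -v <URL>`, using the template's own example video; this is an illustration, not part of the template:

```python
# Minimal sketch: enabling the same [debug] output via the Python API,
# as referenced by the "'verbose': True" checkbox above.
from yt_dlp import YoutubeDL

params = {'verbose': True}  # prints the [debug] header lines shown in the placeholder

with YoutubeDL(params) as ydl:
    ydl.download(['https://www.youtube.com/watch?v=BaW_jenozKc'])
```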
.github/ISSUE_TEMPLATE/6_question.yml (40 lines changed)
@@ -1,14 +1,12 @@
  name: Ask question
- description: Ask yt-dlp related question
+ description: Ask a question about using yt-dlp
  labels: [question]
  body:
- - type: checkboxes
+ - type: markdown
  attributes:
- label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
- description: Fill all fields even if you think it is irrelevant for the issue
- options:
- - label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\* field
- required: true
+ value: |
+ > [!IMPORTANT]
+ > Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
  - type: markdown
  attributes:
  value: |
@@ -26,11 +24,9 @@ body:
  required: true
  - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
  required: true
- - label: I've verified that I'm running yt-dlp version **2023.10.13** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
  required: true
- - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+ - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%3Aissue%20-label%3Aspam%20%20) for similar questions **including closed ones**. DO NOT post duplicates
  required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- required: true
  - type: textarea
  id: question
@@ -44,6 +40,8 @@ body:
  id: verbose
  attributes:
  label: Provide verbose output that clearly demonstrates the problem
+ description: |
+ This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
  options:
  - label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
  - label: "If using API, add `'verbose': True` to `YoutubeDL` params instead"
@@ -56,18 +54,18 @@ body:
  description: |
  It should start like this:
  placeholder: |
- [debug] Command-line config: ['-vU', 'test:youtube']
- [debug] Portable config "yt-dlp.conf": ['-i']
+ [debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
  [debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
- [debug] yt-dlp version 2023.10.13 [9d339c4] (win32_exe)
+ [debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
- [debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
+ [debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
- [debug] Checking exe version: ffmpeg -bsfs
- [debug] Checking exe version: ffprobe -bsfs
- [debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
- [debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
+ [debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+ [debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
  [debug] Proxy map: {}
+ [debug] Request Handlers: urllib, requests, websockets, curl_cffi
+ [debug] Loaded 1838 extractors
  [debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
- Latest version: 2023.10.13, Current version: 2023.10.13
- yt-dlp is up to date (2023.10.13)
+ Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
+ yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
+ [youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
  <more lines>
  render: shell
.github/ISSUE_TEMPLATE/config.yml (7 lines changed)
@@ -1,8 +1,5 @@
  blank_issues_enabled: false
  contact_links:
- - name: Get help from the community on Discord
+ - name: Get help on Discord
  url: https://discord.gg/H5MNcFW63r
- about: Join the yt-dlp Discord for community-powered support!
+ about: Join the yt-dlp Discord server for support and discussion
- - name: Matrix Bridge to the Discord server
- url: https://matrix.to/#/#yt-dlp:matrix.org
- about: For those who do not want to use Discord
@@ -12,15 +12,13 @@ body:
  options:
  - label: I'm reporting that yt-dlp is broken on a **supported** site
  required: true
- - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
  - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
  required: true
- - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+ - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- required: true
  - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
  - type: input
@@ -12,15 +12,13 @@ body:
  options:
  - label: I'm reporting a new site support request
  required: true
- - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
  - label: I've checked that none of provided URLs [violate any copyrights](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy) or contain any [DRM](https://en.wikipedia.org/wiki/Digital_rights_management) to the best of my knowledge
  required: true
- - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
  required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- required: true
  - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and am willing to share it if required
  - type: input
@@ -1,5 +1,5 @@
  name: Site feature request
- description: Request a new functionality for a supported site
+ description: Request new functionality for a site supported by yt-dlp
  labels: [triage, site-enhancement]
  body:
  %(no_skip)s
@@ -12,13 +12,11 @@ body:
  options:
  - label: I'm requesting a site-specific feature
  required: true
- - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
  required: true
  - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
  required: true
- - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
  required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- required: true
  - label: I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
  - type: input
.github/ISSUE_TEMPLATE_tmpl/4_bug_report.yml (10 lines changed)
@@ -12,15 +12,9 @@ body:
  options:
  - label: I'm reporting a bug unrelated to a specific site
  required: true
- - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
  required: true
- - label: I've checked that all provided URLs are playable in a browser with the same IP and same login details
- required: true
- - label: I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
- required: true
- - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
- required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
+ - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar issues **including closed ones**. DO NOT post duplicates
  required: true
  - type: textarea
  id: description
@@ -1,5 +1,5 @@
  name: Feature request
- description: Request a new functionality unrelated to any particular site or extractor
+ description: Request a new feature unrelated to any particular site or extractor
  labels: [triage, enhancement]
  body:
  %(no_skip)s
@@ -14,11 +14,9 @@ body:
  required: true
  - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
  required: true
- - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
  required: true
- - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
+ - label: I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar requests **including closed ones**. DO NOT post duplicates
  required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- required: true
  - type: textarea
  id: description
.github/ISSUE_TEMPLATE_tmpl/6_question.yml (8 lines changed)
@@ -1,5 +1,5 @@
  name: Ask question
- description: Ask yt-dlp related question
+ description: Ask a question about using yt-dlp
  labels: [question]
  body:
  %(no_skip)s
@@ -20,11 +20,9 @@ body:
  required: true
  - label: I've looked through the [README](https://github.com/yt-dlp/yt-dlp#readme)
  required: true
- - label: I've verified that I'm running yt-dlp version **%(version)s** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
+ - label: I've verified that I have **updated yt-dlp to nightly or master** ([update instructions](https://github.com/yt-dlp/yt-dlp#update-channels))
  required: true
- - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766) and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar questions **including closed ones**. DO NOT post duplicates
+ - label: I've searched [known issues](https://github.com/yt-dlp/yt-dlp/issues/3766), [the FAQ](https://github.com/yt-dlp/yt-dlp/wiki/FAQ), and the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=is%%3Aissue%%20-label%%3Aspam%%20%%20) for similar questions **including closed ones**. DO NOT post duplicates
  required: true
- - label: I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- required: true
  - type: textarea
  id: question
.github/PULL_REQUEST_TEMPLATE.md (44 lines changed)
@@ -1,14 +1,17 @@
- **IMPORTANT**: PRs without the template will be CLOSED
+ <!--
+ **IMPORTANT**: PRs without the template will be CLOSED
+
+ Due to the high volume of pull requests, it may be a while before your PR is reviewed.
+ Please try to keep your pull request focused on a single bugfix or new feature.
+ Pull requests with a vast scope and/or very large diff will take much longer to review.
+ It is recommended for new contributors to stick to smaller pull requests, so you can receive much more immediate feedback as you familiarize yourself with the codebase.
+
+ PLEASE AVOID FORCE-PUSHING after opening a PR, as it makes reviewing more difficult.
+ -->

  ### Description of your *pull request* and other information

- <!--
- Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible
- -->
- ADD DESCRIPTION HERE
+ ADD DETAILED DESCRIPTION HERE

  Fixes #
@@ -16,34 +19,25 @@ Fixes #
  <details open><summary>Template</summary> <!-- OPEN is intentional -->

  <!--
+ # PLEASE FOLLOW THE GUIDE BELOW
-
- # PLEASE FOLLOW THE GUIDE BELOW
  - You will be asked some questions, please read them **carefully** and answer honestly
  - Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x])
- - Use *Preview* tab to see how your *pull request* will actually look like
+ - Use *Preview* tab to see what your *pull request* will actually look like
  -->

  ### Before submitting a *pull request* make sure you have:
  - [ ] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions)
  - [ ] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests
- - [ ] Checked the code with [flake8](https://pypi.python.org/pypi/flake8) and [ran relevant tests](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions)

- ### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply:
+ ### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check those that apply and remove the others:
- - [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/)
+ - [ ] I am the original author of the code in this PR, and I am willing to release it under [Unlicense](http://unlicense.org/)
- - [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)
+ - [ ] I am not the original author of the code in this PR, but it is in the public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence)

- ### What is the purpose of your *pull request*?
+ ### What is the purpose of your *pull request*? Check those that apply and remove the others:
  - [ ] Fix or improvement to an extractor (Make sure to add/update tests)
  - [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy))
  - [ ] Core bug fix/improvement
  - [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes))

- <!-- Do NOT edit/remove anything below this! -->
- </details><details><summary>Copilot Summary</summary>
- copilot:all
  </details>
.github/banner.svg: diff suppressed because one or more lines are too long (24 KiB before, 15 KiB after)
.github/workflows/build.yml (378 lines changed)
@@ -12,6 +12,9 @@ on:
  unix:
  default: true
  type: boolean
+ linux_static:
+ default: true
+ type: boolean
  linux_arm:
  default: true
  type: boolean
@@ -27,9 +30,10 @@ on:
  windows32:
  default: true
  type: boolean
- meta_files:
- default: true
- type: boolean
+ origin:
+ required: false
+ default: ''
+ type: string
  secrets:
  GPG_SIGNING_KEY:
  required: false
@@ -37,16 +41,22 @@ on:
  workflow_dispatch:
  inputs:
  version:
- description: Version tag (YYYY.MM.DD[.REV])
+ description: |
+ VERSION: yyyy.mm.dd[.rev] or rev
  required: true
  type: string
  channel:
- description: Update channel (stable/nightly/...)
+ description: |
+ SOURCE of this build's updates: stable/nightly/master/<repo>
  required: true
  default: stable
  type: string
  unix:
- description: yt-dlp, yt-dlp.tar.gz, yt-dlp_linux, yt-dlp_linux.zip
+ description: yt-dlp, yt-dlp.tar.gz
+ default: true
+ type: boolean
+ linux_static:
+ description: yt-dlp_linux
  default: true
  type: boolean
  linux_arm:
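The reworked inputs above (version, channel, and the per-target booleans) are what a manual `workflow_dispatch` run has to supply. A hedged sketch of triggering such a run through the GitHub REST API follows; the repository slug, token handling, and input values are illustrative assumptions, not taken from the workflow itself:

```python
# Illustrative sketch (not part of the repo): dispatching the build workflow
# with the inputs defined above. Assumes a token in GITHUB_TOKEN and that you
# are dispatching your own fork.
import os
import requests

REPO = 'OWNER/yt-dlp'   # assumption: your fork's slug
WORKFLOW = 'build.yml'

resp = requests.post(
    f'https://api.github.com/repos/{REPO}/actions/workflows/{WORKFLOW}/dispatches',
    headers={
        'Accept': 'application/vnd.github+json',
        'Authorization': f"Bearer {os.environ['GITHUB_TOKEN']}",
    },
    json={
        'ref': 'master',
        'inputs': {
            'version': '2024.01.01',  # yyyy.mm.dd[.rev] or rev, per the description above
            'channel': 'stable',      # stable/nightly/master/<repo>
            'unix': 'true',           # boolean inputs are typically passed as strings here
        },
    },
    timeout=30,
)
resp.raise_for_status()
```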
@ -62,94 +72,110 @@ on:
|
|||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
windows:
|
windows:
|
||||||
description: yt-dlp.exe, yt-dlp_min.exe, yt-dlp_win.zip
|
description: yt-dlp.exe, yt-dlp_win.zip
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
windows32:
|
windows32:
|
||||||
description: yt-dlp_x86.exe
|
description: yt-dlp_x86.exe
|
||||||
default: true
|
default: true
|
||||||
type: boolean
|
type: boolean
|
||||||
meta_files:
|
origin:
|
||||||
description: SHA2-256SUMS, SHA2-512SUMS, _update_spec
|
description: Origin
|
||||||
default: true
|
required: false
|
||||||
type: boolean
|
default: 'current repo'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- 'current repo'
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
process:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
origin: ${{ steps.process_origin.outputs.origin }}
|
||||||
|
steps:
|
||||||
|
- name: Process origin
|
||||||
|
id: process_origin
|
||||||
|
run: |
|
||||||
|
echo "origin=${{ inputs.origin == 'current repo' && github.repository || inputs.origin }}" | tee "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
unix:
|
unix:
|
||||||
|
needs: process
|
||||||
if: inputs.unix
|
if: inputs.unix
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v4
|
with:
|
||||||
|
fetch-depth: 0 # Needed for changelog
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.10"
|
python-version: "3.10"
|
||||||
- uses: conda-incubator/setup-miniconda@v2
|
|
||||||
with:
|
|
||||||
miniforge-variant: Mambaforge
|
|
||||||
use-mamba: true
|
|
||||||
channels: conda-forge
|
|
||||||
auto-update-conda: true
|
|
||||||
activate-environment: ""
|
|
||||||
auto-activate-base: false
|
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get -y install zip pandoc man sed
|
sudo apt -y install zip pandoc man sed
|
||||||
python -m pip install -U pip setuptools wheel
|
|
||||||
python -m pip install -U Pyinstaller -r requirements.txt
|
|
||||||
reqs=$(mktemp)
|
|
||||||
cat > $reqs << EOF
|
|
||||||
python=3.10.*
|
|
||||||
pyinstaller
|
|
||||||
cffi
|
|
||||||
brotli-python
|
|
||||||
EOF
|
|
||||||
sed '/^brotli.*/d' requirements.txt >> $reqs
|
|
||||||
mamba create -n build --file $reqs
|
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
|
python devscripts/update_changelog.py -vv
|
||||||
python devscripts/make_lazy_extractors.py
|
python devscripts/make_lazy_extractors.py
|
||||||
- name: Build Unix platform-independent binary
|
- name: Build Unix platform-independent binary
|
||||||
run: |
|
run: |
|
||||||
make all tar
|
make all tar
|
||||||
- name: Build Unix standalone binary
|
|
||||||
shell: bash -l {0}
|
|
||||||
run: |
|
|
||||||
unset LD_LIBRARY_PATH # Harmful; set by setup-python
|
|
||||||
conda activate build
|
|
||||||
python pyinst.py --onedir
|
|
||||||
(cd ./dist/yt-dlp_linux && zip -r ../yt-dlp_linux.zip .)
|
|
||||||
python pyinst.py
|
|
||||||
mv ./dist/yt-dlp_linux ./yt-dlp_linux
|
|
||||||
mv ./dist/yt-dlp_linux.zip ./yt-dlp_linux.zip
|
|
||||||
|
|
||||||
- name: Verify --update-to
|
- name: Verify --update-to
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
run: |
|
run: |
|
||||||
binaries=("yt-dlp" "yt-dlp_linux")
|
chmod +x ./yt-dlp
|
||||||
for binary in "${binaries[@]}"; do
|
cp ./yt-dlp ./yt-dlp_downgraded
|
||||||
chmod +x ./${binary}
|
version="$(./yt-dlp --version)"
|
||||||
cp ./${binary} ./${binary}_downgraded
|
./yt-dlp_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
version="$(./${binary} --version)"
|
downgraded_version="$(./yt-dlp_downgraded --version)"
|
||||||
./${binary}_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
downgraded_version="$(./${binary}_downgraded --version)"
|
|
||||||
[[ "$version" != "$downgraded_version" ]]
|
|
||||||
done
|
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
path: |
|
path: |
|
||||||
yt-dlp
|
yt-dlp
|
||||||
yt-dlp.tar.gz
|
yt-dlp.tar.gz
|
||||||
yt-dlp_linux
|
compression-level: 0
|
||||||
yt-dlp_linux.zip
|
|
||||||
|
linux_static:
|
||||||
|
needs: process
|
||||||
|
if: inputs.linux_static
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Build static executable
|
||||||
|
env:
|
||||||
|
channel: ${{ inputs.channel }}
|
||||||
|
origin: ${{ needs.process.outputs.origin }}
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
run: |
|
||||||
|
mkdir ~/build
|
||||||
|
cd bundle/docker
|
||||||
|
docker compose up --build static
|
||||||
|
sudo chown "${USER}:docker" ~/build/yt-dlp_linux
|
||||||
|
- name: Verify --update-to
|
||||||
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
|
run: |
|
||||||
|
chmod +x ~/build/yt-dlp_linux
|
||||||
|
cp ~/build/yt-dlp_linux ~/build/yt-dlp_linux_downgraded
|
||||||
|
version="$(~/build/yt-dlp_linux --version)"
|
||||||
|
~/build/yt-dlp_linux_downgraded -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
|
downgraded_version="$(~/build/yt-dlp_linux_downgraded --version)"
|
||||||
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
|
- name: Upload artifacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
|
path: |
|
||||||
|
~/build/yt-dlp_linux
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
linux_arm:
|
linux_arm:
|
||||||
|
needs: process
|
||||||
if: inputs.linux_arm
|
if: inputs.linux_arm
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
@ -162,32 +188,35 @@ jobs:
|
|||||||
- aarch64
|
- aarch64
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: ./repo
|
path: ./repo
|
||||||
- name: Virtualized Install, Prepare & Build
|
- name: Virtualized Install, Prepare & Build
|
||||||
uses: yt-dlp/run-on-arch-action@v2
|
uses: yt-dlp/run-on-arch-action@v3
|
||||||
with:
|
with:
|
||||||
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
|
# Ref: https://github.com/uraimo/run-on-arch-action/issues/55
|
||||||
env: |
|
env: |
|
||||||
GITHUB_WORKFLOW: build
|
GITHUB_WORKFLOW: build
|
||||||
githubToken: ${{ github.token }} # To cache image
|
githubToken: ${{ github.token }} # To cache image
|
||||||
arch: ${{ matrix.architecture }}
|
arch: ${{ matrix.architecture }}
|
||||||
distro: ubuntu18.04 # Standalone executable should be built on minimum supported OS
|
distro: ubuntu20.04 # Standalone executable should be built on minimum supported OS
|
||||||
dockerRunArgs: --volume "${PWD}/repo:/repo"
|
dockerRunArgs: --volume "${PWD}/repo:/repo"
|
||||||
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
|
install: | # Installing Python 3.10 from the Deadsnakes repo raises errors
|
||||||
apt update
|
apt update
|
||||||
apt -y install zlib1g-dev python3.8 python3.8-dev python3.8-distutils python3-pip
|
apt -y install zlib1g-dev libffi-dev python3.9 python3.9-dev python3.9-distutils python3-pip \
|
||||||
python3.8 -m pip install -U pip setuptools wheel
|
python3-secretstorage # Cannot build cryptography wheel in virtual armv7 environment
|
||||||
# Cannot access requirements.txt from the repo directory at this stage
|
python3.9 -m pip install -U pip wheel 'setuptools>=71.0.2'
|
||||||
python3.8 -m pip install -U Pyinstaller mutagen pycryptodomex websockets brotli certifi
|
# XXX: Keep this in sync with pyproject.toml (it can't be accessed at this stage) and exclude secretstorage
|
||||||
|
python3.9 -m pip install -U Pyinstaller mutagen pycryptodomex brotli certifi cffi \
|
||||||
|
'requests>=2.32.2,<3' 'urllib3>=1.26.17,<3' 'websockets>=13.0'
|
||||||
|
|
||||||
run: |
|
run: |
|
||||||
cd repo
|
cd repo
|
||||||
python3.8 -m pip install -U Pyinstaller -r requirements.txt # Cached version may be out of date
|
python3.9 devscripts/install_deps.py -o --include build
|
||||||
python3.8 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
python3.9 devscripts/install_deps.py --include pyinstaller # Cached versions may be out of date
|
||||||
python3.8 devscripts/make_lazy_extractors.py
|
python3.9 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
python3.8 pyinst.py
|
python3.9 devscripts/make_lazy_extractors.py
|
||||||
|
python3.9 -m bundle.pyinstaller
|
||||||
|
|
||||||
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
|
if ${{ vars.UPDATE_TO_VERIFICATION && 'true' || 'false' }}; then
|
||||||
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
|
arch="${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}"
|
||||||
@ -200,34 +229,84 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
name: build-bin-linux_${{ matrix.architecture }}
|
||||||
path: | # run-on-arch-action designates armv7l as armv7
|
path: | # run-on-arch-action designates armv7l as armv7
|
||||||
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
|
repo/dist/yt-dlp_linux_${{ (matrix.architecture == 'armv7' && 'armv7l') || matrix.architecture }}
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
macos:
|
macos:
|
||||||
|
needs: process
|
||||||
if: inputs.macos
|
if: inputs.macos
|
||||||
runs-on: macos-11
|
permissions:
|
||||||
|
contents: read
|
||||||
|
actions: write # For cleaning up cache
|
||||||
|
runs-on: macos-13
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
# NB: Building universal2 does not work with python from actions/setup-python
|
# NB: Building universal2 does not work with python from actions/setup-python
|
||||||
|
|
||||||
|
- name: Restore cached requirements
|
||||||
|
id: restore-cache
|
||||||
|
uses: actions/cache/restore@v4
|
||||||
|
env:
|
||||||
|
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 1
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/yt-dlp-build-venv
|
||||||
|
key: cache-reqs-${{ github.job }}-${{ github.ref }}
|
||||||
|
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: |
|
run: |
|
||||||
brew install coreutils
|
brew install coreutils
|
||||||
python3 -m pip install -U --user pip setuptools wheel
|
python3 -m venv ~/yt-dlp-build-venv
|
||||||
|
source ~/yt-dlp-build-venv/bin/activate
|
||||||
|
python3 devscripts/install_deps.py -o --include build
|
||||||
|
python3 devscripts/install_deps.py --print --include pyinstaller > requirements.txt
|
||||||
# We need to ignore wheels otherwise we break universal2 builds
|
# We need to ignore wheels otherwise we break universal2 builds
|
||||||
python3 -m pip install -U --user --no-binary :all: Pyinstaller -r requirements.txt
|
python3 -m pip install -U --no-binary :all: -r requirements.txt
|
||||||
|
# We need to fuse our own universal2 wheels for curl_cffi
|
||||||
|
python3 -m pip install -U 'delocate==0.11.0'
|
||||||
|
mkdir curl_cffi_whls curl_cffi_universal2
|
||||||
|
python3 devscripts/install_deps.py --print -o --include curl-cffi > requirements.txt
|
||||||
|
for platform in "macosx_11_0_arm64" "macosx_11_0_x86_64"; do
|
||||||
|
python3 -m pip download \
|
||||||
|
--only-binary=:all: \
|
||||||
|
--platform "${platform}" \
|
||||||
|
-d curl_cffi_whls \
|
||||||
|
-r requirements.txt
|
||||||
|
done
|
||||||
|
( # Overwrite x86_64-only libs with fat/universal2 libs or else Pyinstaller will do the opposite
|
||||||
|
# See https://github.com/yt-dlp/yt-dlp/pull/10069
|
||||||
|
cd curl_cffi_whls
|
||||||
|
mkdir -p curl_cffi/.dylibs
|
||||||
|
python_libdir=$(python3 -c 'import sys; from pathlib import Path; print(Path(sys.path[1]).parent)')
|
||||||
|
for dylib in lib{ssl,crypto}.3.dylib; do
|
||||||
|
cp "${python_libdir}/${dylib}" "curl_cffi/.dylibs/${dylib}"
|
||||||
|
for wheel in curl_cffi*macos*x86_64.whl; do
|
||||||
|
zip "${wheel}" "curl_cffi/.dylibs/${dylib}"
|
||||||
|
done
|
||||||
|
done
|
||||||
|
)
|
||||||
|
python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/curl_cffi*.whl -w curl_cffi_universal2
|
||||||
|
python3 -m delocate.cmd.delocate_fuse curl_cffi_whls/cffi*.whl -w curl_cffi_universal2
|
||||||
|
for wheel in curl_cffi_universal2/*cffi*.whl; do
|
||||||
|
mv -n -- "${wheel}" "${wheel/x86_64/universal2}"
|
||||||
|
done
|
||||||
|
python3 -m pip install --force-reinstall -U curl_cffi_universal2/*cffi*.whl
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
python3 devscripts/make_lazy_extractors.py
|
python3 devscripts/make_lazy_extractors.py
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
python3 pyinst.py --target-architecture universal2 --onedir
|
source ~/yt-dlp-build-venv/bin/activate
|
||||||
|
python3 -m bundle.pyinstaller --target-architecture universal2 --onedir
|
||||||
(cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
|
(cd ./dist/yt-dlp_macos && zip -r ../yt-dlp_macos.zip .)
|
||||||
python3 pyinst.py --target-architecture universal2
|
python3 -m bundle.pyinstaller --target-architecture universal2
|
||||||
|
|
||||||
- name: Verify --update-to
|
- name: Verify --update-to
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
@ -240,18 +319,36 @@ jobs:
|
|||||||
[[ "$version" != "$downgraded_version" ]]
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
path: |
|
path: |
|
||||||
dist/yt-dlp_macos
|
dist/yt-dlp_macos
|
||||||
dist/yt-dlp_macos.zip
|
dist/yt-dlp_macos.zip
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
|
- name: Cleanup cache
|
||||||
|
if: steps.restore-cache.outputs.cache-hit == 'true'
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
cache_key: cache-reqs-${{ github.job }}-${{ github.ref }}
|
||||||
|
run: |
|
||||||
|
gh cache delete "${cache_key}"
|
||||||
|
|
||||||
|
- name: Cache requirements
|
||||||
|
uses: actions/cache/save@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/yt-dlp-build-venv
|
||||||
|
key: cache-reqs-${{ github.job }}-${{ github.ref }}
|
||||||
|
|
||||||
macos_legacy:
|
macos_legacy:
|
||||||
|
needs: process
|
||||||
if: inputs.macos_legacy
|
if: inputs.macos_legacy
|
||||||
runs-on: macos-latest
|
runs-on: macos-13
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
- name: Install Python
|
- name: Install Python
|
||||||
# We need the official Python, because the GA ones only support newer macOS versions
|
# We need the official Python, because the GA ones only support newer macOS versions
|
||||||
env:
|
env:
|
||||||
@ -261,22 +358,22 @@ jobs:
|
|||||||
# Hack to get the latest patch version. Uncomment if needed
|
# Hack to get the latest patch version. Uncomment if needed
|
||||||
#brew install python@3.10
|
#brew install python@3.10
|
||||||
#export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
|
#export PYTHON_VERSION=$( $(brew --prefix)/opt/python@3.10/bin/python3 --version | cut -d ' ' -f 2 )
|
||||||
curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o "python.pkg"
|
curl "https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg" -o "python.pkg"
|
||||||
sudo installer -pkg python.pkg -target /
|
sudo installer -pkg python.pkg -target /
|
||||||
python3 --version
|
python3 --version
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: |
|
run: |
|
||||||
brew install coreutils
|
brew install coreutils
|
||||||
python3 -m pip install -U --user pip setuptools wheel
|
python3 devscripts/install_deps.py --user -o --include build
|
||||||
python3 -m pip install -U --user Pyinstaller -r requirements.txt
|
python3 devscripts/install_deps.py --user --include pyinstaller
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python3 devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
python3 devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
python3 devscripts/make_lazy_extractors.py
|
python3 devscripts/make_lazy_extractors.py
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
python3 pyinst.py
|
python3 -m bundle.pyinstaller
|
||||||
mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
|
mv dist/yt-dlp_macos dist/yt-dlp_macos_legacy
|
||||||
|
|
||||||
- name: Verify --update-to
|
- name: Verify --update-to
|
||||||
@ -290,41 +387,43 @@ jobs:
|
|||||||
[[ "$version" != "$downgraded_version" ]]
|
[[ "$version" != "$downgraded_version" ]]
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
path: |
|
path: |
|
||||||
dist/yt-dlp_macos_legacy
|
dist/yt-dlp_macos_legacy
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
windows:
|
windows:
|
||||||
|
needs: process
|
||||||
if: inputs.windows
|
if: inputs.windows
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v4
|
- uses: actions/setup-python@v5
|
||||||
with: # 3.8 is used for Win7 support
|
with:
|
||||||
python-version: "3.8"
|
python-version: "3.10"
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
|
run: | # Custom pyinstaller built with https://github.com/yt-dlp/pyinstaller-builds
|
||||||
python -m pip install -U pip setuptools wheel py2exe
|
python devscripts/install_deps.py -o --include build
|
||||||
pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt
|
python devscripts/install_deps.py --include curl-cffi
|
||||||
|
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/x86_64/pyinstaller-6.13.0-py3-none-any.whl"
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
python devscripts/make_lazy_extractors.py
|
python devscripts/make_lazy_extractors.py
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
python setup.py py2exe
|
python -m bundle.pyinstaller
|
||||||
Move-Item ./dist/yt-dlp.exe ./dist/yt-dlp_min.exe
|
python -m bundle.pyinstaller --onedir
|
||||||
python pyinst.py
|
|
||||||
python pyinst.py --onedir
|
|
||||||
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
|
Compress-Archive -Path ./dist/yt-dlp/* -DestinationPath ./dist/yt-dlp_win.zip
|
||||||
|
|
||||||
- name: Verify --update-to
|
- name: Verify --update-to
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
run: |
|
run: |
|
||||||
foreach ($name in @("yt-dlp","yt-dlp_min")) {
|
foreach ($name in @("yt-dlp")) {
|
||||||
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
Copy-Item "./dist/${name}.exe" "./dist/${name}_downgraded.exe"
|
||||||
$version = & "./dist/${name}.exe" --version
|
$version = & "./dist/${name}.exe" --version
|
||||||
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
& "./dist/${name}_downgraded.exe" -v --update-to yt-dlp/yt-dlp@2023.03.04
|
||||||
@ -335,35 +434,38 @@ jobs:
|
|||||||
}
|
}
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
path: |
|
path: |
|
||||||
dist/yt-dlp.exe
|
dist/yt-dlp.exe
|
||||||
dist/yt-dlp_min.exe
|
|
||||||
dist/yt-dlp_win.zip
|
dist/yt-dlp_win.zip
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
windows32:
|
windows32:
|
||||||
|
needs: process
|
||||||
if: inputs.windows32
|
if: inputs.windows32
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@v4
|
- uses: actions/setup-python@v5
|
||||||
with: # 3.7 is used for Vista support. See https://github.com/yt-dlp/yt-dlp/issues/390
|
with:
|
||||||
python-version: "3.7"
|
python-version: "3.10"
|
||||||
architecture: "x86"
|
architecture: "x86"
|
||||||
- name: Install Requirements
|
- name: Install Requirements
|
||||||
run: |
|
run: |
|
||||||
python -m pip install -U pip setuptools wheel
|
python devscripts/install_deps.py -o --include build
|
||||||
pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-5.8.0-py3-none-any.whl" -r requirements.txt
|
python devscripts/install_deps.py
|
||||||
|
python -m pip install -U "https://yt-dlp.github.io/Pyinstaller-Builds/i686/pyinstaller-6.13.0-py3-none-any.whl"
|
||||||
|
|
||||||
- name: Prepare
|
- name: Prepare
|
||||||
run: |
|
run: |
|
||||||
python devscripts/update-version.py -c ${{ inputs.channel }} ${{ inputs.version }}
|
python devscripts/update-version.py -c "${{ inputs.channel }}" -r "${{ needs.process.outputs.origin }}" "${{ inputs.version }}"
|
||||||
python devscripts/make_lazy_extractors.py
|
python devscripts/make_lazy_extractors.py
|
||||||
- name: Build
|
- name: Build
|
||||||
run: |
|
run: |
|
||||||
python pyinst.py
|
python -m bundle.pyinstaller
|
||||||
|
|
||||||
- name: Verify --update-to
|
- name: Verify --update-to
|
||||||
if: vars.UPDATE_TO_VERIFICATION
|
if: vars.UPDATE_TO_VERIFICATION
|
||||||
@ -379,15 +481,19 @@ jobs:
|
|||||||
}
|
}
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
|
name: build-bin-${{ github.job }}
|
||||||
path: |
|
path: |
|
||||||
dist/yt-dlp_x86.exe
|
dist/yt-dlp_x86.exe
|
||||||
|
compression-level: 0
|
||||||
|
|
||||||
meta_files:
|
meta_files:
|
||||||
if: inputs.meta_files && always() && !cancelled()
|
if: always() && !cancelled()
|
||||||
needs:
|
needs:
|
||||||
|
- process
|
||||||
- unix
|
- unix
|
||||||
|
- linux_static
|
||||||
- linux_arm
|
- linux_arm
|
||||||
- macos
|
- macos
|
||||||
- macos_legacy
|
- macos_legacy
|
||||||
@@ -395,19 +501,54 @@ jobs:
  - windows32
  runs-on: ubuntu-latest
  steps:
- - uses: actions/download-artifact@v3
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ path: artifact
+ pattern: build-bin-*
+ merge-multiple: true
  - name: Make SHA2-SUMS files
  run: |
  cd ./artifact/
- sha256sum * > ../SHA2-256SUMS
- sha512sum * > ../SHA2-512SUMS
+ # make sure SHA sums are also printed to stdout
+ sha256sum -- * | tee ../SHA2-256SUMS
+ sha512sum -- * | tee ../SHA2-512SUMS
+ # also print as permanent annotations to the summary page
+ while read -r shasum; do
+ echo "::notice title=${shasum##* }::sha256: ${shasum% *}"
+ done < ../SHA2-256SUMS
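As an aside, the SHA2-256SUMS file produced above uses the usual `sha256sum` "digest, two spaces, filename" format, so a downloaded binary can be checked against it with a few lines of Python. The file names below are illustrative assumptions for the example:

```python
# Illustrative sketch: verifying a downloaded artifact against SHA2-256SUMS.
import hashlib
from pathlib import Path

def verify(binary: Path, sums_file: Path) -> bool:
    expected = {}
    for line in sums_file.read_text().splitlines():
        digest, _, name = line.partition('  ')  # "<hex digest>  <filename>"
        expected[name.strip()] = digest.strip()
    actual = hashlib.sha256(binary.read_bytes()).hexdigest()
    return expected.get(binary.name) == actual

print(verify(Path('yt-dlp'), Path('SHA2-256SUMS')))
```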
  - name: Make Update spec
  run: |
  cat >> _update_spec << EOF
  # This file is used for regulating self-update
- lock 2022.08.18.36 .+ Python 3.6
+ lock 2022.08.18.36 .+ Python 3\.6
+ lock 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+ lock 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
+ lock 2024.10.22 py2exe .+
+ lock 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+ lock 2024.10.22 (?!\w+_exe).+ Python 3\.8
+ lock 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+ lockV2 yt-dlp/yt-dlp 2022.08.18.36 .+ Python 3\.6
+ lockV2 yt-dlp/yt-dlp 2023.11.16 (?!win_x86_exe).+ Python 3\.7
+ lockV2 yt-dlp/yt-dlp 2023.11.16 win_x86_exe .+ Windows-(?:Vista|2008Server)
+ lockV2 yt-dlp/yt-dlp 2024.10.22 py2exe .+
+ lockV2 yt-dlp/yt-dlp 2024.10.22 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+ lockV2 yt-dlp/yt-dlp 2024.10.22 (?!\w+_exe).+ Python 3\.8
+ lockV2 yt-dlp/yt-dlp 2024.10.22 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+ lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 (?!win_x86_exe).+ Python 3\.7
+ lockV2 yt-dlp/yt-dlp-nightly-builds 2023.11.15.232826 win_x86_exe .+ Windows-(?:Vista|2008Server)
+ lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 py2exe .+
+ lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+ lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 (?!\w+_exe).+ Python 3\.8
+ lockV2 yt-dlp/yt-dlp-nightly-builds 2024.10.22.051025 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
+ lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 (?!win_x86_exe).+ Python 3\.7
+ lockV2 yt-dlp/yt-dlp-master-builds 2023.11.15.232812 win_x86_exe .+ Windows-(?:Vista|2008Server)
+ lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.045052 py2exe .+
+ lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 linux_(?:armv7l|aarch64)_exe .+-glibc2\.(?:[12]?\d|30)\b
+ lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 (?!\w+_exe).+ Python 3\.8
+ lockV2 yt-dlp/yt-dlp-master-builds 2024.10.22.060347 win(?:_x86)?_exe Python 3\.[78].+ Windows-(?:7-|2008ServerR2)
  EOF

  - name: Sign checksum files
@@ -421,8 +562,11 @@ jobs:
  done

  - name: Upload artifacts
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
  with:
+ name: build-${{ github.job }}
  path: |
- SHA*SUMS*
  _update_spec
+ SHA*SUMS*
+ compression-level: 0
+ overwrite: true
|
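The `lock`/`lockV2` lines written into `_update_spec` above regulate self-updates: each line pairs a version with a pattern that is matched against a client's variant/system description, and a matching client must not update past that version (`lockV2` lines additionally scope the rule to a source repository). As a rough sketch of that idea only, the real parsing lives in yt-dlp's updater and is not part of this diff, so the function name and exact matching semantics below are assumptions:

```python
import re


def max_allowed_version(update_spec: str, variant_string: str, target_version: str) -> str:
    """Return the newest version this client may update to, per a spec like the one above."""
    def as_tuple(version):
        # '2024.10.22' -> (2024, 10, 22) so versions compare numerically
        return tuple(int(part) for part in version.split('.'))

    allowed = target_version
    for line in update_spec.splitlines():
        if not line.startswith('lock '):  # lockV2 (repo-scoped) lines are ignored in this sketch
            continue
        _, locked_version, pattern = line.split(' ', 2)
        # A client whose variant/system description matches the pattern
        # must not move past the locked version
        if re.fullmatch(pattern, variant_string) and as_tuple(allowed) > as_tuple(locked_version):
            allowed = locked_version
    return allowed


spec = 'lock 2024.10.22 py2exe .+\nlock 2022.08.18.36 .+ Python 3\\.6\n'
# Under these assumptions, a py2exe build asking for 2025.01.01 is held back at 2024.10.22
print(max_allowed_version(spec, 'py2exe Python 3.8 Windows-10', '2025.01.01'))
```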
.github/workflows/codeql.yml (vendored): 8 changes

@@ -29,11 +29,11 @@ jobs:

 steps:
 - name: Checkout repository
-uses: actions/checkout@v3
+uses: actions/checkout@v4

 # Initializes the CodeQL tools for scanning.
 - name: Initialize CodeQL
-uses: github/codeql-action/init@v2
+uses: github/codeql-action/init@v3
 with:
 languages: ${{ matrix.language }}
 # If you wish to specify custom queries, you can do so here or in a config file.
@@ -47,7 +47,7 @@ jobs:
 # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
 # If this step fails, then you should remove it and run the build manually (see below)
 - name: Autobuild
-uses: github/codeql-action/autobuild@v2
+uses: github/codeql-action/autobuild@v3

 # ℹ️ Command-line programs to run using the OS shell.
 # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -60,6 +60,6 @@ jobs:
 # ./location_of_script_within_repo/buildscript.sh

 - name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v2
+uses: github/codeql-action/analyze@v3
 with:
 category: "/language:${{matrix.language}}"
.github/workflows/core.yml (vendored): 53 changes

@@ -1,8 +1,32 @@
 name: Core Tests
-on: [push, pull_request]
+on:
+push:
+paths:
+- .github/**
+- devscripts/**
+- test/**
+- yt_dlp/**.py
+- '!yt_dlp/extractor/**.py'
+- yt_dlp/extractor/__init__.py
+- yt_dlp/extractor/common.py
+- yt_dlp/extractor/extractors.py
+pull_request:
+paths:
+- .github/**
+- devscripts/**
+- test/**
+- yt_dlp/**.py
+- '!yt_dlp/extractor/**.py'
+- yt_dlp/extractor/__init__.py
+- yt_dlp/extractor/common.py
+- yt_dlp/extractor/extractors.py
 permissions:
 contents: read

+concurrency:
+group: core-${{ github.event.pull_request.number || github.ref }}
+cancel-in-progress: ${{ github.event_name == 'pull_request' }}
+
 jobs:
 tests:
 name: Core Tests
@@ -12,30 +36,31 @@ jobs:
 fail-fast: false
 matrix:
 os: [ubuntu-latest]
-# CPython 3.11 is in quick-test
-python-version: ['3.8', '3.9', '3.10', '3.12', pypy-3.7, pypy-3.8, pypy-3.10]
-run-tests-ext: [sh]
+# CPython 3.9 is in quick-test
+python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.10]
 include:
 # atleast one of each CPython/PyPy tests must be in windows
 - os: windows-latest
-python-version: '3.7'
-run-tests-ext: bat
+python-version: '3.9'
+- os: windows-latest
+python-version: '3.10'
 - os: windows-latest
 python-version: '3.12'
-run-tests-ext: bat
 - os: windows-latest
-python-version: pypy-3.9
-run-tests-ext: bat
+python-version: '3.13'
+- os: windows-latest
+python-version: pypy-3.10
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
 - name: Set up Python ${{ matrix.python-version }}
-uses: actions/setup-python@v4
+uses: actions/setup-python@v5
 with:
 python-version: ${{ matrix.python-version }}
-- name: Install pytest
-run: pip install pytest
+- name: Install test requirements
+run: python3 ./devscripts/install_deps.py --include test --include curl-cffi
 - name: Run tests
+timeout-minutes: 15
 continue-on-error: False
 run: |
 python3 -m yt_dlp -v || true # Print debug head
-./devscripts/run_tests.${{ matrix.run-tests-ext }} core
+python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core
.github/workflows/download.yml (vendored): 27 changes

@@ -9,16 +9,16 @@ jobs:
 if: "contains(github.event.head_commit.message, 'ci run dl')"
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
 - name: Set up Python
-uses: actions/setup-python@v4
+uses: actions/setup-python@v5
 with:
 python-version: 3.9
 - name: Install test requirements
-run: pip install pytest
+run: python3 ./devscripts/install_deps.py --include dev
 - name: Run tests
 continue-on-error: true
-run: ./devscripts/run_tests.sh download
+run: python3 ./devscripts/run_tests.py download

 full:
 name: Full Download Tests
@@ -28,24 +28,21 @@ jobs:
 fail-fast: true
 matrix:
 os: [ubuntu-latest]
-python-version: ['3.7', '3.10', '3.12', pypy-3.7, pypy-3.8, pypy-3.10]
-run-tests-ext: [sh]
+python-version: ['3.10', '3.11', '3.12', '3.13', pypy-3.10]
 include:
 # atleast one of each CPython/PyPy tests must be in windows
 - os: windows-latest
-python-version: '3.8'
-run-tests-ext: bat
+python-version: '3.9'
 - os: windows-latest
-python-version: pypy-3.9
-run-tests-ext: bat
+python-version: pypy-3.10
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
 - name: Set up Python ${{ matrix.python-version }}
-uses: actions/setup-python@v4
+uses: actions/setup-python@v5
 with:
 python-version: ${{ matrix.python-version }}
-- name: Install pytest
-run: pip install pytest
+- name: Install test requirements
+run: python3 ./devscripts/install_deps.py --include dev
 - name: Run tests
 continue-on-error: true
-run: ./devscripts/run_tests.${{ matrix.run-tests-ext }} download
+run: python3 ./devscripts/run_tests.py download
.github/workflows/issue-lockdown.yml (vendored, new file): 21 additions

@@ -0,0 +1,21 @@
+name: Issue Lockdown
+on:
+issues:
+types: [opened]
+
+permissions:
+issues: write
+
+jobs:
+lockdown:
+name: Issue Lockdown
+if: vars.ISSUE_LOCKDOWN
+runs-on: ubuntu-latest
+steps:
+- name: "Lock new issue"
+env:
+GH_TOKEN: ${{ github.token }}
+ISSUE_NUMBER: ${{ github.event.issue.number }}
+REPOSITORY: ${{ github.repository }}
+run: |
+gh issue lock "${ISSUE_NUMBER}" -R "${REPOSITORY}"
.github/workflows/publish.yml (vendored, deleted): 97 deletions

@@ -1,97 +0,0 @@
-name: Publish
-on:
-workflow_call:
-inputs:
-channel:
-default: stable
-required: true
-type: string
-version:
-required: true
-type: string
-target_commitish:
-required: true
-type: string
-prerelease:
-default: false
-required: true
-type: boolean
-secrets:
-ARCHIVE_REPO_TOKEN:
-required: false
-
-permissions:
-contents: write
-
-jobs:
-publish:
-runs-on: ubuntu-latest
-
-steps:
-- uses: actions/checkout@v3
-with:
-fetch-depth: 0
-- uses: actions/download-artifact@v3
-- uses: actions/setup-python@v4
-with:
-python-version: "3.10"
-
-- name: Generate release notes
-run: |
-printf '%s' \
-'[]' \
-'(https://github.com/yt-dlp/yt-dlp#installation "Installation instructions") ' \
-'[]' \
-'(https://github.com/yt-dlp/yt-dlp/tree/2023.03.04#readme "Documentation") ' \
-'[]' \
-'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
-'[]' \
-'(https://discord.gg/H5MNcFW63r "Discord") ' \
-${{ inputs.channel != 'nightly' && '"[]" \
-"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\")"' || '' }} \
-> ./RELEASE_NOTES
-printf '\n\n' >> ./RELEASE_NOTES
-cat >> ./RELEASE_NOTES << EOF
-#### A description of the various files are in the [README](https://github.com/yt-dlp/yt-dlp#release-files)
----
-$(python ./devscripts/make_changelog.py -vv --collapsible)
-EOF
-printf '%s\n\n' '**This is an automated nightly pre-release build**' >> ./NIGHTLY_NOTES
-cat ./RELEASE_NOTES >> ./NIGHTLY_NOTES
-printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ inputs.target_commitish }}' >> ./ARCHIVE_NOTES
-cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
-
-- name: Archive nightly release
-env:
-GH_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
-GH_REPO: ${{ vars.ARCHIVE_REPO }}
-if: |
-inputs.channel == 'nightly' && env.GH_TOKEN != '' && env.GH_REPO != ''
-run: |
-gh release create \
---notes-file ARCHIVE_NOTES \
---title "yt-dlp nightly ${{ inputs.version }}" \
-${{ inputs.version }} \
-artifact/*
-
-- name: Prune old nightly release
-if: inputs.channel == 'nightly' && !vars.ARCHIVE_REPO
-env:
-GH_TOKEN: ${{ github.token }}
-run: |
-gh release delete --yes --cleanup-tag "nightly" || true
-git tag --delete "nightly" || true
-sleep 5 # Enough time to cover deletion race condition
-
-- name: Publish release${{ inputs.channel == 'nightly' && ' (nightly)' || '' }}
-env:
-GH_TOKEN: ${{ github.token }}
-if: (inputs.channel == 'nightly' && !vars.ARCHIVE_REPO) || inputs.channel != 'nightly'
-run: |
-gh release create \
---notes-file ${{ inputs.channel == 'nightly' && 'NIGHTLY_NOTES' || 'RELEASE_NOTES' }} \
---target ${{ inputs.target_commitish }} \
---title "yt-dlp ${{ inputs.channel == 'nightly' && 'nightly ' || '' }}${{ inputs.version }}" \
-${{ inputs.prerelease && '--prerelease' || '' }} \
-${{ inputs.channel == 'nightly' && '"nightly"' || inputs.version }} \
-artifact/*
.github/workflows/quick-test.yml (vendored): 37 changes

@@ -9,27 +9,34 @@ jobs:
 if: "!contains(github.event.head_commit.message, 'ci skip all')"
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
-- name: Set up Python 3.11
+- name: Set up Python 3.9
-uses: actions/setup-python@v4
+uses: actions/setup-python@v5
 with:
-python-version: '3.11'
+python-version: '3.9'
 - name: Install test requirements
-run: pip install pytest pycryptodomex
+run: python3 ./devscripts/install_deps.py -o --include test
 - name: Run tests
+timeout-minutes: 15
 run: |
 python3 -m yt_dlp -v || true
-./devscripts/run_tests.sh core
+python3 ./devscripts/run_tests.py --pytest-args '--reruns 2 --reruns-delay 3.0' core
-flake8:
+check:
-name: Linter
+name: Code check
 if: "!contains(github.event.head_commit.message, 'ci skip all')"
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
-- uses: actions/setup-python@v4
+- uses: actions/setup-python@v5
-- name: Install flake8
-run: pip install flake8
+with:
+python-version: '3.9'
+- name: Install dev dependencies
+run: python3 ./devscripts/install_deps.py -o --include static-analysis
 - name: Make lazy extractors
-run: python devscripts/make_lazy_extractors.py
+run: python3 ./devscripts/make_lazy_extractors.py
-- name: Run flake8
-run: flake8 .
+- name: Run ruff
+run: ruff check --output-format github .
+- name: Run autopep8
+run: autopep8 --diff .
+- name: Check file mode
+run: git ls-files --format="%(objectmode) %(path)" yt_dlp/ | ( ! grep -v "^100644" )
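The new `Check file mode` step relies on a compact shell idiom: `grep -v "^100644"` prints any tracked file whose mode is not a plain non-executable file, and the surrounding `( ! ... )` fails the step whenever anything is printed. Below is a rough Python rendering of the same check, illustrative only (the workflow itself runs the one-line shell command shown above):

```python
import subprocess
import sys

# List tracked files under yt_dlp/ together with their object mode,
# exactly as the workflow step does, and fail if any mode is not 100644.
out = subprocess.run(
    ['git', 'ls-files', '--format=%(objectmode) %(path)', 'yt_dlp/'],
    capture_output=True, text=True, check=True,
).stdout

bad = [line for line in out.splitlines() if not line.startswith('100644')]
if bad:
    print('\n'.join(bad))  # offending files, e.g. accidentally executable modules
    sys.exit(1)
```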
.github/workflows/release-master.yml (vendored, new file): 47 additions

@@ -0,0 +1,47 @@
+name: Release (master)
+on:
+push:
+branches:
+- master
+paths:
+- "yt_dlp/**.py"
+- "!yt_dlp/version.py"
+- "bundle/*.py"
+- "pyproject.toml"
+- "Makefile"
+- ".github/workflows/build.yml"
+concurrency:
+group: release-master
+permissions:
+contents: read
+
+jobs:
+release:
+if: vars.BUILD_MASTER != ''
+uses: ./.github/workflows/release.yml
+with:
+prerelease: true
+source: master
+permissions:
+contents: write
+packages: write # For package cache
+actions: write # For cleaning up cache
+id-token: write # mandatory for trusted publishing
+secrets: inherit
+
+publish_pypi:
+needs: [release]
+if: vars.MASTER_PYPI_PROJECT != ''
+runs-on: ubuntu-latest
+permissions:
+id-token: write # mandatory for trusted publishing
+steps:
+- name: Download artifacts
+uses: actions/download-artifact@v4
+with:
+path: dist
+name: build-pypi
+- name: Publish to PyPI
+uses: pypa/gh-action-pypi-publish@release/v1
+with:
+verbose: true
.github/workflows/release-nightly.yml (vendored): 82 changes

@@ -1,52 +1,60 @@
 name: Release (nightly)
 on:
-push:
-branches:
-- master
-paths:
-- "yt_dlp/**.py"
-- "!yt_dlp/version.py"
-concurrency:
-group: release-nightly
-cancel-in-progress: true
+schedule:
+- cron: '23 23 * * *'
 permissions:
 contents: read

 jobs:
-prepare:
+check_nightly:
 if: vars.BUILD_NIGHTLY != ''
 runs-on: ubuntu-latest
 outputs:
-version: ${{ steps.get_version.outputs.version }}
+commit: ${{ steps.check_for_new_commits.outputs.commit }}

 steps:
-- uses: actions/checkout@v3
-- name: Get version
-id: get_version
+- uses: actions/checkout@v4
+with:
+fetch-depth: 0
+- name: Check for new commits
+id: check_for_new_commits
 run: |
-python devscripts/update-version.py "$(date -u +"%H%M%S")" | grep -Po "version=\d+(\.\d+){3}" >> "$GITHUB_OUTPUT"
+relevant_files=(
+"yt_dlp/*.py"
+':!yt_dlp/version.py'
+"bundle/*.py"
+"pyproject.toml"
+"Makefile"
+".github/workflows/build.yml"
+)
+echo "commit=$(git log --format=%H -1 --since="24 hours ago" -- "${relevant_files[@]}")" | tee "$GITHUB_OUTPUT"

-build:
-needs: prepare
-uses: ./.github/workflows/build.yml
+release:
+needs: [check_nightly]
+if: ${{ needs.check_nightly.outputs.commit }}
+uses: ./.github/workflows/release.yml
 with:
-version: ${{ needs.prepare.outputs.version }}
-channel: nightly
+prerelease: true
+source: nightly
-permissions:
-contents: read
-packages: write # For package cache
-secrets:
-GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}
-
-publish:
-needs: [prepare, build]
-uses: ./.github/workflows/publish.yml
-secrets:
-ARCHIVE_REPO_TOKEN: ${{ secrets.ARCHIVE_REPO_TOKEN }}
 permissions:
 contents: write
-with:
-channel: nightly
-prerelease: true
-version: ${{ needs.prepare.outputs.version }}
-target_commitish: ${{ github.sha }}
+packages: write # For package cache
+actions: write # For cleaning up cache
+id-token: write # mandatory for trusted publishing
+secrets: inherit
+
+publish_pypi:
+needs: [release]
+if: vars.NIGHTLY_PYPI_PROJECT != ''
+runs-on: ubuntu-latest
+permissions:
+id-token: write # mandatory for trusted publishing
+steps:
+- name: Download artifacts
+uses: actions/download-artifact@v4
+with:
+path: dist
+name: build-pypi
+- name: Publish to PyPI
+uses: pypa/gh-action-pypi-publish@release/v1
+with:
+verbose: true
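For readers less familiar with git pathspecs, the `check_nightly` job above boils down to one question: has any commit in the last 24 hours touched a release-relevant file (with `yt_dlp/version.py` excluded via the `:!` pathspec)? A rough Python equivalent of that shell step is sketched below; the helper name and structure are illustrative and not part of the repository:

```python
import subprocess

# Same pathspecs the workflow step passes to `git log`
RELEVANT_PATHS = [
    'yt_dlp/*.py', ':!yt_dlp/version.py', 'bundle/*.py',
    'pyproject.toml', 'Makefile', '.github/workflows/build.yml',
]


def latest_relevant_commit(since='24 hours ago'):
    """Return the newest matching commit hash, or None when there is nothing to build."""
    out = subprocess.run(
        ['git', 'log', '--format=%H', '-1', f'--since={since}', '--', *RELEVANT_PATHS],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    return out or None


if __name__ == '__main__':
    commit = latest_relevant_commit()
    print(f'commit={commit or ""}')  # an empty value means: skip the nightly release
```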
.github/workflows/release.yml (vendored): 379 changes

@@ -1,14 +1,45 @@
 name: Release
 on:
-workflow_dispatch:
+workflow_call:
 inputs:
-version:
-description: Version tag (YYYY.MM.DD[.REV])
+source:
 required: false
 default: ''
 type: string
-channel:
-description: Update channel (stable/nightly/...)
+target:
+required: false
+default: ''
+type: string
+version:
+required: false
+default: ''
+type: string
+prerelease:
+required: false
+default: true
+type: boolean
+workflow_dispatch:
+inputs:
+source:
+description: |
+SOURCE of this release's updates:
+channel, repo, tag, or channel/repo@tag
+(default: <current_repo>)
+required: false
+default: ''
+type: string
+target:
+description: |
+TARGET to publish this release to:
+channel, tag, or channel@tag
+(default: <source> if writable else <current_repo>[@source_tag])
+required: false
+default: ''
+type: string
+version:
+description: |
+VERSION: yyyy.mm.dd[.rev] or rev
+(default: auto-generated)
 required: false
 default: ''
 type: string
@@ -26,52 +57,154 @@ jobs:
 contents: write
 runs-on: ubuntu-latest
 outputs:
-channel: ${{ steps.set_channel.outputs.channel }}
-version: ${{ steps.update_version.outputs.version }}
+channel: ${{ steps.setup_variables.outputs.channel }}
+version: ${{ steps.setup_variables.outputs.version }}
+target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+target_repo_token: ${{ steps.setup_variables.outputs.target_repo_token }}
+target_tag: ${{ steps.setup_variables.outputs.target_tag }}
+pypi_project: ${{ steps.setup_variables.outputs.pypi_project }}
+pypi_suffix: ${{ steps.setup_variables.outputs.pypi_suffix }}
 head_sha: ${{ steps.get_target.outputs.head_sha }}

 steps:
-- uses: actions/checkout@v3
+- uses: actions/checkout@v4
 with:
 fetch-depth: 0

-- uses: actions/setup-python@v4
+- uses: actions/setup-python@v5
 with:
 python-version: "3.10"

-- name: Set channel
-id: set_channel
+- name: Process inputs
+id: process_inputs
 run: |
-CHANNEL="${{ github.repository == 'yt-dlp/yt-dlp' && 'stable' || github.repository }}"
-echo "channel=${{ inputs.channel || '$CHANNEL' }}" > "$GITHUB_OUTPUT"
+cat << EOF
+::group::Inputs
+prerelease=${{ inputs.prerelease }}
+source=${{ inputs.source }}
+target=${{ inputs.target }}
+version=${{ inputs.version }}
+::endgroup::
+EOF
+IFS='@' read -r source_repo source_tag <<<"${{ inputs.source }}"
+IFS='@' read -r target_repo target_tag <<<"${{ inputs.target }}"
+cat << EOF >> "$GITHUB_OUTPUT"
+source_repo=${source_repo}
+source_tag=${source_tag}
+target_repo=${target_repo}
+target_tag=${target_tag}
+EOF

-- name: Update version
-id: update_version
+- name: Setup variables
+id: setup_variables
+env:
+source_repo: ${{ steps.process_inputs.outputs.source_repo }}
+source_tag: ${{ steps.process_inputs.outputs.source_tag }}
+target_repo: ${{ steps.process_inputs.outputs.target_repo }}
+target_tag: ${{ steps.process_inputs.outputs.target_tag }}
 run: |
-REVISION="${{ vars.PUSH_VERSION_COMMIT == '' && '$(date -u +"%H%M%S")' || '' }}"
-REVISION="${{ inputs.prerelease && '$(date -u +"%H%M%S")' || '$REVISION' }}"
-python devscripts/update-version.py ${{ inputs.version || '$REVISION' }} | \
-grep -Po "version=\d+\.\d+\.\d+(\.\d+)?" >> "$GITHUB_OUTPUT"
+# unholy bash monstrosity (sincere apologies)
+fallback_token () {
+if ${{ !secrets.ARCHIVE_REPO_TOKEN }}; then
+echo "::error::Repository access secret ${target_repo_token^^} not found"
+exit 1
+fi
+target_repo_token=ARCHIVE_REPO_TOKEN
+return 0
+}
+
+source_is_channel=0
+[[ "${source_repo}" == 'stable' ]] && source_repo='yt-dlp/yt-dlp'
+if [[ -z "${source_repo}" ]]; then
+source_repo='${{ github.repository }}'
+elif [[ '${{ vars[format('{0}_archive_repo', env.source_repo)] }}' ]]; then
+source_is_channel=1
+source_channel='${{ vars[format('{0}_archive_repo', env.source_repo)] }}'
+elif [[ -z "${source_tag}" && "${source_repo}" != */* ]]; then
+source_tag="${source_repo}"
+source_repo='${{ github.repository }}'
+fi
+resolved_source="${source_repo}"
+if [[ "${source_tag}" ]]; then
+resolved_source="${resolved_source}@${source_tag}"
+elif [[ "${source_repo}" == 'yt-dlp/yt-dlp' ]]; then
+resolved_source='stable'
+fi
+
+revision="${{ (inputs.prerelease || !vars.PUSH_VERSION_COMMIT) && '$(date -u +"%H%M%S")' || '' }}"
+version="$(
+python devscripts/update-version.py \
+-c "${resolved_source}" -r "${{ github.repository }}" ${{ inputs.version || '$revision' }} | \
+grep -Po "version=\K\d+\.\d+\.\d+(\.\d+)?")"
+
+if [[ "${target_repo}" ]]; then
+if [[ -z "${target_tag}" ]]; then
+if [[ '${{ vars[format('{0}_archive_repo', env.target_repo)] }}' ]]; then
+target_tag="${source_tag:-${version}}"
+else
+target_tag="${target_repo}"
+target_repo='${{ github.repository }}'
+fi
+fi
+if [[ "${target_repo}" != '${{ github.repository}}' ]]; then
+target_repo='${{ vars[format('{0}_archive_repo', env.target_repo)] }}'
+target_repo_token='${{ env.target_repo }}_archive_repo_token'
+${{ !!secrets[format('{0}_archive_repo_token', env.target_repo)] }} || fallback_token
+pypi_project='${{ vars[format('{0}_pypi_project', env.target_repo)] }}'
+pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.target_repo)] }}'
+fi
+else
+target_tag="${source_tag:-${version}}"
+if ((source_is_channel)); then
+target_repo="${source_channel}"
+target_repo_token='${{ env.source_repo }}_archive_repo_token'
+${{ !!secrets[format('{0}_archive_repo_token', env.source_repo)] }} || fallback_token
+pypi_project='${{ vars[format('{0}_pypi_project', env.source_repo)] }}'
+pypi_suffix='${{ vars[format('{0}_pypi_suffix', env.source_repo)] }}'
+else
+target_repo='${{ github.repository }}'
+fi
+fi
+
+if [[ "${target_repo}" == '${{ github.repository }}' ]] && ${{ !inputs.prerelease }}; then
+pypi_project='${{ vars.PYPI_PROJECT }}'
+fi
+
+echo "::group::Output variables"
+cat << EOF | tee -a "$GITHUB_OUTPUT"
+channel=${resolved_source}
+version=${version}
+target_repo=${target_repo}
+target_repo_token=${target_repo_token}
+target_tag=${target_tag}
+pypi_project=${pypi_project}
+pypi_suffix=${pypi_suffix}
+EOF
+echo "::endgroup::"

 - name: Update documentation
+env:
+version: ${{ steps.setup_variables.outputs.version }}
+target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+if: |
+!inputs.prerelease && env.target_repo == github.repository
 run: |
+python devscripts/update_changelog.py -vv
 make doc
-sed '/### /Q' Changelog.md >> ./CHANGELOG
-echo '### ${{ steps.update_version.outputs.version }}' >> ./CHANGELOG
-python ./devscripts/make_changelog.py -vv -c >> ./CHANGELOG
-echo >> ./CHANGELOG
-grep -Poz '(?s)### \d+\.\d+\.\d+.+' 'Changelog.md' | head -n -1 >> ./CHANGELOG
-cat ./CHANGELOG > Changelog.md

 - name: Push to release
 id: push_release
-if: ${{ !inputs.prerelease }}
+env:
+version: ${{ steps.setup_variables.outputs.version }}
+target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+if: |
+!inputs.prerelease && env.target_repo == github.repository
 run: |
-git config --global user.name github-actions
-git config --global user.email github-actions@example.com
+git config --global user.name "github-actions[bot]"
+git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
 git add -u
-git commit -m "Release ${{ steps.update_version.outputs.version }}" \
--m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all :ci run dl"
+git commit -m "Release ${{ env.version }}" \
+-m "Created by: ${{ github.event.sender.login }}" -m ":ci skip all"
 git push origin --force ${{ github.event.ref }}:release

 - name: Get target commitish
@@ -80,7 +213,10 @@ jobs:
 echo "head_sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"

 - name: Update master
-if: vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease
+env:
+target_repo: ${{ steps.setup_variables.outputs.target_repo }}
+if: |
+vars.PUSH_VERSION_COMMIT != '' && !inputs.prerelease && env.target_repo == github.repository
 run: git push origin ${{ github.event.ref }}

 build:
@@ -89,75 +225,170 @@ jobs:
 with:
 version: ${{ needs.prepare.outputs.version }}
 channel: ${{ needs.prepare.outputs.channel }}
+origin: ${{ needs.prepare.outputs.target_repo }}
 permissions:
 contents: read
 packages: write # For package cache
+actions: write # For cleaning up cache
 secrets:
 GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }}

-publish_pypi_homebrew:
+publish_pypi:
 needs: [prepare, build]
+if: ${{ needs.prepare.outputs.pypi_project }}
 runs-on: ubuntu-latest
+permissions:
+id-token: write # mandatory for trusted publishing

 steps:
-- uses: actions/checkout@v3
-- uses: actions/setup-python@v4
+- uses: actions/checkout@v4
+with:
+fetch-depth: 0
+- uses: actions/setup-python@v5
 with:
 python-version: "3.10"

 - name: Install Requirements
 run: |
-sudo apt-get -y install pandoc man
-python -m pip install -U pip setuptools wheel twine
-python -m pip install -U -r requirements.txt
+sudo apt -y install pandoc man
+python devscripts/install_deps.py -o --include build

 - name: Prepare
-run: |
-python devscripts/update-version.py ${{ needs.prepare.outputs.version }}
-python devscripts/make_lazy_extractors.py
-
-- name: Build and publish on PyPI
 env:
-TWINE_USERNAME: __token__
-TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
-if: env.TWINE_PASSWORD != '' && !inputs.prerelease
+version: ${{ needs.prepare.outputs.version }}
+suffix: ${{ needs.prepare.outputs.pypi_suffix }}
+channel: ${{ needs.prepare.outputs.channel }}
+target_repo: ${{ needs.prepare.outputs.target_repo }}
+pypi_project: ${{ needs.prepare.outputs.pypi_project }}
+run: |
+python devscripts/update-version.py -c "${{ env.channel }}" -r "${{ env.target_repo }}" -s "${{ env.suffix }}" "${{ env.version }}"
+python devscripts/update_changelog.py -vv
+python devscripts/make_lazy_extractors.py
+sed -i -E '0,/(name = ")[^"]+(")/s//\1${{ env.pypi_project }}\2/' pyproject.toml
+
+- name: Build
 run: |
 rm -rf dist/*
 make pypi-files
+printf '%s\n\n' \
+'Official repository: <https://github.com/yt-dlp/yt-dlp>' \
+'**PS**: Some links in this document will not work since this is a copy of the README.md from Github' > ./README.md.new
+cat ./README.md >> ./README.md.new && mv -f ./README.md.new ./README.md
 python devscripts/set-variant.py pip -M "You installed yt-dlp with pip or using the wheel from PyPi; Use that to update"
-python setup.py sdist bdist_wheel
-twine upload dist/*
+make clean-cache
+python -m build --no-isolation .

-- name: Checkout Homebrew repository
-env:
-BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
-PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
-if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease
-uses: actions/checkout@v3
+- name: Upload artifacts
+if: github.event_name != 'workflow_dispatch'
+uses: actions/upload-artifact@v4
 with:
-repository: yt-dlp/homebrew-taps
-path: taps
-ssh-key: ${{ secrets.BREW_TOKEN }}
+name: build-pypi
+path: |
+dist/*
+compression-level: 0

-- name: Update Homebrew Formulae
-env:
-BREW_TOKEN: ${{ secrets.BREW_TOKEN }}
-PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
-if: env.BREW_TOKEN != '' && env.PYPI_TOKEN != '' && !inputs.prerelease
-run: |
-python devscripts/update-formulae.py taps/Formula/yt-dlp.rb "${{ needs.prepare.outputs.version }}"
-git -C taps/ config user.name github-actions
-git -C taps/ config user.email github-actions@example.com
-git -C taps/ commit -am 'yt-dlp: ${{ needs.prepare.outputs.version }}'
-git -C taps/ push
+- name: Publish to PyPI
+if: github.event_name == 'workflow_dispatch'
+uses: pypa/gh-action-pypi-publish@release/v1
+with:
+verbose: true

 publish:
 needs: [prepare, build]
-uses: ./.github/workflows/publish.yml
 permissions:
 contents: write
-with:
-channel: ${{ needs.prepare.outputs.channel }}
-prerelease: ${{ inputs.prerelease }}
-version: ${{ needs.prepare.outputs.version }}
-target_commitish: ${{ needs.prepare.outputs.head_sha }}
+runs-on: ubuntu-latest
+
+steps:
+- uses: actions/checkout@v4
+with:
+fetch-depth: 0
+- uses: actions/download-artifact@v4
+with:
+path: artifact
+pattern: build-*
+merge-multiple: true
+- uses: actions/setup-python@v5
+with:
+python-version: "3.10"
+
+- name: Generate release notes
+env:
+head_sha: ${{ needs.prepare.outputs.head_sha }}
+target_repo: ${{ needs.prepare.outputs.target_repo }}
+target_tag: ${{ needs.prepare.outputs.target_tag }}
+run: |
+printf '%s' \
+'[]' \
+'(https://github.com/${{ github.repository }}#installation "Installation instructions") ' \
+'[]' \
+'(https://discord.gg/H5MNcFW63r "Discord") ' \
+'[]' \
+'(https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators "Donate") ' \
+'[]' \
+'(https://github.com/${{ github.repository }}' \
+'${{ env.target_repo == github.repository && format('/tree/{0}', env.target_tag) || '' }}#readme "Documentation") ' \
+${{ env.target_repo == 'yt-dlp/yt-dlp' && '\
+"[]" \
+"(https://github.com/yt-dlp/yt-dlp-nightly-builds/releases/latest \"Nightly builds\") " \
+"[]" \
+"(https://github.com/yt-dlp/yt-dlp-master-builds/releases/latest \"Master builds\")"' || '' }} > ./RELEASE_NOTES
+printf '\n\n' >> ./RELEASE_NOTES
+cat >> ./RELEASE_NOTES << EOF
+#### A description of the various files is in the [README](https://github.com/${{ github.repository }}#release-files)
+---
+$(python ./devscripts/make_changelog.py -vv --collapsible)
+EOF
+printf '%s\n\n' '**This is a pre-release build**' >> ./PRERELEASE_NOTES
+cat ./RELEASE_NOTES >> ./PRERELEASE_NOTES
+printf '%s\n\n' 'Generated from: https://github.com/${{ github.repository }}/commit/${{ env.head_sha }}' >> ./ARCHIVE_NOTES
+cat ./RELEASE_NOTES >> ./ARCHIVE_NOTES
+
+- name: Publish to archive repo
+env:
+GH_TOKEN: ${{ secrets[needs.prepare.outputs.target_repo_token] }}
+GH_REPO: ${{ needs.prepare.outputs.target_repo }}
+version: ${{ needs.prepare.outputs.version }}
+channel: ${{ needs.prepare.outputs.channel }}
+if: |
+inputs.prerelease && env.GH_TOKEN != '' && env.GH_REPO != '' && env.GH_REPO != github.repository
+run: |
+title="${{ startswith(env.GH_REPO, 'yt-dlp/') && 'yt-dlp ' || '' }}${{ env.channel }}"
+gh release create \
+--notes-file ARCHIVE_NOTES \
+--title "${title} ${{ env.version }}" \
+${{ env.version }} \
+artifact/*
+
+- name: Prune old release
+env:
+GH_TOKEN: ${{ github.token }}
+version: ${{ needs.prepare.outputs.version }}
+target_repo: ${{ needs.prepare.outputs.target_repo }}
+target_tag: ${{ needs.prepare.outputs.target_tag }}
+if: |
+env.target_repo == github.repository && env.target_tag != env.version
+run: |
+gh release delete --yes --cleanup-tag "${{ env.target_tag }}" || true
+git tag --delete "${{ env.target_tag }}" || true
+sleep 5 # Enough time to cover deletion race condition
+
+- name: Publish release
+env:
+GH_TOKEN: ${{ github.token }}
+version: ${{ needs.prepare.outputs.version }}
+target_repo: ${{ needs.prepare.outputs.target_repo }}
+target_tag: ${{ needs.prepare.outputs.target_tag }}
+head_sha: ${{ needs.prepare.outputs.head_sha }}
+if: |
+env.target_repo == github.repository
+run: |
+title="${{ github.repository == 'yt-dlp/yt-dlp' && 'yt-dlp ' || '' }}"
+title+="${{ env.target_tag != env.version && format('{0} ', env.target_tag) || '' }}"
+gh release create \
+--notes-file ${{ inputs.prerelease && 'PRERELEASE_NOTES' || 'RELEASE_NOTES' }} \
+--target ${{ env.head_sha }} \
+--title "${title}${{ env.version }}" \
+${{ inputs.prerelease && '--prerelease' || '' }} \
+${{ env.target_tag }} \
+artifact/*
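The `Process inputs` step above splits the free-form `source`/`target` inputs (documented as "channel, repo, tag, or channel/repo@tag") on the first `@` via `IFS='@' read -r ...`, leaving the resolution of defaults and repository variables to the later `Setup variables` step. A minimal Python sketch of just that split, illustrative only:

```python
def split_on_at(value: str) -> tuple[str, str]:
    """Mimic `IFS='@' read -r repo tag`: part before the first '@', and everything after it."""
    repo, _, tag = value.partition('@')
    return repo, tag


# e.g. 'stable', a plain tag, or an explicit repo@tag all pass through unchanged;
# which part means "channel", "repo", or "tag" is decided by the later shell logic.
assert split_on_at('stable') == ('stable', '')
assert split_on_at('2024.10.22') == ('2024.10.22', '')
assert split_on_at('yt-dlp/yt-dlp@2024.10.22') == ('yt-dlp/yt-dlp', '2024.10.22')
```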
.github/workflows/sanitize-comment.yml (vendored, new file): 17 additions

@@ -0,0 +1,17 @@
+name: Sanitize comment
+
+on:
+issue_comment:
+types: [created, edited]
+
+permissions:
+issues: write
+
+jobs:
+sanitize-comment:
+name: Sanitize comment
+if: vars.SANITIZE_COMMENT && !github.event.issue.pull_request
+runs-on: ubuntu-latest
+steps:
+- name: Sanitize comment
+uses: yt-dlp/sanitize-comment@v1
.gitignore (vendored): 10 changes

@@ -33,6 +33,7 @@ cookies
 *.gif
 *.jpeg
 *.jpg
+*.lrc
 *.m4a
 *.m4v
 *.mhtml
@@ -40,6 +41,7 @@ cookies
 *.mov
 *.mp3
 *.mp4
+*.mpg
 *.mpga
 *.oga
 *.ogg
@@ -47,8 +49,8 @@ cookies
 *.png
 *.sbv
 *.srt
+*.ssa
 *.swf
-*.swp
 *.tt
 *.ttml
 *.url
@@ -64,7 +66,7 @@ cookies
 # Python
 *.pyc
 *.pyo
-.pytest_cache
+.*_cache
 wine-py2exe/
 py2exe.log
 build/
@@ -90,6 +92,7 @@ updates_key.pem
 *.class
 *.isorted
 *.stackdump
+uv.lock

 # Generated
 AUTHORS
@@ -102,6 +105,8 @@ README.txt
 *.zsh
 *.spec
 test/testdata/sigs/player-*.js
+test/testdata/thumbnails/empty.webp
+test/testdata/thumbnails/foo\ %d\ bar/foo_%d.*

 # Binary
 /youtube-dl
@@ -116,6 +121,7 @@ yt-dlp.zip
 .vscode
 *.sublime-*
 *.code-workspace
+*.swp

 # Lazy extractors
 */extractor/lazy_extractors.py
.pre-commit-config.yaml (new file): 14 additions

@@ -0,0 +1,14 @@
+repos:
+- repo: local
+hooks:
+- id: linter
+name: Apply linter fixes
+entry: ruff check --fix .
+language: system
+types: [python]
+require_serial: true
+- id: format
+name: Apply formatting fixes
+entry: autopep8 --in-place .
+language: system
+types: [python]
.pre-commit-hatch.yaml (new file): 9 additions

@@ -0,0 +1,9 @@
+repos:
+- repo: local
+hooks:
+- id: fix
+name: Apply code fixes
+entry: hatch fmt
+language: system
+types: [python]
+require_serial: true
CONTRIBUTING.md: 133 changes

@@ -37,14 +37,18 @@ Bugs and suggestions should be reported at: [yt-dlp/yt-dlp/issues](https://githu
 **Please include the full output of yt-dlp when run with `-vU`**, i.e. **add** `-vU` flag to **your command line**, copy the **whole** output and post it in the issue body wrapped in \`\`\` for better formatting. It should look similar to this:
 ```
 $ yt-dlp -vU <your command line>
-[debug] Command-line config: ['-v', 'demo.com']
-[debug] Encodings: locale UTF-8, fs utf-8, out utf-8, pref UTF-8
-[debug] yt-dlp version 2021.09.25 (zip)
-[debug] Python version 3.8.10 (CPython 64bit) - Linux-5.4.0-74-generic-x86_64-with-glibc2.29
-[debug] exe versions: ffmpeg 4.2.4, ffprobe 4.2.4
+[debug] Command-line config: ['-vU', 'https://www.example.com/']
+[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
+[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
+[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
+[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
+[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
 [debug] Proxy map: {}
-Current Build Hash 25cc412d1d3c0725a1f2f5b7e4682f6fb40e6d15f7024e96f7afd572e9919535
-yt-dlp is up to date (2021.09.25)
+[debug] Request Handlers: urllib, requests, websockets, curl_cffi
+[debug] Loaded 1838 extractors
+[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
+Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
+yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
 ...
 ```
 **Do not post screenshots of verbose logs; only plain text is acceptable.**
@@ -79,7 +83,7 @@ Before reporting any issue, type `yt-dlp -U`. This should report that you're up-

 ### Is the issue already documented?

-Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, subcribe to it to be notified when there is any progress. Unless you have something useful to add to the converation, please refrain from commenting.
+Make sure that someone has not already opened the issue you're trying to open. Search at the top of the window or browse the [GitHub Issues](https://github.com/yt-dlp/yt-dlp/search?type=Issues) of this repository. If there is an issue, subscribe to it to be notified when there is any progress. Unless you have something useful to add to the conversation, please refrain from commenting.

 Additionally, it is also helpful to see if the issue has already been documented in the [youtube-dl issue tracker](https://github.com/ytdl-org/youtube-dl/issues). If similar issues have already been reported in youtube-dl (but not in our issue tracker), links to them can be included in your issue report here.

@@ -134,27 +138,59 @@ We follow [youtube-dl's policy](https://github.com/ytdl-org/youtube-dl#can-you-a

 # DEVELOPER INSTRUCTIONS

-Most users do not need to build yt-dlp and can [download the builds](https://github.com/yt-dlp/yt-dlp/releases) or get them via [the other installation methods](README.md#installation).
+Most users do not need to build yt-dlp and can [download the builds](https://github.com/yt-dlp/yt-dlp/releases), get them via [the other installation methods](README.md#installation) or directly run it using `python -m yt_dlp`.

-To run yt-dlp as a developer, you don't need to build anything either. Simply execute
+`yt-dlp` uses [`hatch`](<https://hatch.pypa.io>) as a project management tool.
+You can easily install it using [`pipx`](<https://pipx.pypa.io>) via `pipx install hatch`, or else via `pip` or your package manager of choice. Make sure you are using at least version `1.10.0`, otherwise some functionality might not work as expected.

-python -m yt_dlp
+If you plan on contributing to `yt-dlp`, best practice is to start by running the following command:

-To run the test, simply invoke your favorite test runner, or execute a test file directly; any of the following work:
+```shell
+$ hatch run setup
+```

-python -m unittest discover
-python test/test_download.py
-nosetests
-pytest
+The above command will install a `pre-commit` hook so that required checks/fixes (linting, formatting) will run automatically before each commit. If any code needs to be linted or formatted, then the commit will be blocked and the necessary changes will be made; you should review all edits and re-commit the fixed version.
+
+After this you can use `hatch shell` to enable a virtual environment that has `yt-dlp` and its development dependencies installed.
+
+In addition, the following script commands can be used to run simple tasks such as linting or testing (without having to run `hatch shell` first):
+* `hatch fmt`: Automatically fix linter violations and apply required code formatting changes
+* See `hatch fmt --help` for more info
+* `hatch test`: Run extractor or core tests
+* See `hatch test --help` for more info

 See item 6 of [new extractor tutorial](#adding-support-for-a-new-site) for how to run extractor specific test cases.

+While it is strongly recommended to use `hatch` for yt-dlp development, if you are unable to do so, alternatively you can manually create a virtual environment and use the following commands:
+
+```shell
+# To only install development dependencies:
+$ python -m devscripts.install_deps --include dev
+
+# Or, for an editable install plus dev dependencies:
+$ python -m pip install -e ".[default,dev]"
+
+# To setup the pre-commit hook:
+$ pre-commit install
+
+# To be used in place of `hatch test`:
+$ python -m devscripts.run_tests
+
+# To be used in place of `hatch fmt`:
+$ ruff check --fix .
+$ autopep8 --in-place .
+
+# To only check code instead of applying fixes:
+$ ruff check .
+$ autopep8 --diff .
+```

 If you want to create a build of yt-dlp yourself, you can follow the instructions [here](README.md#compile).


 ## Adding new feature or making overarching changes

-Before you start writing code for implementing a new feature, open an issue explaining your feature request and atleast one use case. This allows the maintainers to decide whether such a feature is desired for the project in the first place, and will provide an avenue to discuss some implementation details. If you open a pull request for a new feature without discussing with us first, do not be surprised when we ask for large changes to the code, or even reject it outright.
+Before you start writing code for implementing a new feature, open an issue explaining your feature request and at least one use case. This allows the maintainers to decide whether such a feature is desired for the project in the first place, and will provide an avenue to discuss some implementation details. If you open a pull request for a new feature without discussing with us first, do not be surprised when we ask for large changes to the code, or even reject it outright.

 The same applies for changes to the documentation, code style, or overarching changes to the architecture

@@ -168,12 +204,16 @@ After you have ensured this site is distributing its content legally, you can fo
 1. [Fork this repository](https://github.com/yt-dlp/yt-dlp/fork)
 1. Check out the source code with:

-git clone git@github.com:YOUR_GITHUB_USERNAME/yt-dlp.git
+```shell
+$ git clone git@github.com:YOUR_GITHUB_USERNAME/yt-dlp.git
+```

 1. Start a new git branch with

-cd yt-dlp
-git checkout -b yourextractor
+```shell
+$ cd yt-dlp
+$ git checkout -b yourextractor
+```

 1. Start with this simple template and save it to `yt_dlp/extractor/yourextractor.py`:

@@ -187,15 +227,21 @@ After you have ensured this site is distributing its content legally, you can fo
 'url': 'https://yourextractor.com/watch/42',
 'md5': 'TODO: md5 sum of the first 10241 bytes of the video file (use --test)',
 'info_dict': {
+# For videos, only the 'id' and 'ext' fields are required to RUN the test:
 'id': '42',
 'ext': 'mp4',
-'title': 'Video title goes here',
-'thumbnail': r're:^https?://.*\.jpg$',
-# TODO more properties, either as:
-# * A value
-# * MD5 checksum; start the string with md5:
-# * A regular expression; start the string with re:
-# * Any Python type, e.g. int or float
+# Then if the test run fails, it will output the missing/incorrect fields.
+# Properties can be added as:
+# * A value, e.g.
+# 'title': 'Video title goes here',
+# * MD5 checksum; start the string with 'md5:', e.g.
+# 'description': 'md5:098f6bcd4621d373cade4e832627b4f6',
+# * A regular expression; start the string with 're:', e.g.
+# 'thumbnail': r're:https?://.*\.jpg$',
+# * A count of elements in a list; start the string with 'count:', e.g.
+# 'tags': 'count:10',
+# * Any Python type, e.g.
+# 'view_count': int,
 }
 }]
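As a side note to the template above: a hypothetical, fully filled-in test entry using those field-check conventions might look like the sketch below. All values are invented for illustration and are not part of the diff; only `id` and `ext` are required to run the test.

```python
_TESTS = [{
    'url': 'https://yourextractor.com/watch/42',
    'info_dict': {
        'id': '42',                                    # required to run the test
        'ext': 'mp4',                                  # required to run the test
        'title': 'Video title goes here',              # plain value: compared for equality
        'description': 'md5:098f6bcd4621d373cade4e832627b4f6',  # 'md5:' prefix: checksum of the field
        'thumbnail': r're:https?://.*\.jpg$',          # 're:' prefix: regular expression
        'tags': 'count:10',                            # 'count:' prefix: length of the list
        'view_count': int,                             # a type: only the type is checked
    },
}]
```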
@ -214,27 +260,33 @@ After you have ensured this site is distributing its content legally, you can fo
|
|||||||
# TODO more properties (see yt_dlp/extractor/common.py)
|
# TODO more properties (see yt_dlp/extractor/common.py)
|
||||||
}
|
}
|
||||||
```
|
```
|
1. Add an import in [`yt_dlp/extractor/_extractors.py`](yt_dlp/extractor/_extractors.py). Note that the class name must end with `IE`. Also note that when adding a parenthesized import group, the last import in the group must have a trailing comma in order for this formatting to be respected by our code formatter.
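A minimal sketch of what that import can look like (the names are illustrative, and the playlist extractor in the commented-out group form is hypothetical, shown only to illustrate the trailing comma):

```python
# yt_dlp/extractor/_extractors.py (illustrative names)
from .yourextractor import YourExtractorIE

# Alternative form, when one module exposes several extractors; keep the
# trailing comma after the last name so the formatter preserves the group:
# from .yourextractor import (
#     YourExtractorIE,
#     YourExtractorPlaylistIE,
# )
```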
1. Run `hatch test YourExtractor`. This *may fail* at first, but you can continually re-run it until you're done. Upon failure, it will output the missing fields and/or correct values which you can copy. If you decide to add more than one test, the tests will then be named `YourExtractor`, `YourExtractor_1`, `YourExtractor_2`, etc. Note that tests with an `only_matching` key in the test's dict are not included in the count. You can also run all the tests in one go with `YourExtractor_all`
1. Make sure you have at least one test for your extractor. Even if all videos covered by the extractor are expected to be inaccessible for automated testing, tests should still be added with a `skip` parameter indicating why the particular test is disabled from running.
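For instance, a sketch of such a skipped test entry (all values are illustrative):

```python
    _TESTS = [{
        'url': 'https://yourextractor.com/watch/42',
        'info_dict': {
            'id': '42',
            'ext': 'mp4',
            'title': 'Video title goes here',
        },
        # The test stays defined and documented, but is not run automatically:
        'skip': 'Video requires login and cannot be tested automatically',
    }]
```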
1. Have a look at [`yt_dlp/extractor/common.py`](yt_dlp/extractor/common.py) for possible helper methods and a [detailed description of what your extractor should and may return](yt_dlp/extractor/common.py#L119-L440). Add tests and code for as many as you want.
1. Make sure your code follows [yt-dlp coding conventions](#yt-dlp-coding-conventions), passes [ruff](https://docs.astral.sh/ruff/tutorial/#getting-started) code checks and is properly formatted:

```shell
$ hatch fmt --check
```

You can use `hatch fmt` to automatically fix problems. Rules that the linter/formatter enforces should not be disabled with `# noqa` unless a maintainer requests it. The only exception allowed is for old/printf-style string formatting in GraphQL query templates (use `# noqa: UP031`).
1. Make sure your code works under all [Python](https://www.python.org/) versions supported by yt-dlp, namely CPython >=3.9 and PyPy >=3.10. Backward compatibility is not required for even older versions of Python.
1. When the tests pass, [add](https://git-scm.com/docs/git-add) the new files, [commit](https://git-scm.com/docs/git-commit) them and [push](https://git-scm.com/docs/git-push) the result, like this:

```shell
$ git add yt_dlp/extractor/_extractors.py
$ git add yt_dlp/extractor/yourextractor.py
$ git commit -m '[yourextractor] Add extractor'
$ git push origin yourextractor
```
1. Finally, [create a pull request](https://help.github.com/articles/creating-a-pull-request). We'll then review and merge it.

In any case, thank you very much for your contributions!
**Tip:** To test extractors that require login information, create a file `test/local_parameters.json` and add `"usenetrc": true` or your `username`&`password` or `cookiefile`/`cookiesfrombrowser` in it:

```json
{
    "username": "your user name",
    "password": "your password"
}
```
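These keys are ordinary yt-dlp options, so (under that assumption) the same dictionary could also be used when driving the extractor programmatically, e.g.:

```python
import yt_dlp

# Illustrative only; use whichever of usenetrc / username+password / cookiefile applies
params = {'username': 'your user name', 'password': 'your password'}
with yt_dlp.YoutubeDL(params) as ydl:
    # Extract metadata for a test URL without downloading the media
    info = ydl.extract_info('https://yourextractor.com/watch/42', download=False)
```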
For extraction to work yt-dlp relies on metadata your extractor extracts and provides to yt-dlp expressed by an [information dictionary](yt_dlp/extractor/common.py#L119-L440) or simply *info dict*. Only the following meta fields in the *info dict* are considered mandatory for a successful extraction process by yt-dlp:

 - `id` (media identifier)
 - `url` (media download URL) or `formats`

The aforementioned metadata fields are the critical data without which extraction does not make any sense. If any of them fail to be extracted, then the extractor is considered broken. All other metadata extraction should be completely non-fatal.

For pornographic sites, an appropriate `age_limit` must also be returned.

The extractor is allowed to return the info dict without url or formats in some special cases if it allows the user to extract useful information with `--ignore-no-formats-error` - e.g. when the video is a live stream that has not started yet.

[Any field](yt_dlp/extractor/common.py#L219-L426) apart from the aforementioned ones is considered **optional**. That means that extraction should be **tolerant** to situations when sources for these fields can potentially be unavailable (even if they are always available at the moment) and **future-proof** in order not to break the extraction of general purpose mandatory fields.
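To make the split concrete, here is a minimal, illustrative sketch of a return value that provides only the mandatory fields (the variable names are placeholders, not part of any real extractor):

```python
        return {
            'id': video_id,    # mandatory: media identifier
            'url': video_url,  # mandatory: media download URL ('formats' may be provided instead)
            # Every other field (title, description, thumbnails, ...) is optional
            # and its extraction must not be fatal
        }
```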
CONTRIBUTORS (266 names added after the existing entries midnightveil, naginatana and Riteo):

1100101
aniolpages
bartbroere
CrendKing
Esokrates
HitomaruKonpaku
LoserFox
peci1
saintliao
shubhexists
SirElderling
almx
elivinsky
starius
TravisDupes
amir16yp
Fymyte
Ganesh910
hashFactory
kclauhk
Kyraminol
lstrojny
middlingphys
NickCis
nicodato
prettykool
S-Aarab
sonmezberkay
TSRBerry
114514ns
agibson-fl
alard
alien-developers
antonkesy
ArnauvGilotra
Arthurszzz
Bibhav48
Bl4Cc4t
boredzo
Caesim404
chkuendig
chtk
Danish-H
dasidiot
diman8
divStar
DmitryScaletta
feederbox826
gmes78
gonzalezjo
hui1601
infanf
jazz1611
jingtra
jkmartindale
johnvictorfs
llistochek
marcdumais
martinxyz
michal-repo
mrmedieval
nbr23
Nicals
Noor-5
NurTasin
pompos02
Pranaxcau
pwaldhauer
RaduManole
RalphORama
rrgomes
ruiminggu
rvsit
sefidel
shmohawk
Snack-X
src-tinkerer
stilor
syntaxsurge
t-nil
ufukk
vista-narvas
x11x
xpadev-net
Xpl0itU
YoshichikaAAA
zhijinwuu
alb
hruzgar
kasper93
leoheitmannruiz
luiso1979
nipotan
Offert4324
sta1us
Tomoka1
trwstin
alexhuot1
clienthax
DaPotato69
emqi
hugohaa
imanoreotwe
JakeFinley96
lostfictions
minamotorin
ocococococ
Podiumnoche
RasmusAntons
roeniss
shoxie007
Szpachlarz
The-MAGI
TuxCoder
voidful
vtexier
WyohKnott
trueauracoral
ASertacAkkaya
axpauls
chilinux
hafeoz
JSubelj
jucor
megumintyan
mgedmin
Niluge-KiWi
peisenwang
TheZ3ro
tippfehlr
varunchopra
DrakoCpp
PatrykMis
DinhHuy2010
exterrestris
harbhim
LeSuisse
DunnesH
iancmy
mokrueger
luvyana
szantnerb
hugepower
scribblemaniac
Codenade
Demon000
Deukhoofd
grqz
hibes
Khaoklong51
kieraneglin
lengzuo
naglis
ndyanx
otovalek
quad
rakslice
sahilsinghss73
tony-hn
xingchensong
BallzCrasher
coreywright
eric321
poyhen
tetra-fox
444995
63427083
allendema
DarkZeros
DTrombett
imranh2
KarboniteKream
mikkovedru
pktiuk
rubyevadestaxes
avagordon01
CounterPillow
JoseAngelB
KBelmin
kesor
MellowKyler
Wesley107772
a13ssandr0
ChocoLZS
doe1080
hugovdev
jshumphrey
julionc
manavchaudhary1
powergold1
Sakura286
SamDecrock
stratus-ss
subrat-lima
gitninja1234
jkruse
xiaomac
wesson09
Crypto90
MutantPiggieGolem1
Sanceilaks
Strkmn
0x9fff00
4ft35t
7x11x13
b5i
cotko
d3d9
Dioarya
finch71
hexahigh
InvalidUsernameException
jixunmoe
knackku
krandor
kvk-2015
lonble
msm595
n10dollar
NecroRomnt
pjrobertson
subsense
test20140
arantius
entourage8
lfavole
mp3butcher
slipinthedove
YoshiTabletopGamer
Arc8ne
benfaerber
chrisellsworth
fries1234
Kenshin9977
MichaelDeBoey
msikma
pedro
pferreir
red-acid
refack
rysson
somini
thedenv
vallovic
arabcoders
mireq
mlabeeb03
1271
CasperMcFadden95
Kicer86
Kiritomo
leeblackc
meGAmeS1
NeonMan
pj47x
troex
WouterGordts
baierjan
GeoffreyFrogeye
Pawka
v3DJG6GL
yozel
brian6932
iednod55
maxbin123
nullpos
Changelog.md (1638 changed lines): file diff suppressed because it is too large.
Collaborators.md:

[](https://github.com/sponsors/coletdjnz)

* Improved plugin architecture
* Rewrote the networking infrastructure, implemented support for `requests`
* YouTube improvements including: age-gate bypass, private playlists, multiple-clients (to avoid throttling) and a lot of under-the-hood improvements
* Added support for new websites YoutubeWebArchive, MainStreaming, PRX, nzherald, Mediaklikk, StarTV etc
* Improved/fixed support for Patreon, panopto, gfycat, itv, pbs, SouthParkDE etc

## [bashonly](https://github.com/bashonly)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* `--cookies-from-browser` support for Firefox containers, external downloader cookie handling overhaul
* Added support for new websites like Dacast, Kick, NBCStations, Triller, VideoKen, Weverse, WrestleUniverse etc
* Improved/fixed support for Anvato, Brightcove, Reddit, SlidesLive, TikTok, Twitter, Vimeo etc

## [Grub4K](https://github.com/Grub4K)

[](https://github.com/sponsors/Grub4K) [](https://ko-fi.com/Grub4K)

* `--update-to`, self-updater rewrite, automated/nightly/master releases
* Reworked internals like `traverse_obj`, various core refactors and bugs fixes
* Implemented proper progress reporting for parallel downloads
* Improved/fixed/added Bundestag, crunchyroll, pr0gramm, Twitter, WrestleUniverse etc

## [sepro](https://github.com/seproDev)

* UX improvements: Warn when ffmpeg is missing, warn when double-clicking exe
* Code cleanup: Remove dead extractors, mark extractors as broken, enable/apply ruff rules
* Improved/fixed/added ArdMediathek, DRTV, Floatplane, MagentaMusik, Naver, Nebula, OnDemandKorea, Vbox7 etc
MANIFEST.in (removed; all 10 lines deleted):

include AUTHORS
include Changelog.md
include LICENSE
include README.md
include completions/*/*
include supportedsites.md
include yt-dlp.1
include requirements.txt
recursive-include devscripts *
recursive-include test *
93
Makefile
93
Makefile
@ -2,29 +2,33 @@ all: lazy-extractors yt-dlp doc pypi-files
|
|||||||
clean: clean-test clean-dist
|
clean: clean-test clean-dist
|
||||||
clean-all: clean clean-cache
|
clean-all: clean clean-cache
|
||||||
completions: completion-bash completion-fish completion-zsh
|
completions: completion-bash completion-fish completion-zsh
|
||||||
doc: README.md CONTRIBUTING.md issuetemplates supportedsites
|
doc: README.md CONTRIBUTING.md CONTRIBUTORS issuetemplates supportedsites
|
||||||
ot: offlinetest
|
ot: offlinetest
|
||||||
tar: yt-dlp.tar.gz
|
tar: yt-dlp.tar.gz
|
||||||
|
|
||||||
# Keep this list in sync with MANIFEST.in
|
# Keep this list in sync with pyproject.toml includes/artifacts
|
||||||
# intended use: when building a source distribution,
|
# intended use: when building a source distribution,
|
||||||
# make pypi-files && python setup.py sdist
|
# make pypi-files && python3 -m build -sn .
|
||||||
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
pypi-files: AUTHORS Changelog.md LICENSE README.md README.txt supportedsites \
|
||||||
completions yt-dlp.1 requirements.txt setup.cfg devscripts/* test/*
|
completions yt-dlp.1 pyproject.toml setup.cfg devscripts/* test/*
|
||||||
|
|
||||||
.PHONY: all clean install test tar pypi-files completions ot offlinetest codetest supportedsites
|
.PHONY: all clean clean-all clean-test clean-dist clean-cache \
|
||||||
|
completions completion-bash completion-fish completion-zsh \
|
||||||
|
doc issuetemplates supportedsites ot offlinetest codetest test \
|
||||||
|
tar pypi-files lazy-extractors install uninstall
|
||||||
|
|
||||||
clean-test:
|
clean-test:
|
||||||
rm -rf test/testdata/sigs/player-*.js tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
rm -rf tmp/ *.annotations.xml *.aria2 *.description *.dump *.frag \
|
||||||
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
*.frag.aria2 *.frag.urls *.info.json *.live_chat.json *.meta *.part* *.tmp *.temp *.unknown_video *.ytdl \
|
||||||
*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 \
|
*.3gp *.ape *.ass *.avi *.desktop *.f4v *.flac *.flv *.gif *.jpeg *.jpg *.lrc *.m4a *.m4v *.mhtml *.mkv *.mov *.mp3 *.mp4 \
|
||||||
*.mp4 *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.swf *.swp *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp
|
*.mpg *.mpga *.oga *.ogg *.opus *.png *.sbv *.srt *.ssa *.swf *.tt *.ttml *.url *.vtt *.wav *.webloc *.webm *.webp \
|
||||||
|
test/testdata/sigs/player-*.js test/testdata/thumbnails/empty.webp "test/testdata/thumbnails/foo %d bar/foo_%d."*
|
||||||
clean-dist:
|
clean-dist:
|
||||||
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
rm -rf yt-dlp.1.temp.md yt-dlp.1 README.txt MANIFEST build/ dist/ .coverage cover/ yt-dlp.tar.gz completions/ \
|
||||||
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS .mailmap
|
yt_dlp/extractor/lazy_extractors.py *.spec CONTRIBUTING.md.tmp yt-dlp yt-dlp.exe yt_dlp.egg-info/ AUTHORS
|
||||||
clean-cache:
|
clean-cache:
|
||||||
find . \( \
|
find . \( \
|
||||||
-type d -name .pytest_cache -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \
|
-type d -name ".*_cache" -o -type d -name __pycache__ -o -name "*.pyc" -o -name "*.class" \
|
||||||
\) -prune -exec rm -rf {} \;
|
\) -prune -exec rm -rf {} \;
|
||||||
|
|
||||||
completion-bash: completions/bash/yt-dlp
|
completion-bash: completions/bash/yt-dlp
|
||||||
@ -37,12 +41,15 @@ BINDIR ?= $(PREFIX)/bin
|
|||||||
MANDIR ?= $(PREFIX)/man
|
MANDIR ?= $(PREFIX)/man
|
||||||
SHAREDIR ?= $(PREFIX)/share
|
SHAREDIR ?= $(PREFIX)/share
|
||||||
PYTHON ?= /usr/bin/env python3
|
PYTHON ?= /usr/bin/env python3
|
||||||
|
GNUTAR ?= tar
|
||||||
|
|
||||||
# set SYSCONFDIR to /etc if PREFIX=/usr or PREFIX=/usr/local
|
# set markdown input format to "markdown-smart" for pandoc version 2+ and to "markdown" for pandoc prior to version 2
|
||||||
SYSCONFDIR = $(shell if [ $(PREFIX) = /usr -o $(PREFIX) = /usr/local ]; then echo /etc; else echo $(PREFIX)/etc; fi)
|
PANDOC_VERSION_CMD = pandoc -v 2>/dev/null | head -n1 | cut -d' ' -f2 | head -c1
|
||||||
|
PANDOC_VERSION != $(PANDOC_VERSION_CMD)
|
||||||
# set markdown input format to "markdown-smart" for pandoc version 2 and to "markdown" for pandoc prior to version 2
|
PANDOC_VERSION ?= $(shell $(PANDOC_VERSION_CMD))
|
||||||
MARKDOWN = $(shell if [ `pandoc -v | head -n1 | cut -d" " -f2 | head -c1` = "2" ]; then echo markdown-smart; else echo markdown; fi)
|
MARKDOWN_CMD = if [ "$(PANDOC_VERSION)" = "1" -o "$(PANDOC_VERSION)" = "0" ]; then echo markdown; else echo markdown-smart; fi
|
||||||
|
MARKDOWN != $(MARKDOWN_CMD)
|
||||||
|
MARKDOWN ?= $(shell $(MARKDOWN_CMD))
|
||||||
|
|
||||||
install: lazy-extractors yt-dlp yt-dlp.1 completions
|
install: lazy-extractors yt-dlp yt-dlp.1 completions
|
||||||
mkdir -p $(DESTDIR)$(BINDIR)
|
mkdir -p $(DESTDIR)$(BINDIR)
|
||||||
@ -64,33 +71,38 @@ uninstall:
|
|||||||
rm -f $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
|
rm -f $(DESTDIR)$(SHAREDIR)/fish/vendor_completions.d/yt-dlp.fish
|
||||||
|
|
||||||
codetest:
|
codetest:
|
||||||
flake8 .
|
ruff check .
|
||||||
|
autopep8 --diff .
|
||||||
|
|
||||||
test:
|
test:
|
||||||
$(PYTHON) -m pytest
|
$(PYTHON) -m pytest -Werror
|
||||||
$(MAKE) codetest
|
$(MAKE) codetest
|
||||||
|
|
||||||
offlinetest: codetest
|
offlinetest: codetest
|
||||||
$(PYTHON) -m pytest -k "not download"
|
$(PYTHON) -m pytest -Werror -m "not download"
|
||||||
|
|
||||||
# XXX: This is hard to maintain
|
CODE_FOLDERS_CMD = find yt_dlp -type f -name '__init__.py' | sed 's,/__init__.py,,' | grep -v '/__' | sort
|
||||||
CODE_FOLDERS = yt_dlp yt_dlp/downloader yt_dlp/extractor yt_dlp/postprocessor yt_dlp/compat yt_dlp/compat/urllib yt_dlp/utils yt_dlp/dependencies yt_dlp/networking
|
CODE_FOLDERS != $(CODE_FOLDERS_CMD)
|
||||||
yt-dlp: yt_dlp/*.py yt_dlp/*/*.py
|
CODE_FOLDERS ?= $(shell $(CODE_FOLDERS_CMD))
|
||||||
|
CODE_FILES_CMD = for f in $(CODE_FOLDERS) ; do echo "$$f" | sed 's,$$,/*.py,' ; done
|
||||||
|
CODE_FILES != $(CODE_FILES_CMD)
|
||||||
|
CODE_FILES ?= $(shell $(CODE_FILES_CMD))
|
||||||
|
yt-dlp: $(CODE_FILES)
|
||||||
mkdir -p zip
|
mkdir -p zip
|
||||||
for d in $(CODE_FOLDERS) ; do \
|
for d in $(CODE_FOLDERS) ; do \
|
||||||
mkdir -p zip/$$d ;\
|
mkdir -p zip/$$d ;\
|
||||||
cp -pPR $$d/*.py zip/$$d/ ;\
|
cp -pPR $$d/*.py zip/$$d/ ;\
|
||||||
done
|
done
|
||||||
touch -t 200001010101 zip/yt_dlp/*.py zip/yt_dlp/*/*.py
|
(cd zip && touch -t 200001010101 $(CODE_FILES))
|
||||||
mv zip/yt_dlp/__main__.py zip/
|
mv zip/yt_dlp/__main__.py zip/
|
||||||
cd zip ; zip -q ../yt-dlp yt_dlp/*.py yt_dlp/*/*.py __main__.py
|
(cd zip && zip -q ../yt-dlp $(CODE_FILES) __main__.py)
|
||||||
rm -rf zip
|
rm -rf zip
|
||||||
echo '#!$(PYTHON)' > yt-dlp
|
echo '#!$(PYTHON)' > yt-dlp
|
||||||
cat yt-dlp.zip >> yt-dlp
|
cat yt-dlp.zip >> yt-dlp
|
||||||
rm yt-dlp.zip
|
rm yt-dlp.zip
|
||||||
chmod a+x yt-dlp
|
chmod a+x yt-dlp
|
||||||
|
|
||||||
README.md: yt_dlp/*.py yt_dlp/*/*.py devscripts/make_readme.py
|
README.md: $(CODE_FILES) devscripts/make_readme.py
|
||||||
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py
|
COLUMNS=80 $(PYTHON) yt_dlp/__main__.py --ignore-config --help | $(PYTHON) devscripts/make_readme.py
|
||||||
|
|
||||||
CONTRIBUTING.md: README.md devscripts/make_contributing.py
|
CONTRIBUTING.md: README.md devscripts/make_contributing.py
|
||||||
@ -115,41 +127,48 @@ yt-dlp.1: README.md devscripts/prepare_manpage.py
|
|||||||
pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
|
pandoc -s -f $(MARKDOWN) -t man yt-dlp.1.temp.md -o yt-dlp.1
|
||||||
rm -f yt-dlp.1.temp.md
|
rm -f yt-dlp.1.temp.md
|
||||||
|
|
||||||
completions/bash/yt-dlp: yt_dlp/*.py yt_dlp/*/*.py devscripts/bash-completion.in
|
completions/bash/yt-dlp: $(CODE_FILES) devscripts/bash-completion.in
|
||||||
mkdir -p completions/bash
|
mkdir -p completions/bash
|
||||||
$(PYTHON) devscripts/bash-completion.py
|
$(PYTHON) devscripts/bash-completion.py
|
||||||
|
|
||||||
completions/zsh/_yt-dlp: yt_dlp/*.py yt_dlp/*/*.py devscripts/zsh-completion.in
|
completions/zsh/_yt-dlp: $(CODE_FILES) devscripts/zsh-completion.in
|
||||||
mkdir -p completions/zsh
|
mkdir -p completions/zsh
|
||||||
$(PYTHON) devscripts/zsh-completion.py
|
$(PYTHON) devscripts/zsh-completion.py
|
||||||
|
|
||||||
completions/fish/yt-dlp.fish: yt_dlp/*.py yt_dlp/*/*.py devscripts/fish-completion.in
|
completions/fish/yt-dlp.fish: $(CODE_FILES) devscripts/fish-completion.in
|
||||||
mkdir -p completions/fish
|
mkdir -p completions/fish
|
||||||
$(PYTHON) devscripts/fish-completion.py
|
$(PYTHON) devscripts/fish-completion.py
|
||||||
|
|
||||||
_EXTRACTOR_FILES = $(shell find yt_dlp/extractor -name '*.py' -and -not -name 'lazy_extractors.py')
|
_EXTRACTOR_FILES_CMD = find yt_dlp/extractor -name '*.py' -and -not -name 'lazy_extractors.py'
|
||||||
|
_EXTRACTOR_FILES != $(_EXTRACTOR_FILES_CMD)
|
||||||
|
_EXTRACTOR_FILES ?= $(shell $(_EXTRACTOR_FILES_CMD))
|
||||||
yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscripts/lazy_load_template.py $(_EXTRACTOR_FILES)
|
yt_dlp/extractor/lazy_extractors.py: devscripts/make_lazy_extractors.py devscripts/lazy_load_template.py $(_EXTRACTOR_FILES)
|
||||||
$(PYTHON) devscripts/make_lazy_extractors.py $@
|
$(PYTHON) devscripts/make_lazy_extractors.py $@
|
||||||
|
|
||||||
yt-dlp.tar.gz: all
|
yt-dlp.tar.gz: all
|
||||||
@tar -czf yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
|
@$(GNUTAR) -czf yt-dlp.tar.gz --transform "s|^|yt-dlp/|" --owner 0 --group 0 \
|
||||||
--exclude '*.DS_Store' \
|
--exclude '*.DS_Store' \
|
||||||
--exclude '*.kate-swp' \
|
--exclude '*.kate-swp' \
|
||||||
--exclude '*.pyc' \
|
--exclude '*.pyc' \
|
||||||
--exclude '*.pyo' \
|
--exclude '*.pyo' \
|
||||||
--exclude '*~' \
|
--exclude '*~' \
|
||||||
--exclude '__pycache__' \
|
--exclude '__pycache__' \
|
||||||
--exclude '.pytest_cache' \
|
--exclude '.*_cache' \
|
||||||
--exclude '.git' \
|
--exclude '.git' \
|
||||||
-- \
|
-- \
|
||||||
README.md supportedsites.md Changelog.md LICENSE \
|
README.md supportedsites.md Changelog.md LICENSE \
|
||||||
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
CONTRIBUTING.md Collaborators.md CONTRIBUTORS AUTHORS \
|
||||||
Makefile MANIFEST.in yt-dlp.1 README.txt completions \
|
Makefile yt-dlp.1 README.txt completions .gitignore \
|
||||||
setup.py setup.cfg yt-dlp yt_dlp requirements.txt \
|
setup.cfg yt-dlp yt_dlp pyproject.toml devscripts test
|
||||||
devscripts test
|
|
||||||
|
|
||||||
AUTHORS: .mailmap
|
AUTHORS: Changelog.md
|
||||||
git shortlog -s -n | cut -f2 | sort > AUTHORS
|
@if [ -d '.git' ] && command -v git > /dev/null ; then \
|
||||||
|
echo 'Generating $@ from git commit history' ; \
|
||||||
|
git shortlog -s -n HEAD | cut -f2 | sort > $@ ; \
|
||||||
|
fi
|
||||||
|
|
||||||
.mailmap:
|
CONTRIBUTORS: Changelog.md
|
||||||
git shortlog -s -e -n | awk '!(out[$$NF]++) { $$1="";sub(/^[ \t]+/,""); print}' > .mailmap
|
@if [ -d '.git' ] && command -v git > /dev/null ; then \
|
||||||
|
echo 'Updating $@ from git commit history' ; \
|
||||||
|
$(PYTHON) devscripts/make_changelog.py -v -c > /dev/null ; \
|
||||||
|
fi
|
||||||
|
0
bundle/__init__.py
Normal file
0
bundle/__init__.py
Normal file
10
bundle/docker/compose.yml
Normal file
10
bundle/docker/compose.yml
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
services:
|
||||||
|
static:
|
||||||
|
build: static
|
||||||
|
environment:
|
||||||
|
channel: ${channel}
|
||||||
|
origin: ${origin}
|
||||||
|
version: ${version}
|
||||||
|
volumes:
|
||||||
|
- ~/build:/build
|
||||||
|
- ../..:/yt-dlp
|
21
bundle/docker/static/Dockerfile
Normal file
21
bundle/docker/static/Dockerfile
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
FROM alpine:3.19 as base
|
||||||
|
|
||||||
|
RUN apk --update add --no-cache \
|
||||||
|
build-base \
|
||||||
|
python3 \
|
||||||
|
pipx \
|
||||||
|
;
|
||||||
|
|
||||||
|
RUN pipx install pyinstaller
|
||||||
|
# Requires above step to prepare the shared venv
|
||||||
|
RUN ~/.local/share/pipx/shared/bin/python -m pip install -U wheel
|
||||||
|
RUN apk --update add --no-cache \
|
||||||
|
scons \
|
||||||
|
patchelf \
|
||||||
|
binutils \
|
||||||
|
;
|
||||||
|
RUN pipx install staticx
|
||||||
|
|
||||||
|
WORKDIR /yt-dlp
|
||||||
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
|
ENTRYPOINT /entrypoint.sh
|
14
bundle/docker/static/entrypoint.sh
Executable file
14
bundle/docker/static/entrypoint.sh
Executable file
@ -0,0 +1,14 @@
|
|||||||
|
#!/bin/ash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
source ~/.local/share/pipx/venvs/pyinstaller/bin/activate
|
||||||
|
python -m devscripts.install_deps -o --include build
|
||||||
|
python -m devscripts.install_deps --include secretstorage --include curl-cffi
|
||||||
|
python -m devscripts.make_lazy_extractors
|
||||||
|
python devscripts/update-version.py -c "${channel}" -r "${origin}" "${version}"
|
||||||
|
python -m bundle.pyinstaller
|
||||||
|
deactivate
|
||||||
|
|
||||||
|
source ~/.local/share/pipx/venvs/staticx/bin/activate
|
||||||
|
staticx /yt-dlp/dist/yt-dlp_linux /build/yt-dlp_linux
|
||||||
|
deactivate
|
13
pyinst.py → bundle/pyinstaller.py
Normal file → Executable file
13
pyinst.py → bundle/pyinstaller.py
Normal file → Executable file
@ -4,7 +4,7 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
import platform
|
import platform
|
||||||
|
|
||||||
@ -36,6 +36,9 @@ def main():
|
|||||||
f'--name={name}',
|
f'--name={name}',
|
||||||
'--icon=devscripts/logo.ico',
|
'--icon=devscripts/logo.ico',
|
||||||
'--upx-exclude=vcruntime140.dll',
|
'--upx-exclude=vcruntime140.dll',
|
||||||
|
# Ref: https://github.com/yt-dlp/yt-dlp/issues/13311
|
||||||
|
# https://github.com/pyinstaller/pyinstaller/issues/9149
|
||||||
|
'--exclude-module=pkg_resources',
|
||||||
'--noconfirm',
|
'--noconfirm',
|
||||||
'--additional-hooks-dir=yt_dlp/__pyinstaller',
|
'--additional-hooks-dir=yt_dlp/__pyinstaller',
|
||||||
*opts,
|
*opts,
|
||||||
@ -68,7 +71,7 @@ def exe(onedir):
|
|||||||
'dist/',
|
'dist/',
|
||||||
onedir and f'{name}/',
|
onedir and f'{name}/',
|
||||||
name,
|
name,
|
||||||
OS_NAME == 'win32' and '.exe'
|
OS_NAME == 'win32' and '.exe',
|
||||||
)))
|
)))
|
||||||
|
|
||||||
|
|
||||||
@ -113,7 +116,7 @@ def windows_set_version(exe, version):
|
|||||||
),
|
),
|
||||||
kids=[
|
kids=[
|
||||||
StringFileInfo([StringTable('040904B0', [
|
StringFileInfo([StringTable('040904B0', [
|
||||||
StringStruct('Comments', 'yt-dlp%s Command Line Interface' % suffix),
|
StringStruct('Comments', f'yt-dlp{suffix} Command Line Interface'),
|
||||||
StringStruct('CompanyName', 'https://github.com/yt-dlp'),
|
StringStruct('CompanyName', 'https://github.com/yt-dlp'),
|
||||||
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
StringStruct('FileDescription', 'yt-dlp%s' % (MACHINE and f' ({MACHINE})')),
|
||||||
StringStruct('FileVersion', version),
|
StringStruct('FileVersion', version),
|
||||||
@ -123,8 +126,8 @@ def windows_set_version(exe, version):
|
|||||||
StringStruct('ProductName', f'yt-dlp{suffix}'),
|
StringStruct('ProductName', f'yt-dlp{suffix}'),
|
||||||
StringStruct(
|
StringStruct(
|
||||||
'ProductVersion', f'{version}{suffix} on Python {platform.python_version()}'),
|
'ProductVersion', f'{version}{suffix} on Python {platform.python_version()}'),
|
||||||
])]), VarFileInfo([VarStruct('Translation', [0, 1200])])
|
])]), VarFileInfo([VarStruct('Translation', [0, 1200])]),
|
||||||
]
|
],
|
||||||
))
|
))
|
||||||
|
|
||||||
|
|
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
|
|||||||
# Empty file needed to make devscripts.utils properly importable from outside
|
|
@ -9,8 +9,8 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||||||
|
|
||||||
import yt_dlp
|
import yt_dlp
|
||||||
|
|
||||||
BASH_COMPLETION_FILE = "completions/bash/yt-dlp"
|
BASH_COMPLETION_FILE = 'completions/bash/yt-dlp'
|
||||||
BASH_COMPLETION_TEMPLATE = "devscripts/bash-completion.in"
|
BASH_COMPLETION_TEMPLATE = 'devscripts/bash-completion.in'
|
||||||
|
|
||||||
|
|
||||||
def build_completion(opt_parser):
|
def build_completion(opt_parser):
|
||||||
@ -21,9 +21,9 @@ def build_completion(opt_parser):
|
|||||||
opts_flag.append(option.get_opt_string())
|
opts_flag.append(option.get_opt_string())
|
||||||
with open(BASH_COMPLETION_TEMPLATE) as f:
|
with open(BASH_COMPLETION_TEMPLATE) as f:
|
||||||
template = f.read()
|
template = f.read()
|
||||||
with open(BASH_COMPLETION_FILE, "w") as f:
|
with open(BASH_COMPLETION_FILE, 'w') as f:
|
||||||
# just using the special char
|
# just using the special char
|
||||||
filled_template = template.replace("{{flags}}", " ".join(opts_flag))
|
filled_template = template.replace('{{flags}}', ' '.join(opts_flag))
|
||||||
f.write(filled_template)
|
f.write(filled_template)
|
||||||
|
|
||||||
|
|
||||||
|
@ -98,5 +98,161 @@
|
|||||||
"action": "add",
|
"action": "add",
|
||||||
"when": "61bdf15fc7400601c3da1aa7a43917310a5bf391",
|
"when": "61bdf15fc7400601c3da1aa7a43917310a5bf391",
|
||||||
"short": "[priority] Security: [[CVE-2023-40581](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40581)] [Prevent RCE when using `--exec` with `%q` on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-42h4-v29r-42qg)\n - The shell escape function is now using `\"\"` instead of `\\\"`.\n - `utils.Popen` has been patched to properly quote commands."
|
"short": "[priority] Security: [[CVE-2023-40581](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40581)] [Prevent RCE when using `--exec` with `%q` on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-42h4-v29r-42qg)\n - The shell escape function is now using `\"\"` instead of `\\\"`.\n - `utils.Popen` has been patched to properly quote commands."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "8a8b54523addf46dfd50ef599761a81bc22362e6",
|
||||||
|
"short": "[rh:requests] Add handler for `requests` HTTP library (#3668)\n\n\tAdds support for HTTPS proxies and persistent connections (keep-alive)",
|
||||||
|
"authors": ["bashonly", "coletdjnz", "Grub4K"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "1d03633c5a1621b9f3a756f0a4f9dc61fab3aeaa",
|
||||||
|
"short": "[priority] **The release channels have been adjusted!**\n\t* [`master`](https://github.com/yt-dlp/yt-dlp-master-builds) builds are made after each push, containing the latest fixes (but also possibly bugs). This was previously the `nightly` channel.\n\t* [`nightly`](https://github.com/yt-dlp/yt-dlp-nightly-builds) builds are now made once a day, if there were any changes."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "f04b5bedad7b281bee9814686bba1762bae092eb",
|
||||||
|
"short": "[priority] Security: [[CVE-2023-46121](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-46121)] Patch [Generic Extractor MITM Vulnerability via Arbitrary Proxy Injection](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-3ch3-jhc6-5r8x)\n\t- Disallow smuggling of arbitrary `http_headers`; extractors now only use specific headers"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "15f22b4880b6b3f71f350c64d70976ae65b9f1ca",
|
||||||
|
"short": "[webvtt] Allow spaces before newlines for CueBlock (#7681)",
|
||||||
|
"authors": ["TSRBerry"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "4ce57d3b873c2887814cbec03d029533e82f7db5",
|
||||||
|
"short": "[ie] Support multi-period MPD streams (#6654)",
|
||||||
|
"authors": ["alard", "pukkandan"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "aa7e9ae4f48276bd5d0173966c77db9484f65a0a",
|
||||||
|
"short": "[ie/xvideos] Support new URL format (#9502)",
|
||||||
|
"authors": ["sta1us"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "22e4dfacb61f62dfbb3eb41b31c7b69ba1059b80"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "e3a3ed8a981d9395c4859b6ef56cd02bc3148db2",
|
||||||
|
"short": "[cleanup:ie] No `from` stdlib imports in extractors",
|
||||||
|
"authors": ["pukkandan"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "9590cc6b4768e190183d7d071a6c78170889116a",
|
||||||
|
"short": "[priority] Security: [[CVE-2024-22423](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-22423)] [Prevent RCE when using `--exec` with `%q` on Windows](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-hjq6-52gw-2g7p)\n - The shell escape function now properly escapes `%`, `\\` and `\\n`.\n - `utils.Popen` has been patched accordingly."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "41ba4a808b597a3afed78c89675a30deb6844450",
|
||||||
|
"short": "[ie/tiktok] Extract via mobile API only if extractor-arg is passed (#9938)",
|
||||||
|
"authors": ["bashonly"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "6e36d17f404556f0e3a43f441c477a71a91877d9"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "beaf832c7a9d57833f365ce18f6115b88071b296",
|
||||||
|
"short": "[ie/soundcloud] Add `formats` extractor-arg (#10004)",
|
||||||
|
"authors": ["bashonly", "Grub4K"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "5c019f6328ad40d66561eac3c4de0b3cd070d0f6",
|
||||||
|
"short": "[cleanup] Misc (#9765)",
|
||||||
|
"authors": ["bashonly", "Grub4K", "seproDev"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "e6a22834df1776ec4e486526f6df2bf53cb7e06f",
|
||||||
|
"short": "[ie/orf:on] Add `prefer_segments_playlist` extractor-arg (#10314)",
|
||||||
|
"authors": ["seproDev"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "6aaf96a3d6e7d0d426e97e11a2fcf52fda00e733",
|
||||||
|
"short": "[priority] Security: [[CVE-2024-38519](https://nvd.nist.gov/vuln/detail/CVE-2024-38519)] [Properly sanitize file-extension to prevent file system modification and RCE](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-79w7-vh3h-8g4j)\n - Unsafe extensions are now blocked from being downloaded"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "6075a029dba70a89675ae1250e7cdfd91f0eba41",
|
||||||
|
"short": "[priority] Security: [[ie/douyutv] Do not use dangerous javascript source/URL](https://github.com/yt-dlp/yt-dlp/security/advisories/GHSA-3v33-3wmw-3785)\n - A dependency on potentially malicious third-party JavaScript code has been removed from the Douyu extractors"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "fb8b7f226d251e521a89b23c415e249e5b788e5c",
|
||||||
|
"short": "[priority] **The minimum *recommended* Python version has been raised to 3.9**\nSince Python 3.8 will reach end-of-life in October 2024, support for it will be dropped soon. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "b31b81d85f00601710d4fac590c3e4efb4133283",
|
||||||
|
"short": "[ci] Rerun failed tests (#11143)",
|
||||||
|
"authors": ["Grub4K"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "a886cf3e900f4a2ec00af705f883539269545609",
|
||||||
|
"short": "[priority] **py2exe is no longer supported**\nThis release's `yt-dlp_min.exe` will be the last, and it's actually a PyInstaller-bundled executable so that yt-dlp users updating their py2exe build with `-U` will be automatically migrated. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10087)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "a886cf3e900f4a2ec00af705f883539269545609",
|
||||||
|
"short": "[priority] **Following this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "87884f15580910e4e0fe0e1db73508debc657471",
|
||||||
|
"short": "[priority] **Beginning with this release, yt-dlp's Python dependencies *must* be installed using the `default` group**\nIf you're installing yt-dlp with pip/pipx or requiring yt-dlp in your own Python project, you'll need to specify `yt-dlp[default]` if you want to also install yt-dlp's optional dependencies (which were previously included by default). [Read more](https://github.com/yt-dlp/yt-dlp/pull/11255)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "d784464399b600ba9516bbcec6286f11d68974dd",
|
||||||
|
"short": "[priority] **The minimum *required* Python version has been raised to 3.9**\nPython 3.8 reached its end-of-life on 2024.10.07, and yt-dlp has now removed support for it. As an unfortunate side effect, the official `yt-dlp.exe` and `yt-dlp_x86.exe` binaries are no longer supported on Windows 7. [Read more](https://github.com/yt-dlp/yt-dlp/issues/10086)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "914af9a0cf51c9a3f74aa88d952bee8334c67511",
|
||||||
|
"short": "Expand paths in `--plugin-dirs` (#11334)",
|
||||||
|
"authors": ["bashonly"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "c29f5a7fae93a08f3cfbb6127b2faa75145b06a0",
|
||||||
|
"short": "[ie/generic] Do not impersonate by default (#11336)",
|
||||||
|
"authors": ["bashonly"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "57212a5f97ce367590aaa5c3e9a135eead8f81f7",
|
||||||
|
"short": "[ie/vimeo] Fix API retries (#11351)",
|
||||||
|
"authors": ["bashonly"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "52c0ffe40ad6e8404d93296f575007b05b04c686",
|
||||||
|
"short": "[priority] **Login with OAuth is no longer supported for YouTube**\nDue to a change made by the site, yt-dlp is no longer able to support OAuth login for YouTube. [Read more](https://github.com/yt-dlp/yt-dlp/issues/11462#issuecomment-2471703090)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "change",
|
||||||
|
"when": "76ac023ff02f06e8c003d104f02a03deeddebdcd",
|
||||||
|
"short": "[ie/youtube:tab] Improve shorts title extraction (#11997)",
|
||||||
|
"authors": ["bashonly", "d3d9"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "add",
|
||||||
|
"when": "88eb1e7a9a2720ac89d653c0d0e40292388823bb",
|
||||||
|
"short": "[priority] **New option `--preset-alias`/`-t` has been added**\nThis provides convenient predefined aliases for common use cases. Available presets include `mp4`, `mp3`, `mkv`, `aac`, and `sleep`. See [the README](https://github.com/yt-dlp/yt-dlp/blob/master/README.md#preset-aliases) for more details."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"action": "remove",
|
||||||
|
"when": "d596824c2f8428362c072518856065070616e348"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
2
devscripts/cli_to_api.py
Normal file → Executable file
2
devscripts/cli_to_api.py
Normal file → Executable file
@ -1,3 +1,5 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
# Allow direct execution
|
# Allow direct execution
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
@ -11,13 +11,12 @@ import codecs
|
|||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
from yt_dlp.aes import aes_encrypt, key_expansion
|
from yt_dlp.aes import aes_encrypt, key_expansion
|
||||||
from yt_dlp.utils import intlist_to_bytes
|
|
||||||
|
|
||||||
secret_msg = b'Secret message goes here'
|
secret_msg = b'Secret message goes here'
|
||||||
|
|
||||||
|
|
||||||
def hex_str(int_list):
|
def hex_str(int_list):
|
||||||
return codecs.encode(intlist_to_bytes(int_list), 'hex')
|
return codecs.encode(bytes(int_list), 'hex')
|
||||||
|
|
||||||
|
|
||||||
def openssl_encode(algo, key, iv):
|
def openssl_encode(algo, key, iv):
|
||||||
|
81
devscripts/install_deps.py
Executable file
81
devscripts/install_deps.py
Executable file
@ -0,0 +1,81 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow execution from anywhere
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from devscripts.tomlparse import parse_toml
|
||||||
|
from devscripts.utils import read_file
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
|
||||||
|
parser = argparse.ArgumentParser(description='Install dependencies for yt-dlp')
|
||||||
|
parser.add_argument(
|
||||||
|
'input', nargs='?', metavar='TOMLFILE', default=Path(__file__).parent.parent / 'pyproject.toml',
|
||||||
|
help='input file (default: %(default)s)')
|
||||||
|
parser.add_argument(
|
||||||
|
'-e', '--exclude', metavar='DEPENDENCY', action='append',
|
||||||
|
help='exclude a dependency')
|
||||||
|
parser.add_argument(
|
||||||
|
'-i', '--include', metavar='GROUP', action='append',
|
||||||
|
help='include an optional dependency group')
|
||||||
|
parser.add_argument(
|
||||||
|
'-o', '--only-optional', action='store_true',
|
||||||
|
help='only install optional dependencies')
|
||||||
|
parser.add_argument(
|
||||||
|
'-p', '--print', action='store_true',
|
||||||
|
help='only print requirements to stdout')
|
||||||
|
parser.add_argument(
|
||||||
|
'-u', '--user', action='store_true',
|
||||||
|
help='install with pip as --user')
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
args = parse_args()
|
||||||
|
project_table = parse_toml(read_file(args.input))['project']
|
||||||
|
recursive_pattern = re.compile(rf'{project_table["name"]}\[(?P<group_name>[\w-]+)\]')
|
||||||
|
optional_groups = project_table['optional-dependencies']
|
||||||
|
excludes = args.exclude or []
|
||||||
|
|
||||||
|
def yield_deps(group):
|
||||||
|
for dep in group:
|
||||||
|
if mobj := recursive_pattern.fullmatch(dep):
|
||||||
|
yield from optional_groups.get(mobj.group('group_name'), [])
|
||||||
|
else:
|
||||||
|
yield dep
|
||||||
|
|
||||||
|
targets = []
|
||||||
|
if not args.only_optional: # `-o` should exclude 'dependencies' and the 'default' group
|
||||||
|
targets.extend(project_table['dependencies'])
|
||||||
|
if 'default' not in excludes: # `--exclude default` should exclude entire 'default' group
|
||||||
|
targets.extend(yield_deps(optional_groups['default']))
|
||||||
|
|
||||||
|
for include in filter(None, map(optional_groups.get, args.include or [])):
|
||||||
|
targets.extend(yield_deps(include))
|
||||||
|
|
||||||
|
targets = [t for t in targets if re.match(r'[\w-]+', t).group(0).lower() not in excludes]
|
||||||
|
|
||||||
|
if args.print:
|
||||||
|
for target in targets:
|
||||||
|
print(target)
|
||||||
|
return
|
||||||
|
|
||||||
|
pip_args = [sys.executable, '-m', 'pip', 'install', '-U']
|
||||||
|
if args.user:
|
||||||
|
pip_args.append('--user')
|
||||||
|
pip_args.extend(targets)
|
||||||
|
|
||||||
|
return subprocess.call(pip_args)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.exit(main())
|
@ -40,22 +40,9 @@ class CommitGroup(enum.Enum):
|
|||||||
return {
|
return {
|
||||||
name: group
|
name: group
|
||||||
for group, names in {
|
for group, names in {
|
||||||
cls.CORE: {
|
|
||||||
'aes',
|
|
||||||
'cache',
|
|
||||||
'compat_utils',
|
|
||||||
'compat',
|
|
||||||
'cookies',
|
|
||||||
'dependencies',
|
|
||||||
'formats',
|
|
||||||
'jsinterp',
|
|
||||||
'outtmpl',
|
|
||||||
'plugins',
|
|
||||||
'update',
|
|
||||||
'utils',
|
|
||||||
},
|
|
||||||
cls.MISC: {
|
cls.MISC: {
|
||||||
'build',
|
'build',
|
||||||
|
'ci',
|
||||||
'cleanup',
|
'cleanup',
|
||||||
'devscripts',
|
'devscripts',
|
||||||
'docs',
|
'docs',
|
||||||
@ -84,14 +71,13 @@ class CommitGroup(enum.Enum):
|
|||||||
def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
|
def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
|
||||||
group, _, subgroup = (group.strip().lower() for group in value.partition('/'))
|
group, _, subgroup = (group.strip().lower() for group in value.partition('/'))
|
||||||
|
|
||||||
result = cls.group_lookup().get(group)
|
if result := cls.group_lookup().get(group):
|
||||||
if not result:
|
return result, subgroup or None
|
||||||
if subgroup:
|
|
||||||
return None, value
|
|
||||||
subgroup = group
|
|
||||||
result = cls.subgroup_lookup().get(subgroup)
|
|
||||||
|
|
||||||
return result, subgroup or None
|
if subgroup:
|
||||||
|
return None, value
|
||||||
|
|
||||||
|
return cls.subgroup_lookup().get(group), group or None
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@ -149,8 +135,7 @@ class Changelog:
|
|||||||
first = False
|
first = False
|
||||||
yield '\n<details><summary><h3>Changelog</h3></summary>\n'
|
yield '\n<details><summary><h3>Changelog</h3></summary>\n'
|
||||||
|
|
||||||
group = groups[item]
|
if group := groups[item]:
|
||||||
if group:
|
|
||||||
yield self.format_module(item.value, group)
|
yield self.format_module(item.value, group)
|
||||||
|
|
||||||
if self._collapsible:
|
if self._collapsible:
|
||||||
@ -236,10 +221,10 @@ class Changelog:
|
|||||||
|
|
||||||
return message if not sep else f'{message}{sep}{rest}'
|
return message if not sep else f'{message}{sep}{rest}'
|
||||||
|
|
||||||
def _format_message_link(self, message, hash):
|
def _format_message_link(self, message, commit_hash):
|
||||||
assert message or hash, 'Improperly defined commit message or override'
|
assert message or commit_hash, 'Improperly defined commit message or override'
|
||||||
message = message if message else hash[:HASH_LENGTH]
|
message = message if message else commit_hash[:HASH_LENGTH]
|
||||||
return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message
|
return f'[{message}]({self.repo_url}/commit/{commit_hash})' if commit_hash else message
|
||||||
|
|
||||||
def _format_issues(self, issues):
|
def _format_issues(self, issues):
|
||||||
return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)
|
return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)
|
||||||
@ -266,7 +251,7 @@ class CommitRange:
|
|||||||
''', re.VERBOSE | re.DOTALL)
|
''', re.VERBOSE | re.DOTALL)
|
||||||
EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
|
EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)
|
||||||
REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
|
REVERT_RE = re.compile(r'(?:\[[^\]]+\]\s+)?(?i:Revert)\s+([\da-f]{40})')
|
||||||
FIXES_RE = re.compile(r'(?i:Fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Revert)\s+([\da-f]{40})')
|
FIXES_RE = re.compile(r'(?i:(?:bug\s*)?fix(?:es)?(?:\s+bugs?)?(?:\s+in|\s+for)?|Improve)\s+([\da-f]{40})')
|
||||||
UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')
|
UPSTREAM_MERGE_RE = re.compile(r'Update to ytdl-commit-([\da-f]+)')
|
||||||
|
|
||||||
def __init__(self, start, end, default_author=None):
|
def __init__(self, start, end, default_author=None):
|
||||||
@ -300,11 +285,16 @@ class CommitRange:
|
|||||||
short = next(lines)
|
short = next(lines)
|
||||||
skip = short.startswith('Release ') or short == '[version] update'
|
skip = short.startswith('Release ') or short == '[version] update'
|
||||||
|
|
||||||
|
fix_commitish = None
|
||||||
|
if match := self.FIXES_RE.search(short):
|
||||||
|
fix_commitish = match.group(1)
|
||||||
|
|
||||||
authors = [default_author] if default_author else []
|
authors = [default_author] if default_author else []
|
||||||
for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
|
for line in iter(lambda: next(lines), self.COMMIT_SEPARATOR):
|
||||||
match = self.AUTHOR_INDICATOR_RE.match(line)
|
if match := self.AUTHOR_INDICATOR_RE.match(line):
|
||||||
if match:
|
|
||||||
authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)
|
authors = sorted(map(str.strip, line[match.end():].split(',')), key=str.casefold)
|
||||||
|
if not fix_commitish and (match := self.FIXES_RE.fullmatch(line)):
|
||||||
|
fix_commitish = match.group(1)
|
||||||
|
|
||||||
commit = Commit(commit_hash, short, authors)
|
commit = Commit(commit_hash, short, authors)
|
||||||
if skip and (self._start or not i):
|
if skip and (self._start or not i):
|
||||||
@ -314,21 +304,17 @@ class CommitRange:
|
|||||||
logger.debug(f'Reached Release commit, breaking: {commit}')
|
logger.debug(f'Reached Release commit, breaking: {commit}')
|
||||||
break
|
break
|
||||||
|
|
||||||
revert_match = self.REVERT_RE.fullmatch(commit.short)
|
if match := self.REVERT_RE.fullmatch(commit.short):
|
||||||
if revert_match:
|
reverts[match.group(1)] = commit
|
||||||
reverts[revert_match.group(1)] = commit
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
fix_match = self.FIXES_RE.search(commit.short)
|
if fix_commitish:
|
||||||
if fix_match:
|
fixes[fix_commitish].append(commit)
|
||||||
commitish = fix_match.group(1)
|
|
||||||
fixes[commitish].append(commit)
|
|
||||||
|
|
||||||
commits[commit.hash] = commit
|
commits[commit.hash] = commit
|
||||||
|
|
||||||
for commitish, revert_commit in reverts.items():
|
for commitish, revert_commit in reverts.items():
|
||||||
reverted = commits.pop(commitish, None)
|
if reverted := commits.pop(commitish, None):
|
||||||
if reverted:
|
|
||||||
logger.debug(f'{commitish} fully reverted {reverted}')
|
logger.debug(f'{commitish} fully reverted {reverted}')
|
||||||
else:
|
else:
|
||||||
commits[revert_commit.hash] = revert_commit
|
commits[revert_commit.hash] = revert_commit
|
||||||
@ -369,7 +355,7 @@ class CommitRange:
                 logger.info(f'CHANGE {self._commits[commit.hash]} -> {commit}')
                 self._commits[commit.hash] = commit
 
-        self._commits = {key: value for key, value in reversed(self._commits.items())}
+        self._commits = dict(reversed(self._commits.items()))
 
     def groups(self):
         group_dict = defaultdict(list)
@ -403,9 +389,9 @@ class CommitRange:
             if not group:
                 if self.EXTRACTOR_INDICATOR_RE.search(commit.short):
                     group = CommitGroup.EXTRACTOR
+                    logger.error(f'Assuming [ie] group for {commit.short!r}')
                 else:
-                    group = CommitGroup.POSTPROCESSOR
-                logger.warning(f'Failed to map {commit.short!r}, selected {group.name.lower()}')
+                    group = CommitGroup.CORE
 
             commit_info = CommitInfo(
                 details, sub_details, message.strip(),
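Unmapped commits now fall back to the extractor group (with an error log) only when the subject looks extractor-related, and to the core group otherwise; a simplified illustration using strings in place of `CommitGroup` members and invented subjects:

```python
import re

EXTRACTOR_INDICATOR_RE = re.compile(r'(?:Fix|Add)\s+Extractors?', re.IGNORECASE)

for short in ('Add extractor for example.com', 'Rework networking stack'):
    group = 'EXTRACTOR' if EXTRACTOR_INDICATOR_RE.search(short) else 'CORE'
    print(f'{short!r} -> {group}')
```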
@ -458,7 +444,31 @@ def get_new_contributors(contributors_path, commits):
     return sorted(new_contributors, key=str.casefold)
 
 
-if __name__ == '__main__':
+def create_changelog(args):
+    logging.basicConfig(
+        datefmt='%Y-%m-%d %H-%M-%S', format='{asctime} | {levelname:<8} | {message}',
+        level=logging.WARNING - 10 * args.verbosity, style='{', stream=sys.stderr)
+
+    commits = CommitRange(None, args.commitish, args.default_author)
+
+    if not args.no_override:
+        if args.override_path.exists():
+            overrides = json.loads(read_file(args.override_path))
+            commits.apply_overrides(overrides)
+        else:
+            logger.warning(f'File {args.override_path.as_posix()} does not exist')
+
+    logger.info(f'Loaded {len(commits)} commits')
+
+    if new_contributors := get_new_contributors(args.contributors_path, commits):
+        if args.contributors:
+            write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
+        logger.info(f'New contributors: {", ".join(new_contributors)}')
+
+    return Changelog(commits.groups(), args.repo, args.collapsible)
+
+
+def create_parser():
     import argparse
 
     parser = argparse.ArgumentParser(
@ -490,27 +500,9 @@ if __name__ == '__main__':
     parser.add_argument(
         '--collapsible', action='store_true',
         help='make changelog collapsible (default: %(default)s)')
-    args = parser.parse_args()
-
-    logging.basicConfig(
-        datefmt='%Y-%m-%d %H-%M-%S', format='{asctime} | {levelname:<8} | {message}',
-        level=logging.WARNING - 10 * args.verbosity, style='{', stream=sys.stderr)
-
-    commits = CommitRange(None, args.commitish, args.default_author)
-
-    if not args.no_override:
-        if args.override_path.exists():
-            overrides = json.loads(read_file(args.override_path))
-            commits.apply_overrides(overrides)
-        else:
-            logger.warning(f'File {args.override_path.as_posix()} does not exist')
-
-    logger.info(f'Loaded {len(commits)} commits')
-
-    new_contributors = get_new_contributors(args.contributors_path, commits)
-    if new_contributors:
-        if args.contributors:
-            write_file(args.contributors_path, '\n'.join(new_contributors) + '\n', mode='a')
-        logger.info(f'New contributors: {", ".join(new_contributors)}')
-
-    print(Changelog(commits.groups(), args.repo, args.collapsible))
+    return parser
+
+
+if __name__ == '__main__':
+    print(create_changelog(create_parser().parse_args()))
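The split into `create_parser()` and `create_changelog()` lets other devscripts reuse the changelog machinery; a hedged sketch, assuming all parser arguments have usable defaults (as the `__main__` block above implies):

```python
# Hypothetical reuse of the refactored helpers from another devscript
from devscripts.make_changelog import create_changelog, create_parser

args = create_parser().parse_args([])  # rely on the parser's defaults
changelog = create_changelog(args)     # a Changelog object; printing it renders the Markdown entry
print(changelog)
```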
|
@ -9,18 +9,15 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 import re
 
-from devscripts.utils import (
-    get_filename_args,
-    read_file,
-    read_version,
-    write_file,
-)
+from devscripts.utils import get_filename_args, read_file, write_file
|
|
||||||
VERBOSE_TMPL = '''
|
VERBOSE = '''
|
||||||
- type: checkboxes
|
- type: checkboxes
|
||||||
id: verbose
|
id: verbose
|
||||||
attributes:
|
attributes:
|
||||||
label: Provide verbose output that clearly demonstrates the problem
|
label: Provide verbose output that clearly demonstrates the problem
|
||||||
|
description: |
|
||||||
|
This is mandatory unless absolutely impossible to provide. If you are unable to provide the output, please explain why.
|
||||||
options:
|
options:
|
||||||
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
- label: Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
|
||||||
required: true
|
required: true
|
||||||
@ -35,19 +32,19 @@ VERBOSE_TMPL = '''
|
|||||||
description: |
|
description: |
|
||||||
It should start like this:
|
It should start like this:
|
||||||
placeholder: |
|
placeholder: |
|
||||||
[debug] Command-line config: ['-vU', 'test:youtube']
|
[debug] Command-line config: ['-vU', 'https://www.youtube.com/watch?v=BaW_jenozKc']
|
||||||
[debug] Portable config "yt-dlp.conf": ['-i']
|
|
||||||
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
[debug] Encodings: locale cp65001, fs utf-8, pref cp65001, out utf-8, error utf-8, screen utf-8
|
||||||
[debug] yt-dlp version %(version)s [9d339c4] (win32_exe)
|
[debug] yt-dlp version nightly@... from yt-dlp/yt-dlp-nightly-builds [1a176d874] (win_exe)
|
||||||
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.22000-SP0
|
[debug] Python 3.10.11 (CPython AMD64 64bit) - Windows-10-10.0.20348-SP0 (OpenSSL 1.1.1t 7 Feb 2023)
|
||||||
[debug] Checking exe version: ffmpeg -bsfs
|
[debug] exe versions: ffmpeg 7.0.2 (setts), ffprobe 7.0.2
|
||||||
[debug] Checking exe version: ffprobe -bsfs
|
[debug] Optional libraries: Cryptodome-3.21.0, brotli-1.1.0, certifi-2024.08.30, curl_cffi-0.5.10, mutagen-1.47.0, requests-2.32.3, sqlite3-3.40.1, urllib3-2.2.3, websockets-13.1
|
||||||
[debug] exe versions: ffmpeg N-106550-g072101bd52-20220410 (fdk,setts), ffprobe N-106624-g391ce570c8-20220415, phantomjs 2.1.1
|
|
||||||
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.06.15, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
|
|
||||||
[debug] Proxy map: {}
|
[debug] Proxy map: {}
|
||||||
|
[debug] Request Handlers: urllib, requests, websockets, curl_cffi
|
||||||
|
[debug] Loaded 1838 extractors
|
||||||
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
|
||||||
Latest version: %(version)s, Current version: %(version)s
|
Latest version: nightly@... from yt-dlp/yt-dlp-nightly-builds
|
||||||
yt-dlp is up to date (%(version)s)
|
yt-dlp is up to date (nightly@... from yt-dlp/yt-dlp-nightly-builds)
|
||||||
|
[youtube] Extracting URL: https://www.youtube.com/watch?v=BaW_jenozKc
|
||||||
<more lines>
|
<more lines>
|
||||||
render: shell
|
render: shell
|
||||||
validations:
|
validations:
|
||||||
@ -55,20 +52,20 @@ VERBOSE_TMPL = '''
|
|||||||
'''.strip()
|
'''.strip()
|
||||||
|
|
||||||
NO_SKIP = '''
|
NO_SKIP = '''
|
||||||
- type: checkboxes
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
label: DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
|
value: |
|
||||||
description: Fill all fields even if you think it is irrelevant for the issue
|
> [!IMPORTANT]
|
||||||
options:
|
> Not providing the required (*) information or removing the template will result in your issue being closed and ignored.
|
||||||
- label: I understand that I will be **blocked** if I *intentionally* remove or skip any mandatory\\* field
|
|
||||||
required: true
|
|
||||||
'''.strip()
|
'''.strip()
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
fields = {'version': read_version(), 'no_skip': NO_SKIP}
|
fields = {
|
||||||
fields['verbose'] = VERBOSE_TMPL % fields
|
'no_skip': NO_SKIP,
|
||||||
fields['verbose_optional'] = re.sub(r'(\n\s+validations:)?\n\s+required: true', '', fields['verbose'])
|
'verbose': VERBOSE,
|
||||||
|
'verbose_optional': re.sub(r'(\n\s+validations:)?\n\s+required: true', '', VERBOSE),
|
||||||
|
}
|
||||||
|
|
||||||
infile, outfile = get_filename_args(has_infile=True)
|
infile, outfile = get_filename_args(has_infile=True)
|
||||||
write_file(outfile, read_file(infile) % fields)
|
write_file(outfile, read_file(infile) % fields)
|
||||||
|
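make_issue_template.py keeps filling the `.github` templates with printf-style fields; the `%(version)s` field is gone and `verbose`/`verbose_optional` are now plain constants. A toy illustration (the template strings here are invented stand-ins):

```python
import re

VERBOSE = 'label: verbose\n  validations:\n    required: true'  # stand-in for the real block
fields = {
    'no_skip': '> [!IMPORTANT] ...',  # stand-in
    'verbose': VERBOSE,
    'verbose_optional': re.sub(r'(\n\s+validations:)?\n\s+required: true', '', VERBOSE),
}
print('%(no_skip)s\n%(verbose_optional)s' % fields)
```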
@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
# Allow direct execution
|
# Allow direct execution
|
||||||
import os
|
import os
|
||||||
import shutil
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
@ -11,6 +10,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||||||
from inspect import getsource
|
from inspect import getsource
|
||||||
|
|
||||||
from devscripts.utils import get_filename_args, read_file, write_file
|
from devscripts.utils import get_filename_args, read_file, write_file
|
||||||
|
from yt_dlp.extractor import import_extractors
|
||||||
|
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
|
||||||
|
from yt_dlp.globals import extractors
|
||||||
|
|
||||||
NO_ATTR = object()
|
NO_ATTR = object()
|
||||||
STATIC_CLASS_PROPERTIES = [
|
STATIC_CLASS_PROPERTIES = [
|
||||||
@ -34,17 +36,12 @@ MODULE_TEMPLATE = read_file('devscripts/lazy_load_template.py')
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
os.environ['YTDLP_NO_PLUGINS'] = 'true'
|
||||||
|
os.environ['YTDLP_NO_LAZY_EXTRACTORS'] = 'true'
|
||||||
|
|
||||||
lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
|
lazy_extractors_filename = get_filename_args(default_outfile='yt_dlp/extractor/lazy_extractors.py')
|
||||||
if os.path.exists(lazy_extractors_filename):
|
|
||||||
os.remove(lazy_extractors_filename)
|
|
||||||
|
|
||||||
_ALL_CLASSES = get_all_ies() # Must be before import
|
import_extractors()
|
||||||
|
|
||||||
import yt_dlp.plugins
|
|
||||||
from yt_dlp.extractor.common import InfoExtractor, SearchInfoExtractor
|
|
||||||
|
|
||||||
# Filter out plugins
|
|
||||||
_ALL_CLASSES = [cls for cls in _ALL_CLASSES if not cls.__module__.startswith(f'{yt_dlp.plugins.PACKAGE_NAME}.')]
|
|
||||||
|
|
||||||
DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
|
DummyInfoExtractor = type('InfoExtractor', (InfoExtractor,), {'IE_NAME': NO_ATTR})
|
||||||
module_src = '\n'.join((
|
module_src = '\n'.join((
|
||||||
@ -52,26 +49,12 @@ def main():
|
|||||||
' _module = None',
|
' _module = None',
|
||||||
*extra_ie_code(DummyInfoExtractor),
|
*extra_ie_code(DummyInfoExtractor),
|
||||||
'\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n pass\n',
|
'\nclass LazyLoadSearchExtractor(LazyLoadExtractor):\n pass\n',
|
||||||
*build_ies(_ALL_CLASSES, (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
|
*build_ies(list(extractors.value.values()), (InfoExtractor, SearchInfoExtractor), DummyInfoExtractor),
|
||||||
))
|
))
|
||||||
|
|
||||||
write_file(lazy_extractors_filename, f'{module_src}\n')
|
write_file(lazy_extractors_filename, f'{module_src}\n')
|
||||||
|
|
||||||
|
|
||||||
def get_all_ies():
|
|
||||||
PLUGINS_DIRNAME = 'ytdlp_plugins'
|
|
||||||
BLOCKED_DIRNAME = f'{PLUGINS_DIRNAME}_blocked'
|
|
||||||
if os.path.exists(PLUGINS_DIRNAME):
|
|
||||||
# os.rename cannot be used, e.g. in Docker. See https://github.com/yt-dlp/yt-dlp/pull/4958
|
|
||||||
shutil.move(PLUGINS_DIRNAME, BLOCKED_DIRNAME)
|
|
||||||
try:
|
|
||||||
from yt_dlp.extractor.extractors import _ALL_CLASSES
|
|
||||||
finally:
|
|
||||||
if os.path.exists(BLOCKED_DIRNAME):
|
|
||||||
shutil.move(BLOCKED_DIRNAME, PLUGINS_DIRNAME)
|
|
||||||
return _ALL_CLASSES
|
|
||||||
|
|
||||||
|
|
||||||
def extra_ie_code(ie, base=None):
|
def extra_ie_code(ie, base=None):
|
||||||
for var in STATIC_CLASS_PROPERTIES:
|
for var in STATIC_CLASS_PROPERTIES:
|
||||||
val = getattr(ie, var)
|
val = getattr(ie, var)
|
||||||
@ -92,7 +75,7 @@ def build_ies(ies, bases, attr_base):
|
|||||||
if ie in ies:
|
if ie in ies:
|
||||||
names.append(ie.__name__)
|
names.append(ie.__name__)
|
||||||
|
|
||||||
yield f'\n_ALL_CLASSES = [{", ".join(names)}]'
|
yield '\n_CLASS_LOOKUP = {%s}' % ', '.join(f'{name!r}: {name}' for name in names)
|
||||||
|
|
||||||
|
|
||||||
def sort_ies(ies, ignored_bases):
|
def sort_ies(ies, ignored_bases):
|
||||||
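For reference, the line emitted into lazy_extractors.py changes from an `_ALL_CLASSES` list to a `_CLASS_LOOKUP` dict literal; the snippet below just evaluates the new format string with invented names:

```python
# Names are illustrative; the real script derives them from the extractor registry
names = ['YoutubeIE', 'VimeoIE']
print('\n_CLASS_LOOKUP = {%s}' % ', '.join(f'{name!r}: {name}' for name in names))
# -> _CLASS_LOOKUP = {'YoutubeIE': YoutubeIE, 'VimeoIE': VimeoIE}
```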
|
@ -51,7 +51,7 @@ PATCHES = (
|
|||||||
),
|
),
|
||||||
( # Headings
|
( # Headings
|
||||||
r'(?m)^ (\w.+\n)( (?=\w))?',
|
r'(?m)^ (\w.+\n)( (?=\w))?',
|
||||||
r'## \1'
|
r'## \1',
|
||||||
),
|
),
|
||||||
( # Fixup `--date` formatting
|
( # Fixup `--date` formatting
|
||||||
rf'(?m)( --date DATE.+({delim}[^\[]+)*)\[.+({delim}.+)*$',
|
rf'(?m)( --date DATE.+({delim}[^\[]+)*)\[.+({delim}.+)*$',
|
||||||
@ -61,26 +61,26 @@ PATCHES = (
|
|||||||
),
|
),
|
||||||
( # Do not split URLs
|
( # Do not split URLs
|
||||||
rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
|
rf'({delim[:-1]})? (?P<label>\[\S+\] )?(?P<url>https?({delim})?:({delim})?/({delim})?/(({delim})?\S+)+)\s',
|
||||||
lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n'))
|
lambda mobj: ''.join((delim, mobj.group('label') or '', re.sub(r'\s+', '', mobj.group('url')), '\n')),
|
||||||
),
|
),
|
||||||
( # Do not split "words"
|
( # Do not split "words"
|
||||||
rf'(?m)({delim}\S+)+$',
|
rf'(?m)({delim}\S+)+$',
|
||||||
lambda mobj: ''.join((delim, mobj.group(0).replace(delim, '')))
|
lambda mobj: ''.join((delim, mobj.group(0).replace(delim, ''))),
|
||||||
),
|
),
|
||||||
( # Allow overshooting last line
|
( # Allow overshooting last line
|
||||||
rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
|
rf'(?m)^(?P<prev>.+)${delim}(?P<current>.+)$(?!{delim})',
|
||||||
lambda mobj: (mobj.group().replace(delim, ' ')
|
lambda mobj: (mobj.group().replace(delim, ' ')
|
||||||
if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
|
if len(mobj.group()) - len(delim) + 1 <= max_width + ALLOWED_OVERSHOOT
|
||||||
else mobj.group())
|
else mobj.group()),
|
||||||
),
|
),
|
||||||
( # Avoid newline when a space is available b/w switch and description
|
( # Avoid newline when a space is available b/w switch and description
|
||||||
DISABLE_PATCH, # This creates issues with prepare_manpage
|
DISABLE_PATCH, # This creates issues with prepare_manpage
|
||||||
r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
|
r'(?m)^(\s{4}-.{%d})(%s)' % (switch_col_width - 6, delim),
|
||||||
r'\1 '
|
r'\1 ',
|
||||||
),
|
),
|
||||||
( # Replace brackets with a Markdown link
|
( # Replace brackets with a Markdown link
|
||||||
r'SponsorBlock API \((http.+)\)',
|
r'SponsorBlock API \((http.+)\)',
|
||||||
r'[SponsorBlock API](\1)'
|
r'[SponsorBlock API](\1)',
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -10,10 +10,21 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 from devscripts.utils import get_filename_args, write_file
 from yt_dlp.extractor import list_extractor_classes
 
+TEMPLATE = '''\
+# Supported sites
+
+Below is a list of all extractors that are currently included with yt-dlp.
+If a site is not listed here, it might still be supported by yt-dlp's embed extraction or generic extractor.
+Not all sites listed here are guaranteed to work; websites are constantly changing and sometimes this breaks yt-dlp's support for them.
+The only reliable way to check if a site is supported is to try it.
+
+{ie_list}
+'''
+
 
 def main():
     out = '\n'.join(ie.description() for ie in list_extractor_classes() if ie.IE_DESC is not False)
-    write_file(get_filename_args(), f'# Supported sites\n{out}\n')
+    write_file(get_filename_args(), TEMPLATE.format(ie_list=out))
 
 
 if __name__ == '__main__':
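The supported-sites page is now rendered from TEMPLATE with a disclaimer header; a small stand-in illustration:

```python
# Trimmed-down stand-in for the TEMPLATE above; the extractor names are examples
TEMPLATE = '# Supported sites\n\n{ie_list}\n'
ie_list = '\n'.join(f' - **{name}**' for name in ('youtube', 'youtube:tab'))
print(TEMPLATE.format(ie_list=ie_list))
```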
|
@ -24,7 +24,7 @@ PREFIX = r'''%yt-dlp(1)
|
|||||||
|
|
||||||
# NAME
|
# NAME
|
||||||
|
|
||||||
yt\-dlp \- A youtube-dl fork with additional features and patches
|
yt\-dlp \- A feature\-rich command\-line audio/video downloader
|
||||||
|
|
||||||
# SYNOPSIS
|
# SYNOPSIS
|
||||||
|
|
||||||
@ -43,6 +43,27 @@ def filter_excluded_sections(readme):
|
|||||||
'', readme)
|
'', readme)
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_code_blocks(readme):
|
||||||
|
current_code_block = None
|
||||||
|
|
||||||
|
for line in readme.splitlines(True):
|
||||||
|
if current_code_block:
|
||||||
|
if line == current_code_block:
|
||||||
|
current_code_block = None
|
||||||
|
yield '\n'
|
||||||
|
else:
|
||||||
|
yield f' {line}'
|
||||||
|
elif line.startswith('```'):
|
||||||
|
current_code_block = line.count('`') * '`' + '\n'
|
||||||
|
yield '\n'
|
||||||
|
else:
|
||||||
|
yield line
|
||||||
|
|
||||||
|
|
||||||
|
def convert_code_blocks(readme):
|
||||||
|
return ''.join(_convert_code_blocks(readme))
|
||||||
|
|
||||||
|
|
||||||
def move_sections(readme):
|
def move_sections(readme):
|
||||||
MOVE_TAG_TEMPLATE = '<!-- MANPAGE: MOVE "%s" SECTION HERE -->'
|
MOVE_TAG_TEMPLATE = '<!-- MANPAGE: MOVE "%s" SECTION HERE -->'
|
||||||
sections = re.findall(r'(?m)^%s$' % (
|
sections = re.findall(r'(?m)^%s$' % (
|
||||||
@ -65,8 +86,10 @@ def move_sections(readme):
|
|||||||
|
|
||||||
def filter_options(readme):
|
def filter_options(readme):
|
||||||
section = re.search(r'(?sm)^# USAGE AND OPTIONS\n.+?(?=^# )', readme).group(0)
|
section = re.search(r'(?sm)^# USAGE AND OPTIONS\n.+?(?=^# )', readme).group(0)
|
||||||
|
section_new = section.replace('*', R'\*')
|
||||||
|
|
||||||
options = '# OPTIONS\n'
|
options = '# OPTIONS\n'
|
||||||
for line in section.split('\n')[1:]:
|
for line in section_new.split('\n')[1:]:
|
||||||
mobj = re.fullmatch(r'''(?x)
|
mobj = re.fullmatch(r'''(?x)
|
||||||
\s{4}(?P<opt>-(?:,\s|[^\s])+)
|
\s{4}(?P<opt>-(?:,\s|[^\s])+)
|
||||||
(?:\s(?P<meta>(?:[^\s]|\s(?!\s))+))?
|
(?:\s(?P<meta>(?:[^\s]|\s(?!\s))+))?
|
||||||
@ -86,7 +109,7 @@ def filter_options(readme):
|
|||||||
return readme.replace(section, options, 1)
|
return readme.replace(section, options, 1)
|
||||||
|
|
||||||
|
|
||||||
TRANSFORM = compose_functions(filter_excluded_sections, move_sections, filter_options)
|
TRANSFORM = compose_functions(filter_excluded_sections, convert_code_blocks, move_sections, filter_options)
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
@ -1,17 +0,0 @@ (deleted file: devscripts/run_tests.bat)
-@setlocal
-@echo off
-cd /d %~dp0..
-
-if ["%~1"]==[""] (
-    set "test_set="test""
-) else if ["%~1"]==["core"] (
-    set "test_set="-m not download""
-) else if ["%~1"]==["download"] (
-    set "test_set="-m "download""
-) else (
-    echo.Invalid test type "%~1". Use "core" ^| "download"
-    exit /b 1
-)
-
-set PYTHONWARNINGS=error
-pytest %test_set%
77 devscripts/run_tests.py (new executable file)
@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+
+import argparse
+import functools
+import os
+import re
+import shlex
+import subprocess
+import sys
+from pathlib import Path
+
+
+fix_test_name = functools.partial(re.compile(r'IE(_all|_\d+)?$').sub, r'\1')
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Run selected yt-dlp tests')
+    parser.add_argument(
+        'test', help='an extractor test, test path, or one of "core" or "download"', nargs='*')
+    parser.add_argument(
+        '-k', help='run a test matching EXPRESSION. Same as "pytest -k"', metavar='EXPRESSION')
+    parser.add_argument(
+        '--pytest-args', help='arguments to passthrough to pytest')
+    return parser.parse_args()
+
+
+def run_tests(*tests, pattern=None, ci=False):
+    # XXX: hatch uses `tests` if no arguments are passed
+    run_core = 'core' in tests or 'tests' in tests or (not pattern and not tests)
+    run_download = 'download' in tests
+
+    pytest_args = args.pytest_args or os.getenv('HATCH_TEST_ARGS', '')
+    arguments = ['pytest', '-Werror', '--tb=short', *shlex.split(pytest_args)]
+    if ci:
+        arguments.append('--color=yes')
+    if pattern:
+        arguments.extend(['-k', pattern])
+    if run_core:
+        arguments.extend(['-m', 'not download'])
+    elif run_download:
+        arguments.extend(['-m', 'download'])
+    else:
+        arguments.extend(
+            test if '/' in test
+            else f'test/test_download.py::TestDownload::test_{fix_test_name(test)}'
+            for test in tests)
+
+    print(f'Running {arguments}', flush=True)
+    try:
+        return subprocess.call(arguments)
+    except FileNotFoundError:
+        pass
+
+    arguments = [sys.executable, '-Werror', '-m', 'unittest']
+    if pattern:
+        arguments.extend(['-k', pattern])
+    if run_core:
+        print('"pytest" needs to be installed to run core tests', file=sys.stderr, flush=True)
+        return 1
+    elif run_download:
+        arguments.append('test.test_download')
+    else:
+        arguments.extend(
+            f'test.test_download.TestDownload.test_{test}' for test in tests)
+
+    print(f'Running {arguments}', flush=True)
+    return subprocess.call(arguments)
+
+
+if __name__ == '__main__':
+    try:
+        args = parse_args()
+
+        os.chdir(Path(__file__).parent.parent)
+        sys.exit(run_tests(*args.test, pattern=args.k, ci=bool(os.getenv('CI'))))
+    except KeyboardInterrupt:
+        pass
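The new runner replaces both shell wrappers below; e.g. `python devscripts/run_tests.py core` maps to `pytest -m 'not download'`, and bare extractor names are rewritten into test IDs via `fix_test_name`. A quick check of that helper (test names are examples):

```python
import functools
import re

# Same helper as defined in run_tests.py above
fix_test_name = functools.partial(re.compile(r'IE(_all|_\d+)?$').sub, r'\1')

print(fix_test_name('YoutubeIE'))      # -> Youtube
print(fix_test_name('YoutubeIE_all'))  # -> Youtube_all
```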
@ -1,14 +0,0 @@ (deleted file: devscripts/run_tests.sh)
-#!/usr/bin/env sh
-
-if [ -z "$1" ]; then
-    test_set='test'
-elif [ "$1" = 'core' ]; then
-    test_set="-m not download"
-elif [ "$1" = 'download' ]; then
-    test_set="-m download"
-else
-    echo 'Invalid test type "'"$1"'". Use "core" | "download"'
-    exit 1
-fi
-
-python3 -bb -Werror -m pytest "$test_set"
@ -30,7 +30,7 @@ def property_setter(name, value):
 opts = parse_options()
 transform = compose_functions(
     property_setter('VARIANT', opts.variant),
-    property_setter('UPDATE_HINT', opts.update_message)
+    property_setter('UPDATE_HINT', opts.update_message),
 )
 
 write_file(VERSION_FILE, transform(read_file(VERSION_FILE)))
|
189 devscripts/tomlparse.py (new executable file)
@ -0,0 +1,189 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
"""
|
||||||
|
Simple parser for spec compliant toml files
|
||||||
|
|
||||||
|
A simple toml parser for files that comply with the spec.
|
||||||
|
Should only be used to parse `pyproject.toml` for `install_deps.py`.
|
||||||
|
|
||||||
|
IMPORTANT: INVALID FILES OR MULTILINE STRINGS ARE NOT SUPPORTED!
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
|
||||||
|
WS = r'(?:[\ \t]*)'
|
||||||
|
STRING_RE = re.compile(r'"(?:\\.|[^\\"\n])*"|\'[^\'\n]*\'')
|
||||||
|
SINGLE_KEY_RE = re.compile(rf'{STRING_RE.pattern}|[A-Za-z0-9_-]+')
|
||||||
|
KEY_RE = re.compile(rf'{WS}(?:{SINGLE_KEY_RE.pattern}){WS}(?:\.{WS}(?:{SINGLE_KEY_RE.pattern}){WS})*')
|
||||||
|
EQUALS_RE = re.compile(rf'={WS}')
|
||||||
|
WS_RE = re.compile(WS)
|
||||||
|
|
||||||
|
_SUBTABLE = rf'(?P<subtable>^\[(?P<is_list>\[)?(?P<path>{KEY_RE.pattern})\]\]?)'
|
||||||
|
EXPRESSION_RE = re.compile(rf'^(?:{_SUBTABLE}|{KEY_RE.pattern}=)', re.MULTILINE)
|
||||||
|
|
||||||
|
LIST_WS_RE = re.compile(rf'{WS}((#[^\n]*)?\n{WS})*')
|
||||||
|
LEFTOVER_VALUE_RE = re.compile(r'[^,}\]\t\n#]+')
|
||||||
|
|
||||||
|
|
||||||
|
def parse_key(value: str):
|
||||||
|
for match in SINGLE_KEY_RE.finditer(value):
|
||||||
|
if match[0][0] == '"':
|
||||||
|
yield json.loads(match[0])
|
||||||
|
elif match[0][0] == '\'':
|
||||||
|
yield match[0][1:-1]
|
||||||
|
else:
|
||||||
|
yield match[0]
|
||||||
|
|
||||||
|
|
||||||
|
def get_target(root: dict, paths: list[str], is_list=False):
|
||||||
|
target = root
|
||||||
|
|
||||||
|
for index, key in enumerate(paths, 1):
|
||||||
|
use_list = is_list and index == len(paths)
|
||||||
|
result = target.get(key)
|
||||||
|
if result is None:
|
||||||
|
result = [] if use_list else {}
|
||||||
|
target[key] = result
|
||||||
|
|
||||||
|
if isinstance(result, dict):
|
||||||
|
target = result
|
||||||
|
elif use_list:
|
||||||
|
target = {}
|
||||||
|
result.append(target)
|
||||||
|
else:
|
||||||
|
target = result[-1]
|
||||||
|
|
||||||
|
assert isinstance(target, dict)
|
||||||
|
return target
|
||||||
|
|
||||||
|
|
||||||
|
def parse_enclosed(data: str, index: int, end: str, ws_re: re.Pattern):
|
||||||
|
index += 1
|
||||||
|
|
||||||
|
if match := ws_re.match(data, index):
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
while data[index] != end:
|
||||||
|
index = yield True, index
|
||||||
|
|
||||||
|
if match := ws_re.match(data, index):
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
if data[index] == ',':
|
||||||
|
index += 1
|
||||||
|
|
||||||
|
if match := ws_re.match(data, index):
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
assert data[index] == end
|
||||||
|
yield False, index + 1
|
||||||
|
|
||||||
|
|
||||||
|
def parse_value(data: str, index: int):
|
||||||
|
if data[index] == '[':
|
||||||
|
result = []
|
||||||
|
|
||||||
|
indices = parse_enclosed(data, index, ']', LIST_WS_RE)
|
||||||
|
valid, index = next(indices)
|
||||||
|
while valid:
|
||||||
|
index, value = parse_value(data, index)
|
||||||
|
result.append(value)
|
||||||
|
valid, index = indices.send(index)
|
||||||
|
|
||||||
|
return index, result
|
||||||
|
|
||||||
|
if data[index] == '{':
|
||||||
|
result = {}
|
||||||
|
|
||||||
|
indices = parse_enclosed(data, index, '}', WS_RE)
|
||||||
|
valid, index = next(indices)
|
||||||
|
while valid:
|
||||||
|
valid, index = indices.send(parse_kv_pair(data, index, result))
|
||||||
|
|
||||||
|
return index, result
|
||||||
|
|
||||||
|
if match := STRING_RE.match(data, index):
|
||||||
|
return match.end(), json.loads(match[0]) if match[0][0] == '"' else match[0][1:-1]
|
||||||
|
|
||||||
|
match = LEFTOVER_VALUE_RE.match(data, index)
|
||||||
|
assert match
|
||||||
|
value = match[0].strip()
|
||||||
|
for func in [
|
||||||
|
int,
|
||||||
|
float,
|
||||||
|
dt.time.fromisoformat,
|
||||||
|
dt.date.fromisoformat,
|
||||||
|
dt.datetime.fromisoformat,
|
||||||
|
{'true': True, 'false': False}.get,
|
||||||
|
]:
|
||||||
|
try:
|
||||||
|
value = func(value)
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return match.end(), value
|
||||||
|
|
||||||
|
|
||||||
|
def parse_kv_pair(data: str, index: int, target: dict):
|
||||||
|
match = KEY_RE.match(data, index)
|
||||||
|
if not match:
|
||||||
|
return None
|
||||||
|
|
||||||
|
*keys, key = parse_key(match[0])
|
||||||
|
|
||||||
|
match = EQUALS_RE.match(data, match.end())
|
||||||
|
assert match
|
||||||
|
index = match.end()
|
||||||
|
|
||||||
|
index, value = parse_value(data, index)
|
||||||
|
get_target(target, keys)[key] = value
|
||||||
|
return index
|
||||||
|
|
||||||
|
|
||||||
|
def parse_toml(data: str):
|
||||||
|
root = {}
|
||||||
|
target = root
|
||||||
|
|
||||||
|
index = 0
|
||||||
|
while True:
|
||||||
|
match = EXPRESSION_RE.search(data, index)
|
||||||
|
if not match:
|
||||||
|
break
|
||||||
|
|
||||||
|
if match.group('subtable'):
|
||||||
|
index = match.end()
|
||||||
|
path, is_list = match.group('path', 'is_list')
|
||||||
|
target = get_target(root, list(parse_key(path)), bool(is_list))
|
||||||
|
continue
|
||||||
|
|
||||||
|
index = parse_kv_pair(data, match.start(), target)
|
||||||
|
assert index is not None
|
||||||
|
|
||||||
|
return root
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
import argparse
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument('infile', type=Path, help='The TOML file to read as input')
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
with args.infile.open('r', encoding='utf-8') as file:
|
||||||
|
data = file.read()
|
||||||
|
|
||||||
|
def default(obj):
|
||||||
|
if isinstance(obj, (dt.date, dt.time, dt.datetime)):
|
||||||
|
return obj.isoformat()
|
||||||
|
|
||||||
|
print(json.dumps(parse_toml(data), default=default))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
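A hedged usage sketch of the parser above; the import path assumes execution from the repository root, and the TOML snippet is invented:

```python
from devscripts.tomlparse import parse_toml

data = '''
[project]
name = "yt-dlp"
requires-python = ">=3.9"

[project.optional-dependencies]
test = ["pytest~=8.1"]
'''
print(parse_toml(data)['project']['optional-dependencies']['test'])  # -> ['pytest~=8.1']
```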
@ -1,39 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
"""
|
|
||||||
Usage: python3 ./devscripts/update-formulae.py <path-to-formulae-rb> <version>
|
|
||||||
version can be either 0-aligned (yt-dlp version) or normalized (PyPi version)
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Allow direct execution
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
||||||
|
|
||||||
|
|
||||||
import json
|
|
||||||
import re
|
|
||||||
import urllib.request
|
|
||||||
|
|
||||||
from devscripts.utils import read_file, write_file
|
|
||||||
|
|
||||||
filename, version = sys.argv[1:]
|
|
||||||
|
|
||||||
normalized_version = '.'.join(str(int(x)) for x in version.split('.'))
|
|
||||||
|
|
||||||
pypi_release = json.loads(urllib.request.urlopen(
|
|
||||||
'https://pypi.org/pypi/yt-dlp/%s/json' % normalized_version
|
|
||||||
).read().decode())
|
|
||||||
|
|
||||||
tarball_file = next(x for x in pypi_release['urls'] if x['filename'].endswith('.tar.gz'))
|
|
||||||
|
|
||||||
sha256sum = tarball_file['digests']['sha256']
|
|
||||||
url = tarball_file['url']
|
|
||||||
|
|
||||||
formulae_text = read_file(filename)
|
|
||||||
|
|
||||||
formulae_text = re.sub(r'sha256 "[0-9a-f]*?"', 'sha256 "%s"' % sha256sum, formulae_text, count=1)
|
|
||||||
formulae_text = re.sub(r'url "[^"]*?"', 'url "%s"' % url, formulae_text, count=1)
|
|
||||||
|
|
||||||
write_file(filename, formulae_text)
|
|
@ -9,22 +9,22 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import contextlib
|
import contextlib
|
||||||
|
import datetime as dt
|
||||||
import sys
|
import sys
|
||||||
from datetime import datetime, timezone
|
|
||||||
|
|
||||||
from devscripts.utils import read_version, run_process, write_file
|
from devscripts.utils import read_version, run_process, write_file
|
||||||
|
|
||||||
|
|
||||||
def get_new_version(version, revision):
|
def get_new_version(version, revision):
|
||||||
if not version:
|
if not version:
|
||||||
version = datetime.now(timezone.utc).strftime('%Y.%m.%d')
|
version = dt.datetime.now(dt.timezone.utc).strftime('%Y.%m.%d')
|
||||||
|
|
||||||
if revision:
|
if revision:
|
||||||
assert revision.isdigit(), 'Revision must be a number'
|
assert revision.isdecimal(), 'Revision must be a number'
|
||||||
else:
|
else:
|
||||||
old_version = read_version().split('.')
|
old_version = read_version().split('.')
|
||||||
if version.split('.') == old_version[:3]:
|
if version.split('.') == old_version[:3]:
|
||||||
revision = str(int((old_version + [0])[3]) + 1)
|
revision = str(int(([*old_version, 0])[3]) + 1)
|
||||||
|
|
||||||
return f'{version}.{revision}' if revision else version
|
return f'{version}.{revision}' if revision else version
|
||||||
|
|
||||||
@ -46,6 +46,10 @@ VARIANT = None
|
|||||||
UPDATE_HINT = None
|
UPDATE_HINT = None
|
||||||
|
|
||||||
CHANNEL = {channel!r}
|
CHANNEL = {channel!r}
|
||||||
|
|
||||||
|
ORIGIN = {origin!r}
|
||||||
|
|
||||||
|
_pkg_version = {package_version!r}
|
||||||
'''
|
'''
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
@ -53,6 +57,12 @@ if __name__ == '__main__':
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-c', '--channel', default='stable',
|
'-c', '--channel', default='stable',
|
||||||
help='Select update channel (default: %(default)s)')
|
help='Select update channel (default: %(default)s)')
|
||||||
|
parser.add_argument(
|
||||||
|
'-r', '--origin', default='local',
|
||||||
|
help='Select origin/repository (default: %(default)s)')
|
||||||
|
parser.add_argument(
|
||||||
|
'-s', '--suffix', default='',
|
||||||
|
help='Add an alphanumeric suffix to the package version, e.g. "dev"')
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-o', '--output', default='yt_dlp/version.py',
|
'-o', '--output', default='yt_dlp/version.py',
|
||||||
help='The output file to write to (default: %(default)s)')
|
help='The output file to write to (default: %(default)s)')
|
||||||
@ -66,6 +76,7 @@ if __name__ == '__main__':
|
|||||||
args.version if args.version and '.' in args.version
|
args.version if args.version and '.' in args.version
|
||||||
else get_new_version(None, args.version))
|
else get_new_version(None, args.version))
|
||||||
write_file(args.output, VERSION_TEMPLATE.format(
|
write_file(args.output, VERSION_TEMPLATE.format(
|
||||||
version=version, git_head=git_head, channel=args.channel))
|
version=version, git_head=git_head, channel=args.channel, origin=args.origin,
|
||||||
|
package_version=f'{version}{args.suffix}'))
|
||||||
|
|
||||||
print(f'version={version} ({args.channel}), head={git_head}')
|
print(f'version={version} ({args.channel}), head={git_head}')
|
||||||
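With the new `-r/--origin` and `-s/--suffix` options, the generated yt_dlp/version.py gains ORIGIN and _pkg_version entries. Roughly what the file looks like afterwards; the values are examples only, and the leading __version__/RELEASE_GIT_HEAD lines come from the unchanged part of the template:

```python
# Example contents only; real values depend on the release being built
__version__ = '2025.01.01'

RELEASE_GIT_HEAD = 'abcdef0'

VARIANT = None

UPDATE_HINT = None

CHANNEL = 'stable'

ORIGIN = 'yt-dlp/yt-dlp'

_pkg_version = '2025.01.01'
```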
|
26 devscripts/update_changelog.py (new executable file)
@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+# Allow direct execution
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from pathlib import Path
+
+from devscripts.make_changelog import create_changelog, create_parser
+from devscripts.utils import read_file, read_version, write_file
+
+# Always run after devscripts/update-version.py, and run before `make doc|pypi-files|tar|all`
+
+if __name__ == '__main__':
+    parser = create_parser()
+    parser.description = 'Update an existing changelog file with an entry for a new release'
+    parser.add_argument(
+        '--changelog-path', type=Path, default=Path(__file__).parent.parent / 'Changelog.md',
+        help='path to the Changelog file')
+    args = parser.parse_args()
+    new_entry = create_changelog(args)
+
+    header, sep, changelog = read_file(args.changelog_path).partition('\n### ')
+    write_file(args.changelog_path, f'{header}{sep}{read_version()}\n{new_entry}\n{sep}{changelog}')
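The last two lines splice the new entry in right before the first existing `### ` heading; a self-contained model of that string surgery (contents invented):

```python
changelog_md = '# Changelog\n\n### 2024.01.01\n- old entry\n'
new_entry = '- new entry'
version = '2024.02.02'

header, sep, rest = changelog_md.partition('\n### ')
print(f'{header}{sep}{version}\n{new_entry}\n{sep}{rest}')
```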
@ -13,10 +13,11 @@ def write_file(fname, content, mode='w'):
         return f.write(content)
 
 
-def read_version(fname='yt_dlp/version.py'):
+def read_version(fname='yt_dlp/version.py', varname='__version__'):
     """Get the version without importing the package"""
-    exec(compile(read_file(fname), fname, 'exec'))
-    return locals()['__version__']
+    items = {}
+    exec(compile(read_file(fname), fname, 'exec'), items)
+    return items[varname]
 
 
 def get_filename_args(has_infile=False, default_outfile=None):
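read_version() can now pull any module-level variable out of version.py; for example (run from a yt-dlp checkout), the _pkg_version written by update-version.py above:

```python
from devscripts.utils import read_version

print(read_version())                        # __version__ (the default)
print(read_version(varname='_pkg_version'))  # the normalized package version
```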
|
@ -9,15 +9,15 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||||||
|
|
||||||
import yt_dlp
|
import yt_dlp
|
||||||
|
|
||||||
ZSH_COMPLETION_FILE = "completions/zsh/_yt-dlp"
|
ZSH_COMPLETION_FILE = 'completions/zsh/_yt-dlp'
|
||||||
ZSH_COMPLETION_TEMPLATE = "devscripts/zsh-completion.in"
|
ZSH_COMPLETION_TEMPLATE = 'devscripts/zsh-completion.in'
|
||||||
|
|
||||||
|
|
||||||
def build_completion(opt_parser):
|
def build_completion(opt_parser):
|
||||||
opts = [opt for group in opt_parser.option_groups
|
opts = [opt for group in opt_parser.option_groups
|
||||||
for opt in group.option_list]
|
for opt in group.option_list]
|
||||||
opts_file = [opt for opt in opts if opt.metavar == "FILE"]
|
opts_file = [opt for opt in opts if opt.metavar == 'FILE']
|
||||||
opts_dir = [opt for opt in opts if opt.metavar == "DIR"]
|
opts_dir = [opt for opt in opts if opt.metavar == 'DIR']
|
||||||
|
|
||||||
fileopts = []
|
fileopts = []
|
||||||
for opt in opts_file:
|
for opt in opts_file:
|
||||||
@ -38,11 +38,11 @@ def build_completion(opt_parser):
|
|||||||
with open(ZSH_COMPLETION_TEMPLATE) as f:
|
with open(ZSH_COMPLETION_TEMPLATE) as f:
|
||||||
template = f.read()
|
template = f.read()
|
||||||
|
|
||||||
template = template.replace("{{fileopts}}", "|".join(fileopts))
|
template = template.replace('{{fileopts}}', '|'.join(fileopts))
|
||||||
template = template.replace("{{diropts}}", "|".join(diropts))
|
template = template.replace('{{diropts}}', '|'.join(diropts))
|
||||||
template = template.replace("{{flags}}", " ".join(flags))
|
template = template.replace('{{flags}}', ' '.join(flags))
|
||||||
|
|
||||||
with open(ZSH_COMPLETION_FILE, "w") as f:
|
with open(ZSH_COMPLETION_FILE, 'w') as f:
|
||||||
f.write(template)
|
f.write(template)
|
||||||
|
|
||||||
|
|
||||||
|
399 pyproject.toml
@ -1,5 +1,396 @@
|
|||||||
[build-system]
|
[build-system]
|
||||||
build-backend = 'setuptools.build_meta'
|
requires = ["hatchling"]
|
||||||
# https://github.com/yt-dlp/yt-dlp/issues/5941
|
build-backend = "hatchling.build"
|
||||||
# https://github.com/pypa/distutils/issues/17
|
|
||||||
requires = ['setuptools > 50']
|
[project]
|
||||||
|
name = "yt-dlp"
|
||||||
|
maintainers = [
|
||||||
|
{name = "pukkandan", email = "pukkandan.ytdlp@gmail.com"},
|
||||||
|
{name = "Grub4K", email = "contact@grub4k.xyz"},
|
||||||
|
{name = "bashonly", email = "bashonly@protonmail.com"},
|
||||||
|
{name = "coletdjnz", email = "coletdjnz@protonmail.com"},
|
||||||
|
{name = "sepro", email = "sepro@sepr0.com"},
|
||||||
|
]
|
||||||
|
description = "A feature-rich command-line audio/video downloader"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.9"
|
||||||
|
keywords = [
|
||||||
|
"youtube-dl",
|
||||||
|
"video-downloader",
|
||||||
|
"youtube-downloader",
|
||||||
|
"sponsorblock",
|
||||||
|
"youtube-dlc",
|
||||||
|
"yt-dlp",
|
||||||
|
]
|
||||||
|
license = {file = "LICENSE"}
|
||||||
|
classifiers = [
|
||||||
|
"Topic :: Multimedia :: Video",
|
||||||
|
"Development Status :: 5 - Production/Stable",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Programming Language :: Python :: Implementation",
|
||||||
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
|
"License :: OSI Approved :: The Unlicense (Unlicense)",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
]
|
||||||
|
dynamic = ["version"]
|
||||||
|
dependencies = []
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
default = [
|
||||||
|
"brotli; implementation_name=='cpython'",
|
||||||
|
"brotlicffi; implementation_name!='cpython'",
|
||||||
|
"certifi",
|
||||||
|
"mutagen",
|
||||||
|
"pycryptodomex",
|
||||||
|
"requests>=2.32.2,<3",
|
||||||
|
"urllib3>=1.26.17,<3",
|
||||||
|
"websockets>=13.0",
|
||||||
|
]
|
||||||
|
curl-cffi = [
|
||||||
|
"curl-cffi>=0.5.10,!=0.6.*,!=0.7.*,!=0.8.*,!=0.9.*,<0.11; implementation_name=='cpython'",
|
||||||
|
]
|
||||||
|
secretstorage = [
|
||||||
|
"cffi",
|
||||||
|
"secretstorage",
|
||||||
|
]
|
||||||
|
build = [
|
||||||
|
"build",
|
||||||
|
"hatchling",
|
||||||
|
"pip",
|
||||||
|
"setuptools>=71.0.2,<81", # See https://github.com/pyinstaller/pyinstaller/issues/9149
|
||||||
|
"wheel",
|
||||||
|
]
|
||||||
|
dev = [
|
||||||
|
"pre-commit",
|
||||||
|
"yt-dlp[static-analysis]",
|
||||||
|
"yt-dlp[test]",
|
||||||
|
]
|
||||||
|
static-analysis = [
|
||||||
|
"autopep8~=2.0",
|
||||||
|
"ruff~=0.11.0",
|
||||||
|
]
|
||||||
|
test = [
|
||||||
|
"pytest~=8.1",
|
||||||
|
"pytest-rerunfailures~=14.0",
|
||||||
|
]
|
||||||
|
pyinstaller = [
|
||||||
|
"pyinstaller>=6.13.0", # Windows temp cleanup fixed in 6.13.0
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Documentation = "https://github.com/yt-dlp/yt-dlp#readme"
|
||||||
|
Repository = "https://github.com/yt-dlp/yt-dlp"
|
||||||
|
Tracker = "https://github.com/yt-dlp/yt-dlp/issues"
|
||||||
|
Funding = "https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators"
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
yt-dlp = "yt_dlp:main"
|
||||||
|
|
||||||
|
[project.entry-points.pyinstaller40]
|
||||||
|
hook-dirs = "yt_dlp.__pyinstaller:get_hook_dirs"
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.sdist]
|
||||||
|
include = [
|
||||||
|
"/yt_dlp",
|
||||||
|
"/devscripts",
|
||||||
|
"/test",
|
||||||
|
"/.gitignore", # included by default, needed for auto-excludes
|
||||||
|
"/Changelog.md",
|
||||||
|
"/LICENSE", # included as license
|
||||||
|
"/pyproject.toml", # included by default
|
||||||
|
"/README.md", # included as readme
|
||||||
|
"/setup.cfg",
|
||||||
|
"/supportedsites.md",
|
||||||
|
]
|
||||||
|
artifacts = [
|
||||||
|
"/yt_dlp/extractor/lazy_extractors.py",
|
||||||
|
"/completions",
|
||||||
|
"/AUTHORS", # included by default
|
||||||
|
"/README.txt",
|
||||||
|
"/yt-dlp.1",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
packages = ["yt_dlp"]
|
||||||
|
artifacts = ["/yt_dlp/extractor/lazy_extractors.py"]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel.shared-data]
|
||||||
|
"completions/bash/yt-dlp" = "share/bash-completion/completions/yt-dlp"
|
||||||
|
"completions/zsh/_yt-dlp" = "share/zsh/site-functions/_yt-dlp"
|
||||||
|
"completions/fish/yt-dlp.fish" = "share/fish/vendor_completions.d/yt-dlp.fish"
|
||||||
|
"README.txt" = "share/doc/yt_dlp/README.txt"
|
||||||
|
"yt-dlp.1" = "share/man/man1/yt-dlp.1"
|
||||||
|
|
||||||
|
[tool.hatch.version]
|
||||||
|
path = "yt_dlp/version.py"
|
||||||
|
pattern = "_pkg_version = '(?P<version>[^']+)'"
|
||||||
|
|
||||||
|
[tool.hatch.envs.default]
|
||||||
|
features = ["curl-cffi", "default"]
|
||||||
|
dependencies = ["pre-commit"]
|
||||||
|
path = ".venv"
|
||||||
|
installer = "uv"
|
||||||
|
|
||||||
|
[tool.hatch.envs.default.scripts]
|
||||||
|
setup = "pre-commit install --config .pre-commit-hatch.yaml"
|
||||||
|
yt-dlp = "python -Werror -Xdev -m yt_dlp {args}"
|
||||||
|
|
||||||
|
[tool.hatch.envs.hatch-static-analysis]
|
||||||
|
detached = true
|
||||||
|
features = ["static-analysis"]
|
||||||
|
dependencies = [] # override hatch ruff version
|
||||||
|
config-path = "pyproject.toml"
|
||||||
|
|
||||||
|
[tool.hatch.envs.hatch-static-analysis.scripts]
|
||||||
|
format-check = "autopep8 --diff {args:.}"
|
||||||
|
format-fix = "autopep8 --in-place {args:.}"
|
||||||
|
lint-check = "ruff check {args:.}"
|
||||||
|
lint-fix = "ruff check --fix {args:.}"
|
||||||
|
|
||||||
|
[tool.hatch.envs.hatch-test]
|
||||||
|
features = ["test"]
|
||||||
|
dependencies = [
|
||||||
|
"pytest-randomly~=3.15",
|
||||||
|
"pytest-xdist[psutil]~=3.5",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.hatch.envs.hatch-test.scripts]
|
||||||
|
run = "python -m devscripts.run_tests {args}"
|
||||||
|
run-cov = "echo Code coverage not implemented && exit 1"
|
||||||
|
|
||||||
|
[[tool.hatch.envs.hatch-test.matrix]]
|
||||||
|
python = [
|
||||||
|
"3.9",
|
||||||
|
"3.10",
|
||||||
|
"3.11",
|
||||||
|
"3.12",
|
||||||
|
"3.13",
|
||||||
|
"pypy3.10",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 120
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
ignore = [
|
||||||
|
"E402", # module-import-not-at-top-of-file
|
||||||
|
"E501", # line-too-long
|
||||||
|
"E731", # lambda-assignment
|
||||||
|
"E741", # ambiguous-variable-name
|
||||||
|
"UP031", # printf-string-formatting
|
||||||
|
"UP036", # outdated-version-block
|
||||||
|
"B006", # mutable-argument-default
|
||||||
|
"B008", # function-call-in-default-argument
|
||||||
|
"B011", # assert-false
|
||||||
|
"B017", # assert-raises-exception
|
||||||
|
"B023", # function-uses-loop-variable (false positives)
|
||||||
|
"B028", # no-explicit-stacklevel
|
||||||
|
"B904", # raise-without-from-inside-except
|
||||||
|
"A005", # stdlib-module-shadowing
|
||||||
|
"C401", # unnecessary-generator-set
|
||||||
|
"C402", # unnecessary-generator-dict
|
||||||
|
"PIE790", # unnecessary-placeholder
|
||||||
|
"SIM102", # collapsible-if
|
||||||
|
"SIM108", # if-else-block-instead-of-if-exp
|
||||||
|
"SIM112", # uncapitalized-environment-variables
|
||||||
|
"SIM113", # enumerate-for-loop
|
||||||
|
"SIM114", # if-with-same-arms
|
||||||
|
"SIM115", # open-file-with-context-handler
|
||||||
|
"SIM117", # multiple-with-statements
|
||||||
|
"SIM223", # expr-and-false
|
||||||
|
"SIM300", # yoda-conditions
|
||||||
|
"TD001", # invalid-todo-tag
|
||||||
|
"TD002", # missing-todo-author
|
||||||
|
"TD003", # missing-todo-link
|
||||||
|
"PLE0604", # invalid-all-object (false positives)
|
||||||
|
"PLE0643", # potential-index-error (false positives)
|
||||||
|
"PLW0603", # global-statement
|
||||||
|
"PLW1510", # subprocess-run-without-check
|
||||||
|
"PLW2901", # redefined-loop-name
|
||||||
|
"RUF001", # ambiguous-unicode-character-string
|
||||||
|
"RUF012", # mutable-class-default
|
||||||
|
"RUF100", # unused-noqa (flake8 has slightly different behavior)
|
||||||
|
]
|
||||||
|
select = [
|
||||||
|
"E", # pycodestyle Error
|
||||||
|
"W", # pycodestyle Warning
|
||||||
|
"F", # Pyflakes
|
||||||
|
"I", # isort
|
||||||
|
"Q", # flake8-quotes
|
||||||
|
"N803", # invalid-argument-name
|
||||||
|
"N804", # invalid-first-argument-name-for-class-method
|
||||||
|
"UP", # pyupgrade
|
||||||
|
"B", # flake8-bugbear
|
||||||
|
"A", # flake8-builtins
|
||||||
|
"COM", # flake8-commas
|
||||||
|
"C4", # flake8-comprehensions
|
||||||
|
"FA", # flake8-future-annotations
|
||||||
|
"ISC", # flake8-implicit-str-concat
|
||||||
|
"ICN003", # banned-import-from
|
||||||
|
"PIE", # flake8-pie
|
||||||
|
"T20", # flake8-print
|
||||||
|
"RSE", # flake8-raise
|
||||||
|
"RET504", # unnecessary-assign
|
||||||
|
"SIM", # flake8-simplify
|
||||||
|
"TID251", # banned-api
|
||||||
|
"TD", # flake8-todos
|
||||||
|
"PLC", # Pylint Convention
|
||||||
|
"PLE", # Pylint Error
|
||||||
|
"PLW", # Pylint Warning
|
||||||
|
"RUF", # Ruff-specific rules
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.per-file-ignores]
|
||||||
|
"devscripts/lazy_load_template.py" = [
|
||||||
|
"F401", # unused-import
|
||||||
|
]
|
||||||
|
"!yt_dlp/extractor/**.py" = [
|
||||||
|
"I", # isort
|
||||||
|
"ICN003", # banned-import-from
|
||||||
|
"T20", # flake8-print
|
||||||
|
"A002", # builtin-argument-shadowing
|
||||||
|
"C408", # unnecessary-collection-call
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.isort]
|
||||||
|
known-first-party = [
|
||||||
|
"bundle",
|
||||||
|
"devscripts",
|
||||||
|
"test",
|
||||||
|
]
|
||||||
|
relative-imports-order = "closest-to-furthest"
|
||||||
|
|
||||||
|
[tool.ruff.lint.flake8-quotes]
|
||||||
|
docstring-quotes = "double"
|
||||||
|
multiline-quotes = "single"
|
||||||
|
inline-quotes = "single"
|
||||||
|
avoid-escape = false
|
||||||
|
|
||||||
|
[tool.ruff.lint.pep8-naming]
|
||||||
|
classmethod-decorators = [
|
||||||
|
"yt_dlp.utils.classproperty",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.flake8-import-conventions]
|
||||||
|
banned-from = [
|
||||||
|
"base64",
|
||||||
|
"datetime",
|
||||||
|
"functools",
|
||||||
|
"glob",
|
||||||
|
"hashlib",
|
||||||
|
"itertools",
|
||||||
|
"json",
|
||||||
|
"math",
|
||||||
|
"os",
|
||||||
|
"pathlib",
|
||||||
|
"random",
|
||||||
|
"re",
|
||||||
|
"string",
|
||||||
|
"sys",
|
||||||
|
"time",
|
||||||
|
"urllib.parse",
|
||||||
|
"uuid",
|
||||||
|
"xml",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.flake8-tidy-imports.banned-api]
|
||||||
|
"yt_dlp.compat.compat_str".msg = "Use `str` instead."
|
||||||
|
"yt_dlp.compat.compat_b64decode".msg = "Use `base64.b64decode` instead."
|
||||||
|
"yt_dlp.compat.compat_urlparse".msg = "Use `urllib.parse` instead."
|
||||||
|
"yt_dlp.compat.compat_parse_qs".msg = "Use `urllib.parse.parse_qs` instead."
|
||||||
|
"yt_dlp.compat.compat_urllib_parse_unquote".msg = "Use `urllib.parse.unquote` instead."
|
||||||
|
"yt_dlp.compat.compat_urllib_parse_urlencode".msg = "Use `urllib.parse.urlencode` instead."
|
||||||
|
"yt_dlp.compat.compat_urllib_parse_urlparse".msg = "Use `urllib.parse.urlparse` instead."
|
||||||
|
"yt_dlp.compat.compat_shlex_quote".msg = "Use `yt_dlp.utils.shell_quote` instead."
|
||||||
|
"yt_dlp.utils.error_to_compat_str".msg = "Use `str` instead."
|
||||||
|
"yt_dlp.utils.bytes_to_intlist".msg = "Use `list` instead."
|
||||||
|
"yt_dlp.utils.intlist_to_bytes".msg = "Use `bytes` instead."
|
||||||
|
"yt_dlp.utils.decodeArgument".msg = "Do not use"
|
||||||
|
"yt_dlp.utils.decodeFilename".msg = "Do not use"
|
||||||
|
"yt_dlp.utils.encodeFilename".msg = "Do not use"
|
||||||
|
"yt_dlp.compat.compat_os_name".msg = "Use `os.name` instead."
|
||||||
|
"yt_dlp.compat.compat_realpath".msg = "Use `os.path.realpath` instead."
|
||||||
|
"yt_dlp.compat.functools".msg = "Use `functools` instead."
|
||||||
|
"yt_dlp.utils.decodeOption".msg = "Do not use"
|
||||||
|
"yt_dlp.utils.compiled_regex_type".msg = "Use `re.Pattern` instead."
|
||||||
|
|
||||||
|
[tool.autopep8]
|
||||||
|
max_line_length = 120
|
||||||
|
recursive = true
|
||||||
|
exit-code = true
|
||||||
|
jobs = 0
|
||||||
|
select = [
|
||||||
|
"E101",
|
||||||
|
"E112",
|
||||||
|
"E113",
|
||||||
|
"E115",
|
||||||
|
"E116",
|
||||||
|
"E117",
|
||||||
|
"E121",
|
||||||
|
"E122",
|
||||||
|
"E123",
|
||||||
|
"E124",
|
||||||
|
"E125",
|
||||||
|
"E126",
|
||||||
|
"E127",
|
||||||
|
"E128",
|
||||||
|
"E129",
|
||||||
|
"E131",
|
||||||
|
"E201",
|
||||||
|
"E202",
|
||||||
|
"E203",
|
||||||
|
"E211",
|
||||||
|
"E221",
|
||||||
|
"E222",
|
||||||
|
"E223",
|
||||||
|
"E224",
|
||||||
|
"E225",
|
||||||
|
"E226",
|
||||||
|
"E227",
|
||||||
|
"E228",
|
||||||
|
"E231",
|
||||||
|
"E241",
|
||||||
|
"E242",
|
||||||
|
"E251",
|
||||||
|
"E252",
|
||||||
|
"E261",
|
||||||
|
"E262",
|
||||||
|
"E265",
|
||||||
|
"E266",
|
||||||
|
"E271",
|
||||||
|
"E272",
|
||||||
|
"E273",
|
||||||
|
"E274",
|
||||||
|
"E275",
|
||||||
|
"E301",
|
||||||
|
"E302",
|
||||||
|
"E303",
|
||||||
|
"E304",
|
||||||
|
"E305",
|
||||||
|
"E306",
|
||||||
|
"E502",
|
||||||
|
"E701",
|
||||||
|
"E702",
|
||||||
|
"E704",
|
||||||
|
"W391",
|
||||||
|
"W504",
|
||||||
|
]
|
||||||
|
exclude = "*/extractor/lazy_extractors.py,*venv*,*/test/testdata/sigs/player-*.js,.idea,.vscode"
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = [
|
||||||
|
"-ra", # summary: all except passed
|
||||||
|
"--verbose",
|
||||||
|
"--strict-markers",
|
||||||
|
]
|
||||||
|
markers = [
|
||||||
|
"download",
|
||||||
|
]
|
||||||
|
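A hedged sketch of how a devscript can consume the optional-dependency groups defined above using the bundled tomlparse (whose docstring says it exists to parse pyproject.toml for install_deps.py); assumes execution from the repository root:

```python
from pathlib import Path

from devscripts.tomlparse import parse_toml

project = parse_toml(Path('pyproject.toml').read_text(encoding='utf-8'))['project']
for group, deps in project['optional-dependencies'].items():
    print(f'{group}: {", ".join(deps)}')
```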
@ -1,6 +0,0 @@ (deleted file: requirements.txt)
-mutagen
-pycryptodomex
-websockets
-brotli; platform_python_implementation=='CPython'
-brotlicffi; platform_python_implementation!='CPython'
-certifi
14 setup.cfg
@ -1,7 +1,3 @@
-[wheel]
-universal = true
-
-
 [flake8]
 exclude = build,venv,.tox,.git,.pytest_cache
 ignore = E402,E501,E731,E741,W503
@ -18,15 +14,9 @@ remove-duplicate-keys = true
 remove-unused-variables = true
 
 
-[tool:pytest]
-addopts = -ra -v --strict-markers
-markers =
-    download
-
-
 [tox:tox]
 skipsdist = true
-envlist = py{36,37,38,39,310,311},pypy{36,37,38,39}
+envlist = py{39,310,311,312,313},pypy310
 skip_missing_interpreters = true
 
 [testenv] # tox
@ -39,7 +29,7 @@ setenv =
 
 
 [isort]
-py_version = 37
+py_version = 39
 multi_line_output = VERTICAL_HANGING_INDENT
 line_length = 80
 reverse_relative = true
|
||||||
|
setup.py (176 lines, deleted)

@@ -1,176 +0,0 @@
-#!/usr/bin/env python3
-
-# Allow execution from anywhere
-import os
-import sys
-
-sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
-
-import subprocess
-import warnings
-
-try:
-    from setuptools import Command, find_packages, setup
-    setuptools_available = True
-except ImportError:
-    from distutils.core import Command, setup
-    setuptools_available = False
-
-from devscripts.utils import read_file, read_version
-
-VERSION = read_version()
-
-DESCRIPTION = 'A youtube-dl fork with additional features and patches'
-
-LONG_DESCRIPTION = '\n\n'.join((
-    'Official repository: <https://github.com/yt-dlp/yt-dlp>',
-    '**PS**: Some links in this document will not work since this is a copy of the README.md from Github',
-    read_file('README.md')))
-
-REQUIREMENTS = read_file('requirements.txt').splitlines()
-
-
-def packages():
-    if setuptools_available:
-        return find_packages(exclude=('youtube_dl', 'youtube_dlc', 'test', 'ytdlp_plugins', 'devscripts'))
-
-    return [
-        'yt_dlp', 'yt_dlp.extractor', 'yt_dlp.downloader', 'yt_dlp.postprocessor', 'yt_dlp.compat',
-    ]
-
-
-def py2exe_params():
-    warnings.warn(
-        'py2exe builds do not support pycryptodomex and needs VC++14 to run. '
-        'It is recommended to run "pyinst.py" to build using pyinstaller instead')
-
-    return {
-        'console': [{
-            'script': './yt_dlp/__main__.py',
-            'dest_base': 'yt-dlp',
-            'icon_resources': [(1, 'devscripts/logo.ico')],
-        }],
-        'version_info': {
-            'version': VERSION,
-            'description': DESCRIPTION,
-            'comments': LONG_DESCRIPTION.split('\n')[0],
-            'product_name': 'yt-dlp',
-            'product_version': VERSION,
-        },
-        'options': {
-            'bundle_files': 0,
-            'compressed': 1,
-            'optimize': 2,
-            'dist_dir': './dist',
-            'excludes': ['Crypto', 'Cryptodome'],  # py2exe cannot import Crypto
-            'dll_excludes': ['w9xpopen.exe', 'crypt32.dll'],
-            # Modules that are only imported dynamically must be added here
-            'includes': ['yt_dlp.compat._legacy', 'yt_dlp.compat._deprecated',
-                         'yt_dlp.utils._legacy', 'yt_dlp.utils._deprecated'],
-        },
-        'zipfile': None,
-    }
-
-
-def build_params():
-    files_spec = [
-        ('share/bash-completion/completions', ['completions/bash/yt-dlp']),
-        ('share/zsh/site-functions', ['completions/zsh/_yt-dlp']),
-        ('share/fish/vendor_completions.d', ['completions/fish/yt-dlp.fish']),
-        ('share/doc/yt_dlp', ['README.txt']),
-        ('share/man/man1', ['yt-dlp.1'])
-    ]
-    data_files = []
-    for dirname, files in files_spec:
-        resfiles = []
-        for fn in files:
-            if not os.path.exists(fn):
-                warnings.warn(f'Skipping file {fn} since it is not present. Try running " make pypi-files " first')
-            else:
-                resfiles.append(fn)
-        data_files.append((dirname, resfiles))
-
-    params = {'data_files': data_files}
-
-    if setuptools_available:
-        params['entry_points'] = {
-            'console_scripts': ['yt-dlp = yt_dlp:main'],
-            'pyinstaller40': ['hook-dirs = yt_dlp.__pyinstaller:get_hook_dirs'],
-        }
-    else:
-        params['scripts'] = ['yt-dlp']
-    return params
-
-
-class build_lazy_extractors(Command):
-    description = 'Build the extractor lazy loading module'
-    user_options = []
-
-    def initialize_options(self):
-        pass
-
-    def finalize_options(self):
-        pass
-
-    def run(self):
-        if self.dry_run:
-            print('Skipping build of lazy extractors in dry run mode')
-            return
-        subprocess.run([sys.executable, 'devscripts/make_lazy_extractors.py'])
-
-
-def main():
-    if sys.argv[1:2] == ['py2exe']:
-        params = py2exe_params()
-        try:
-            from py2exe import freeze
-        except ImportError:
-            import py2exe  # noqa: F401
-            warnings.warn('You are using an outdated version of py2exe. Support for this version will be removed in the future')
-            params['console'][0].update(params.pop('version_info'))
-            params['options'] = {'py2exe': params.pop('options')}
-        else:
-            return freeze(**params)
-    else:
-        params = build_params()
-
-    setup(
-        name='yt-dlp',
-        version=VERSION,
-        maintainer='pukkandan',
-        maintainer_email='pukkandan.ytdlp@gmail.com',
-        description=DESCRIPTION,
-        long_description=LONG_DESCRIPTION,
-        long_description_content_type='text/markdown',
-        url='https://github.com/yt-dlp/yt-dlp',
-        packages=packages(),
-        install_requires=REQUIREMENTS,
-        python_requires='>=3.7',
-        project_urls={
-            'Documentation': 'https://github.com/yt-dlp/yt-dlp#readme',
-            'Source': 'https://github.com/yt-dlp/yt-dlp',
-            'Tracker': 'https://github.com/yt-dlp/yt-dlp/issues',
-            'Funding': 'https://github.com/yt-dlp/yt-dlp/blob/master/Collaborators.md#collaborators',
-        },
-        classifiers=[
-            'Topic :: Multimedia :: Video',
-            'Development Status :: 5 - Production/Stable',
-            'Environment :: Console',
-            'Programming Language :: Python',
-            'Programming Language :: Python :: 3.7',
-            'Programming Language :: Python :: 3.8',
-            'Programming Language :: Python :: 3.9',
-            'Programming Language :: Python :: 3.10',
-            'Programming Language :: Python :: 3.11',
-            'Programming Language :: Python :: Implementation',
-            'Programming Language :: Python :: Implementation :: CPython',
-            'Programming Language :: Python :: Implementation :: PyPy',
-            'License :: Public Domain',
-            'Operating System :: OS Independent',
-        ],
-        cmdclass={'build_lazy_extractors': build_lazy_extractors},
-        **params
-    )
-
-
-main()
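Note that the deleted build_lazy_extractors command did nothing beyond shelling out to a devscript, so the same build step can still be run directly. A minimal sketch of the equivalent invocation, assuming it is run from the repository root:

    # Equivalent of the removed `python setup.py build_lazy_extractors` step:
    # the command only ran devscripts/make_lazy_extractors.py in a subprocess.
    import subprocess
    import sys

    subprocess.run([sys.executable, 'devscripts/make_lazy_extractors.py'], check=True)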
(One file's diff is suppressed because it is too large.)
test/conftest.py

@@ -1,4 +1,3 @@
-import functools
 import inspect
 
 import pytest
@@ -10,7 +9,9 @@ from yt_dlp.utils._utils import _YDLLogger as FakeLogger
 
 @pytest.fixture
 def handler(request):
-    RH_KEY = request.param
+    RH_KEY = getattr(request, 'param', None)
+    if not RH_KEY:
+        return
     if inspect.isclass(RH_KEY) and issubclass(RH_KEY, RequestHandler):
         handler = RH_KEY
     elif RH_KEY in _REQUEST_HANDLERS:
@@ -18,4 +19,46 @@ def handler(request):
     else:
         pytest.skip(f'{RH_KEY} request handler is not available')
 
-    return functools.partial(handler, logger=FakeLogger)
+    class HandlerWrapper(handler):
+        RH_KEY = handler.RH_KEY
+
+        def __init__(self, **kwargs):
+            super().__init__(logger=FakeLogger, **kwargs)
+
+    return HandlerWrapper
+
+
+@pytest.fixture(autouse=True)
+def skip_handler(request, handler):
+    """usage: pytest.mark.skip_handler('my_handler', 'reason')"""
+    for marker in request.node.iter_markers('skip_handler'):
+        if marker.args[0] == handler.RH_KEY:
+            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')
+
+
+@pytest.fixture(autouse=True)
+def skip_handler_if(request, handler):
+    """usage: pytest.mark.skip_handler_if('my_handler', lambda request: True, 'reason')"""
+    for marker in request.node.iter_markers('skip_handler_if'):
+        if marker.args[0] == handler.RH_KEY and marker.args[1](request):
+            pytest.skip(marker.args[2] if len(marker.args) > 2 else '')
+
+
+@pytest.fixture(autouse=True)
+def skip_handlers_if(request, handler):
+    """usage: pytest.mark.skip_handlers_if(lambda request, handler: True, 'reason')"""
+    for marker in request.node.iter_markers('skip_handlers_if'):
+        if handler and marker.args[0](request, handler):
+            pytest.skip(marker.args[1] if len(marker.args) > 1 else '')
+
+
+def pytest_configure(config):
+    config.addinivalue_line(
+        'markers', 'skip_handler(handler): skip test for the given handler',
+    )
+    config.addinivalue_line(
+        'markers', 'skip_handler_if(handler): skip test for the given handler if condition is true',
+    )
+    config.addinivalue_line(
+        'markers', 'skip_handlers_if(handler): skip test for handlers when the condition is true',
+    )
test/helper.py (246 changes)

@@ -9,15 +9,14 @@ import types
 
 import yt_dlp.extractor
 from yt_dlp import YoutubeDL
-from yt_dlp.compat import compat_os_name
-from yt_dlp.utils import preferredencoding, write_string
+from yt_dlp.utils import preferredencoding, try_call, write_string, find_available_port
 
 if 'pytest' in sys.modules:
     import pytest
     is_download_test = pytest.mark.download
 else:
-    def is_download_test(testClass):
-        return testClass
+    def is_download_test(test_class):
+        return test_class
 
 
 def get_params(override=None):
@@ -45,11 +44,11 @@ def try_rm(filename):
 
 
 def report_warning(message, *args, **kwargs):
-    '''
+    """
     Print the message to stderr, it will be prefixed with 'WARNING:'
     If stderr is a tty file the 'WARNING:' will be colored
-    '''
-    if sys.stderr.isatty() and compat_os_name != 'nt':
+    """
+    if sys.stderr.isatty() and os.name != 'nt':
         _msg_header = '\033[0;33mWARNING:\033[0m'
     else:
         _msg_header = 'WARNING:'
@@ -102,88 +101,109 @@ def getwebpagetestcases():
 md5 = lambda s: hashlib.md5(s.encode()).hexdigest()
 
 
-def expect_value(self, got, expected, field):
-    if isinstance(expected, str) and expected.startswith('re:'):
-        match_str = expected[len('re:'):]
-        match_rex = re.compile(match_str)
-
-        self.assertTrue(
-            isinstance(got, str),
-            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
-        self.assertTrue(
-            match_rex.match(got),
-            f'field {field} (value: {got!r}) should match {match_str!r}')
-    elif isinstance(expected, str) and expected.startswith('startswith:'):
-        start_str = expected[len('startswith:'):]
-        self.assertTrue(
-            isinstance(got, str),
-            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
-        self.assertTrue(
-            got.startswith(start_str),
-            f'field {field} (value: {got!r}) should start with {start_str!r}')
-    elif isinstance(expected, str) and expected.startswith('contains:'):
-        contains_str = expected[len('contains:'):]
-        self.assertTrue(
-            isinstance(got, str),
-            f'Expected a {str.__name__} object, but got {type(got).__name__} for field {field}')
-        self.assertTrue(
-            contains_str in got,
-            f'field {field} (value: {got!r}) should contain {contains_str!r}')
-    elif isinstance(expected, type):
-        self.assertTrue(
-            isinstance(got, expected),
-            f'Expected type {expected!r} for field {field}, but got value {got!r} of type {type(got)!r}')
-    elif isinstance(expected, dict) and isinstance(got, dict):
-        expect_dict(self, got, expected)
-    elif isinstance(expected, list) and isinstance(got, list):
-        self.assertEqual(
-            len(expected), len(got),
-            'Expect a list of length %d, but got a list of length %d for field %s' % (
-                len(expected), len(got), field))
-        for index, (item_got, item_expected) in enumerate(zip(got, expected)):
-            type_got = type(item_got)
-            type_expected = type(item_expected)
-            self.assertEqual(
-                type_expected, type_got,
-                'Type mismatch for list item at index %d for field %s, expected %r, got %r' % (
-                    index, field, type_expected, type_got))
-            expect_value(self, item_got, item_expected, field)
-    else:
-        if isinstance(expected, str) and expected.startswith('md5:'):
-            self.assertTrue(
-                isinstance(got, str),
-                f'Expected field {field} to be a unicode object, but got value {got!r} of type {type(got)!r}')
-            got = 'md5:' + md5(got)
-        elif isinstance(expected, str) and re.match(r'^(?:min|max)?count:\d+', expected):
-            self.assertTrue(
-                isinstance(got, (list, dict)),
-                f'Expected field {field} to be a list or a dict, but it is of type {type(got).__name__}')
-            op, _, expected_num = expected.partition(':')
-            expected_num = int(expected_num)
-            if op == 'mincount':
-                assert_func = assertGreaterEqual
-                msg_tmpl = 'Expected %d items in field %s, but only got %d'
-            elif op == 'maxcount':
-                assert_func = assertLessEqual
-                msg_tmpl = 'Expected maximum %d items in field %s, but got %d'
-            elif op == 'count':
-                assert_func = assertEqual
-                msg_tmpl = 'Expected exactly %d items in field %s, but got %d'
-            else:
-                assert False
-            assert_func(
-                self, len(got), expected_num,
-                msg_tmpl % (expected_num, field, len(got)))
-            return
-        self.assertEqual(
-            expected, got,
-            f'Invalid value for field {field}, expected {expected!r}, got {got!r}')
-
-
-def expect_dict(self, got_dict, expected_dict):
-    for info_field, expected in expected_dict.items():
-        got = got_dict.get(info_field)
-        expect_value(self, got, expected, info_field)
+def _iter_differences(got, expected, field):
+    if isinstance(expected, str):
+        op, _, val = expected.partition(':')
+        if op in ('mincount', 'maxcount', 'count'):
+            if not isinstance(got, (list, dict)):
+                yield field, f'expected either {list.__name__} or {dict.__name__}, got {type(got).__name__}'
+                return
+
+            expected_num = int(val)
+            got_num = len(got)
+            if op == 'mincount':
+                if got_num < expected_num:
+                    yield field, f'expected at least {val} items, got {got_num}'
+                return
+
+            if op == 'maxcount':
+                if got_num > expected_num:
+                    yield field, f'expected at most {val} items, got {got_num}'
+                return
+
+            assert op == 'count'
+            if got_num != expected_num:
+                yield field, f'expected exactly {val} items, got {got_num}'
+            return
+
+        if not isinstance(got, str):
+            yield field, f'expected {str.__name__}, got {type(got).__name__}'
+            return
+
+        if op == 're':
+            if not re.match(val, got):
+                yield field, f'should match {val!r}, got {got!r}'
+            return
+
+        if op == 'startswith':
+            if not got.startswith(val):
+                yield field, f'should start with {val!r}, got {got!r}'
+            return
+
+        if op == 'contains':
+            if not val.startswith(got):
+                yield field, f'should contain {val!r}, got {got!r}'
+            return
+
+        if op == 'md5':
+            hash_val = md5(got)
+            if hash_val != val:
+                yield field, f'expected hash {val}, got {hash_val}'
+            return
+
+        if got != expected:
+            yield field, f'expected {expected!r}, got {got!r}'
+        return
+
+    if isinstance(expected, dict) and isinstance(got, dict):
+        for key, expected_val in expected.items():
+            if key not in got:
+                yield field, f'missing key: {key!r}'
+                continue
+
+            field_name = key if field is None else f'{field}.{key}'
+            yield from _iter_differences(got[key], expected_val, field_name)
+        return
+
+    if isinstance(expected, type):
+        if not isinstance(got, expected):
+            yield field, f'expected {expected.__name__}, got {type(got).__name__}'
+        return
+
+    if isinstance(expected, list) and isinstance(got, list):
+        # TODO: clever diffing algorithm lmao
+        if len(expected) != len(got):
+            yield field, f'expected length of {len(expected)}, got {len(got)}'
+            return
+
+        for index, (got_val, expected_val) in enumerate(zip(got, expected)):
+            field_name = str(index) if field is None else f'{field}.{index}'
+            yield from _iter_differences(got_val, expected_val, field_name)
+        return
+
+    if got != expected:
+        yield field, f'expected {expected!r}, got {got!r}'
+
+
+def _expect_value(message, got, expected, field):
+    mismatches = list(_iter_differences(got, expected, field))
+    if not mismatches:
+        return
+
+    fields = [field for field, _ in mismatches if field is not None]
+    return ''.join((
+        message, f' ({", ".join(fields)})' if fields else '',
+        *(f'\n\t{field}: {message}' for field, message in mismatches)))
+
+
+def expect_value(self, got, expected, field):
+    if message := _expect_value('values differ', got, expected, field):
+        self.fail(message)
+
+
+def expect_dict(self, got_dict, expected_dict):
+    if message := _expect_value('dictionaries differ', got_dict, expected_dict, None):
+        self.fail(message)
 
 
 def sanitize_got_info_dict(got_dict):
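The string operators recognised above ('re:', 'startswith:', 'contains:', 'md5:', 'mincount:'/'maxcount:'/'count:', plus bare types) are what extractor test definitions feed into expect_value/expect_dict. A small illustrative expected-dict; the field values are made-up placeholders, not taken from any real test:

    EXPECTED = {
        'title': r're:^Episode \d+',                     # must match the regex
        'thumbnail': 'startswith:https://',              # prefix check
        'uploader': 'contains:Studio',                   # substring check
        'description': 'md5:placeholder0123456789abcdef',  # md5 of the actual text
        'tags': 'mincount:3',                            # also 'maxcount:N' and 'count:N'
        'duration': int,                                 # a bare type means an isinstance() check
    }
    # Each entry would be verified via expect_value(self, got[field], expected, field).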
@@ -214,14 +234,23 @@ def sanitize_got_info_dict(got_dict):
 
     test_info_dict = {
         key: sanitize(key, value) for key, value in got_dict.items()
-        if value is not None and key not in IGNORED_FIELDS and not any(
-            key.startswith(f'{prefix}_') for prefix in IGNORED_PREFIXES)
+        if value is not None and key not in IGNORED_FIELDS and (
+            not any(key.startswith(f'{prefix}_') for prefix in IGNORED_PREFIXES)
+            or key == '_old_archive_ids')
     }
 
     # display_id may be generated from id
     if test_info_dict.get('display_id') == test_info_dict.get('id'):
         test_info_dict.pop('display_id')
 
+    # Remove deprecated fields
+    for old in YoutubeDL._deprecated_multivalue_fields:
+        test_info_dict.pop(old, None)
+
+    # release_year may be generated from release_date
+    if try_call(lambda: test_info_dict['release_year'] == int(test_info_dict['release_date'][:4])):
+        test_info_dict.pop('release_year')
+
     # Check url for flat entries
     if got_dict.get('_type', 'video') != 'video' and got_dict.get('url'):
         test_info_dict['url'] = got_dict['url']
@@ -230,6 +259,20 @@ def sanitize_got_info_dict(got_dict):
 
 
 def expect_info_dict(self, got_dict, expected_dict):
+    ALLOWED_KEYS_SORT_ORDER = (
+        # NB: Keep in sync with the docstring of extractor/common.py
+        'id', 'ext', 'direct', 'display_id', 'title', 'alt_title', 'description', 'media_type',
+        'uploader', 'uploader_id', 'uploader_url', 'channel', 'channel_id', 'channel_url', 'channel_is_verified',
+        'channel_follower_count', 'comment_count', 'view_count', 'concurrent_view_count',
+        'like_count', 'dislike_count', 'repost_count', 'average_rating', 'age_limit', 'duration', 'thumbnail', 'heatmap',
+        'chapters', 'chapter', 'chapter_number', 'chapter_id', 'start_time', 'end_time', 'section_start', 'section_end',
+        'categories', 'tags', 'cast', 'composers', 'artists', 'album_artists', 'creators', 'genres',
+        'track', 'track_number', 'track_id', 'album', 'album_type', 'disc_number',
+        'series', 'series_id', 'season', 'season_number', 'season_id', 'episode', 'episode_number', 'episode_id',
+        'timestamp', 'upload_date', 'release_timestamp', 'release_date', 'release_year', 'modified_timestamp', 'modified_date',
+        'playable_in_embed', 'availability', 'live_status', 'location', 'license', '_old_archive_ids',
+    )
+
     expect_dict(self, got_dict, expected_dict)
     # Check for the presence of mandatory fields
     if got_dict.get('_type') not in ('playlist', 'multi_video'):
@@ -237,19 +280,25 @@ def expect_info_dict(self, got_dict, expected_dict):
     if expected_dict.get('ext'):
         mandatory_fields.extend(('url', 'ext'))
     for key in mandatory_fields:
-        self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
+        self.assertTrue(got_dict.get(key), f'Missing mandatory field {key}')
     # Check for mandatory fields that are automatically set by YoutubeDL
     if got_dict.get('_type', 'video') == 'video':
         for key in ['webpage_url', 'extractor', 'extractor_key']:
-            self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
+            self.assertTrue(got_dict.get(key), f'Missing field: {key}')
 
     test_info_dict = sanitize_got_info_dict(got_dict)
 
-    missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
+    # Check for invalid/misspelled field names being returned by the extractor
+    invalid_keys = sorted(test_info_dict.keys() - ALLOWED_KEYS_SORT_ORDER)
+    self.assertFalse(invalid_keys, f'Invalid fields returned by the extractor: {", ".join(invalid_keys)}')
+
+    missing_keys = sorted(
+        test_info_dict.keys() - expected_dict.keys(),
+        key=lambda x: ALLOWED_KEYS_SORT_ORDER.index(x))
     if missing_keys:
         def _repr(v):
             if isinstance(v, str):
-                return "'%s'" % v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n')
+                return "'{}'".format(v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n'))
             elif isinstance(v, type):
                 return v.__name__
             else:
@@ -266,8 +315,7 @@ def expect_info_dict(self, got_dict, expected_dict):
         write_string(info_dict_str.replace('\n', '\n        '), out=sys.stderr)
     self.assertFalse(
         missing_keys,
-        'Missing keys in test definition: %s' % (
-            ', '.join(sorted(missing_keys))))
+        'Missing keys in test definition: {}'.format(', '.join(sorted(missing_keys))))
 
 
 def assertRegexpMatches(self, text, regexp, msg=None):
@@ -276,9 +324,9 @@ def assertRegexpMatches(self, text, regexp, msg=None):
     else:
         m = re.match(regexp, text)
         if not m:
-            note = 'Regexp didn\'t match: %r not found' % (regexp)
+            note = f'Regexp didn\'t match: {regexp!r} not found'
             if len(text) < 1000:
-                note += ' in %r' % text
+                note += f' in {text!r}'
             if msg is None:
                 msg = note
             else:
@@ -301,7 +349,7 @@ def assertLessEqual(self, got, expected, msg=None):
 
 
 def assertEqual(self, got, expected, msg=None):
-    if not (got == expected):
+    if got != expected:
         if msg is None:
             msg = f'{got!r} not equal to {expected!r}'
         self.assertTrue(got == expected, msg)
@@ -324,3 +372,13 @@ def http_server_port(httpd):
     else:
         sock = httpd.socket
     return sock.getsockname()[1]
+
+
+def verify_address_availability(address):
+    if find_available_port(address) is None:
+        pytest.skip(f'Unable to bind to source address {address} (address may not exist)')
+
+
+def validate_and_send(rh, req):
+    rh.validate(req)
+    return rh.send(req)
test/test_InfoExtractor.py

@@ -53,6 +53,18 @@ class TestInfoExtractor(unittest.TestCase):
     def test_ie_key(self):
         self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)
 
+    def test_get_netrc_login_info(self):
+        for params in [
+            {'usenetrc': True, 'netrc_location': './test/testdata/netrc/netrc'},
+            {'netrc_cmd': f'{sys.executable} ./test/testdata/netrc/print_netrc.py'},
+        ]:
+            ie = DummyIE(FakeYDL(params))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='normal_use'), ('user', 'pass'))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_user'), ('', 'pass'))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='empty_pass'), ('user', ''))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='both_empty'), ('', ''))
+            self.assertEqual(ie._get_netrc_login_info(netrc_machine='nonexistent'), (None, None))
+
     def test_html_search_regex(self):
         html = '<p id="foo">Watch this <a href="http://www.youtube.com/watch?v=BaW_jenozKc">video</a></p>'
         search = lambda re, *args: self.ie._html_search_regex(re, html, *args)
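The params exercised by the new netrc test ('usenetrc', 'netrc_location', 'netrc_cmd') correspond to yt-dlp's --netrc, --netrc-location and --netrc-cmd options. A minimal sketch of using the same keys through the embedding API; the URL and the netrc_cmd value are placeholders:

    import yt_dlp

    params = {'usenetrc': True, 'netrc_location': './test/testdata/netrc/netrc'}
    # or: params = {'netrc_cmd': 'gpg --decrypt ~/yt-dlp.netrc.gpg'}  # the command must print netrc-formatted text
    with yt_dlp.YoutubeDL(params) as ydl:
        ydl.download(['https://example.com/video'])  # placeholder URL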
@@ -262,19 +274,19 @@ class TestInfoExtractor(unittest.TestCase):
             ''',
             {
                 'chapters': [
-                    {"title": "Explosie Turnhout", "start_time": 70, "end_time": 440},
-                    {"title": "Jaarwisseling", "start_time": 440, "end_time": 1179},
-                    {"title": "Natuurbranden Colorado", "start_time": 1179, "end_time": 1263},
-                    {"title": "Klimaatverandering", "start_time": 1263, "end_time": 1367},
-                    {"title": "Zacht weer", "start_time": 1367, "end_time": 1383},
-                    {"title": "Financiële balans", "start_time": 1383, "end_time": 1484},
-                    {"title": "Club Brugge", "start_time": 1484, "end_time": 1575},
-                    {"title": "Mentale gezondheid bij topsporters", "start_time": 1575, "end_time": 1728},
-                    {"title": "Olympische Winterspelen", "start_time": 1728, "end_time": 1873},
-                    {"title": "Sober oudjaar in Nederland", "start_time": 1873, "end_time": 2079.23}
+                    {'title': 'Explosie Turnhout', 'start_time': 70, 'end_time': 440},
+                    {'title': 'Jaarwisseling', 'start_time': 440, 'end_time': 1179},
+                    {'title': 'Natuurbranden Colorado', 'start_time': 1179, 'end_time': 1263},
+                    {'title': 'Klimaatverandering', 'start_time': 1263, 'end_time': 1367},
+                    {'title': 'Zacht weer', 'start_time': 1367, 'end_time': 1383},
+                    {'title': 'Financiële balans', 'start_time': 1383, 'end_time': 1484},
+                    {'title': 'Club Brugge', 'start_time': 1484, 'end_time': 1575},
+                    {'title': 'Mentale gezondheid bij topsporters', 'start_time': 1575, 'end_time': 1728},
+                    {'title': 'Olympische Winterspelen', 'start_time': 1728, 'end_time': 1873},
+                    {'title': 'Sober oudjaar in Nederland', 'start_time': 1873, 'end_time': 2079.23},
                 ],
-                'title': 'Het journaal - Aflevering 365 (Seizoen 2021)'
-            }, {}
+                'title': 'Het journaal - Aflevering 365 (Seizoen 2021)',
+            }, {},
             ),
             (
                 # test multiple thumbnails in a list
@@ -301,13 +313,27 @@ class TestInfoExtractor(unittest.TestCase):
                 'thumbnails': [{'url': 'https://www.rainews.it/cropgd/640x360/dl/img/2021/12/30/1640886376927_GettyImages.jpg'}],
                 },
                 {},
-            )
+            ),
+            (
+                # test thumbnail_url key without URL scheme
+                r'''
+<script type="application/ld+json">
+{
+    "@context": "https://schema.org",
+    "@type": "VideoObject",
+    "thumbnail_url": "//www.nobelprize.org/images/12693-landscape-medium-gallery.jpg"
+}</script>''',
+                {
+                    'thumbnails': [{'url': 'https://www.nobelprize.org/images/12693-landscape-medium-gallery.jpg'}],
+                },
+                {},
+            ),
         ]
         for html, expected_dict, search_json_ld_kwargs in _TESTS:
             expect_dict(
                 self,
                 self.ie._search_json_ld(html, None, **search_json_ld_kwargs),
-                expected_dict
+                expected_dict,
             )
 
     def test_download_json(self):
|
|||||||
'height': 740,
|
'height': 740,
|
||||||
'tbr': 1500,
|
'tbr': 1500,
|
||||||
}],
|
}],
|
||||||
'thumbnail': '//pics.r18.com/digital/amateur/mgmr105/mgmr105jp.jpg'
|
'thumbnail': '//pics.r18.com/digital/amateur/mgmr105/mgmr105jp.jpg',
|
||||||
})
|
})
|
||||||
|
|
||||||
# from https://www.csfd.cz/
|
# from https://www.csfd.cz/
|
||||||
@ -419,9 +445,9 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
'height': 1080,
|
'height': 1080,
|
||||||
}],
|
}],
|
||||||
'subtitles': {
|
'subtitles': {
|
||||||
'cs': [{'url': 'https://video.csfd.cz/files/subtitles/163/344/163344115_4c388b.srt'}]
|
'cs': [{'url': 'https://video.csfd.cz/files/subtitles/163/344/163344115_4c388b.srt'}],
|
||||||
},
|
},
|
||||||
'thumbnail': 'https://img.csfd.cz/files/images/film/video/preview/163/344/163344118_748d20.png?h360'
|
'thumbnail': 'https://img.csfd.cz/files/images/film/video/preview/163/344/163344118_748d20.png?h360',
|
||||||
})
|
})
|
||||||
|
|
||||||
# from https://tamasha.com/v/Kkdjw
|
# from https://tamasha.com/v/Kkdjw
|
||||||
@ -452,7 +478,7 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
'ext': 'mp4',
|
'ext': 'mp4',
|
||||||
'format_id': '144p',
|
'format_id': '144p',
|
||||||
'height': 144,
|
'height': 144,
|
||||||
}]
|
}],
|
||||||
})
|
})
|
||||||
|
|
||||||
# from https://www.directvnow.com
|
# from https://www.directvnow.com
|
||||||
@ -470,7 +496,7 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
'formats': [{
|
'formats': [{
|
||||||
'ext': 'mp4',
|
'ext': 'mp4',
|
||||||
'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
|
'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
|
||||||
}]
|
}],
|
||||||
})
|
})
|
||||||
|
|
||||||
# from https://www.directvnow.com
|
# from https://www.directvnow.com
|
||||||
@ -488,7 +514,7 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
'formats': [{
|
'formats': [{
|
||||||
'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
|
'url': 'https://cdn.directv.com/content/dam/dtv/prod/website_directvnow-international/videos/DTVN_hdr_HBO_v3.mp4',
|
||||||
'ext': 'mp4',
|
'ext': 'mp4',
|
||||||
}]
|
}],
|
||||||
})
|
})
|
||||||
|
|
||||||
# from https://www.klarna.com/uk/
|
# from https://www.klarna.com/uk/
|
||||||
@ -547,8 +573,8 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
'id': 'XEgvuql4',
|
'id': 'XEgvuql4',
|
||||||
'formats': [{
|
'formats': [{
|
||||||
'url': 'rtmp://192.138.214.154/live/sjclive',
|
'url': 'rtmp://192.138.214.154/live/sjclive',
|
||||||
'ext': 'flv'
|
'ext': 'flv',
|
||||||
}]
|
}],
|
||||||
})
|
})
|
||||||
|
|
||||||
# from https://www.pornoxo.com/videos/7564/striptease-from-sexy-secretary/
|
# from https://www.pornoxo.com/videos/7564/striptease-from-sexy-secretary/
|
||||||
@ -588,8 +614,8 @@ class TestInfoExtractor(unittest.TestCase):
|
|||||||
'thumbnail': 'https://t03.vipstreamservice.com/thumbs/pxo-full/2009-12/14/a4b2157147afe5efa93ce1978e0265289c193874e02597.flv-full-13.jpg',
|
'thumbnail': 'https://t03.vipstreamservice.com/thumbs/pxo-full/2009-12/14/a4b2157147afe5efa93ce1978e0265289c193874e02597.flv-full-13.jpg',
|
||||||
'formats': [{
|
'formats': [{
|
||||||
'url': 'https://cdn.pornoxo.com/key=MF+oEbaxqTKb50P-w9G3nA,end=1489689259,ip=104.199.146.27/ip=104.199.146.27/speed=6573765/buffer=3.0/2009-12/4b2157147afe5efa93ce1978e0265289c193874e02597.flv',
|
'url': 'https://cdn.pornoxo.com/key=MF+oEbaxqTKb50P-w9G3nA,end=1489689259,ip=104.199.146.27/ip=104.199.146.27/speed=6573765/buffer=3.0/2009-12/4b2157147afe5efa93ce1978e0265289c193874e02597.flv',
|
||||||
'ext': 'flv'
|
'ext': 'flv',
|
||||||
}]
|
}],
|
||||||
})
|
})
|
||||||
|
|
||||||
# from http://www.indiedb.com/games/king-machine/videos
|
# from http://www.indiedb.com/games/king-machine/videos
|
||||||
@ -610,12 +636,12 @@ jwplayer("mediaplayer").setup({"abouttext":"Visit Indie DB","aboutlink":"http:\/
|
|||||||
'formats': [{
|
'formats': [{
|
||||||
'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode_mp4/king-machine-trailer.mp4',
|
'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode_mp4/king-machine-trailer.mp4',
|
||||||
'height': 360,
|
'height': 360,
|
||||||
'ext': 'mp4'
|
'ext': 'mp4',
|
||||||
}, {
|
}, {
|
||||||
'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode720p_mp4/king-machine-trailer.mp4',
|
'url': 'http://cdn.dbolical.com/cache/videos/games/1/50/49678/encode720p_mp4/king-machine-trailer.mp4',
|
||||||
'height': 720,
|
'height': 720,
|
||||||
'ext': 'mp4'
|
'ext': 'mp4',
|
||||||
}]
|
}],
|
||||||
})
|
})
|
||||||
|
|
||||||
def test_parse_m3u8_formats(self):
|
def test_parse_m3u8_formats(self):
|
||||||
@@ -626,6 +652,7 @@
             'img_bipbop_adv_example_fmp4',
             'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
             [{
+                # 60kbps (bitrate not provided in m3u8); sorted as worst because it's grouped with lowest bitrate video track
                 'format_id': 'aud1-English',
                 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a1/prog_index.m3u8',
                 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
@@ -633,15 +660,9 @@
                 'ext': 'mp4',
                 'protocol': 'm3u8_native',
                 'audio_ext': 'mp4',
+                'source_preference': 0,
             }, {
-                'format_id': 'aud2-English',
-                'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
-                'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
-                'language': 'en',
-                'ext': 'mp4',
-                'protocol': 'm3u8_native',
-                'audio_ext': 'mp4',
-            }, {
+                # 192kbps (bitrate not provided in m3u8)
                 'format_id': 'aud3-English',
                 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a3/prog_index.m3u8',
                 'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
@@ -649,6 +670,17 @@
                 'ext': 'mp4',
                 'protocol': 'm3u8_native',
                 'audio_ext': 'mp4',
+                'source_preference': 1,
+            }, {
+                # 384kbps (bitrate not provided in m3u8); sorted as best because it's grouped with the highest bitrate video track
+                'format_id': 'aud2-English',
+                'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/a2/prog_index.m3u8',
+                'manifest_url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8',
+                'language': 'en',
+                'ext': 'mp4',
+                'protocol': 'm3u8_native',
+                'audio_ext': 'mp4',
+                'source_preference': 2,
             }, {
                 'format_id': '530',
                 'url': 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/v2/prog_index.m3u8',
@@ -866,7 +898,7 @@
                 'height': 1080,
                 'vcodec': 'avc1.64002a',
             }],
-            {}
+            {},
         ),
         (
             'bipbop_16x9',
@@ -990,45 +1022,45 @@
-                    'protocol': 'm3u8_native'
+                    'protocol': 'm3u8_native',
                 (the same trailing comma is added after each of the ten en/fr/es/ja subtitle entries)
-            }
+            },
             ),
         ]
 
         for m3u8_file, m3u8_url, expected_formats, expected_subs in _TEST_CASES:
-            with open('./test/testdata/m3u8/%s.m3u8' % m3u8_file, encoding='utf-8') as f:
+            with open(f'./test/testdata/m3u8/{m3u8_file}.m3u8', encoding='utf-8') as f:
                 formats, subs = self.ie._parse_m3u8_formats_and_subtitles(
                     f.read(), m3u8_url, ext='mp4')
                 self.ie._sort_formats(formats)
@@ -1366,14 +1398,14 @@
                         'url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/manifest.mpd',
                         'fragment_base_url': 'https://sdn-global-streaming-cache-3qsdn.akamaized.net/stream/3144/files/17/07/672975/3144-kZT4LWMQw6Rh7Kpd.ism/dash/',
                         'protocol': 'http_dash_segments',
-                    }
-                ]
+                    },
+                ],
                 },
-            )
+            ),
         ]
 
         for mpd_file, mpd_url, mpd_base_url, expected_formats, expected_subtitles in _TEST_CASES:
-            with open('./test/testdata/mpd/%s.mpd' % mpd_file, encoding='utf-8') as f:
+            with open(f'./test/testdata/mpd/{mpd_file}.mpd', encoding='utf-8') as f:
                 formats, subtitles = self.ie._parse_mpd_formats_and_subtitles(
                     compat_etree_fromstring(f.read().encode()),
                     mpd_base_url=mpd_base_url, mpd_url=mpd_url)

In the ISM test data, the hunks at old lines 1408, 1431, 1454, 1477, 1500, 1523, 1571, 1597, 1622, 1647, 1672, 1698, 1723, 1748, 1773 and 1798 each make the same style-only change:
-                    'nal_unit_length_field': 4
+                    'nal_unit_length_field': 4,

@@ -1538,10 +1570,10 @@
                     'duration': 8880746666,
                     'timescale': 10000000,
                     'fourcc': 'TTML',
-                    'codec_private_data': ''
-                }
-            }
-        ]
+                    'codec_private_data': '',
+                },
+            },
+        ],
             },
         ),
         (
@@ -1806,7 +1838,7 @@
         ]
 
         for ism_file, ism_url, expected_formats, expected_subtitles in _TEST_CASES:
-            with open('./test/testdata/ism/%s.Manifest' % ism_file, encoding='utf-8') as f:
+            with open(f'./test/testdata/ism/{ism_file}.Manifest', encoding='utf-8') as f:
                 formats, subtitles = self.ie._parse_ism_formats_and_subtitles(
                     compat_etree_fromstring(f.read().encode()), ism_url=ism_url)
                 self.ie._sort_formats(formats)
@@ -1827,12 +1859,12 @@
                 'tbr': 2148,
                 'width': 1280,
                 'height': 720,
-            }]
+            }],
             ),
         ]
 
         for f4m_file, f4m_url, expected_formats in _TEST_CASES:
-            with open('./test/testdata/f4m/%s.f4m' % f4m_file, encoding='utf-8') as f:
+            with open(f'./test/testdata/f4m/{f4m_file}.f4m', encoding='utf-8') as f:
                 formats = self.ie._parse_f4m_formats(
                     compat_etree_fromstring(f.read().encode()),
                     f4m_url, None)
@@ -1873,13 +1905,13 @@
             }, {
                 'manifest_url': 'https://example.org/src/foo_xspf.xspf',
                 'url': 'https://example.com/track3.mp3',
-            }]
-            }]
+            }],
+            }],
             ),
         ]
 
         for xspf_file, xspf_url, expected_entries in _TEST_CASES:
-            with open('./test/testdata/xspf/%s.xspf' % xspf_file, encoding='utf-8') as f:
+            with open(f'./test/testdata/xspf/{xspf_file}.xspf', encoding='utf-8') as f:
                 entries = self.ie._parse_xspf(
                     compat_etree_fromstring(f.read().encode()),
                     xspf_file, xspf_url=xspf_url, xspf_base_url=xspf_url)
@@ -1902,10 +1934,150 @@
         server_thread.start()
 
         (content, urlh) = self.ie._download_webpage_handle(
-            'http://127.0.0.1:%d/teapot' % port, None,
+            f'http://127.0.0.1:{port}/teapot', None,
             expected_status=TEAPOT_RESPONSE_STATUS)
         self.assertEqual(content, TEAPOT_RESPONSE_BODY)
 
+    def test_search_nextjs_data(self):
+        data = '<script id="__NEXT_DATA__" type="application/json">{"props":{}}</script>'
+        self.assertEqual(self.ie._search_nextjs_data(data, None), {'props': {}})
+        self.assertEqual(self.ie._search_nextjs_data('', None, fatal=False), {})
+        self.assertEqual(self.ie._search_nextjs_data('', None, default=None), None)
+        self.assertEqual(self.ie._search_nextjs_data('', None, default={}), {})
+        with self.assertWarns(DeprecationWarning):
+            self.assertEqual(self.ie._search_nextjs_data('', None, default='{}'), {})
+
+    def test_search_nuxt_json(self):
+        HTML_TMPL = '<script data-ssr="true" id="__NUXT_DATA__" type="application/json">[{}]</script>'
+        VALID_DATA = '''
+            ["ShallowReactive",1],
+            {"data":2,"state":21,"once":25,"_errors":28,"_server_errors":30},
+            ["ShallowReactive",3],
+            {"$abcdef123456":4},
+            {"podcast":5,"activeEpisodeData":7},
+            {"podcast":6,"seasons":14},
+            {"title":10,"id":11},
+            ["Reactive",8],
+            {"episode":9,"creators":18,"empty_list":20},
+            {"title":12,"id":13,"refs":34,"empty_refs":35},
+            "Series Title",
+            "podcast-id-01",
+            "Episode Title",
+            "episode-id-99",
+            [15,16,17],
+            1,
+            2,
+            3,
+            [19],
+            "Podcast Creator",
+            [],
+            {"$ssite-config":22},
+            {"env":23,"name":24,"map":26,"numbers":14},
+            "production",
+            "podcast-website",
+            ["Set"],
+            ["Reactive",27],
+            ["Map"],
+            ["ShallowReactive",29],
+            {},
+            ["NuxtError",31],
+            {"status":32,"message":33},
+            503,
+            "Service Unavailable",
+            [36,37],
+            [38,39],
+            ["Ref",40],
+            ["ShallowRef",41],
+            ["EmptyRef",42],
+            ["EmptyShallowRef",43],
+            "ref",
+            "shallow_ref",
+            "{\\"ref\\":1}",
+            "{\\"shallow_ref\\":2}"
+        '''
+        PAYLOAD = {
+            'data': {
+                '$abcdef123456': {
+                    'podcast': {
+                        'podcast': {
+                            'title': 'Series Title',
+                            'id': 'podcast-id-01',
+                        },
+                        'seasons': [1, 2, 3],
+                    },
+                    'activeEpisodeData': {
+                        'episode': {
+                            'title': 'Episode Title',
+                            'id': 'episode-id-99',
+                            'refs': ['ref', 'shallow_ref'],
+                            'empty_refs': [{'ref': 1}, {'shallow_ref': 2}],
+                        },
+                        'creators': ['Podcast Creator'],
+                        'empty_list': [],
+                    },
+                },
+            },
+            'state': {
+                '$ssite-config': {
+                    'env': 'production',
+                    'name': 'podcast-website',
+                    'map': [],
+                    'numbers': [1, 2, 3],
+                },
+            },
+            'once': [],
+            '_errors': {},
+            '_server_errors': {
+                'status': 503,
+                'message': 'Service Unavailable',
+            },
+        }
+        PARTIALLY_INVALID = [(
+            '''
+            {"data":1},
+            {"invalid_raw_list":2},
+            [15,16,17]
+            ''',
+            {'data': {'invalid_raw_list': [None, None, None]}},
+        ), (
+            '''
+            {"data":1},
+            ["EmptyRef",2],
+            "not valid JSON"
+            ''',
+            {'data': None},
+        ), (
+            '''
+            {"data":1},
+            ["EmptyShallowRef",2],
+            "not valid JSON"
+            ''',
+            {'data': None},
+        )]
+        INVALID = [
|
'''
|
||||||
|
[]
|
||||||
|
''',
|
||||||
|
'''
|
||||||
|
["unsupported",1],
|
||||||
|
{"data":2},
|
||||||
|
{}
|
||||||
|
''',
|
||||||
|
]
|
||||||
|
DEFAULT = object()
|
||||||
|
|
||||||
|
self.assertEqual(self.ie._search_nuxt_json(HTML_TMPL.format(VALID_DATA), None), PAYLOAD)
|
||||||
|
self.assertEqual(self.ie._search_nuxt_json('', None, fatal=False), {})
|
||||||
|
self.assertIs(self.ie._search_nuxt_json('', None, default=DEFAULT), DEFAULT)
|
||||||
|
|
||||||
|
for data, expected in PARTIALLY_INVALID:
|
||||||
|
self.assertEqual(
|
||||||
|
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, fatal=False), expected)
|
||||||
|
|
||||||
|
for data in INVALID:
|
||||||
|
self.assertIs(
|
||||||
|
self.ie._search_nuxt_json(HTML_TMPL.format(data), None, default=DEFAULT), DEFAULT)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
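Note: the new test_search_nuxt_json case above exercises Nuxt's flat "__NUXT_DATA__" payload, in which the script body is a single JSON array and values inside containers are indices into that same array. The following is only an illustrative sketch written for this note, not yt-dlp's _search_nuxt_json; the name resolve_nuxt_array and the two lookup tables are invented here, and cyclic references are not handled. It shows how the VALID_DATA array above expands into PAYLOAD.

import json

ALIAS_NODES = ('Reactive', 'ShallowReactive', 'Ref', 'ShallowRef', 'NuxtError')
JSON_TEXT_NODES = ('EmptyRef', 'EmptyShallowRef')


def resolve_nuxt_array(flat):
    # flat is the decoded JSON array from the __NUXT_DATA__ script tag
    def deref(idx):
        if not isinstance(idx, int) or not 0 <= idx < len(flat):
            return None  # out-of-range reference, as in the PARTIALLY_INVALID cases
        node = flat[idx]
        if isinstance(node, dict):
            return {key: deref(value) for key, value in node.items()}
        if isinstance(node, list):
            if node and node[0] in ALIAS_NODES:
                return deref(node[1]) if len(node) > 1 else None
            if node and node[0] in JSON_TEXT_NODES:
                try:
                    return json.loads(flat[node[1]])  # JSON text carried by Empty*Ref
                except (ValueError, TypeError, IndexError):
                    return None
            if node and node[0] in ('Map', 'Set'):
                return []
            return [deref(value) for value in node]
        return node  # plain strings/numbers/bools reached via an index are literals

    return deref(0)

With this sketch, resolve_nuxt_array(json.loads('[' + VALID_DATA + ']')) yields the same nested structure as PAYLOAD in the test.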
@@ -4,16 +4,19 @@
import os
import sys
import unittest
+from unittest.mock import patch

+from yt_dlp.globals import all_plugins_loaded
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


+import contextlib
import copy
import json

from test.helper import FakeYDL, assertRegexpMatches, try_rm
from yt_dlp import YoutubeDL
-from yt_dlp.compat import compat_os_name
from yt_dlp.extractor import YoutubeIE
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.postprocessor.common import PostProcessor
@@ -129,8 +132,8 @@ class TestFormatSelection(unittest.TestCase):
'allow_multiple_audio_streams': multi,
})
ydl.process_ie_result(info_dict.copy())
-downloaded = map(lambda x: x['format_id'], ydl.downloaded_info_dicts)
+downloaded = [x['format_id'] for x in ydl.downloaded_info_dicts]
-self.assertEqual(list(downloaded), list(expected))
+self.assertEqual(downloaded, list(expected))

test('20/47', '47')
test('20/71/worst', '35')
@@ -140,6 +143,8 @@ class TestFormatSelection(unittest.TestCase):
test('example-with-dashes', 'example-with-dashes')
test('all', '2', '47', '45', 'example-with-dashes', '35')
test('mergeall', '2+47+45+example-with-dashes+35', multi=True)
+# See: https://github.com/yt-dlp/yt-dlp/pulls/8797
+test('7_a/worst', '35')

def test_format_selection_audio(self):
formats = [
@@ -181,7 +186,7 @@ class TestFormatSelection(unittest.TestCase):
]

info_dict = _make_result(formats)
-ydl = YDL({'format': 'best'})
+ydl = YDL({'format': 'best', 'format_sort': ['abr', 'ext']})
ydl.sort_formats(info_dict)
ydl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0]
@@ -193,7 +198,7 @@ class TestFormatSelection(unittest.TestCase):
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'mp3-64')

-ydl = YDL({'prefer_free_formats': True})
+ydl = YDL({'prefer_free_formats': True, 'format_sort': ['abr', 'ext']})
ydl.sort_formats(info_dict)
ydl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0]
@@ -232,6 +237,35 @@ class TestFormatSelection(unittest.TestCase):
downloaded = ydl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')

+def test_format_selection_by_vcodec_sort(self):
+formats = [
+{'format_id': 'av1-format', 'ext': 'mp4', 'vcodec': 'av1', 'acodec': 'none', 'url': TEST_URL},
+{'format_id': 'vp9-hdr-format', 'ext': 'mp4', 'vcodec': 'vp09.02.50.10.01.09.18.09.00', 'acodec': 'none', 'url': TEST_URL},
+{'format_id': 'vp9-sdr-format', 'ext': 'mp4', 'vcodec': 'vp09.00.50.08', 'acodec': 'none', 'url': TEST_URL},
+{'format_id': 'h265-format', 'ext': 'mp4', 'vcodec': 'h265', 'acodec': 'none', 'url': TEST_URL},
+]
+info_dict = _make_result(formats)
+
+ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9.2']})
+ydl.process_ie_result(info_dict.copy())
+downloaded = ydl.downloaded_info_dicts[0]
+self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')
+
+ydl = YDL({'format': 'bestvideo', 'format_sort': ['vcodec:vp9']})
+ydl.process_ie_result(info_dict.copy())
+downloaded = ydl.downloaded_info_dicts[0]
+self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')
+
+ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9.2']})
+ydl.process_ie_result(info_dict.copy())
+downloaded = ydl.downloaded_info_dicts[0]
+self.assertEqual(downloaded['format_id'], 'vp9-hdr-format')
+
+ydl = YDL({'format': 'bestvideo', 'format_sort': ['+vcodec:vp9']})
+ydl.process_ie_result(info_dict.copy())
+downloaded = ydl.downloaded_info_dicts[0]
+self.assertEqual(downloaded['format_id'], 'vp9-sdr-format')
+
def test_format_selection_string_ops(self):
formats = [
{'format_id': 'abc-cba', 'ext': 'mp4', 'url': TEST_URL},
@@ -454,11 +488,11 @@ class TestFormatSelection(unittest.TestCase):

def test_format_filtering(self):
formats = [
-{'format_id': 'A', 'filesize': 500, 'width': 1000},
+{'format_id': 'A', 'filesize': 500, 'width': 1000, 'aspect_ratio': 1.0},
-{'format_id': 'B', 'filesize': 1000, 'width': 500},
+{'format_id': 'B', 'filesize': 1000, 'width': 500, 'aspect_ratio': 1.33},
-{'format_id': 'C', 'filesize': 1000, 'width': 400},
+{'format_id': 'C', 'filesize': 1000, 'width': 400, 'aspect_ratio': 1.5},
-{'format_id': 'D', 'filesize': 2000, 'width': 600},
+{'format_id': 'D', 'filesize': 2000, 'width': 600, 'aspect_ratio': 1.78},
-{'format_id': 'E', 'filesize': 3000},
+{'format_id': 'E', 'filesize': 3000, 'aspect_ratio': 0.56},
{'format_id': 'F'},
{'format_id': 'G', 'filesize': 1000000},
]
@@ -513,13 +547,62 @@ class TestFormatSelection(unittest.TestCase):
self.assertEqual(downloaded_ids, ['D', 'C', 'B'])

ydl = YDL({'format': 'best[height<40]'})
-try:
+with contextlib.suppress(ExtractorError):
ydl.process_ie_result(info_dict)
-except ExtractorError:
-pass
self.assertEqual(ydl.downloaded_info_dicts, [])

-def test_default_format_spec(self):
+ydl = YDL({'format': 'best[aspect_ratio=1]'})
+ydl.process_ie_result(info_dict)
+downloaded = ydl.downloaded_info_dicts[0]
+self.assertEqual(downloaded['format_id'], 'A')
+
+ydl = YDL({'format': 'all[aspect_ratio > 1.00]'})
+ydl.process_ie_result(info_dict)
+downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+self.assertEqual(downloaded_ids, ['D', 'C', 'B'])
+
+ydl = YDL({'format': 'all[aspect_ratio < 1.00]'})
+ydl.process_ie_result(info_dict)
+downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+self.assertEqual(downloaded_ids, ['E'])
+
+ydl = YDL({'format': 'best[aspect_ratio=1.5]'})
+ydl.process_ie_result(info_dict)
+downloaded = ydl.downloaded_info_dicts[0]
+self.assertEqual(downloaded['format_id'], 'C')
+
+ydl = YDL({'format': 'all[aspect_ratio!=1]'})
+ydl.process_ie_result(info_dict)
+downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+self.assertEqual(downloaded_ids, ['E', 'D', 'C', 'B'])
+
+@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', False)
+def test_default_format_spec_without_ffmpeg(self):
+ydl = YDL({})
+self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
+
+ydl = YDL({'simulate': True})
+self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
+
+ydl = YDL({})
+self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
+
+ydl = YDL({'simulate': True})
+self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
+
+ydl = YDL({'outtmpl': '-'})
+self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
+
+ydl = YDL({})
+self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
+self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
+
+@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.available', True)
+@patch('yt_dlp.postprocessor.ffmpeg.FFmpegMergerPP.can_merge', lambda _: True)
+def test_default_format_spec_with_ffmpeg(self):
+ydl = YDL({})
+self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')
+
ydl = YDL({'simulate': True})
self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')

@@ -527,13 +610,13 @@ class TestFormatSelection(unittest.TestCase):
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')

ydl = YDL({'simulate': True})
-self.assertEqual(ydl._default_format_spec({'is_live': True}), 'bestvideo*+bestaudio/best')
+self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')

ydl = YDL({'outtmpl': '-'})
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')

ydl = YDL({})
-self.assertEqual(ydl._default_format_spec({}, download=False), 'bestvideo*+bestaudio/best')
+self.assertEqual(ydl._default_format_spec({}), 'bestvideo*+bestaudio/best')
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')


@@ -650,8 +733,8 @@ class TestYoutubeDL(unittest.TestCase):
'formats': [
{'id': 'id 1', 'height': 1080, 'width': 1920},
{'id': 'id 2', 'height': 720},
-{'id': 'id 3'}
+{'id': 'id 3'},
-]
+],
}

def test_prepare_outtmpl_and_filename(self):
@@ -705,6 +788,13 @@ class TestYoutubeDL(unittest.TestCase):
test('%(width)06d.%%(ext)s', 'NA.%(ext)s')
test('%%(width)06d.%(ext)s', '%(width)06d.mp4')

+# Sanitization options
+test('%(title3)s', (None, 'foo⧸bar⧹test'))
+test('%(title5)s', (None, 'aei_A'), restrictfilenames=True)
+test('%(title3)s', (None, 'foo_bar_test'), windowsfilenames=False, restrictfilenames=True)
+if sys.platform != 'win32':
+test('%(title3)s', (None, 'foo⧸bar\\test'), windowsfilenames=False)
+
# ID sanitization
test('%(id)s', '_abcd', info={'id': '_abcd'})
test('%(some_id)s', '_abcd', info={'some_id': '_abcd'})
@@ -728,7 +818,7 @@ class TestYoutubeDL(unittest.TestCase):
self.assertEqual(got_dict.get(info_field), expected, info_field)
return True

-test('%()j', (expect_same_infodict, str))
+test('%()j', (expect_same_infodict, None))

# NA placeholder
NA_TEST_OUTTMPL = '%(uploader_date)s-%(width)d-%(x|def)s-%(id)s.%(ext)s'
@@ -771,7 +861,7 @@ class TestYoutubeDL(unittest.TestCase):
test('%(formats)j', (json.dumps(FORMATS), None))
test('%(formats)#j', (
json.dumps(FORMATS, indent=4),
-json.dumps(FORMATS, indent=4).replace(':', ':').replace('"', """).replace('\n', ' ')
+json.dumps(FORMATS, indent=4).replace(':', ':').replace('"', '"').replace('\n', ' '),
))
test('%(title5).3B', 'á')
test('%(title5)U', 'áéí 𝐀')
@@ -782,8 +872,8 @@ class TestYoutubeDL(unittest.TestCase):
test('%(filesize)#D', '1Ki')
test('%(height)5.2D', ' 1.08k')
test('%(title4)#S', 'foo_bar_test')
-test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if compat_os_name == 'nt' else ' ')))
+test('%(title4).10S', ('foo "bar" ', 'foo "bar"' + ('#' if os.name == 'nt' else ' ')))
-if compat_os_name == 'nt':
+if os.name == 'nt':
test('%(title4)q', ('"foo ""bar"" test"', None))
test('%(formats.:.id)#q', ('"id 1" "id 2" "id 3"', None))
test('%(formats.0.id)#q', ('"id 1"', None))
@@ -797,6 +887,7 @@ class TestYoutubeDL(unittest.TestCase):
test('%(title|%)s %(title|%%)s', '% %%')
test('%(id+1-height+3)05d', '00158')
test('%(width+100)05d', 'NA')
+test('%(filesize*8)d', '8192')
test('%(formats.0) 15s', ('% 15s' % FORMATS[0], None))
test('%(formats.0)r', (repr(FORMATS[0]), None))
test('%(height.0)03d', '001')
@@ -840,14 +931,14 @@ class TestYoutubeDL(unittest.TestCase):

# Empty filename
test('%(foo|)s-%(bar|)s.%(ext)s', '-.mp4')
-# test('%(foo|)s.%(ext)s', ('.mp4', '_.mp4')) # fixme
+# test('%(foo|)s.%(ext)s', ('.mp4', '_.mp4')) # FIXME: ?
-# test('%(foo|)s', ('', '_')) # fixme
+# test('%(foo|)s', ('', '_')) # FIXME: ?

# Environment variable expansion for prepare_filename
os.environ['__yt_dlp_var'] = 'expanded'
-envvar = '%__yt_dlp_var%' if compat_os_name == 'nt' else '$__yt_dlp_var'
+envvar = '%__yt_dlp_var%' if os.name == 'nt' else '$__yt_dlp_var'
test(envvar, (envvar, 'expanded'))
-if compat_os_name == 'nt':
+if os.name == 'nt':
test('%s%', ('%s%', '%s%'))
os.environ['s'] = 'expanded'
test('%s%', ('%s%', 'expanded')) # %s% should be expanded before escaping %s
@@ -858,7 +949,7 @@ class TestYoutubeDL(unittest.TestCase):
test('Hello %(title1)s', 'Hello $PATH')
test('Hello %(title2)s', 'Hello %PATH%')
test('%(title3)s', ('foo/bar\\test', 'foo⧸bar⧹test'))
-test('folder/%(title3)s', ('folder/foo/bar\\test', 'folder%sfoo⧸bar⧹test' % os.path.sep))
+test('folder/%(title3)s', ('folder/foo/bar\\test', f'folder{os.path.sep}foo⧸bar⧹test'))

def test_format_note(self):
ydl = YoutubeDL()
@@ -880,22 +971,22 @@ class TestYoutubeDL(unittest.TestCase):
f.write('EXAMPLE')
return [info['filepath']], info

-def run_pp(params, PP):
+def run_pp(params, pp):
with open(filename, 'w') as f:
f.write('EXAMPLE')
ydl = YoutubeDL(params)
-ydl.add_post_processor(PP())
+ydl.add_post_processor(pp())
ydl.post_process(filename, {'filepath': filename})

run_pp({'keepvideo': True}, SimplePP)
-self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
+self.assertTrue(os.path.exists(filename), f'{filename} doesn\'t exist')
-self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
+self.assertTrue(os.path.exists(audiofile), f'{audiofile} doesn\'t exist')
os.unlink(filename)
os.unlink(audiofile)

run_pp({'keepvideo': False}, SimplePP)
-self.assertFalse(os.path.exists(filename), '%s exists' % filename)
+self.assertFalse(os.path.exists(filename), f'{filename} exists')
-self.assertTrue(os.path.exists(audiofile), '%s doesn\'t exist' % audiofile)
+self.assertTrue(os.path.exists(audiofile), f'{audiofile} doesn\'t exist')
os.unlink(audiofile)

class ModifierPP(PostProcessor):
@@ -905,7 +996,7 @@ class TestYoutubeDL(unittest.TestCase):
return [], info

run_pp({'keepvideo': False}, ModifierPP)
-self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
+self.assertTrue(os.path.exists(filename), f'{filename} doesn\'t exist')
os.unlink(filename)

def test_match_filter(self):
@@ -917,7 +1008,7 @@ class TestYoutubeDL(unittest.TestCase):
'duration': 30,
'filesize': 10 * 1024,
'playlist_id': '42',
-'uploader': "變態妍字幕版 太妍 тест",
+'uploader': '變態妍字幕版 太妍 тест',
'creator': "тест ' 123 ' тест--",
'webpage_url': 'http://example.com/watch?v=shenanigans',
}
@@ -930,7 +1021,7 @@ class TestYoutubeDL(unittest.TestCase):
'description': 'foo',
'filesize': 5 * 1024,
'playlist_id': '43',
-'uploader': "тест 123",
+'uploader': 'тест 123',
'webpage_url': 'http://example.com/watch?v=SHENANIGANS',
}
videos = [first, second]
@@ -938,7 +1029,7 @@ class TestYoutubeDL(unittest.TestCase):
def get_videos(filter_=None):
ydl = YDL({'match_filter': filter_, 'simulate': True})
for v in videos:
-ydl.process_ie_result(v, download=True)
+ydl.process_ie_result(v.copy(), download=True)
return [v['id'] for v in ydl.downloaded_info_dicts]

res = get_videos()
@@ -1177,7 +1268,7 @@ class TestYoutubeDL(unittest.TestCase):
})
return {
'id': video_id,
-'title': 'Video %s' % video_id,
+'title': f'Video {video_id}',
'formats': formats,
}

@@ -1191,8 +1282,8 @@ class TestYoutubeDL(unittest.TestCase):
'_type': 'url_transparent',
'ie_key': VideoIE.ie_key(),
'id': video_id,
-'url': 'video:%s' % video_id,
+'url': f'video:{video_id}',
-'title': 'Video Transparent %s' % video_id,
+'title': f'Video Transparent {video_id}',
}

def _real_extract(self, url):
@@ -1338,6 +1429,33 @@ class TestYoutubeDL(unittest.TestCase):
self.assertFalse(result.get('cookies'), msg='Cookies set in cookies field for wrong domain')
self.assertFalse(ydl.cookiejar.get_cookie_header(fmt['url']), msg='Cookies set in cookiejar for wrong domain')

+def test_load_plugins_compat(self):
+# Should try to reload plugins if they haven't already been loaded
+all_plugins_loaded.value = False
+FakeYDL().close()
+assert all_plugins_loaded.value
+
+def test_close_hooks(self):
+# Should call all registered close hooks on close
+close_hook_called = False
+close_hook_two_called = False
+
+def close_hook():
+nonlocal close_hook_called
+close_hook_called = True
+
+def close_hook_two():
+nonlocal close_hook_two_called
+close_hook_two_called = True
+
+ydl = FakeYDL()
+ydl.add_close_hook(close_hook)
+ydl.add_close_hook(close_hook_two)
+
+ydl.close()
+self.assertTrue(close_hook_called, 'Close hook was not called')
+self.assertTrue(close_hook_two_called, 'Close hook two was not called')


if __name__ == '__main__':
unittest.main()
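Note: the new test_format_selection_by_vcodec_sort case above drives the same 'format_sort' option that API users can pass to YoutubeDL. A minimal sketch of that usage, assuming a placeholder URL (the options mirror what the test passes; this is an illustration, not part of the diff):

from yt_dlp import YoutubeDL

ydl_opts = {
    'format': 'bestvideo',
    'format_sort': ['vcodec:vp9.2'],  # prefer VP9.2 (HDR) over plain VP9/AV1/H.265, as asserted above
}
with YoutubeDL(ydl_opts) as ydl:
    info = ydl.extract_info('https://example.com/video', download=False)  # hypothetical URL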
@@ -27,7 +27,6 @@ from yt_dlp.aes (
pad_block,
)
from yt_dlp.dependencies import Cryptodome
-from yt_dlp.utils import bytes_to_intlist, intlist_to_bytes

# the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'

@@ -40,33 +39,33 @@ class TestAES(unittest.TestCase):
def test_encrypt(self):
msg = b'message'
key = list(range(16))
-encrypted = aes_encrypt(bytes_to_intlist(msg), key)
+encrypted = aes_encrypt(list(msg), key)
-decrypted = intlist_to_bytes(aes_decrypt(encrypted, key))
+decrypted = bytes(aes_decrypt(encrypted, key))
self.assertEqual(decrypted, msg)

def test_cbc_decrypt(self):
data = b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\x27\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd'
-decrypted = intlist_to_bytes(aes_cbc_decrypt(bytes_to_intlist(data), self.key, self.iv))
+decrypted = bytes(aes_cbc_decrypt(list(data), self.key, self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
if Cryptodome.AES:
-decrypted = aes_cbc_decrypt_bytes(data, intlist_to_bytes(self.key), intlist_to_bytes(self.iv))
+decrypted = aes_cbc_decrypt_bytes(data, bytes(self.key), bytes(self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)

def test_cbc_encrypt(self):
-data = bytes_to_intlist(self.secret_msg)
+data = list(self.secret_msg)
-encrypted = intlist_to_bytes(aes_cbc_encrypt(data, self.key, self.iv))
+encrypted = bytes(aes_cbc_encrypt(data, self.key, self.iv))
self.assertEqual(
encrypted,
b'\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6\'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd')

def test_ctr_decrypt(self):
-data = bytes_to_intlist(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
+data = list(b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
-decrypted = intlist_to_bytes(aes_ctr_decrypt(data, self.key, self.iv))
+decrypted = bytes(aes_ctr_decrypt(data, self.key, self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)

def test_ctr_encrypt(self):
-data = bytes_to_intlist(self.secret_msg)
+data = list(self.secret_msg)
-encrypted = intlist_to_bytes(aes_ctr_encrypt(data, self.key, self.iv))
+encrypted = bytes(aes_ctr_encrypt(data, self.key, self.iv))
self.assertEqual(
encrypted,
b'\x03\xc7\xdd\xd4\x8e\xb3\xbc\x1a*O\xdc1\x12+8Aio\xd1z\xb5#\xaf\x08')
@@ -75,47 +74,59 @@ class TestAES(unittest.TestCase):
data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f.\x08\xb4T\xe4/\x17\xbd'
authentication_tag = b'\xe8&I\x80rI\x07\x9d}YWuU@:e'

-decrypted = intlist_to_bytes(aes_gcm_decrypt_and_verify(
+decrypted = bytes(aes_gcm_decrypt_and_verify(
-bytes_to_intlist(data), self.key, bytes_to_intlist(authentication_tag), self.iv[:12]))
+list(data), self.key, list(authentication_tag), self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
if Cryptodome.AES:
decrypted = aes_gcm_decrypt_and_verify_bytes(
-data, intlist_to_bytes(self.key), authentication_tag, intlist_to_bytes(self.iv[:12]))
+data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)

+def test_gcm_aligned_decrypt(self):
+data = b'\x159Y\xcf5eud\x90\x9c\x85&]\x14\x1d\x0f'
+authentication_tag = b'\x08\xb1\x9d!&\x98\xd0\xeaRq\x90\xe6;\xb5]\xd8'
+
+decrypted = bytes(aes_gcm_decrypt_and_verify(
+list(data), self.key, list(authentication_tag), self.iv[:12]))
+self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])
+if Cryptodome.AES:
+decrypted = aes_gcm_decrypt_and_verify_bytes(
+data, bytes(self.key), authentication_tag, bytes(self.iv[:12]))
+self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg[:16])
+
def test_decrypt_text(self):
-password = intlist_to_bytes(self.key).decode()
+password = bytes(self.key).decode()
encrypted = base64.b64encode(
-intlist_to_bytes(self.iv[:8])
+bytes(self.iv[:8])
-+ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae'
++ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae',
).decode()
decrypted = (aes_decrypt_text(encrypted, password, 16))
self.assertEqual(decrypted, self.secret_msg)

-password = intlist_to_bytes(self.key).decode()
+password = bytes(self.key).decode()
encrypted = base64.b64encode(
-intlist_to_bytes(self.iv[:8])
+bytes(self.iv[:8])
-+ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83'
++ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83',
).decode()
decrypted = (aes_decrypt_text(encrypted, password, 32))
self.assertEqual(decrypted, self.secret_msg)

def test_ecb_encrypt(self):
-data = bytes_to_intlist(self.secret_msg)
+data = list(self.secret_msg)
-encrypted = intlist_to_bytes(aes_ecb_encrypt(data, self.key))
+encrypted = bytes(aes_ecb_encrypt(data, self.key))
self.assertEqual(
encrypted,
b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')

def test_ecb_decrypt(self):
-data = bytes_to_intlist(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
+data = list(b'\xaa\x86]\x81\x97>\x02\x92\x9d\x1bR[[L/u\xd3&\xd1(h\xde{\x81\x94\xba\x02\xae\xbd\xa6\xd0:')
-decrypted = intlist_to_bytes(aes_ecb_decrypt(data, self.key, self.iv))
+decrypted = bytes(aes_ecb_decrypt(data, self.key, self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)

def test_key_expansion(self):
key = '4f6bdaa39e2f8cb07f5e722d9edef314'

-self.assertEqual(key_expansion(bytes_to_intlist(bytearray.fromhex(key))), [
+self.assertEqual(key_expansion(list(bytearray.fromhex(key))), [
0x4F, 0x6B, 0xDA, 0xA3, 0x9E, 0x2F, 0x8C, 0xB0, 0x7F, 0x5E, 0x72, 0x2D, 0x9E, 0xDE, 0xF3, 0x14,
0x53, 0x66, 0x20, 0xA8, 0xCD, 0x49, 0xAC, 0x18, 0xB2, 0x17, 0xDE, 0x35, 0x2C, 0xC9, 0x2D, 0x21,
0x8C, 0xBE, 0xDD, 0xD9, 0x41, 0xF7, 0x71, 0xC1, 0xF3, 0xE0, 0xAF, 0xF4, 0xDF, 0x29, 0x82, 0xD5,
@@ -132,16 +143,16 @@ class TestAES(unittest.TestCase):
block = [0x21, 0xA0, 0x43, 0xFF]

self.assertEqual(pad_block(block, 'pkcs7'),
-block + [0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C])
+[*block, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C, 0x0C])

self.assertEqual(pad_block(block, 'iso7816'),
-block + [0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
+[*block, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])

self.assertEqual(pad_block(block, 'whitespace'),
-block + [0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20])
+[*block, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20])

self.assertEqual(pad_block(block, 'zero'),
-block + [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
+[*block, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])

block = list(range(16))
for mode in ('pkcs7', 'iso7816', 'whitespace', 'zero'):
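Note: the AES test changes above replace the removed bytes_to_intlist/intlist_to_bytes helpers with the plain built-ins. A minimal illustration of that equivalence (not part of the diff):

data = b'message'
assert list(data) == [109, 101, 115, 115, 97, 103, 101]   # what bytes_to_intlist(data) returned
assert bytes([109, 101, 115, 115, 97, 103, 101]) == data  # what intlist_to_bytes(...) returned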
@@ -12,27 +12,22 @@ import struct

from yt_dlp import compat
from yt_dlp.compat import urllib # isort: split
-from yt_dlp.compat import (
+from yt_dlp.compat import compat_etree_fromstring, compat_expanduser
-compat_etree_fromstring,
-compat_expanduser,
-compat_urllib_parse_unquote,
-compat_urllib_parse_urlencode,
-)
from yt_dlp.compat.urllib.request import getproxies


class TestCompat(unittest.TestCase):
def test_compat_passthrough(self):
with self.assertWarns(DeprecationWarning):
-compat.compat_basestring
+_ = compat.compat_basestring

with self.assertWarns(DeprecationWarning):
-compat.WINDOWS_VT_MODE
+_ = compat.WINDOWS_VT_MODE

self.assertEqual(urllib.request.getproxies, getproxies)

with self.assertWarns(DeprecationWarning):
-compat.compat_pycrypto_AES # Must not raise error
+_ = compat.compat_pycrypto_AES # Must not raise error

def test_compat_expanduser(self):
old_home = os.environ.get('HOME')
@@ -43,39 +38,6 @@ class TestCompat(unittest.TestCase):
finally:
os.environ['HOME'] = old_home or ''

-def test_compat_urllib_parse_unquote(self):
-self.assertEqual(compat_urllib_parse_unquote('abc%20def'), 'abc def')
-self.assertEqual(compat_urllib_parse_unquote('%7e/abc+def'), '~/abc+def')
-self.assertEqual(compat_urllib_parse_unquote(''), '')
-self.assertEqual(compat_urllib_parse_unquote('%'), '%')
-self.assertEqual(compat_urllib_parse_unquote('%%'), '%%')
-self.assertEqual(compat_urllib_parse_unquote('%%%'), '%%%')
-self.assertEqual(compat_urllib_parse_unquote('%2F'), '/')
-self.assertEqual(compat_urllib_parse_unquote('%2f'), '/')
-self.assertEqual(compat_urllib_parse_unquote('%E6%B4%A5%E6%B3%A2'), '津波')
-self.assertEqual(
-compat_urllib_parse_unquote('''<meta property="og:description" content="%E2%96%81%E2%96%82%E2%96%83%E2%96%84%25%E2%96%85%E2%96%86%E2%96%87%E2%96%88" />
-%<a href="https://ar.wikipedia.org/wiki/%D8%AA%D8%B3%D9%88%D9%86%D8%A7%D9%85%D9%8A">%a'''),
-'''<meta property="og:description" content="▁▂▃▄%▅▆▇█" />
-%<a href="https://ar.wikipedia.org/wiki/تسونامي">%a''')
-self.assertEqual(
-compat_urllib_parse_unquote('''%28%5E%E2%97%A3_%E2%97%A2%5E%29%E3%81%A3%EF%B8%BB%E3%83%87%E2%95%90%E4%B8%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%87%80 %E2%86%B6%I%Break%25Things%'''),
-'''(^◣_◢^)っ︻デ═一 ⇀ ⇀ ⇀ ⇀ ⇀ ↶%I%Break%Things%''')
-
-def test_compat_urllib_parse_unquote_plus(self):
-self.assertEqual(urllib.parse.unquote_plus('abc%20def'), 'abc def')
-self.assertEqual(urllib.parse.unquote_plus('%7e/abc+def'), '~/abc def')
-
-def test_compat_urllib_parse_urlencode(self):
-self.assertEqual(compat_urllib_parse_urlencode({'abc': 'def'}), 'abc=def')
-self.assertEqual(compat_urllib_parse_urlencode({'abc': b'def'}), 'abc=def')
-self.assertEqual(compat_urllib_parse_urlencode({b'abc': 'def'}), 'abc=def')
-self.assertEqual(compat_urllib_parse_urlencode({b'abc': b'def'}), 'abc=def')
-self.assertEqual(compat_urllib_parse_urlencode([('abc', 'def')]), 'abc=def')
-self.assertEqual(compat_urllib_parse_urlencode([('abc', b'def')]), 'abc=def')
-self.assertEqual(compat_urllib_parse_urlencode([(b'abc', 'def')]), 'abc=def')
-self.assertEqual(compat_urllib_parse_urlencode([(b'abc', b'def')]), 'abc=def')
-
def test_compat_etree_fromstring(self):
xml = '''
<root foo="bar" spam="中文">
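Note: the compat_urllib_parse_* shims whose tests are removed above map directly onto the standard library. A small illustration using values taken from the deleted tests (not part of the diff):

from urllib.parse import unquote, unquote_plus, urlencode

assert unquote('%E6%B4%A5%E6%B3%A2') == '津波'
assert unquote_plus('%7e/abc+def') == '~/abc def'
assert urlencode({'abc': 'def'}) == 'abc=def'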
@@ -71,7 +71,7 @@ def _generate_expected_groups():
Path('/etc/yt-dlp.conf'),
Path('/etc/yt-dlp/config'),
Path('/etc/yt-dlp/config.txt'),
-]
+],
}

@@ -1,5 +1,5 @@
+import datetime as dt
import unittest
-from datetime import datetime, timezone

from yt_dlp import cookies
from yt_dlp.cookies import (
@@ -58,6 +58,14 @@ class TestCookies(unittest.TestCase):
({'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
({'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
+
+({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'gnome'}, _LinuxDesktopEnvironment.GNOME),
+({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'mate'}, _LinuxDesktopEnvironment.GNOME),
+({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
+({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'kde'}, _LinuxDesktopEnvironment.KDE3),
+({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'xfce'}, _LinuxDesktopEnvironment.XFCE),
+
+({'XDG_CURRENT_DESKTOP': 'my_custom_de', 'DESKTOP_SESSION': 'my_custom_de', 'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
+
({'GNOME_DESKTOP_SESSION_ID': 1}, _LinuxDesktopEnvironment.GNOME),
({'KDE_FULL_SESSION': 1}, _LinuxDesktopEnvironment.KDE3),
({'KDE_FULL_SESSION': 1, 'DESKTOP_SESSION': 'kde4'}, _LinuxDesktopEnvironment.KDE4),
@@ -67,6 +75,7 @@ class TestCookies(unittest.TestCase):
({'XDG_CURRENT_DESKTOP': 'GNOME'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'GNOME:GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'GNOME : GNOME-Classic'}, _LinuxDesktopEnvironment.GNOME),
+({'XDG_CURRENT_DESKTOP': 'ubuntu:GNOME'}, _LinuxDesktopEnvironment.GNOME),

({'XDG_CURRENT_DESKTOP': 'Unity', 'DESKTOP_SESSION': 'gnome-fallback'}, _LinuxDesktopEnvironment.GNOME),
({'XDG_CURRENT_DESKTOP': 'KDE', 'KDE_SESSION_VERSION': '5'}, _LinuxDesktopEnvironment.KDE5),
@@ -104,15 +113,31 @@ class TestCookies(unittest.TestCase):
decryptor = LinuxChromeCookieDecryptor('Chrome', Logger())
self.assertEqual(decryptor.decrypt(encrypted_value), value)

+def test_chrome_cookie_decryptor_linux_v10_meta24(self):
+with MonkeyPatch(cookies, {'_get_linux_keyring_password': lambda *args, **kwargs: b''}):
+encrypted_value = b'v10\x1f\xe4\x0e[\x83\x0c\xcc*kPi \xce\x8d\x1d\xbb\x80\r\x11\t\xbb\x9e^Hy\x94\xf4\x963\x9f\x82\xba\xfe\xa1\xed\xb9\xf1)\x00710\x92\xc8/<\x96B'
+value = 'DE'
+decryptor = LinuxChromeCookieDecryptor('Chrome', Logger(), meta_version=24)
+self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
def test_chrome_cookie_decryptor_windows_v10(self):
with MonkeyPatch(cookies, {
-'_get_windows_v10_key': lambda *args, **kwargs: b'Y\xef\xad\xad\xeerp\xf0Y\xe6\x9b\x12\xc2<z\x16]\n\xbb\xb8\xcb\xd7\x9bA\xc3\x14e\x99{\xd6\xf4&'
+'_get_windows_v10_key': lambda *args, **kwargs: b'Y\xef\xad\xad\xeerp\xf0Y\xe6\x9b\x12\xc2<z\x16]\n\xbb\xb8\xcb\xd7\x9bA\xc3\x14e\x99{\xd6\xf4&',
}):
encrypted_value = b'v10T\xb8\xf3\xb8\x01\xa7TtcV\xfc\x88\xb8\xb8\xef\x05\xb5\xfd\x18\xc90\x009\xab\xb1\x893\x85)\x87\xe1\xa9-\xa3\xad='
value = '32101439'
decryptor = WindowsChromeCookieDecryptor('', Logger())
self.assertEqual(decryptor.decrypt(encrypted_value), value)

+def test_chrome_cookie_decryptor_windows_v10_meta24(self):
+with MonkeyPatch(cookies, {
+'_get_windows_v10_key': lambda *args, **kwargs: b'\xea\x8b\x02\xc3\xc6\xc5\x99\xc3\xa3[ j\xfa\xf6\xfcU\xac\x13u\xdc\x0c\x0e\xf1\x03\x90\xb6\xdf\xbb\x8fL\xb1\xb2',
+}):
+encrypted_value = b'v10dN\xe1\xacy\x84^\xe1I\xact\x03r\xfb\xe2\xce{^\x0e<(\xb0y\xeb\x01\xfb@"\x9e\x8c\xa53~\xdb*\x8f\xac\x8b\xe3\xfd3\x06\xe5\x93\x19OyOG\xb2\xfb\x1d$\xc0\xda\x13j\x9e\xfe\xc5\xa3\xa8\xfe\xd9'
+value = '1234'
+decryptor = WindowsChromeCookieDecryptor('', Logger(), meta_version=24)
+self.assertEqual(decryptor.decrypt(encrypted_value), value)
+
def test_chrome_cookie_decryptor_mac_v10(self):
with MonkeyPatch(cookies, {'_get_mac_keyring_password': lambda *args, **kwargs: b'6eIDUdtKAacvlHwBVwvg/Q=='}):
encrypted_value = b'v10\xb3\xbe\xad\xa1[\x9fC\xa1\x98\xe0\x9a\x01\xd9\xcf\xbfc'
@@ -121,24 +146,24 @@ class TestCookies(unittest.TestCase):
self.assertEqual(decryptor.decrypt(encrypted_value), value)

def test_safari_cookie_parsing(self):
-cookies = \
+cookies = (
-b'cook\x00\x00\x00\x01\x00\x00\x00i\x00\x00\x01\x00\x01\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00Y' \
+b'cook\x00\x00\x00\x01\x00\x00\x00i\x00\x00\x01\x00\x01\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00Y'
-b'\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x008\x00\x00\x00B\x00\x00\x00F\x00\x00\x00H' \
+b'\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x008\x00\x00\x00B\x00\x00\x00F\x00\x00\x00H'
-b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x03\xa5>\xc3A\x00\x00\x80\xc3\x07:\xc3A' \
+b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x03\xa5>\xc3A\x00\x00\x80\xc3\x07:\xc3A'
-b'localhost\x00foo\x00/\x00test%20%3Bcookie\x00\x00\x00\x054\x07\x17 \x05\x00\x00\x00Kbplist00\xd1\x01' \
+b'localhost\x00foo\x00/\x00test%20%3Bcookie\x00\x00\x00\x054\x07\x17 \x05\x00\x00\x00Kbplist00\xd1\x01'
-b'\x02_\x10\x18NSHTTPCookieAcceptPolicy\x10\x02\x08\x0b&\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00' \
+b'\x02_\x10\x18NSHTTPCookieAcceptPolicy\x10\x02\x08\x0b&\x00\x00\x00\x00\x00\x00\x01\x01\x00\x00\x00'
-b'\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00('
+b'\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00(')

jar = parse_safari_cookies(cookies)
self.assertEqual(len(jar), 1)
-cookie = list(jar)[0]
+cookie = next(iter(jar))
self.assertEqual(cookie.domain, 'localhost')
self.assertEqual(cookie.port, None)
self.assertEqual(cookie.path, '/')
self.assertEqual(cookie.name, 'foo')
self.assertEqual(cookie.value, 'test%20%3Bcookie')
self.assertFalse(cookie.secure)
-expected_expiration = datetime(2021, 6, 18, 21, 39, 19, tzinfo=timezone.utc)
+expected_expiration = dt.datetime(2021, 6, 18, 21, 39, 19, tzinfo=dt.timezone.utc)
self.assertEqual(cookie.expires, int(expected_expiration.timestamp()))

def test_pbkdf2_sha1(self):
@@ -164,7 +189,7 @@ class TestLenientSimpleCookie(unittest.TestCase):
attributes = {
key: value
for key, value in dict(morsel).items()
-if value != ""
+if value != ''
}
self.assertEqual(attributes, expected_attributes, message)

@@ -174,133 +199,133 @@ class TestLenientSimpleCookie(unittest.TestCase):
self._run_tests(
# Copied from https://github.com/python/cpython/blob/v3.10.7/Lib/test/test_http_cookies.py
(
-"Test basic cookie",
+'Test basic cookie',
-"chips=ahoy; vienna=finger",
+'chips=ahoy; vienna=finger',
-{"chips": "ahoy", "vienna": "finger"},
+{'chips': 'ahoy', 'vienna': 'finger'},
),
(
-"Test quoted cookie",
+'Test quoted cookie',
'keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"',
-{"keebler": 'E=mc2; L="Loves"; fudge=\012;'},
+{'keebler': 'E=mc2; L="Loves"; fudge=\012;'},
),
(
"Allow '=' in an unquoted value",
-"keebler=E=mc2",
+'keebler=E=mc2',
-{"keebler": "E=mc2"},
+{'keebler': 'E=mc2'},
),
(
"Allow cookies with ':' in their name",
-"key:term=value:term",
+'key:term=value:term',
-{"key:term": "value:term"},
+{'key:term': 'value:term'},
),
(
"Allow '[' and ']' in cookie values",
-"a=b; c=[; d=r; f=h",
+'a=b; c=[; d=r; f=h',
-{"a": "b", "c": "[", "d": "r", "f": "h"},
+{'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'},
),
(
-"Test basic cookie attributes",
+'Test basic cookie attributes',
'Customer="WILE_E_COYOTE"; Version=1; Path=/acme',
-{"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})},
+{'Customer': ('WILE_E_COYOTE', {'version': '1', 'path': '/acme'})},
),
(
-"Test flag only cookie attributes",
+'Test flag only cookie attributes',
'Customer="WILE_E_COYOTE"; HttpOnly; Secure',
-{"Customer": ("WILE_E_COYOTE", {"httponly": True, "secure": True})},
+{'Customer': ('WILE_E_COYOTE', {'httponly': True, 'secure': True})},
),
(
-"Test flag only attribute with values",
+'Test flag only attribute with values',
-"eggs=scrambled; httponly=foo; secure=bar; Path=/bacon",
+'eggs=scrambled; httponly=foo; secure=bar; Path=/bacon',
-{"eggs": ("scrambled", {"httponly": "foo", "secure": "bar", "path": "/bacon"})},
+{'eggs': ('scrambled', {'httponly': 'foo', 'secure': 'bar', 'path': '/bacon'})},
),
(
"Test special case for 'expires' attribute, 4 digit year",
'Customer="W"; expires=Wed, 01 Jan 2010 00:00:00 GMT',
-{"Customer": ("W", {"expires": "Wed, 01 Jan 2010 00:00:00 GMT"})},
+{'Customer': ('W', {'expires': 'Wed, 01 Jan 2010 00:00:00 GMT'})},
),
(
"Test special case for 'expires' attribute, 2 digit year",
'Customer="W"; expires=Wed, 01 Jan 98 00:00:00 GMT',
-{"Customer": ("W", {"expires": "Wed, 01 Jan 98 00:00:00 GMT"})},
+{'Customer': ('W', {'expires': 'Wed, 01 Jan 98 00:00:00 GMT'})},
),
(
-"Test extra spaces in keys and values",
+'Test extra spaces in keys and values',
-"eggs = scrambled ; secure ; path = bar ; foo=foo ",
+'eggs = scrambled ; secure ; path = bar ; foo=foo ',
-{"eggs": ("scrambled", {"secure": True, "path": "bar"}), "foo": "foo"},
+{'eggs': ('scrambled', {'secure': True, 'path': 'bar'}), 'foo': 'foo'},
),
(
-"Test quoted attributes",
+'Test quoted attributes',
'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"',
-{"Customer": ("WILE_E_COYOTE", {"version": "1", "path": "/acme"})}
+{'Customer': ('WILE_E_COYOTE', {'version': '1', 'path': '/acme'})},
),
# Our own tests that CPython passes
(
"Allow ';' in quoted value",
'chips="a;hoy"; vienna=finger',
-{"chips": "a;hoy", "vienna": "finger"},
+{'chips': 'a;hoy', 'vienna': 'finger'},
),
(
-"Keep only the last set value",
+'Keep only the last set value',
-"a=c; a=b",
+'a=c; a=b',
-{"a": "b"},
+{'a': 'b'},
),
)

def test_lenient_parsing(self):
self._run_tests(
(
-"Ignore and try to skip invalid cookies",
+'Ignore and try to skip invalid cookies',
'chips={"ahoy;": 1}; vienna="finger;"',
-{"vienna": "finger;"},
+{'vienna': 'finger;'},
),
(
-"Ignore cookies without a name",
+'Ignore cookies without a name',
-"a=b; unnamed; c=d",
+'a=b; unnamed; c=d',
-{"a": "b", "c": "d"},
+{'a': 'b', 'c': 'd'},
),
(
"Ignore '\"' cookie without name",
'a=b; "; c=d',
-{"a": "b", "c": "d"},
+{'a': 'b', 'c': 'd'},
),
(
-"Skip all space separated values",
+'Skip all space separated values',
-"x a=b c=d x; e=f",
+'x a=b c=d x; e=f',
-{"a": "b", "c": "d", "e": "f"},
+{'a': 'b', 'c': 'd', 'e': 'f'},
),
(
-"Skip all space separated values",
+'Skip all space separated values',
'x a=b; data={"complex": "json", "with": "key=value"}; x c=d x',
-{"a": "b", "c": "d"},
+{'a': 'b', 'c': 'd'},
),
(
-"Expect quote mending",
+'Expect quote mending',
'a=b; invalid="; c=d',
-{"a": "b", "c": "d"},
+{'a': 'b', 'c': 'd'},
),
(
-"Reset morsel after invalid to not capture attributes",
+'Reset morsel after invalid to not capture attributes',
-"a=b; invalid; Version=1; c=d",
+'a=b; invalid; Version=1; c=d',
-{"a": "b", "c": "d"},
+{'a': 'b', 'c': 'd'},
),
(
-"Reset morsel after invalid to not capture attributes",
+'Reset morsel after invalid to not capture attributes',
-"a=b; $invalid; $Version=1; c=d",
+'a=b; $invalid; $Version=1; c=d',
-{"a": "b", "c": "d"},
+{'a': 'b', 'c': 'd'},
),
(
-"Continue after non-flag attribute without value",
+'Continue after non-flag attribute without value',
-"a=b; path; Version=1; c=d",
+'a=b; path; Version=1; c=d',
-{"a": "b", "c": "d"},
+{'a': 'b', 'c': 'd'},
),
(
-"Allow cookie attributes with `$` prefix",
+'Allow cookie attributes with `$` prefix',
'Customer="WILE_E_COYOTE"; $Version=1; $Secure; $Path=/acme',
|
'Customer="WILE_E_COYOTE"; $Version=1; $Secure; $Path=/acme',
|
||||||
{"Customer": ("WILE_E_COYOTE", {"version": "1", "secure": True, "path": "/acme"})},
|
{'Customer': ('WILE_E_COYOTE', {'version': '1', 'secure': True, 'path': '/acme'})},
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
"Invalid Morsel keys should not result in an error",
|
'Invalid Morsel keys should not result in an error',
|
||||||
"Key=Value; [Invalid]=Value; Another=Value",
|
'Key=Value; [Invalid]=Value; Another=Value',
|
||||||
{"Key": "Value", "Another": "Value"},
|
{'Key': 'Value', 'Another': 'Value'},
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
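These (description, raw cookie string, expected morsels) tuples are meant to be fed to a shared test runner. As a hedged illustration only — the runner below and its comparison logic are assumptions, not the project's actual helper — each raw string could be parsed with yt-dlp's lenient cookie parser (assumed importable as yt_dlp.cookies.LenientSimpleCookie) and reduced to the expected mapping:

# Illustrative sketch (not the real helper): parse each raw string with
# LenientSimpleCookie and reduce the morsels to the expected shapes above.
from yt_dlp.cookies import LenientSimpleCookie

def run_cookie_cases(cases):
    for description, raw_header, expected in cases:
        cookie = LenientSimpleCookie(raw_header)
        parsed = {}
        for name, morsel in cookie.items():
            # keep only attributes that were actually set on the morsel
            attributes = {key: value for key, value in morsel.items() if value not in ('', False)}
            parsed[name] = (morsel.value, attributes) if attributes else morsel.value
        assert parsed == expected, description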
test/test_devalue.py (new file, 235 lines)
@@ -0,0 +1,235 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Allow direct execution
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
import math
|
||||||
|
import re
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from yt_dlp.utils.jslib import devalue
|
||||||
|
|
||||||
|
|
||||||
|
TEST_CASES_EQUALS = [{
|
||||||
|
'name': 'int',
|
||||||
|
'unparsed': [-42],
|
||||||
|
'parsed': -42,
|
||||||
|
}, {
|
||||||
|
'name': 'str',
|
||||||
|
'unparsed': ['woo!!!'],
|
||||||
|
'parsed': 'woo!!!',
|
||||||
|
}, {
|
||||||
|
'name': 'Number',
|
||||||
|
'unparsed': [['Object', 42]],
|
||||||
|
'parsed': 42,
|
||||||
|
}, {
|
||||||
|
'name': 'String',
|
||||||
|
'unparsed': [['Object', 'yar']],
|
||||||
|
'parsed': 'yar',
|
||||||
|
}, {
|
||||||
|
'name': 'Infinity',
|
||||||
|
'unparsed': -4,
|
||||||
|
'parsed': math.inf,
|
||||||
|
}, {
|
||||||
|
'name': 'negative Infinity',
|
||||||
|
'unparsed': -5,
|
||||||
|
'parsed': -math.inf,
|
||||||
|
}, {
|
||||||
|
'name': 'negative zero',
|
||||||
|
'unparsed': -6,
|
||||||
|
'parsed': -0.0,
|
||||||
|
}, {
|
||||||
|
'name': 'RegExp',
|
||||||
|
'unparsed': [['RegExp', 'regexp', 'gim']], # XXX: flags are ignored
|
||||||
|
'parsed': re.compile('regexp'),
|
||||||
|
}, {
|
||||||
|
'name': 'Date',
|
||||||
|
'unparsed': [['Date', '2001-09-09T01:46:40.000Z']],
|
||||||
|
'parsed': dt.datetime.fromtimestamp(1e9, tz=dt.timezone.utc),
|
||||||
|
}, {
|
||||||
|
'name': 'Array',
|
||||||
|
'unparsed': [[1, 2, 3], 'a', 'b', 'c'],
|
||||||
|
'parsed': ['a', 'b', 'c'],
|
||||||
|
}, {
|
||||||
|
'name': 'Array (empty)',
|
||||||
|
'unparsed': [[]],
|
||||||
|
'parsed': [],
|
||||||
|
}, {
|
||||||
|
'name': 'Array (sparse)',
|
||||||
|
'unparsed': [[-2, 1, -2], 'b'],
|
||||||
|
'parsed': [None, 'b', None],
|
||||||
|
}, {
|
||||||
|
'name': 'Object',
|
||||||
|
'unparsed': [{'foo': 1, 'x-y': 2}, 'bar', 'z'],
|
||||||
|
'parsed': {'foo': 'bar', 'x-y': 'z'},
|
||||||
|
}, {
|
||||||
|
'name': 'Set',
|
||||||
|
'unparsed': [['Set', 1, 2, 3], 1, 2, 3],
|
||||||
|
'parsed': [1, 2, 3],
|
||||||
|
}, {
|
||||||
|
'name': 'Map',
|
||||||
|
'unparsed': [['Map', 1, 2], 'a', 'b'],
|
||||||
|
'parsed': [['a', 'b']],
|
||||||
|
}, {
|
||||||
|
'name': 'BigInt',
|
||||||
|
'unparsed': [['BigInt', '1']],
|
||||||
|
'parsed': 1,
|
||||||
|
}, {
|
||||||
|
'name': 'Uint8Array',
|
||||||
|
'unparsed': [['Uint8Array', 'AQID']],
|
||||||
|
'parsed': [1, 2, 3],
|
||||||
|
}, {
|
||||||
|
'name': 'ArrayBuffer',
|
||||||
|
'unparsed': [['ArrayBuffer', 'AQID']],
|
||||||
|
'parsed': [1, 2, 3],
|
||||||
|
}, {
|
||||||
|
'name': 'str (repetition)',
|
||||||
|
'unparsed': [[1, 1], 'a string'],
|
||||||
|
'parsed': ['a string', 'a string'],
|
||||||
|
}, {
|
||||||
|
'name': 'None (repetition)',
|
||||||
|
'unparsed': [[1, 1], None],
|
||||||
|
'parsed': [None, None],
|
||||||
|
}, {
|
||||||
|
'name': 'dict (repetition)',
|
||||||
|
'unparsed': [[1, 1], {}],
|
||||||
|
'parsed': [{}, {}],
|
||||||
|
}, {
|
||||||
|
'name': 'Object without prototype',
|
||||||
|
'unparsed': [['null']],
|
||||||
|
'parsed': {},
|
||||||
|
}, {
|
||||||
|
'name': 'cross-realm POJO',
|
||||||
|
'unparsed': [{}],
|
||||||
|
'parsed': {},
|
||||||
|
}]
|
||||||
|
|
||||||
|
TEST_CASES_IS = [{
|
||||||
|
'name': 'bool',
|
||||||
|
'unparsed': [True],
|
||||||
|
'parsed': True,
|
||||||
|
}, {
|
||||||
|
'name': 'Boolean',
|
||||||
|
'unparsed': [['Object', False]],
|
||||||
|
'parsed': False,
|
||||||
|
}, {
|
||||||
|
'name': 'undefined',
|
||||||
|
'unparsed': -1,
|
||||||
|
'parsed': None,
|
||||||
|
}, {
|
||||||
|
'name': 'null',
|
||||||
|
'unparsed': [None],
|
||||||
|
'parsed': None,
|
||||||
|
}, {
|
||||||
|
'name': 'NaN',
|
||||||
|
'unparsed': -3,
|
||||||
|
'parsed': math.nan,
|
||||||
|
}]
|
||||||
|
|
||||||
|
TEST_CASES_INVALID = [{
|
||||||
|
'name': 'empty string',
|
||||||
|
'unparsed': '',
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'hole',
|
||||||
|
'unparsed': -2,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'invalid integer input',
|
||||||
|
}, {
|
||||||
|
'name': 'string',
|
||||||
|
'unparsed': 'hello',
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'number',
|
||||||
|
'unparsed': 42,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'invalid integer input',
|
||||||
|
}, {
|
||||||
|
'name': 'boolean',
|
||||||
|
'unparsed': True,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'null',
|
||||||
|
'unparsed': None,
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'object',
|
||||||
|
'unparsed': {},
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected int or list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'empty array',
|
||||||
|
'unparsed': [],
|
||||||
|
'error': ValueError,
|
||||||
|
'pattern': r'expected a non-empty list as input',
|
||||||
|
}, {
|
||||||
|
'name': 'Python negative indexing',
|
||||||
|
'unparsed': [[1, 2, 3, 4, 5, 6, 7, -7], 1, 2, 3, 4, 5, 6, 7],
|
||||||
|
'error': IndexError,
|
||||||
|
'pattern': r'invalid index: -7',
|
||||||
|
}]
|
||||||
|
|
||||||
|
|
||||||
|
class TestDevalue(unittest.TestCase):
|
||||||
|
def test_devalue_parse_equals(self):
|
||||||
|
for tc in TEST_CASES_EQUALS:
|
||||||
|
self.assertEqual(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
|
||||||
|
|
||||||
|
def test_devalue_parse_is(self):
|
||||||
|
for tc in TEST_CASES_IS:
|
||||||
|
self.assertIs(devalue.parse(tc['unparsed']), tc['parsed'], tc['name'])
|
||||||
|
|
||||||
|
def test_devalue_parse_invalid(self):
|
||||||
|
for tc in TEST_CASES_INVALID:
|
||||||
|
with self.assertRaisesRegex(tc['error'], tc['pattern'], msg=tc['name']):
|
||||||
|
devalue.parse(tc['unparsed'])
|
||||||
|
|
||||||
|
def test_devalue_parse_cyclical(self):
|
||||||
|
name = 'Map (cyclical)'
|
||||||
|
result = devalue.parse([['Map', 1, 0], 'self'])
|
||||||
|
self.assertEqual(result[0][0], 'self', name)
|
||||||
|
self.assertIs(result, result[0][1], name)
|
||||||
|
|
||||||
|
name = 'Set (cyclical)'
|
||||||
|
result = devalue.parse([['Set', 0, 1], 42])
|
||||||
|
self.assertEqual(result[1], 42, name)
|
||||||
|
self.assertIs(result, result[0], name)
|
||||||
|
|
||||||
|
result = devalue.parse([[0]])
|
||||||
|
self.assertIs(result, result[0], 'Array (cyclical)')
|
||||||
|
|
||||||
|
name = 'Object (cyclical)'
|
||||||
|
result = devalue.parse([{'self': 0}])
|
||||||
|
self.assertIs(result, result['self'], name)
|
||||||
|
|
||||||
|
name = 'Object with null prototype (cyclical)'
|
||||||
|
result = devalue.parse([['null', 'self', 0]])
|
||||||
|
self.assertIs(result, result['self'], name)
|
||||||
|
|
||||||
|
name = 'Objects (cyclical)'
|
||||||
|
result = devalue.parse([[1, 2], {'second': 2}, {'first': 1}])
|
||||||
|
self.assertIs(result[0], result[1]['first'], name)
|
||||||
|
self.assertIs(result[1], result[0]['second'], name)
|
||||||
|
|
||||||
|
def test_devalue_parse_revivers(self):
|
||||||
|
self.assertEqual(
|
||||||
|
devalue.parse([['indirect', 1], {'a': 2}, 'b'], revivers={'indirect': lambda x: x}),
|
||||||
|
{'a': 'b'}, 'revivers (indirect)')
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
devalue.parse([['parse', 1], '{"a":0}'], revivers={'parse': lambda x: json.loads(x)}),
|
||||||
|
{'a': 0}, 'revivers (parse)')
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
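For context, the devalue payloads above are index-based: the first element of the outer list describes the structure, non-negative integers inside it are indices into that same list, and negative sentinels encode special values. A minimal usage sketch, built only from the cases shown above:

# Minimal illustration of the index-based format exercised by the tests above.
from yt_dlp.utils.jslib import devalue

# ['a', 'b', 'c']: element 0 lists the indices of the three strings
assert devalue.parse([[1, 2, 3], 'a', 'b', 'c']) == ['a', 'b', 'c']

# revivers map a custom tag to a callable applied to the referenced value
assert devalue.parse(
    [['wrapped', 1], {'foo': 2}, 'bar'],
    revivers={'wrapped': lambda value: value},
) == {'foo': 'bar'}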
@@ -20,7 +20,6 @@ from test.helper import (
|
|||||||
gettestcases,
|
gettestcases,
|
||||||
getwebpagetestcases,
|
getwebpagetestcases,
|
||||||
is_download_test,
|
is_download_test,
|
||||||
report_warning,
|
|
||||||
try_rm,
|
try_rm,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -94,7 +93,7 @@ def generator(test_case, tname):
|
|||||||
'playlist', [] if is_playlist else [test_case])
|
'playlist', [] if is_playlist else [test_case])
|
||||||
|
|
||||||
def print_skipping(reason):
|
def print_skipping(reason):
|
||||||
print('Skipping %s: %s' % (test_case['name'], reason))
|
print('Skipping {}: {}'.format(test_case['name'], reason))
|
||||||
self.skipTest(reason)
|
self.skipTest(reason)
|
||||||
|
|
||||||
if not ie.working():
|
if not ie.working():
|
||||||
@@ -117,7 +116,7 @@ def generator(test_case, tname):
|
|||||||
|
|
||||||
for other_ie in other_ies:
|
for other_ie in other_ies:
|
||||||
if not other_ie.working():
|
if not other_ie.working():
|
||||||
print_skipping('test depends on %sIE, marked as not WORKING' % other_ie.ie_key())
|
print_skipping(f'test depends on {other_ie.ie_key()}IE, marked as not WORKING')
|
||||||
|
|
||||||
params = get_params(test_case.get('params', {}))
|
params = get_params(test_case.get('params', {}))
|
||||||
params['outtmpl'] = tname + '_' + params['outtmpl']
|
params['outtmpl'] = tname + '_' + params['outtmpl']
|
||||||
@@ -148,10 +147,7 @@ def generator(test_case, tname):
|
|||||||
return False
|
return False
|
||||||
if err.__class__.__name__ == expected_exception:
|
if err.__class__.__name__ == expected_exception:
|
||||||
return True
|
return True
|
||||||
for exc in err.exc_info:
|
return any(exc.__class__.__name__ == expected_exception for exc in err.exc_info)
|
||||||
if exc.__class__.__name__ == expected_exception:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def try_rm_tcs_files(tcs=None):
|
def try_rm_tcs_files(tcs=None):
|
||||||
if tcs is None:
|
if tcs is None:
|
||||||
@@ -181,8 +177,7 @@ def generator(test_case, tname):
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
if try_num == RETRIES:
|
if try_num == RETRIES:
|
||||||
report_warning('%s failed due to network errors, skipping...' % tname)
|
raise
|
||||||
return
|
|
||||||
|
|
||||||
print(f'Retrying: {try_num} failed tries\n\n##########\n\n')
|
print(f'Retrying: {try_num} failed tries\n\n##########\n\n')
|
||||||
|
|
||||||
@@ -244,9 +239,8 @@ def generator(test_case, tname):
|
|||||||
got_fsize = os.path.getsize(tc_filename)
|
got_fsize = os.path.getsize(tc_filename)
|
||||||
assertGreaterEqual(
|
assertGreaterEqual(
|
||||||
self, got_fsize, expected_minsize,
|
self, got_fsize, expected_minsize,
|
||||||
'Expected %s to be at least %s, but it\'s only %s ' %
|
f'Expected {tc_filename} to be at least {format_bytes(expected_minsize)}, '
|
||||||
(tc_filename, format_bytes(expected_minsize),
|
f'but it\'s only {format_bytes(got_fsize)} ')
|
||||||
format_bytes(got_fsize)))
|
|
||||||
if 'md5' in tc:
|
if 'md5' in tc:
|
||||||
md5_for_file = _file_md5(tc_filename)
|
md5_for_file = _file_md5(tc_filename)
|
||||||
self.assertEqual(tc['md5'], md5_for_file)
|
self.assertEqual(tc['md5'], md5_for_file)
|
||||||
@@ -255,7 +249,7 @@ def generator(test_case, tname):
|
|||||||
info_json_fn = os.path.splitext(tc_filename)[0] + '.info.json'
|
info_json_fn = os.path.splitext(tc_filename)[0] + '.info.json'
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
os.path.exists(info_json_fn),
|
os.path.exists(info_json_fn),
|
||||||
'Missing info file %s' % info_json_fn)
|
f'Missing info file {info_json_fn}')
|
||||||
with open(info_json_fn, encoding='utf-8') as infof:
|
with open(info_json_fn, encoding='utf-8') as infof:
|
||||||
info_dict = json.load(infof)
|
info_dict = json.load(infof)
|
||||||
expect_info_dict(self, info_dict, tc.get('info_dict', {}))
|
expect_info_dict(self, info_dict, tc.get('info_dict', {}))
|
||||||
|
@@ -15,7 +15,6 @@ import threading
|
|||||||
from test.helper import http_server_port, try_rm
|
from test.helper import http_server_port, try_rm
|
||||||
from yt_dlp import YoutubeDL
|
from yt_dlp import YoutubeDL
|
||||||
from yt_dlp.downloader.http import HttpFD
|
from yt_dlp.downloader.http import HttpFD
|
||||||
from yt_dlp.utils import encodeFilename
|
|
||||||
from yt_dlp.utils._utils import _YDLLogger as FakeLogger
|
from yt_dlp.utils._utils import _YDLLogger as FakeLogger
|
||||||
|
|
||||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
@@ -38,9 +37,9 @@ class HTTPTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
|||||||
end = int(mobj.group(2))
|
end = int(mobj.group(2))
|
||||||
valid_range = start is not None and end is not None
|
valid_range = start is not None and end is not None
|
||||||
if valid_range:
|
if valid_range:
|
||||||
content_range = 'bytes %d-%d' % (start, end)
|
content_range = f'bytes {start}-{end}'
|
||||||
if total:
|
if total:
|
||||||
content_range += '/%d' % total
|
content_range += f'/{total}'
|
||||||
self.send_header('Content-Range', content_range)
|
self.send_header('Content-Range', content_range)
|
||||||
return (end - start + 1) if valid_range else total
|
return (end - start + 1) if valid_range else total
|
||||||
|
|
||||||
@@ -82,12 +81,12 @@ class TestHttpFD(unittest.TestCase):
|
|||||||
ydl = YoutubeDL(params)
|
ydl = YoutubeDL(params)
|
||||||
downloader = HttpFD(ydl, params)
|
downloader = HttpFD(ydl, params)
|
||||||
filename = 'testfile.mp4'
|
filename = 'testfile.mp4'
|
||||||
try_rm(encodeFilename(filename))
|
try_rm(filename)
|
||||||
self.assertTrue(downloader.real_download(filename, {
|
self.assertTrue(downloader.real_download(filename, {
|
||||||
'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
|
'url': f'http://127.0.0.1:{self.port}/{ep}',
|
||||||
}), ep)
|
}), ep)
|
||||||
self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE, ep)
|
self.assertEqual(os.path.getsize(filename), TEST_SIZE, ep)
|
||||||
try_rm(encodeFilename(filename))
|
try_rm(filename)
|
||||||
|
|
||||||
def download_all(self, params):
|
def download_all(self, params):
|
||||||
for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
|
for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
|
||||||
|
@@ -45,7 +45,7 @@ class TestExecution(unittest.TestCase):
|
|||||||
self.assertTrue(os.path.exists(LAZY_EXTRACTORS))
|
self.assertTrue(os.path.exists(LAZY_EXTRACTORS))
|
||||||
|
|
||||||
_, stderr = self.run_yt_dlp(opts=('-s', 'test:'))
|
_, stderr = self.run_yt_dlp(opts=('-s', 'test:'))
|
||||||
# `MIN_RECOMMENDED` emits a deprecated feature warning for deprecated python versions
|
# `MIN_RECOMMENDED` emits a deprecated feature warning for deprecated Python versions
|
||||||
if stderr and stderr.startswith('Deprecated Feature: Support for Python'):
|
if stderr and stderr.startswith('Deprecated Feature: Support for Python'):
|
||||||
stderr = ''
|
stderr = ''
|
||||||
self.assertFalse(stderr)
|
self.assertFalse(stderr)
|
||||||
|
test/test_http_proxy.py (new file, 376 lines)
@@ -0,0 +1,376 @@
|
|||||||
|
import abc
|
||||||
|
import base64
|
||||||
|
import contextlib
|
||||||
|
import functools
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import ssl
|
||||||
|
import threading
|
||||||
|
from http.server import BaseHTTPRequestHandler
|
||||||
|
from socketserver import ThreadingTCPServer
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from test.helper import http_server_port, verify_address_availability
|
||||||
|
from test.test_networking import TEST_DIR
|
||||||
|
from test.test_socks import IPv6ThreadingTCPServer
|
||||||
|
from yt_dlp.dependencies import urllib3
|
||||||
|
from yt_dlp.networking import Request
|
||||||
|
from yt_dlp.networking.exceptions import HTTPError, ProxyError, SSLError
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPProxyAuthMixin:
|
||||||
|
|
||||||
|
def proxy_auth_error(self):
|
||||||
|
self.send_response(407)
|
||||||
|
self.send_header('Proxy-Authenticate', 'Basic realm="test http proxy"')
|
||||||
|
self.end_headers()
|
||||||
|
return False
|
||||||
|
|
||||||
|
def do_proxy_auth(self, username, password):
|
||||||
|
if username is None and password is None:
|
||||||
|
return True
|
||||||
|
|
||||||
|
proxy_auth_header = self.headers.get('Proxy-Authorization', None)
|
||||||
|
if proxy_auth_header is None:
|
||||||
|
return self.proxy_auth_error()
|
||||||
|
|
||||||
|
if not proxy_auth_header.startswith('Basic '):
|
||||||
|
return self.proxy_auth_error()
|
||||||
|
|
||||||
|
auth = proxy_auth_header[6:]
|
||||||
|
|
||||||
|
try:
|
||||||
|
auth_username, auth_password = base64.b64decode(auth).decode().split(':', 1)
|
||||||
|
except Exception:
|
||||||
|
return self.proxy_auth_error()
|
||||||
|
|
||||||
|
if auth_username != (username or '') or auth_password != (password or ''):
|
||||||
|
return self.proxy_auth_error()
|
||||||
|
return True
|
||||||
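The Basic credentials that do_proxy_auth() decodes are simply a base64-encoded 'username:password' pair. A short client-side sketch of the header the tests below send (the sample values are illustrative):

# Building the Proxy-Authorization value that do_proxy_auth() above expects.
import base64

username, password = 'test', 'test'  # sample credentials used by the tests below
token = base64.b64encode(f'{username}:{password}'.encode()).decode()
proxy_headers = {'Proxy-Authorization': f'Basic {token}'}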
|
|
||||||
|
|
||||||
|
class HTTPProxyHandler(BaseHTTPRequestHandler, HTTPProxyAuthMixin):
|
||||||
|
def __init__(self, *args, proxy_info=None, username=None, password=None, request_handler=None, **kwargs):
|
||||||
|
self.username = username
|
||||||
|
self.password = password
|
||||||
|
self.proxy_info = proxy_info
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def do_GET(self):
|
||||||
|
if not self.do_proxy_auth(self.username, self.password):
|
||||||
|
self.server.close_request(self.request)
|
||||||
|
return
|
||||||
|
if self.path.endswith('/proxy_info'):
|
||||||
|
payload = json.dumps(self.proxy_info or {
|
||||||
|
'client_address': self.client_address,
|
||||||
|
'connect': False,
|
||||||
|
'connect_host': None,
|
||||||
|
'connect_port': None,
|
||||||
|
'headers': dict(self.headers),
|
||||||
|
'path': self.path,
|
||||||
|
'proxy': ':'.join(str(y) for y in self.connection.getsockname()),
|
||||||
|
})
|
||||||
|
self.send_response(200)
|
||||||
|
self.send_header('Content-Type', 'application/json; charset=utf-8')
|
||||||
|
self.send_header('Content-Length', str(len(payload)))
|
||||||
|
self.end_headers()
|
||||||
|
self.wfile.write(payload.encode())
|
||||||
|
else:
|
||||||
|
self.send_response(404)
|
||||||
|
self.end_headers()
|
||||||
|
|
||||||
|
self.server.close_request(self.request)
|
||||||
|
|
||||||
|
|
||||||
|
if urllib3:
|
||||||
|
import urllib3.util.ssltransport
|
||||||
|
|
||||||
|
class SSLTransport(urllib3.util.ssltransport.SSLTransport):
|
||||||
|
"""
|
||||||
|
Modified version of urllib3 SSLTransport to support server side SSL
|
||||||
|
|
||||||
|
This allows us to chain multiple TLS connections.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True, server_side=False):
|
||||||
|
self.incoming = ssl.MemoryBIO()
|
||||||
|
self.outgoing = ssl.MemoryBIO()
|
||||||
|
|
||||||
|
self.suppress_ragged_eofs = suppress_ragged_eofs
|
||||||
|
self.socket = socket
|
||||||
|
|
||||||
|
self.sslobj = ssl_context.wrap_bio(
|
||||||
|
self.incoming,
|
||||||
|
self.outgoing,
|
||||||
|
server_hostname=server_hostname,
|
||||||
|
server_side=server_side,
|
||||||
|
)
|
||||||
|
self._ssl_io_loop(self.sslobj.do_handshake)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _io_refs(self):
|
||||||
|
return self.socket._io_refs
|
||||||
|
|
||||||
|
@_io_refs.setter
|
||||||
|
def _io_refs(self, value):
|
||||||
|
self.socket._io_refs = value
|
||||||
|
|
||||||
|
def shutdown(self, *args, **kwargs):
|
||||||
|
self.socket.shutdown(*args, **kwargs)
|
||||||
|
else:
|
||||||
|
SSLTransport = None
|
||||||
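The MemoryBIO-based SSLTransport above exists because SSLContext.wrap_socket needs a raw socket and cannot re-wrap a connection that is already an ssl.SSLSocket. A hedged sketch of the server-side TLS-in-TLS chaining it enables, mirroring HTTPSProxyHandler below (incoming_tls_socket is a placeholder for a connection already wrapped by the outer proxy handshake):

# Hedged sketch of chaining a second, inner TLS layer on top of an existing TLS
# connection, as HTTPSProxyHandler below does for HTTPS requests to an HTTPS proxy.
# `incoming_tls_socket` is a placeholder, assumed to be the already-established
# proxy-side ssl.SSLSocket.
inner_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
inner_ctx.load_cert_chain(os.path.join(TEST_DIR, 'testcert.pem'))
if isinstance(incoming_tls_socket, ssl.SSLSocket):
    # a normal wrap_socket() needs a raw socket, so use the MemoryBIO transport instead
    inner_request = SSLTransport(incoming_tls_socket, ssl_context=inner_ctx, server_side=True)
else:
    inner_request = inner_ctx.wrap_socket(incoming_tls_socket, server_side=True)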
|
|
||||||
|
|
||||||
|
class HTTPSProxyHandler(HTTPProxyHandler):
|
||||||
|
def __init__(self, request, *args, **kwargs):
|
||||||
|
certfn = os.path.join(TEST_DIR, 'testcert.pem')
|
||||||
|
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||||
|
sslctx.load_cert_chain(certfn, None)
|
||||||
|
if isinstance(request, ssl.SSLSocket):
|
||||||
|
request = SSLTransport(request, ssl_context=sslctx, server_side=True)
|
||||||
|
else:
|
||||||
|
request = sslctx.wrap_socket(request, server_side=True)
|
||||||
|
super().__init__(request, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPConnectProxyHandler(BaseHTTPRequestHandler, HTTPProxyAuthMixin):
|
||||||
|
protocol_version = 'HTTP/1.1'
|
||||||
|
default_request_version = 'HTTP/1.1'
|
||||||
|
|
||||||
|
def __init__(self, *args, username=None, password=None, request_handler=None, **kwargs):
|
||||||
|
self.username = username
|
||||||
|
self.password = password
|
||||||
|
self.request_handler = request_handler
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def do_CONNECT(self):
|
||||||
|
if not self.do_proxy_auth(self.username, self.password):
|
||||||
|
self.server.close_request(self.request)
|
||||||
|
return
|
||||||
|
self.send_response(200)
|
||||||
|
self.end_headers()
|
||||||
|
proxy_info = {
|
||||||
|
'client_address': self.client_address,
|
||||||
|
'connect': True,
|
||||||
|
'connect_host': self.path.split(':')[0],
|
||||||
|
'connect_port': int(self.path.split(':')[1]),
|
||||||
|
'headers': dict(self.headers),
|
||||||
|
'path': self.path,
|
||||||
|
'proxy': ':'.join(str(y) for y in self.connection.getsockname()),
|
||||||
|
}
|
||||||
|
self.request_handler(self.request, self.client_address, self.server, proxy_info=proxy_info)
|
||||||
|
self.server.close_request(self.request)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPSConnectProxyHandler(HTTPConnectProxyHandler):
|
||||||
|
def __init__(self, request, *args, **kwargs):
|
||||||
|
certfn = os.path.join(TEST_DIR, 'testcert.pem')
|
||||||
|
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||||
|
sslctx.load_cert_chain(certfn, None)
|
||||||
|
request = sslctx.wrap_socket(request, server_side=True)
|
||||||
|
self._original_request = request
|
||||||
|
super().__init__(request, *args, **kwargs)
|
||||||
|
|
||||||
|
def do_CONNECT(self):
|
||||||
|
super().do_CONNECT()
|
||||||
|
self.server.close_request(self._original_request)
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def proxy_server(proxy_server_class, request_handler, bind_ip=None, **proxy_server_kwargs):
|
||||||
|
server = server_thread = None
|
||||||
|
try:
|
||||||
|
bind_address = bind_ip or '127.0.0.1'
|
||||||
|
server_type = ThreadingTCPServer if '.' in bind_address else IPv6ThreadingTCPServer
|
||||||
|
server = server_type(
|
||||||
|
(bind_address, 0), functools.partial(proxy_server_class, request_handler=request_handler, **proxy_server_kwargs))
|
||||||
|
server_port = http_server_port(server)
|
||||||
|
server_thread = threading.Thread(target=server.serve_forever)
|
||||||
|
server_thread.daemon = True
|
||||||
|
server_thread.start()
|
||||||
|
if '.' not in bind_address:
|
||||||
|
yield f'[{bind_address}]:{server_port}'
|
||||||
|
else:
|
||||||
|
yield f'{bind_address}:{server_port}'
|
||||||
|
finally:
|
||||||
|
server.shutdown()
|
||||||
|
server.server_close()
|
||||||
|
server_thread.join(2.0)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPProxyTestContext(abc.ABC):
|
||||||
|
REQUEST_HANDLER_CLASS = None
|
||||||
|
REQUEST_PROTO = None
|
||||||
|
|
||||||
|
def http_server(self, server_class, *args, **kwargs):
|
||||||
|
return proxy_server(server_class, self.REQUEST_HANDLER_CLASS, *args, **kwargs)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs) -> dict:
|
||||||
|
"""return a dict of proxy_info"""
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPProxyHTTPTestContext(HTTPProxyTestContext):
|
||||||
|
# Standard HTTP Proxy for http requests
|
||||||
|
REQUEST_HANDLER_CLASS = HTTPProxyHandler
|
||||||
|
REQUEST_PROTO = 'http'
|
||||||
|
|
||||||
|
def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
|
||||||
|
request = Request(f'http://{target_domain or "127.0.0.1"}:{target_port or "40000"}/proxy_info', **req_kwargs)
|
||||||
|
handler.validate(request)
|
||||||
|
return json.loads(handler.send(request).read().decode())
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPProxyHTTPSTestContext(HTTPProxyTestContext):
|
||||||
|
# HTTP Connect proxy, for https requests
|
||||||
|
REQUEST_HANDLER_CLASS = HTTPSProxyHandler
|
||||||
|
REQUEST_PROTO = 'https'
|
||||||
|
|
||||||
|
def proxy_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
|
||||||
|
request = Request(f'https://{target_domain or "127.0.0.1"}:{target_port or "40000"}/proxy_info', **req_kwargs)
|
||||||
|
handler.validate(request)
|
||||||
|
return json.loads(handler.send(request).read().decode())
|
||||||
|
|
||||||
|
|
||||||
|
CTX_MAP = {
|
||||||
|
'http': HTTPProxyHTTPTestContext,
|
||||||
|
'https': HTTPProxyHTTPSTestContext,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='module')
|
||||||
|
def ctx(request):
|
||||||
|
return CTX_MAP[request.param]()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'handler', ['Urllib', 'Requests', 'CurlCFFI'], indirect=True)
|
||||||
|
@pytest.mark.parametrize('ctx', ['http'], indirect=True) # pure http proxy can only support http
|
||||||
|
class TestHTTPProxy:
|
||||||
|
def test_http_no_auth(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPProxyHandler) as server_address:
|
||||||
|
with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert proxy_info['connect'] is False
|
||||||
|
assert 'Proxy-Authorization' not in proxy_info['headers']
|
||||||
|
|
||||||
|
def test_http_auth(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPProxyHandler, username='test', password='test') as server_address:
|
||||||
|
with handler(proxies={ctx.REQUEST_PROTO: f'http://test:test@{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert 'Proxy-Authorization' in proxy_info['headers']
|
||||||
|
|
||||||
|
def test_http_bad_auth(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPProxyHandler, username='test', password='test') as server_address:
|
||||||
|
with handler(proxies={ctx.REQUEST_PROTO: f'http://test:bad@{server_address}'}) as rh:
|
||||||
|
with pytest.raises(HTTPError) as exc_info:
|
||||||
|
ctx.proxy_info_request(rh)
|
||||||
|
assert exc_info.value.response.status == 407
|
||||||
|
exc_info.value.response.close()
|
||||||
|
|
||||||
|
def test_http_source_address(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPProxyHandler) as server_address:
|
||||||
|
source_address = f'127.0.0.{random.randint(5, 255)}'
|
||||||
|
verify_address_availability(source_address)
|
||||||
|
with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'},
|
||||||
|
source_address=source_address) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert proxy_info['client_address'][0] == source_address
|
||||||
|
|
||||||
|
@pytest.mark.skip_handler('Urllib', 'urllib does not support https proxies')
|
||||||
|
def test_https(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPSProxyHandler) as server_address:
|
||||||
|
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert proxy_info['connect'] is False
|
||||||
|
assert 'Proxy-Authorization' not in proxy_info['headers']
|
||||||
|
|
||||||
|
@pytest.mark.skip_handler('Urllib', 'urllib does not support https proxies')
|
||||||
|
def test_https_verify_failed(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPSProxyHandler) as server_address:
|
||||||
|
with handler(verify=True, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
|
||||||
|
# Accept SSLError as may not be feasible to tell if it is proxy or request error.
|
||||||
|
# note: if request proto also does ssl verification, this may also be the error of the request.
|
||||||
|
# Until we can support passing custom cacerts to handlers, we cannot properly test this for all cases.
|
||||||
|
with pytest.raises((ProxyError, SSLError)):
|
||||||
|
ctx.proxy_info_request(rh)
|
||||||
|
|
||||||
|
def test_http_with_idn(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPProxyHandler) as server_address:
|
||||||
|
with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh, target_domain='中文.tw')
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert proxy_info['path'].startswith('http://xn--fiq228c.tw')
|
||||||
|
assert proxy_info['headers']['Host'].split(':', 1)[0] == 'xn--fiq228c.tw'
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'handler,ctx', [
|
||||||
|
('Requests', 'https'),
|
||||||
|
('CurlCFFI', 'https'),
|
||||||
|
], indirect=True)
|
||||||
|
class TestHTTPConnectProxy:
|
||||||
|
def test_http_connect_no_auth(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPConnectProxyHandler) as server_address:
|
||||||
|
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert proxy_info['connect'] is True
|
||||||
|
assert 'Proxy-Authorization' not in proxy_info['headers']
|
||||||
|
|
||||||
|
def test_http_connect_auth(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPConnectProxyHandler, username='test', password='test') as server_address:
|
||||||
|
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://test:test@{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert 'Proxy-Authorization' in proxy_info['headers']
|
||||||
|
|
||||||
|
def test_http_connect_bad_auth(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPConnectProxyHandler, username='test', password='test') as server_address:
|
||||||
|
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'http://test:bad@{server_address}'}) as rh:
|
||||||
|
with pytest.raises(ProxyError):
|
||||||
|
ctx.proxy_info_request(rh)
|
||||||
|
|
||||||
|
def test_http_connect_source_address(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPConnectProxyHandler) as server_address:
|
||||||
|
source_address = f'127.0.0.{random.randint(5, 255)}'
|
||||||
|
verify_address_availability(source_address)
|
||||||
|
with handler(proxies={ctx.REQUEST_PROTO: f'http://{server_address}'},
|
||||||
|
source_address=source_address,
|
||||||
|
verify=False) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert proxy_info['client_address'][0] == source_address
|
||||||
|
|
||||||
|
@pytest.mark.skipif(urllib3 is None, reason='requires urllib3 to test')
|
||||||
|
def test_https_connect_proxy(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPSConnectProxyHandler) as server_address:
|
||||||
|
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert proxy_info['connect'] is True
|
||||||
|
assert 'Proxy-Authorization' not in proxy_info['headers']
|
||||||
|
|
||||||
|
@pytest.mark.skipif(urllib3 is None, reason='requires urllib3 to test')
|
||||||
|
def test_https_connect_verify_failed(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPSConnectProxyHandler) as server_address:
|
||||||
|
with handler(verify=True, proxies={ctx.REQUEST_PROTO: f'https://{server_address}'}) as rh:
|
||||||
|
# Accept SSLError as may not be feasible to tell if it is proxy or request error.
|
||||||
|
# note: if request proto also does ssl verification, this may also be the error of the request.
|
||||||
|
# Until we can support passing custom cacerts to handlers, we cannot properly test this for all cases.
|
||||||
|
with pytest.raises((ProxyError, SSLError)):
|
||||||
|
ctx.proxy_info_request(rh)
|
||||||
|
|
||||||
|
@pytest.mark.skipif(urllib3 is None, reason='requires urllib3 to test')
|
||||||
|
def test_https_connect_proxy_auth(self, handler, ctx):
|
||||||
|
with ctx.http_server(HTTPSConnectProxyHandler, username='test', password='test') as server_address:
|
||||||
|
with handler(verify=False, proxies={ctx.REQUEST_PROTO: f'https://test:test@{server_address}'}) as rh:
|
||||||
|
proxy_info = ctx.proxy_info_request(rh)
|
||||||
|
assert proxy_info['proxy'] == server_address
|
||||||
|
assert 'Proxy-Authorization' in proxy_info['headers']
|
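Outside of pytest, the proxy_server context manager and HTTPProxyHandler defined above can be exercised directly. The sketch below uses the standard-library urllib purely for illustration (the real tests go through yt-dlp's request handlers via the handler fixture); the 127.0.0.1:40000 target only needs to be syntactically valid because the proxy answers /proxy_info itself:

# Illustrative, standalone use of the in-process HTTP proxy defined above.
import json
import urllib.request

with proxy_server(HTTPProxyHandler, HTTPProxyHandler) as server_address:
    opener = urllib.request.build_opener(
        urllib.request.ProxyHandler({'http': f'http://{server_address}'}))
    with opener.open('http://127.0.0.1:40000/proxy_info') as response:
        proxy_info = json.loads(response.read().decode())
    assert proxy_info['proxy'] == server_address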
@@ -29,11 +29,11 @@ class WarningLogger:
|
|||||||
@is_download_test
|
@is_download_test
|
||||||
class TestIqiyiSDKInterpreter(unittest.TestCase):
|
class TestIqiyiSDKInterpreter(unittest.TestCase):
|
||||||
def test_iqiyi_sdk_interpreter(self):
|
def test_iqiyi_sdk_interpreter(self):
|
||||||
'''
|
"""
|
||||||
Test the functionality of IqiyiSDKInterpreter by trying to log in
|
Test the functionality of IqiyiSDKInterpreter by trying to log in
|
||||||
|
|
||||||
If `sign` is incorrect, /validate call throws an HTTP 556 error
|
If `sign` is incorrect, /validate call throws an HTTP 556 error
|
||||||
'''
|
"""
|
||||||
logger = WarningLogger()
|
logger = WarningLogger()
|
||||||
ie = IqiyiIE(FakeYDL({'logger': logger}))
|
ie = IqiyiIE(FakeYDL({'logger': logger}))
|
||||||
ie._perform_login('foo', 'bar')
|
ie._perform_login('foo', 'bar')
|
||||||
|
@@ -9,7 +9,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|||||||
|
|
||||||
import math
|
import math
|
||||||
|
|
||||||
from yt_dlp.jsinterp import JS_Undefined, JSInterpreter
|
from yt_dlp.jsinterp import JS_Undefined, JSInterpreter, js_number_to_string
|
||||||
|
|
||||||
|
|
||||||
class NaN:
|
class NaN:
|
||||||
@@ -92,6 +92,17 @@ class TestJSInterpreter(unittest.TestCase):
|
|||||||
self._test('function f(){return 0 && 1 || 2;}', 2)
|
self._test('function f(){return 0 && 1 || 2;}', 2)
|
||||||
self._test('function f(){return 0 ?? 42;}', 0)
|
self._test('function f(){return 0 ?? 42;}', 0)
|
||||||
self._test('function f(){return "life, the universe and everything" < 42;}', False)
|
self._test('function f(){return "life, the universe and everything" < 42;}', False)
|
||||||
|
self._test('function f(){return 0 - 7 * - 6;}', 42)
|
||||||
|
self._test('function f(){return true << "5";}', 32)
|
||||||
|
self._test('function f(){return true << true;}', 2)
|
||||||
|
self._test('function f(){return "19" & "21.9";}', 17)
|
||||||
|
self._test('function f(){return "19" & false;}', 0)
|
||||||
|
self._test('function f(){return "11.0" >> "2.1";}', 2)
|
||||||
|
self._test('function f(){return 5 ^ 9;}', 12)
|
||||||
|
self._test('function f(){return 0.0 << NaN}', 0)
|
||||||
|
self._test('function f(){return null << undefined}', 0)
|
||||||
|
# TODO: Does not work due to number too large
|
||||||
|
# self._test('function f(){return 21 << 4294967297}', 42)
|
||||||
|
|
||||||
def test_array_access(self):
|
def test_array_access(self):
|
||||||
self._test('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}', [5, 2, 7])
|
self._test('function f(){var x = [1,2,3]; x[0] = 4; x[0] = 5; x[2.0] = 7; return x;}', [5, 2, 7])
|
||||||
@@ -107,6 +118,7 @@ class TestJSInterpreter(unittest.TestCase):
|
|||||||
self._test('function f(){var x = 20; x = 30 + 1; return x;}', 31)
|
self._test('function f(){var x = 20; x = 30 + 1; return x;}', 31)
|
||||||
self._test('function f(){var x = 20; x += 30 + 1; return x;}', 51)
|
self._test('function f(){var x = 20; x += 30 + 1; return x;}', 51)
|
||||||
self._test('function f(){var x = 20; x -= 30 + 1; return x;}', -11)
|
self._test('function f(){var x = 20; x -= 30 + 1; return x;}', -11)
|
||||||
|
self._test('function f(){var x = 2; var y = ["a", "b"]; y[x%y["length"]]="z"; return y}', ['z', 'b'])
|
||||||
|
|
||||||
@unittest.skip('Not implemented')
|
@unittest.skip('Not implemented')
|
||||||
def test_comments(self):
|
def test_comments(self):
|
||||||
@@ -373,7 +385,106 @@ class TestJSInterpreter(unittest.TestCase):
|
|||||||
@unittest.skip('Not implemented')
|
@unittest.skip('Not implemented')
|
||||||
def test_packed(self):
|
def test_packed(self):
|
||||||
jsi = JSInterpreter('''function f(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}''')
|
jsi = JSInterpreter('''function f(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}''')
|
||||||
self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|')))
|
self.assertEqual(jsi.call_function('f', '''h 7=g("1j");7.7h({7g:[{33:"w://7f-7e-7d-7c.v.7b/7a/79/78/77/76.74?t=73&s=2s&e=72&f=2t&71=70.0.0.1&6z=6y&6x=6w"}],6v:"w://32.v.u/6u.31",16:"r%",15:"r%",6t:"6s",6r:"",6q:"l",6p:"l",6o:"6n",6m:\'6l\',6k:"6j",9:[{33:"/2u?b=6i&n=50&6h=w://32.v.u/6g.31",6f:"6e"}],1y:{6d:1,6c:\'#6b\',6a:\'#69\',68:"67",66:30,65:r,},"64":{63:"%62 2m%m%61%5z%5y%5x.u%5w%5v%5u.2y%22 2k%m%1o%22 5t%m%1o%22 5s%m%1o%22 2j%m%5r%22 16%m%5q%22 15%m%5p%22 5o%2z%5n%5m%2z",5l:"w://v.u/d/1k/5k.2y",5j:[]},\'5i\':{"5h":"5g"},5f:"5e",5d:"w://v.u",5c:{},5b:l,1x:[0.25,0.50,0.75,1,1.25,1.5,2]});h 1m,1n,5a;h 59=0,58=0;h 7=g("1j");h 2x=0,57=0,56=0;$.55({54:{\'53-52\':\'2i-51\'}});7.j(\'4z\',6(x){c(5>0&&x.1l>=5&&1n!=1){1n=1;$(\'q.4y\').4x(\'4w\')}});7.j(\'13\',6(x){2x=x.1l});7.j(\'2g\',6(x){2w(x)});7.j(\'4v\',6(){$(\'q.2v\').4u()});6 2w(x){$(\'q.2v\').4t();c(1m)19;1m=1;17=0;c(4s.4r===l){17=1}$.4q(\'/2u?b=4p&2l=1k&4o=2t-4n-4m-2s-4l&4k=&4j=&4i=&17=\'+17,6(2r){$(\'#4h\').4g(2r)});$(\'.3-8-4f-4e:4d("4c")\').2h(6(e){2q();g().4b(0);g().4a(l)});6 2q(){h $14=$("<q />").2p({1l:"49",16:"r%",15:"r%",48:0,2n:0,2o:47,46:"45(10%, 10%, 10%, 0.4)","44-43":"42"});$("<41 />").2p({16:"60%",15:"60%",2o:40,"3z-2n":"3y"}).3x({\'2m\':\'/?b=3w&2l=1k\',\'2k\':\'0\',\'2j\':\'2i\'}).2f($14);$14.2h(6(){$(3v).3u();g().2g()});$14.2f($(\'#1j\'))}g().13(0);}6 3t(){h 9=7.1b(2e);2d.2c(9);c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==2e){2d.2c(\'!!=\'+i);7.1p(i)}}}}7.j(\'3s\',6(){g().1h("/2a/3r.29","3q 10 28",6(){g().13(g().27()+10)},"2b");$("q[26=2b]").23().21(\'.3-20-1z\');g().1h("/2a/3p.29","3o 10 28",6(){h 12=g().27()-10;c(12<0)12=0;g().13(12)},"24");$("q[26=24]").23().21(\'.3-20-1z\');});6 1i(){}7.j(\'3n\',6(){1i()});7.j(\'3m\',6(){1i()});7.j("k",6(y){h 9=7.1b();c(9.n<2)19;$(\'.3-8-3l-3k\').3j(6(){$(\'#3-8-a-k\').1e(\'3-8-a-z\');$(\'.3-a-k\').p(\'o-1f\',\'11\')});7.1h("/3i/3h.3g","3f 3e",6(){$(\'.3-1w\').3d(\'3-8-1v\');$(\'.3-8-1y, .3-8-1x\').p(\'o-1g\',\'11\');c($(\'.3-1w\').3c(\'3-8-1v\')){$(\'.3-a-k\').p(\'o-1g\',\'l\');$(\'.3-a-k\').p(\'o-1f\',\'l\');$(\'.3-8-a\').1e(\'3-8-a-z\');$(\'.3-8-a:1u\').3b(\'3-8-a-z\')}3a{$(\'.3-a-k\').p(\'o-1g\',\'11\');$(\'.3-a-k\').p(\'o-1f\',\'11\');$(\'.3-8-a:1u\').1e(\'3-8-a-z\')}},"39");7.j("38",6(y){1d.37(\'1c\',y.9[y.36].1a)});c(1d.1t(\'1c\')){35("1s(1d.1t(\'1c\'));",34)}});h 18;6 1s(1q){h 
9=7.1b();c(9.n>1){1r(i=0;i<9.n;i++){c(9[i].1a==1q){c(i==18){19}18=i;7.1p(i)}}}}',36,270,'|||jw|||function|player|settings|tracks|submenu||if||||jwplayer|var||on|audioTracks|true|3D|length|aria|attr|div|100|||sx|filemoon|https||event|active||false|tt|seek|dd|height|width|adb|current_audio|return|name|getAudioTracks|default_audio|localStorage|removeClass|expanded|checked|addButton|callMeMaybe|vplayer|0fxcyc2ajhp1|position|vvplay|vvad|220|setCurrentAudioTrack|audio_name|for|audio_set|getItem|last|open|controls|playbackRates|captions|rewind|icon|insertAfter||detach|ff00||button|getPosition|sec|png|player8|ff11|log|console|track_name|appendTo|play|click|no|scrolling|frameborder|file_code|src|top|zIndex|css|showCCform|data|1662367683|383371|dl|video_ad|doPlay|prevt|mp4|3E||jpg|thumbs|file|300|setTimeout|currentTrack|setItem|audioTrackChanged|dualSound|else|addClass|hasClass|toggleClass|Track|Audio|svg|dualy|images|mousedown|buttons|topbar|playAttemptFailed|beforePlay|Rewind|fr|Forward|ff|ready|set_audio_track|remove|this|upload_srt|prop|50px|margin|1000001|iframe|center|align|text|rgba|background|1000000|left|absolute|pause|setCurrentCaptions|Upload|contains|item|content|html|fviews|referer|prem|embed|3e57249ef633e0d03bf76ceb8d8a4b65|216|83|hash|view|get|TokenZir|window|hide|show|complete|slow|fadeIn|video_ad_fadein|time||cache|Cache|Content|headers|ajaxSetup|v2done|tott|vastdone2|vastdone1|vvbefore|playbackRateControls|cast|aboutlink|FileMoon|abouttext|UHD|1870|qualityLabels|sites|GNOME_POWER|link|2Fiframe|3C|allowfullscreen|22360|22640|22no|marginheight|marginwidth|2FGNOME_POWER|2F0fxcyc2ajhp1|2Fe|2Ffilemoon|2F|3A||22https|3Ciframe|code|sharing|fontOpacity|backgroundOpacity|Tahoma|fontFamily|303030|backgroundColor|FFFFFF|color|userFontScale|thumbnails|kind|0fxcyc2ajhp10000|url|get_slides|start|startparam|none|preload|html5|primary|hlshtml|androidhls|duration|uniform|stretching|0fxcyc2ajhp1_xt|image|2048|sp|6871|asn|127|srv|43200|_g3XlBcu2lmD9oDexD2NLWSmah2Nu3XcDrl93m9PwXY|m3u8||master|0fxcyc2ajhp1_x|00076|01|hls2|to|s01|delivery|storage|moon|sources|setup'''.split('|'))) # noqa: SIM905
|
||||||
|
|
||||||
|
def test_join(self):
|
||||||
|
test_input = list('test')
|
||||||
|
tests = [
|
||||||
|
'function f(a, b){return a.join(b)}',
|
||||||
|
'function f(a, b){return Array.prototype.join.call(a, b)}',
|
||||||
|
'function f(a, b){return Array.prototype.join.apply(a, [b])}',
|
||||||
|
]
|
||||||
|
for test in tests:
|
||||||
|
jsi = JSInterpreter(test)
|
||||||
|
self._test(jsi, 'test', args=[test_input, ''])
|
||||||
|
self._test(jsi, 't-e-s-t', args=[test_input, '-'])
|
||||||
|
self._test(jsi, '', args=[[], '-'])
|
||||||
|
|
||||||
|
def test_split(self):
|
||||||
|
test_result = list('test')
|
||||||
|
tests = [
|
||||||
|
'function f(a, b){return a.split(b)}',
|
||||||
|
'function f(a, b){return a["split"](b)}',
|
||||||
|
'function f(a, b){let x = ["split"]; return a[x[0]](b)}',
|
||||||
|
'function f(a, b){return String.prototype.split.call(a, b)}',
|
||||||
|
'function f(a, b){return String.prototype.split.apply(a, [b])}',
|
||||||
|
]
|
||||||
|
for test in tests:
|
||||||
|
jsi = JSInterpreter(test)
|
||||||
|
self._test(jsi, test_result, args=['test', ''])
|
||||||
|
self._test(jsi, test_result, args=['t-e-s-t', '-'])
|
||||||
|
self._test(jsi, [''], args=['', '-'])
|
||||||
|
self._test(jsi, [], args=['', ''])
|
||||||
|
|
||||||
|
def test_slice(self):
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice()}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(5)}', [5, 6, 7, 8])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(99)}', [])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-2)}', [7, 8])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-99)}', [0, 1, 2, 3, 4, 5, 6, 7, 8])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 0)}', [])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, 0)}', [])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(0, 1)}', [0])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(3, 6)}', [3, 4, 5])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(1, -1)}', [1, 2, 3, 4, 5, 6, 7])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-1, 1)}', [])
|
||||||
|
self._test('function f(){return [0, 1, 2, 3, 4, 5, 6, 7, 8].slice(-3, -1)}', [6, 7])
|
||||||
|
self._test('function f(){return "012345678".slice()}', '012345678')
|
||||||
|
self._test('function f(){return "012345678".slice(0)}', '012345678')
|
||||||
|
self._test('function f(){return "012345678".slice(5)}', '5678')
|
||||||
|
self._test('function f(){return "012345678".slice(99)}', '')
|
||||||
|
self._test('function f(){return "012345678".slice(-2)}', '78')
|
||||||
|
self._test('function f(){return "012345678".slice(-99)}', '012345678')
|
||||||
|
self._test('function f(){return "012345678".slice(0, 0)}', '')
|
||||||
|
self._test('function f(){return "012345678".slice(1, 0)}', '')
|
||||||
|
self._test('function f(){return "012345678".slice(0, 1)}', '0')
|
||||||
|
self._test('function f(){return "012345678".slice(3, 6)}', '345')
|
||||||
|
self._test('function f(){return "012345678".slice(1, -1)}', '1234567')
|
||||||
|
self._test('function f(){return "012345678".slice(-1, 1)}', '')
|
||||||
|
self._test('function f(){return "012345678".slice(-3, -1)}', '67')
|
||||||
|
|
||||||
|
def test_splice(self):
|
||||||
|
self._test('function f(){var T = ["0", "1", "2"]; T["splice"](2, 1, "0")[0]; return T }', ['0', '1', '0'])
|
||||||
|
|
||||||
|
def test_js_number_to_string(self):
|
||||||
|
for test, radix, expected in [
|
||||||
|
(0, None, '0'),
|
||||||
|
(-0, None, '0'),
|
||||||
|
(0.0, None, '0'),
|
||||||
|
(-0.0, None, '0'),
|
||||||
|
(math.nan, None, 'NaN'),
|
||||||
|
(-math.nan, None, 'NaN'),
|
||||||
|
(math.inf, None, 'Infinity'),
|
||||||
|
(-math.inf, None, '-Infinity'),
|
||||||
|
(10 ** 21.5, 8, '526665530627250154000000'),
|
||||||
|
(6, 2, '110'),
|
||||||
|
(254, 16, 'fe'),
|
||||||
|
(-10, 2, '-1010'),
|
||||||
|
(-0xff, 2, '-11111111'),
|
||||||
|
(0.1 + 0.2, 16, '0.4cccccccccccd'),
|
||||||
|
(1234.1234, 10, '1234.1234'),
|
||||||
|
# (1000000000000000128, 10, '1000000000000000100')
|
||||||
|
]:
|
||||||
|
assert js_number_to_string(test, radix) == expected
|
||||||
|
|
||||||
|
def test_extract_function(self):
|
||||||
|
jsi = JSInterpreter('function a(b) { return b + 1; }')
|
||||||
|
func = jsi.extract_function('a')
|
||||||
|
self.assertEqual(func([2]), 3)
|
||||||
|
|
||||||
|
def test_extract_function_with_global_stack(self):
|
||||||
|
jsi = JSInterpreter('function c(d) { return d + e + f + g; }')
|
||||||
|
func = jsi.extract_function('c', {'e': 10}, {'f': 100, 'g': 1000})
|
||||||
|
self.assertEqual(func([1]), 1111)
|
||||||
|
|
||||||
|
def test_increment_decrement(self):
|
||||||
|
self._test('function f() { var x = 1; return ++x; }', 2)
|
||||||
|
self._test('function f() { var x = 1; return x++; }', 1)
|
||||||
|
self._test('function f() { var x = 1; x--; return x }', 0)
|
||||||
|
self._test('function f() { var y; var x = 1; x++, --x, x--, x--, y="z", "abc", x++; return --x }', -1)
|
||||||
|
self._test('function f() { var a = "test--"; return a; }', 'test--')
|
||||||
|
self._test('function f() { var b = 1; var a = "b--"; return a; }', 'b--')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
@@ -21,7 +21,7 @@ class TestNetRc(unittest.TestCase):
|
|||||||
continue
|
continue
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
ie._NETRC_MACHINE,
|
ie._NETRC_MACHINE,
|
||||||
'Extractor %s supports login, but is missing a _NETRC_MACHINE property' % ie.IE_NAME)
|
f'Extractor {ie.IE_NAME} supports login, but is missing a _NETRC_MACHINE property')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
(File diff suppressed because it is too large)
@@ -8,13 +8,9 @@ import pytest
|
|||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
import contextlib
|
|
||||||
import io
|
import io
|
||||||
import platform
|
|
||||||
import random
|
import random
|
||||||
import ssl
|
import ssl
|
||||||
import urllib.error
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
from yt_dlp.cookies import YoutubeDLCookieJar
|
from yt_dlp.cookies import YoutubeDLCookieJar
|
||||||
from yt_dlp.dependencies import certifi
|
from yt_dlp.dependencies import certifi
|
||||||
@@ -24,16 +20,14 @@ from yt_dlp.networking._helper import (
|
|||||||
add_accept_encoding_header,
|
add_accept_encoding_header,
|
||||||
get_redirect_method,
|
get_redirect_method,
|
||||||
make_socks_proxy_opts,
|
make_socks_proxy_opts,
|
||||||
select_proxy,
|
|
||||||
ssl_load_certs,
|
ssl_load_certs,
|
||||||
)
|
)
|
||||||
from yt_dlp.networking.exceptions import (
|
from yt_dlp.networking.exceptions import (
|
||||||
HTTPError,
|
HTTPError,
|
||||||
IncompleteRead,
|
IncompleteRead,
|
||||||
_CompatHTTPError,
|
|
||||||
)
|
)
|
||||||
from yt_dlp.socks import ProxyType
|
from yt_dlp.socks import ProxyType
|
||||||
from yt_dlp.utils.networking import HTTPHeaderDict
|
from yt_dlp.utils.networking import HTTPHeaderDict, select_proxy
|
||||||
|
|
||||||
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
|
||||||
@ -44,7 +38,7 @@ class TestNetworkingUtils:
|
|||||||
proxies = {
|
proxies = {
|
||||||
'all': 'socks5://example.com',
|
'all': 'socks5://example.com',
|
||||||
'http': 'http://example.com:1080',
|
'http': 'http://example.com:1080',
|
||||||
'no': 'bypass.example.com,yt-dl.org'
|
'no': 'bypass.example.com,yt-dl.org',
|
||||||
}
|
}
|
||||||
|
|
||||||
assert select_proxy('https://example.com', proxies) == proxies['all']
|
assert select_proxy('https://example.com', proxies) == proxies['all']
|
||||||
@ -59,7 +53,7 @@ class TestNetworkingUtils:
|
|||||||
'port': 1080,
|
'port': 1080,
|
||||||
'rdns': True,
|
'rdns': True,
|
||||||
'username': None,
|
'username': None,
|
||||||
'password': None
|
'password': None,
|
||||||
}),
|
}),
|
||||||
('socks5://user:@example.com:5555', {
|
('socks5://user:@example.com:5555', {
|
||||||
'proxytype': ProxyType.SOCKS5,
|
'proxytype': ProxyType.SOCKS5,
|
||||||
@ -67,7 +61,7 @@ class TestNetworkingUtils:
|
|||||||
'port': 5555,
|
'port': 5555,
|
||||||
'rdns': False,
|
'rdns': False,
|
||||||
'username': 'user',
|
'username': 'user',
|
||||||
'password': ''
|
'password': '',
|
||||||
}),
|
}),
|
||||||
('socks4://u%40ser:pa%20ss@127.0.0.1:1080', {
|
('socks4://u%40ser:pa%20ss@127.0.0.1:1080', {
|
||||||
'proxytype': ProxyType.SOCKS4,
|
'proxytype': ProxyType.SOCKS4,
|
||||||
@ -75,7 +69,7 @@ class TestNetworkingUtils:
|
|||||||
'port': 1080,
|
'port': 1080,
|
||||||
'rdns': False,
|
'rdns': False,
|
||||||
'username': 'u@ser',
|
'username': 'u@ser',
|
||||||
'password': 'pa ss'
|
'password': 'pa ss',
|
||||||
}),
|
}),
|
||||||
('socks4a://:pa%20ss@127.0.0.1', {
|
('socks4a://:pa%20ss@127.0.0.1', {
|
||||||
'proxytype': ProxyType.SOCKS4A,
|
'proxytype': ProxyType.SOCKS4A,
|
||||||
@ -83,8 +77,8 @@ class TestNetworkingUtils:
|
|||||||
'port': 1080,
|
'port': 1080,
|
||||||
'rdns': True,
|
'rdns': True,
|
||||||
'username': '',
|
'username': '',
|
||||||
'password': 'pa ss'
|
'password': 'pa ss',
|
||||||
})
|
}),
|
||||||
])
|
])
|
||||||
def test_make_socks_proxy_opts(self, socks_proxy, expected):
|
def test_make_socks_proxy_opts(self, socks_proxy, expected):
|
||||||
assert make_socks_proxy_opts(socks_proxy) == expected
|
assert make_socks_proxy_opts(socks_proxy) == expected
|
||||||
@ -179,11 +173,10 @@ class TestNetworkingExceptions:
|
|||||||
def create_response(status):
|
def create_response(status):
|
||||||
return Response(fp=io.BytesIO(b'test'), url='http://example.com', headers={'tesT': 'test'}, status=status)
|
return Response(fp=io.BytesIO(b'test'), url='http://example.com', headers={'tesT': 'test'}, status=status)
|
||||||
|
|
||||||
@pytest.mark.parametrize('http_error_class', [HTTPError, lambda r: _CompatHTTPError(HTTPError(r))])
|
def test_http_error(self):
|
||||||
def test_http_error(self, http_error_class):
|
|
||||||
|
|
||||||
response = self.create_response(403)
|
response = self.create_response(403)
|
||||||
error = http_error_class(response)
|
error = HTTPError(response)
|
||||||
|
|
||||||
assert error.status == 403
|
assert error.status == 403
|
||||||
assert str(error) == error.msg == 'HTTP Error 403: Forbidden'
|
assert str(error) == error.msg == 'HTTP Error 403: Forbidden'
|
||||||
@ -194,80 +187,12 @@ class TestNetworkingExceptions:
|
|||||||
assert data == b'test'
|
assert data == b'test'
|
||||||
assert repr(error) == '<HTTPError 403: Forbidden>'
|
assert repr(error) == '<HTTPError 403: Forbidden>'
|
||||||
|
|
||||||
@pytest.mark.parametrize('http_error_class', [HTTPError, lambda *args, **kwargs: _CompatHTTPError(HTTPError(*args, **kwargs))])
|
def test_redirect_http_error(self):
|
||||||
def test_redirect_http_error(self, http_error_class):
|
|
||||||
response = self.create_response(301)
|
response = self.create_response(301)
|
||||||
error = http_error_class(response, redirect_loop=True)
|
error = HTTPError(response, redirect_loop=True)
|
||||||
assert str(error) == error.msg == 'HTTP Error 301: Moved Permanently (redirect loop detected)'
|
assert str(error) == error.msg == 'HTTP Error 301: Moved Permanently (redirect loop detected)'
|
||||||
assert error.reason == 'Moved Permanently'
|
assert error.reason == 'Moved Permanently'
|
||||||
|
|
||||||
def test_compat_http_error(self):
|
|
||||||
response = self.create_response(403)
|
|
||||||
error = _CompatHTTPError(HTTPError(response))
|
|
||||||
assert isinstance(error, HTTPError)
|
|
||||||
assert isinstance(error, urllib.error.HTTPError)
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def raises_deprecation_warning():
|
|
||||||
with warnings.catch_warnings(record=True) as w:
|
|
||||||
warnings.simplefilter('always')
|
|
||||||
yield
|
|
||||||
|
|
||||||
if len(w) == 0:
|
|
||||||
pytest.fail('Did not raise DeprecationWarning')
|
|
||||||
if len(w) > 1:
|
|
||||||
pytest.fail(f'Raised multiple warnings: {w}')
|
|
||||||
|
|
||||||
if not issubclass(w[-1].category, DeprecationWarning):
|
|
||||||
pytest.fail(f'Expected DeprecationWarning, got {w[-1].category}')
|
|
||||||
w.clear()
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.code == 403
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.getcode() == 403
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.hdrs is error.response.headers
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.info() is error.response.headers
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.headers is error.response.headers
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.filename == error.response.url
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.url == error.response.url
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.geturl() == error.response.url
|
|
||||||
|
|
||||||
# Passthrough file operations
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert error.read() == b'test'
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
assert not error.closed
|
|
||||||
|
|
||||||
with raises_deprecation_warning():
|
|
||||||
# Technically Response operations are also passed through, which should not be used.
|
|
||||||
assert error.get_header('test') == 'test'
|
|
||||||
|
|
||||||
# Should not raise a warning
|
|
||||||
error.close()
|
|
||||||
|
|
||||||
@pytest.mark.skipif(
|
|
||||||
platform.python_implementation() == 'PyPy', reason='garbage collector works differently in pypy')
|
|
||||||
def test_compat_http_error_autoclose(self):
|
|
||||||
# Compat HTTPError should not autoclose response
|
|
||||||
response = self.create_response(403)
|
|
||||||
_CompatHTTPError(HTTPError(response))
|
|
||||||
assert not response.closed
|
|
||||||
|
|
||||||
def test_incomplete_read_error(self):
|
def test_incomplete_read_error(self):
|
||||||
error = IncompleteRead(4, 3, cause='test')
|
error = IncompleteRead(4, 3, cause='test')
|
||||||
assert isinstance(error, IncompleteRead)
|
assert isinstance(error, IncompleteRead)
|
||||||
|
@@ -27,7 +27,7 @@ class TestOverwrites(unittest.TestCase):
            [
                sys.executable, 'yt_dlp/__main__.py',
                '-o', 'test.webm',
-                'https://www.youtube.com/watch?v=jNQXAC9IVRw'
+                'https://www.youtube.com/watch?v=jNQXAC9IVRw',
            ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        self.assertTrue(b'has already been downloaded' in sout)
@@ -39,7 +39,7 @@ class TestOverwrites(unittest.TestCase):
            [
                sys.executable, 'yt_dlp/__main__.py', '--yes-overwrites',
                '-o', 'test.webm',
-                'https://www.youtube.com/watch?v=jNQXAC9IVRw'
+                'https://www.youtube.com/watch?v=jNQXAC9IVRw',
            ], cwd=root_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        sout, serr = outp.communicate()
        self.assertTrue(b'has already been downloaded' not in sout)
@@ -10,41 +10,103 @@ TEST_DATA_DIR = Path(os.path.dirname(os.path.abspath(__file__)), 'testdata')
sys.path.append(str(TEST_DATA_DIR))
importlib.invalidate_caches()

-from yt_dlp.plugins import PACKAGE_NAME, directories, load_plugins
+from yt_dlp.plugins import (
+    PACKAGE_NAME,
+    PluginSpec,
+    directories,
+    load_plugins,
+    load_all_plugins,
+    register_plugin_spec,
+)
+
+from yt_dlp.globals import (
+    extractors,
+    postprocessors,
+    plugin_dirs,
+    plugin_ies,
+    plugin_pps,
+    all_plugins_loaded,
+    plugin_specs,
+)
+
+
+EXTRACTOR_PLUGIN_SPEC = PluginSpec(
+    module_name='extractor',
+    suffix='IE',
+    destination=extractors,
+    plugin_destination=plugin_ies,
+)
+
+POSTPROCESSOR_PLUGIN_SPEC = PluginSpec(
+    module_name='postprocessor',
+    suffix='PP',
+    destination=postprocessors,
+    plugin_destination=plugin_pps,
+)
+
+
+def reset_plugins():
+    plugin_ies.value = {}
+    plugin_pps.value = {}
+    plugin_dirs.value = ['default']
+    plugin_specs.value = {}
+    all_plugins_loaded.value = False
+    # Clearing override plugins is probably difficult
+    for module_name in tuple(sys.modules):
+        for plugin_type in ('extractor', 'postprocessor'):
+            if module_name.startswith(f'{PACKAGE_NAME}.{plugin_type}.'):
+                del sys.modules[module_name]
+
+    importlib.invalidate_caches()


class TestPlugins(unittest.TestCase):

    TEST_PLUGIN_DIR = TEST_DATA_DIR / PACKAGE_NAME

+    def setUp(self):
+        reset_plugins()
+
+    def tearDown(self):
+        reset_plugins()
+
    def test_directories_containing_plugins(self):
        self.assertIn(self.TEST_PLUGIN_DIR, map(Path, directories()))

    def test_extractor_classes(self):
-        for module_name in tuple(sys.modules):
-            if module_name.startswith(f'{PACKAGE_NAME}.extractor'):
-                del sys.modules[module_name]
-        plugins_ie = load_plugins('extractor', 'IE')
+        plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)

        self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
        self.assertIn('NormalPluginIE', plugins_ie.keys())

        # don't load modules with underscore prefix
        self.assertFalse(
-            f'{PACKAGE_NAME}.extractor._ignore' in sys.modules.keys(),
+            f'{PACKAGE_NAME}.extractor._ignore' in sys.modules,
            'loaded module beginning with underscore')
        self.assertNotIn('IgnorePluginIE', plugins_ie.keys())
+        self.assertNotIn('IgnorePluginIE', plugin_ies.value)

        # Don't load extractors with underscore prefix
        self.assertNotIn('_IgnoreUnderscorePluginIE', plugins_ie.keys())
+        self.assertNotIn('_IgnoreUnderscorePluginIE', plugin_ies.value)

        # Don't load extractors not specified in __all__ (if supplied)
        self.assertNotIn('IgnoreNotInAllPluginIE', plugins_ie.keys())
+        self.assertNotIn('IgnoreNotInAllPluginIE', plugin_ies.value)
        self.assertIn('InAllPluginIE', plugins_ie.keys())
+        self.assertIn('InAllPluginIE', plugin_ies.value)
+
+        # Don't load override extractors
+        self.assertNotIn('OverrideGenericIE', plugins_ie.keys())
+        self.assertNotIn('OverrideGenericIE', plugin_ies.value)
+        self.assertNotIn('_UnderscoreOverrideGenericIE', plugins_ie.keys())
+        self.assertNotIn('_UnderscoreOverrideGenericIE', plugin_ies.value)

    def test_postprocessor_classes(self):
-        plugins_pp = load_plugins('postprocessor', 'PP')
+        plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
        self.assertIn('NormalPluginPP', plugins_pp.keys())
+        self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+        self.assertIn('NormalPluginPP', plugin_pps.value)

    def test_importing_zipped_module(self):
        zip_path = TEST_DATA_DIR / 'zipped_plugins.zip'
@@ -57,10 +119,10 @@ class TestPlugins(unittest.TestCase):
            package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
            self.assertIn(zip_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))

-            plugins_ie = load_plugins('extractor', 'IE')
+            plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
            self.assertIn('ZippedPluginIE', plugins_ie.keys())

-            plugins_pp = load_plugins('postprocessor', 'PP')
+            plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
            self.assertIn('ZippedPluginPP', plugins_pp.keys())

        finally:
@@ -68,6 +130,117 @@ class TestPlugins(unittest.TestCase):
            os.remove(zip_path)
            importlib.invalidate_caches()  # reset the import caches

+    def test_reloading_plugins(self):
+        reload_plugins_path = TEST_DATA_DIR / 'reload_plugins'
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+        load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
+
+        # Remove default folder and add reload_plugin path
+        sys.path.remove(str(TEST_DATA_DIR))
+        sys.path.append(str(reload_plugins_path))
+        importlib.invalidate_caches()
+        try:
+            for plugin_type in ('extractor', 'postprocessor'):
+                package = importlib.import_module(f'{PACKAGE_NAME}.{plugin_type}')
+                self.assertIn(reload_plugins_path / PACKAGE_NAME / plugin_type, map(Path, package.__path__))
+
+            plugins_ie = load_plugins(EXTRACTOR_PLUGIN_SPEC)
+            self.assertIn('NormalPluginIE', plugins_ie.keys())
+            self.assertTrue(
+                plugins_ie['NormalPluginIE'].REPLACED,
+                msg='Reloading has not replaced original extractor plugin')
+            self.assertTrue(
+                extractors.value['NormalPluginIE'].REPLACED,
+                msg='Reloading has not replaced original extractor plugin globally')
+
+            plugins_pp = load_plugins(POSTPROCESSOR_PLUGIN_SPEC)
+            self.assertIn('NormalPluginPP', plugins_pp.keys())
+            self.assertTrue(plugins_pp['NormalPluginPP'].REPLACED,
+                            msg='Reloading has not replaced original postprocessor plugin')
+            self.assertTrue(
+                postprocessors.value['NormalPluginPP'].REPLACED,
+                msg='Reloading has not replaced original postprocessor plugin globally')
+
+        finally:
+            sys.path.remove(str(reload_plugins_path))
+            sys.path.append(str(TEST_DATA_DIR))
+            importlib.invalidate_caches()
+
+    def test_extractor_override_plugin(self):
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+        from yt_dlp.extractor.generic import GenericIE
+
+        self.assertEqual(GenericIE.TEST_FIELD, 'override')
+        self.assertEqual(GenericIE.SECONDARY_TEST_FIELD, 'underscore-override')
+
+        self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')
+        importlib.invalidate_caches()
+        # test that loading a second time doesn't wrap a second time
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+        from yt_dlp.extractor.generic import GenericIE
+        self.assertEqual(GenericIE.IE_NAME, 'generic+override+underscore-override')
+
+    def test_load_all_plugin_types(self):
+
+        # no plugin specs registered
+        load_all_plugins()
+
+        self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
+        self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+
+        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
+        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
+        load_all_plugins()
+        self.assertTrue(all_plugins_loaded.value)
+
+        self.assertIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
+        self.assertIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+
+    def test_no_plugin_dirs(self):
+        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
+        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
+
+        plugin_dirs.value = []
+        load_all_plugins()
+
+        self.assertNotIn(f'{PACKAGE_NAME}.extractor.normal', sys.modules.keys())
+        self.assertNotIn(f'{PACKAGE_NAME}.postprocessor.normal', sys.modules.keys())
+
+    def test_set_plugin_dirs(self):
+        custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')
+        plugin_dirs.value = [custom_plugin_dir]
+
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+        self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
+        self.assertIn('PackagePluginIE', plugin_ies.value)
+
+    def test_invalid_plugin_dir(self):
+        plugin_dirs.value = ['invalid_dir']
+        with self.assertRaises(ValueError):
+            load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+    def test_append_plugin_dirs(self):
+        custom_plugin_dir = str(TEST_DATA_DIR / 'plugin_packages')
+
+        self.assertEqual(plugin_dirs.value, ['default'])
+        plugin_dirs.value.append(custom_plugin_dir)
+        self.assertEqual(plugin_dirs.value, ['default', custom_plugin_dir])
+
+        load_plugins(EXTRACTOR_PLUGIN_SPEC)
+
+        self.assertIn(f'{PACKAGE_NAME}.extractor.package', sys.modules.keys())
+        self.assertIn('PackagePluginIE', plugin_ies.value)
+
+    def test_get_plugin_spec(self):
+        register_plugin_spec(EXTRACTOR_PLUGIN_SPEC)
+        register_plugin_spec(POSTPROCESSOR_PLUGIN_SPEC)
+
+        self.assertEqual(plugin_specs.value.get('extractor'), EXTRACTOR_PLUGIN_SPEC)
+        self.assertEqual(plugin_specs.value.get('postprocessor'), POSTPROCESSOR_PLUGIN_SPEC)
+        self.assertIsNone(plugin_specs.value.get('invalid'))
+

if __name__ == '__main__':
    unittest.main()
@@ -59,7 +59,7 @@ class TestPostHooks(unittest.TestCase):

    def hook_three(self, filename):
        self.files.append(filename)
-        raise Exception('Test exception for \'%s\'' % filename)
+        raise Exception(f'Test exception for \'{filename}\'')

    def tearDown(self):
        for f in self.files:
@@ -8,8 +8,10 @@ import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


+import subprocess

from yt_dlp import YoutubeDL
-from yt_dlp.compat import compat_shlex_quote
+from yt_dlp.utils import shell_quote
from yt_dlp.postprocessor import (
    ExecPP,
    FFmpegThumbnailsConvertorPP,
@@ -47,7 +49,18 @@ class TestConvertThumbnail(unittest.TestCase):
            print('Skipping: ffmpeg not found')
            return

-        file = 'test/testdata/thumbnails/foo %d bar/foo_%d.{}'
+        test_data_dir = 'test/testdata/thumbnails'
+        generated_file = f'{test_data_dir}/empty.webp'
+
+        subprocess.check_call([
+            pp.executable, '-y', '-f', 'lavfi', '-i', 'color=c=black:s=320x320',
+            '-c:v', 'libwebp', '-pix_fmt', 'yuv420p', '-vframes', '1', generated_file,
+        ], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+
+        file = test_data_dir + '/foo %d bar/foo_%d.{}'
+        initial_file = file.format('webp')
+        os.replace(generated_file, initial_file)
+
        tests = (('webp', 'png'), ('png', 'jpg'))

        for inp, out in tests:
@@ -55,17 +68,19 @@ class TestConvertThumbnail(unittest.TestCase):
            if os.path.exists(out_file):
                os.remove(out_file)
            pp.convert_thumbnail(file.format(inp), out)
-            assert os.path.exists(out_file)
+            self.assertTrue(os.path.exists(out_file))

        for _, out in tests:
            os.remove(file.format(out))

+        os.remove(initial_file)
+

class TestExec(unittest.TestCase):
    def test_parse_cmd(self):
        pp = ExecPP(YoutubeDL(), '')
        info = {'filepath': 'file name'}
-        cmd = 'echo %s' % compat_shlex_quote(info['filepath'])
+        cmd = 'echo {}'.format(shell_quote(info['filepath']))

        self.assertEqual(pp.parse_cmd('echo', info), cmd)
        self.assertEqual(pp.parse_cmd('echo {}', info), cmd)
@@ -125,7 +140,8 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, chapters, [])

    def test_remove_marked_arrange_sponsors_ChapterWithSponsors(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(30, 40, 'preview'),
            self._sponsor_chapter(50, 60, 'filler')]
@@ -136,7 +152,8 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_SponsorBlockChapters(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'chapter', title='sb c1'),
            self._sponsor_chapter(15, 16, 'chapter', title='sb c2'),
            self._sponsor_chapter(30, 40, 'preview'),
@@ -149,10 +166,14 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_UniqueNamesForOverlappingSponsors(self):
-        chapters = self._chapters([120], ['c']) + [
-            self._sponsor_chapter(10, 45, 'sponsor'), self._sponsor_chapter(20, 40, 'selfpromo'),
-            self._sponsor_chapter(50, 70, 'sponsor'), self._sponsor_chapter(60, 85, 'selfpromo'),
-            self._sponsor_chapter(90, 120, 'selfpromo'), self._sponsor_chapter(100, 110, 'sponsor')]
+        chapters = [
+            *self._chapters([120], ['c']),
+            self._sponsor_chapter(10, 45, 'sponsor'),
+            self._sponsor_chapter(20, 40, 'selfpromo'),
+            self._sponsor_chapter(50, 70, 'sponsor'),
+            self._sponsor_chapter(60, 85, 'selfpromo'),
+            self._sponsor_chapter(90, 120, 'selfpromo'),
+            self._sponsor_chapter(100, 110, 'sponsor')]
        expected = self._chapters(
            [10, 20, 40, 45, 50, 60, 70, 85, 90, 100, 110, 120],
            ['c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Sponsor, Unpaid/Self Promotion',
@@ -172,7 +193,8 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, self._chapters([40], ['c']), cuts)

    def test_remove_marked_arrange_sponsors_ChapterWithSponsorsAndCuts(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(30, 40, 'selfpromo', remove=True),
            self._sponsor_chapter(50, 60, 'interaction')]
@@ -185,24 +207,29 @@ class TestModifyChaptersPP(unittest.TestCase):
    def test_remove_marked_arrange_sponsors_ChapterWithSponsorCutInTheMiddle(self):
        cuts = [self._sponsor_chapter(20, 30, 'selfpromo', remove=True),
                self._chapter(40, 50, remove=True)]
-        chapters = self._chapters([70], ['c']) + [self._sponsor_chapter(10, 60, 'sponsor')] + cuts
+        chapters = [
+            *self._chapters([70], ['c']),
+            self._sponsor_chapter(10, 60, 'sponsor'),
+            *cuts]
        expected = self._chapters(
            [10, 40, 50], ['c', '[SponsorBlock]: Sponsor', 'c'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_ChapterWithCutHidingSponsor(self):
        cuts = [self._sponsor_chapter(20, 50, 'selfpromo', remove=True)]
-        chapters = self._chapters([60], ['c']) + [
+        chapters = [
+            *self._chapters([60], ['c']),
            self._sponsor_chapter(10, 20, 'intro'),
            self._sponsor_chapter(30, 40, 'sponsor'),
            self._sponsor_chapter(50, 60, 'outro'),
-        ] + cuts
+            *cuts]
        expected = self._chapters(
            [10, 20, 30], ['c', '[SponsorBlock]: Intermission/Intro Animation', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_ChapterWithAdjacentSponsors(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(20, 30, 'selfpromo'),
            self._sponsor_chapter(30, 40, 'interaction')]
@@ -213,7 +240,8 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_ChapterWithAdjacentCuts(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 20, 'sponsor'),
            self._sponsor_chapter(20, 30, 'interaction', remove=True),
            self._chapter(30, 40, remove=True),
@@ -226,7 +254,8 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, expected, [self._chapter(20, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_ChapterWithOverlappingSponsors(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 30, 'sponsor'),
            self._sponsor_chapter(20, 50, 'selfpromo'),
            self._sponsor_chapter(40, 60, 'interaction')]
@@ -238,7 +267,8 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_ChapterWithOverlappingCuts(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 30, 'sponsor', remove=True),
            self._sponsor_chapter(20, 50, 'selfpromo', remove=True),
            self._sponsor_chapter(40, 60, 'interaction', remove=True)]
@@ -246,7 +276,8 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, self._chapters([20], ['c']), [self._chapter(10, 60, remove=True)])

    def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsors(self):
-        chapters = self._chapters([170], ['c']) + [
+        chapters = [
+            *self._chapters([170], ['c']),
            self._sponsor_chapter(0, 30, 'intro'),
            self._sponsor_chapter(20, 50, 'sponsor'),
            self._sponsor_chapter(40, 60, 'selfpromo'),
@@ -267,7 +298,8 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

    def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingCuts(self):
-        chapters = self._chapters([170], ['c']) + [
+        chapters = [
+            *self._chapters([170], ['c']),
            self._chapter(0, 30, remove=True),
            self._sponsor_chapter(20, 50, 'sponsor', remove=True),
            self._chapter(40, 60, remove=True),
@@ -284,7 +316,8 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, self._chapters([20], ['c']), expected_cuts)

    def test_remove_marked_arrange_sponsors_OverlappingSponsorsDifferentTitlesAfterCut(self):
-        chapters = self._chapters([60], ['c']) + [
+        chapters = [
+            *self._chapters([60], ['c']),
            self._sponsor_chapter(10, 60, 'sponsor'),
            self._sponsor_chapter(10, 40, 'intro'),
            self._sponsor_chapter(30, 50, 'interaction'),
@@ -297,7 +330,8 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, expected, [self._chapter(30, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_SponsorsNoLongerOverlapAfterCut(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 30, 'sponsor'),
            self._sponsor_chapter(20, 50, 'interaction'),
            self._sponsor_chapter(30, 50, 'selfpromo', remove=True),
@@ -310,7 +344,8 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, expected, [self._chapter(30, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_SponsorsStillOverlapAfterCut(self):
-        chapters = self._chapters([70], ['c']) + [
+        chapters = [
+            *self._chapters([70], ['c']),
            self._sponsor_chapter(10, 60, 'sponsor'),
            self._sponsor_chapter(20, 60, 'interaction'),
            self._sponsor_chapter(30, 50, 'selfpromo', remove=True)]
@@ -321,7 +356,8 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, expected, [self._chapter(30, 50, remove=True)])

    def test_remove_marked_arrange_sponsors_ChapterWithRunsOfOverlappingSponsorsAndCuts(self):
-        chapters = self._chapters([200], ['c']) + [
+        chapters = [
+            *self._chapters([200], ['c']),
            self._sponsor_chapter(10, 40, 'sponsor'),
            self._sponsor_chapter(10, 30, 'intro'),
            self._chapter(20, 30, remove=True),
@@ -347,8 +383,9 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, expected_cuts)

    def test_remove_marked_arrange_sponsors_SponsorOverlapsMultipleChapters(self):
-        chapters = (self._chapters([20, 40, 60, 80, 100], ['c1', 'c2', 'c3', 'c4', 'c5'])
-                    + [self._sponsor_chapter(10, 90, 'sponsor')])
+        chapters = [
+            *self._chapters([20, 40, 60, 80, 100], ['c1', 'c2', 'c3', 'c4', 'c5']),
+            self._sponsor_chapter(10, 90, 'sponsor')]
        expected = self._chapters([10, 90, 100], ['c1', '[SponsorBlock]: Sponsor', 'c5'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -359,9 +396,10 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorsWithinSomeChaptersAndOverlappingOthers(self):
-        chapters = (self._chapters([10, 40, 60, 80], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(20, 30, 'sponsor'),
-                       self._sponsor_chapter(50, 70, 'selfpromo')])
+        chapters = [
+            *self._chapters([10, 40, 60, 80], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(20, 30, 'sponsor'),
+            self._sponsor_chapter(50, 70, 'selfpromo')]
        expected = self._chapters([10, 20, 30, 40, 50, 70, 80],
                                  ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c2', 'c3',
                                   '[SponsorBlock]: Unpaid/Self Promotion', 'c4'])
@@ -374,8 +412,9 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_ChaptersAfterLastSponsor(self):
-        chapters = (self._chapters([20, 40, 50, 60], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(10, 30, 'music_offtopic')])
+        chapters = [
+            *self._chapters([20, 40, 50, 60], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(10, 30, 'music_offtopic')]
        expected = self._chapters(
            [10, 30, 40, 50, 60],
            ['c1', '[SponsorBlock]: Non-Music Section', 'c2', 'c3', 'c4'])
@@ -388,8 +427,9 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorStartsAtChapterStart(self):
-        chapters = (self._chapters([10, 20, 40], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(20, 30, 'sponsor')])
+        chapters = [
+            *self._chapters([10, 20, 40], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(20, 30, 'sponsor')]
        expected = self._chapters([10, 20, 30, 40], ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c3'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -400,8 +440,9 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorEndsAtChapterEnd(self):
-        chapters = (self._chapters([10, 30, 40], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(20, 30, 'sponsor')])
+        chapters = [
+            *self._chapters([10, 30, 40], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(20, 30, 'sponsor')]
        expected = self._chapters([10, 20, 30, 40], ['c1', 'c2', '[SponsorBlock]: Sponsor', 'c3'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -412,8 +453,9 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorCoincidesWithChapters(self):
-        chapters = (self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(10, 30, 'sponsor')])
+        chapters = [
+            *self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(10, 30, 'sponsor')]
        expected = self._chapters([10, 30, 40], ['c1', '[SponsorBlock]: Sponsor', 'c4'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -424,8 +466,9 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorsAtVideoBoundaries(self):
-        chapters = (self._chapters([20, 40, 60], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(0, 10, 'intro'), self._sponsor_chapter(50, 60, 'outro')])
+        chapters = [
+            *self._chapters([20, 40, 60], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(0, 10, 'intro'), self._sponsor_chapter(50, 60, 'outro')]
        expected = self._chapters(
            [10, 20, 40, 50, 60], ['[SponsorBlock]: Intermission/Intro Animation', 'c1', 'c2', 'c3', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
@@ -437,8 +480,10 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_SponsorsOverlapChaptersAtVideoBoundaries(self):
-        chapters = (self._chapters([10, 40, 50], ['c1', 'c2', 'c3'])
-                    + [self._sponsor_chapter(0, 20, 'intro'), self._sponsor_chapter(30, 50, 'outro')])
+        chapters = [
+            *self._chapters([10, 40, 50], ['c1', 'c2', 'c3']),
+            self._sponsor_chapter(0, 20, 'intro'),
+            self._sponsor_chapter(30, 50, 'outro')]
        expected = self._chapters(
            [20, 30, 50], ['[SponsorBlock]: Intermission/Intro Animation', 'c2', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])
@@ -450,8 +495,10 @@ class TestModifyChaptersPP(unittest.TestCase):
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, cuts)

    def test_remove_marked_arrange_sponsors_EverythingSponsored(self):
-        chapters = (self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4'])
-                    + [self._sponsor_chapter(0, 20, 'intro'), self._sponsor_chapter(20, 40, 'outro')])
+        chapters = [
+            *self._chapters([10, 20, 30, 40], ['c1', 'c2', 'c3', 'c4']),
+            self._sponsor_chapter(0, 20, 'intro'),
+            self._sponsor_chapter(20, 40, 'outro')]
        expected = self._chapters([20, 40], ['[SponsorBlock]: Intermission/Intro Animation', '[SponsorBlock]: Endcards/Credits'])
        self._remove_marked_arrange_sponsors_test_impl(chapters, expected, [])

@@ -491,38 +538,39 @@ class TestModifyChaptersPP(unittest.TestCase):
            chapters, self._chapters([2.5], ['c2']), cuts)

    def test_remove_marked_arrange_sponsors_TinyChaptersResultingFromSponsorOverlapAreIgnored(self):
-        chapters = self._chapters([1, 3, 4], ['c1', 'c2', 'c3']) + [
+        chapters = [
+            *self._chapters([1, 3, 4], ['c1', 'c2', 'c3']),
            self._sponsor_chapter(1.5, 2.5, 'sponsor')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([1.5, 2.5, 4], ['c1', '[SponsorBlock]: Sponsor', 'c3']), [])

    def test_remove_marked_arrange_sponsors_TinySponsorsOverlapsAreIgnored(self):
-        chapters = self._chapters([2, 3, 5], ['c1', 'c2', 'c3']) + [
+        chapters = [
+            *self._chapters([2, 3, 5], ['c1', 'c2', 'c3']),
            self._sponsor_chapter(1, 3, 'sponsor'),
-            self._sponsor_chapter(2.5, 4, 'selfpromo')
-        ]
+            self._sponsor_chapter(2.5, 4, 'selfpromo')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([1, 3, 4, 5], [
                'c1', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Unpaid/Self Promotion', 'c3']), [])

    def test_remove_marked_arrange_sponsors_TinySponsorsPrependedToTheNextSponsor(self):
-        chapters = self._chapters([4], ['c']) + [
+        chapters = [
+            *self._chapters([4], ['c']),
            self._sponsor_chapter(1.5, 2, 'sponsor'),
-            self._sponsor_chapter(2, 4, 'selfpromo')
-        ]
+            self._sponsor_chapter(2, 4, 'selfpromo')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([1.5, 4], ['c', '[SponsorBlock]: Unpaid/Self Promotion']), [])

    def test_remove_marked_arrange_sponsors_SmallestSponsorInTheOverlapGetsNamed(self):
        self._pp._sponsorblock_chapter_title = '[SponsorBlock]: %(name)s'
-        chapters = self._chapters([10], ['c']) + [
+        chapters = [
+            *self._chapters([10], ['c']),
            self._sponsor_chapter(2, 8, 'sponsor'),
-            self._sponsor_chapter(4, 6, 'selfpromo')
-        ]
+            self._sponsor_chapter(4, 6, 'selfpromo')]
        self._remove_marked_arrange_sponsors_test_impl(
            chapters, self._chapters([2, 4, 6, 8, 10], [
                'c', '[SponsorBlock]: Sponsor', '[SponsorBlock]: Unpaid/Self Promotion',
-                '[SponsorBlock]: Sponsor', 'c'
+                '[SponsorBlock]: Sponsor', 'c',
            ]), [])

    def test_make_concat_opts_CommonCase(self):
@@ -577,3 +625,7 @@ outpoint 10.000000
        self.assertEqual(
            r"'special '\'' characters '\'' galore'\'\'\'",
            self._pp._quote_for_ffmpeg("special ' characters ' galore'''"))
+
+
+if __name__ == '__main__':
+    unittest.main()
71  test/test_pot/conftest.py  Normal file
@@ -0,0 +1,71 @@
import collections

import pytest

from yt_dlp import YoutubeDL
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.extractor.common import InfoExtractor
from yt_dlp.extractor.youtube.pot._provider import IEContentProviderLogger
from yt_dlp.extractor.youtube.pot.provider import PoTokenRequest, PoTokenContext
from yt_dlp.utils.networking import HTTPHeaderDict


class MockLogger(IEContentProviderLogger):

    log_level = IEContentProviderLogger.LogLevel.TRACE

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.messages = collections.defaultdict(list)

    def trace(self, message: str):
        self.messages['trace'].append(message)

    def debug(self, message: str):
        self.messages['debug'].append(message)

    def info(self, message: str):
        self.messages['info'].append(message)

    def warning(self, message: str, *, once=False):
        self.messages['warning'].append(message)

    def error(self, message: str):
        self.messages['error'].append(message)


@pytest.fixture
def ie() -> InfoExtractor:
    ydl = YoutubeDL()
    return ydl.get_info_extractor('Youtube')


@pytest.fixture
def logger() -> MockLogger:
    return MockLogger()


@pytest.fixture()
def pot_request() -> PoTokenRequest:
    return PoTokenRequest(
        context=PoTokenContext.GVS,
        innertube_context={'client': {'clientName': 'WEB'}},
        innertube_host='youtube.com',
        session_index=None,
        player_url=None,
        is_authenticated=False,
        video_webpage=None,

        visitor_data='example-visitor-data',
        data_sync_id='example-data-sync-id',
        video_id='example-video-id',

        request_cookiejar=YoutubeDLCookieJar(),
        request_proxy=None,
        request_headers=HTTPHeaderDict(),
        request_timeout=None,
        request_source_address=None,
        request_verify_tls=True,

        bypass_cache=False,
    )
117
test/test_pot/test_pot_builtin_memorycache.py
Normal file
117
test/test_pot/test_pot_builtin_memorycache.py
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
import threading
import time
from collections import OrderedDict
import pytest
from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot._builtin.memory_cache import MemoryLRUPCP, memorylru_preference, initialize_global_cache
from yt_dlp.version import __version__
from yt_dlp.extractor.youtube.pot._registry import _pot_cache_providers, _pot_memory_cache


class TestMemoryLRUPCS:

    def test_base_type(self):
        assert issubclass(MemoryLRUPCP, IEContentProvider)
        assert issubclass(MemoryLRUPCP, BuiltinIEContentProvider)

    @pytest.fixture
    def pcp(self, ie, logger) -> MemoryLRUPCP:
        return MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), max_size))

    def test_is_registered(self):
        assert _pot_cache_providers.value.get('MemoryLRU') == MemoryLRUPCP

    def test_initialization(self, pcp):
        assert pcp.PROVIDER_NAME == 'memory'
        assert pcp.PROVIDER_VERSION == __version__
        assert pcp.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcp.is_available()

    def test_store_and_get(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1

    def test_store_ignore_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_store_override_existing_key(self, ie, logger):
        MAX_SIZE = 2
        pcp = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        pcp.store('key1', 'value1', int(time.time()) + 60)
        pcp.store('key2', 'value2', int(time.time()) + 60)
        assert len(pcp.cache) == 2
        pcp.store('key1', 'value2', int(time.time()) + 60)
        # Ensure that the override key gets added to the end of the cache instead of in the same position
        pcp.store('key3', 'value3', int(time.time()) + 60)
        assert pcp.get('key1') == 'value2'

    def test_store_ignore_expired_existing_key(self, pcp):
        pcp.store('key1', 'value2', int(time.time()) + 60)
        pcp.store('key1', 'value1', int(time.time()) - 1)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value2'
        assert len(pcp.cache) == 1

    def test_get_key_expired(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 60)
        assert pcp.get('key1') == 'value1'
        assert len(pcp.cache) == 1
        pcp.cache['key1'] = ('value1', int(time.time()) - 1)
        assert pcp.get('key1') is None
        assert len(pcp.cache) == 0

    def test_lru_eviction(self, ie, logger):
        MAX_SIZE = 2
        provider = MemoryLRUPCP(ie, logger, {}, initialize_cache=lambda max_size: (OrderedDict(), threading.Lock(), MAX_SIZE))
        provider.store('key1', 'value1', int(time.time()) + 5)
        provider.store('key2', 'value2', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') == 'value1'

        provider.store('key3', 'value3', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key2') is None

        provider.store('key4', 'value4', int(time.time()) + 5)
        assert len(provider.cache) == 2

        assert provider.get('key1') is None
        assert provider.get('key3') == 'value3'
        assert provider.get('key4') == 'value4'

    def test_delete(self, pcp):
        pcp.store('key1', 'value1', int(time.time()) + 5)
        assert len(pcp.cache) == 1
        assert pcp.get('key1') == 'value1'
        pcp.delete('key1')
        assert len(pcp.cache) == 0
        assert pcp.get('key1') is None

    def test_use_global_cache_default(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is _pot_memory_cache.value['lock']

        pcp2 = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == pcp2.max_size == _pot_memory_cache.value['max_size'] == 25
        assert pcp.cache is pcp2.cache is _pot_memory_cache.value['cache']
        assert pcp.lock is pcp2.lock is _pot_memory_cache.value['lock']

    def test_fail_max_size_change_global(self, ie, logger):
        pcp = MemoryLRUPCP(ie, logger, {})
        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25
        with pytest.raises(ValueError, match='Cannot change max_size of initialized global memory cache'):
            initialize_global_cache(50)

        assert pcp.max_size == _pot_memory_cache.value['max_size'] == 25

    def test_memory_lru_preference(self, pcp, ie, pot_request):
        assert memorylru_preference(pcp, pot_request) == 10000

test/test_pot/test_pot_builtin_utils.py (new file, 47 lines)
@@ -0,0 +1,47 @@
import pytest
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenContext,
)

from yt_dlp.extractor.youtube.pot.utils import get_webpo_content_binding, ContentBindingType


class TestGetWebPoContentBinding:

    @pytest.mark.parametrize('client_name, context, is_authenticated, expected', [
        *[(client, context, is_authenticated, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, expected in [
                (PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
                (PoTokenContext.PLAYER, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.SUBS, False, ('example-video-id', ContentBindingType.VIDEO_ID)),
                (PoTokenContext.GVS, True, ('example-data-sync-id', ContentBindingType.DATASYNC_ID)),
        ]],
        ('WEB_REMIX', PoTokenContext.GVS, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('WEB_REMIX', PoTokenContext.PLAYER, False, ('example-visitor-data', ContentBindingType.VISITOR_DATA)),
        ('ANDROID', PoTokenContext.GVS, False, (None, None)),
        ('IOS', PoTokenContext.GVS, False, (None, None)),
    ])
    def test_get_webpo_content_binding(self, pot_request, client_name, context, is_authenticated, expected):
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert get_webpo_content_binding(pot_request) == expected

    def test_extract_visitor_id(self, pot_request):
        pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == ('123abcXYZ_-', ContentBindingType.VISITOR_ID)

    def test_invalid_visitor_id(self, pot_request):
        # visitor id not alphanumeric (i.e. protobuf extraction failed)
        pot_request.visitor_data = 'CggxMjM0NTY3OCiA4s-qBg%3D%3D'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_no_visitor_id(self, pot_request):
        pot_request.visitor_data = 'KIDiz6oG'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

    def test_invalid_base64(self, pot_request):
        pot_request.visitor_data = 'invalid-base64'
        assert get_webpo_content_binding(pot_request, bind_to_visitor_id=True) == (pot_request.visitor_data, ContentBindingType.VISITOR_DATA)

test/test_pot/test_pot_builtin_webpospec.py (new file, 92 lines)
@@ -0,0 +1,92 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider, BuiltinIEContentProvider
from yt_dlp.extractor.youtube.pot.cache import CacheProviderWritePolicy
from yt_dlp.utils import bug_reports_message
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenRequest,
    PoTokenContext,
)
from yt_dlp.version import __version__

from yt_dlp.extractor.youtube.pot._builtin.webpo_cachespec import WebPoPCSP
from yt_dlp.extractor.youtube.pot._registry import _pot_pcs_providers


@pytest.fixture()
def pot_request(pot_request) -> PoTokenRequest:
    pot_request.visitor_data = 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D'  # visitor_id=123abcXYZ_-
    return pot_request


class TestWebPoPCSP:
    def test_base_type(self):
        assert issubclass(WebPoPCSP, IEContentProvider)
        assert issubclass(WebPoPCSP, BuiltinIEContentProvider)

    def test_init(self, ie, logger):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.PROVIDER_NAME == 'webpo'
        assert pcs.PROVIDER_VERSION == __version__
        assert pcs.BUG_REPORT_MESSAGE == bug_reports_message(before='')
        assert pcs.is_available()

    def test_is_registered(self):
        assert _pot_pcs_providers.value.get('WebPo') == WebPoPCSP

    @pytest.mark.parametrize('client_name, context, is_authenticated', [
        ('ANDROID', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.GVS, False),
        ('IOS', PoTokenContext.PLAYER, False),
    ])
    def test_not_supports(self, ie, logger, pot_request, client_name, context, is_authenticated):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        assert pcs.generate_cache_spec(pot_request) is None

    @pytest.mark.parametrize('client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected', [
        *[(client, context, is_authenticated, remote_host, source_address, request_proxy, expected) for client in [
            'WEB', 'MWEB', 'TVHTML5', 'WEB_EMBEDDED_PLAYER', 'WEB_CREATOR', 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', 'TVHTML5_SIMPLY']
            for context, is_authenticated, remote_host, source_address, request_proxy, expected in [
                (PoTokenContext.GVS, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
                (PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'video_id'}),
                (PoTokenContext.GVS, True, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': 'example-data-sync-id', 'cbt': 'datasync_id'}),
        ]],
        ('WEB_REMIX', PoTokenContext.PLAYER, False, 'example-remote-host', 'example-source-address', 'example-request-proxy', {'t': 'webpo', 'ip': 'example-remote-host', 'sa': 'example-source-address', 'px': 'example-request-proxy', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id'}),
        ('WEB', PoTokenContext.GVS, False, None, None, None, {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'visitor_id', 'ip': None, 'sa': None, 'px': None}),
        ('TVHTML5', PoTokenContext.PLAYER, False, None, None, 'http://example.com', {'t': 'webpo', 'cb': '123abcXYZ_-', 'cbt': 'video_id', 'ip': None, 'sa': None, 'px': 'http://example.com'}),
    ])
    def test_generate_key_bindings(self, ie, logger, pot_request, client_name, context, is_authenticated, remote_host, source_address, request_proxy, expected):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = client_name
        pot_request.context = context
        pot_request.is_authenticated = is_authenticated
        pot_request.innertube_context['client']['remoteHost'] = remote_host
        pot_request.request_source_address = source_address
        pot_request.request_proxy = request_proxy
        pot_request.video_id = '123abcXYZ_-'  # same as visitor id to test type

        assert pcs.generate_cache_spec(pot_request).key_bindings == expected

    def test_no_bind_visitor_id(self, ie, logger, pot_request):
        # Should not bind to visitor id if setting is set to False
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={'bind_to_visitor_id': ['false']})
        pot_request.innertube_context['client']['clientName'] = 'WEB'
        pot_request.context = PoTokenContext.GVS
        pot_request.is_authenticated = False
        assert pcs.generate_cache_spec(pot_request).key_bindings == {'t': 'webpo', 'ip': None, 'sa': None, 'px': None, 'cb': 'CgsxMjNhYmNYWVpfLSiA4s%2DqBg%3D%3D', 'cbt': 'visitor_data'}

    def test_default_ttl(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        assert pcs.generate_cache_spec(pot_request).default_ttl == 6 * 60 * 60  # should default to 6 hours

    def test_write_policy(self, ie, logger, pot_request):
        pcs = WebPoPCSP(ie=ie, logger=logger, settings={})
        pot_request.context = PoTokenContext.GVS
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL
        pot_request.context = PoTokenContext.PLAYER
        assert pcs.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST

test/test_pot/test_pot_director.py (new file, 1529 lines)
File diff suppressed because it is too large

test/test_pot/test_pot_framework.py (new file, 629 lines)
@@ -0,0 +1,629 @@
import pytest

from yt_dlp.extractor.youtube.pot._provider import IEContentProvider
from yt_dlp.cookies import YoutubeDLCookieJar
from yt_dlp.utils.networking import HTTPHeaderDict
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenRequest,
    PoTokenContext,
    ExternalRequestFeature,
)

from yt_dlp.extractor.youtube.pot.cache import (
    PoTokenCacheProvider,
    PoTokenCacheSpec,
    PoTokenCacheSpecProvider,
    CacheProviderWritePolicy,
)

import yt_dlp.extractor.youtube.pot.cache as cache

from yt_dlp.networking import Request
from yt_dlp.extractor.youtube.pot.provider import (
    PoTokenResponse,
    PoTokenProvider,
    PoTokenProviderRejectedRequest,
    provider_bug_report_message,
    register_provider,
    register_preference,
)

from yt_dlp.extractor.youtube.pot._registry import _pot_providers, _ptp_preferences, _pot_pcs_providers, _pot_cache_providers, _pot_cache_provider_preferences


class ExamplePTP(PoTokenProvider):
    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    _SUPPORTED_CLIENTS = ('WEB',)
    _SUPPORTED_CONTEXTS = (PoTokenContext.GVS, )

    _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
        ExternalRequestFeature.PROXY_SCHEME_HTTP,
        ExternalRequestFeature.PROXY_SCHEME_SOCKS5H,
    )

    def is_available(self) -> bool:
        return True

    def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
        return PoTokenResponse('example-token', expires_at=123)


class ExampleCacheProviderPCP(PoTokenCacheProvider):

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def is_available(self) -> bool:
        return True

    def get(self, key: str):
        return 'example-cache'

    def store(self, key: str, value: str, expires_at: int):
        pass

    def delete(self, key: str):
        pass


class ExampleCacheSpecProviderPCSP(PoTokenCacheSpecProvider):

    PROVIDER_NAME = 'example'
    PROVIDER_VERSION = '0.0.1'
    BUG_REPORT_LOCATION = 'https://example.com/issues'

    def generate_cache_spec(self, request: PoTokenRequest):
        return PoTokenCacheSpec(
            key_bindings={'field': 'example-key'},
            default_ttl=60,
            write_policy=CacheProviderWritePolicy.WRITE_FIRST,
        )


class TestPoTokenProvider:

    def test_base_type(self):
        assert issubclass(PoTokenProvider, IEContentProvider)

    def test_create_provider_missing_fetch_method(self, ie, logger):
        class MissingMethodsPTP(PoTokenProvider):
            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPTP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_available_method(self, ie, logger):
        class MissingMethodsPTP(PoTokenProvider):
            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

        with pytest.raises(TypeError):
            MissingMethodsPTP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        class BarebonesProviderPTP(PoTokenProvider):
            def is_available(self) -> bool:
                return True

            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

        provider = BarebonesProviderPTP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_example_provider_success(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'Example'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()

        response = provider.request_pot(pot_request)

        assert response.po_token == 'example-token'
        assert response.expires_at == 123

    def test_provider_unsupported_context(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.context = PoTokenContext.PLAYER

        with pytest.raises(PoTokenProviderRejectedRequest):
            provider.request_pot(pot_request)

    def test_provider_unsupported_client(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.innertube_context['client']['clientName'] = 'ANDROID'

        with pytest.raises(PoTokenProviderRejectedRequest):
            provider.request_pot(pot_request)

    def test_provider_unsupported_proxy_scheme(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})
        pot_request.request_proxy = 'socks4://example.com'

        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support proxy scheme "socks4". Supported proxy '
                  'schemes: http, socks5h',
        ):
            provider.request_pot(pot_request)

        pot_request.request_proxy = 'http://example.com'

        assert provider.request_pot(pot_request)

    def test_provider_ignore_external_request_features(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = None

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_proxy = 'socks5://example.com'
        assert provider.request_pot(pot_request)
        pot_request.request_source_address = '0.0.0.0'
        assert provider.request_pot(pot_request)

    def test_provider_unsupported_external_request_source_address(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_source_address = None
        assert provider.request_pot(pot_request)

        pot_request.request_source_address = '0.0.0.0'
        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support setting source address',
        ):
            provider.request_pot(pot_request)

    def test_provider_supported_external_request_source_address(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
                ExternalRequestFeature.SOURCE_ADDRESS,
            )

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_source_address = None
        assert provider.request_pot(pot_request)

        pot_request.request_source_address = '0.0.0.0'
        assert provider.request_pot(pot_request)

    def test_provider_unsupported_external_request_tls_verification(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = tuple()

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_verify_tls = True
        assert provider.request_pot(pot_request)

        pot_request.request_verify_tls = False
        with pytest.raises(
            PoTokenProviderRejectedRequest,
            match='External requests by "example" provider do not support ignoring TLS certificate failures',
        ):
            provider.request_pot(pot_request)

    def test_provider_supported_external_request_tls_verification(self, ie, logger, pot_request):
        class InternalPTP(ExamplePTP):
            _SUPPORTED_EXTERNAL_REQUEST_FEATURES = (
                ExternalRequestFeature.DISABLE_TLS_VERIFICATION,
            )

        provider = InternalPTP(ie=ie, logger=logger, settings={})

        pot_request.request_verify_tls = True
        assert provider.request_pot(pot_request)

        pot_request.request_verify_tls = False
        assert provider.request_pot(pot_request)

    def test_provider_request_webpage(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        cookiejar = YoutubeDLCookieJar()
        pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
        pot_request.request_proxy = 'socks5://example-proxy.com'
        pot_request.request_cookiejar = cookiejar

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), pot_request=pot_request)

        assert sent_request.url == 'https://example.com'
        assert sent_request.headers['User-Agent'] == 'example-user-agent'
        assert sent_request.proxies == {'all': 'socks5://example-proxy.com'}
        assert sent_request.extensions['cookiejar'] is cookiejar
        assert 'Requesting webpage' in logger.messages['info']

    def test_provider_request_webpage_override(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        cookiejar_request = YoutubeDLCookieJar()
        pot_request.request_headers = HTTPHeaderDict({'User-Agent': 'example-user-agent'})
        pot_request.request_proxy = 'socks5://example-proxy.com'
        pot_request.request_cookiejar = cookiejar_request

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
            headers={'User-Agent': 'override-user-agent-override'},
            proxies={'http': 'http://example-proxy-override.com'},
            extensions={'cookiejar': YoutubeDLCookieJar()},
        ), pot_request=pot_request, note='Custom requesting webpage')

        assert sent_request.url == 'https://example.com'
        assert sent_request.headers['User-Agent'] == 'override-user-agent-override'
        assert sent_request.proxies == {'http': 'http://example-proxy-override.com'}
        assert sent_request.extensions['cookiejar'] is not cookiejar_request
        assert 'Custom requesting webpage' in logger.messages['info']

    def test_provider_request_webpage_no_log(self, ie, logger, pot_request):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), note=False)

        assert sent_request.url == 'https://example.com'
        assert 'info' not in logger.messages

    def test_provider_request_webpage_no_pot_request(self, ie, logger):
        provider = ExamplePTP(ie=ie, logger=logger, settings={})

        def mock_urlopen(request):
            return request

        ie._downloader.urlopen = mock_urlopen

        sent_request = provider._request_webpage(Request(
            'https://example.com',
        ), pot_request=None)

        assert sent_request.url == 'https://example.com'

    def test_get_config_arg(self, ie, logger):
        provider = ExamplePTP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})

        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenProvider):
            PROVIDER_NAME = 'invalid-suffix'

            def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
                raise PoTokenProviderRejectedRequest('Not implemented')

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


class TestPoTokenCacheProvider:

    def test_base_type(self):
        assert issubclass(PoTokenCacheProvider, IEContentProvider)

    def test_create_provider_missing_get_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_store_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_delete_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def is_available(self) -> bool:
                return True

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_create_provider_missing_is_available_method(self, ie, logger):
        class MissingMethodsPCP(PoTokenCacheProvider):
            def get(self, key: str):
                pass

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        with pytest.raises(TypeError):
            MissingMethodsPCP(ie=ie, logger=logger, settings={})

    def test_barebones_provider(self, ie, logger):
        class BarebonesProviderPCP(PoTokenCacheProvider):

            def is_available(self) -> bool:
                return True

            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

        provider = BarebonesProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'

    def test_create_provider_example(self, ie, logger):
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()

    def test_get_config_arg(self, ie, logger):
        provider = ExampleCacheProviderPCP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})
        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenCacheProvider):
            def get(self, key: str):
                return 'example-cache'

            def store(self, key: str, value: str, expires_at: int):
                pass

            def delete(self, key: str):
                pass

            def is_available(self) -> bool:
                return True

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


class TestPoTokenCacheSpecProvider:

    def test_base_type(self):
        assert issubclass(PoTokenCacheSpecProvider, IEContentProvider)

    def test_create_provider_missing_supports_method(self, ie, logger):
        class MissingMethodsPCS(PoTokenCacheSpecProvider):
            pass

        with pytest.raises(TypeError):
            MissingMethodsPCS(ie=ie, logger=logger, settings={})

    def test_create_provider_barebones(self, ie, pot_request, logger):
        class BarebonesProviderPCSP(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return PoTokenCacheSpec(
                    default_ttl=100,
                    key_bindings={},
                )

        provider = BarebonesProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'BarebonesProvider'
        assert provider.PROVIDER_KEY == 'BarebonesProvider'
        assert provider.PROVIDER_VERSION == '0.0.0'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at (developer has not provided a bug report location) .'
        assert provider.is_available()
        assert provider.generate_cache_spec(request=pot_request).default_ttl == 100
        assert provider.generate_cache_spec(request=pot_request).key_bindings == {}
        assert provider.generate_cache_spec(request=pot_request).write_policy == CacheProviderWritePolicy.WRITE_ALL

    def test_create_provider_example(self, ie, pot_request, logger):
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={})
        assert provider.PROVIDER_NAME == 'example'
        assert provider.PROVIDER_KEY == 'ExampleCacheSpecProvider'
        assert provider.PROVIDER_VERSION == '0.0.1'
        assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'
        assert provider.is_available()
        assert provider.generate_cache_spec(pot_request)
        assert provider.generate_cache_spec(pot_request).key_bindings == {'field': 'example-key'}
        assert provider.generate_cache_spec(pot_request).default_ttl == 60
        assert provider.generate_cache_spec(pot_request).write_policy == CacheProviderWritePolicy.WRITE_FIRST

    def test_get_config_arg(self, ie, logger):
        provider = ExampleCacheSpecProviderPCSP(ie=ie, logger=logger, settings={'abc': ['123D'], 'xyz': ['456a', '789B']})

        assert provider._configuration_arg('abc') == ['123d']
        assert provider._configuration_arg('abc', default=['default']) == ['123d']
        assert provider._configuration_arg('ABC', default=['default']) == ['default']
        assert provider._configuration_arg('abc', casesense=True) == ['123D']
        assert provider._configuration_arg('xyz', casesense=False) == ['456a', '789b']

    def test_require_class_end_with_suffix(self, ie, logger):
        class InvalidSuffix(PoTokenCacheSpecProvider):
            def generate_cache_spec(self, request: PoTokenRequest):
                return None

        provider = InvalidSuffix(ie=ie, logger=logger, settings={})

        with pytest.raises(AssertionError):
            provider.PROVIDER_KEY  # noqa: B018


class TestPoTokenRequest:
    def test_copy_request(self, pot_request):
        copied_request = pot_request.copy()

        assert copied_request is not pot_request
        assert copied_request.context == pot_request.context
        assert copied_request.innertube_context == pot_request.innertube_context
        assert copied_request.innertube_context is not pot_request.innertube_context
        copied_request.innertube_context['client']['clientName'] = 'ANDROID'
        assert pot_request.innertube_context['client']['clientName'] != 'ANDROID'
        assert copied_request.innertube_host == pot_request.innertube_host
        assert copied_request.session_index == pot_request.session_index
        assert copied_request.player_url == pot_request.player_url
        assert copied_request.is_authenticated == pot_request.is_authenticated
        assert copied_request.visitor_data == pot_request.visitor_data
        assert copied_request.data_sync_id == pot_request.data_sync_id
        assert copied_request.video_id == pot_request.video_id
        assert copied_request.request_cookiejar is pot_request.request_cookiejar
        assert copied_request.request_proxy == pot_request.request_proxy
        assert copied_request.request_headers == pot_request.request_headers
        assert copied_request.request_headers is not pot_request.request_headers
        assert copied_request.request_timeout == pot_request.request_timeout
        assert copied_request.request_source_address == pot_request.request_source_address
        assert copied_request.request_verify_tls == pot_request.request_verify_tls
        assert copied_request.bypass_cache == pot_request.bypass_cache


def test_provider_bug_report_message(ie, logger):
    provider = ExamplePTP(ie=ie, logger=logger, settings={})
    assert provider.BUG_REPORT_MESSAGE == 'please report this issue to the provider developer at https://example.com/issues .'

    message = provider_bug_report_message(provider)
    assert message == '; please report this issue to the provider developer at https://example.com/issues .'

    message_before = provider_bug_report_message(provider, before='custom message!')
    assert message_before == 'custom message! Please report this issue to the provider developer at https://example.com/issues .'


def test_register_provider(ie):

    @register_provider
    class UnavailableProviderPTP(PoTokenProvider):
        def is_available(self) -> bool:
            return False

        def _real_request_pot(self, request: PoTokenRequest) -> PoTokenResponse:
            raise PoTokenProviderRejectedRequest('Not implemented')

    assert _pot_providers.value.get('UnavailableProvider') == UnavailableProviderPTP
    _pot_providers.value.pop('UnavailableProvider')


def test_register_pot_preference(ie):
    before = len(_ptp_preferences.value)

    @register_preference(ExamplePTP)
    def unavailable_preference(provider: PoTokenProvider, request: PoTokenRequest):
        return 1

    assert len(_ptp_preferences.value) == before + 1


def test_register_cache_provider(ie):

    @cache.register_provider
    class UnavailableCacheProviderPCP(PoTokenCacheProvider):
        def is_available(self) -> bool:
            return False

        def get(self, key: str):
            return 'example-cache'

        def store(self, key: str, value: str, expires_at: int):
            pass

        def delete(self, key: str):
            pass

    assert _pot_cache_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCP
    _pot_cache_providers.value.pop('UnavailableCacheProvider')


def test_register_cache_provider_spec(ie):

    @cache.register_spec
    class UnavailableCacheProviderPCSP(PoTokenCacheSpecProvider):
        def is_available(self) -> bool:
            return False

        def generate_cache_spec(self, request: PoTokenRequest):
            return None

    assert _pot_pcs_providers.value.get('UnavailableCacheProvider') == UnavailableCacheProviderPCSP
    _pot_pcs_providers.value.pop('UnavailableCacheProvider')


def test_register_cache_provider_preference(ie):
    before = len(_pot_cache_provider_preferences.value)

    @cache.register_preference(ExampleCacheProviderPCP)
    def unavailable_preference(provider: PoTokenCacheProvider, request: PoTokenRequest):
        return 1

    assert len(_pot_cache_provider_preferences.value) == before + 1


def test_logger_log_level(logger):
    assert logger.LogLevel('INFO') == logger.LogLevel.INFO
    assert logger.LogLevel('debuG') == logger.LogLevel.DEBUG
    assert logger.LogLevel(10) == logger.LogLevel.DEBUG
    assert logger.LogLevel('UNKNOWN') == logger.LogLevel.INFO
|
@ -25,7 +25,7 @@ from socketserver import (
|
|||||||
ThreadingTCPServer,
|
ThreadingTCPServer,
|
||||||
)
|
)
|
||||||
|
|
||||||
from test.helper import http_server_port
|
from test.helper import http_server_port, verify_address_availability
|
||||||
from yt_dlp.networking import Request
|
from yt_dlp.networking import Request
|
||||||
from yt_dlp.networking.exceptions import ProxyError, TransportError
|
from yt_dlp.networking.exceptions import ProxyError, TransportError
|
||||||
from yt_dlp.socks import (
|
from yt_dlp.socks import (
|
||||||
@ -95,7 +95,7 @@ class Socks5ProxyHandler(StreamRequestHandler, SocksProxyHandler):
|
|||||||
return
|
return
|
||||||
|
|
||||||
elif Socks5Auth.AUTH_USER_PASS in methods:
|
elif Socks5Auth.AUTH_USER_PASS in methods:
|
||||||
self.connection.sendall(struct.pack("!BB", SOCKS5_VERSION, Socks5Auth.AUTH_USER_PASS))
|
self.connection.sendall(struct.pack('!BB', SOCKS5_VERSION, Socks5Auth.AUTH_USER_PASS))
|
||||||
|
|
||||||
_, user_len = struct.unpack('!BB', self.connection.recv(2))
|
_, user_len = struct.unpack('!BB', self.connection.recv(2))
|
||||||
username = self.connection.recv(user_len).decode()
|
username = self.connection.recv(user_len).decode()
|
||||||
@ -174,7 +174,7 @@ class Socks4ProxyHandler(StreamRequestHandler, SocksProxyHandler):
|
|||||||
if 0x0 < dest_ip <= 0xFF:
|
if 0x0 < dest_ip <= 0xFF:
|
||||||
use_remote_dns = True
|
use_remote_dns = True
|
||||||
else:
|
else:
|
||||||
socks_info['ipv4_address'] = socket.inet_ntoa(struct.pack("!I", dest_ip))
|
socks_info['ipv4_address'] = socket.inet_ntoa(struct.pack('!I', dest_ip))
|
||||||
|
|
||||||
user_id = self._read_until_null().decode()
|
user_id = self._read_until_null().decode()
|
||||||
if user_id != (self.socks_kwargs.get('user_id') or ''):
|
if user_id != (self.socks_kwargs.get('user_id') or ''):
|
||||||
@ -210,6 +210,18 @@ class SocksHTTPTestRequestHandler(http.server.BaseHTTPRequestHandler, SocksTestR
|
|||||||
self.wfile.write(payload.encode())
|
self.wfile.write(payload.encode())
|
||||||
|
|
||||||
|
|
||||||
|
class SocksWebSocketTestRequestHandler(SocksTestRequestHandler):
|
||||||
|
def handle(self):
|
||||||
|
import websockets.sync.server
|
||||||
|
protocol = websockets.ServerProtocol()
|
||||||
|
connection = websockets.sync.server.ServerConnection(socket=self.request, protocol=protocol, close_timeout=0)
|
||||||
|
connection.handshake()
|
||||||
|
for message in connection:
|
||||||
|
if message == 'socks_info':
|
||||||
|
connection.send(json.dumps(self.socks_info))
|
||||||
|
connection.close()
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def socks_server(socks_server_class, request_handler, bind_ip=None, **socks_server_kwargs):
|
def socks_server(socks_server_class, request_handler, bind_ip=None, **socks_server_kwargs):
|
||||||
server = server_thread = None
|
server = server_thread = None
|
||||||
@ -252,8 +264,22 @@ class HTTPSocksTestProxyContext(SocksProxyTestContext):
|
|||||||
return json.loads(handler.send(request).read().decode())
|
return json.loads(handler.send(request).read().decode())
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketSocksTestProxyContext(SocksProxyTestContext):
|
||||||
|
REQUEST_HANDLER_CLASS = SocksWebSocketTestRequestHandler
|
||||||
|
|
||||||
|
def socks_info_request(self, handler, target_domain=None, target_port=None, **req_kwargs):
|
||||||
|
request = Request(f'ws://{target_domain or "127.0.0.1"}:{target_port or "40000"}', **req_kwargs)
|
||||||
|
handler.validate(request)
|
||||||
|
ws = handler.send(request)
|
||||||
|
ws.send('socks_info')
|
||||||
|
socks_info = ws.recv()
|
||||||
|
ws.close()
|
||||||
|
return json.loads(socks_info)
|
||||||
|
|
||||||
|
|
||||||
CTX_MAP = {
|
CTX_MAP = {
|
||||||
'http': HTTPSocksTestProxyContext,
|
'http': HTTPSocksTestProxyContext,
|
||||||
|
'ws': WebSocketSocksTestProxyContext,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -262,8 +288,14 @@ def ctx(request):
|
|||||||
return CTX_MAP[request.param]()
|
return CTX_MAP[request.param]()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'handler,ctx', [
|
||||||
|
('Urllib', 'http'),
|
||||||
|
('Requests', 'http'),
|
||||||
|
('Websockets', 'ws'),
|
||||||
|
('CurlCFFI', 'http'),
|
||||||
|
], indirect=True)
|
||||||
class TestSocks4Proxy:
|
class TestSocks4Proxy:
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_socks4_no_auth(self, handler, ctx):
|
def test_socks4_no_auth(self, handler, ctx):
|
||||||
with handler() as rh:
|
with handler() as rh:
|
||||||
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
||||||
@ -271,7 +303,6 @@ class TestSocks4Proxy:
|
|||||||
rh, proxies={'all': f'socks4://{server_address}'})
|
rh, proxies={'all': f'socks4://{server_address}'})
|
||||||
assert response['version'] == 4
|
assert response['version'] == 4
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_socks4_auth(self, handler, ctx):
|
def test_socks4_auth(self, handler, ctx):
|
||||||
with handler() as rh:
|
with handler() as rh:
|
||||||
with ctx.socks_server(Socks4ProxyHandler, user_id='user') as server_address:
|
with ctx.socks_server(Socks4ProxyHandler, user_id='user') as server_address:
|
||||||
@ -281,7 +312,6 @@ class TestSocks4Proxy:
|
|||||||
rh, proxies={'all': f'socks4://user:@{server_address}'})
|
rh, proxies={'all': f'socks4://user:@{server_address}'})
|
||||||
assert response['version'] == 4
|
assert response['version'] == 4
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_socks4a_ipv4_target(self, handler, ctx):
|
def test_socks4a_ipv4_target(self, handler, ctx):
|
||||||
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
||||||
with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
|
with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
|
||||||
@ -289,7 +319,6 @@ class TestSocks4Proxy:
|
|||||||
assert response['version'] == 4
|
assert response['version'] == 4
|
||||||
assert (response['ipv4_address'] == '127.0.0.1') != (response['domain_address'] == '127.0.0.1')
|
assert (response['ipv4_address'] == '127.0.0.1') != (response['domain_address'] == '127.0.0.1')
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_socks4a_domain_target(self, handler, ctx):
|
def test_socks4a_domain_target(self, handler, ctx):
|
||||||
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
||||||
with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
|
with handler(proxies={'all': f'socks4a://{server_address}'}) as rh:
|
||||||
@ -298,17 +327,16 @@ class TestSocks4Proxy:
|
|||||||
assert response['ipv4_address'] is None
|
assert response['ipv4_address'] is None
|
||||||
assert response['domain_address'] == 'localhost'
|
assert response['domain_address'] == 'localhost'
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_ipv4_client_source_address(self, handler, ctx):
|
def test_ipv4_client_source_address(self, handler, ctx):
|
||||||
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
with ctx.socks_server(Socks4ProxyHandler) as server_address:
|
||||||
source_address = f'127.0.0.{random.randint(5, 255)}'
|
source_address = f'127.0.0.{random.randint(5, 255)}'
|
||||||
|
verify_address_availability(source_address)
|
||||||
with handler(proxies={'all': f'socks4://{server_address}'},
|
with handler(proxies={'all': f'socks4://{server_address}'},
|
||||||
source_address=source_address) as rh:
|
source_address=source_address) as rh:
|
||||||
response = ctx.socks_info_request(rh)
|
response = ctx.socks_info_request(rh)
|
||||||
assert response['client_address'][0] == source_address
|
assert response['client_address'][0] == source_address
|
||||||
assert response['version'] == 4
|
assert response['version'] == 4
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
@pytest.mark.parametrize('reply_code', [
|
@pytest.mark.parametrize('reply_code', [
|
||||||
Socks4CD.REQUEST_REJECTED_OR_FAILED,
|
Socks4CD.REQUEST_REJECTED_OR_FAILED,
|
||||||
Socks4CD.REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD,
|
Socks4CD.REQUEST_REJECTED_CANNOT_CONNECT_TO_IDENTD,
|
||||||
@ -320,7 +348,6 @@ class TestSocks4Proxy:
|
|||||||
with pytest.raises(ProxyError):
|
with pytest.raises(ProxyError):
|
||||||
ctx.socks_info_request(rh)
|
ctx.socks_info_request(rh)
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_ipv6_socks4_proxy(self, handler, ctx):
|
def test_ipv6_socks4_proxy(self, handler, ctx):
|
||||||
with ctx.socks_server(Socks4ProxyHandler, bind_ip='::1') as server_address:
|
with ctx.socks_server(Socks4ProxyHandler, bind_ip='::1') as server_address:
|
||||||
with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
|
with handler(proxies={'all': f'socks4://{server_address}'}) as rh:
|
||||||
@ -329,7 +356,6 @@ class TestSocks4Proxy:
|
|||||||
assert response['ipv4_address'] == '127.0.0.1'
|
assert response['ipv4_address'] == '127.0.0.1'
|
||||||
assert response['version'] == 4
|
assert response['version'] == 4
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_timeout(self, handler, ctx):
|
def test_timeout(self, handler, ctx):
|
||||||
with ctx.socks_server(Socks4ProxyHandler, sleep=2) as server_address:
|
with ctx.socks_server(Socks4ProxyHandler, sleep=2) as server_address:
|
||||||
with handler(proxies={'all': f'socks4://{server_address}'}, timeout=0.5) as rh:
|
with handler(proxies={'all': f'socks4://{server_address}'}, timeout=0.5) as rh:
|
||||||
@ -337,9 +363,15 @@ class TestSocks4Proxy:
|
|||||||
ctx.socks_info_request(rh)
|
ctx.socks_info_request(rh)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'handler,ctx', [
|
||||||
|
('Urllib', 'http'),
|
||||||
|
('Requests', 'http'),
|
||||||
|
('Websockets', 'ws'),
|
||||||
|
('CurlCFFI', 'http'),
|
||||||
|
], indirect=True)
|
||||||
class TestSocks5Proxy:
|
class TestSocks5Proxy:
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_socks5_no_auth(self, handler, ctx):
|
def test_socks5_no_auth(self, handler, ctx):
|
||||||
with ctx.socks_server(Socks5ProxyHandler) as server_address:
|
with ctx.socks_server(Socks5ProxyHandler) as server_address:
|
||||||
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
|
with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
|
||||||
@ -347,7 +379,6 @@ class TestSocks5Proxy:
|
|||||||
assert response['auth_methods'] == [0x0]
|
assert response['auth_methods'] == [0x0]
|
||||||
assert response['version'] == 5
|
assert response['version'] == 5
|
||||||
|
|
||||||
@pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
|
|
||||||
def test_socks5_user_pass(self, handler, ctx):
|
def test_socks5_user_pass(self, handler, ctx):
|
||||||
with ctx.socks_server(Socks5ProxyHandler, auth=('test', 'testpass')) as server_address:
|
with ctx.socks_server(Socks5ProxyHandler, auth=('test', 'testpass')) as server_address:
|
||||||
with handler() as rh:
|
with handler() as rh:
@@ -360,7 +391,6 @@ class TestSocks5Proxy:
                 assert response['auth_methods'] == [Socks5Auth.AUTH_NONE, Socks5Auth.AUTH_USER_PASS]
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_socks5_ipv4_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -368,7 +398,6 @@ class TestSocks5Proxy:
                 assert response['ipv4_address'] == '127.0.0.1'
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_socks5_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -376,7 +405,6 @@ class TestSocks5Proxy:
                 assert (response['ipv4_address'] == '127.0.0.1') != (response['ipv6_address'] == '::1')
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_socks5h_domain_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
@@ -385,7 +413,6 @@ class TestSocks5Proxy:
                 assert response['domain_address'] == 'localhost'
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_socks5h_ip_target(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5h://{server_address}'}) as rh:
@@ -394,7 +421,6 @@ class TestSocks5Proxy:
                 assert response['domain_address'] is None
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_socks5_ipv6_destination(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -402,7 +428,6 @@ class TestSocks5Proxy:
                 assert response['ipv6_address'] == '::1'
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_ipv6_socks5_proxy(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler, bind_ip='::1') as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}) as rh:
@@ -413,16 +438,15 @@
 
     # XXX: is there any feasible way of testing IPv6 source addresses?
    # Same would go for non-proxy source_address test...
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_ipv4_client_source_address(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler) as server_address:
             source_address = f'127.0.0.{random.randint(5, 255)}'
+            verify_address_availability(source_address)
             with handler(proxies={'all': f'socks5://{server_address}'}, source_address=source_address) as rh:
                 response = ctx.socks_info_request(rh)
                 assert response['client_address'][0] == source_address
                 assert response['version'] == 5
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     @pytest.mark.parametrize('reply_code', [
         Socks5Reply.GENERAL_FAILURE,
         Socks5Reply.CONNECTION_NOT_ALLOWED,
@@ -439,7 +463,6 @@ class TestSocks5Proxy:
                 with pytest.raises(ProxyError):
                     ctx.socks_info_request(rh)
 
-    @pytest.mark.parametrize('handler,ctx', [('Urllib', 'http')], indirect=True)
     def test_timeout(self, handler, ctx):
         with ctx.socks_server(Socks5ProxyHandler, sleep=2) as server_address:
             with handler(proxies={'all': f'socks5://{server_address}'}, timeout=1) as rh:
test/test_subtitles.py
@@ -23,7 +23,6 @@ from yt_dlp.extractor import (
     TedTalkIE,
     ThePlatformFeedIE,
     ThePlatformIE,
-    VikiIE,
     VimeoIE,
     WallaIE,
     YoutubeIE,
@@ -40,12 +39,11 @@ class BaseTestSubtitles(unittest.TestCase):
         self.ie = self.IE()
         self.DL.add_info_extractor(self.ie)
         if not self.IE.working():
-            print('Skipping: %s marked as not _WORKING' % self.IE.ie_key())
+            print(f'Skipping: {self.IE.ie_key()} marked as not _WORKING')
             self.skipTest('IE marked as not _WORKING')
 
     def getInfoDict(self):
-        info_dict = self.DL.extract_info(self.url, download=False)
-        return info_dict
+        return self.DL.extract_info(self.url, download=False)
 
     def getSubtitles(self):
         info_dict = self.getInfoDict()
@@ -87,7 +85,7 @@ class TestYoutubeSubtitles(BaseTestSubtitles):
         self.assertEqual(md5(subtitles['en']), 'ae1bd34126571a77aabd4d276b28044d')
         self.assertEqual(md5(subtitles['it']), '0e0b667ba68411d88fd1c5f4f4eab2f9')
         for lang in ['fr', 'de']:
-            self.assertTrue(subtitles.get(lang) is not None, 'Subtitles for \'%s\' not extracted' % lang)
+            self.assertTrue(subtitles.get(lang) is not None, f'Subtitles for \'{lang}\' not extracted')
 
     def _test_subtitles_format(self, fmt, md5_hash, lang='en'):
         self.DL.params['writesubtitles'] = True
@@ -157,7 +155,7 @@ class TestDailymotionSubtitles(BaseTestSubtitles):
         self.assertEqual(md5(subtitles['en']), '976553874490cba125086bbfea3ff76f')
         self.assertEqual(md5(subtitles['fr']), '594564ec7d588942e384e920e5341792')
         for lang in ['es', 'fr', 'de']:
-            self.assertTrue(subtitles.get(lang) is not None, 'Subtitles for \'%s\' not extracted' % lang)
+            self.assertTrue(subtitles.get(lang) is not None, f'Subtitles for \'{lang}\' not extracted')
 
     def test_nosubtitles(self):
         self.DL.expect_warning('video doesn\'t have subtitles')
@@ -182,7 +180,7 @@ class TestTedSubtitles(BaseTestSubtitles):
         self.assertEqual(md5(subtitles['en']), '4262c1665ff928a2dada178f62cb8d14')
         self.assertEqual(md5(subtitles['fr']), '66a63f7f42c97a50f8c0e90bc7797bb5')
         for lang in ['es', 'fr', 'de']:
-            self.assertTrue(subtitles.get(lang) is not None, 'Subtitles for \'%s\' not extracted' % lang)
+            self.assertTrue(subtitles.get(lang) is not None, f'Subtitles for \'{lang}\' not extracted')
 
 
 @is_download_test
@@ -332,20 +330,6 @@ class TestRaiPlaySubtitles(BaseTestSubtitles):
         self.assertEqual(md5(subtitles['it']), '4b3264186fbb103508abe5311cfcb9cd')
 
 
-@is_download_test
-@unittest.skip('IE broken - DRM only')
-class TestVikiSubtitles(BaseTestSubtitles):
-    url = 'http://www.viki.com/videos/1060846v-punch-episode-18'
-    IE = VikiIE
-
-    def test_allsubtitles(self):
-        self.DL.params['writesubtitles'] = True
-        self.DL.params['allsubtitles'] = True
-        subtitles = self.getSubtitles()
-        self.assertEqual(set(subtitles.keys()), {'en'})
-        self.assertEqual(md5(subtitles['en']), '53cb083a5914b2d84ef1ab67b880d18a')
-
-
 @is_download_test
 class TestThePlatformSubtitles(BaseTestSubtitles):
     # from http://www.3playmedia.com/services-features/tools/integrations/theplatform/
Some files were not shown because too many files have changed in this diff.