Mirror of https://git.deluge-torrent.org/deluge (synced 2025-08-02 22:48:40 +00:00)

Compare commits: deluge-2.0...develop (782 commits)
[Commit list omitted: the compare view listed only the abbreviated SHA-1 of each of the 782 commits; the author, date, and message columns were not captured.]
1087 changed files with 485051 additions and 279093 deletions

.gitattributes (vendored): 2 changed lines
@@ -2,3 +2,5 @@
.gitmodules export-ignore
.gitignore export-ignore
*.py diff=python
ext-all.js diff=minjs
*.state -merge -text

.github/workflows/cd.yml (vendored, new file): 104 lines
@@ -0,0 +1,104 @@
name: Package

on:
  push:
    tags:
      - "deluge-*"
      - "!deluge*-dev*"
    branches:
      - develop
  pull_request:
    types: [labeled, opened, synchronize, reopened]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
    inputs:
      ref:
        description: "Enter a tag or commit to package"
        default: ""

jobs:
  windows_package:
    runs-on: windows-2022
    if: (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'package'))
    strategy:
      matrix:
        arch: [x64, x86]
        python: ["3.9"]
        libtorrent: [2.0.7, 1.2.19]

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # Checkout Deluge source to subdir to enable packaging any tag/commit
      - name: Checkout Deluge source
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.ref }}
          fetch-depth: 0
          path: deluge_src

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python}}
          architecture: ${{ matrix.arch }}
          cache: pip

      - name: Prepare pip
        run: python -m pip install wheel setuptools==68.*

      - name: Install GTK
        run: |
          $WebClient = New-Object System.Net.WebClient
          $WebClient.DownloadFile("https://github.com/deluge-torrent/gvsbuild-release/releases/download/latest/gvsbuild-py${{ matrix.python }}-vs16-${{ matrix.arch }}.zip","C:\GTK.zip")
          7z x C:\GTK.zip -oc:\GTK
          echo "C:\GTK\release\lib" | Out-File -FilePath $env:GITHUB_PATH -Append
          echo "C:\GTK\release\bin" | Out-File -FilePath $env:GITHUB_PATH -Append
          echo "C:\GTK\release" | Out-File -FilePath $env:GITHUB_PATH -Append
          python -m pip install --no-index --find-links="C:\GTK\release\python" pycairo PyGObject

      - name: Install Python dependencies
        # Pillow no longer provides 32-bit wheels for Windows
        # so specify only-binary to install old version.
        run: >
          python -m pip install
          --only-binary=pillow
          twisted[tls]==22.8.0
          libtorrent==${{ matrix.libtorrent }}
          pyinstaller
          pygame
          -r requirements.txt

      - name: Install Deluge
        working-directory: deluge_src
        run: |
          python -m pip install .
          python setup.py install_scripts

      - name: Freeze Deluge
        working-directory: packaging/win
        run: |
          pyinstaller --clean delugewin.spec --distpath freeze

      - name: Verify Deluge exes
        working-directory: packaging/win/freeze/Deluge/
        run: |
          deluge-debug.exe -v
          deluged-debug.exe -v
          deluge-web-debug.exe -v
          deluge-console -v

      - name: Make Deluge Installer
        working-directory: ./packaging/win
        run: |
          python setup_nsis.py
          makensis /Darch=${{ matrix.arch }} deluge-win-installer.nsi

      - uses: actions/upload-artifact@v4
        with:
          name: deluge-py${{ matrix.python }}-lt${{ matrix.libtorrent }}-${{ matrix.arch }}
          path: packaging/win/*.exe

.github/workflows/ci.yml (vendored, new file): 101 lines
@@ -0,0 +1,101 @@
name: CI

on:
  push:
  pull_request:

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
    inputs:
      core-dump:
        description: "Set to 1 to enable retrieving core dump from crashes"
        default: "0"
jobs:
  test-linux:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: ["3.7", "3.10"]

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: "requirements*.txt"

      - name: Sets env var for security
        if: (github.event_name == 'pull_request' && contains(github.event.pull_request.body, 'security_test')) || (github.event_name == 'push' && contains(github.event.head_commit.message, 'security_test'))
        run: echo "SECURITY_TESTS=True" >> $GITHUB_ENV

      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel setuptools
          pip install -r requirements-ci.txt
          pip install -e .

      - name: Install security dependencies
        if: contains(env.SECURITY_TESTS, 'True')
        run: |
          wget -O- $TESTSSL_URL$TESTSSL_VER | tar xz
          mv -t deluge/tests/data testssl.sh-$TESTSSL_VER/testssl.sh testssl.sh-$TESTSSL_VER/etc/;
        env:
          TESTSSL_VER: 3.0.6
          TESTSSL_URL: https://codeload.github.com/drwetter/testssl.sh/tar.gz/refs/tags/v

      - name: Setup core dump catch and store
        if: github.event.inputs.core-dump == '1'
        run: |
          sudo mkdir /cores/ && sudo chmod 777 /cores/
          echo "/cores/%E.%p" | sudo tee /proc/sys/kernel/core_pattern
          ulimit -c unlimited
          sudo apt install glibc-tools
          echo "DEBUG_PREFIX=catchsegv python -X dev -m" >> $GITHUB_ENV

      - name: Test with pytest
        run: |
          python -c 'from deluge._libtorrent import lt; print(lt.__version__)';
          $DEBUG_PREFIX pytest -v -m "not (todo or gtkui)" deluge

      - uses: actions/upload-artifact@v4
        # capture all crashes as build artifacts
        if: failure()
        with:
          name: crashes
          path: /cores

  test-windows:
    runs-on: windows-2022
    strategy:
      matrix:
        python-version: ["3.7", "3.10"]

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: "requirements*.txt"

      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel setuptools
          pip install -r requirements-ci.txt
          pip install -e .

      - name: Test with pytest
        run: |
          python -c 'import libtorrent as lt; print(lt.__version__)';
          pytest -v -m "not (todo or gtkui or security)" deluge

.github/workflows/docs.yml (vendored, new file): 38 lines
@@ -0,0 +1,38 @@
name: Docs

# Controls when the action will run.
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
  pull_request:

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
          cache: "pip"
          cache-dependency-path: "requirements*.txt"

      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel
          pip install tox
          sudo apt-get install enchant-2

      - name: Build docs with tox
        env:
          TOX_ENV: docs
        run: |
          tox -e $TOX_ENV

.github/workflows/lint.yml (vendored, new file): 17 lines
@@ -0,0 +1,17 @@
name: Linting

on:
  push:
  pull_request:

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - name: Run pre-commit linting
        uses: pre-commit/action@v3.0.1

.gitignore (vendored): 18 changed lines
@@ -2,18 +2,24 @@
build
.cache
dist
docs/source/modules
*egg-info
docs/source/modules/deluge*.rst
*.egg-info/
*.dist-info/
*.egg
*.log
*.pyc
__pycache__/
*.py[cod]
*.tar.*
_trial_temp
.tox/
deluge/i18n/*/
deluge.pot
deluge/ui/web/js/*.js
deluge/ui/web/js/extjs/ext-extensions*.js
*.desktop
*.appdata.xml
*.metainfo.xml
.build_data*
osx/app
RELEASE-VERSION
.venv*
# used by setuptools to cache downloaded eggs
/.eggs
_pytest_temp/

.pre-commit-config.yaml (new file): 41 lines
@@ -0,0 +1,41 @@
default_language_version:
  python: python3
exclude: >
  (?x)^(
      deluge/ui/web/docs/template/.*|
      deluge/tests/data/.*svg|
  )$
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.6.4
    hooks:
      - id: ruff
        name: Chk Ruff
        args: [--fix]
      - id: ruff-format
        name: Fmt Ruff
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v2.7.1
    hooks:
      - id: prettier
        name: Fmt Prettier
        # Workaround to list modified files only.
        args: [--list-different]
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: end-of-file-fixer
        name: Fix End-of-files
        exclude_types: [javascript, css]
      - id: mixed-line-ending
        name: Fix Line endings
        args: [--fix=auto]
      - id: trailing-whitespace
        name: Fix Trailing whitespace
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.3.1
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
        stages: [manual]

.prettierignore (new file): 6 lines
@@ -0,0 +1,6 @@
deluge/ui/web/css/ext-*.css
deluge/ui/web/js/extjs/ext-*.js
deluge/ui/web/docs/
deluge/ui/web/themes/images/
*.py*
*.html

.prettierrc.yaml (new file): 13 lines
@@ -0,0 +1,13 @@
trailingComma: "es5"
tabWidth: 4
singleQuote: true
overrides:
  - files:
      - "*.yaml"
      - ".*.yaml"
      - "*.yml"
      - ".*.yml"
      - "*.md"
    options:
      tabWidth: 2
      singleQuote: false

@@ -69,7 +69,7 @@ confidence=
# Arranged by category and use symbolic names instead of ids.
disable=
    # Convention
    missing-docstring, invalid-name,
    missing-docstring, invalid-name, bad-continuation,
    # Error
    no-member, no-name-in-module,
    # Information

@@ -289,7 +289,7 @@ callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins,future_builtins
redefining-builtins-modules=

[TYPECHECK]

@@ -359,11 +359,6 @@ known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant

# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no

[DESIGN]

.readthedocs.yml (new file): 29 lines
@@ -0,0 +1,29 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.10"
  jobs:
    post_checkout:
      - git fetch --unshallow || true

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/source/conf.py

# Optionally build your docs in additional formats such as PDF and ePub
formats: all

# Optionally set the version of Python and requirements required to build your docs
python:
  install:
    - requirements: requirements.txt
    - requirements: docs/requirements.txt
    - method: pip
      path: .

.travis.yml (deleted): 51 lines
@@ -1,51 +0,0 @@
dist: trusty
sudo: required
group: deprecated-2017Q2

language: python

python:
  - "2.7"

cache: pip

before_install:
  - lsb_release -a
  - sudo add-apt-repository ppa:deluge-team/develop -y
  - sudo apt-get update

# command to install dependencies
install:
  - bash -c "echo $APTPACKAGES"
  - sudo apt-get install $APTPACKAGES
  - pip install "tox==2.1.1"

env:
  global:
    - APTPACKAGES="python-libtorrent"
    - APTPACKAGES_GTKUI="python-gobject python-glade2"
    - DISPLAY=:99.0
  matrix:
    - TOX_ENV=pydef
    - TOX_ENV=flake8
    # - TOX_ENV=flake8-complexity
    - TOX_ENV=docs
    # - TOX_ENV=todo
    - TOX_ENV=trial APTPACKAGES="$APTPACKAGES $APTPACKAGES_GTKUI"
    - TOX_ENV=pygtkui APTPACKAGES="$APTPACKAGES $APTPACKAGES_GTKUI"
    # - TOX_ENV=testcoverage APTPACKAGES="$APTPACKAGES $APTPACKAGES_GTKUI"
    - TOX_ENV=plugins

virtualenv:
  system_site_packages: true

# We use xvfb for the GTKUI tests
before_script:
  - export PYTHONPATH=$PYTHONPATH:$PWD
  - python -c "import libtorrent as lt; print lt.__version__"
  - "/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1280x1024x16"
  - echo '2.0.0.dev0' > RELEASE-VERSION

script:
  - bash -c "echo $DISPLAY"
  - tox -e $TOX_ENV

AUTHORS: 17 changed lines
@@ -39,14 +39,9 @@ Images Authors:
 * files: deluge/ui/data/pixmaps/*.svg, *.png
          deluge/ui/web/icons/active.png, alert.png, all.png, checking.png, dht.png,
          downloading.png, inactive.png, queued.png, seeding.png, traffic.png
   exceptions: deluge/ui/data/pixmaps/deluge.svg and derivatives
   copyright: Andrew Resch
   license: GPLv3

 * files: deluge/ui/data/pixmaps/deluge.svg and derivatives
          deluge/ui/web/icons/apple-pre-*.png, deluge*.png
          deluge/ui/web/images/deluge*.png
   copyright: Andrew Wedderburn
          deluge/ui/web/icons/apple-pre-*.png, deluge*.png
   copyright: Calum Lind
   license: GPLv3

 * files: deluge/plugins/blocklist/blocklist/data/*.png

@@ -55,11 +50,9 @@ Images Authors:
   license: GPLv2
   url: http://ftp.acc.umu.se/pub/GNOME/sources/gnome-icon-theme

 * files: deluge/ui/data/pixmaps/magnet.png
   copyright: Woothemes
   license: Freeware
   icon pack: WP Woothemes Ultimate
   url: http://www.woothemes.com/
 * files: deluge/ui/data/pixmaps/magnet*.svg, *.png
   copyright: Matias Wilkman
   license:

 * files: deluge/ui/data/pixmaps/flags/*.png
   copyright: Mark James <mjames@gmail.com>

CHANGELOG.md (new file): 312 lines
@@ -0,0 +1,312 @@
# Changelog

## 2.1.x (TBA)

### Breaking changes

- Removed Python 3.6 support (Python >= 3.7)

### Core

- Fix GHSL-2024-189 - insecure HTTP for new version check.
- Fix alert handler segfault.
- Add support for creating v2 torrents.

### GTK UI

- Fix changing torrent ownership.
- Fix upper limit of upload/download in Add Torrent dialog.
- Fix #3339 - Resizing window crashes with Piecesbar or Stats plugin.
- Fix #3350 - Unable to use quick search.
- Fix #3598 - Missing AppIndicator option in Preferences.
- Set Appindicator as default for tray icon on Linux.
- Add feature to switch between dark/light themes.

### Web UI

- Fix GHSL-2024-191 - potential flag endpoint path traversal.
- Fix GHSL-2024-188 - js script dir traversal vulnerability.
- Fix GHSL-2024-190 - insecure tracker icon endpoint.
- Fix unable to stop daemon in connection manager.
- Fix responsiveness to avoid "Connection lost".
- Add support for network interface name as well as IP address.
- Add ability to change UI theme.

### Console UI

- Fix 'rm' and 'move' commands hanging when done.
- Fix #3538 - Unable to add host in connection manager.
- Disable interactive-mode on Windows.

### UI library

- Fix tracker icon display by converting to png format.
- Fix splitting trackers by newline
- Add clickable URLs for torrent comment and tracker status.

### Label

- Fix torrent deletion not removed from config.
- Fix label display name in submenu.

### AutoAdd

- Fix #3515 - Torrent file decoding errors disabled watch folder.

## 2.1.1 (2022-07-10)

### Core

- Fix missing trackers added via magnet
- Fix handling magnets with tracker tiers

## 2.1.0 (2022-06-28)

### Breaking changes

- Python 2 support removed (Python >= 3.6)
- libtorrent minimum requirement increased (>= 1.2).

### Core

- Add support for SVG tracker icons.
- Fix tracker icon error handling.
- Fix cleaning-up tracker icon temp files.
- Fix Plugin manager to handle new metadata 2.1.
- Hide passwords in config logs.
- Fix cleaning-up temp files in add_torrent_url.
- Fix KeyError in sessionproxy after torrent delete.
- Remove libtorrent deprecated functions.
- Fix file_completed_alert handling.
- Add plugin keys to get_torrents_status.
- Add support for pygeoip dependency.
- Fix crash logging to Windows protected folder.
- Add is_interface and is_interface_name to validate network interfaces.
- Fix is_url and is_infohash error with None value.
- Fix load_libintl error.
- Add support for IPv6 in host lists.
- Add systemd user services.
- Fix refresh and expire the torrent status cache.
- Fix crash when logging errors initializing gettext.

### Web UI

- Fix ETA column sorting in correct order (#3413).
- Fix defining foreground and background colors.
- Accept charset in content-type for json messages.
- Fix 'Complete Seen' and 'Completed' sorting.
- Fix encoding HTML entities for torrent attributes to prevent XSS.

### Gtk UI

- Fix download location textbox width.
- Fix obscured port number in Connection Manager.
- Increase connection manager default height.
- Fix bug with setting move completed in Options tab.
- Fix adding daemon accounts.
- Add workaround for crash on Windows with ico or gif icons.
- Hide account password length in log.
- Added a torrent menu option for magnet copy.
- Fix unable to prefetch magnet in thinclient mode.
- Use GtkSpinner when testing open port.
- Update About Dialog year.
- Fix Edit Torrents dialogs close issues.
- Fix ETA being copied to neighboring empty cells.
- Disable GTK CSD by default on Windows.

### Console UI

- Fix curses.init_pair raise ValueError on Py3.10.
- Swap j and k key's behavior to fit vim mode.
- Fix torrent details status error.
- Fix incorrect test for when a host is online.
- Add the torrent label to info command.

### AutoAdd

- Fix handling torrent decode errors.
- Fix error dialog not being shown on error.

### Blocklist

- Add frequency unit to interval label.

### Notifications

- Fix UnicodeEncodeError upon non-ascii torrent name.

## 2.0.5 (2021-12-15)

### WebUI

- Fix js minifying error resulting in WebUI blank screen.
- Silence erronous missing translations warning.

## 2.0.4 (2021-12-12)

### Packaging

- Fix python optional setup.py requirements

### Gtk UI

- Add detection of torrent URL on GTK UI focus
- Fix piecesbar crashing when enabled
- Remove num_blocks_cache_hits in stats
- Fix unhandled error with empty clipboard
- Add torrentdetails tabs position menu (#3441)
- Hide pygame community banner in console
- Fix cmp function for None types (#3309)
- Fix loading config with double-quotes in string
- Fix Status tab download speed and uploaded

### Web UI

- Handle torrent add failures
- Add menu option to copy magnet URI
- Fix md5sums in torrent files breaking file listing (#3388)
- Add country flag alt/title for accessibility

### Console UI

- Fix allowing use of windows-curses on Windows
- Fix hostlist status lookup errors
- Fix AttributeError setting config values
- Fix setting 'Skip' priority

### Core

- Add workaround libtorrent 2.0 file_progress error
- Fix allow enabling any plugin Python version
- Export torrent get_magnet_uri method
- Fix loading magnet with resume_data and no metadata (#3478)
- Fix httpdownloader reencoding torrent file downloads (#3440)
- Fix lt listen_interfaces not comma-separated (#3337)
- Fix unable to remove magnet with delete_copies enabled (#3325)
- Fix Python 3.8 compatibility
- Fix loading config with double-quotes in string
- Fix pickle loading non-ascii state error (#3298)
- Fix creation of pidfile via command option
- Fix for peer.client UnicodeDecodeError
- Fix show_file unhandled dbus error

### Documentation

- Add How-to guides about services.

### Stats plugin

- Fix constant session status key warnings
- Fix cairo error

### Notifications plugin

- Fix email KeyError with status name
- Fix unhandled TypeErrors on Python 3

### Autoadd plugin

- Fix magnet missing applied labels

### Execute plugin

- Fix failing to run on Windows (#3439)

## 2.0.3 (2019-06-12)

### Gtk UI

- Fix errors running on Wayland (#3265).
- Fix Peers Tab tooltip and context menu errors (#3266).

### Web UI

- Fix TypeError in Peers Tab setting country flag.
- Fix reverse proxy header TypeError (#3260).
- Fix request.base 'idna' codec error (#3261).
- Fix unable to change password (#3262).

### Extractor plugin

- Fix potential error starting plugin.

### Documentation

- Fix macOS install typo.
- Fix Windows install instructions.

## 2.0.2 (2019-06-08)

### Packaging

- Add systemd deluged and deluge-web service files to package tarball (#2034)

### Core

- Fix Python 2 compatibility issue with SimpleNamespace.

## 2.0.1 (2019-06-07)

### Packaging

- Fix `setup.py` build error without git installed.

## 2.0.0 (2019-06-06)

### Codebase

- Ported to Python 3

### Core

- Improved Logging
- Removed the AutoAdd feature on the core. It's now handled with the AutoAdd
  plugin, which is also shipped with Deluge, and it does a better job and
  now, it even supports multiple users perfectly.
- Authentication/Permission exceptions are now sent to clients and recreated
  there to allow acting upon them.
- Updated SSL/TLS Protocol parameters for better security.
- Make the distinction between adding to the session new unmanaged torrents
  and torrents loaded from state. This will break backwards compatibility.
- Pass a copy of an event instead of passing the event arguments to the
  event handlers. This will break backwards compatibility.
- Allow changing ownership of torrents.
- File modifications on the auth file are now detected and when they happen,
  the file is reloaded. Upon finding an old auth file with an old format, an
  upgrade to the new format is made, file saved, and reloaded.
- Authentication no longer requires a username/password. If one or both of
  these is missing, an authentication error will be sent to the client
  which should then ask the username/password to the user.
- Implemented sequential downloads.
- Provide information about a torrent's pieces states
- Add Option To Specify Outgoing Connection Interface.
- Fix potential for host_id collision when creating hostlist entries.

### Gtk UI

- Ported to GTK3 (3rd-party plugins will need updated).
- Allow changing ownership of torrents.
- Host entries in the Connection Manager UI are now editable.
- Implemented sequential downloads UI handling.
- Add optional pieces bar instead of a regular progress bar in torrent status tab.
- Make torrent opening compatible with all Unicode paths.
- Fix magnet association button on Windows.
- Add keyboard shortcuts for changing queue position:
  - Up: `Ctrl+Alt+Up`
  - Down: `Ctrl+Alt+Down`
  - Top: `Ctrl+Alt+Shift+Up`
  - Bottom: `Ctrl+Alt+Shift+Down`

### Web UI

- Server (deluge-web) now daemonizes by default, use '-d' or '--do-not-daemonize' to disable.
- Fixed the '--base' option to work for regular use, not just with reverse proxies.

### Blocklist Plugin

- Implemented whitelist support to both core and GTK UI.
- Implemented IP filter cleaning before each update. Restarting the deluge
  daemon is no longer needed.
- If "check_after_days" is 0(zero), the timer is not started anymore. It
  would keep updating one call after the other. If the value changed, the
  timer is now stopped and restarted using the new value.

ChangeLog (deleted): 50 lines
@@ -1,50 +0,0 @@
=== Deluge 2.0 (In Development) ===

 * Improved Logging
 * Removed the AutoAdd feature on the core. It's now handled with the AutoAdd
   plugin, which is also shipped with Deluge, and it does a better job and
   now, it even supports multiple users perfectly.
 * Authentication/Permission exceptions are now sent to clients and recreated
   there to allow acting upon them.
 * Enforced the use of the "deluge.plugins" namespace to reduce package
   names clashing beetween regular packages and deluge plugins.

==== Core ====
 * Make the distinction between adding to the session new unmanaged torrents
   and torrents loaded from state. This will break backwards compatability.
 * Pass a copy of an event instead of passing the event arguments to the
   event handlers. This will break backwards compatability.
 * Allow changing ownership of torrents.
 * File modifications on the auth file are now detected and when they happen,
   the file is reloaded. Upon finding an old auth file with an old format, an
   upgrade to the new format is made, file saved, and reloaded.
 * Authentication no longer requires a username/password. If one or both of
   these is missing, an authentication error will be sent to the client
   which sould then ask the username/password to the user.
 * Implemented sequential downloads.
 * Provide information about a torrent's pieces states

==== GtkUI ====
 * Allow changing ownership of torrents.
 * Host entries in the Connection Manager UI are now editable.
 * Implemented sequential downloads UI handling.
 * Add optional pieces bar instead of a regular progress bar in torrent status tab.
 * Make torrent opening compatible with all unicode paths.
 * Fix magnet association button on Windows.
 * Add keyboard shortcuts for changing queue position:
   - Up: Ctrl+Alt+Up
   - Down: Ctrl+Alt+Down
   - Top: Ctrl+Alt+Shift+Up
   - Bottom: Ctrl+Alt+Shift+Down

==== WebUI ====
 * Server (deluge-web) now daemonizes by default, use '-d' or '--do-not-daemonize' to disable.
 * Fixed the '--base' option to work for regular use, not just with reverse proxies.

==== Blocklist Plugin ====
 * Implemented whitelist support to both core and GTK UI.
 * Implemented ip filter cleaning before each update. Restarting the deluge
   daemon is no longer needed.
 * If "check_after_days" is 0(zero), the timer is not started anymore. It
   would keep updating one call after the other. If the value changed, the
   timer is now stopped and restarted using the new value.

DEPENDS (deleted): 29 lines
@@ -1,29 +0,0 @@
=== Core ===
 * libtorrent (rasterbar) >= 1.1.1
 * python >= 2.7.7
 * setuptools
 * twisted >= 11.1
 * pyopenssl
 * pyxdg
 * chardet
 * gettext
 * python-geoip (optional)
 * geoip-database (optional)
 * setproctitle (optional)
 * pillow (optional)
 * py2-ipaddress (optional, required for Windows IPv6)
 * rencode >= 1.0.2 (optional), python port bundled.


=== Gtk UI ===
 * pygtk >= 2.16
 * librsvg
 * xdg-utils
 * intltool
 * python-notify (optional)
 * pygame (optional)
 * python-appindicator (optional)

=== Web UI ===
 * mako
 * slimit (optional), minifies JS files.

DEPENDS.md (new file): 100 lines
@@ -0,0 +1,100 @@
# Deluge dependencies

The following are required to install and run Deluge. They are separated into
sections to distinguish the precise requirements for each module.

All modules will require the [common](#common) section dependencies.

## Prerequisite

- [Python] _>= 3.6_

## Build

- [setuptools]
- [intltool] - Optional: Desktop file translation for \*nix.
- [closure-compiler] - Minify javascript (alternative is [rjsmin])

## Common

- [Twisted] _>= 17.1_ - Use `TLS` extras for `service_identity` and `idna`.
- [OpenSSL] _>= 1.0.1_
- [pyOpenSSL]
- [rencode] _>= 1.0.2_ - Encoding library.
- [PyXDG] - Access freedesktop.org standards for \*nix.
- [xdg-utils] - Provides xdg-open for \*nix.
- [zope.interface]
- [chardet] - Optional: Encoding detection.
- [setproctitle] - Optional: Renaming processes.
- [Pillow] - Optional: Support for resizing tracker icons.
- [dbus-python] - Optional: Show item location in filemanager.
- [ifaddr] - Optional: Verify network interfaces.

### Linux and BSD

- [distro] - Optional: OS platform information.

### Windows OS

- [pywin32]
- [certifi]

## Core (deluged daemon)

- [libtorrent] _>= 1.2.0_
- [GeoIP] or [pygeoip] - Optional: IP address country lookup. (_Debian: `python-geoip`_)

## GTK UI

- [GTK+] >= 3.10
- [PyGObject]
- [Pycairo]
- [librsvg] _>= 2_
- [ayatanaappindicator3] w/GIR - Optional: Ubuntu system tray icon.

### MacOS

- [GtkOSXApplication]

## Web UI

- [mako]

## Plugins

### Notifications

- [pygame] - Optional: Play sounds
- [libnotify] w/GIR - Optional: Desktop popups.

[python]: https://www.python.org/
[setuptools]: https://setuptools.readthedocs.io/en/latest/
[intltool]: https://freedesktop.org/wiki/Software/intltool/
[closure-compiler]: https://developers.google.com/closure/compiler/
[rjsmin]: https://pypi.org/project/rjsmin/
[openssl]: https://www.openssl.org/
[pyopenssl]: https://pyopenssl.org
[twisted]: https://twistedmatrix.com
[pillow]: https://pypi.org/project/Pillow/
[libtorrent]: https://libtorrent.org/
[zope.interface]: https://pypi.org/project/zope.interface/
[distro]: https://github.com/nir0s/distro
[pywin32]: https://github.com/mhammond/pywin32
[certifi]: https://pypi.org/project/certifi/
[dbus-python]: https://pypi.org/project/dbus-python/
[setproctitle]: https://pypi.org/project/setproctitle/
[gtkosxapplication]: https://github.com/jralls/gtk-mac-integration
[chardet]: https://chardet.github.io/
[rencode]: https://github.com/aresch/rencode
[pyxdg]: https://www.freedesktop.org/wiki/Software/pyxdg/
[xdg-utils]: https://www.freedesktop.org/wiki/Software/xdg-utils/
[gtk+]: https://www.gtk.org/
[pycairo]: https://cairographics.org/pycairo/
[pygobject]: https://pygobject.readthedocs.io/en/latest/
[geoip]: https://pypi.org/project/GeoIP/
[mako]: https://www.makotemplates.org/
[pygame]: https://www.pygame.org/
[libnotify]: https://developer.gnome.org/libnotify/
[ayatanaappindicator3]: https://lazka.github.io/pgi-docs/AyatanaAppIndicator3-0.1/index.html
[librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg
[ifaddr]: https://pypi.org/project/ifaddr/

MANIFEST.in: 20 changed lines
@@ -1,23 +1,29 @@
include AUTHORS ChangeLog DEPENDS LICENSE RELEASE-VERSION README.rst
include msgfmt.py minify_web_js.py version.py
exclude setup.cfg
include *.md
include AUTHORS
include LICENSE
include RELEASE-VERSION
include msgfmt.py
include minify_web_js.py
include version.py
include gen_web_gettext.py

graft docs/man
graft packaging/systemd

include deluge/i18n/*.po
recursive-exclude deluge/i18n LC_MESSAGES *.mo
recursive-exclude deluge/i18n *.mo

graft deluge/plugins
recursive-exclude deluge/plugins create_dev_link.sh *.pyc *.egg
prune deluge/plugins/*/build
prune deluge/plugins/*/*.egg-info

graft deluge/tests/data
graft deluge/tests/twisted
graft deluge/tests/
recursive-exclude deluge/tests *.pyc

graft deluge/ui/data
recursive-exclude deluge/ui/data *.desktop *.xml
graft deluge/ui/gtkui/glade
graft deluge/ui/gtk3/glade

include deluge/ui/web/index.html
include deluge/ui/web/css/*.css

README.md (new file): 71 lines
@@ -0,0 +1,71 @@
# Deluge BitTorrent Client

[![build-status]][github-ci] [![docs-status]][rtd-deluge]

Deluge is a BitTorrent client that utilizes a daemon/client model.
It has various user interfaces available such as the GTK-UI, Web-UI and
Console-UI. It uses [libtorrent][lt] at its core to handle the BitTorrent
protocol.

## Install

From [PyPi](https://pypi.org/project/deluge):

    pip install deluge

with all optional dependencies:

    pip install deluge[all]

From source code:

    pip install .

with all optional dependencies:

    pip install .[all]

See [DEPENDS](DEPENDS.md) and [Installing/Source] for dependency details.

## Usage

The various user-interfaces and Deluge daemon can be started with the following commands.

Use the `--help` option for further command options.

### Gtk UI

`deluge` or `deluge-gtk`

### Console UI

`deluge-console`

### Web UI

`deluge-web`

Open http://localhost:8112 with default password `deluge`.

### Daemon

`deluged`

See the [Thinclient guide] to connect to the daemon from another computer.

## Contact

- [Homepage](https://deluge-torrent.org)
- [User guide][user guide]
- [Forum](https://forum.deluge-torrent.org)
- [IRC Libera.Chat #deluge](irc://irc.libera.chat/deluge)
- [Discord](https://discord.gg/nwaHSE6tqn)

[user guide]: https://dev.deluge-torrent.org/wiki/UserGuide
[thinclient guide]: https://dev.deluge-torrent.org/wiki/UserGuide/ThinClient
[installing/source]: https://dev.deluge-torrent.org/wiki/Installing/Source
[build-status]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml/badge.svg?branch=develop "CI"
[github-ci]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml
[docs-status]: https://readthedocs.org/projects/deluge/badge/?version=latest
[rtd-deluge]: https://deluge.readthedocs.io/en/latest/?badge=latest "Documentation Status"
[lt]: https://libtorrent.org

README.rst (deleted): 68 lines
@@ -1,68 +0,0 @@
=========================
 Deluge BitTorrent Client
=========================

|build-status| |docs|

Homepage: http://deluge-torrent.org

Authors:
    Andrew Resch
    Damien Churchill

For contributors and past developers see:
    AUTHORS

==========================
Installation Instructions:
==========================

For detailed instructions see: http://dev.deluge-torrent.org/wiki/Installing/Source

Ensure build dependencies are installed, see DEPENDS for a full listing.

Build and install by running::

    $ python setup.py build
    $ sudo python setup.py install

================
Contact/Support:
================

:Forum: http://forum.deluge-torrent.org
:IRC Channel: #deluge on irc.freenode.net

===
FAQ
===

For the full FAQ see: http://dev.deluge-torrent.org/wiki/Faq

How to start the various user-interfaces:

    Gtk::

        deluge or deluge-gtk

    Console::

        deluge-console

    Web::

        deluge-web
        Go to http://localhost:8112/ default-password = "deluge"

How do I start the daemon?:

    deluged

I can't connect to the daemon from another machine:

    See: http://dev.deluge-torrent.org/wiki/UserGuide/ThinClient


.. |build-status| image:: https://travis-ci.org/deluge-torrent/deluge.svg
    :target: https://travis-ci.org/deluge-torrent/deluge

.. |docs| image:: https://readthedocs.org/projects/deluge/badge/?version=develop
    :target: https://readthedocs.org/projects/deluge/?badge=develop
    :alt: Documentation Status

__builtins__.pyi (new file): 6 lines
@@ -0,0 +1,6 @@
from twisted.web.http import Request

__request__: Request

def _(string: str) -> str: ...
def _n(string: str) -> str: ...

@@ -1,7 +1 @@
"""Deluge"""
from __future__ import unicode_literals

# this is a namespace package
import pkg_resources

pkg_resources.declare_namespace(__name__)

@@ -1,33 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#

from __future__ import unicode_literals

from deluge.core.core import Core
from deluge.core.daemon import Daemon


class RpcApi(object):
    pass


def scan_for_methods(obj):
    methods = {
        '__doc__': 'Methods available in %s' % obj.__name__.lower()
    }
    for d in dir(obj):
        if not hasattr(getattr(obj, d), '_rpcserver_export'):
            continue
        methods[d] = getattr(obj, d)
    cobj = type(obj.__name__.lower(), (object,), methods)
    setattr(RpcApi, obj.__name__.lower(), cobj)


scan_for_methods(Core)
scan_for_methods(Daemon)
@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#

@ -15,16 +14,23 @@ Example:
    >>> from deluge._libtorrent import lt

"""
from __future__ import unicode_literals

from deluge.common import VersionSplit, get_version
from deluge.error import LibtorrentImportError

try:
    import deluge.libtorrent as lt
except ImportError:
    try:
        import libtorrent as lt
    except ImportError as ex:
        raise LibtorrentImportError('No libtorrent library found: %s' % (ex))

REQUIRED_VERSION = '1.1.2.0'

if VersionSplit(lt.__version__) < VersionSplit(REQUIRED_VERSION):
    raise ImportError('Deluge %s requires libtorrent >= %s' % (get_version(), REQUIRED_VERSION))
REQUIRED_VERSION = '1.2.0.0'
LT_VERSION = lt.__version__

if VersionSplit(LT_VERSION) < VersionSplit(REQUIRED_VERSION):
    raise LibtorrentImportError(
        f'Deluge {get_version()} requires libtorrent >= {REQUIRED_VERSION}'
    )
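The guard above means importing the module either succeeds with a compatible libtorrent or raises LibtorrentImportError. A minimal caller sketch under that assumption::

    from deluge.error import LibtorrentImportError

    try:
        # `lt` is the wrapped libtorrent module, LT_VERSION its version string.
        from deluge._libtorrent import LT_VERSION, lt
    except LibtorrentImportError as ex:
        # Raised when libtorrent is missing or older than REQUIRED_VERSION.
        print('libtorrent unavailable:', ex)
    else:
        print('Using libtorrent', LT_VERSION)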
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -7,8 +6,6 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
|
@ -86,15 +83,16 @@ argparse.ArgumentParser.find_subcommand = find_subcommand
|
|||
argparse.ArgumentParser.set_default_subparser = set_default_subparser
|
||||
|
||||
|
||||
def get_version():
|
||||
def _get_version_detail():
|
||||
version_str = '%s\n' % (common.get_version())
|
||||
try:
|
||||
from deluge._libtorrent import lt
|
||||
version_str += 'libtorrent: %s\n' % lt.__version__
|
||||
from deluge._libtorrent import LT_VERSION
|
||||
|
||||
version_str += 'libtorrent: %s\n' % LT_VERSION
|
||||
except ImportError:
|
||||
pass
|
||||
version_str += 'Python: %s\n' % platform.python_version()
|
||||
version_str += 'OS: %s %s\n' % (platform.system(), ' '.join(common.get_os_version()))
|
||||
version_str += f'OS: {platform.system()} {common.get_os_version()}\n'
|
||||
return version_str
|
||||
|
||||
|
||||
|
@ -108,8 +106,8 @@ class DelugeTextHelpFormatter(argparse.RawDescriptionHelpFormatter):
|
|||
line instead. This way list formatting is not mangled by textwrap.wrap.
|
||||
"""
|
||||
wrapped_lines = []
|
||||
for l in text.splitlines():
|
||||
wrapped_lines.extend(textwrap.wrap(l, width, subsequent_indent=' '))
|
||||
for line in text.splitlines():
|
||||
wrapped_lines.extend(textwrap.wrap(line, width, subsequent_indent=' '))
|
||||
return wrapped_lines
|
||||
|
||||
def _format_action_invocation(self, action):
|
||||
|
@ -121,7 +119,7 @@ class DelugeTextHelpFormatter(argparse.RawDescriptionHelpFormatter):
|
|||
|
||||
"""
|
||||
if not action.option_strings:
|
||||
metavar, = self._metavar_formatter(action, action.dest)(1)
|
||||
(metavar,) = self._metavar_formatter(action, action.dest)(1)
|
||||
return metavar
|
||||
else:
|
||||
parts = []
|
||||
|
@ -136,12 +134,11 @@ class DelugeTextHelpFormatter(argparse.RawDescriptionHelpFormatter):
|
|||
default = action.dest.upper()
|
||||
args_string = self._format_args(action, default)
|
||||
opt = ', '.join(action.option_strings)
|
||||
parts.append('%s %s' % (opt, args_string))
|
||||
parts.append(f'{opt} {args_string}')
|
||||
return ', '.join(parts)
|
||||
|
||||
|
||||
class HelpAction(argparse._HelpAction):
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
if hasattr(parser, 'subparser'):
|
||||
subparser = getattr(parser, 'subparser')
|
||||
|
@ -151,11 +148,12 @@ class HelpAction(argparse._HelpAction):
|
|||
parser.exit()
|
||||
|
||||
|
||||
class BaseArgParser(argparse.ArgumentParser):
|
||||
|
||||
class ArgParserBase(argparse.ArgumentParser):
|
||||
def __init__(self, *args, **kwargs):
|
||||
if 'formatter_class' not in kwargs:
|
||||
kwargs['formatter_class'] = lambda prog: DelugeTextHelpFormatter(prog, max_help_position=33, width=90)
|
||||
kwargs['formatter_class'] = lambda prog: DelugeTextHelpFormatter(
|
||||
prog, max_help_position=33, width=90
|
||||
)
|
||||
|
||||
kwargs['add_help'] = kwargs.get('add_help', False)
|
||||
common_help = kwargs.pop('common_help', True)
|
||||
|
@ -164,32 +162,73 @@ class BaseArgParser(argparse.ArgumentParser):
|
|||
self.log_stream = kwargs['log_stream']
|
||||
del kwargs['log_stream']
|
||||
|
||||
super(BaseArgParser, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self.common_setup = False
|
||||
self.process_arg_group = False
|
||||
self.group = self.add_argument_group(_('Common Options'))
|
||||
if common_help:
|
||||
self.group.add_argument('-h', '--help', action=HelpAction,
|
||||
help=_('Print this help message'))
|
||||
self.group.add_argument('-V', '--version', action='version', version='%(prog)s ' + get_version(),
|
||||
help=_('Print version information'))
|
||||
self.group.add_argument('-v', action='version', version='%(prog)s ' + get_version(),
|
||||
help=argparse.SUPPRESS) # Deprecated arg
|
||||
self.group.add_argument('-c', '--config', metavar='<config>',
|
||||
help=_('Set the config directory path'))
|
||||
self.group.add_argument('-l', '--logfile', metavar='<logfile>',
|
||||
help=_('Output to specified logfile instead of stdout'))
|
||||
self.group.add_argument('-L', '--loglevel', choices=[l for k in deluge.log.levels for l in (k, k.upper())],
|
||||
help=_('Set the log level (none, error, warning, info, debug)'), metavar='<level>')
|
||||
self.group.add_argument('--logrotate', nargs='?', const='2M', metavar='<max-size>',
|
||||
help=_('Enable logfile rotation, with optional maximum logfile size, '
|
||||
'default: %(const)s (Logfile rotation count is 5)'))
|
||||
self.group.add_argument('-q', '--quiet', action='store_true',
|
||||
help=_('Quieten logging output (Same as `--loglevel none`)'))
|
||||
self.group.add_argument('--profile', metavar='<profile-file>', nargs='?', default=False,
|
||||
help=_('Profile %(prog)s with cProfile. Outputs to stdout '
|
||||
'unless a filename is specified'))
|
||||
self.group.add_argument(
|
||||
'-h', '--help', action=HelpAction, help=_('Print this help message')
|
||||
)
|
||||
self.group.add_argument(
|
||||
'-V',
|
||||
'--version',
|
||||
action='version',
|
||||
version='%(prog)s ' + _get_version_detail(),
|
||||
help=_('Print version information'),
|
||||
)
|
||||
self.group.add_argument(
|
||||
'-v',
|
||||
action='version',
|
||||
version='%(prog)s ' + _get_version_detail(),
|
||||
help=argparse.SUPPRESS,
|
||||
) # Deprecated arg
|
||||
self.group.add_argument(
|
||||
'-c',
|
||||
'--config',
|
||||
metavar='<config>',
|
||||
help=_('Set the config directory path'),
|
||||
)
|
||||
self.group.add_argument(
|
||||
'-l',
|
||||
'--logfile',
|
||||
metavar='<logfile>',
|
||||
help=_('Output to specified logfile instead of stdout'),
|
||||
)
|
||||
self.group.add_argument(
|
||||
'-L',
|
||||
'--loglevel',
|
||||
choices=[level for k in deluge.log.levels for level in (k, k.upper())],
|
||||
help=_('Set the log level (none, error, warning, info, debug)'),
|
||||
metavar='<level>',
|
||||
)
|
||||
self.group.add_argument(
|
||||
'--logrotate',
|
||||
nargs='?',
|
||||
const='2M',
|
||||
metavar='<max-size>',
|
||||
help=_(
|
||||
'Enable logfile rotation, with optional maximum logfile size, '
|
||||
'default: %(const)s (Logfile rotation count is 5)'
|
||||
),
|
||||
)
|
||||
self.group.add_argument(
|
||||
'-q',
|
||||
'--quiet',
|
||||
action='store_true',
|
||||
help=_('Quieten logging output (Same as `--loglevel none`)'),
|
||||
)
|
||||
self.group.add_argument(
|
||||
'--profile',
|
||||
metavar='<profile-file>',
|
||||
nargs='?',
|
||||
default=False,
|
||||
help=_(
|
||||
'Profile %(prog)s with cProfile. Outputs to stdout '
|
||||
'unless a filename is specified'
|
||||
),
|
||||
)
|
||||
|
||||
def parse_args(self, args=None):
|
||||
"""Parse UI arguments and handle common and process group options.
|
||||
|
@ -204,7 +243,7 @@ class BaseArgParser(argparse.ArgumentParser):
|
|||
argparse.Namespace: The parsed arguments.
|
||||
|
||||
"""
|
||||
options = super(BaseArgParser, self).parse_args(args=args)
|
||||
options = super().parse_args(args=args)
|
||||
return self._handle_ui_options(options)
|
||||
|
||||
def parse_known_ui_args(self, args, withhold=None):
|
||||
|
@ -220,9 +259,9 @@ class BaseArgParser(argparse.ArgumentParser):
|
|||
"""
|
||||
if withhold:
|
||||
args = [a for a in args if a not in withhold]
|
||||
options, remaining = super(BaseArgParser, self).parse_known_args(args=args)
|
||||
options, remaining = super().parse_known_args(args=args)
|
||||
options.remaining = remaining
|
||||
# Hanlde common and process group options
|
||||
# Handle common and process group options
|
||||
return self._handle_ui_options(options)
|
||||
|
||||
def _handle_ui_options(self, options):
|
||||
|
@ -251,8 +290,13 @@ class BaseArgParser(argparse.ArgumentParser):
|
|||
logrotate = common.parse_human_size(options.logrotate)
|
||||
|
||||
# Setup the logger
|
||||
deluge.log.setup_logger(level=options.loglevel, filename=options.logfile, filemode=logfile_mode,
|
||||
logrotate=logrotate, output_stream=self.log_stream)
|
||||
deluge.log.setup_logger(
|
||||
level=options.loglevel,
|
||||
filename=options.logfile,
|
||||
filemode=logfile_mode,
|
||||
logrotate=logrotate,
|
||||
output_stream=self.log_stream,
|
||||
)
|
||||
|
||||
if options.config:
|
||||
if not set_config_dir(options.config):
|
||||
|
@ -278,20 +322,22 @@ class BaseArgParser(argparse.ArgumentParser):
|
|||
|
||||
# Write pid file before chuid
|
||||
if options.pidfile:
|
||||
with open(options.pidfile, 'wb') as _file:
|
||||
with open(options.pidfile, 'w') as _file:
|
||||
_file.write('%d\n' % os.getpid())
|
||||
|
||||
if not common.windows_check():
|
||||
if options.user:
|
||||
if not options.user.isdigit():
|
||||
import pwd
|
||||
options.user = pwd.getpwnam(options.user)[2]
|
||||
os.setuid(options.user)
|
||||
if options.group:
|
||||
if not options.group.isdigit():
|
||||
import grp
|
||||
|
||||
options.group = grp.getgrnam(options.group)[2]
|
||||
os.setuid(options.group)
|
||||
os.setgid(options.group)
|
||||
if options.user:
|
||||
if not options.user.isdigit():
|
||||
import pwd
|
||||
|
||||
options.user = pwd.getpwnam(options.user)[2]
|
||||
os.setuid(options.user)
|
||||
|
||||
return options
|
||||
|
||||
|
@ -300,14 +346,39 @@ class BaseArgParser(argparse.ArgumentParser):
|
|||
|
||||
self.process_arg_group = True
|
||||
self.group = self.add_argument_group(_('Process Control Options'))
|
||||
self.group.add_argument('-P', '--pidfile', metavar='<pidfile>', action='store',
|
||||
help=_('Pidfile to store the process id'))
|
||||
self.group.add_argument(
|
||||
'-P',
|
||||
'--pidfile',
|
||||
metavar='<pidfile>',
|
||||
action='store',
|
||||
help=_('Pidfile to store the process id'),
|
||||
)
|
||||
if not common.windows_check():
|
||||
self.group.add_argument('-d', '--do-not-daemonize', dest='donotdaemonize', action='store_true',
|
||||
help=_('Do not daemonize (fork) this process'))
|
||||
self.group.add_argument('-f', '--fork', dest='donotdaemonize', action='store_false',
|
||||
help=argparse.SUPPRESS) # Deprecated arg
|
||||
self.group.add_argument('-U', '--user', metavar='<user>', action='store',
|
||||
help=_('Change to this user on startup (Requires root)'))
|
||||
self.group.add_argument('-g', '--group', metavar='<group>', action='store',
|
||||
help=_('Change to this group on startup (Requires root)'))
|
||||
self.group.add_argument(
|
||||
'-d',
|
||||
'--do-not-daemonize',
|
||||
dest='donotdaemonize',
|
||||
action='store_true',
|
||||
help=_('Do not daemonize (fork) this process'),
|
||||
)
|
||||
self.group.add_argument(
|
||||
'-f',
|
||||
'--fork',
|
||||
dest='donotdaemonize',
|
||||
action='store_false',
|
||||
help=argparse.SUPPRESS,
|
||||
) # Deprecated arg
|
||||
self.group.add_argument(
|
||||
'-U',
|
||||
'--user',
|
||||
metavar='<user>',
|
||||
action='store',
|
||||
help=_('Change to this user on startup (Requires root)'),
|
||||
)
|
||||
self.group.add_argument(
|
||||
'-g',
|
||||
'--group',
|
||||
metavar='<group>',
|
||||
action='store',
|
||||
help=_('Change to this group on startup (Requires root)'),
|
||||
)
|
|
@ -9,11 +9,7 @@
# License.

# Written by Petru Paler
# Updated by Calum Lind to support both Python 2 and Python 3.

from sys import version_info

PY2 = version_info.major == 2
# Updated by Calum Lind to support Python 3.


class BTFailure(Exception):

@ -31,9 +27,9 @@ def decode_int(x, f):
    f += 1
    newf = x.index(END_DELIM, f)
    n = int(x[f:newf])
    if x[f:f+1] == b'-' and x[f+1:f+2] == b'0':
    if x[f : f + 1] == b'-' and x[f + 1 : f + 2] == b'0':
        raise ValueError
    elif x[f:f+1] == b'0' and newf != f + 1:
    elif x[f : f + 1] == b'0' and newf != f + 1:
        raise ValueError
    return (n, newf + 1)

@ -41,25 +37,25 @@
def decode_string(x, f):
    colon = x.index(BYTE_SEP, f)
    n = int(x[f:colon])
    if x[f:f+1] == b'0' and colon != f + 1:
    if x[f : f + 1] == b'0' and colon != f + 1:
        raise ValueError
    colon += 1
    return (x[colon:colon + n], colon + n)
    return (x[colon : colon + n], colon + n)


def decode_list(x, f):
    r, f = [], f + 1
    while x[f:f+1] != END_DELIM:
        v, f = decode_func[x[f:f+1]](x, f)
    while x[f : f + 1] != END_DELIM:
        v, f = decode_func[x[f : f + 1]](x, f)
        r.append(v)
    return (r, f + 1)


def decode_dict(x, f):
    r, f = {}, f + 1
    while x[f:f+1] != END_DELIM:
    while x[f : f + 1] != END_DELIM:
        k, f = decode_string(x, f)
        r[k], f = decode_func[x[f:f+1]](x, f)
        r[k], f = decode_func[x[f : f + 1]](x, f)
    return (r, f + 1)


@ -81,15 +77,14 @@ decode_func[b'9'] = decode_string

def bdecode(x):
    try:
        r, l = decode_func[x[0:1]](x, 0)
    except (IndexError, KeyError, ValueError):
        r, __ = decode_func[x[0:1]](x, 0)
    except (LookupError, TypeError, ValueError):
        raise BTFailure('Not a valid bencoded string')
    else:
        return r


class Bencached(object):

class Bencached:
    __slots__ = ['bencoded']

    def __init__(self, s):

@ -109,7 +104,7 @@ def encode_bool(x, r):


def encode_string(x, r):
    encode_string(x.encode('utf8'), r)
    encode_bytes(x.encode('utf8'), r)


def encode_bytes(x, r):

@ -126,6 +121,10 @@ def encode_list(x, r):
def encode_dict(x, r):
    r.append(DICT_DELIM)
    for k, v in sorted(x.items()):
        try:
            k = k.encode('utf8')
        except AttributeError:
            pass
        r.extend((str(len(k)).encode('utf8'), BYTE_SEP, k))
        encode_func[type(v)](v, r)
    r.append(END_DELIM)

@ -140,10 +139,6 @@ encode_func[dict] = encode_dict
encode_func[bool] = encode_bool
encode_func[str] = encode_string
encode_func[bytes] = encode_bytes
if PY2:
    encode_func[long] = encode_int
    encode_func[str] = encode_bytes
    encode_func[unicode] = encode_string


def bencode(x):
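A short usage sketch of the encoder/decoder shown in this diff (the module path is assumed to be deluge.bencode); note that bdecode() returns bytes keys and values::

    from deluge.bencode import bdecode, bencode

    data = {b'announce': b'http://tracker.example/announce', b'length': 16384}
    raw = bencode(data)       # bencoded bytes, e.g. b'd8:announce31:...e'
    assert bdecode(raw) == data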
698 deluge/common.py
File diff suppressed because it is too large
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007-2010 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -7,8 +6,6 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import traceback
|
||||
from collections import defaultdict
|
||||
|
@ -17,8 +14,6 @@ from twisted.internet import reactor
|
|||
from twisted.internet.defer import DeferredList, fail, maybeDeferred, succeed
|
||||
from twisted.internet.task import LoopingCall, deferLater
|
||||
|
||||
from deluge.common import PY2
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -27,15 +22,14 @@ class ComponentAlreadyRegistered(Exception):
|
|||
|
||||
|
||||
class ComponentException(Exception):
|
||||
|
||||
def __init__(self, message, tb):
|
||||
super(ComponentException, self).__init__(message)
|
||||
super().__init__(message)
|
||||
self.message = message
|
||||
self.tb = tb
|
||||
|
||||
def __str__(self):
|
||||
s = super(ComponentException, self).__str__()
|
||||
return '%s\n%s' % (s, ''.join(self.tb))
|
||||
s = super().__str__()
|
||||
return '{}\n{}'.format(s, ''.join(self.tb))
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, self.__class__):
|
||||
|
@ -47,7 +41,7 @@ class ComponentException(Exception):
|
|||
return not self.__eq__(other)
|
||||
|
||||
|
||||
class Component(object):
|
||||
class Component:
|
||||
"""Component objects are singletons managed by the :class:`ComponentRegistry`.
|
||||
|
||||
When a new Component object is instantiated, it will be automatically
|
||||
|
@ -65,11 +59,16 @@ class Component(object):
|
|||
Deluge core.
|
||||
|
||||
**update()** - This method is called every 1 second by default while the
|
||||
Componented is in a *Started* state. The interval can be
|
||||
Component is in a *Started* state. The interval can be
|
||||
specified during instantiation. The update() timer can be
|
||||
paused by instructing the :class:`ComponentRegistry` to pause
|
||||
this Component.
|
||||
|
||||
**pause()** - This method is called when the component is being paused.
|
||||
|
||||
**resume()** - This method is called when the component resumes from a Paused
|
||||
state.
|
||||
|
||||
**shutdown()** - This method is called when the client is exiting. If the
|
||||
Component is in a "Started" state when this is called, a
|
||||
call to stop() will be issued prior to shutdown().
|
||||
|
@ -86,13 +85,14 @@ class Component(object):
|
|||
|
||||
**Stopped** - The Component has either been stopped or has yet to be started.
|
||||
|
||||
**Stopping** - The Component has had it's stop method called, but it hasn't
|
||||
**Stopping** - The Component has had its stop method called, but it hasn't
|
||||
fully stopped yet.
|
||||
|
||||
**Paused** - The Component has had it's update timer stopped, but will
|
||||
**Paused** - The Component has had its update timer stopped, but will
|
||||
still be considered in a Started state.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, name, interval=1, depend=None):
|
||||
"""Initialize component.
|
||||
|
||||
|
@ -116,7 +116,6 @@ class Component(object):
|
|||
_ComponentRegistry.deregister(self)
|
||||
|
||||
def _component_start_timer(self):
|
||||
if hasattr(self, 'update'):
|
||||
self._component_timer = LoopingCall(self.update)
|
||||
self._component_timer.start(self._component_interval)
|
||||
|
||||
|
@ -134,22 +133,23 @@ class Component(object):
|
|||
return fail(result)
|
||||
|
||||
if self._component_state == 'Stopped':
|
||||
if hasattr(self, 'start'):
|
||||
self._component_state = 'Starting'
|
||||
d = deferLater(reactor, 0, self.start)
|
||||
d.addCallbacks(on_start, on_start_fail)
|
||||
self._component_starting_deferred = d
|
||||
else:
|
||||
d = maybeDeferred(on_start, None)
|
||||
elif self._component_state == 'Starting':
|
||||
return self._component_starting_deferred
|
||||
elif self._component_state == 'Started':
|
||||
d = succeed(True)
|
||||
else:
|
||||
d = fail(ComponentException('Trying to start component "%s" but it is '
|
||||
'not in a stopped state. Current state: %s' %
|
||||
(self._component_name, self._component_state),
|
||||
traceback.format_stack(limit=4)))
|
||||
d = fail(
|
||||
ComponentException(
|
||||
'Trying to start component "%s" but it is '
|
||||
'not in a stopped state. Current state: %s'
|
||||
% (self._component_name, self._component_state),
|
||||
traceback.format_stack(limit=4),
|
||||
)
|
||||
)
|
||||
return d
|
||||
|
||||
def _component_stop(self):
|
||||
|
@ -166,14 +166,11 @@ class Component(object):
|
|||
return result
|
||||
|
||||
if self._component_state != 'Stopped' and self._component_state != 'Stopping':
|
||||
if hasattr(self, 'stop'):
|
||||
self._component_state = 'Stopping'
|
||||
d = maybeDeferred(self.stop)
|
||||
d.addCallback(on_stop)
|
||||
d.addErrback(on_stop_fail)
|
||||
self._component_stopping_deferred = d
|
||||
else:
|
||||
d = maybeDeferred(on_stop, None)
|
||||
|
||||
if self._component_state == 'Stopping':
|
||||
return self._component_stopping_deferred
|
||||
|
@ -183,41 +180,47 @@ class Component(object):
|
|||
def _component_pause(self):
|
||||
def on_pause(result):
|
||||
self._component_state = 'Paused'
|
||||
if self._component_timer and self._component_timer.running:
|
||||
self._component_timer.stop()
|
||||
|
||||
if self._component_state == 'Started':
|
||||
if self._component_timer and self._component_timer.running:
|
||||
d = maybeDeferred(self._component_timer.stop)
|
||||
d = maybeDeferred(self.pause)
|
||||
d.addCallback(on_pause)
|
||||
else:
|
||||
d = succeed(None)
|
||||
elif self._component_state == 'Paused':
|
||||
d = succeed(None)
|
||||
else:
|
||||
d = fail(ComponentException('Trying to pause component "%s" but it is '
|
||||
'not in a started state. Current state: %s' %
|
||||
(self._component_name, self._component_state),
|
||||
traceback.format_stack(limit=4)))
|
||||
d = fail(
|
||||
ComponentException(
|
||||
'Trying to pause component "%s" but it is '
|
||||
'not in a started state. Current state: %s'
|
||||
% (self._component_name, self._component_state),
|
||||
traceback.format_stack(limit=4),
|
||||
)
|
||||
)
|
||||
return d
|
||||
|
||||
def _component_resume(self):
|
||||
def on_resume(result):
|
||||
self._component_state = 'Started'
|
||||
self._component_start_timer()
|
||||
|
||||
if self._component_state == 'Paused':
|
||||
d = maybeDeferred(self._component_start_timer)
|
||||
d = maybeDeferred(self.resume)
|
||||
d.addCallback(on_resume)
|
||||
else:
|
||||
d = fail(ComponentException('Trying to resume component "%s" but it is '
|
||||
'not in a paused state. Current state: %s' %
|
||||
(self._component_name, self._component_state),
|
||||
traceback.format_stack(limit=4)))
|
||||
d = fail(
|
||||
ComponentException(
|
||||
'Trying to resume component "%s" but it is '
|
||||
'not in a paused state. Current state: %s'
|
||||
% (self._component_name, self._component_state),
|
||||
traceback.format_stack(limit=4),
|
||||
)
|
||||
)
|
||||
return d
|
||||
|
||||
def _component_shutdown(self):
|
||||
def on_stop(result):
|
||||
if hasattr(self, 'shutdown'):
|
||||
return maybeDeferred(self.shutdown)
|
||||
return succeed(None)
|
||||
|
||||
d = self._component_stop()
|
||||
d.addCallback(on_stop)
|
||||
|
@ -238,12 +241,19 @@ class Component(object):
|
|||
def shutdown(self):
|
||||
pass
|
||||
|
||||
def pause(self):
|
||||
pass
|
||||
|
||||
class ComponentRegistry(object):
|
||||
def resume(self):
|
||||
pass
|
||||
|
||||
|
||||
class ComponentRegistry:
|
||||
"""The ComponentRegistry holds a list of currently registered :class:`Component` objects.
|
||||
|
||||
It is used to manage the Components by starting, stopping, pausing and shutting them down.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.components = {}
|
||||
# Stores all of the components that are dependent on a particular component
|
||||
|
@ -264,7 +274,9 @@ class ComponentRegistry(object):
|
|||
"""
|
||||
name = obj._component_name
|
||||
if name in self.components:
|
||||
raise ComponentAlreadyRegistered('Component already registered with name %s' % name)
|
||||
raise ComponentAlreadyRegistered(
|
||||
'Component already registered with name %s' % name
|
||||
)
|
||||
|
||||
self.components[obj._component_name] = obj
|
||||
if obj._component_depend:
|
||||
|
@ -279,7 +291,8 @@ class ComponentRegistry(object):
|
|||
obj (Component): a component object to deregister
|
||||
|
||||
Returns:
|
||||
Deferred: a deferred object that will fire once the Component has been sucessfully deregistered
|
||||
Deferred: a deferred object that will fire once the Component has been
|
||||
successfully deregistered
|
||||
|
||||
"""
|
||||
if obj in self.components.values():
|
||||
|
@ -289,6 +302,7 @@ class ComponentRegistry(object):
|
|||
def on_stop(result, name):
|
||||
# Component may have been removed, so pop to ensure it doesn't fail
|
||||
self.components.pop(name, None)
|
||||
|
||||
return d.addCallback(on_stop, obj._component_name)
|
||||
else:
|
||||
return succeed(None)
|
||||
|
@ -309,7 +323,7 @@ class ComponentRegistry(object):
|
|||
# Start all the components if names is empty
|
||||
if not names:
|
||||
names = list(self.components)
|
||||
elif isinstance(names, str if not PY2 else basestring):
|
||||
elif isinstance(names, str):
|
||||
names = [names]
|
||||
|
||||
def on_depends_started(result, name):
|
||||
|
@ -343,7 +357,7 @@ class ComponentRegistry(object):
|
|||
"""
|
||||
if not names:
|
||||
names = list(self.components)
|
||||
elif isinstance(names, str if not PY2 else basestring):
|
||||
elif isinstance(names, str):
|
||||
names = [names]
|
||||
|
||||
def on_dependents_stopped(result, name):
|
||||
|
@ -358,7 +372,9 @@ class ComponentRegistry(object):
|
|||
if name in self.components:
|
||||
if name in self.dependents:
|
||||
# If other components depend on this component, stop them first
|
||||
d = self.stop(self.dependents[name]).addCallback(on_dependents_stopped, name)
|
||||
d = self.stop(self.dependents[name]).addCallback(
|
||||
on_dependents_stopped, name
|
||||
)
|
||||
deferreds.append(d)
|
||||
stopped_in_deferred.update(self.dependents[name])
|
||||
else:
|
||||
|
@ -381,7 +397,7 @@ class ComponentRegistry(object):
|
|||
"""
|
||||
if not names:
|
||||
names = list(self.components)
|
||||
elif isinstance(names, str if not PY2 else basestring):
|
||||
elif isinstance(names, str):
|
||||
names = [names]
|
||||
|
||||
deferreds = []
|
||||
|
@ -407,7 +423,7 @@ class ComponentRegistry(object):
|
|||
"""
|
||||
if not names:
|
||||
names = list(self.components)
|
||||
elif isinstance(names, str if not PY2 else basestring):
|
||||
elif isinstance(names, str):
|
||||
names = [names]
|
||||
|
||||
deferreds = []
|
||||
|
@ -428,8 +444,11 @@ class ComponentRegistry(object):
|
|||
Deferred: Fired once all Components have been successfully shut down.
|
||||
|
||||
"""
|
||||
|
||||
def on_stopped(result):
|
||||
return DeferredList([comp._component_shutdown() for comp in self.components.values()])
|
||||
return DeferredList(
|
||||
[comp._component_shutdown() for comp in list(self.components.values())]
|
||||
)
|
||||
|
||||
return self.stop(list(self.components)).addCallback(on_stopped)
|
||||
|
||||
|
|
244
deluge/config.py
244
deluge/config.py
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -39,71 +38,67 @@ this can only be done for the 'config file version' and not for the 'format'
|
|||
version as this will be done internally.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import cPickle as pickle
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pickle
|
||||
import shutil
|
||||
from codecs import getwriter
|
||||
from io import open
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
from deluge.common import JSON_FORMAT, get_default_config_dir
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
callLater = None # Necessary for the config tests
|
||||
|
||||
|
||||
def prop(func):
|
||||
"""Function decorator for defining property attributes
|
||||
|
||||
The decorated function is expected to return a dictionary
|
||||
containing one or more of the following pairs:
|
||||
|
||||
fget - function for getting attribute value
|
||||
fset - function for setting attribute value
|
||||
fdel - function for deleting attribute
|
||||
|
||||
This can be conveniently constructed by the locals() builtin
|
||||
function; see:
|
||||
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/205183
|
||||
"""
|
||||
return property(doc=func.__doc__, **func())
|
||||
|
||||
|
||||
def find_json_objects(s):
|
||||
"""Find json objects in a string.
|
||||
def find_json_objects(text, decoder=json.JSONDecoder()):
|
||||
"""Find json objects in text.
|
||||
|
||||
Args:
|
||||
s (str): the string to find json objects in
|
||||
text (str): The text to find json objects within.
|
||||
|
||||
Returns:
|
||||
list: A list of tuples containing start and end locations of json
|
||||
objects in string `s`. e.g. [(start, end), ...]
|
||||
objects in the text. e.g. [(start, end), ...]
|
||||
|
||||
|
||||
"""
|
||||
objects = []
|
||||
opens = 0
|
||||
start = s.find('{')
|
||||
offset = start
|
||||
offset = 0
|
||||
while True:
|
||||
try:
|
||||
start = text.index('{', offset)
|
||||
except ValueError:
|
||||
break
|
||||
|
||||
if start < 0:
|
||||
return []
|
||||
|
||||
for index, c in enumerate(s[offset:]):
|
||||
if c == '{':
|
||||
opens += 1
|
||||
elif c == '}':
|
||||
opens -= 1
|
||||
if opens == 0:
|
||||
objects.append((start, index + offset + 1))
|
||||
start = index + offset + 1
|
||||
try:
|
||||
__, index = decoder.raw_decode(text[start:])
|
||||
except json.decoder.JSONDecodeError:
|
||||
offset = start + 1
|
||||
else:
|
||||
offset = start + index
|
||||
objects.append((start, offset))
|
||||
|
||||
return objects
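A small usage sketch of the rewritten scanner above (the config key is illustrative): a Deluge config file stores two JSON objects back to back, a version header followed by the config dict, and find_json_objects() returns the span of each::

    text = '{"file": 1, "format": 1}{"max_active_seeding": 5}'
    spans = find_json_objects(text)
    # spans == [(0, 24), (24, 49)]
    version_header = text[spans[0][0]:spans[0][1]]
    config_body = text[spans[1][0]:spans[1][1]]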
|
||||
|
||||
|
||||
class Config(object):
|
||||
def cast_to_existing_type(value, old_value):
|
||||
"""Attempt to convert new value type to match old value type"""
|
||||
types_match = isinstance(old_value, (type(None), type(value)))
|
||||
if value is not None and not types_match:
|
||||
old_type = type(old_value)
|
||||
# Skip convert to bytes since requires knowledge of encoding and value should
|
||||
# be unicode anyway.
|
||||
if old_type is bytes:
|
||||
return value
|
||||
|
||||
return old_type(value)
|
||||
|
||||
return value
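Illustrative behavior of cast_to_existing_type() as defined above, assuming the values shown::

    cast_to_existing_type('8112', 58846)   # -> 8112, cast to the old int type
    cast_to_existing_type('on', None)      # -> 'on', a None old value accepts any type
    cast_to_existing_type('x', b'old')     # -> 'x', bytes old values are never cast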
|
||||
|
||||
|
||||
class Config:
|
||||
"""This class is used to access/create/modify config files.
|
||||
|
||||
Args:
|
||||
|
@ -113,18 +108,26 @@ class Config(object):
|
|||
file_version (int): The file format for the default config values when creating
|
||||
a fresh config. This value should be increased whenever a new migration function is
|
||||
setup to convert old config files. (default: 1)
|
||||
log_mask_funcs (dict): A dict of key:function, used to mask sensitive
|
||||
key values (e.g. passwords) when logging is enabled.
|
||||
|
||||
"""
|
||||
def __init__(self, filename, defaults=None, config_dir=None, file_version=1):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
filename,
|
||||
defaults=None,
|
||||
config_dir=None,
|
||||
file_version=1,
|
||||
log_mask_funcs=None,
|
||||
):
|
||||
self.__config = {}
|
||||
self.__set_functions = {}
|
||||
self.__change_callbacks = []
|
||||
self.__log_mask_funcs = log_mask_funcs if log_mask_funcs else {}
|
||||
|
||||
# These hold the version numbers and they will be set when loaded
|
||||
self.__version = {
|
||||
'format': 1,
|
||||
'file': file_version
|
||||
}
|
||||
self.__version = {'format': 1, 'file': file_version}
|
||||
|
||||
# This will get set with a reactor.callLater whenever a config option
|
||||
# is set.
|
||||
|
@ -132,7 +135,7 @@ class Config(object):
|
|||
|
||||
if defaults:
|
||||
for key, value in defaults.items():
|
||||
self.set_item(key, value)
|
||||
self.set_item(key, value, default=True)
|
||||
|
||||
# Load the config from file in the config_dir
|
||||
if config_dir:
|
||||
|
@ -142,6 +145,12 @@ class Config(object):
|
|||
|
||||
self.load()
|
||||
|
||||
def callLater(self, period, func, *args, **kwargs): # noqa: N802 ignore camelCase
|
||||
"""Wrapper around reactor.callLater for test purpose."""
|
||||
from twisted.internet import reactor
|
||||
|
||||
return reactor.callLater(period, func, *args, **kwargs)
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self.__config
|
||||
|
||||
|
@ -150,7 +159,7 @@ class Config(object):
|
|||
|
||||
return self.set_item(key, value)
|
||||
|
||||
def set_item(self, key, value):
|
||||
def set_item(self, key, value, default=False):
|
||||
"""Sets item 'key' to 'value' in the config dictionary.
|
||||
|
||||
Does not allow changing the item's type unless it is None.
|
||||
|
@ -162,6 +171,8 @@ class Config(object):
|
|||
key (str): Item to change to change.
|
||||
value (any): The value to change item to, must be same type as what is
|
||||
currently in the config.
|
||||
default (optional, bool): When setting a default value skip func or save
|
||||
callbacks.
|
||||
|
||||
Raises:
|
||||
ValueError: Raised when the type of value is not the same as what is
|
||||
|
@ -174,54 +185,54 @@ class Config(object):
|
|||
5
|
||||
|
||||
"""
|
||||
if key not in self.__config:
|
||||
self.__config[key] = value
|
||||
log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
|
||||
return
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode()
|
||||
|
||||
if self.__config[key] == value:
|
||||
return
|
||||
|
||||
# Do not allow the type to change unless it is None
|
||||
if value is not None and not isinstance(
|
||||
self.__config[key], type(None)) and not isinstance(self.__config[key], type(value)):
|
||||
if key in self.__config:
|
||||
try:
|
||||
oldtype = type(self.__config[key])
|
||||
value = oldtype(value)
|
||||
value = cast_to_existing_type(value, self.__config[key])
|
||||
except ValueError:
|
||||
log.warning('Value Type "%s" invalid for key: %s', type(value), key)
|
||||
raise
|
||||
else:
|
||||
if self.__config[key] == value:
|
||||
return
|
||||
|
||||
if isinstance(value, bytes):
|
||||
value.decode('utf8')
|
||||
|
||||
log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
if key in self.__log_mask_funcs:
|
||||
value = self.__log_mask_funcs[key](value)
|
||||
log.debug(
|
||||
'Setting key "%s" to: %s (of type: %s)',
|
||||
key,
|
||||
value,
|
||||
type(value),
|
||||
)
|
||||
self.__config[key] = value
|
||||
|
||||
global callLater
|
||||
if callLater is None:
|
||||
# Must import here and not at the top or it will throw ReactorAlreadyInstalledError
|
||||
from twisted.internet.reactor import callLater # pylint: disable=redefined-outer-name
|
||||
# Skip save or func callbacks if setting default value for keys
|
||||
if default:
|
||||
return
|
||||
|
||||
# Run the set_function for this key if any
|
||||
for func in self.__set_functions.get(key, []):
|
||||
self.callLater(0, func, key, value)
|
||||
|
||||
try:
|
||||
for func in self.__set_functions[key]:
|
||||
callLater(0, func, key, value)
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
|
||||
def do_change_callbacks(key, value):
|
||||
for func in self.__change_callbacks:
|
||||
func(key, value)
|
||||
callLater(0, do_change_callbacks, key, value)
|
||||
|
||||
self.callLater(0, do_change_callbacks, key, value)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# We set the save_timer for 5 seconds if not already set
|
||||
if not self._save_timer or not self._save_timer.active():
|
||||
self._save_timer = callLater(5, self.save)
|
||||
self._save_timer = self.callLater(5, self.save)
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""See get_item """
|
||||
"""See get_item"""
|
||||
return self.get_item(key)
|
||||
|
||||
def get_item(self, key):
|
||||
|
@ -294,14 +305,9 @@ class Config(object):
|
|||
|
||||
del self.__config[key]
|
||||
|
||||
global callLater
|
||||
if callLater is None:
|
||||
# Must import here and not at the top or it will throw ReactorAlreadyInstalledError
|
||||
from twisted.internet.reactor import callLater # pylint: disable=redefined-outer-name
|
||||
|
||||
# We set the save_timer for 5 seconds if not already set
|
||||
if not self._save_timer or not self._save_timer.active():
|
||||
self._save_timer = callLater(5, self.save)
|
||||
self._save_timer = self.callLater(5, self.save)
|
||||
|
||||
def register_change_callback(self, callback):
|
||||
"""Registers a callback function for any changed value.
|
||||
|
@ -347,7 +353,6 @@ class Config(object):
|
|||
# Run the function now if apply_now is set
|
||||
if apply_now:
|
||||
function(key, self.__config[key])
|
||||
return
|
||||
|
||||
def apply_all(self):
|
||||
"""Calls all set functions.
|
||||
|
@ -390,9 +395,9 @@ class Config(object):
|
|||
filename = self.__config_file
|
||||
|
||||
try:
|
||||
with open(filename, 'r', encoding='utf8') as _file:
|
||||
with open(filename, encoding='utf8') as _file:
|
||||
data = _file.read()
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.warning('Unable to open config file %s: %s', filename, ex)
|
||||
return
|
||||
|
||||
|
@ -422,8 +427,25 @@ class Config(object):
|
|||
log.exception(ex)
|
||||
log.warning('Unable to load config file: %s', filename)
|
||||
|
||||
log.debug('Config %s version: %s.%s loaded: %s', filename,
|
||||
self.__version['format'], self.__version['file'], self.__config)
|
||||
if not log.isEnabledFor(logging.DEBUG):
|
||||
return
|
||||
|
||||
config = self.__config
|
||||
if self.__log_mask_funcs:
|
||||
config = {
|
||||
key: self.__log_mask_funcs[key](config[key])
|
||||
if key in self.__log_mask_funcs
|
||||
else config[key]
|
||||
for key in config
|
||||
}
|
||||
|
||||
log.debug(
|
||||
'Config %s version: %s.%s loaded: %s',
|
||||
filename,
|
||||
self.__version['format'],
|
||||
self.__version['file'],
|
||||
config,
|
||||
)
|
||||
|
||||
def save(self, filename=None):
|
||||
"""Save configuration to disk.
|
||||
|
@ -440,7 +462,7 @@ class Config(object):
|
|||
# Check to see if the current config differs from the one on disk
|
||||
# We will only write a new config file if there is a difference
|
||||
try:
|
||||
with open(filename, 'r', encoding='utf8') as _file:
|
||||
with open(filename, encoding='utf8') as _file:
|
||||
data = _file.read()
|
||||
objects = find_json_objects(data)
|
||||
start, end = objects[0]
|
||||
|
@ -452,34 +474,40 @@ class Config(object):
|
|||
if self._save_timer and self._save_timer.active():
|
||||
self._save_timer.cancel()
|
||||
return True
|
||||
except (IOError, IndexError) as ex:
|
||||
except (OSError, IndexError) as ex:
|
||||
log.warning('Unable to open config file: %s because: %s', filename, ex)
|
||||
|
||||
# Save the new config and make sure it's written to disk
|
||||
try:
|
||||
log.debug('Saving new config file %s', filename + '.new')
|
||||
with open(filename + '.new', 'wb') as _file:
|
||||
with NamedTemporaryFile(
|
||||
prefix=os.path.basename(filename) + '.', delete=False
|
||||
) as _file:
|
||||
filename_tmp = _file.name
|
||||
log.debug('Saving new config file %s', filename_tmp)
|
||||
json.dump(self.__version, getwriter('utf8')(_file), **JSON_FORMAT)
|
||||
json.dump(self.__config, getwriter('utf8')(_file), **JSON_FORMAT)
|
||||
_file.flush()
|
||||
os.fsync(_file.fileno())
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.error('Error writing new config file: %s', ex)
|
||||
return False
|
||||
|
||||
# Resolve symlinked config files before backing up and saving.
|
||||
filename = os.path.realpath(filename)
|
||||
|
||||
# Make a backup of the old config
|
||||
try:
|
||||
log.debug('Backing up old config file to %s.bak', filename)
|
||||
shutil.move(filename, filename + '.bak')
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.warning('Unable to backup old config: %s', ex)
|
||||
|
||||
# The new config file has been written successfully, so let's move it over
|
||||
# the existing one.
|
||||
try:
|
||||
log.debug('Moving new config file %s to %s..', filename + '.new', filename)
|
||||
shutil.move(filename + '.new', filename)
|
||||
except IOError as ex:
|
||||
log.debug('Moving new config file %s to %s', filename_tmp, filename)
|
||||
shutil.move(filename_tmp, filename)
|
||||
except OSError as ex:
|
||||
log.error('Error moving new config file: %s', ex)
|
||||
return False
|
||||
else:
|
||||
|
@ -505,16 +533,23 @@ class Config(object):
|
|||
raise ValueError('output_version needs to be greater than input_range')
|
||||
|
||||
if self.__version['file'] not in input_range:
|
||||
log.debug('File version %s is not in input_range %s, ignoring converter function..',
|
||||
self.__version['file'], input_range)
|
||||
log.debug(
|
||||
'File version %s is not in input_range %s, ignoring converter function..',
|
||||
self.__version['file'],
|
||||
input_range,
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
self.__config = func(self.__config)
|
||||
except Exception as ex:
|
||||
log.exception(ex)
|
||||
log.error('There was an exception try to convert config file %s %s to %s',
|
||||
self.__config_file, self.__version['file'], output_version)
|
||||
log.error(
|
||||
'There was an exception try to convert config file %s %s to %s',
|
||||
self.__config_file,
|
||||
self.__version['file'],
|
||||
output_version,
|
||||
)
|
||||
raise ex
|
||||
else:
|
||||
self.__version['file'] = output_version
|
||||
|
@ -524,12 +559,11 @@ class Config(object):
|
|||
def config_file(self):
|
||||
return self.__config_file
|
||||
|
||||
@prop
|
||||
def config(): # pylint: disable=no-method-argument
|
||||
@property
|
||||
def config(self):
|
||||
"""The config dictionary"""
|
||||
def fget(self):
|
||||
return self.__config
|
||||
|
||||
def fdel(self):
|
||||
@config.deleter
|
||||
def config(self):
|
||||
return self.save()
|
||||
return locals()
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -7,8 +6,6 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
|
@ -19,7 +16,7 @@ from deluge.config import Config
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _ConfigManager(object):
|
||||
class _ConfigManager:
|
||||
def __init__(self):
|
||||
log.debug('ConfigManager started..')
|
||||
self.config_files = {}
|
||||
|
@ -94,9 +91,12 @@ class _ConfigManager(object):
|
|||
log.debug('Getting config: %s', config_file)
|
||||
# Create the config object if not already created
|
||||
if config_file not in self.config_files:
|
||||
self.config_files[config_file] = Config(config_file, defaults,
|
||||
self.config_files[config_file] = Config(
|
||||
config_file,
|
||||
defaults,
|
||||
config_dir=self.config_directory,
|
||||
file_version=file_version)
|
||||
file_version=file_version,
|
||||
)
|
||||
|
||||
return self.config_files[config_file]
|
||||
|
||||
|
@ -106,7 +106,9 @@ _configmanager = _ConfigManager()
|
|||
|
||||
|
||||
def ConfigManager(config, defaults=None, file_version=1): # NOQA: N802
|
||||
return _configmanager.get_config(config, defaults=defaults, file_version=file_version)
|
||||
return _configmanager.get_config(
|
||||
config, defaults=defaults, file_version=file_version
|
||||
)
|
||||
|
||||
|
||||
def set_config_dir(directory):
|
||||
|
|
215
deluge/conftest.py
Normal file
215
deluge/conftest.py
Normal file
|
@ -0,0 +1,215 @@
|
|||
#
|
||||
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
|
||||
# the additional special exception to link portions of this program with the OpenSSL library.
|
||||
# See LICENSE for more details.
|
||||
#
|
||||
import asyncio
|
||||
import tempfile
|
||||
import warnings
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
import pytest_twisted
|
||||
from twisted.internet import reactor
|
||||
from twisted.internet.defer import Deferred, maybeDeferred
|
||||
from twisted.internet.error import CannotListenError, ProcessTerminated
|
||||
from twisted.python.failure import Failure
|
||||
|
||||
import deluge.component as _component
|
||||
import deluge.configmanager
|
||||
from deluge.common import get_localhost_auth
|
||||
from deluge.tests import common
|
||||
from deluge.ui.client import client as _client
|
||||
|
||||
DEFAULT_LISTEN_PORT = 58900
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def listen_port(request):
|
||||
if request and 'daemon' in request.fixturenames:
|
||||
try:
|
||||
return request.getfixturevalue('daemon').listen_port
|
||||
except Exception:
|
||||
pass
|
||||
return DEFAULT_LISTEN_PORT
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_callback():
|
||||
"""Returns a `Mock` object which can be registered as a callback to test against.
|
||||
|
||||
If callback was not called within `timeout` seconds, it will raise a TimeoutError.
|
||||
The returned Mock instance will have a `deferred` attribute which will complete when the callback has been called.
|
||||
"""
|
||||
|
||||
def reset(timeout=0.5, *args, **kwargs):
|
||||
if mock.called:
|
||||
original_reset_mock(*args, **kwargs)
|
||||
if mock.deferred:
|
||||
mock.deferred.cancel()
|
||||
deferred = Deferred(canceller=lambda x: deferred.callback(None))
|
||||
deferred.addTimeout(timeout, reactor)
|
||||
mock.side_effect = lambda *args, **kw: deferred.callback((args, kw))
|
||||
mock.deferred = deferred
|
||||
|
||||
mock = Mock()
|
||||
mock.__qualname__ = 'mock'
|
||||
original_reset_mock = mock.reset_mock
|
||||
mock.reset_mock = reset
|
||||
mock.reset_mock()
|
||||
return mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def config_dir(tmp_path):
|
||||
config_dir = tmp_path / 'config'
|
||||
deluge.configmanager.set_config_dir(config_dir)
|
||||
yield config_dir
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture()
|
||||
async def client(request, config_dir, monkeypatch, listen_port):
|
||||
# monkeypatch.setattr(
|
||||
# _client, 'connect', functools.partial(_client.connect, port=listen_port)
|
||||
# )
|
||||
try:
|
||||
username, password = get_localhost_auth()
|
||||
except Exception:
|
||||
username, password = '', ''
|
||||
await _client.connect(
|
||||
'localhost',
|
||||
port=listen_port,
|
||||
username=username,
|
||||
password=password,
|
||||
)
|
||||
yield _client
|
||||
if _client.connected():
|
||||
await _client.disconnect()
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture
|
||||
async def daemon(request, config_dir, tmp_path):
|
||||
listen_port = DEFAULT_LISTEN_PORT
|
||||
logfile = tmp_path / 'daemon.log'
|
||||
|
||||
if hasattr(request.cls, 'daemon_custom_script'):
|
||||
custom_script = request.cls.daemon_custom_script
|
||||
else:
|
||||
custom_script = ''
|
||||
|
||||
for dummy in range(10):
|
||||
try:
|
||||
d, daemon = common.start_core(
|
||||
listen_port=listen_port,
|
||||
logfile=logfile,
|
||||
timeout=5,
|
||||
timeout_msg='Timeout!',
|
||||
custom_script=custom_script,
|
||||
print_stdout=True,
|
||||
print_stderr=True,
|
||||
config_directory=config_dir,
|
||||
)
|
||||
await d
|
||||
except CannotListenError as ex:
|
||||
exception_error = ex
|
||||
listen_port += 1
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
else:
|
||||
break
|
||||
else:
|
||||
raise exception_error
|
||||
daemon.listen_port = listen_port
|
||||
yield daemon
|
||||
try:
|
||||
await daemon.kill()
|
||||
except ProcessTerminated:
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def common_fixture(config_dir, request, monkeypatch, listen_port):
|
||||
"""Adds some instance attributes to test classes for backwards compatibility with old testing."""
|
||||
|
||||
def fail(self, reason):
|
||||
if isinstance(reason, Failure):
|
||||
reason = reason.value
|
||||
return pytest.fail(str(reason))
|
||||
|
||||
if request.instance:
|
||||
request.instance.patch = monkeypatch.setattr
|
||||
request.instance.config_dir = config_dir
|
||||
request.instance.listen_port = listen_port
|
||||
request.instance.id = lambda: request.node.name
|
||||
request.cls.fail = fail
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture(scope='function')
|
||||
async def component():
|
||||
"""Verify component registry is clean, and clean up after test."""
|
||||
if len(_component._ComponentRegistry.components) != 0:
|
||||
warnings.warn(
|
||||
'The component._ComponentRegistry.components is not empty on test setup.\n'
|
||||
'This is probably caused by another test that did not clean up after finishing!: %s'
|
||||
% _component._ComponentRegistry.components
|
||||
)
|
||||
|
||||
yield _component
|
||||
|
||||
await _component.shutdown()
|
||||
_component._ComponentRegistry.components.clear()
|
||||
_component._ComponentRegistry.dependents.clear()
|
||||
|
||||
|
||||
@pytest_twisted.async_yield_fixture(scope='function')
|
||||
async def base_fixture(common_fixture, component, request):
|
||||
"""This fixture is autoused on all tests that subclass BaseTestCase"""
|
||||
self = request.instance
|
||||
|
||||
if hasattr(self, 'set_up'):
|
||||
try:
|
||||
await maybeDeferred(self.set_up)
|
||||
except Exception as exc:
|
||||
warnings.warn('Error caught in test setup!\n%s' % exc)
|
||||
pytest.fail('Error caught in test setup!\n%s' % exc)
|
||||
|
||||
yield
|
||||
|
||||
if hasattr(self, 'tear_down'):
|
||||
try:
|
||||
await maybeDeferred(self.tear_down)
|
||||
except Exception as exc:
|
||||
pytest.fail('Error caught in test teardown!\n%s' % exc)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('base_fixture')
|
||||
class BaseTestCase:
|
||||
"""This is the base class that should be used for all test classes
|
||||
that create classes that inherit from deluge.component.Component. It
|
||||
ensures that the component registry has been cleaned up when tests
|
||||
have finished.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_mkstemp(tmp_path):
|
||||
"""Return known tempfile location to verify file deleted"""
|
||||
tmp_file = tempfile.mkstemp(dir=tmp_path)
|
||||
with patch('tempfile.mkstemp', return_value=tmp_file):
|
||||
yield tmp_file
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(session, config, items) -> None:
|
||||
"""
|
||||
Automatically runs async tests with pytest_twisted.ensureDeferred
|
||||
"""
|
||||
function_items = (item for item in items if isinstance(item, pytest.Function))
|
||||
for function_item in function_items:
|
||||
function = function_item.obj
|
||||
if hasattr(function, '__func__'):
|
||||
# methods need to be unwrapped.
|
||||
function = function.__func__
|
||||
if asyncio.iscoroutinefunction(function):
|
||||
# This is how pytest_twisted marks ensureDeferred tests
|
||||
setattr(function, '_pytest_twisted_mark', 'async_test')
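With the collection hook above, plain `async def` tests are run through pytest_twisted automatically. A hedged sketch of a test using these fixtures (the RPC method name is an assumption, not taken from this diff)::

    async def test_daemon_version(daemon, client):
        # `daemon` starts a deluged process, `client` connects to it.
        version = await client.daemon.get_version()
        assert version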
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -15,11 +14,16 @@ This should typically only be used by the Core. Plugins should utilize the
|
|||
`:mod:EventManager` for similar functionality.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import logging
|
||||
import threading
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from functools import partial
|
||||
from typing import Any, Callable
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.internet import reactor, task, threads
|
||||
|
||||
import deluge.component as component
|
||||
from deluge._libtorrent import lt
|
||||
|
@ -30,68 +34,112 @@ log = logging.getLogger(__name__)
|
|||
|
||||
class AlertManager(component.Component):
|
||||
"""AlertManager fetches and processes libtorrent alerts"""
|
||||
|
||||
def __init__(self):
|
||||
log.debug('AlertManager init...')
|
||||
component.Component.__init__(self, 'AlertManager', interval=0.3)
|
||||
component.Component.__init__(self, 'AlertManager')
|
||||
self.session = component.get('Core').session
|
||||
|
||||
# Increase the alert queue size so that alerts don't get lost.
|
||||
self.alert_queue_size = 10000
|
||||
self.set_alert_queue_size(self.alert_queue_size)
|
||||
|
||||
alert_mask = (lt.alert.category_t.error_notification |
|
||||
lt.alert.category_t.port_mapping_notification |
|
||||
lt.alert.category_t.storage_notification |
|
||||
lt.alert.category_t.tracker_notification |
|
||||
lt.alert.category_t.status_notification |
|
||||
lt.alert.category_t.ip_block_notification |
|
||||
lt.alert.category_t.performance_warning)
|
||||
alert_mask = (
|
||||
lt.alert.category_t.error_notification
|
||||
| lt.alert.category_t.port_mapping_notification
|
||||
| lt.alert.category_t.storage_notification
|
||||
| lt.alert.category_t.tracker_notification
|
||||
| lt.alert.category_t.status_notification
|
||||
| lt.alert.category_t.ip_block_notification
|
||||
| lt.alert.category_t.performance_warning
|
||||
| lt.alert.category_t.file_progress_notification
|
||||
)
|
||||
|
||||
self.session.apply_settings({'alert_mask': alert_mask})
|
||||
|
||||
# handlers is a dictionary of lists {"alert_type": [handler1,h2,..]}
|
||||
self.handlers = {}
|
||||
self.handlers = defaultdict(list)
|
||||
self.handlers_timeout_secs = 2
|
||||
self.delayed_calls = []
|
||||
self._event = threading.Event()
|
||||
|
||||
def update(self):
|
||||
self.delayed_calls = [dc for dc in self.delayed_calls if dc.active()]
|
||||
self.handle_alerts()
|
||||
pass
|
||||
|
||||
def start(self):
|
||||
thread = threading.Thread(
|
||||
target=self.wait_for_alert_in_thread, name='alert-poller', daemon=True
|
||||
)
|
||||
thread.start()
|
||||
self._event.set()
|
||||
|
||||
def stop(self):
|
||||
self.cancel_delayed_calls()
|
||||
|
||||
def pause(self):
|
||||
self._event.clear()
|
||||
|
||||
def resume(self):
|
||||
self._event.set()
|
||||
|
||||
def wait_for_alert_in_thread(self):
|
||||
while self._component_state not in ('Stopping', 'Stopped'):
|
||||
if self.check_delayed_calls():
|
||||
time.sleep(0.05)
|
||||
continue
|
||||
|
||||
if self.session.wait_for_alert(1000) is None:
|
||||
continue
|
||||
if self._event.wait():
|
||||
threads.blockingCallFromThread(reactor, self.maybe_handle_alerts)
|
||||
|
||||
def on_delayed_call_timeout(self, result, timeout, **kwargs):
|
||||
log.warning('Alert handler was timed-out before being called %s', kwargs)
|
||||
|
||||
def cancel_delayed_calls(self):
|
||||
"""Cancel all delayed handlers."""
|
||||
for delayed_call in self.delayed_calls:
|
||||
if delayed_call.active():
|
||||
delayed_call.cancel()
|
||||
self.delayed_calls = []
|
||||
|
||||
def register_handler(self, alert_type, handler):
|
||||
def check_delayed_calls(self) -> bool:
|
||||
"""Returns True if any handler calls are delayed."""
|
||||
self.delayed_calls = [dc for dc in self.delayed_calls if not dc.called]
|
||||
return len(self.delayed_calls) > 0
|
||||
|
||||
def maybe_handle_alerts(self) -> None:
|
||||
if self._component_state != 'Started':
|
||||
return
|
||||
|
||||
self.handle_alerts()
|
||||
|
||||
def register_handler(self, alert_type: str, handler: Callable[[Any], None]) -> None:
|
||||
"""
|
||||
Registers a function that will be called when 'alert_type' is pop'd
|
||||
in handle_alerts. The handler function should look like: handler(alert)
|
||||
Where 'alert' is the actual alert object from libtorrent.
|
||||
|
||||
:param alert_type: str, this is string representation of the alert name
|
||||
:param handler: func(alert), the function to be called when the alert is raised
|
||||
Args:
|
||||
alert_type: String representation of the libtorrent alert name.
|
||||
Can be supplied with or without `_alert` suffix.
|
||||
handler: Callback function when the alert is raised.
|
||||
"""
|
||||
if alert_type not in self.handlers:
|
||||
# There is no entry for this alert type yet, so lets make it with an
|
||||
# empty list.
|
||||
self.handlers[alert_type] = []
|
||||
if alert_type and alert_type.endswith('_alert'):
|
||||
alert_type = alert_type[: -len('_alert')]
|
||||
|
||||
# Append the handler to the list in the handlers dictionary
|
||||
self.handlers[alert_type].append(handler)
|
||||
log.debug('Registered handler for alert %s', alert_type)
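A minimal sketch (assumed usage) of registering a handler with the API above; the trailing '_alert' suffix is optional because register_handler() strips it::

    def on_torrent_finished(alert):
        # `alert` is the libtorrent alert object delivered by handle_alerts().
        log.info('Torrent finished: %s', alert.message())

    component.get('AlertManager').register_handler(
        'torrent_finished_alert', on_torrent_finished
    )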
|
||||
|
||||
def deregister_handler(self, handler):
|
||||
def deregister_handler(self, handler: Callable[[Any], None]):
|
||||
"""
|
||||
De-registers the `:param:handler` function from all alert types.
|
||||
De-registers the `handler` function from all alert types.
|
||||
|
||||
:param handler: func, the handler function to deregister
|
||||
Args:
|
||||
handler: The handler function to deregister.
|
||||
"""
|
||||
# Iterate through all handlers and remove 'handler' where found
|
||||
for (dummy_key, value) in self.handlers.items():
|
||||
if handler in value:
|
||||
# Handler is in this alert type list
|
||||
value.remove(handler)
|
||||
for alert_type_handlers in self.handlers.values():
|
||||
with contextlib.suppress(ValueError):
|
||||
alert_type_handlers.remove(handler)
|
||||
|
||||
def handle_alerts(self):
|
||||
"""
|
||||
|
@ -105,23 +153,42 @@ class AlertManager(component.Component):
|
|||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('Alerts queued: %s', num_alerts)
|
||||
if num_alerts > 0.9 * self.alert_queue_size:
|
||||
log.warning('Warning total alerts queued, %s, passes 90%% of queue size.', num_alerts)
|
||||
log.warning(
|
||||
'Warning total alerts queued, %s, passes 90%% of queue size.',
|
||||
num_alerts,
|
||||
)
|
||||
|
||||
# Loop through all alerts in the queue
|
||||
for alert in alerts:
|
||||
alert_type = type(alert).__name__
|
||||
alert_type = alert.what()
|
||||
|
||||
# Display the alert message
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('%s: %s', alert_type, decode_bytes(alert.message()))
|
||||
|
||||
if alert_type not in self.handlers:
|
||||
continue
|
||||
|
||||
# Call any handlers for this alert type
|
||||
if alert_type in self.handlers:
|
||||
for handler in self.handlers[alert_type]:
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('Handling alert: %s', alert_type)
|
||||
self.delayed_calls.append(reactor.callLater(0, handler, alert))
|
||||
d = task.deferLater(reactor, 0, handler, alert)
|
||||
on_handler_timeout = partial(
|
||||
self.on_delayed_call_timeout,
|
||||
handler=handler.__qualname__,
|
||||
alert_type=alert_type,
|
||||
)
|
||||
d.addTimeout(
|
||||
self.handlers_timeout_secs,
|
||||
reactor,
|
||||
onTimeoutCancel=on_handler_timeout,
|
||||
)
|
||||
self.delayed_calls.append(d)
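A short sketch of the deferLater/addTimeout pattern introduced above, with a hypothetical handler and timeout value; the real code schedules the registered alert handlers and uses handlers_timeout_secs rather than the literal 5 seconds shown here.

from functools import partial

from twisted.internet import reactor, task


def slow_handler(alert):
    print('handling', alert)


def on_timeout(result, timeout, **kwargs):
    # Called in place of the normal result if the deferred is still pending at the deadline.
    print('handler timed out after', timeout, 'seconds:', kwargs)


d = task.deferLater(reactor, 0, slow_handler, 'example_alert')
d.addTimeout(5, reactor, onTimeoutCancel=partial(on_timeout, handler='slow_handler'))

reactor.callLater(1, reactor.stop)
reactor.run()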
|
||||
|
||||
def set_alert_queue_size(self, queue_size):
|
||||
"""Sets the maximum size of the libtorrent alert queue"""
|
||||
log.info('Alert Queue Size set to %s', queue_size)
|
||||
self.alert_queue_size = queue_size
|
||||
component.get('Core').apply_session_setting('alert_queue_size', self.alert_queue_size)
|
||||
component.get('Core').apply_session_setting(
|
||||
'alert_queue_size', self.alert_queue_size
|
||||
)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
|
||||
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
|
||||
|
@ -8,17 +7,20 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
from io import open
|
||||
|
||||
import deluge.component as component
|
||||
import deluge.configmanager as configmanager
|
||||
from deluge.common import (AUTH_LEVEL_ADMIN, AUTH_LEVEL_DEFAULT, AUTH_LEVEL_NONE, AUTH_LEVEL_NORMAL,
|
||||
AUTH_LEVEL_READONLY, create_localclient_account)
|
||||
from deluge.common import (
|
||||
AUTH_LEVEL_ADMIN,
|
||||
AUTH_LEVEL_DEFAULT,
|
||||
AUTH_LEVEL_NONE,
|
||||
AUTH_LEVEL_NORMAL,
|
||||
AUTH_LEVEL_READONLY,
|
||||
create_localclient_account,
|
||||
)
|
||||
from deluge.error import AuthenticationRequired, AuthManagerError, BadLoginError
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -26,13 +28,14 @@ log = logging.getLogger(__name__)
|
|||
AUTH_LEVELS_MAPPING = {
|
||||
'NONE': AUTH_LEVEL_NONE,
|
||||
'READONLY': AUTH_LEVEL_READONLY,
|
||||
'DEFAULT': AUTH_LEVEL_NORMAL,
|
||||
'NORMAL': AUTH_LEVEL_DEFAULT,
|
||||
'ADMIN': AUTH_LEVEL_ADMIN}
|
||||
'DEFAULT': AUTH_LEVEL_DEFAULT,
|
||||
'NORMAL': AUTH_LEVEL_NORMAL,
|
||||
'ADMIN': AUTH_LEVEL_ADMIN,
|
||||
}
|
||||
AUTH_LEVELS_MAPPING_REVERSE = {v: k for k, v in AUTH_LEVELS_MAPPING.items()}
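The hunk above aligns the DEFAULT and NORMAL entries with their matching constants. A small usage sketch of the two mappings, assuming a Deluge install so the module imports:

from deluge.core.authmanager import AUTH_LEVELS_MAPPING, AUTH_LEVELS_MAPPING_REVERSE

level = AUTH_LEVELS_MAPPING['READONLY']
print(level, AUTH_LEVELS_MAPPING_REVERSE[level])  # the name round-trips to 'READONLY'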
|
||||
|
||||
|
||||
class Account(object):
|
||||
class Account:
|
||||
__slots__ = ('username', 'password', 'authlevel')
|
||||
|
||||
def __init__(self, username, password, authlevel):
|
||||
|
@ -45,12 +48,14 @@ class Account(object):
|
|||
'username': self.username,
|
||||
'password': self.password,
|
||||
'authlevel': AUTH_LEVELS_MAPPING_REVERSE[self.authlevel],
|
||||
'authlevel_int': self.authlevel
|
||||
'authlevel_int': self.authlevel,
|
||||
}
|
||||
|
||||
def __repr__(self):
|
||||
return ('<Account username="%(username)s" authlevel=%(authlevel)s>' %
|
||||
{'username': self.username, 'authlevel': self.authlevel})
|
||||
return '<Account username="{username}" authlevel={authlevel}>'.format(
|
||||
username=self.username,
|
||||
authlevel=self.authlevel,
|
||||
)
|
||||
|
||||
|
||||
class AuthManager(component.Component):
|
||||
|
@ -92,7 +97,7 @@ class AuthManager(component.Component):
|
|||
int: The auth level for this user.
|
||||
|
||||
Raises:
|
||||
AuthenticationRequired: If aditional details are required to authenticate.
|
||||
AuthenticationRequired: If additional details are required to authenticate.
|
||||
BadLoginError: If the username does not exist or password does not match.
|
||||
|
||||
"""
|
||||
|
@ -129,8 +134,9 @@ class AuthManager(component.Component):
|
|||
if authlevel not in AUTH_LEVELS_MAPPING:
|
||||
raise AuthManagerError('Invalid auth level: %s' % authlevel)
|
||||
try:
|
||||
self.__auth[username] = Account(username, password,
|
||||
AUTH_LEVELS_MAPPING[authlevel])
|
||||
self.__auth[username] = Account(
|
||||
username, password, AUTH_LEVELS_MAPPING[authlevel]
|
||||
)
|
||||
self.write_auth_file()
|
||||
return True
|
||||
except Exception as ex:
|
||||
|
@ -174,18 +180,21 @@ class AuthManager(component.Component):
|
|||
if os.path.isfile(filepath):
|
||||
log.debug('Creating backup of %s at: %s', filename, filepath_bak)
|
||||
shutil.copy2(filepath, filepath_bak)
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
|
||||
else:
|
||||
log.info('Saving the %s at: %s', filename, filepath)
|
||||
try:
|
||||
with open(filepath_tmp, 'w', encoding='utf8') as _file:
|
||||
for account in self.__auth.values():
|
||||
_file.write('%(username)s:%(password)s:%(authlevel_int)s\n' % account.data())
|
||||
_file.write(
|
||||
'%(username)s:%(password)s:%(authlevel_int)s\n'
|
||||
% account.data()
|
||||
)
|
||||
_file.flush()
|
||||
os.fsync(_file.fileno())
|
||||
shutil.move(filepath_tmp, filepath)
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.error('Unable to save %s: %s', filename, ex)
|
||||
if os.path.isfile(filepath_bak):
|
||||
log.info('Restoring backup of %s from: %s', filename, filepath_bak)
|
||||
|
@ -214,9 +223,9 @@ class AuthManager(component.Component):
|
|||
for _filepath in (auth_file, auth_file_bak):
|
||||
log.info('Opening %s for load: %s', filename, _filepath)
|
||||
try:
|
||||
with open(_filepath, 'r', encoding='utf8') as _file:
|
||||
with open(_filepath, encoding='utf8') as _file:
|
||||
file_data = _file.readlines()
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.warning('Unable to load %s: %s', _filepath, ex)
|
||||
file_data = []
|
||||
else:
|
||||
|
@ -232,8 +241,12 @@ class AuthManager(component.Component):
|
|||
lsplit = line.split(':')
|
||||
if len(lsplit) == 2:
|
||||
username, password = lsplit
|
||||
log.warning('Your auth entry for %s contains no auth level, '
|
||||
'using AUTH_LEVEL_DEFAULT(%s)..', username, AUTH_LEVEL_DEFAULT)
|
||||
log.warning(
|
||||
'Your auth entry for %s contains no auth level, '
|
||||
'using AUTH_LEVEL_DEFAULT(%s)..',
|
||||
username,
|
||||
AUTH_LEVEL_DEFAULT,
|
||||
)
|
||||
if username == 'localclient':
|
||||
authlevel = AUTH_LEVEL_ADMIN
|
||||
else:
|
||||
|
@ -254,7 +267,10 @@ class AuthManager(component.Component):
|
|||
try:
|
||||
authlevel = AUTH_LEVELS_MAPPING[authlevel]
|
||||
except KeyError:
|
||||
log.error('Your auth file is malformed: %r is not a valid auth level', authlevel)
|
||||
log.error(
|
||||
'Your auth file is malformed: %r is not a valid auth level',
|
||||
authlevel,
|
||||
)
|
||||
continue
|
||||
|
||||
self.__auth[username] = Account(username, password, authlevel)
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -8,7 +7,6 @@
|
|||
#
|
||||
|
||||
"""The Deluge daemon"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
@ -44,8 +42,8 @@ def is_daemon_running(pid_file):
|
|||
|
||||
try:
|
||||
with open(pid_file) as _file:
|
||||
pid, port = [int(x) for x in _file.readline().strip().split(';')]
|
||||
except (EnvironmentError, ValueError):
|
||||
pid, port = (int(x) for x in _file.readline().strip().split(';'))
|
||||
except (OSError, ValueError):
|
||||
return False
|
||||
|
||||
if is_process_running(pid):
|
||||
|
@ -53,7 +51,7 @@ def is_daemon_running(pid_file):
|
|||
_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
try:
|
||||
_socket.connect(('127.0.0.1', port))
|
||||
except socket.error:
|
||||
except OSError:
|
||||
# Can't connect, so pid is not a deluged process.
|
||||
return False
|
||||
else:
|
||||
|
@ -62,43 +60,62 @@ def is_daemon_running(pid_file):
|
|||
return True
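The pid file read here, and written in Daemon.start() further down as f'{pid};{self.port}\n', holds a single 'pid;port' line. A round-trip sketch with a hypothetical path and port:

import os

pid_file = '/tmp/deluged.pid.example'  # hypothetical location for the sketch
with open(pid_file, 'w') as _file:
    _file.write(f'{os.getpid()};58846\n')

with open(pid_file) as _file:
    pid, port = (int(x) for x in _file.readline().strip().split(';'))
print(pid, port)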
|
||||
|
||||
|
||||
class Daemon(object):
|
||||
class Daemon:
|
||||
"""The Deluge Daemon class"""
|
||||
|
||||
def __init__(self, listen_interface=None, interface=None, port=None, standalone=False,
|
||||
read_only_config_keys=None):
|
||||
def __init__(
|
||||
self,
|
||||
listen_interface=None,
|
||||
outgoing_interface=None,
|
||||
interface=None,
|
||||
port=None,
|
||||
standalone=False,
|
||||
read_only_config_keys=None,
|
||||
):
|
||||
"""
|
||||
Args:
|
||||
listen_interface (str, optional): The IP address to listen to bittorrent connections on.
|
||||
interface (str, optional): The IP address the daemon will listen for UI connections on.
|
||||
port (int, optional): The port the daemon will listen for UI connections on.
|
||||
standalone (bool, optional): If True the client is in Standalone mode otherwise, if
|
||||
False, start the daemon as separate process.
|
||||
read_only_config_keys (list of str, optional): A list of config keys that will not be
|
||||
altered by core.set_config() RPC method.
|
||||
listen_interface (str, optional): The IP address to listen to
|
||||
BitTorrent connections on.
|
||||
outgoing_interface (str, optional): The network interface name or
|
||||
IP address to open outgoing BitTorrent connections on.
|
||||
interface (str, optional): The IP address the daemon will
|
||||
listen for UI connections on.
|
||||
port (int, optional): The port the daemon will listen for UI
|
||||
connections on.
|
||||
standalone (bool, optional): If True the client is in Standalone
|
||||
mode otherwise, if False, start the daemon as separate process.
|
||||
read_only_config_keys (list of str, optional): A list of config
|
||||
keys that will not be altered by core.set_config() RPC method.
|
||||
"""
|
||||
self.standalone = standalone
|
||||
self.pid_file = get_config_dir('deluged.pid')
|
||||
log.info('Deluge daemon %s', get_version())
|
||||
if is_daemon_running(self.pid_file):
|
||||
raise DaemonRunningError('Deluge daemon already running with this config directory!')
|
||||
raise DaemonRunningError(
|
||||
'Deluge daemon already running with this config directory!'
|
||||
)
|
||||
|
||||
# Twisted catches signals to terminate, so just have it call the shutdown method.
|
||||
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
|
||||
|
||||
# Catch some Windows specific signals
|
||||
if windows_check():
|
||||
|
||||
def win_handler(ctrl_type):
|
||||
"""Handle the Windows shutdown or close events."""
|
||||
log.debug('windows handler ctrl_type: %s', ctrl_type)
|
||||
if ctrl_type == CTRL_CLOSE_EVENT or ctrl_type == CTRL_SHUTDOWN_EVENT:
|
||||
self._shutdown()
|
||||
return 1
|
||||
|
||||
SetConsoleCtrlHandler(win_handler)
|
||||
|
||||
# Start the core as a thread and join it until it's done
|
||||
self.core = Core(listen_interface=listen_interface,
|
||||
read_only_config_keys=read_only_config_keys)
|
||||
self.core = Core(
|
||||
listen_interface=listen_interface,
|
||||
outgoing_interface=outgoing_interface,
|
||||
read_only_config_keys=read_only_config_keys,
|
||||
)
|
||||
|
||||
if port is None:
|
||||
port = self.core.config['daemon_port']
|
||||
|
@ -112,10 +129,16 @@ class Daemon(object):
|
|||
port=port,
|
||||
allow_remote=self.core.config['allow_remote'],
|
||||
listen=not standalone,
|
||||
interface=interface
|
||||
interface=interface,
|
||||
)
|
||||
|
||||
log.debug('Listening to UI on: %s:%s and bittorrent on: %s', interface, port, listen_interface)
|
||||
log.debug(
|
||||
'Listening to UI on: %s:%s and bittorrent on: %s Making connections out on: %s',
|
||||
interface,
|
||||
port,
|
||||
listen_interface,
|
||||
outgoing_interface,
|
||||
)
|
||||
|
||||
def start(self):
|
||||
# Register the daemon and the core RPCs
|
||||
|
@ -131,7 +154,7 @@ class Daemon(object):
|
|||
pid = os.getpid()
|
||||
log.debug('Storing pid %s & port %s in: %s', pid, self.port, self.pid_file)
|
||||
with open(self.pid_file, 'w') as _file:
|
||||
_file.write('%s;%s\n' % (pid, self.port))
|
||||
_file.write(f'{pid};{self.port}\n')
|
||||
|
||||
component.start()
|
||||
|
||||
|
@ -157,6 +180,11 @@ class Daemon(object):
|
|||
"""Returns a list of the exported methods."""
|
||||
return self.rpcserver.get_method_list()
|
||||
|
||||
@export()
|
||||
def get_version(self):
|
||||
"""Returns the daemon version"""
|
||||
return get_version()
|
||||
|
||||
@export(1)
|
||||
def authorized_call(self, rpc):
|
||||
"""Determines if session auth_level is authorized to call RPC.
|
||||
|
@ -170,4 +198,7 @@ class Daemon(object):
|
|||
if rpc not in self.get_method_list():
|
||||
return False
|
||||
|
||||
return self.rpcserver.get_session_auth_level() >= self.rpcserver.get_rpc_auth_level(rpc)
|
||||
return (
|
||||
self.rpcserver.get_session_auth_level()
|
||||
>= self.rpcserver.get_rpc_auth_level(rpc)
|
||||
)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
|
||||
# Copyright (C) 2010 Pedro Algarvio <pedro@algarvio.me>
|
||||
|
@ -7,30 +6,61 @@
|
|||
# the additional special exception to link portions of this program with the OpenSSL library.
|
||||
# See LICENSE for more details.
|
||||
#
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
from logging import DEBUG, FileHandler, getLogger
|
||||
|
||||
from twisted.internet.error import CannotListenError
|
||||
|
||||
from deluge.argparserbase import ArgParserBase
|
||||
from deluge.common import run_profiled
|
||||
from deluge.configmanager import get_config_dir
|
||||
from deluge.ui.baseargparser import BaseArgParser
|
||||
from deluge.ui.translations_util import set_dummy_trans
|
||||
from deluge.i18n import setup_mock_translation
|
||||
|
||||
|
||||
def add_daemon_options(parser):
|
||||
group = parser.add_argument_group(_('Daemon Options'))
|
||||
group.add_argument('-u', '--ui-interface', metavar='<ip-addr>', action='store',
|
||||
help=_('IP address to listen for UI connections'))
|
||||
group.add_argument('-p', '--port', metavar='<port>', action='store', type=int,
|
||||
help=_('Port to listen for UI connections on'))
|
||||
group.add_argument('-i', '--interface', metavar='<ip-addr>', dest='listen_interface', action='store',
|
||||
help=_('IP address to listen for BitTorrent connections'))
|
||||
group.add_argument('--read-only-config-keys', metavar='<comma-separated-keys>', action='store',
|
||||
help=_('Config keys to be unmodified by `set_config` RPC'), type=str, default='')
|
||||
group.add_argument(
|
||||
'-u',
|
||||
'--ui-interface',
|
||||
metavar='<ip-addr>',
|
||||
action='store',
|
||||
help=_('IP address to listen for UI connections'),
|
||||
)
|
||||
group.add_argument(
|
||||
'-p',
|
||||
'--port',
|
||||
metavar='<port>',
|
||||
action='store',
|
||||
type=int,
|
||||
help=_('Port to listen for UI connections on'),
|
||||
)
|
||||
group.add_argument(
|
||||
'-i',
|
||||
'--interface',
|
||||
metavar='<ip-addr>',
|
||||
dest='listen_interface',
|
||||
action='store',
|
||||
help=_('IP address to listen for BitTorrent connections'),
|
||||
)
|
||||
group.add_argument(
|
||||
'-o',
|
||||
'--outgoing-interface',
|
||||
metavar='<interface>',
|
||||
dest='outgoing_interface',
|
||||
action='store',
|
||||
help=_(
|
||||
'The network interface name or IP address for outgoing BitTorrent connections.'
|
||||
),
|
||||
)
|
||||
group.add_argument(
|
||||
'--read-only-config-keys',
|
||||
metavar='<comma-separated-keys>',
|
||||
action='store',
|
||||
help=_('Config keys to be unmodified by `set_config` RPC'),
|
||||
type=str,
|
||||
default='',
|
||||
)
|
||||
parser.add_process_arg_group()
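A standalone sketch of the reformatted option definitions above, using plain argparse as a stand-in for deluge.argparserbase.ArgParserBase and dropping the gettext wrapper:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_argument_group('Daemon Options')
group.add_argument('-u', '--ui-interface', metavar='<ip-addr>')
group.add_argument('-p', '--port', metavar='<port>', type=int)
group.add_argument('-i', '--interface', metavar='<ip-addr>', dest='listen_interface')
group.add_argument(
    '-o',
    '--outgoing-interface',
    metavar='<interface>',
    dest='outgoing_interface',
    help='The network interface name or IP address for outgoing BitTorrent connections.',
)
group.add_argument('--read-only-config-keys', metavar='<comma-separated-keys>', default='')

options = parser.parse_args(['-o', 'eth0', '-p', '58846'])
print(options.outgoing_interface, options.port)  # eth0 58846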
|
||||
|
||||
|
||||
|
@ -45,20 +75,23 @@ def start_daemon(skip_start=False):
|
|||
deluge.core.daemon.Daemon: A new daemon object
|
||||
|
||||
"""
|
||||
set_dummy_trans(warn_msg=True)
|
||||
setup_mock_translation()
|
||||
|
||||
# Setup the argument parser
|
||||
parser = BaseArgParser()
|
||||
parser = ArgParserBase()
|
||||
add_daemon_options(parser)
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
# Check for any daemons running with this same config
|
||||
from deluge.core.daemon import is_daemon_running
|
||||
|
||||
pid_file = get_config_dir('deluged.pid')
|
||||
if is_daemon_running(pid_file):
|
||||
print('Cannot run multiple daemons with same config directory.\n'
|
||||
'If you believe this is an error, force starting by deleting: %s' % pid_file)
|
||||
print(
|
||||
'Cannot run multiple daemons with same config directory.\n'
|
||||
'If you believe this is an error, force starting by deleting: %s' % pid_file
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
log = getLogger(__name__)
|
||||
|
@ -72,18 +105,25 @@ def start_daemon(skip_start=False):
|
|||
def run_daemon(options):
|
||||
try:
|
||||
from deluge.core.daemon import Daemon
|
||||
daemon = Daemon(listen_interface=options.listen_interface,
|
||||
|
||||
daemon = Daemon(
|
||||
listen_interface=options.listen_interface,
|
||||
outgoing_interface=options.outgoing_interface,
|
||||
interface=options.ui_interface,
|
||||
port=options.port,
|
||||
read_only_config_keys=options.read_only_config_keys.split(','))
|
||||
read_only_config_keys=options.read_only_config_keys.split(','),
|
||||
)
|
||||
if skip_start:
|
||||
return daemon
|
||||
else:
|
||||
daemon.start()
|
||||
except CannotListenError as ex:
|
||||
log.error('Cannot start deluged, listen port in use.\n'
|
||||
log.error(
|
||||
'Cannot start deluged, listen port in use.\n'
|
||||
' Check for other running daemons or services using this port: %s:%s',
|
||||
ex.interface, ex.port)
|
||||
ex.interface,
|
||||
ex.port,
|
||||
)
|
||||
sys.exit(1)
|
||||
except Exception as ex:
|
||||
log.error('Unable to start deluged: %s', ex)
|
||||
|
@ -95,4 +135,6 @@ def start_daemon(skip_start=False):
|
|||
if options.pidfile:
|
||||
os.remove(options.pidfile)
|
||||
|
||||
return run_profiled(run_daemon, options, output_file=options.profile, do_profile=options.profile)
|
||||
return run_profiled(
|
||||
run_daemon, options, output_file=options.profile, do_profile=options.profile
|
||||
)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -7,8 +6,6 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
import deluge.component as component
|
||||
|
@ -36,7 +33,12 @@ class EventManager(component.Component):
|
|||
try:
|
||||
handler(*event.args)
|
||||
except Exception as ex:
|
||||
log.error('Event handler %s failed in %s with exception %s', event.name, handler, ex)
|
||||
log.error(
|
||||
'Event handler %s failed in %s with exception %s',
|
||||
event.name,
|
||||
handler,
|
||||
ex,
|
||||
)
|
||||
|
||||
def register_event_handler(self, event, handler):
|
||||
"""
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
|
||||
#
|
||||
|
@ -7,12 +6,10 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
import deluge.component as component
|
||||
from deluge.common import PY2, TORRENT_STATE
|
||||
from deluge.common import TORRENT_STATE
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -98,9 +95,8 @@ def tracker_error_filter(torrent_ids, values):
|
|||
|
||||
|
||||
class FilterManager(component.Component):
|
||||
"""FilterManager
|
||||
"""FilterManager"""
|
||||
|
||||
"""
|
||||
def __init__(self, core):
|
||||
component.Component.__init__(self, 'FilterManager')
|
||||
log.debug('FilterManager init..')
|
||||
|
@ -115,12 +111,14 @@ class FilterManager(component.Component):
|
|||
|
||||
def _init_tracker_tree():
|
||||
return {'Error': 0}
|
||||
|
||||
self.register_tree_field('tracker_host', _init_tracker_tree)
|
||||
|
||||
self.register_filter('tracker_host', tracker_error_filter)
|
||||
|
||||
def _init_users_tree():
|
||||
return {'': 0}
|
||||
|
||||
self.register_tree_field('owner', _init_users_tree)
|
||||
|
||||
def filter_torrent_ids(self, filter_dict):
|
||||
|
@ -133,7 +131,7 @@ class FilterManager(component.Component):
|
|||
|
||||
# Sanitize input: filter-value must be a list of strings
|
||||
for key, value in filter_dict.items():
|
||||
if isinstance(value, str if not PY2 else basestring):
|
||||
if isinstance(value, str):
|
||||
filter_dict[key] = [value]
|
||||
|
||||
# Optimized filter for id
|
||||
|
@ -162,19 +160,25 @@ class FilterManager(component.Component):
|
|||
return torrent_ids
|
||||
|
||||
# Registered filters
|
||||
for field, values in filter_dict.items():
|
||||
for field, values in list(filter_dict.items()):
|
||||
if field in self.registered_filters:
|
||||
# Filters out doubles
|
||||
torrent_ids = list(set(self.registered_filters[field](torrent_ids, values)))
|
||||
torrent_ids = list(
|
||||
set(self.registered_filters[field](torrent_ids, values))
|
||||
)
|
||||
del filter_dict[field]
|
||||
|
||||
if not filter_dict:
|
||||
return torrent_ids
|
||||
|
||||
torrent_keys, plugin_keys = self.torrents.separate_keys(list(filter_dict), torrent_ids)
|
||||
torrent_keys, plugin_keys = self.torrents.separate_keys(
|
||||
list(filter_dict), torrent_ids
|
||||
)
|
||||
# Leftover filter arguments, default filter on status fields.
|
||||
for torrent_id in list(torrent_ids):
|
||||
status = self.core.create_torrent_status(torrent_id, torrent_keys, plugin_keys)
|
||||
status = self.core.create_torrent_status(
|
||||
torrent_id, torrent_keys, plugin_keys
|
||||
)
|
||||
for field, values in filter_dict.items():
|
||||
if field in status and status[field] in values:
|
||||
continue
|
||||
|
@ -194,17 +198,21 @@ class FilterManager(component.Component):
|
|||
tree_keys.remove(cat)
|
||||
|
||||
torrent_keys, plugin_keys = self.torrents.separate_keys(tree_keys, torrent_ids)
|
||||
items = dict((field, self.tree_fields[field]()) for field in tree_keys)
|
||||
items = {field: self.tree_fields[field]() for field in tree_keys}
|
||||
|
||||
for torrent_id in list(torrent_ids):
|
||||
status = self.core.create_torrent_status(torrent_id, torrent_keys, plugin_keys) # status={key:value}
|
||||
status = self.core.create_torrent_status(
|
||||
torrent_id, torrent_keys, plugin_keys
|
||||
) # status={key:value}
|
||||
for field in tree_keys:
|
||||
value = status[field]
|
||||
items[field][value] = items[field].get(value, 0) + 1
|
||||
|
||||
if 'tracker_host' in items:
|
||||
items['tracker_host']['All'] = len(torrent_ids)
|
||||
items['tracker_host']['Error'] = len(tracker_error_filter(torrent_ids, ('Error',)))
|
||||
items['tracker_host']['Error'] = len(
|
||||
tracker_error_filter(torrent_ids, ('Error',))
|
||||
)
|
||||
|
||||
if not show_zero_hits:
|
||||
for cat in ['state', 'owner', 'tracker_host']:
|
||||
|
@ -215,7 +223,7 @@ class FilterManager(component.Component):
|
|||
sorted_items = {field: sorted(items[field].items()) for field in tree_keys}
|
||||
|
||||
if 'state' in tree_keys:
|
||||
sorted_items['state'].sort(self._sort_state_items)
|
||||
sorted_items['state'].sort(key=self._sort_state_item)
|
||||
|
||||
return sorted_items
|
||||
|
||||
|
@ -224,7 +232,9 @@ class FilterManager(component.Component):
|
|||
init_state['All'] = len(self.torrents.get_torrent_list())
|
||||
for state in TORRENT_STATE:
|
||||
init_state[state] = 0
|
||||
init_state['Active'] = len(self.filter_state_active(self.torrents.get_torrent_list()))
|
||||
init_state['Active'] = len(
|
||||
self.filter_state_active(self.torrents.get_torrent_list())
|
||||
)
|
||||
return init_state
|
||||
|
||||
def register_filter(self, filter_id, filter_func, filter_value=None):
|
||||
|
@ -242,7 +252,9 @@ class FilterManager(component.Component):
|
|||
|
||||
def filter_state_active(self, torrent_ids):
|
||||
for torrent_id in list(torrent_ids):
|
||||
status = self.torrents[torrent_id].get_status(['download_payload_rate', 'upload_payload_rate'])
|
||||
status = self.torrents[torrent_id].get_status(
|
||||
['download_payload_rate', 'upload_payload_rate']
|
||||
)
|
||||
if status['download_payload_rate'] or status['upload_payload_rate']:
|
||||
pass
|
||||
else:
|
||||
|
@ -251,18 +263,12 @@ class FilterManager(component.Component):
|
|||
|
||||
def _hide_state_items(self, state_items):
|
||||
"""For hide(show)-zero hits"""
|
||||
for (value, count) in state_items.items():
|
||||
for value, count in list(state_items.items()):
|
||||
if value != 'All' and count == 0:
|
||||
del state_items[value]
|
||||
|
||||
def _sort_state_items(self, x, y):
|
||||
if x[0] in STATE_SORT:
|
||||
ix = STATE_SORT.index(x[0])
|
||||
else:
|
||||
ix = 99
|
||||
if y[0] in STATE_SORT:
|
||||
iy = STATE_SORT.index(y[0])
|
||||
else:
|
||||
iy = 99
|
||||
|
||||
return ix - iy
|
||||
def _sort_state_item(self, item):
|
||||
try:
|
||||
return STATE_SORT.index(item[0])
|
||||
except ValueError:
|
||||
return 99
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -9,7 +8,6 @@
|
|||
|
||||
|
||||
"""PluginManager for Core"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
|
@ -33,7 +31,8 @@ class PluginManager(deluge.pluginmanagerbase.PluginManagerBase, component.Compon
|
|||
|
||||
# Call the PluginManagerBase constructor
|
||||
deluge.pluginmanagerbase.PluginManagerBase.__init__(
|
||||
self, 'core.conf', 'deluge.plugin.core')
|
||||
self, 'core.conf', 'deluge.plugin.core'
|
||||
)
|
||||
|
||||
def start(self):
|
||||
# Enable plugins that are enabled in the config
|
||||
|
@ -76,6 +75,7 @@ class PluginManager(deluge.pluginmanagerbase.PluginManagerBase, component.Compon
|
|||
if name not in self.plugins:
|
||||
component.get('EventManager').emit(PluginDisabledEvent(name))
|
||||
return result
|
||||
|
||||
d.addBoth(on_disable_plugin)
|
||||
return d
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008-2010 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -8,13 +7,13 @@
|
|||
#
|
||||
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import random
|
||||
import threading
|
||||
from urllib.parse import quote_plus
|
||||
from urllib.request import urlopen
|
||||
|
||||
from twisted.internet.task import LoopingCall
|
||||
|
||||
|
@ -24,17 +23,14 @@ import deluge.configmanager
|
|||
from deluge._libtorrent import lt
|
||||
from deluge.event import ConfigValueChangedEvent
|
||||
|
||||
GeoIP = None
|
||||
try:
|
||||
import GeoIP
|
||||
from GeoIP import GeoIP
|
||||
except ImportError:
|
||||
GeoIP = None
|
||||
|
||||
try:
|
||||
from urllib.parse import quote_plus
|
||||
from urllib.request import urlopen
|
||||
except ImportError:
|
||||
from urllib import quote_plus
|
||||
from urllib2 import urlopen
|
||||
try:
|
||||
from pygeoip import GeoIP
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -47,6 +43,7 @@ DEFAULT_PREFS = {
|
|||
'download_location': deluge.common.get_default_download_dir(),
|
||||
'listen_ports': [6881, 6891],
|
||||
'listen_interface': '',
|
||||
'outgoing_interface': '',
|
||||
'random_port': True,
|
||||
'listen_random_port': None,
|
||||
'listen_use_sys_port': False,
|
||||
|
@ -71,8 +68,11 @@ DEFAULT_PREFS = {
|
|||
'max_upload_speed': -1.0,
|
||||
'max_download_speed': -1.0,
|
||||
'max_upload_slots_global': 4,
|
||||
'max_half_open_connections': (lambda: deluge.common.windows_check() and
|
||||
(lambda: deluge.common.vista_check() and 4 or 8)() or 50)(),
|
||||
'max_half_open_connections': (
|
||||
lambda: deluge.common.windows_check()
|
||||
and (lambda: deluge.common.vista_check() and 4 or 8)()
|
||||
or 50
|
||||
)(),
|
||||
'max_connections_per_second': 20,
|
||||
'ignore_limits_on_local_network': True,
|
||||
'max_connections_per_torrent': -1,
|
||||
|
@ -122,7 +122,7 @@ DEFAULT_PREFS = {
|
|||
'cache_expiry': 60,
|
||||
'auto_manage_prefer_seeds': False,
|
||||
'shared': False,
|
||||
'super_seeding': False
|
||||
'super_seeding': False,
|
||||
}
|
||||
|
||||
|
||||
|
@ -131,7 +131,9 @@ class PreferencesManager(component.Component):
|
|||
component.Component.__init__(self, 'PreferencesManager')
|
||||
self.config = deluge.configmanager.ConfigManager('core.conf', DEFAULT_PREFS)
|
||||
if 'proxies' in self.config:
|
||||
log.warning('Updating config file for proxy, using "peer" values to fill new "proxy" setting')
|
||||
log.warning(
|
||||
'Updating config file for proxy, using "peer" values to fill new "proxy" setting'
|
||||
)
|
||||
self.config['proxy'].update(self.config['proxies']['peer'])
|
||||
log.warning('New proxy config is: %s', self.config['proxy'])
|
||||
del self.config['proxies']
|
||||
|
@ -187,28 +189,50 @@ class PreferencesManager(component.Component):
|
|||
def _on_set_listen_interface(self, key, value):
|
||||
self.__set_listen_on()
|
||||
|
||||
def _on_set_outgoing_interface(self, key, value):
|
||||
"""Set interface name or IP address for outgoing BitTorrent connections."""
|
||||
value = value.strip() if value else ''
|
||||
self.core.apply_session_settings({'outgoing_interfaces': value})
|
||||
|
||||
def _on_set_random_port(self, key, value):
|
||||
self.__set_listen_on()
|
||||
|
||||
def __set_listen_on(self):
|
||||
""" Set the ports and interface address to listen for incoming connections on."""
|
||||
"""Set the ports and interface address to listen for incoming connections on."""
|
||||
if self.config['random_port']:
|
||||
if not self.config['listen_random_port']:
|
||||
if (
|
||||
not self.config['listen_reuse_port']
|
||||
or not self.config['listen_random_port']
|
||||
):
|
||||
self.config['listen_random_port'] = random.randrange(49152, 65525)
|
||||
listen_ports = [self.config['listen_random_port']] * 2 # use single port range
|
||||
listen_ports = [
|
||||
self.config['listen_random_port']
|
||||
] * 2 # use single port range
|
||||
else:
|
||||
self.config['listen_random_port'] = None
|
||||
listen_ports = self.config['listen_ports']
|
||||
|
||||
interface = str(self.config['listen_interface'].strip())
|
||||
interface = interface if interface else '0.0.0.0'
|
||||
if self.config['listen_interface']:
|
||||
interface = self.config['listen_interface'].strip()
|
||||
else:
|
||||
interface = '0.0.0.0'
|
||||
|
||||
log.debug('Listen Interface: %s, Ports: %s with use_sys_port: %s',
|
||||
interface, listen_ports, self.config['listen_use_sys_port'])
|
||||
interfaces = ['%s:%s' % (interface, port) for port in range(listen_ports[0], listen_ports[1]+1)]
|
||||
log.debug(
|
||||
'Listen Interface: %s, Ports: %s with use_sys_port: %s',
|
||||
interface,
|
||||
listen_ports,
|
||||
self.config['listen_use_sys_port'],
|
||||
)
|
||||
interfaces = [
|
||||
f'{interface}:{port}'
|
||||
for port in range(listen_ports[0], listen_ports[1] + 1)
|
||||
]
|
||||
self.core.apply_session_settings(
|
||||
{'listen_system_port_fallback': self.config['listen_use_sys_port'],
|
||||
'listen_interfaces': ''.join(interfaces)})
|
||||
{
|
||||
'listen_system_port_fallback': self.config['listen_use_sys_port'],
|
||||
'listen_interfaces': ','.join(interfaces),
|
||||
}
|
||||
)
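The join separator change matters: libtorrent's listen_interfaces setting expects a comma-separated list of address:port pairs. A sketch of the string being built, using the default 6881-6891 range from DEFAULT_PREFS:

interface = '0.0.0.0'
listen_ports = [6881, 6891]
interfaces = [
    f'{interface}:{port}' for port in range(listen_ports[0], listen_ports[1] + 1)
]
print(','.join(interfaces))  # 0.0.0.0:6881,0.0.0.0:6882,...,0.0.0.0:6891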
|
||||
|
||||
def _on_set_outgoing_ports(self, key, value):
|
||||
self.__set_outgoing_ports()
|
||||
|
@ -217,14 +241,22 @@ class PreferencesManager(component.Component):
|
|||
self.__set_outgoing_ports()
|
||||
|
||||
def __set_outgoing_ports(self):
|
||||
port = 0 if self.config['random_outgoing_ports'] else self.config['outgoing_ports'][0]
|
||||
port = (
|
||||
0
|
||||
if self.config['random_outgoing_ports']
|
||||
else self.config['outgoing_ports'][0]
|
||||
)
|
||||
if port:
|
||||
num_ports = self.config['outgoing_ports'][1] - self.config['outgoing_ports'][0]
|
||||
num_ports = (
|
||||
self.config['outgoing_ports'][1] - self.config['outgoing_ports'][0]
|
||||
)
|
||||
num_ports = num_ports if num_ports > 1 else 5
|
||||
else:
|
||||
num_ports = 0
|
||||
log.debug('Outgoing port set to %s with range: %s', port, num_ports)
|
||||
self.core.apply_session_settings({'outgoing_port': port, 'num_outgoing_ports': num_ports})
|
||||
self.core.apply_session_settings(
|
||||
{'outgoing_port': port, 'num_outgoing_ports': num_ports}
|
||||
)
|
||||
|
||||
def _on_set_peer_tos(self, key, value):
|
||||
try:
|
||||
|
@ -233,8 +265,21 @@ class PreferencesManager(component.Component):
|
|||
log.error('Invalid tos byte: %s', ex)
|
||||
|
||||
def _on_set_dht(self, key, value):
|
||||
dht_bootstraps = 'router.bittorrent.com:6881,router.utorrent.com:6881,router.bitcomet.com:6881'
|
||||
self.core.apply_session_settings({'dht_bootstrap_nodes': dht_bootstraps, 'enable_dht': value})
|
||||
lt_bootstraps = self.core.session.get_settings()['dht_bootstrap_nodes']
|
||||
# Update list of lt bootstraps, using set to remove duplicates.
|
||||
dht_bootstraps = set(
|
||||
lt_bootstraps.split(',')
|
||||
+ [
|
||||
'router.bittorrent.com:6881',
|
||||
'router.utorrent.com:6881',
|
||||
'router.bitcomet.com:6881',
|
||||
'dht.transmissionbt.com:6881',
|
||||
'dht.aelitis.com:6881',
|
||||
]
|
||||
)
|
||||
self.core.apply_session_settings(
|
||||
{'dht_bootstrap_nodes': ','.join(dht_bootstraps), 'enable_dht': value}
|
||||
)
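A sketch of the set-based merge of bootstrap nodes shown above, with an example value standing in for the session's current dht_bootstrap_nodes setting; note the real code joins the set directly, so ordering is not preserved.

lt_bootstraps = 'router.bittorrent.com:6881,dht.transmissionbt.com:6881'  # example existing value
extra_nodes = [
    'router.bittorrent.com:6881',
    'router.utorrent.com:6881',
    'router.bitcomet.com:6881',
    'dht.transmissionbt.com:6881',
    'dht.aelitis.com:6881',
]
dht_bootstraps = set(lt_bootstraps.split(',') + extra_nodes)
print(','.join(sorted(dht_bootstraps)))  # duplicates removed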
|
||||
|
||||
def _on_set_upnp(self, key, value):
|
||||
self.core.apply_session_setting('enable_upnp', value)
|
||||
|
@ -260,12 +305,21 @@ class PreferencesManager(component.Component):
|
|||
|
||||
def _on_set_encryption(self, key, value):
|
||||
# Convert Deluge enc_level values to libtorrent enc_level values.
|
||||
pe_enc_level = {0: lt.enc_level.plaintext, 1: lt.enc_level.rc4, 2: lt.enc_level.both}
|
||||
pe_enc_level = {
|
||||
0: lt.enc_level.plaintext,
|
||||
1: lt.enc_level.rc4,
|
||||
2: lt.enc_level.both,
|
||||
}
|
||||
self.core.apply_session_settings(
|
||||
{'out_enc_policy': lt.enc_policy(self.config['enc_out_policy']),
|
||||
{
|
||||
'out_enc_policy': lt.enc_policy(self.config['enc_out_policy']),
|
||||
'in_enc_policy': lt.enc_policy(self.config['enc_in_policy']),
|
||||
'allowed_enc_level': lt.enc_level(pe_enc_level[self.config['enc_level']]),
|
||||
'prefer_rc4': True})
|
||||
'allowed_enc_level': lt.enc_level(
|
||||
pe_enc_level[self.config['enc_level']]
|
||||
),
|
||||
'prefer_rc4': True,
|
||||
}
|
||||
)
|
||||
|
||||
def _on_set_max_connections_global(self, key, value):
|
||||
self.core.apply_session_setting('connections_limit', value)
|
||||
|
@ -327,20 +381,29 @@ class PreferencesManager(component.Component):
|
|||
|
||||
def run(self):
|
||||
import time
|
||||
|
||||
now = time.time()
|
||||
# check if we've done this within the last week or never
|
||||
if (now - self.config['info_sent']) >= (60 * 60 * 24 * 7):
|
||||
try:
|
||||
url = 'http://deluge-torrent.org/stats_get.php?processor=' + \
|
||||
platform.machine() + '&python=' + platform.python_version() \
|
||||
+ '&deluge=' + deluge.common.get_version() \
|
||||
+ '&os=' + platform.system() \
|
||||
+ '&plugins=' + quote_plus(':'.join(self.config['enabled_plugins']))
|
||||
url = (
|
||||
'http://deluge-torrent.org/stats_get.php?processor='
|
||||
+ platform.machine()
|
||||
+ '&python='
|
||||
+ platform.python_version()
|
||||
+ '&deluge='
|
||||
+ deluge.common.get_version()
|
||||
+ '&os='
|
||||
+ platform.system()
|
||||
+ '&plugins='
|
||||
+ quote_plus(':'.join(self.config['enabled_plugins']))
|
||||
)
|
||||
urlopen(url)
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.debug('Network error while trying to send info: %s', ex)
|
||||
else:
|
||||
self.config['info_sent'] = now
|
||||
|
||||
if value:
|
||||
SendInfoThread(self.config).start()
|
||||
|
||||
|
@ -352,7 +415,8 @@ class PreferencesManager(component.Component):
|
|||
self.new_release_timer.stop()
|
||||
# Set a timer to check for a new release every 3 days
|
||||
self.new_release_timer = LoopingCall(
|
||||
self._on_set_new_release_check, 'new_release_check', True)
|
||||
self._on_set_new_release_check, 'new_release_check', True
|
||||
)
|
||||
self.new_release_timer.start(72 * 60 * 60, False)
|
||||
else:
|
||||
if self.new_release_timer and self.new_release_timer.running:
|
||||
|
@ -361,31 +425,34 @@ class PreferencesManager(component.Component):
|
|||
def _on_set_proxy(self, key, value):
|
||||
# Initialise with type none and blank hostnames.
|
||||
proxy_settings = {
|
||||
'proxy_type': lt.proxy_type.none,
|
||||
'proxy_type': lt.proxy_type_t.none,
|
||||
'i2p_hostname': '',
|
||||
'proxy_hostname': '',
|
||||
'proxy_hostnames': value['proxy_hostnames'],
|
||||
'proxy_peer_connections': value['proxy_peer_connections'],
|
||||
'proxy_tracker_connections': value['proxy_tracker_connections'],
|
||||
'force_proxy': value['force_proxy'],
|
||||
'anonymous_mode': value['anonymous_mode']
|
||||
'anonymous_mode': value['anonymous_mode'],
|
||||
}
|
||||
|
||||
if value['type'] == lt.proxy_type.i2p_proxy:
|
||||
proxy_settings.update({
|
||||
'proxy_type': lt.proxy_type.i2p_proxy,
|
||||
if value['type'] == lt.proxy_type_t.i2p_proxy:
|
||||
proxy_settings.update(
|
||||
{
|
||||
'proxy_type': lt.proxy_type_t.i2p_proxy,
|
||||
'i2p_hostname': value['hostname'],
|
||||
'i2p_port': value['port'],
|
||||
})
|
||||
elif value['type'] != lt.proxy_type.none:
|
||||
proxy_settings.update({
|
||||
}
|
||||
)
|
||||
elif value['type'] != lt.proxy_type_t.none:
|
||||
proxy_settings.update(
|
||||
{
|
||||
'proxy_type': value['type'],
|
||||
'proxy_hostname': value['hostname'],
|
||||
'proxy_port': value['port'],
|
||||
'proxy_username': value['username'],
|
||||
'proxy_password': value['password'],
|
||||
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
self.core.apply_session_settings(proxy_settings)
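A sketch of the same settings keys applied to a raw libtorrent session for a SOCKS5 proxy, with made-up host and port; Deluge routes the dict through Core.apply_session_settings instead of touching the session directly.

import libtorrent as lt

session = lt.session()
session.apply_settings(
    {
        'proxy_type': lt.proxy_type_t.socks5,
        'proxy_hostname': '127.0.0.1',  # made-up proxy host
        'proxy_port': 1080,  # made-up proxy port
        'proxy_hostnames': True,
        'proxy_peer_connections': True,
    }
)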
|
||||
|
||||
|
@ -396,9 +463,9 @@ class PreferencesManager(component.Component):
|
|||
# Load the GeoIP DB for country look-ups if available
|
||||
if os.path.exists(geoipdb_path):
|
||||
try:
|
||||
self.core.geoip_instance = GeoIP.open(geoipdb_path, GeoIP.GEOIP_STANDARD)
|
||||
except AttributeError:
|
||||
log.warning('GeoIP Unavailable')
|
||||
self.core.geoip_instance = GeoIP(geoipdb_path, 0)
|
||||
except Exception as ex:
|
||||
log.warning('GeoIP Unavailable: %s', ex)
|
||||
else:
|
||||
log.warning('Unable to find GeoIP database file: %s', geoipdb_path)
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008,2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -8,24 +7,34 @@
|
|||
#
|
||||
|
||||
"""RPCServer Module"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
import traceback
|
||||
from collections import namedtuple
|
||||
from types import FunctionType
|
||||
from typing import Callable, TypeVar, overload
|
||||
|
||||
from OpenSSL import SSL, crypto
|
||||
from twisted.internet import defer, reactor
|
||||
from twisted.internet.protocol import Factory, connectionDone
|
||||
|
||||
import deluge.component as component
|
||||
import deluge.configmanager
|
||||
from deluge.core.authmanager import AUTH_LEVEL_ADMIN, AUTH_LEVEL_DEFAULT, AUTH_LEVEL_NONE
|
||||
from deluge.error import DelugeError, IncompatibleClient, NotAuthorizedError, WrappedException, _ClientSideRecreateError
|
||||
from deluge.core.authmanager import (
|
||||
AUTH_LEVEL_ADMIN,
|
||||
AUTH_LEVEL_DEFAULT,
|
||||
AUTH_LEVEL_NONE,
|
||||
)
|
||||
from deluge.crypto_utils import check_ssl_keys, get_context_factory
|
||||
from deluge.error import (
|
||||
BadLoginError,
|
||||
DelugeError,
|
||||
IncompatibleClient,
|
||||
NotAuthorizedError,
|
||||
WrappedException,
|
||||
_ClientSideRecreateError,
|
||||
)
|
||||
from deluge.event import ClientDisconnectedEvent
|
||||
from deluge.transfer import DelugeTransferProtocol
|
||||
|
||||
|
@ -35,6 +44,16 @@ RPC_EVENT = 3
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
TCallable = TypeVar('TCallable', bound=Callable)
|
||||
|
||||
|
||||
@overload
|
||||
def export(func: TCallable) -> TCallable: ...
|
||||
|
||||
|
||||
@overload
|
||||
def export(auth_level: int) -> Callable[[TCallable], TCallable]: ...
|
||||
|
||||
|
||||
def export(auth_level=AUTH_LEVEL_DEFAULT):
|
||||
"""
|
||||
|
@ -47,13 +66,23 @@ def export(auth_level=AUTH_LEVEL_DEFAULT):
|
|||
:type auth_level: int
|
||||
|
||||
"""
|
||||
|
||||
def wrap(func, *args, **kwargs):
|
||||
func._rpcserver_export = True
|
||||
func._rpcserver_auth_level = auth_level
|
||||
doc = func.__doc__
|
||||
func.__doc__ = '**RPC Exported Function** (*Auth Level: %s*)\n\n' % auth_level
|
||||
if doc:
|
||||
func.__doc__ += doc
|
||||
|
||||
rpc_text = '**RPC exported method** (*Auth level: %s*)' % auth_level
|
||||
|
||||
# Append the RPC text while ensuring correct docstring formatting.
|
||||
if func.__doc__:
|
||||
if func.__doc__.endswith(' '):
|
||||
indent = func.__doc__.split('\n')[-1]
|
||||
func.__doc__ += f'\n{indent}'
|
||||
else:
|
||||
func.__doc__ += '\n\n'
|
||||
func.__doc__ += rpc_text
|
||||
else:
|
||||
func.__doc__ = rpc_text
|
||||
|
||||
return func
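A simplified, standalone sketch of an export-style decorator that supports both the bare @export and @export(auth_level) forms; the auth level constant is a placeholder value and the docstring handling of the real implementation is omitted.

AUTH_LEVEL_DEFAULT = 5  # placeholder value for the sketch


def export(auth_level=AUTH_LEVEL_DEFAULT):
    def wrap(func):
        func._rpcserver_export = True
        func._rpcserver_auth_level = auth_level
        return func

    if callable(auth_level):
        # Bare @export usage: the "auth_level" argument is actually the function.
        func, auth_level = auth_level, AUTH_LEVEL_DEFAULT
        return wrap(func)
    return wrap


@export
def get_version():
    """Returns the daemon version"""


@export(10)
def shutdown():
    """Admin-only method"""


print(get_version._rpcserver_auth_level, shutdown._rpcserver_auth_level)  # 5 10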
|
||||
|
||||
|
@ -91,25 +120,9 @@ def format_request(call):
|
|||
return s
|
||||
|
||||
|
||||
class ServerContextFactory(object):
|
||||
def getContext(self): # NOQA: N802
|
||||
"""
|
||||
Create an SSL context.
|
||||
|
||||
This loads the servers cert/private key SSL files for use with the
|
||||
SSL transport.
|
||||
"""
|
||||
ssl_dir = deluge.configmanager.get_config_dir('ssl')
|
||||
ctx = SSL.Context(SSL.SSLv23_METHOD)
|
||||
ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
|
||||
ctx.use_certificate_file(os.path.join(ssl_dir, 'daemon.cert'))
|
||||
ctx.use_privatekey_file(os.path.join(ssl_dir, 'daemon.pkey'))
|
||||
return ctx
|
||||
|
||||
|
||||
class DelugeRPCProtocol(DelugeTransferProtocol):
|
||||
def __init__(self):
|
||||
super(DelugeRPCProtocol, self).__init__()
|
||||
super().__init__()
|
||||
# namedtuple subclass with auth_level, username for the connected session.
|
||||
self.AuthLevel = namedtuple('SessionAuthlevel', 'auth_level, username')
|
||||
|
||||
|
@ -134,8 +147,10 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
|
||||
for call in request:
|
||||
if len(call) != 4:
|
||||
log.debug('Received invalid rpc request: number of items '
|
||||
'in request is %s', len(call))
|
||||
log.debug(
|
||||
'Received invalid rpc request: number of items ' 'in request is %s',
|
||||
len(call),
|
||||
)
|
||||
continue
|
||||
# log.debug('RPCRequest: %s', format_request(call))
|
||||
reactor.callLater(0, self.dispatch, *call)
|
||||
|
@ -152,7 +167,7 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
try:
|
||||
self.transfer_message(data)
|
||||
except Exception as ex:
|
||||
log.warn('Error occurred when sending message: %s.', ex)
|
||||
log.warning('Error occurred when sending message: %s.', ex)
|
||||
log.exception(ex)
|
||||
raise
|
||||
|
||||
|
@ -161,11 +176,11 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
This method is called when a new client connects.
|
||||
"""
|
||||
peer = self.transport.getPeer()
|
||||
log.info('Deluge Client connection made from: %s:%s',
|
||||
peer.host, peer.port)
|
||||
log.info('Deluge Client connection made from: %s:%s', peer.host, peer.port)
|
||||
# Set the initial auth level of this session to AUTH_LEVEL_NONE
|
||||
self.factory.authorized_sessions[
|
||||
self.transport.sessionno] = self.AuthLevel(AUTH_LEVEL_NONE, '')
|
||||
self.factory.authorized_sessions[self.transport.sessionno] = self.AuthLevel(
|
||||
AUTH_LEVEL_NONE, ''
|
||||
)
|
||||
|
||||
def connectionLost(self, reason=connectionDone): # NOQA: N802
|
||||
"""
|
||||
|
@ -184,7 +199,9 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
del self.factory.interested_events[self.transport.sessionno]
|
||||
|
||||
if self.factory.state == 'running':
|
||||
component.get('EventManager').emit(ClientDisconnectedEvent(self.factory.session_id))
|
||||
component.get('EventManager').emit(
|
||||
ClientDisconnectedEvent(self.factory.session_id)
|
||||
)
|
||||
log.info('Deluge client disconnected: %s', reason.value)
|
||||
|
||||
def valid_session(self):
|
||||
|
@ -206,32 +223,42 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
:type kwargs: dict
|
||||
|
||||
"""
|
||||
|
||||
def send_error():
|
||||
"""
|
||||
Sends an error response with the contents of the exception that was raised.
|
||||
"""
|
||||
exceptionType, exceptionValue, dummy_exceptionTraceback = sys.exc_info()
|
||||
exc_type, exc_value, dummy_exc_trace = sys.exc_info()
|
||||
formated_tb = traceback.format_exc()
|
||||
try:
|
||||
self.sendData((
|
||||
self.sendData(
|
||||
(
|
||||
RPC_ERROR,
|
||||
request_id,
|
||||
exceptionType.__name__,
|
||||
exceptionValue._args,
|
||||
exceptionValue._kwargs,
|
||||
formated_tb
|
||||
))
|
||||
exc_type.__name__,
|
||||
exc_value._args,
|
||||
exc_value._kwargs,
|
||||
formated_tb,
|
||||
)
|
||||
)
|
||||
except AttributeError:
|
||||
# This is not a deluge exception (object has no attribute '_args), let's wrap it
|
||||
log.warning('An exception occurred while sending RPC_ERROR to '
|
||||
log.warning(
|
||||
'An exception occurred while sending RPC_ERROR to '
|
||||
'client. Wrapping it and resending. Error to '
|
||||
'send(causing exception goes next):\n%s', formated_tb)
|
||||
'send(causing exception goes next):\n%s',
|
||||
formated_tb,
|
||||
)
|
||||
try:
|
||||
raise WrappedException(str(exceptionValue), exceptionType.__name__, formated_tb)
|
||||
raise WrappedException(
|
||||
str(exc_value), exc_type.__name__, formated_tb
|
||||
)
|
||||
except WrappedException:
|
||||
send_error()
|
||||
except Exception as ex:
|
||||
log.error('An exception occurred while sending RPC_ERROR to client: %s', ex)
|
||||
log.error(
|
||||
'An exception occurred while sending RPC_ERROR to client: %s', ex
|
||||
)
|
||||
|
||||
if method == 'daemon.info':
|
||||
# This is a special case and used in the initial connection process
|
||||
|
@ -247,13 +274,22 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
raise IncompatibleClient(deluge.common.get_version())
|
||||
ret = component.get('AuthManager').authorize(*args, **kwargs)
|
||||
if ret:
|
||||
self.factory.authorized_sessions[
|
||||
self.transport.sessionno] = self.AuthLevel(ret, args[0])
|
||||
self.factory.authorized_sessions[self.transport.sessionno] = (
|
||||
self.AuthLevel(ret, args[0])
|
||||
)
|
||||
self.factory.session_protocols[self.transport.sessionno] = self
|
||||
except Exception as ex:
|
||||
send_error()
|
||||
if not isinstance(ex, _ClientSideRecreateError):
|
||||
log.exception(ex)
|
||||
if isinstance(ex, BadLoginError):
|
||||
peer = self.transport.getPeer()
|
||||
log.error(
|
||||
'Deluge client authentication error made from: %s:%s (%s)',
|
||||
peer.host,
|
||||
peer.port,
|
||||
str(ex),
|
||||
)
|
||||
else:
|
||||
self.sendData((RPC_RESPONSE, request_id, (ret)))
|
||||
if not ret:
|
||||
|
@ -290,11 +326,15 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
log.debug('RPC dispatch %s', method)
|
||||
try:
|
||||
method_auth_requirement = self.factory.methods[method]._rpcserver_auth_level
|
||||
auth_level = self.factory.authorized_sessions[self.transport.sessionno].auth_level
|
||||
auth_level = self.factory.authorized_sessions[
|
||||
self.transport.sessionno
|
||||
].auth_level
|
||||
if auth_level < method_auth_requirement:
|
||||
# This session is not allowed to call this method
|
||||
log.debug('Session %s is attempting an unauthorized method call!',
|
||||
self.transport.sessionno)
|
||||
log.debug(
|
||||
'Session %s is attempting an unauthorized method call!',
|
||||
self.transport.sessionno,
|
||||
)
|
||||
raise NotAuthorizedError(auth_level, method_auth_requirement)
|
||||
# Set the session_id in the factory so that methods can know
|
||||
# which session is calling it.
|
||||
|
@ -310,6 +350,7 @@ class DelugeRPCProtocol(DelugeTransferProtocol):
|
|||
# Check if the return value is a deferred, since we'll need to
|
||||
# wait for it to fire before sending the RPC_RESPONSE
|
||||
if isinstance(ret, defer.Deferred):
|
||||
|
||||
def on_success(result):
|
||||
try:
|
||||
self.sendData((RPC_RESPONSE, request_id, result))
|
||||
|
@ -379,8 +420,13 @@ class RPCServer(component.Component):
|
|||
# Check for SSL keys and generate some if needed
|
||||
check_ssl_keys()
|
||||
|
||||
cert = os.path.join(deluge.configmanager.get_config_dir('ssl'), 'daemon.cert')
|
||||
pkey = os.path.join(deluge.configmanager.get_config_dir('ssl'), 'daemon.pkey')
|
||||
|
||||
try:
|
||||
reactor.listenSSL(port, self.factory, ServerContextFactory(), interface=hostname)
|
||||
reactor.listenSSL(
|
||||
port, self.factory, get_context_factory(cert, pkey), interface=hostname
|
||||
)
|
||||
except Exception as ex:
|
||||
log.debug('Daemon already running or port not available.: %s', ex)
|
||||
raise
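A rough sketch of starting an SSL listener with the new crypto_utils helper; it assumes a Deluge install for the imports, and a bare Factory stands in for the RPC server factory purely as a placeholder.

import os

from twisted.internet import reactor
from twisted.internet.protocol import Factory

import deluge.configmanager
from deluge.crypto_utils import check_ssl_keys, get_context_factory

check_ssl_keys()  # generate daemon.cert/daemon.pkey if they do not exist yet
ssl_dir = deluge.configmanager.get_config_dir('ssl')
cert = os.path.join(ssl_dir, 'daemon.cert')
pkey = os.path.join(ssl_dir, 'daemon.pkey')

reactor.listenSSL(58846, Factory(), get_context_factory(cert, pkey), interface='127.0.0.1')
reactor.run()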
|
||||
|
@ -507,8 +553,8 @@ class RPCServer(component.Component):
|
|||
:type event: :class:`deluge.event.DelugeEvent`
|
||||
"""
|
||||
log.debug('intevents: %s', self.factory.interested_events)
|
||||
# Find sessions interested in this event
|
||||
for session_id, interest in self.factory.interested_events.items():
|
||||
# Use copy of `interested_events` since it can mutate while iterating.
|
||||
for session_id, interest in self.factory.interested_events.copy().items():
|
||||
if event.name in interest:
|
||||
log.debug('Emit Event: %s %s', event.name, event.args)
|
||||
# This session is interested so send a RPC_EVENT
|
||||
|
@ -526,73 +572,35 @@ class RPCServer(component.Component):
|
|||
:type event: :class:`deluge.event.DelugeEvent`
|
||||
"""
|
||||
if not self.is_session_valid(session_id):
|
||||
log.debug('Session ID %s is not valid. Not sending event "%s".', session_id, event.name)
|
||||
log.debug(
|
||||
'Session ID %s is not valid. Not sending event "%s".',
|
||||
session_id,
|
||||
event.name,
|
||||
)
|
||||
return
|
||||
if session_id not in self.factory.interested_events:
|
||||
log.debug('Session ID %s is not interested in any events. Not sending event "%s".',
|
||||
session_id, event.name)
|
||||
log.debug(
|
||||
'Session ID %s is not interested in any events. Not sending event "%s".',
|
||||
session_id,
|
||||
event.name,
|
||||
)
|
||||
return
|
||||
if event.name not in self.factory.interested_events[session_id]:
|
||||
log.debug('Session ID %s is not interested in event "%s". Not sending it.', session_id, event.name)
|
||||
log.debug(
|
||||
'Session ID %s is not interested in event "%s". Not sending it.',
|
||||
session_id,
|
||||
event.name,
|
||||
)
|
||||
return
|
||||
log.debug('Sending event "%s" with args "%s" to session id "%s".',
|
||||
event.name, event.args, session_id)
|
||||
self.factory.session_protocols[session_id].sendData((RPC_EVENT, event.name, event.args))
|
||||
log.debug(
|
||||
'Sending event "%s" with args "%s" to session id "%s".',
|
||||
event.name,
|
||||
event.args,
|
||||
session_id,
|
||||
)
|
||||
self.factory.session_protocols[session_id].sendData(
|
||||
(RPC_EVENT, event.name, event.args)
|
||||
)
|
||||
|
||||
def stop(self):
|
||||
self.factory.state = 'stopping'
|
||||
|
||||
|
||||
def check_ssl_keys():
|
||||
"""
|
||||
Check for SSL cert/key and create them if necessary
|
||||
"""
|
||||
ssl_dir = deluge.configmanager.get_config_dir('ssl')
|
||||
if not os.path.exists(ssl_dir):
|
||||
# The ssl folder doesn't exist so we need to create it
|
||||
os.makedirs(ssl_dir)
|
||||
generate_ssl_keys()
|
||||
else:
|
||||
for f in ('daemon.pkey', 'daemon.cert'):
|
||||
if not os.path.exists(os.path.join(ssl_dir, f)):
|
||||
generate_ssl_keys()
|
||||
break
|
||||
|
||||
|
||||
def generate_ssl_keys():
|
||||
"""
|
||||
This method generates a new SSL key/cert.
|
||||
"""
|
||||
from deluge.common import PY2
|
||||
digest = 'sha256' if not PY2 else b'sha256'
|
||||
|
||||
# Generate key pair
|
||||
pkey = crypto.PKey()
|
||||
pkey.generate_key(crypto.TYPE_RSA, 2048)
|
||||
|
||||
# Generate cert request
|
||||
req = crypto.X509Req()
|
||||
subj = req.get_subject()
|
||||
setattr(subj, 'CN', 'Deluge Daemon')
|
||||
req.set_pubkey(pkey)
|
||||
req.sign(pkey, digest)
|
||||
|
||||
# Generate certificate
|
||||
cert = crypto.X509()
|
||||
cert.set_serial_number(0)
|
||||
cert.gmtime_adj_notBefore(0)
|
||||
cert.gmtime_adj_notAfter(60 * 60 * 24 * 365 * 3) # Three Years
|
||||
cert.set_issuer(req.get_subject())
|
||||
cert.set_subject(req.get_subject())
|
||||
cert.set_pubkey(req.get_pubkey())
|
||||
cert.sign(pkey, digest)
|
||||
|
||||
# Write out files
|
||||
ssl_dir = deluge.configmanager.get_config_dir('ssl')
|
||||
with open(os.path.join(ssl_dir, 'daemon.pkey'), 'wb') as _file:
|
||||
_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
|
||||
with open(os.path.join(ssl_dir, 'daemon.cert'), 'wb') as _file:
|
||||
_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
|
||||
# Make the files only readable by this user
|
||||
for f in ('daemon.pkey', 'daemon.cert'):
|
||||
os.chmod(os.path.join(ssl_dir, f), stat.S_IREAD | stat.S_IWRITE)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -14,11 +13,12 @@ Attributes:
|
|||
|
||||
"""
|
||||
|
||||
from __future__ import division, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import time
|
||||
from typing import Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from twisted.internet.defer import Deferred, DeferredList
|
||||
|
||||
|
@ -28,19 +28,11 @@ from deluge.common import decode_bytes
|
|||
from deluge.configmanager import ConfigManager, get_config_dir
|
||||
from deluge.core.authmanager import AUTH_LEVEL_ADMIN
|
||||
from deluge.decorators import deprecated
|
||||
from deluge.event import TorrentFolderRenamedEvent, TorrentStateChangedEvent, TorrentTrackerStatusEvent
|
||||
|
||||
try:
|
||||
from urllib.parse import urlparse
|
||||
except ImportError:
|
||||
# PY2 fallback
|
||||
from urlparse import urlparse # pylint: disable=ungrouped-imports
|
||||
|
||||
try:
|
||||
from future_builtins import zip
|
||||
except ImportError:
|
||||
# Ignore on Py3.
|
||||
pass
|
||||
from deluge.event import (
|
||||
TorrentFolderRenamedEvent,
|
||||
TorrentStateChangedEvent,
|
||||
TorrentTrackerStatusEvent,
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -52,7 +44,7 @@ LT_TORRENT_STATE_MAP = {
|
|||
'finished': 'Seeding',
|
||||
'seeding': 'Seeding',
|
||||
'allocating': 'Allocating',
|
||||
'checking_resume_data': 'Checking'
|
||||
'checking_resume_data': 'Checking',
|
||||
}
|
||||
|
||||
|
||||
|
@ -65,6 +57,7 @@ def sanitize_filepath(filepath, folder=False):
|
|||
Args:
|
||||
folder (bool): A trailing slash is appended to the returned filepath.
|
||||
"""
|
||||
|
||||
def clean_filename(filename):
|
||||
"""Strips whitespace and discards dotted filenames"""
|
||||
filename = filename.strip()
|
||||
|
@ -89,7 +82,7 @@ def convert_lt_files(files):
|
|||
"""Indexes and decodes files from libtorrent get_files().
|
||||
|
||||
Args:
|
||||
files (list): The libtorrent torrent files.
|
||||
files (file_storage): The libtorrent torrent files.
|
||||
|
||||
Returns:
|
||||
list of dict: The files.
|
||||
|
@ -104,18 +97,20 @@ def convert_lt_files(files):
|
|||
}
|
||||
"""
|
||||
filelist = []
|
||||
for index, _file in enumerate(files):
|
||||
for index in range(files.num_files()):
|
||||
try:
|
||||
file_path = _file.path.decode('utf8')
|
||||
file_path = files.file_path(index).decode('utf8')
|
||||
except AttributeError:
|
||||
file_path = _file.path
|
||||
file_path = files.file_path(index)
|
||||
|
||||
filelist.append({
|
||||
filelist.append(
|
||||
{
|
||||
'index': index,
|
||||
'path': file_path.replace('\\', '/'),
|
||||
'size': _file.size,
|
||||
'offset': _file.offset
|
||||
})
|
||||
'size': files.file_size(index),
|
||||
'offset': files.file_offset(index),
|
||||
}
|
||||
)
|
||||
|
||||
return filelist
|
||||
|
||||
|
@ -128,7 +123,7 @@ class TorrentOptions(dict):
|
|||
auto_managed (bool): Set torrent to auto managed mode, i.e. will be started or queued automatically.
|
||||
download_location (str): The path for the torrent data to be stored while downloading.
|
||||
file_priorities (list of int): The priority for files in torrent, range is [0..7] however
|
||||
only [0, 1, 5, 7] are normally used and correspond to [Do Not Download, Normal, High, Highest]
|
||||
only [0, 1, 4, 7] are normally used and correspond to [Skip, Low, Normal, High]
|
||||
mapped_files (dict): A mapping of the renamed filenames in 'index:filename' pairs.
|
||||
max_connections (int): Sets maximum number of connections this torrent will open.
|
||||
This must be at least 2. The default is unlimited (-1).
|
||||
|
@ -152,8 +147,9 @@ class TorrentOptions(dict):
|
|||
stop_ratio (float): The seeding ratio to stop (or remove) the torrent at.
|
||||
super_seeding (bool): Enable super seeding/initial seeding.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super(TorrentOptions, self).__init__()
|
||||
super().__init__()
|
||||
config = ConfigManager('core.conf').config
|
||||
options_conf_map = {
|
||||
'add_paused': 'add_paused',
|
||||
|
@ -172,7 +168,7 @@ class TorrentOptions(dict):
|
|||
'shared': 'shared',
|
||||
'stop_at_ratio': 'stop_seed_at_ratio',
|
||||
'stop_ratio': 'stop_seed_ratio',
|
||||
'super_seeding': 'super_seeding'
|
||||
'super_seeding': 'super_seeding',
|
||||
}
|
||||
for opt_k, conf_k in options_conf_map.items():
|
||||
self[opt_k] = config[conf_k]
|
||||
|
@ -183,14 +179,14 @@ class TorrentOptions(dict):
|
|||
self['seed_mode'] = False
|
||||
|
||||
|
||||
class TorrentError(object):
|
||||
class TorrentError:
|
||||
def __init__(self, error_message, was_paused=False, restart_to_resume=False):
|
||||
self.error_message = error_message
|
||||
self.was_paused = was_paused
|
||||
self.restart_to_resume = restart_to_resume
|
||||
|
||||
|
||||
class Torrent(object):
|
||||
class Torrent:
|
||||
"""Torrent holds information about torrents added to the libtorrent session.
|
||||
|
||||
Args:
|
||||
|
@ -198,12 +194,12 @@ class Torrent(object):
|
|||
options (dict): The torrent options.
|
||||
state (TorrentState): The torrent state.
|
||||
filename (str): The filename of the torrent file.
|
||||
magnet (str): The magnet uri.
|
||||
magnet (str): The magnet URI.
|
||||
|
||||
Attributes:
|
||||
torrent_id (str): The torrent_id for this torrent
|
||||
handle: Holds the libtorrent torrent handle
|
||||
magnet (str): The magnet uri used to add this torrent (if available).
|
||||
magnet (str): The magnet URI used to add this torrent (if available).
|
||||
status: Holds status info so that we don"t need to keep getting it from libtorrent.
|
||||
torrent_info: store the torrent info.
|
||||
has_metadata (bool): True if the metadata for the torrent is available, False otherwise.
|
||||
|
@ -227,6 +223,7 @@ class Torrent(object):
|
|||
we can re-pause it after its done if necessary
|
||||
forced_error (TorrentError): Keep track if we have forced this torrent to be in Error state.
|
||||
"""
|
||||
|
||||
def __init__(self, handle, options, state=None, filename=None, magnet=None):
|
||||
self.torrent_id = str(handle.info_hash())
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
|
@ -237,12 +234,12 @@ class Torrent(object):
|
|||
self.rpcserver = component.get('RPCServer')
|
||||
|
||||
self.handle = handle
|
||||
self.handle.resolve_countries(True)
|
||||
|
||||
self.magnet = magnet
|
||||
self.status = self.handle.status()
|
||||
self._status: Optional['lt.torrent_status'] = None
|
||||
self._status_last_update: float = 0.0
|
||||
|
||||
self.torrent_info = self.handle.get_torrent_info()
|
||||
self.torrent_info = self.handle.torrent_file()
|
||||
self.has_metadata = self.status.has_metadata
|
||||
|
||||
self.options = TorrentOptions()
|
||||
|
@ -258,6 +255,9 @@ class Torrent(object):
|
|||
self.is_finished = False
|
||||
self.filename = filename
|
||||
|
||||
if not self.filename:
|
||||
self.filename = ''
|
||||
|
||||
self.forced_error = None
|
||||
self.statusmsg = None
|
||||
self.state = None
|
||||
|
@ -270,7 +270,6 @@ class Torrent(object):
|
|||
self.prev_status = {}
|
||||
self.waiting_on_folder_rename = []
|
||||
|
||||
self.update_status(self.handle.status())
|
||||
self._create_status_funcs()
|
||||
self.set_options(self.options)
|
||||
self.update_state()
|
||||
|
@ -278,6 +277,18 @@ class Torrent(object):
|
|||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('Torrent object created.')
|
||||
|
||||
def _set_handle_flags(self, flag: lt.torrent_flags, set_flag: bool):
|
||||
"""set or unset a flag to the lt handle
|
||||
|
||||
Args:
|
||||
flag (lt.torrent_flags): the flag to set/unset
|
||||
set_flag (bool): True for setting the flag, False for unsetting it
|
||||
"""
|
||||
if set_flag:
|
||||
self.handle.set_flags(flag)
|
||||
else:
|
||||
self.handle.unset_flags(flag)
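
Callers later in this diff use the helper in this keyword style; for illustration, dropping a torrent out of libtorrent's automatic queue management looks like:

self._set_handle_flags(
    flag=lt.torrent_flags.auto_managed,
    set_flag=False,
)
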
|
||||
|
||||
def on_metadata_received(self):
|
||||
"""Process the metadata received alert for this torrent"""
|
||||
self.has_metadata = True
|
||||
|
@ -296,7 +307,9 @@ class Torrent(object):
|
|||
|
||||
# Skip set_prioritize_first_last if set_file_priorities is in options as it also calls the method.
|
||||
if 'file_priorities' in options and 'prioritize_first_last_pieces' in options:
|
||||
self.options['prioritize_first_last_pieces'] = options.pop('prioritize_first_last_pieces')
|
||||
self.options['prioritize_first_last_pieces'] = options.pop(
|
||||
'prioritize_first_last_pieces'
|
||||
)
|
||||
|
||||
for key, value in options.items():
|
||||
if key in self.options:
|
||||
|
@ -360,7 +373,7 @@ class Torrent(object):
|
|||
"""Sets maximum download speed for this torrent.
|
||||
|
||||
Args:
|
||||
m_up_speed (float): Maximum download speed in KiB/s.
|
||||
m_down_speed (float): Maximum download speed in KiB/s.
|
||||
"""
|
||||
self.options['max_download_speed'] = m_down_speed
|
||||
if m_down_speed < 0:
|
||||
|
@ -392,7 +405,7 @@ class Torrent(object):
|
|||
return
|
||||
|
||||
# A list of priorities for each piece in the torrent
|
||||
priorities = self.handle.piece_priorities()
|
||||
priorities = self.handle.get_piece_priorities()
|
||||
|
||||
def get_file_piece(idx, byte_offset):
|
||||
return self.torrent_info.map_file(idx, byte_offset, 0).piece
|
||||
|
@ -408,20 +421,27 @@ class Torrent(object):
|
|||
|
||||
# Set the pieces in first and last ranges to priority 7
|
||||
# if they are not marked as do not download
|
||||
priorities[first_start:first_end] = [p and 7 for p in priorities[first_start:first_end]]
|
||||
priorities[last_start:last_end] = [p and 7 for p in priorities[last_start:last_end]]
|
||||
priorities[first_start:first_end] = [
|
||||
p and 7 for p in priorities[first_start:first_end]
|
||||
]
|
||||
priorities[last_start:last_end] = [
|
||||
p and 7 for p in priorities[last_start:last_end]
|
||||
]
|
||||
|
||||
# Setting the priorites for all the pieces of this torrent
|
||||
self.handle.prioritize_pieces(priorities)
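
The `p and 7` idiom above bumps a piece to top priority only when it is not already marked "do not download" (priority 0). For example:

priorities = [0, 1, 4, 1]
print([p and 7 for p in priorities])  # [0, 7, 7, 7]
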
|
||||
|
||||
def set_sequential_download(self, set_sequencial):
|
||||
def set_sequential_download(self, sequential):
|
||||
"""Sets whether to download the pieces of the torrent in order.
|
||||
|
||||
Args:
|
||||
set_sequencial (bool): Enable sequencial downloading.
|
||||
sequential (bool): Enable sequential downloading.
|
||||
"""
|
||||
self.options['sequential_download'] = set_sequencial
|
||||
self.handle.set_sequential_download(set_sequencial)
|
||||
self.options['sequential_download'] = sequential
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.sequential_download,
|
||||
set_flag=sequential,
|
||||
)
|
||||
|
||||
def set_auto_managed(self, auto_managed):
|
||||
"""Set auto managed mode, i.e. will be started or queued automatically.
|
||||
|
@ -431,7 +451,10 @@ class Torrent(object):
|
|||
"""
|
||||
self.options['auto_managed'] = auto_managed
|
||||
if not (self.status.paused and not self.status.auto_managed):
|
||||
self.handle.auto_managed(auto_managed)
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.auto_managed,
|
||||
set_flag=auto_managed,
|
||||
)
|
||||
self.update_state()
|
||||
|
||||
def set_super_seeding(self, super_seeding):
|
||||
|
@ -440,11 +463,11 @@ class Torrent(object):
|
|||
Args:
|
||||
super_seeding (bool): Enable super seeding.
|
||||
"""
|
||||
if self.status.is_seeding:
|
||||
self.options['super_seeding'] = super_seeding
|
||||
self.handle.super_seeding(super_seeding)
|
||||
else:
|
||||
self.options['super_seeding'] = False
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.super_seeding,
|
||||
set_flag=super_seeding,
|
||||
)
|
||||
|
||||
def set_stop_ratio(self, stop_ratio):
|
||||
"""The seeding ratio to stop (or remove) the torrent at.
|
||||
|
@ -493,32 +516,35 @@ class Torrent(object):
|
|||
Args:
|
||||
file_priorities (list of int): List of file priorities.
|
||||
"""
|
||||
if not self.has_metadata:
|
||||
return
|
||||
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('Setting %s file priorities to: %s', self.torrent_id, file_priorities)
|
||||
log.debug(
|
||||
'Setting %s file priorities to: %s', self.torrent_id, file_priorities
|
||||
)
|
||||
|
||||
if (self.handle.has_metadata() and file_priorities and
|
||||
len(file_priorities) == len(self.get_files())):
|
||||
if file_priorities and len(file_priorities) == len(self.get_files()):
|
||||
self.handle.prioritize_files(file_priorities)
|
||||
else:
|
||||
log.debug('Unable to set new file priorities.')
|
||||
file_priorities = self.handle.file_priorities()
|
||||
file_priorities = self.handle.get_file_priorities()
|
||||
|
||||
if 0 in self.options['file_priorities']:
|
||||
# Previously marked a file 'Do Not Download' so check if changed any 0's to >0.
|
||||
# Previously marked a file 'skip' so check for any 0's now >0.
|
||||
for index, priority in enumerate(self.options['file_priorities']):
|
||||
if priority == 0 and file_priorities[index] > 0:
|
||||
# Changed 'Do Not Download' to a download priority so update state.
|
||||
# Changed priority from skip to download so update state.
|
||||
self.is_finished = False
|
||||
self.update_state()
|
||||
break
|
||||
|
||||
# Ensure stored options are in sync in case file_priorities were faulty (old state?).
|
||||
self.options['file_priorities'] = self.handle.file_priorities()
|
||||
# Store the priorities.
|
||||
self.options['file_priorities'] = file_priorities
|
||||
|
||||
# Set the first/last priorities if needed.
|
||||
if self.options['prioritize_first_last_pieces']:
|
||||
self.set_prioritize_first_last_pieces(self.options['prioritize_first_last_pieces'])
|
||||
self.set_prioritize_first_last_pieces(True)
|
||||
|
||||
@deprecated
|
||||
def set_save_path(self, download_location):
|
||||
|
@ -552,7 +578,7 @@ class Torrent(object):
|
|||
trackers (list of dicts): A list of trackers.
|
||||
"""
|
||||
if trackers is None:
|
||||
self.trackers = [tracker for tracker in self.handle.trackers()]
|
||||
self.trackers = list(self.handle.trackers())
|
||||
self.tracker_host = None
|
||||
return
|
||||
|
||||
|
@ -594,11 +620,16 @@ class Torrent(object):
|
|||
|
||||
if self.tracker_status != status:
|
||||
self.tracker_status = status
|
||||
component.get('EventManager').emit(TorrentTrackerStatusEvent(self.torrent_id, self.tracker_status))
|
||||
component.get('EventManager').emit(
|
||||
TorrentTrackerStatusEvent(self.torrent_id, self.tracker_status)
|
||||
)
|
||||
|
||||
def merge_trackers(self, torrent_info):
|
||||
"""Merges new trackers in torrent_info into torrent"""
|
||||
log.info('Adding any new trackers to torrent (%s) already in session...', self.torrent_id)
|
||||
log.info(
|
||||
'Adding any new trackers to torrent (%s) already in session...',
|
||||
self.torrent_id,
|
||||
)
|
||||
if not torrent_info:
|
||||
return
|
||||
# Don't merge trackers if either torrent has private flag set.
|
||||
|
@ -612,7 +643,7 @@ class Torrent(object):
|
|||
|
||||
def update_state(self):
|
||||
"""Updates the state, based on libtorrent's torrent state"""
|
||||
status = self.handle.status()
|
||||
status = self.get_lt_status()
|
||||
session_paused = component.get('Core').session.is_paused()
|
||||
old_state = self.state
|
||||
self.set_status_message()
|
||||
|
@ -624,7 +655,10 @@ class Torrent(object):
|
|||
elif status_error:
|
||||
self.state = 'Error'
|
||||
# auto-manage status will be reverted upon resuming.
|
||||
self.handle.auto_managed(False)
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.auto_managed,
|
||||
set_flag=False,
|
||||
)
|
||||
self.set_status_message(decode_bytes(status_error))
|
||||
elif status.moving_storage:
|
||||
self.state = 'Moving'
|
||||
|
@ -636,13 +670,23 @@ class Torrent(object):
|
|||
self.state = LT_TORRENT_STATE_MAP.get(str(status.state), str(status.state))
|
||||
|
||||
if self.state != old_state:
|
||||
component.get('EventManager').emit(TorrentStateChangedEvent(self.torrent_id, self.state))
|
||||
component.get('EventManager').emit(
|
||||
TorrentStateChangedEvent(self.torrent_id, self.state)
|
||||
)
|
||||
|
||||
if log.isEnabledFor(logging.DEBUG):
|
||||
log.debug('State from lt was: %s | Session is paused: %s\nTorrent state set from "%s" to "%s" (%s)',
|
||||
'error' if status_error else status.state, session_paused, old_state, self.state, self.torrent_id)
|
||||
log.debug(
|
||||
'State from lt was: %s | Session is paused: %s\nTorrent state set from "%s" to "%s" (%s)',
|
||||
'error' if status_error else status.state,
|
||||
session_paused,
|
||||
old_state,
|
||||
self.state,
|
||||
self.torrent_id,
|
||||
)
|
||||
if self.forced_error:
|
||||
log.debug('Torrent Error state message: %s', self.forced_error.error_message)
|
||||
log.debug(
|
||||
'Torrent Error state message: %s', self.forced_error.error_message
|
||||
)
|
||||
|
||||
def set_status_message(self, message=None):
|
||||
"""Sets the torrent status message.
|
||||
|
@ -667,8 +711,11 @@ class Torrent(object):
|
|||
restart_to_resume (bool, optional): Prevent resuming clearing the error, only restarting
|
||||
session can resume.
|
||||
"""
|
||||
status = self.handle.status()
|
||||
self.handle.auto_managed(False)
|
||||
status = self.get_lt_status()
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.auto_managed,
|
||||
set_flag=False,
|
||||
)
|
||||
self.forced_error = TorrentError(message, status.paused, restart_to_resume)
|
||||
if not status.paused:
|
||||
self.handle.pause()
|
||||
|
@ -682,7 +729,10 @@ class Torrent(object):
|
|||
log.error('Restart deluge to clear this torrent error')
|
||||
|
||||
if not self.forced_error.was_paused and self.options['auto_managed']:
|
||||
self.handle.auto_managed(True)
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.auto_managed,
|
||||
set_flag=True,
|
||||
)
|
||||
self.forced_error = None
|
||||
self.set_status_message('OK')
|
||||
if update_state:
|
||||
|
@ -697,16 +747,23 @@ class Torrent(object):
|
|||
"""
|
||||
status = self.status
|
||||
eta = 0
|
||||
if self.is_finished and self.options['stop_at_ratio'] and status.upload_payload_rate:
|
||||
if (
|
||||
self.is_finished
|
||||
and self.options['stop_at_ratio']
|
||||
and status.upload_payload_rate
|
||||
):
|
||||
# We're a seed, so calculate the time to the 'stop_share_ratio'
|
||||
eta = ((status.all_time_download * self.options['stop_ratio']) -
|
||||
status.all_time_upload) // status.upload_payload_rate
|
||||
eta = (
|
||||
int(status.all_time_download * self.options['stop_ratio'])
|
||||
- status.all_time_upload
|
||||
) // status.upload_payload_rate
|
||||
elif status.download_payload_rate:
|
||||
left = status.total_wanted - status.total_wanted_done
|
||||
if left > 0:
|
||||
eta = left // status.download_payload_rate
|
||||
|
||||
return eta
|
||||
# Limit to 1 year, avoid excessive values and prevent GTK int overflow.
|
||||
return eta if eta < 31557600 else -1
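
A quick worked example of the seeding branch above, with invented numbers: 1 GiB downloaded, a stop ratio of 2.0, 1.5 GiB already uploaded and a 1 MiB/s upload rate leave 512 seconds until the ratio is reached.

all_time_download = 1024 * 1024**2   # 1 GiB downloaded
all_time_upload = 1536 * 1024**2     # 1.5 GiB uploaded so far
stop_ratio = 2.0
upload_payload_rate = 1024**2        # 1 MiB/s

eta = (int(all_time_download * stop_ratio) - all_time_upload) // upload_payload_rate
print(eta)  # 512 seconds
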
|
||||
|
||||
def get_ratio(self):
|
||||
"""Get the ratio of upload/download for this torrent.
|
||||
|
@ -774,27 +831,37 @@ class Torrent(object):
|
|||
if peer.flags & peer.connecting or peer.flags & peer.handshake:
|
||||
continue
|
||||
|
||||
try:
|
||||
client = decode_bytes(peer.client)
|
||||
except UnicodeDecodeError:
|
||||
# libtorrent on Py3 can raise UnicodeDecodeError for peer_info.client
|
||||
client = 'unknown'
|
||||
|
||||
try:
|
||||
country = component.get('Core').geoip_instance.country_code_by_addr(peer.ip[0])
|
||||
country = component.get('Core').geoip_instance.country_code_by_addr(
|
||||
peer.ip[0]
|
||||
)
|
||||
except AttributeError:
|
||||
country = ''
|
||||
else:
|
||||
try:
|
||||
country = ''.join([char if char.isalpha() else ' ' for char in country])
|
||||
country = ''.join(
|
||||
[char if char.isalpha() else ' ' for char in country]
|
||||
)
|
||||
except TypeError:
|
||||
country = ''
|
||||
|
||||
ret.append({
|
||||
ret.append(
|
||||
{
|
||||
'client': client,
|
||||
'country': country,
|
||||
'down_speed': peer.payload_down_speed,
|
||||
'ip': '%s:%s' % (peer.ip[0], peer.ip[1]),
|
||||
'ip': f'{peer.ip[0]}:{peer.ip[1]}',
|
||||
'progress': peer.progress,
|
||||
'seed': peer.flags & peer.seed,
|
||||
'up_speed': peer.payload_up_speed,
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
return ret
|
||||
|
||||
|
@ -808,7 +875,7 @@ class Torrent(object):
|
|||
|
||||
def get_file_priorities(self):
|
||||
"""Return the file priorities"""
|
||||
if not self.handle.has_metadata():
|
||||
if not self.handle.status().has_metadata:
|
||||
return []
|
||||
|
||||
if not self.options['file_priorities']:
|
||||
|
@ -825,8 +892,19 @@ class Torrent(object):
|
|||
"""
|
||||
if not self.has_metadata:
|
||||
return []
|
||||
return [progress / _file.size if _file.size else 0.0 for progress, _file in
|
||||
zip(self.handle.file_progress(), self.torrent_info.files())]
|
||||
|
||||
try:
|
||||
files_progresses = zip(
|
||||
self.handle.file_progress(), self.torrent_info.files()
|
||||
)
|
||||
except Exception:
|
||||
# Handle libtorrent >=2.0.0,<=2.0.4 file_progress error
|
||||
files_progresses = zip(iter(lambda: 0, 1), self.torrent_info.files())
|
||||
|
||||
return [
|
||||
progress / _file.size if _file.size else 0.0
|
||||
for progress, _file in files_progresses
|
||||
]
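
The fallback above pairs every file with a zero progress value: iter(lambda: 0, 1) is an endless iterator of zeros, and zip() simply stops after the last file. A tiny illustration:

files = ['a.txt', 'b.txt', 'c.txt']
print(list(zip(iter(lambda: 0, 1), files)))
# [(0, 'a.txt'), (0, 'b.txt'), (0, 'c.txt')]
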
|
||||
|
||||
def get_tracker_host(self):
|
||||
"""Get the hostname of the currently connected tracker.
|
||||
|
@ -846,11 +924,11 @@ class Torrent(object):
|
|||
if tracker:
|
||||
url = urlparse(tracker.replace('udp://', 'http://'))
|
||||
if hasattr(url, 'hostname'):
|
||||
host = (url.hostname or 'DHT')
|
||||
host = url.hostname or 'DHT'
|
||||
# Check if hostname is an IP address and just return it if that's the case
|
||||
try:
|
||||
socket.inet_aton(host)
|
||||
except socket.error:
|
||||
except OSError:
|
||||
pass
|
||||
else:
|
||||
# This is an IP address because an exception wasn't raised
|
||||
|
@ -867,7 +945,7 @@ class Torrent(object):
|
|||
return ''
|
||||
|
||||
def get_magnet_uri(self):
|
||||
"""Returns a magnet uri for this torrent"""
|
||||
"""Returns a magnet URI for this torrent"""
|
||||
return lt.make_magnet_uri(self.handle)
|
||||
|
||||
def get_name(self):
|
||||
|
@ -881,14 +959,18 @@ class Torrent(object):
|
|||
str: the name of the torrent.
|
||||
|
||||
"""
|
||||
if not self.options['name']:
|
||||
handle_name = self.handle.name()
|
||||
if handle_name:
|
||||
name = decode_bytes(handle_name)
|
||||
if self.options['name']:
|
||||
return self.options['name']
|
||||
|
||||
if self.has_metadata:
|
||||
# Use the top-level folder as torrent name.
|
||||
filename = decode_bytes(self.torrent_info.files().file_path(0))
|
||||
name = filename.replace('\\', '/', 1).split('/', 1)[0]
|
||||
else:
|
||||
name = decode_bytes(self.handle.status().name)
|
||||
|
||||
if not name:
|
||||
name = self.torrent_id
|
||||
else:
|
||||
name = self.options['name']
|
||||
|
||||
return name
|
||||
|
||||
|
@ -937,12 +1019,14 @@ class Torrent(object):
|
|||
call to get_status based on the session_id
|
||||
update (bool): If True the status will be updated from libtorrent
|
||||
if False, the cached values will be returned
|
||||
all_keys (bool): If True return all keys while ignoring the keys param
|
||||
if False, return only the requested keys
|
||||
|
||||
Returns:
|
||||
dict: a dictionary of the status keys and their values
|
||||
"""
|
||||
if update:
|
||||
self.update_status(self.handle.status())
|
||||
self.get_lt_status()
|
||||
|
||||
if all_keys:
|
||||
keys = list(self.status_funcs)
|
||||
|
@ -972,13 +1056,35 @@ class Torrent(object):

        return status_dict

    def update_status(self, status):
    def get_lt_status(self) -> 'lt.torrent_status':
        """Get the torrent status fresh, not from cache.

        This should be used when a guaranteed fresh status is needed rather than
        `torrent.handle.status()` because it will update the cache as well.
        """
        self.status = self.handle.status()
        return self.status

    @property
    def status(self) -> 'lt.torrent_status':
        """Cached copy of the libtorrent status for this torrent.

        If it has not been updated within the last five seconds, it will be
        automatically refreshed.
        """
        if self._status_last_update < (time.time() - 5):
            self.status = self.handle.status()
        return self._status

    @status.setter
    def status(self, status: 'lt.torrent_status') -> None:
        """Updates the cached status.

        Args:
            status (libtorrent.torrent_status): a libtorrent torrent status
            status: a libtorrent torrent status
        """
        self.status = status
        self._status = status
        self._status_last_update = time.time()
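
The property pair above replaces the old update_status() call with a small time-based cache. A standalone sketch of the same pattern (the class name and the example fetch function are illustrative, not Deluge code):

import time


class CachedStatus:
    """Caches an expensive status lookup and refreshes it after a timeout."""

    def __init__(self, fetch, max_age=5.0):
        self._fetch = fetch      # callable returning a fresh status
        self._max_age = max_age  # seconds before the cache is considered stale
        self._status = None
        self._last_update = 0.0

    @property
    def status(self):
        # Refresh automatically when the cached copy is older than max_age.
        if self._last_update < (time.time() - self._max_age):
            self.status = self._fetch()
        return self._status

    @status.setter
    def status(self, value):
        self._status = value
        self._last_update = time.time()


cache = CachedStatus(fetch=lambda: {'progress': 42})
print(cache.status)  # First access fetches; later accesses within 5 s reuse the cache.
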

    def _create_status_funcs(self):
        """Creates the functions for getting torrent status"""

@ -987,7 +1093,9 @@ class Torrent(object):
|
|||
'seeding_time': lambda: self.status.seeding_time,
|
||||
'finished_time': lambda: self.status.finished_time,
|
||||
'all_time_download': lambda: self.status.all_time_download,
|
||||
'storage_mode': lambda: self.status.storage_mode.name.split('_')[2], # sparse or allocate
|
||||
'storage_mode': lambda: self.status.storage_mode.name.split('_')[
|
||||
2
|
||||
], # sparse or allocate
|
||||
'distributed_copies': lambda: max(0.0, self.status.distributed_copies),
|
||||
'download_payload_rate': lambda: self.status.download_payload_rate,
|
||||
'file_priorities': self.get_file_priorities,
|
||||
|
@ -1000,8 +1108,12 @@ class Torrent(object):
|
|||
'max_upload_slots': lambda: self.options['max_upload_slots'],
|
||||
'max_upload_speed': lambda: self.options['max_upload_speed'],
|
||||
'message': lambda: self.statusmsg,
|
||||
'move_on_completed_path': lambda: self.options['move_completed_path'], # Deprecated: move_completed_path
|
||||
'move_on_completed': lambda: self.options['move_completed'], # Deprecated: Use move_completed
|
||||
'move_on_completed_path': lambda: self.options[
|
||||
'move_completed_path'
|
||||
], # Deprecated: move_completed_path
|
||||
'move_on_completed': lambda: self.options[
|
||||
'move_completed'
|
||||
], # Deprecated: Use move_completed
|
||||
'move_completed_path': lambda: self.options['move_completed_path'],
|
||||
'move_completed': lambda: self.options['move_completed'],
|
||||
'next_announce': lambda: self.status.next_announce.seconds,
|
||||
|
@ -1009,17 +1121,25 @@ class Torrent(object):
|
|||
'num_seeds': lambda: self.status.num_seeds,
|
||||
'owner': lambda: self.options['owner'],
|
||||
'paused': lambda: self.status.paused,
|
||||
'prioritize_first_last': lambda: self.options['prioritize_first_last_pieces'],
|
||||
'prioritize_first_last': lambda: self.options[
|
||||
'prioritize_first_last_pieces'
|
||||
],
|
||||
# Deprecated: Use prioritize_first_last_pieces
|
||||
'prioritize_first_last_pieces': lambda: self.options['prioritize_first_last_pieces'],
|
||||
'prioritize_first_last_pieces': lambda: self.options[
|
||||
'prioritize_first_last_pieces'
|
||||
],
|
||||
'sequential_download': lambda: self.options['sequential_download'],
|
||||
'progress': self.get_progress,
|
||||
'shared': lambda: self.options['shared'],
|
||||
'remove_at_ratio': lambda: self.options['remove_at_ratio'],
|
||||
'save_path': lambda: self.options['download_location'], # Deprecated: Use download_location
|
||||
'save_path': lambda: self.options[
|
||||
'download_location'
|
||||
], # Deprecated: Use download_location
|
||||
'download_location': lambda: self.options['download_location'],
|
||||
'seeds_peers_ratio': lambda: -1.0 if self.status.num_incomplete == 0 else ( # Use -1.0 to signify infinity
|
||||
self.status.num_complete / self.status.num_incomplete),
|
||||
'seeds_peers_ratio': lambda: -1.0
|
||||
if self.status.num_incomplete == 0
|
||||
# Use -1.0 to signify infinity
|
||||
else (self.status.num_complete / self.status.num_incomplete),
|
||||
'seed_rank': lambda: self.status.seed_rank,
|
||||
'state': lambda: self.state,
|
||||
'stop_at_ratio': lambda: self.options['stop_at_ratio'],
|
||||
|
@ -1032,19 +1152,32 @@ class Torrent(object):
|
|||
'total_seeds': lambda: self.status.num_complete,
|
||||
'total_uploaded': lambda: self.status.all_time_upload,
|
||||
'total_wanted': lambda: self.status.total_wanted,
|
||||
'total_remaining': lambda: self.status.total_wanted - self.status.total_wanted_done,
|
||||
'total_remaining': lambda: self.status.total_wanted
|
||||
- self.status.total_wanted_done,
|
||||
'tracker': lambda: self.status.current_tracker,
|
||||
'tracker_host': self.get_tracker_host,
|
||||
'trackers': lambda: self.trackers,
|
||||
'tracker_status': lambda: self.tracker_status,
|
||||
'upload_payload_rate': lambda: self.status.upload_payload_rate,
|
||||
'comment': lambda: decode_bytes(self.torrent_info.comment()) if self.has_metadata else '',
|
||||
'creator': lambda: decode_bytes(self.torrent_info.creator()) if self.has_metadata else '',
|
||||
'num_files': lambda: self.torrent_info.num_files() if self.has_metadata else 0,
|
||||
'num_pieces': lambda: self.torrent_info.num_pieces() if self.has_metadata else 0,
|
||||
'piece_length': lambda: self.torrent_info.piece_length() if self.has_metadata else 0,
|
||||
'comment': lambda: decode_bytes(self.torrent_info.comment())
|
||||
if self.has_metadata
|
||||
else '',
|
||||
'creator': lambda: decode_bytes(self.torrent_info.creator())
|
||||
if self.has_metadata
|
||||
else '',
|
||||
'num_files': lambda: self.torrent_info.num_files()
|
||||
if self.has_metadata
|
||||
else 0,
|
||||
'num_pieces': lambda: self.torrent_info.num_pieces()
|
||||
if self.has_metadata
|
||||
else 0,
|
||||
'piece_length': lambda: self.torrent_info.piece_length()
|
||||
if self.has_metadata
|
||||
else 0,
|
||||
'private': lambda: self.torrent_info.priv() if self.has_metadata else False,
|
||||
'total_size': lambda: self.torrent_info.total_size() if self.has_metadata else 0,
|
||||
'total_size': lambda: self.torrent_info.total_size()
|
||||
if self.has_metadata
|
||||
else 0,
|
||||
'eta': self.get_eta,
|
||||
'file_progress': self.get_file_progress,
|
||||
'files': self.get_files,
|
||||
|
@ -1061,7 +1194,7 @@ class Torrent(object):
|
|||
'super_seeding': lambda: self.status.super_seeding,
|
||||
'time_since_download': lambda: self.status.time_since_download,
|
||||
'time_since_upload': lambda: self.status.time_since_upload,
|
||||
'time_since_transfer': self.get_time_since_transfer
|
||||
'time_since_transfer': self.get_time_since_transfer,
|
||||
}
|
||||
|
||||
def pause(self):
|
||||
|
@ -1072,37 +1205,48 @@ class Torrent(object):
|
|||
|
||||
"""
|
||||
# Turn off auto-management so the torrent will not be unpaused by lt queueing
|
||||
self.handle.auto_managed(False)
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.auto_managed,
|
||||
set_flag=False,
|
||||
)
|
||||
if self.state == 'Error':
|
||||
return False
|
||||
log.debug('Unable to pause torrent while in Error state')
|
||||
elif self.status.paused:
|
||||
# This torrent was probably paused due to being auto managed by lt
|
||||
# Since we turned auto_managed off, we should update the state which should
|
||||
# show it as 'Paused'. We need to emit a torrent_paused signal because
|
||||
# the torrent_paused alert from libtorrent will not be generated.
|
||||
self.update_state()
|
||||
component.get('EventManager').emit(TorrentStateChangedEvent(self.torrent_id, 'Paused'))
|
||||
component.get('EventManager').emit(
|
||||
TorrentStateChangedEvent(self.torrent_id, 'Paused')
|
||||
)
|
||||
else:
|
||||
try:
|
||||
self.handle.pause()
|
||||
except RuntimeError as ex:
|
||||
log.debug('Unable to pause torrent: %s', ex)
|
||||
return False
|
||||
return True
|
||||
|
||||
def resume(self):
|
||||
"""Resumes this torrent."""
|
||||
if self.status.paused and self.status.auto_managed:
|
||||
log.debug('Resume not possible for auto-managed torrent!')
|
||||
elif self.forced_error and self.forced_error.was_paused:
|
||||
log.debug('Resume skipped for forced_error torrent as it was originally paused.')
|
||||
elif (self.status.is_finished and self.options['stop_at_ratio'] and
|
||||
self.get_ratio() >= self.options['stop_ratio']):
|
||||
log.debug(
|
||||
'Resume skipped for forced_error torrent as it was originally paused.'
|
||||
)
|
||||
elif (
|
||||
self.status.is_finished
|
||||
and self.options['stop_at_ratio']
|
||||
and self.get_ratio() >= self.options['stop_ratio']
|
||||
):
|
||||
log.debug('Resume skipped for torrent as it has reached "stop_seed_ratio".')
|
||||
else:
|
||||
# Check if torrent was originally being auto-managed.
|
||||
if self.options['auto_managed']:
|
||||
self.handle.auto_managed(True)
|
||||
self._set_handle_flags(
|
||||
flag=lt.torrent_flags.auto_managed,
|
||||
set_flag=True,
|
||||
)
|
||||
try:
|
||||
self.handle.resume()
|
||||
except RuntimeError as ex:
|
||||
|
@ -1125,8 +1269,8 @@ class Torrent(object):
|
|||
bool: True is successful, otherwise False
|
||||
"""
|
||||
try:
|
||||
self.handle.connect_peer((peer_ip, peer_port), 0)
|
||||
except RuntimeError as ex:
|
||||
self.handle.connect_peer((peer_ip, int(peer_port)), 0)
|
||||
except (RuntimeError, ValueError) as ex:
|
||||
log.debug('Unable to connect to peer: %s', ex)
|
||||
return False
|
||||
return True
|
||||
|
@ -1147,9 +1291,13 @@ class Torrent(object):
|
|||
try:
|
||||
os.makedirs(dest)
|
||||
except OSError as ex:
|
||||
log.error('Could not move storage for torrent %s since %s does '
|
||||
log.error(
|
||||
'Could not move storage for torrent %s since %s does '
|
||||
'not exist and could not create the directory: %s',
|
||||
self.torrent_id, dest, ex)
|
||||
self.torrent_id,
|
||||
dest,
|
||||
ex,
|
||||
)
|
||||
return False
|
||||
|
||||
try:
|
||||
|
@ -1182,8 +1330,9 @@ class Torrent(object):
|
|||
flags = lt.save_resume_flags_t.flush_disk_cache if flush_disk_cache else 0
|
||||
# Don't generate fastresume data if torrent is in a Deluge Error state.
|
||||
if self.forced_error:
|
||||
component.get('TorrentManager').waiting_on_resume_data[self.torrent_id].errback(
|
||||
UserWarning('Skipped creating resume_data while in Error state'))
|
||||
component.get('TorrentManager').waiting_on_resume_data[
|
||||
self.torrent_id
|
||||
].errback(UserWarning('Skipped creating resume_data while in Error state'))
|
||||
else:
|
||||
self.handle.save_resume_data(flags)
|
||||
|
||||
|
@ -1201,16 +1350,15 @@ class Torrent(object):
|
|||
try:
|
||||
with open(filepath, 'wb') as save_file:
|
||||
save_file.write(filedump)
|
||||
except IOError as ex:
|
||||
except OSError as ex:
|
||||
log.error('Unable to save torrent file to: %s', ex)
|
||||
|
||||
filepath = os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')
|
||||
# Regenerate the file priorities
|
||||
self.set_file_priorities([])
|
||||
|
||||
if filedump is None:
|
||||
metadata = lt.bdecode(self.torrent_info.metadata())
|
||||
torrent_file = {b'info': metadata}
|
||||
filedump = lt.bencode(torrent_file)
|
||||
lt_ct = lt.create_torrent(self.torrent_info)
|
||||
filedump = lt.bencode(lt_ct.generate())
|
||||
|
||||
write_file(filepath, filedump)
|
||||
|
||||
# If the user has requested a copy of the torrent be saved elsewhere we need to do that.
|
||||
|
@ -1222,9 +1370,13 @@ class Torrent(object):
|
|||
|
||||
def delete_torrentfile(self, delete_copies=False):
|
||||
"""Deletes the .torrent file in the state directory in config"""
|
||||
torrent_files = [os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')]
|
||||
if delete_copies:
|
||||
torrent_files.append(os.path.join(self.config['torrentfiles_location'], self.filename))
|
||||
torrent_files = [
|
||||
os.path.join(get_config_dir(), 'state', self.torrent_id + '.torrent')
|
||||
]
|
||||
if delete_copies and self.filename:
|
||||
torrent_files.append(
|
||||
os.path.join(self.config['torrentfiles_location'], self.filename)
|
||||
)
|
||||
|
||||
for torrent_file in torrent_files:
|
||||
log.debug('Deleting torrent file: %s', torrent_file)
|
||||
|
@ -1284,7 +1436,7 @@ class Torrent(object):
|
|||
# lt needs utf8 byte-string. Otherwise if wstrings enabled, unicode string.
|
||||
try:
|
||||
self.handle.rename_file(index, filename.encode('utf8'))
|
||||
except TypeError:
|
||||
except (UnicodeDecodeError, TypeError):
|
||||
self.handle.rename_file(index, filename)
|
||||
|
||||
def rename_folder(self, folder, new_folder):
|
||||
|
@ -1293,7 +1445,7 @@ class Torrent(object):
|
|||
This basically does a file rename on all of the folders children.
|
||||
|
||||
Args:
|
||||
folder (str): The orignal folder name
|
||||
folder (str): The original folder name
|
||||
new_folder (str): The new folder name
|
||||
|
||||
Returns:
|
||||
|
@ -1320,15 +1472,19 @@ class Torrent(object):
|
|||
new_path = _file['path'].replace(folder, new_folder, 1)
|
||||
try:
|
||||
self.handle.rename_file(_file['index'], new_path.encode('utf8'))
|
||||
except TypeError:
|
||||
except (UnicodeDecodeError, TypeError):
|
||||
self.handle.rename_file(_file['index'], new_path)
|
||||
|
||||
def on_folder_rename_complete(dummy_result, torrent, folder, new_folder):
|
||||
"""Folder rename complete"""
|
||||
component.get('EventManager').emit(TorrentFolderRenamedEvent(torrent.torrent_id, folder, new_folder))
|
||||
component.get('EventManager').emit(
|
||||
TorrentFolderRenamedEvent(torrent.torrent_id, folder, new_folder)
|
||||
)
|
||||
# Empty folders are removed after libtorrent folder renames
|
||||
self.remove_empty_folders(folder)
|
||||
torrent.waiting_on_folder_rename = [_dir for _dir in torrent.waiting_on_folder_rename if _dir]
|
||||
torrent.waiting_on_folder_rename = [
|
||||
_dir for _dir in torrent.waiting_on_folder_rename if _dir
|
||||
]
|
||||
component.get('TorrentManager').save_resume_data((self.torrent_id,))
|
||||
|
||||
d = DeferredList(list(wait_on_folder.values()))
|
||||
|
@ -1345,7 +1501,9 @@ class Torrent(object):
|
|||
"""
|
||||
# Removes leading slashes that can cause join to ignore download_location
|
||||
download_location = self.options['download_location']
|
||||
folder_full_path = os.path.normpath(os.path.join(download_location, folder.lstrip('\\/')))
|
||||
folder_full_path = os.path.normpath(
|
||||
os.path.join(download_location, folder.lstrip('\\/'))
|
||||
)
|
||||
|
||||
try:
|
||||
if not os.listdir(folder_full_path):
|
||||
|
@ -1356,7 +1514,9 @@ class Torrent(object):
|
|||
for name in dirs:
|
||||
try:
|
||||
os.removedirs(os.path.join(root, name))
|
||||
log.debug('Removed Empty Folder %s', os.path.join(root, name))
|
||||
log.debug(
|
||||
'Removed Empty Folder %s', os.path.join(root, name)
|
||||
)
|
||||
except OSError as ex:
|
||||
log.debug(ex)
|
||||
|
||||
|
@ -1379,16 +1539,22 @@ class Torrent(object):
|
|||
pieces = None
|
||||
else:
|
||||
pieces = []
|
||||
for piece, avail_piece in zip(self.status.pieces, self.handle.piece_availability()):
|
||||
for piece, avail_piece in zip(
|
||||
self.status.pieces, self.handle.piece_availability()
|
||||
):
|
||||
if piece:
|
||||
pieces.append(3) # Completed.
|
||||
# Completed.
|
||||
pieces.append(3)
|
||||
elif avail_piece:
|
||||
pieces.append(1) # Available, just not downloaded nor being downloaded.
|
||||
# Available, just not downloaded nor being downloaded.
|
||||
pieces.append(1)
|
||||
else:
|
||||
pieces.append(0) # Missing, no known peer with piece, or not asked for yet.
|
||||
# Missing, no known peer with piece, or not asked for yet.
|
||||
pieces.append(0)
|
||||
|
||||
for peer_info in self.handle.get_peer_info():
|
||||
if peer_info.downloading_piece_index >= 0:
|
||||
pieces[peer_info.downloading_piece_index] = 2 # Being downloaded from peer.
|
||||
# Being downloaded from peer.
|
||||
pieces[peer_info.downloading_piece_index] = 2
|
||||
|
||||
return pieces
|
||||
|
|
File diff suppressed because it is too large

deluge/crypto_utils.py (new file, 136 lines)

@ -0,0 +1,136 @@
#
# Copyright (C) 2007,2008 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#

import os
import stat

from OpenSSL import crypto
from OpenSSL.crypto import FILETYPE_PEM
from twisted.internet.ssl import (
    AcceptableCiphers,
    Certificate,
    CertificateOptions,
    KeyPair,
    TLSVersion,
)

import deluge.configmanager

# A TLS ciphers list.
# Sources for more information on TLS ciphers:
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# This list was inspired by the `urllib3` library
# - https://github.com/urllib3/urllib3/blob/master/urllib3/util/ssl_.py#L79
#
# The general intent is:
# - prefer cipher suites that offer perfect forward secrecy (ECDHE),
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
TLS_CIPHERS = ':'.join(
    [
        'ECDH+AESGCM',
        'ECDH+CHACHA20',
        'AES256-GCM-SHA384',
        'AES128-GCM-SHA256',
        '!DSS',
        '!aNULL',
        '!eNULL',
        '!MD5',
    ]
)

# This value tells OpenSSL to disable all SSL/TLS renegotiation.
SSL_OP_NO_RENEGOTIATION = 0x40000000


def get_context_factory(cert_path, pkey_path):
    """OpenSSL context factory.

    Generates an OpenSSL context factory using Twisted's CertificateOptions class.
    This will keep a server cipher order.

    Args:
        cert_path (string): The path to the certificate file
        pkey_path (string): The path to the private key file

    Returns:
        twisted.internet.ssl.CertificateOptions: An OpenSSL context factory
    """

    with open(cert_path) as cert:
        certificate = Certificate.loadPEM(cert.read()).original
    with open(pkey_path) as pkey:
        private_key = KeyPair.load(pkey.read(), FILETYPE_PEM).original
    ciphers = AcceptableCiphers.fromOpenSSLCipherString(TLS_CIPHERS)
    cert_options = CertificateOptions(
        privateKey=private_key,
        certificate=certificate,
        raiseMinimumTo=TLSVersion.TLSv1_2,
        acceptableCiphers=ciphers,
    )
    ctx = cert_options.getContext()
    ctx.use_certificate_chain_file(cert_path)
    ctx.set_options(SSL_OP_NO_RENEGOTIATION)

    return cert_options
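
For context, the CertificateOptions object returned above can be handed directly to Twisted as a TLS context factory. A minimal sketch of such wiring (the port, the served content and the certificate paths are illustrative, not taken from this diff):

from twisted.internet import reactor
from twisted.web import server, static

# Assumed to point at an existing cert/key pair, e.g. the ones Deluge keeps under <config>/ssl/.
ctx_factory = get_context_factory('daemon.cert', 'daemon.pkey')

# Serve any Twisted protocol factory over TLS with the hardened context.
site = server.Site(static.Data(b'hello', 'text/plain'))
reactor.listenSSL(8443, site, ctx_factory)
reactor.run()
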
|
||||
def check_ssl_keys():
    """
    Check for SSL cert/key and create them if necessary
    """
    ssl_dir = deluge.configmanager.get_config_dir('ssl')
    if not os.path.exists(ssl_dir):
        # The ssl folder doesn't exist so we need to create it
        os.makedirs(ssl_dir)
        generate_ssl_keys()
    else:
        for f in ('daemon.pkey', 'daemon.cert'):
            if not os.path.exists(os.path.join(ssl_dir, f)):
                generate_ssl_keys()
                break


def generate_ssl_keys():
    """
    This method generates a new SSL key/cert.
    """
    digest = 'sha256'

    # Generate key pair
    pkey = crypto.PKey()
    pkey.generate_key(crypto.TYPE_RSA, 2048)

    # Generate cert request
    req = crypto.X509Req()
    subj = req.get_subject()
    setattr(subj, 'CN', 'Deluge Daemon')
    req.set_pubkey(pkey)
    req.sign(pkey, digest)

    # Generate certificate
    cert = crypto.X509()
    cert.set_serial_number(0)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(60 * 60 * 24 * 365 * 3)  # Three Years
    cert.set_issuer(req.get_subject())
    cert.set_subject(req.get_subject())
    cert.set_pubkey(req.get_pubkey())
    cert.sign(pkey, digest)

    # Write out files
    ssl_dir = deluge.configmanager.get_config_dir('ssl')
    with open(os.path.join(ssl_dir, 'daemon.pkey'), 'wb') as _file:
        _file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
    with open(os.path.join(ssl_dir, 'daemon.cert'), 'wb') as _file:
        _file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    # Make the files only readable by this user
    for f in ('daemon.pkey', 'daemon.cert'):
        os.chmod(os.path.join(ssl_dir, f), stat.S_IREAD | stat.S_IWRITE)
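
One way to sanity-check what generate_ssl_keys() produced is to load the certificate back with pyOpenSSL. A small illustrative sketch (not part of the diff; it assumes the keys have already been generated):

import os

from OpenSSL import crypto

import deluge.configmanager

ssl_dir = deluge.configmanager.get_config_dir('ssl')
with open(os.path.join(ssl_dir, 'daemon.cert'), 'rb') as cert_file:
    cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_file.read())

print('CN:', cert.get_subject().CN)     # 'Deluge Daemon'
print('Expires:', cert.get_notAfter())  # ASN.1 timestamp roughly three years out
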
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2010 John Garland <johnnybg+deluge@gmail.com>
|
||||
#
|
||||
|
@ -7,12 +6,13 @@
|
|||
# See LICENSE for more details.
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import inspect
|
||||
import re
|
||||
import warnings
|
||||
from functools import wraps
|
||||
from typing import Any, Callable, Coroutine, TypeVar
|
||||
|
||||
from twisted.internet import defer
|
||||
|
||||
|
||||
def proxy(proxy_func):
|
||||
|
@ -23,11 +23,14 @@ def proxy(proxy_func):
|
|||
:param proxy_func: the proxy function
|
||||
:type proxy_func: function
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
return proxy_func(func, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
|
@ -53,10 +56,11 @@ def overrides(*args):
|
|||
if inspect.isfunction(args[0]):
|
||||
return _overrides(stack, args[0])
|
||||
else:
|
||||
# One or more classes are specifed, so return a function that will be
|
||||
# One or more classes are specified, so return a function that will be
|
||||
# called with the real function as argument
|
||||
def ret_func(func, **kwargs):
|
||||
return _overrides(stack, func, explicit_base_classes=args)
|
||||
|
||||
return ret_func
|
||||
|
||||
|
||||
|
@ -75,7 +79,10 @@ def _overrides(stack, method, explicit_base_classes=None):
|
|||
check_classes = base_classes
|
||||
|
||||
if not base_classes:
|
||||
raise ValueError('overrides decorator: unable to determine base class of class "%s"' % class_name)
|
||||
raise ValueError(
|
||||
'overrides decorator: unable to determine base class of class "%s"'
|
||||
% class_name
|
||||
)
|
||||
|
||||
def get_class(cls_name):
|
||||
if '.' not in cls_name:
|
||||
|
@ -91,47 +98,138 @@ def _overrides(stack, method, explicit_base_classes=None):
|
|||
|
||||
if explicit_base_classes:
|
||||
# One or more base classes are explicitly given, check only those classes
|
||||
override_classes = re.search(r'\s*@overrides\((.+)\)\s*', stack[1][4][0]).group(1)
|
||||
override_classes = re.search(r'\s*@overrides\((.+)\)\s*', stack[1][4][0]).group(
|
||||
1
|
||||
)
|
||||
override_classes = [c.strip() for c in override_classes.split(',')]
|
||||
check_classes = override_classes
|
||||
|
||||
for c in base_classes + check_classes:
|
||||
classes[c] = get_class(c)
|
||||
|
||||
# Verify that the excplicit override class is one of base classes
|
||||
# Verify that the explicit override class is one of base classes
|
||||
if explicit_base_classes:
|
||||
from itertools import product
|
||||
|
||||
for bc, cc in product(base_classes, check_classes):
|
||||
if issubclass(classes[bc], classes[cc]):
|
||||
break
|
||||
else:
|
||||
raise Exception('Excplicit override class "%s" is not a super class of: %s'
|
||||
% (explicit_base_classes, class_name))
|
||||
raise Exception(
|
||||
'Excplicit override class "%s" is not a super class of: %s'
|
||||
% (explicit_base_classes, class_name)
|
||||
)
|
||||
if not all(hasattr(classes[cls], method.__name__) for cls in check_classes):
|
||||
for cls in check_classes:
|
||||
if not hasattr(classes[cls], method.__name__):
|
||||
raise Exception('Function override "%s" not found in superclass: %s\n%s'
|
||||
% (method.__name__, cls, 'File: %s:%s' % (stack[1][1], stack[1][2])))
|
||||
raise Exception(
|
||||
'Function override "%s" not found in superclass: %s\n%s'
|
||||
% (
|
||||
method.__name__,
|
||||
cls,
|
||||
f'File: {stack[1][1]}:{stack[1][2]}',
|
||||
)
|
||||
)
|
||||
|
||||
if not any(hasattr(classes[cls], method.__name__) for cls in check_classes):
|
||||
raise Exception('Function override "%s" not found in any superclass: %s\n%s'
|
||||
% (method.__name__, check_classes, 'File: %s:%s' % (stack[1][1], stack[1][2])))
|
||||
raise Exception(
|
||||
'Function override "%s" not found in any superclass: %s\n%s'
|
||||
% (
|
||||
method.__name__,
|
||||
check_classes,
|
||||
f'File: {stack[1][1]}:{stack[1][2]}',
|
||||
)
|
||||
)
|
||||
return method
|
||||
|
||||
|
||||
def deprecated(func):
|
||||
"""This is a decorator which can be used to mark function as deprecated.
|
||||
|
||||
It will result in a warning being emmitted when the function is used.
|
||||
It will result in a warning being emitted when the function is used.
|
||||
|
||||
"""
|
||||
|
||||
@wraps(func)
|
||||
def depr_func(*args, **kwargs):
|
||||
warnings.simplefilter('always', DeprecationWarning) # Turn off filter
|
||||
warnings.warn('Call to deprecated function {}.'.format(func.__name__),
|
||||
category=DeprecationWarning, stacklevel=2)
|
||||
warnings.warn(
|
||||
f'Call to deprecated function {func.__name__}.',
|
||||
category=DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
warnings.simplefilter('default', DeprecationWarning) # Reset filter
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return depr_func
|
||||
|
||||
|
||||
class CoroutineDeferred(defer.Deferred):
|
||||
"""Wraps a coroutine in a Deferred.
|
||||
It will dynamically pass through the underlying coroutine without wrapping where appropriate.
|
||||
"""
|
||||
|
||||
def __init__(self, coro: Coroutine):
|
||||
# Delay this import to make sure a reactor was installed first
|
||||
from twisted.internet import reactor
|
||||
|
||||
super().__init__()
|
||||
self.coro = coro
|
||||
self.awaited = None
|
||||
self.activate_deferred = reactor.callLater(0, self.activate)
|
||||
|
||||
def __await__(self):
|
||||
if self.awaited in [None, True]:
|
||||
self.awaited = True
|
||||
return self.coro.__await__()
|
||||
# Already in deferred mode
|
||||
return super().__await__()
|
||||
|
||||
def activate(self):
|
||||
"""If the result wasn't awaited before the next context switch, we turn it into a deferred."""
|
||||
if self.awaited is None:
|
||||
self.awaited = False
|
||||
try:
|
||||
d = defer.Deferred.fromCoroutine(self.coro)
|
||||
except AttributeError:
|
||||
# Fallback for Twisted <= 21.2 without fromCoroutine
|
||||
d = defer.ensureDeferred(self.coro)
|
||||
d.chainDeferred(self)
|
||||
|
||||
def _callback_activate(self):
|
||||
"""Verify awaited status before calling activate."""
|
||||
assert not self.awaited, 'Cannot add callbacks to an already awaited coroutine.'
|
||||
self.activate()
|
||||
|
||||
def addCallback(self, *args, **kwargs): # noqa: N802
|
||||
self._callback_activate()
|
||||
return super().addCallback(*args, **kwargs)
|
||||
|
||||
def addCallbacks(self, *args, **kwargs): # noqa: N802
|
||||
self._callback_activate()
|
||||
return super().addCallbacks(*args, **kwargs)
|
||||
|
||||
def addErrback(self, *args, **kwargs): # noqa: N802
|
||||
self._callback_activate()
|
||||
return super().addErrback(*args, **kwargs)
|
||||
|
||||
def addBoth(self, *args, **kwargs): # noqa: N802
|
||||
self._callback_activate()
|
||||
return super().addBoth(*args, **kwargs)
|
||||
|
||||
|
||||
_RetT = TypeVar('_RetT')
|
||||
|
||||
|
||||
def maybe_coroutine(
|
||||
f: Callable[..., Coroutine[Any, Any, _RetT]],
|
||||
) -> 'Callable[..., defer.Deferred[_RetT]]':
|
||||
"""Wraps a coroutine function to make it usable as a normal function that returns a Deferred."""
|
||||
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
# Uncomment for quick testing to make sure CoroutineDeferred magic isn't at fault
|
||||
# return defer.ensureDeferred(f(*args, **kwargs))
|
||||
return CoroutineDeferred(f(*args, **kwargs))
|
||||
|
||||
return wrapper
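
maybe_coroutine lets an async def function be called from both coroutine-style and Deferred-style code. A small illustrative example (the function names are invented for this sketch and would need to run under a Twisted reactor):

from twisted.internet import defer, reactor, task


@maybe_coroutine
async def add_later(a, b):
    # Pretend to do some asynchronous work before returning a result.
    await task.deferLater(reactor, 0.1, lambda: None)
    return a + b


@defer.inlineCallbacks
def legacy_caller():
    # Old-style code can keep treating the result as a Deferred...
    result = yield add_later(2, 3)
    print('legacy:', result)


async def modern_caller():
    # ...while new-style code simply awaits it without extra wrapping.
    print('modern:', await add_later(2, 3))
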
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
|
||||
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
|
||||
|
@ -9,19 +8,15 @@
|
|||
#
|
||||
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
|
||||
class DelugeError(Exception):
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
inst = super(DelugeError, cls).__new__(cls, *args, **kwargs)
|
||||
inst = super().__new__(cls, *args, **kwargs)
|
||||
inst._args = args
|
||||
inst._kwargs = kwargs
|
||||
return inst
|
||||
|
||||
def __init__(self, message=None):
|
||||
super(DelugeError, self).__init__(message)
|
||||
super().__init__(message)
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
|
@ -45,14 +40,13 @@ class InvalidPathError(DelugeError):
|
|||
|
||||
|
||||
class WrappedException(DelugeError):
|
||||
|
||||
def __init__(self, message, exception_type, traceback):
|
||||
super(WrappedException, self).__init__(message)
|
||||
super().__init__(message)
|
||||
self.type = exception_type
|
||||
self.traceback = traceback
|
||||
|
||||
def __str__(self):
|
||||
return '%s\n%s' % (self.message, self.traceback)
|
||||
return f'{self.message}\n{self.traceback}'
|
||||
|
||||
|
||||
class _ClientSideRecreateError(DelugeError):
|
||||
|
@ -60,29 +54,29 @@ class _ClientSideRecreateError(DelugeError):
|
|||
|
||||
|
||||
class IncompatibleClient(_ClientSideRecreateError):
|
||||
|
||||
def __init__(self, daemon_version):
|
||||
self.daemon_version = daemon_version
|
||||
msg = 'Your deluge client is not compatible with the daemon. '\
|
||||
'Please upgrade your client to %(daemon_version)s' % \
|
||||
dict(daemon_version=self.daemon_version)
|
||||
super(IncompatibleClient, self).__init__(message=msg)
|
||||
msg = (
|
||||
'Your deluge client is not compatible with the daemon. '
|
||||
'Please upgrade your client to %(daemon_version)s'
|
||||
) % {'daemon_version': self.daemon_version}
|
||||
super().__init__(message=msg)
|
||||
|
||||
|
||||
class NotAuthorizedError(_ClientSideRecreateError):
|
||||
|
||||
def __init__(self, current_level, required_level):
|
||||
msg = 'Auth level too low: %(current_level)s < %(required_level)s' % \
|
||||
dict(current_level=current_level, required_level=required_level)
|
||||
super(NotAuthorizedError, self).__init__(message=msg)
|
||||
msg = ('Auth level too low: %(current_level)s < %(required_level)s') % {
|
||||
'current_level': current_level,
|
||||
'required_level': required_level,
|
||||
}
|
||||
super().__init__(message=msg)
|
||||
self.current_level = current_level
|
||||
self.required_level = required_level
|
||||
|
||||
|
||||
class _UsernameBasedPasstroughError(_ClientSideRecreateError):
|
||||
|
||||
def __init__(self, message, username):
|
||||
super(_UsernameBasedPasstroughError, self).__init__(message)
|
||||
super().__init__(message)
|
||||
self.username = username
|
||||
|
||||
|
||||
|
@ -96,3 +90,7 @@ class AuthenticationRequired(_UsernameBasedPasstroughError):
|
|||
|
||||
class AuthManagerError(_UsernameBasedPasstroughError):
|
||||
pass
|
||||
|
||||
|
||||
class LibtorrentImportError(ImportError):
|
||||
pass
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
|
||||
#
|
||||
|
@ -14,7 +13,6 @@ This module describes the types of events that can be generated by the daemon
|
|||
and subsequently emitted to the clients.
|
||||
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
known_events = {}
|
||||
|
||||
|
@ -23,13 +21,14 @@ class DelugeEventMetaClass(type):
|
|||
"""
|
||||
This metaclass simply keeps a list of all events classes created.
|
||||
"""
|
||||
def __init__(self, name, bases, dct): # pylint: disable=bad-mcs-method-argument
|
||||
super(DelugeEventMetaClass, self).__init__(name, bases, dct)
|
||||
|
||||
def __init__(cls, name, bases, dct): # pylint: disable=bad-mcs-method-argument
|
||||
super().__init__(name, bases, dct)
|
||||
if name != 'DelugeEvent':
|
||||
known_events[name] = self
|
||||
known_events[name] = cls
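
The metaclass registers every event subclass in known_events as a side effect of class creation. A tiny standalone illustration of the mechanism (sketch only, not Deluge code):

registry = {}


class RegisteringMeta(type):
    def __init__(cls, name, bases, dct):
        super().__init__(name, bases, dct)
        if name != 'Base':
            registry[name] = cls


class Base(metaclass=RegisteringMeta):
    pass


class SomethingHappenedEvent(Base):
    pass


print(registry)  # {'SomethingHappenedEvent': <class '__main__.SomethingHappenedEvent'>}
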
|
||||
|
||||
|
||||
class DelugeEvent(object):
|
||||
class DelugeEvent(metaclass=DelugeEventMetaClass):
|
||||
"""
|
||||
The base class for all events.
|
||||
|
||||
|
@ -39,7 +38,6 @@ class DelugeEvent(object):
|
|||
:type args: list

"""
__metaclass__ = DelugeEventMetaClass

def _get_name(self):
return self.__class__.__name__
@@ -57,6 +55,7 @@ class TorrentAddedEvent(DelugeEvent):
"""
Emitted when a new torrent is successfully added to the session.
"""

def __init__(self, torrent_id, from_state):
"""
:param torrent_id: the torrent_id of the torrent that was added
@@ -71,6 +70,7 @@ class TorrentRemovedEvent(DelugeEvent):
"""
Emitted when a torrent has been removed from the session.
"""

def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
@@ -83,6 +83,7 @@ class PreTorrentRemovedEvent(DelugeEvent):
"""
Emitted when a torrent is about to be removed from the session.
"""

def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
@@ -95,6 +96,7 @@ class TorrentStateChangedEvent(DelugeEvent):
"""
Emitted when a torrent changes state.
"""

def __init__(self, torrent_id, state):
"""
:param torrent_id: the torrent_id
@@ -109,6 +111,7 @@ class TorrentTrackerStatusEvent(DelugeEvent):
"""
Emitted when a torrents tracker status changes.
"""

def __init__(self, torrent_id, status):
"""
Args:
@@ -122,6 +125,7 @@ class TorrentQueueChangedEvent(DelugeEvent):
"""
Emitted when the queue order has changed.
"""

pass


@@ -129,6 +133,7 @@ class TorrentFolderRenamedEvent(DelugeEvent):
"""
Emitted when a folder within a torrent has been renamed.
"""

def __init__(self, torrent_id, old, new):
"""
:param torrent_id: the torrent_id
@@ -145,6 +150,7 @@ class TorrentFileRenamedEvent(DelugeEvent):
"""
Emitted when a file within a torrent has been renamed.
"""

def __init__(self, torrent_id, index, name):
"""
:param torrent_id: the torrent_id
@@ -161,6 +167,7 @@ class TorrentFinishedEvent(DelugeEvent):
"""
Emitted when a torrent finishes downloading.
"""

def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
@@ -173,6 +180,7 @@ class TorrentResumedEvent(DelugeEvent):
"""
Emitted when a torrent resumes from a paused state.
"""

def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
@@ -185,6 +193,7 @@ class TorrentFileCompletedEvent(DelugeEvent):
"""
Emitted when a file completes.
"""

def __init__(self, torrent_id, index):
"""
:param torrent_id: the torrent_id
@@ -199,6 +208,7 @@ class TorrentStorageMovedEvent(DelugeEvent):
"""
Emitted when the storage location for a torrent has been moved.
"""

def __init__(self, torrent_id, path):
"""
:param torrent_id: the torrent_id
@@ -213,6 +223,7 @@ class CreateTorrentProgressEvent(DelugeEvent):
"""
Emitted when creating a torrent file remotely.
"""

def __init__(self, piece_count, num_pieces):
self._args = [piece_count, num_pieces]

@@ -221,6 +232,7 @@ class NewVersionAvailableEvent(DelugeEvent):
"""
Emitted when a more recent version of Deluge is available.
"""

def __init__(self, new_release):
"""
:param new_release: the new version that is available
@@ -234,6 +246,7 @@ class SessionStartedEvent(DelugeEvent):
Emitted when a session has started. This typically only happens once when
the daemon is initially started.
"""

pass


@@ -241,6 +254,7 @@ class SessionPausedEvent(DelugeEvent):
"""
Emitted when the session has been paused.
"""

pass


@@ -248,6 +262,7 @@ class SessionResumedEvent(DelugeEvent):
"""
Emitted when the session has been resumed.
"""

pass


@@ -255,6 +270,7 @@ class ConfigValueChangedEvent(DelugeEvent):
"""
Emitted when a config value changes in the Core.
"""

def __init__(self, key, value):
"""
:param key: the key that changed
@@ -268,6 +284,7 @@ class PluginEnabledEvent(DelugeEvent):
"""
Emitted when a plugin is enabled in the Core.
"""

def __init__(self, plugin_name):
self._args = [plugin_name]

@@ -276,6 +293,7 @@ class PluginDisabledEvent(DelugeEvent):
"""
Emitted when a plugin is disabled in the Core.
"""

def __init__(self, plugin_name):
self._args = [plugin_name]

@@ -284,6 +302,7 @@ class ClientDisconnectedEvent(DelugeEvent):
"""
Emitted when a client disconnects.
"""

def __init__(self, session_id):
self._args = [session_id]

@@ -292,6 +311,7 @@ class ExternalIPEvent(DelugeEvent):
"""
Emitted when the external ip address is received from libtorrent.
"""

def __init__(self, external_ip):
"""
Args:
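All of the event classes in this file follow the same pattern: subclass DelugeEvent, describe the event in the class docstring, and stash the constructor arguments in self._args so they can be forwarded to handlers. A minimal sketch of a hypothetical plugin-defined event in that style (the class name and its arguments are illustrative, not part of this diff):

from deluge.event import DelugeEvent


class TorrentLabelChangedEvent(DelugeEvent):
    """
    Emitted when a label is applied to a torrent (hypothetical example).
    """

    def __init__(self, torrent_id, label):
        """
        :param torrent_id: the torrent_id
        :type torrent_id: string
        :param label: the newly applied label
        :type label: string
        """
        self._args = [torrent_id, label]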
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#
@@ -7,129 +6,200 @@
# See LICENSE for more details.
#

from __future__ import unicode_literals

import email.message
import logging
import os.path
import zlib

from twisted.internet import reactor
from twisted.internet.defer import Deferred
from twisted.python.failure import Failure
from twisted.web import client, http
from twisted.web.error import PageRedirect

from deluge.common import get_version, utf8_encode_structure

try:
from urllib.parse import urljoin
except ImportError:
# PY2 fallback
from urlparse import urljoin # pylint: disable=ungrouped-imports
from twisted.web._newclient import HTTPClientParser
from twisted.web.error import Error, PageRedirect
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent
from zope.interface import implementer

log = logging.getLogger(__name__)


class HTTPDownloader(client.HTTPDownloader):
class CompressionDecoder(client.GzipDecoder):
"""A compression decoder for gzip, x-gzip and deflate."""

def deliverBody(self, protocol): # NOQA: N802
self.original.deliverBody(CompressionDecoderProtocol(protocol, self.original))


class CompressionDecoderProtocol(client._GzipProtocol):
"""A compression decoder protocol for CompressionDecoder."""

def __init__(self, protocol, response):
super().__init__(protocol, response)
self._zlibDecompress = zlib.decompressobj(32 + zlib.MAX_WBITS)
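CompressionDecoderProtocol replaces the parent's decompressor with one created as zlib.decompressobj(32 + zlib.MAX_WBITS); as the comment further down in request_callback notes, adding 32 to wbits turns on automatic header detection, so a single decoder handles both gzip and zlib-wrapped deflate bodies. A small standalone sketch of that trick, with an illustrative payload:

import gzip
import zlib

payload = b'Hello from Deluge!' * 10

# wbits = 32 + MAX_WBITS autodetects the header, so the same construction
# decodes gzip data and zlib ("deflate") data alike.
for compressed in (gzip.compress(payload), zlib.compress(payload)):
    decoder = zlib.decompressobj(32 + zlib.MAX_WBITS)
    assert decoder.decompress(compressed) + decoder.flush() == payload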
class BodyHandler(HTTPClientParser):
"""An HTTP parser that saves the response to a file."""

def __init__(self, request, finished, length, agent, encoding=None):
"""BodyHandler init.

Args:
request (t.w.i.IClientRequest): The parser request.
finished (Deferred): A Deferred to handle the finished response.
length (int): The length of the response.
agent (t.w.i.IAgent): The agent from which the request was sent.
"""
Factory class for downloading files and keeping track of progress.
"""
def __init__(self, url, filename, part_callback=None, headers=None,
force_filename=False, allow_compression=True):
"""
:param url: the url to download from
:type url: string
:param filename: the filename to save the file as
:type filename: string
:param force_filename: forces use of the supplied filename, regardless of header content
:type force_filename: bool
:param part_callback: a function to be called when a part of data
is received, it's signature should be: func(data, current_length, total_length)
:type part_callback: function
:param headers: any optional headers to send
:type headers: dictionary
super().__init__(request, finished)
self.agent = agent
self.finished = finished
self.total_length = length
self.current_length = 0
self.data = b''
self.encoding = encoding

def dataReceived(self, data): # NOQA: N802
self.current_length += len(data)
self.data += data
if self.agent.part_callback:
self.agent.part_callback(data, self.current_length, self.total_length)

def connectionLost(self, reason): # NOQA: N802
if self.encoding:
self.data = self.data.decode(self.encoding).encode('utf8')
with open(self.agent.filename, 'wb') as _file:
_file.write(self.data)
self.finished.callback(self.agent.filename)
self.state = 'DONE'
HTTPClientParser.connectionLost(self, reason)
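BodyHandler.dataReceived hands every chunk to the agent's part_callback along with the running and total byte counts, which is what the docstrings mean by the signature func(data, current_length, total_length). A hedged sketch of such a callback (the print-based progress report is only an illustration):

def on_part(data, current_length, total_length):
    # Matches the documented signature: func(data, current_length, total_length).
    if total_length:
        percent = 100.0 * current_length / total_length
        print('downloaded %d of %d bytes (%.1f%%)' % (current_length, total_length, percent))
    else:
        # Responses without a Content-Length report a total of 0.
        print('downloaded %d bytes' % current_length)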
@implementer(IAgent)
class HTTPDownloaderAgent:
"""A File Downloader Agent."""

def __init__(
self,
agent,
filename,
part_callback=None,
force_filename=False,
allow_compression=True,
handle_redirect=True,
):
"""HTTPDownloaderAgent init.

Args:
agent (t.w.c.Agent): The agent which will send the requests.
filename (str): The filename to save the file as.
force_filename (bool): Forces use of the supplied filename,
regardless of header content.
part_callback (func): A function to be called when a part of data
is received, it's signature should be:
func(data, current_length, total_length)
"""

self.handle_redirect = handle_redirect
self.agent = agent
self.filename = filename
self.part_callback = part_callback
self.current_length = 0
self.total_length = 0
self.decoder = None
self.value = filename
self.force_filename = force_filename
self.allow_compression = allow_compression
self.code = None
agent = b'Deluge/%s (http://deluge-torrent.org)' % get_version().encode('utf8')
self.decoder = None

client.HTTPDownloader.__init__(self, url, filename, headers=headers, agent=agent)
def request_callback(self, response):
finished = Deferred()

def gotStatus(self, version, status, message): # NOQA: N802
self.code = int(status)
client.HTTPDownloader.gotStatus(self, version, status, message)

def gotHeaders(self, headers): # NOQA: N802
if self.code == http.OK:
if 'content-length' in headers:
self.total_length = int(headers['content-length'][0])
if not self.handle_redirect and response.code in (
http.MOVED_PERMANENTLY,
http.FOUND,
http.SEE_OTHER,
http.TEMPORARY_REDIRECT,
):
location = response.headers.getRawHeaders(b'location')[0]
error = PageRedirect(response.code, location=location)
finished.errback(Failure(error))
elif response.code >= 400:
error = Error(response.code)
finished.errback(Failure(error))
else:
self.total_length = 0
headers = response.headers
body_length = int(headers.getRawHeaders(b'content-length', default=[0])[0])

if self.allow_compression and 'content-encoding' in headers and \
headers['content-encoding'][0] in ('gzip', 'x-gzip', 'deflate'):
# Adding 32 to the wbits enables gzip & zlib decoding (with automatic header detection)
# Adding 16 just enables gzip decoding (no zlib)
self.decoder = zlib.decompressobj(zlib.MAX_WBITS + 32)

if 'content-disposition' in headers and not self.force_filename:
new_file_name = str(headers['content-disposition'][0]).split(';')[1].split('=')[1]
if headers.hasHeader(b'content-disposition') and not self.force_filename:
content_disp = headers.getRawHeaders(b'content-disposition')[0].decode(
'utf-8'
)
message = email.message.EmailMessage()
message['content-disposition'] = content_disp
new_file_name = message.get_filename()
if new_file_name:
new_file_name = sanitise_filename(new_file_name)
new_file_name = os.path.join(os.path.split(self.value)[0], new_file_name)
new_file_name = os.path.join(
os.path.split(self.filename)[0], new_file_name
)

count = 1
fileroot = os.path.splitext(new_file_name)[0]
fileext = os.path.splitext(new_file_name)[1]
while os.path.isfile(new_file_name):
# Increment filename if already exists
new_file_name = '%s-%s%s' % (fileroot, count, fileext)
new_file_name = f'{fileroot}-{count}{fileext}'
count += 1

self.fileName = new_file_name
self.value = new_file_name
self.filename = new_file_name

elif self.code in (http.MOVED_PERMANENTLY, http.FOUND, http.SEE_OTHER, http.TEMPORARY_REDIRECT):
location = headers['location'][0]
error = PageRedirect(self.code, location=location)
self.noPage(Failure(error))
cont_type_header = headers.getRawHeaders(b'content-type')[0].decode()
message = email.message.EmailMessage()
message['content-type'] = cont_type_header
cont_type = message.get_content_type()
params = message['content-type'].params
# Only re-ecode text content types.
encoding = None
if cont_type.startswith('text/'):
encoding = params.get('charset', None)
response.deliverBody(
BodyHandler(response.request, finished, body_length, self, encoding)
)

return client.HTTPDownloader.gotHeaders(self, headers)
return finished
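Rather than slicing the Content-Disposition header by hand, request_callback now feeds the raw header values into email.message.EmailMessage and lets it extract the filename and, for text content types, the charset parameter. A standalone sketch of that parsing approach (the header values below are made up):

import email.message

content_disp = 'attachment; filename="ubuntu.torrent"'   # illustrative value
content_type = 'text/html; charset=ISO-8859-1'           # illustrative value

message = email.message.EmailMessage()
message['content-disposition'] = content_disp
message['content-type'] = content_type

print(message.get_filename())                          # ubuntu.torrent
print(message.get_content_type())                      # text/html
print(message['content-type'].params.get('charset'))   # ISO-8859-1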
def pagePart(self, data): # NOQA: N802
if self.code == http.OK:
self.current_length += len(data)
if self.decoder:
data = self.decoder.decompress(data)
if self.part_callback:
self.part_callback(data, self.current_length, self.total_length)
def request(self, method, uri, headers=None, body_producer=None):
"""Issue a new request to the wrapped agent.

return client.HTTPDownloader.pagePart(self, data)
Args:
method (bytes): The HTTP method to use.
uri (bytes): The url to download from.
headers (t.w.h.Headers, optional): Any extra headers to send.
body_producer (t.w.i.IBodyProducer, optional): Request body data.

def pageEnd(self): # NOQA: N802
if self.decoder:
data = self.decoder.flush()
self.current_length -= len(data)
self.decoder = None
self.pagePart(data)
Returns:
Deferred: The filename of the of the downloaded file.
"""
if headers is None:
headers = Headers()

return client.HTTPDownloader.pageEnd(self)
if not headers.hasHeader(b'User-Agent'):
user_agent = 'Deluge'
headers.addRawHeader('User-Agent', user_agent)

d = self.agent.request(
method=method, uri=uri, headers=headers, bodyProducer=body_producer
)
d.addCallback(self.request_callback)
return d


def sanitise_filename(filename):
"""
Sanitises a filename to use as a download destination file.
"""Sanitises a filename to use as a download destination file.

Logs any filenames that could be considered malicious.

:param filename: the filename to sanitise
:type filename: string
:returns: the sanitised filename
:rtype: string
filename (str): The filename to sanitise.

Returns:
str: The sanitised filename.
"""

# Remove any quotes
@@ -137,136 +207,128 @@ def sanitise_filename(filename):

if os.path.basename(filename) != filename:
# Dodgy server, log it
log.warning('Potentially malicious server: trying to write to file: %s', filename)
log.warning(
'Potentially malicious server: trying to write to file: %s', filename
)
# Only use the basename
filename = os.path.basename(filename)

filename = filename.strip()
if filename.startswith('.') or ';' in filename or '|' in filename:
# Dodgy server, log it
log.warning('Potentially malicious server: trying to write to file: %s', filename)
log.warning(
'Potentially malicious server: trying to write to file: %s', filename
)

return filename
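Given the checks above, sanitise_filename only ever hands back a bare basename and logs anything that looks like path traversal or shell trickery. A hedged illustration of the intended behaviour (the module path in the import and the filenames are assumptions for the example):

from deluge.httpdownloader import sanitise_filename  # assumed module path

# A path suggested by a dodgy server collapses to its basename (and is logged).
print(sanitise_filename('../../etc/passwd'))   # passwd
# A plain filename passes through unchanged.
print(sanitise_filename('ubuntu.torrent'))     # ubuntu.torrent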
def _download_file(url, filename, callback=None, headers=None, force_filename=False, allow_compression=True):
"""
Downloads a file from a specific URL and returns a Deferred. A callback
function can be specified to be called as parts are received.
def _download_file(
url,
filename,
callback=None,
headers=None,
force_filename=False,
allow_compression=True,
handle_redirects=True,
):
"""Downloads a file from a specific URL and returns a Deferred.

A callback function can be specified to be called as parts are received.

Args:
url (str): The url to download from
filename (str): The filename to save the file as
callback (func): A function to be called when a part of data is received,
url (str): The url to download from.
filename (str): The filename to save the file as.
callback (func): A function to be called when partial data is received,
it's signature should be: func(data, current_length, total_length)
headers (dict): Any optional headers to send
force_filename (bool): force us to use the filename specified rather than
one the server may suggest
allow_compression (bool): Allows gzip & deflate decoding
headers (dict): Any optional headers to send.
force_filename (bool): Force using the filename specified rather than
one the server may suggest.
allow_compression (bool): Allows gzip & deflate decoding.

Returns:
Deferred: the filename of the downloaded file
Deferred: The filename of the downloaded file.

Raises:
t.w.e.PageRedirect
t.w.e.Error: for all other HTTP response errors

"""

agent = client.Agent(reactor)

if allow_compression:
if not headers:
headers = {}
headers['accept-encoding'] = 'deflate, gzip, x-gzip'
enc_accepted = ['gzip', 'x-gzip', 'deflate']
decoders = [(enc.encode(), CompressionDecoder) for enc in enc_accepted]
agent = client.ContentDecoderAgent(agent, decoders)
if handle_redirects:
agent = client.RedirectAgent(agent)

url = url.encode('utf8')
filename = filename.encode('utf8')
headers = utf8_encode_structure(headers) if headers else headers
factory = HTTPDownloader(url, filename, callback, headers, force_filename, allow_compression)
agent = HTTPDownloaderAgent(
agent, filename, callback, force_filename, allow_compression, handle_redirects
)

# In Twisted 13.1.0 _parse() function replaced by _URI class.
# In Twisted 15.0.0 _URI class renamed to URI.
if hasattr(client, '_parse'):
scheme, host, port, dummy_path = client._parse(url)
else:
try:
from twisted.web.client import _URI as URI
except ImportError:
from twisted.web.client import URI
finally:
uri = URI.fromBytes(url)
scheme = uri.scheme
host = uri.host
port = uri.port
# The Headers init expects dict values to be a list.
if headers:
for name, value in list(headers.items()):
if not isinstance(value, list):
headers[name] = [value]

if scheme == 'https':
from twisted.internet import ssl
# ClientTLSOptions in Twisted >= 14, see ticket #2765 for details on this addition.
try:
from twisted.internet._sslverify import ClientTLSOptions
except ImportError:
ctx_factory = ssl.ClientContextFactory()
else:
class TLSSNIContextFactory(ssl.ClientContextFactory): # pylint: disable=no-init
"""
A custom context factory to add a server name for TLS connections.
"""
def getContext(self): # NOQA: N802
ctx = ssl.ClientContextFactory.getContext(self)
ClientTLSOptions(host, ctx)
return ctx
ctx_factory = TLSSNIContextFactory()

reactor.connectSSL(host, port, factory, ctx_factory)
else:
reactor.connectTCP(host, port, factory)

return factory.deferred
return agent.request(b'GET', url.encode(), Headers(headers))


def download_file(url, filename, callback=None, headers=None, force_filename=False,
allow_compression=True, handle_redirects=True):
"""
Downloads a file from a specific URL and returns a Deferred. A callback
function can be specified to be called as parts are received.
def download_file(
url,
filename,
callback=None,
headers=None,
force_filename=False,
allow_compression=True,
handle_redirects=True,
):
"""Downloads a file from a specific URL and returns a Deferred.

A callback function can be specified to be called as parts are received.

Args:
url (str): The url to download from
filename (str): The filename to save the file as
callback (func): A function to be called when a part of data is received,
it's signature should be: func(data, current_length, total_length)
headers (dict): Any optional headers to send
force_filename (bool): force us to use the filename specified rather than
one the server may suggest
allow_compression (bool): Allows gzip & deflate decoding
handle_redirects (bool): If HTTP redirects should be handled automatically
url (str): The url to download from.
filename (str): The filename to save the file as.
callback (func): A function to be called when partial data is received,
it's signature should be: func(data, current_length, total_length).
headers (dict): Any optional headers to send.
force_filename (bool): Force the filename specified rather than one the
server may suggest.
allow_compression (bool): Allows gzip & deflate decoding.
handle_redirects (bool): HTTP redirects handled automatically or not.

Returns:
Deferred: the filename of the downloaded file
Deferred: The filename of the downloaded file.

Raises:
t.w.e.PageRedirect: Unless handle_redirects=True
t.w.e.Error: for all other HTTP response errors

t.w.e.PageRedirect: If handle_redirects is False.
t.w.e.Error: For all other HTTP response errors.
"""

def on_download_success(result):
log.debug('Download success!')
return result

def on_download_fail(failure):
if failure.check(PageRedirect) and handle_redirects:
new_url = urljoin(url, failure.getErrorMessage().split(' to ')[1])
result = _download_file(new_url, filename, callback=callback, headers=headers,
force_filename=force_filename,
allow_compression=allow_compression)
result.addCallbacks(on_download_success, on_download_fail)
else:
# Log the failure and pass to the caller
log.warning('Error occurred downloading file from "%s": %s',
url, failure.getErrorMessage())
log.warning(
'Error occurred downloading file from "%s": %s',
url,
failure.getErrorMessage(),
)
result = failure
return result

d = _download_file(url, filename, callback=callback, headers=headers,
force_filename=force_filename, allow_compression=allow_compression)
d = _download_file(
url,
filename,
callback=callback,
headers=headers,
force_filename=force_filename,
allow_compression=allow_compression,
handle_redirects=handle_redirects,
)
d.addCallbacks(on_download_success, on_download_fail)
return d
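Callers of download_file get a Deferred that fires with the final filename, which can differ from the one requested when the server supplies a Content-Disposition name. A hedged usage sketch from a plain Twisted script, assuming this module is importable as deluge.httpdownloader and using an illustrative URL:

from twisted.internet import reactor

from deluge.httpdownloader import download_file  # assumed module path


def on_done(result_filename):
    print('saved to', result_filename)
    reactor.stop()


def on_error(failure):
    print('download failed:', failure.getErrorMessage())
    reactor.stop()


d = download_file('http://example.com/file.torrent', 'file.torrent')
d.addCallbacks(on_done, on_error)
reactor.run()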
deluge/i18n/__init__.py (new file, 15 lines)

@@ -0,0 +1,15 @@
from .util import (
I18N_DOMAIN,
get_languages,
set_language,
setup_mock_translation,
setup_translation,
)

__all__ = [
'I18N_DOMAIN',
'set_language',
'get_languages',
'setup_translation',
'setup_mock_translation',
]
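The new package __init__ simply re-exports the helpers from deluge.i18n.util, so a UI can do its locale setup with a single import. A hedged sketch of typical usage; the exact signatures live in util.py, which is not shown in this diff, and the language code is illustrative:

from deluge.i18n import set_language, setup_translation

setup_translation()   # install gettext translations for the Deluge domain
set_language('fr')    # 'fr' is an illustrative language code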
Diffs suppressed because they are too large:
  deluge/i18n/ab.po  6431  (new file)
  deluge/i18n/af.po  6234  (new file)
  deluge/i18n/ar.po  8337
  deluge/i18n/ast.po  8715
  deluge/i18n/be.po  8292
  deluge/i18n/bg.po  8728
  deluge/i18n/bn.po  7345
  deluge/i18n/bs.po  7382
  deluge/i18n/ca.po  8393
  deluge/i18n/cs.po  8668
  deluge/i18n/cy.po  7460
  deluge/i18n/da.po  8846
  deluge/i18n/de.po  8847
  deluge/i18n/deluge.pot  6209  (new file)
  deluge/i18n/el.po  8756
  deluge/i18n/en_AU.po  8758
  deluge/i18n/en_CA.po  8746
  deluge/i18n/en_GB.po  8905
  deluge/i18n/eo.po  7433
  deluge/i18n/es.po  8757
  deluge/i18n/et.po  8784
  deluge/i18n/eu.po  8062
  deluge/i18n/fa.po  7684
  deluge/i18n/fi.po  8743
  deluge/i18n/fo.po  6217  (new file)
  deluge/i18n/fr.po  10575
  deluge/i18n/fy.po  8376
  deluge/i18n/ga.po  6214  (new file)
  deluge/i18n/gl.po  8468
  deluge/i18n/he.po  8557
  deluge/i18n/hi.po  8536
  deluge/i18n/hr.po  8227
  deluge/i18n/hu.po  8763
  deluge/i18n/id.po  7753
  deluge/i18n/is.po  8725
  deluge/i18n/it.po  8789
  deluge/i18n/iu.po  7330
  deluge/i18n/ja.po  8653
  deluge/i18n/ka.po  8117
  deluge/i18n/kk.po  8615
  deluge/i18n/km.po  6222  (new file)
  deluge/i18n/kn.po  7551
  deluge/i18n/ko.po  8645
  deluge/i18n/ku.po  7416
  deluge/i18n/ky.po  6214  (new file)
  deluge/i18n/la.po  7341
@@ -1,14 +1,17 @@
# -*- coding: utf-8 -*-
#
# This file is public domain.
#

from __future__ import unicode_literals

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'


# Deferred translation
def _(message):
return message


# Languages we provide translations for, out of the box.
LANGUAGES = {
'af': _('Afrikaans'),
@@ -107,3 +110,5 @@ LANGUAGES = {
'zh-hant': _('Traditional Chinese'),
'zh_TW': _('Chinese (Taiwan)'),
}

del _
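The underscore function defined above is deliberately a no-op: it only marks the language names so they are picked up for translation, and del _ keeps the marker from leaking out of the module; the strings are translated later by whichever gettext function the UI installs. A minimal sketch of that deferred-translation pattern (the dictionary entries are illustrative):

import gettext


def _(message):
    # Marker only, so the strings are extracted into translation catalogues.
    return message


LANGUAGES = {'af': _('Afrikaans'), 'fr': _('French')}
del _

# At display time, translate with the real gettext function.
for code, name in LANGUAGES.items():
    print(code, gettext.gettext(name))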
Diffs suppressed because they are too large (continued):
  deluge/i18n/lb.po  6214  (new file)
  deluge/i18n/lt.po  8724
  deluge/i18n/lv.po  8691
  deluge/i18n/mk.po  7977
Some files were not shown because too many files have changed in this diff.