Compare commits
1298 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9a01b274ba | ||
|
|
f7ed835ba5 | ||
|
|
8af49b9508 | ||
|
|
998049a39d | ||
|
|
111bd1b07c | ||
|
|
a09c10aab6 | ||
|
|
adb744a526 | ||
|
|
0f82cda811 | ||
|
|
0d6c3c8ecb | ||
|
|
6598f53fd4 | ||
|
|
6b8458d87f | ||
|
|
99a0621238 | ||
|
|
c52666309a | ||
|
|
84a458d40b | ||
|
|
f8631c6d53 | ||
|
|
b19b0775c7 | ||
|
|
2dc1c1dd38 | ||
|
|
7db8b233c8 | ||
|
|
427c77a9ef | ||
|
|
94c3969f10 | ||
|
|
debd1855dd | ||
|
|
9f77597c11 | ||
|
|
afc9039625 | ||
|
|
920d3cb44e | ||
|
|
b1fc8ad862 | ||
|
|
11b9bc39ab | ||
|
|
6dcb3f3bf2 | ||
|
|
ce768f45c5 | ||
|
|
9b91d1d6c0 | ||
|
|
d9c7a97604 | ||
|
|
0fd01aa697 | ||
|
|
58615e6f9b | ||
|
|
2277322e57 | ||
|
|
18020e609e | ||
|
|
6a31b920ac | ||
|
|
c1266a36e4 | ||
|
|
578effc538 | ||
|
|
d881120013 | ||
|
|
da5318033a | ||
|
|
31df5bce01 | ||
|
|
d5622b7cba | ||
|
|
26ad1b354f | ||
|
|
7a616a81f7 | ||
|
|
275aefc3cc | ||
|
|
2b32490f72 | ||
|
|
7b9043c16b | ||
|
|
cf83f99be0 | ||
|
|
fb8a66d207 | ||
|
|
e8a3645bc6 | ||
|
|
592e40993c | ||
|
|
b00e69e222 | ||
|
|
c9b4c8167f | ||
|
|
cdb9cfe756 | ||
|
|
e52f50b204 | ||
|
|
770c2be14c | ||
|
|
ab61961a64 | ||
|
|
6aca799bbb | ||
|
|
89836be1d1 | ||
|
|
20e1283627 | ||
|
|
ee8406e026 | ||
|
|
514941b785 | ||
|
|
1510e37652 | ||
|
|
e1e39cd3f4 | ||
|
|
e1bb8c5419 | ||
|
|
17fa33a496 | ||
|
|
601f0b54cf | ||
|
|
51d44bfc3e | ||
|
|
12148217a2 | ||
|
|
132fa12ef4 | ||
|
|
1827c2e4cd | ||
|
|
f423bca06b | ||
|
|
e7b089edf5 | ||
|
|
b8b7d94a6a | ||
|
|
2c080fec3d | ||
|
|
4c68566c77 | ||
|
|
a3af784c18 | ||
|
|
ac6f295c93 | ||
|
|
2c72cd7d9f | ||
|
|
d012dc5c85 | ||
|
|
038b4c63ee | ||
|
|
17e37996c4 | ||
|
|
9318e19347 | ||
|
|
8f4e03d04b | ||
|
|
229d67c086 | ||
|
|
d84897ff33 | ||
|
|
387a711538 | ||
|
|
7a1b914824 | ||
|
|
5e62801666 | ||
|
|
00d887153f | ||
|
|
6d5882001a | ||
|
|
4a6b45c65c | ||
|
|
b0d1fe5c33 | ||
|
|
a6e49098c8 | ||
|
|
ffcd36cbf4 | ||
|
|
3bf2d844a0 | ||
|
|
dd24eb8893 | ||
|
|
538f51dd5b | ||
|
|
eea9f40501 | ||
|
|
576bcb9f4b | ||
|
|
62c5365329 | ||
|
|
ddf575a86e | ||
|
|
6b9383ce92 | ||
|
|
cb8d24ef1f | ||
|
|
814ddfb79f | ||
|
|
766f819c0b | ||
|
|
ff43df9ef1 | ||
|
|
2e907e93e7 | ||
|
|
4d329d6a36 | ||
|
|
752191bc23 | ||
|
|
1d73fd9d7e | ||
|
|
79688c412a | ||
|
|
fc1c95fefb | ||
|
|
6a174716af | ||
|
|
defe256f1b | ||
|
|
8a5f154d9e | ||
|
|
fe56a69e8f | ||
|
|
c6d326f973 | ||
|
|
9e5f670feb | ||
|
|
9ebacf8816 | ||
|
|
df2d7ec9c2 | ||
|
|
ddab74582b | ||
|
|
2801079bc8 | ||
|
|
1deb49b524 | ||
|
|
49d550f652 | ||
|
|
1a43ce6ecc | ||
|
|
15a0131587 | ||
|
|
0dca34958c | ||
|
|
4b231e36ea | ||
|
|
52478a00db | ||
|
|
e177766270 | ||
|
|
ff8da7c8f8 | ||
|
|
89c8c5a0c7 | ||
|
|
38c6266f9c | ||
|
|
16f8e7e123 | ||
|
|
7110c7a11f | ||
|
|
6d79f316a6 | ||
|
|
c1b6811b8a | ||
|
|
7d7b76b2e9 | ||
|
|
657aa52fa7 | ||
|
|
8e9ef8db39 | ||
|
|
92a0096b54 | ||
|
|
87338760ad | ||
|
|
28019b0a09 | ||
|
|
248b007f4a | ||
|
|
9e31c59de8 | ||
|
|
269e785888 | ||
|
|
3669aef42d | ||
|
|
1087eb3a06 | ||
|
|
43af80a137 | ||
|
|
0766a27a71 | ||
|
|
a12f049d14 | ||
|
|
6afe2fd9cf | ||
|
|
61f634a21e | ||
|
|
02b6659235 | ||
|
|
dacc3d8f47 | ||
|
|
4f140bb1ac | ||
|
|
3dffaa7075 | ||
|
|
d626fda710 | ||
|
|
51c8de0fc3 | ||
|
|
4f23ccc284 | ||
|
|
a6ff34a47f | ||
|
|
b40d1f3463 | ||
|
|
f1a2d960bc | ||
|
|
4e7069e0c6 | ||
|
|
477a47e45e | ||
|
|
a3264240ab | ||
|
|
1030d0d748 | ||
|
|
f9d9fffedb | ||
|
|
6b4e9a3fac | ||
|
|
6787289846 | ||
|
|
d31a2e2768 | ||
|
|
c992680209 | ||
|
|
65f0dc25d2 | ||
|
|
b616af3a83 | ||
|
|
ca13107330 | ||
|
|
c7ce18f8c2 | ||
|
|
b6f288a522 | ||
|
|
cb48ca03df | ||
|
|
7b6641d709 | ||
|
|
3c12a2c4bf | ||
|
|
259e2bc61c | ||
|
|
9f6e4cc2fa | ||
|
|
a763957334 | ||
|
|
06293dc0a2 | ||
|
|
38a5d967dd | ||
|
|
4cdb9bc81d | ||
|
|
2104cb2839 | ||
|
|
d4a4bd40a8 | ||
|
|
ba47d7eea7 | ||
|
|
c9638ec3fa | ||
|
|
14d636d098 | ||
|
|
e1d4df7937 | ||
|
|
e08d06ba31 | ||
|
|
984ee7580d | ||
|
|
ab118ea580 | ||
|
|
f897eebb41 | ||
|
|
755873c5e7 | ||
|
|
dbc254efbe | ||
|
|
9de8ed2dee | ||
|
|
230b7f47cc | ||
|
|
58878d8a0f | ||
|
|
d9bb1bfbfb | ||
|
|
f8674f9baa | ||
|
|
20f1076037 | ||
|
|
e84f2aa04c | ||
|
|
01f70051f8 | ||
|
|
492f69b149 | ||
|
|
2270b2a28b | ||
|
|
b5a0418a36 | ||
|
|
e595722139 | ||
|
|
78ba855c68 | ||
|
|
158f638fb9 | ||
|
|
2e52c8124a | ||
|
|
5bea9dd04f | ||
|
|
910393d00e | ||
|
|
4b66b0ea07 | ||
|
|
543226450c | ||
|
|
b9dbadda0b | ||
|
|
7cb214d8a2 | ||
|
|
f6d4ddbe80 | ||
|
|
faefd7a5b5 | ||
|
|
8f02b0eea0 | ||
|
|
39d0f91de2 | ||
|
|
b3d75cb485 | ||
|
|
17b940a271 | ||
|
|
3338b72d1f | ||
|
|
70ca31a265 | ||
|
|
d7f43c2cf8 | ||
|
|
b1f88c1c48 | ||
|
|
6fa6d530ec | ||
|
|
11e7fb23ca | ||
|
|
da9d2b5ed8 | ||
|
|
2599bac1a4 | ||
|
|
0bae509311 | ||
|
|
2fa7834e6e | ||
|
|
2deb6ee6a7 | ||
|
|
0d166025d0 | ||
|
|
7861416dc5 | ||
|
|
2639c5e9ad | ||
|
|
8de5fcdac6 | ||
|
|
4aa9801be4 | ||
|
|
c4db4ace13 | ||
|
|
db367a80d1 | ||
|
|
3093b21555 | ||
|
|
3e58378490 | ||
|
|
2c40db3074 | ||
|
|
fba228fd9d | ||
|
|
ef2b8e88b4 | ||
|
|
9b62e32da8 | ||
|
|
a0b3ee8186 | ||
|
|
d70da1edce | ||
|
|
7c674b3aab | ||
|
|
98540f2fcd | ||
|
|
2f0e197320 | ||
|
|
db49585818 | ||
|
|
160bc1a5c4 | ||
|
|
8e23b02653 | ||
|
|
41e69aeac3 | ||
|
|
be30200a18 | ||
|
|
387650d040 | ||
|
|
052d64eb39 | ||
|
|
a3a8a820fe | ||
|
|
1b724b5606 | ||
|
|
5fc9d7182c | ||
|
|
c948216e33 | ||
|
|
c77b270fa8 | ||
|
|
035b99bc8a | ||
|
|
f74b837faa | ||
|
|
4c198f7116 | ||
|
|
76322c0145 | ||
|
|
12150c5efc | ||
|
|
4a9452672a | ||
|
|
f7eeaf3eda | ||
|
|
002ce4d4e1 | ||
|
|
80df57f2b6 | ||
|
|
0358378cae | ||
|
|
fa054b6b34 | ||
|
|
4b9e226cc6 | ||
|
|
ca24bf031c | ||
|
|
872a4f4650 | ||
|
|
af8806e292 | ||
|
|
4f646094b5 | ||
|
|
6e8503cfc5 | ||
|
|
4879bc6251 | ||
|
|
ab253f9030 | ||
|
|
bc6d197004 | ||
|
|
1de457fa8d | ||
|
|
9e564c49b3 | ||
|
|
50a150f570 | ||
|
|
8d55b0c92a | ||
|
|
5a2df62462 | ||
|
|
9d21dd9196 | ||
|
|
3b34196901 | ||
|
|
bad26026ae | ||
|
|
6e455e62d5 | ||
|
|
c97bd38c83 | ||
|
|
356322c5b1 | ||
|
|
9dbb477dd8 | ||
|
|
089609d5d2 | ||
|
|
487ddf1c25 | ||
|
|
83b4c17969 | ||
|
|
08c381cf0d | ||
|
|
286f14a6d2 | ||
|
|
0b14fe5454 | ||
|
|
c5a0d521d1 | ||
|
|
4a1f70da09 | ||
|
|
87e97cd8a5 | ||
|
|
e0dffe20a4 | ||
|
|
73d37584ad | ||
|
|
5fd3e86624 | ||
|
|
d0f1e7c6a3 | ||
|
|
53e7e383a3 | ||
|
|
c06e1f3135 | ||
|
|
b0ff526c95 | ||
|
|
3cfe90d581 | ||
|
|
1d60d9caf1 | ||
|
|
8e0d1520e8 | ||
|
|
b07f91d6a5 | ||
|
|
43af091b02 | ||
|
|
5f0543ba42 | ||
|
|
ef8cd1aa40 | ||
|
|
e01fe51b9e | ||
|
|
afa782194d | ||
|
|
77e602f359 | ||
|
|
a6063b0665 | ||
|
|
9a7e4ea500 | ||
|
|
1daedb7259 | ||
|
|
8e82e976f1 | ||
|
|
8b445ac9f9 | ||
|
|
91c24105cc | ||
|
|
13df26851e | ||
|
|
ca58d25785 | ||
|
|
42d728f71e | ||
|
|
659960899e | ||
|
|
d40b052cbc | ||
|
|
282f6fb73a | ||
|
|
416c9eabde | ||
|
|
b4a15f344d | ||
|
|
c545c9aab1 | ||
|
|
bb73cb8eec | ||
|
|
c0492a41d9 | ||
|
|
cfd92b8268 | ||
|
|
436883a96d | ||
|
|
c381b719b1 | ||
|
|
81d4d9a4e2 | ||
|
|
e2df3a4dfd | ||
|
|
7df92f2882 | ||
|
|
072b6d09fa | ||
|
|
3869e350bf | ||
|
|
058846f54f | ||
|
|
cd836f3660 | ||
|
|
d75f58f5ec | ||
|
|
f2b0d3f80b | ||
|
|
a366d57278 | ||
|
|
a821d85bf2 | ||
|
|
a1ce3e0d6b | ||
|
|
e7be5c7809 | ||
|
|
5acab98025 | ||
|
|
ed6a46e9c0 | ||
|
|
89f3b6624e | ||
|
|
3546f29caf | ||
|
|
e3414fe91f | ||
|
|
bdadd00d93 | ||
|
|
dd7de31e9f | ||
|
|
6897dab647 | ||
|
|
accf19bb26 | ||
|
|
4126007cac | ||
|
|
9f12fe2636 | ||
|
|
9fb348f3a4 | ||
|
|
e749d132cd | ||
|
|
bed9458604 | ||
|
|
7984ee9fcf | ||
|
|
69e3e36fae | ||
|
|
456563eab0 | ||
|
|
7b6fa4f0e5 | ||
|
|
cd1dc39ef2 | ||
|
|
0771aeac3b | ||
|
|
cd0afd20e5 | ||
|
|
324920cd8c | ||
|
|
12cda35494 | ||
|
|
1c1af9f90c | ||
|
|
687221f035 | ||
|
|
a99d52392f | ||
|
|
bd6690b159 | ||
|
|
b13df16b53 | ||
|
|
06f49be090 | ||
|
|
0b48ad5084 | ||
|
|
32ce93d2e9 | ||
|
|
e0479e79bd | ||
|
|
04e22b3966 | ||
|
|
40a5ce087b | ||
|
|
330e15bbcb | ||
|
|
d201d9fff9 | ||
|
|
f765794c99 | ||
|
|
34320e617d | ||
|
|
169ddeef5d | ||
|
|
33ad4c22c7 | ||
|
|
265f90fe69 | ||
|
|
099b72ed27 | ||
|
|
d20c0ee37e | ||
|
|
f6030a333a | ||
|
|
4cbc089de2 | ||
|
|
c45c04659f | ||
|
|
61a9037835 | ||
|
|
ad33c0bcca | ||
|
|
7afc524a9f | ||
|
|
c5a4bc9a1b | ||
|
|
1c0178dbaf | ||
|
|
dbf7feca3e | ||
|
|
d92de8ec4e | ||
|
|
8347da5a58 | ||
|
|
59e248d7de | ||
|
|
12e556e1d1 | ||
|
|
14d3ab93da | ||
|
|
e27ece512f | ||
|
|
b88d8efc8d | ||
|
|
9ec4c2837e | ||
|
|
ffc3fc9ec4 | ||
|
|
a566b4f428 | ||
|
|
69819460f3 | ||
|
|
24a8cb41fe | ||
|
|
1de0443492 | ||
|
|
bb19b380b4 | ||
|
|
b6b936ddf3 | ||
|
|
b00b6acba8 | ||
|
|
3941076c06 | ||
|
|
7401201af2 | ||
|
|
5c586fbf30 | ||
|
|
5c891b7e8e | ||
|
|
5425fcae9e | ||
|
|
4008cce12f | ||
|
|
d227105527 | ||
|
|
508649e6b6 | ||
|
|
b4e25d4345 | ||
|
|
733f925c75 | ||
|
|
40e910192e | ||
|
|
424a3cd892 | ||
|
|
9f6036c8d6 | ||
|
|
5af5749d4a | ||
|
|
f01449f14c | ||
|
|
03dff14ee9 | ||
|
|
e55302592a | ||
|
|
dbeaab052d | ||
|
|
9f07dd5a21 | ||
|
|
b933cd8718 | ||
|
|
8d85dde2c6 | ||
|
|
eaaa8dc834 | ||
|
|
5350dbf0ce | ||
|
|
28ffad10ab | ||
|
|
a37517bf6a | ||
|
|
fab9b96c8e | ||
|
|
50d6882a98 | ||
|
|
94064ac7da | ||
|
|
1c5f19a68a | ||
|
|
a26abd0dbb | ||
|
|
fb9080c18a | ||
|
|
15980471b0 | ||
|
|
b11bb9cdac | ||
|
|
474cd45fc5 | ||
|
|
0b6843a1b9 | ||
|
|
fdcdf07fa6 | ||
|
|
5617953d39 | ||
|
|
964144996f | ||
|
|
37214dd413 | ||
|
|
5a08fed0b6 | ||
|
|
443866ef04 | ||
|
|
96275adaff | ||
|
|
33884deb6c | ||
|
|
7db291fc93 | ||
|
|
9df14bd55a | ||
|
|
1e183625c9 | ||
|
|
643be19711 | ||
|
|
21a1770f3f | ||
|
|
07063d855a | ||
|
|
cf95e417f1 | ||
|
|
3f92ed0ea0 | ||
|
|
578b74f2c0 | ||
|
|
8e17b9aea5 | ||
|
|
6f766aae8c | ||
|
|
5797348bb3 | ||
|
|
57ca5067ff | ||
|
|
e8ff8a41de | ||
|
|
0b5dfe826a | ||
|
|
67fbcc8238 | ||
|
|
dd61c7dc21 | ||
|
|
3786b5435f | ||
|
|
1857e047b0 | ||
|
|
648ac7793f | ||
|
|
664ce6421f | ||
|
|
cfb77a1076 | ||
|
|
f65ddbbb9e | ||
|
|
76126271fc | ||
|
|
3faece0b4c | ||
|
|
530d3cd91e | ||
|
|
e659aba176 | ||
|
|
a196a499ae | ||
|
|
58bd9cd7a1 | ||
|
|
9dd9f850c6 | ||
|
|
cbecb74307 | ||
|
|
8ae1e58614 | ||
|
|
83e8ae392d | ||
|
|
c0297f10cb | ||
|
|
41052ae508 | ||
|
|
2d243d51e4 | ||
|
|
fdec80f676 | ||
|
|
5d3b0deb4d | ||
|
|
f68c356944 | ||
|
|
553f8d6ccd | ||
|
|
60fb3e33ae | ||
|
|
9b7c1db509 | ||
|
|
963ce356fb | ||
|
|
dcd0364ecc | ||
|
|
a2da428777 | ||
|
|
876c602710 | ||
|
|
79cb716ced | ||
|
|
ba9c975335 | ||
|
|
ef407bcb3c | ||
|
|
2898a066fe | ||
|
|
7950c4bdb4 | ||
|
|
2499012d88 | ||
|
|
7788669de1 | ||
|
|
d7f6fad3dd | ||
|
|
699c562d34 | ||
|
|
36d8225389 | ||
|
|
17ba9ee96b | ||
|
|
2769fc28d3 | ||
|
|
f5f3cfba50 | ||
|
|
1b1c77d225 | ||
|
|
cfc49e286b | ||
|
|
a2b3677c59 | ||
|
|
e5cfafdb00 | ||
|
|
bff05925e8 | ||
|
|
05f4b2b8ce | ||
|
|
2eac294643 | ||
|
|
f6789f79ea | ||
|
|
0b5976bdb1 | ||
|
|
7d2b2b9809 | ||
|
|
cce92dc1f8 | ||
|
|
fa7e59e842 | ||
|
|
8635f0ddb2 | ||
|
|
c90a423012 | ||
|
|
f0daee669b | ||
|
|
d252b660f5 | ||
|
|
e717a49c0c | ||
|
|
426155e65c | ||
|
|
6b9b446e3d | ||
|
|
ab2b2cfe6e | ||
|
|
4b236c6ed6 | ||
|
|
2396fadf04 | ||
|
|
a3bffb5867 | ||
|
|
1b44fc40af | ||
|
|
b894139ca1 | ||
|
|
daa0662869 | ||
|
|
81de9529c3 | ||
|
|
6b06caf00d | ||
|
|
9370366112 | ||
|
|
32bcf6e615 | ||
|
|
aa804471a7 | ||
|
|
681d8b1ddc | ||
|
|
c82b1f51e3 | ||
|
|
6d048e0003 | ||
|
|
0314910bbe | ||
|
|
3bd831782c | ||
|
|
40f01dca6f | ||
|
|
8dead66b58 | ||
|
|
18807191c0 | ||
|
|
9d9630a27a | ||
|
|
8ac851555d | ||
|
|
27f331a1fc | ||
|
|
e6b4d32506 | ||
|
|
a28ee58a1f | ||
|
|
47749c2d73 | ||
|
|
d6d0ff724a | ||
|
|
ba65700aad | ||
|
|
84a7cfe07d | ||
|
|
9ccd4a5e84 | ||
|
|
616434a00f | ||
|
|
4cf62f73da | ||
|
|
0145aecab4 | ||
|
|
6c4184d1f5 | ||
|
|
9d011b42a9 | ||
|
|
bf81b5cacc | ||
|
|
8d2b6e4097 | ||
|
|
50d8399f09 | ||
|
|
bc99b77dbe | ||
|
|
1c7edc9487 | ||
|
|
90c06fb3c9 | ||
|
|
10a04c16ba | ||
|
|
90a618bd7e | ||
|
|
b630b84ab0 | ||
|
|
a5ee362fc0 | ||
|
|
7c0870b6b8 | ||
|
|
a42264b280 | ||
|
|
e714604ec0 | ||
|
|
c094120f04 | ||
|
|
6691c8ddd7 | ||
|
|
013705c318 | ||
|
|
bda6f92a4d | ||
|
|
7ceb8dc79c | ||
|
|
7f48210c97 | ||
|
|
23c440cd58 | ||
|
|
0097167dec | ||
|
|
21e5f156bb | ||
|
|
08f55314d5 | ||
|
|
577bf09859 | ||
|
|
c446cd2fb0 | ||
|
|
06a8414f12 | ||
|
|
1ac01456a9 | ||
|
|
b86853f06f | ||
|
|
311a2798dd | ||
|
|
fe9998fb9d | ||
|
|
ce648c5d35 | ||
|
|
5a2a9bbf9a | ||
|
|
0f8ab05fd4 | ||
|
|
b87c00c041 | ||
|
|
8999f51dc9 | ||
|
|
d5e19db5e6 | ||
|
|
675bee83ca | ||
|
|
33e5dd1fdb | ||
|
|
4ff2794c83 | ||
|
|
81f9302da1 | ||
|
|
93f4b8b537 | ||
|
|
0587d2f8db | ||
|
|
6ba25b5468 | ||
|
|
cc10969506 | ||
|
|
c2eb50a7ee | ||
|
|
33d24068fd | ||
|
|
3a4c191b11 | ||
|
|
e06b4ccb3f | ||
|
|
3c6b86ea28 | ||
|
|
c4a9a13d6c | ||
|
|
c0f1a3c603 | ||
|
|
9d3425061a | ||
|
|
c2dcd2f67d | ||
|
|
24b822aecd | ||
|
|
a7d3de766f | ||
|
|
b56c897e4b | ||
|
|
df14032107 | ||
|
|
66b4821f7f | ||
|
|
d301cde266 | ||
|
|
0590a0d722 | ||
|
|
fc71a03a12 | ||
|
|
923c794e39 | ||
|
|
e7fbff5b3f | ||
|
|
1bd556fbb3 | ||
|
|
18a870f8c3 | ||
|
|
3e2a2c3bee | ||
|
|
73e74881a6 | ||
|
|
b37112600e | ||
|
|
6172ce4960 | ||
|
|
3d277e1c01 | ||
|
|
b3b13899f1 | ||
|
|
7c4a59539a | ||
|
|
e6dfb3da16 | ||
|
|
8e220ededa | ||
|
|
11126f8083 | ||
|
|
ac8a13db22 | ||
|
|
5ab10ff97a | ||
|
|
f3b0346ba2 | ||
|
|
96c94f97f4 | ||
|
|
192c0200e5 | ||
|
|
03ae8f459c | ||
|
|
377fdd9e5e | ||
|
|
daec7d20fe | ||
|
|
66a149590b | ||
|
|
1b6f010df2 | ||
|
|
7e4bc29b59 | ||
|
|
0284fa9b0a | ||
|
|
e5bcea59b5 | ||
|
|
16f603ced2 | ||
|
|
bdcb3b7e33 | ||
|
|
0def6fcfe3 | ||
|
|
75a352fef3 | ||
|
|
07eb1f7f4c | ||
|
|
8e35c02763 | ||
|
|
c1f6d9a858 | ||
|
|
3e20a3bac7 | ||
|
|
818570fd2d | ||
|
|
bcd2d22fbf | ||
|
|
ffc99cd4f4 | ||
|
|
bb56750c1a | ||
|
|
b08d587a22 | ||
|
|
47f4132b39 | ||
|
|
faefab5554 | ||
|
|
243a033055 | ||
|
|
db1eeaae38 | ||
|
|
8c2960e891 | ||
|
|
d6a86e8616 | ||
|
|
5260f42378 | ||
|
|
84f28f3c54 | ||
|
|
860b6793fb | ||
|
|
df03409d7a | ||
|
|
6a81f2241d | ||
|
|
5ce817cee6 | ||
|
|
7cdf124f9d | ||
|
|
ff46aa0226 | ||
|
|
669e331f6c | ||
|
|
4179ba642b | ||
|
|
00954d98f7 | ||
|
|
037e77860b | ||
|
|
47e187449d | ||
|
|
06e9afbe69 | ||
|
|
bfe8aa5f5f | ||
|
|
e51ddd7a50 | ||
|
|
442552c024 | ||
|
|
ce4806df64 | ||
|
|
0c2e65c92b | ||
|
|
b01aa2b385 | ||
|
|
2e04890756 | ||
|
|
1657857b4a | ||
|
|
72383592ba | ||
|
|
d093f935f9 | ||
|
|
8cc7d101aa | ||
|
|
f39eebbd22 | ||
|
|
3ac8bc738a | ||
|
|
0eac041a26 | ||
|
|
ab0f5daaf3 | ||
|
|
b59a0f82ab | ||
|
|
9b75e6af5c | ||
|
|
aa37f2b0ef | ||
|
|
d22237a5cc | ||
|
|
26f5e8aa4b | ||
|
|
9072c6cae0 | ||
|
|
8739c1197f | ||
|
|
a477973862 | ||
|
|
95ce26d261 | ||
|
|
8c934c1ca8 | ||
|
|
349d7d4866 | ||
|
|
f1ea8fa693 | ||
|
|
685210aee3 | ||
|
|
ae42b62b3c | ||
|
|
7faa7c3dba | ||
|
|
eba36b6d57 | ||
|
|
84a2afe08f | ||
|
|
98a85f6950 | ||
|
|
c89c99b272 | ||
|
|
3f16dbd09c | ||
|
|
e547851905 | ||
|
|
cbb0462948 | ||
|
|
a185292578 | ||
|
|
cec1f54cdd | ||
|
|
0112a3141b | ||
|
|
5f93b08c23 | ||
|
|
ff0de896c4 | ||
|
|
6d98f67668 | ||
|
|
5d5cf5cf29 | ||
|
|
610edea20e | ||
|
|
8f4219a93c | ||
|
|
9540ae5a19 | ||
|
|
0f7c3f5d0f | ||
|
|
39fb3a1107 | ||
|
|
e609931d2c | ||
|
|
70d94cda8c | ||
|
|
5c89a52f23 | ||
|
|
686e0a9441 | ||
|
|
e8dcf5ee02 | ||
|
|
95369e79a5 | ||
|
|
eb0a8454bc | ||
|
|
4f059c2549 | ||
|
|
fb7dbd5716 | ||
|
|
07fc4b3728 | ||
|
|
2d5b02baf9 | ||
|
|
f8b2547a45 | ||
|
|
f8cc8acfec | ||
|
|
17787c5a4f | ||
|
|
304de5adb6 | ||
|
|
46db38c5bf | ||
|
|
99e77e409a | ||
|
|
6152ddbd5f | ||
|
|
f99a94d685 | ||
|
|
47f58ff45f | ||
|
|
f225066130 | ||
|
|
83c5d701b3 | ||
|
|
ffb3359e66 | ||
|
|
0861b21532 | ||
|
|
e7420367f1 | ||
|
|
1998c779c7 | ||
|
|
93eb33811a | ||
|
|
d7bf9dba01 | ||
|
|
d5c6942266 | ||
|
|
e870fab277 | ||
|
|
f3ae63c7a9 | ||
|
|
3df1f1b153 | ||
|
|
74fd7c684e | ||
|
|
745b262800 | ||
|
|
72f6516a1c | ||
|
|
7bb723d6b3 | ||
|
|
2ccdc8ffdc | ||
|
|
1cabf64993 | ||
|
|
c55bd5a35d | ||
|
|
77a3552797 | ||
|
|
81efd4bce7 | ||
|
|
98183ccc1e | ||
|
|
09df863b6c | ||
|
|
4e70c1882b | ||
|
|
d38d581d1d | ||
|
|
61c95240c2 | ||
|
|
59347400c3 | ||
|
|
f976e04597 | ||
|
|
1602fe88e6 | ||
|
|
d4eca60b1d | ||
|
|
5a4467adb9 | ||
|
|
f50852fee0 | ||
|
|
1f647b3cc7 | ||
|
|
caf4eab104 | ||
|
|
334078fc34 | ||
|
|
25b1d86c50 | ||
|
|
78e2ff4870 | ||
|
|
ad94cce283 | ||
|
|
b4610e5c23 | ||
|
|
e12dcc2fb8 | ||
|
|
a818276b6d | ||
|
|
269d779df7 | ||
|
|
b63f7b7e5d | ||
|
|
b4a3ac8081 | ||
|
|
bbaaaa72fb | ||
|
|
89c83001ca | ||
|
|
61f1fdabd1 | ||
|
|
28062eacb6 | ||
|
|
8bdbf8df2e | ||
|
|
27e4800ed2 | ||
|
|
37bc54e01e | ||
|
|
6115f83a09 | ||
|
|
a8159c9e55 | ||
|
|
f734e27d23 | ||
|
|
8a118df636 | ||
|
|
a19b75760f | ||
|
|
1224b98745 | ||
|
|
6243ed3bd5 | ||
|
|
41e94e1e22 | ||
|
|
8c6940c351 | ||
|
|
384a2e0e15 | ||
|
|
a691841756 | ||
|
|
ff94bd6a90 | ||
|
|
00419910b4 | ||
|
|
21c9d7fcc3 | ||
|
|
e314c605f1 | ||
|
|
8316b5cb29 | ||
|
|
be46ed12ac | ||
|
|
a2d22b6feb | ||
|
|
f4e373447e | ||
|
|
5b2dfffe0f | ||
|
|
b347f761a7 | ||
|
|
445724573d | ||
|
|
8c5e0cf0a7 | ||
|
|
c6016a25df | ||
|
|
5a0a5ad83b | ||
|
|
3f0a0f552b | ||
|
|
63fd35a95c | ||
|
|
db163e7bd1 | ||
|
|
f3cd569e77 | ||
|
|
a95671491d | ||
|
|
95295e47ab | ||
|
|
a54e9ddd9c | ||
|
|
0e5f89d7d6 | ||
|
|
742d5cbfb3 | ||
|
|
e3fa695ad4 | ||
|
|
d6675f3311 | ||
|
|
64850a45da | ||
|
|
6aa0b7c748 | ||
|
|
fd80728857 | ||
|
|
9618a8d543 | ||
|
|
ec3a6e65ae | ||
|
|
d74578ec66 | ||
|
|
865dd24901 | ||
|
|
e063028c7d | ||
|
|
db11c1b7a8 | ||
|
|
89d7a924fb | ||
|
|
fe16115b20 | ||
|
|
07db34beb9 | ||
|
|
f42fb2fdd2 | ||
|
|
5853a373f3 | ||
|
|
951fbdccbd | ||
|
|
c6d20eb91f | ||
|
|
b68cea3921 | ||
|
|
36125f1067 | ||
|
|
eee16c7a3d | ||
|
|
bf1d93f256 | ||
|
|
42e3c95f87 | ||
|
|
ee702d92e6 | ||
|
|
f5aae23111 | ||
|
|
178f770b16 | ||
|
|
1b2d72531f | ||
|
|
7e08454edd | ||
|
|
72d318323e | ||
|
|
b611a98bae | ||
|
|
8b2eb50f29 | ||
|
|
988d0d6e35 | ||
|
|
48ec6fc757 | ||
|
|
0921c5e160 | ||
|
|
83843ae210 | ||
|
|
e5e768c56f | ||
|
|
f775c9da0b | ||
|
|
6a9c3dac77 | ||
|
|
11aaaecb7b | ||
|
|
12c08154c5 | ||
|
|
79d6a6f85f | ||
|
|
4513f03e8f | ||
|
|
f3adfca9c5 | ||
|
|
0b61ec1e13 | ||
|
|
8492c9b214 | ||
|
|
2a60c52483 | ||
|
|
917e813607 | ||
|
|
c20f64685f | ||
|
|
471229216a | ||
|
|
28661ab11a | ||
|
|
11c348a3d7 | ||
|
|
ffe6b7dd70 | ||
|
|
720af9085a | ||
|
|
8916ea5299 | ||
|
|
bf9a43b3d1 | ||
|
|
bca597c4e2 | ||
|
|
33e2f63ed5 | ||
|
|
e3f6df7120 | ||
|
|
58f198ddad | ||
|
|
61edcfe4f3 | ||
|
|
85bc3ddde6 | ||
|
|
488b631c38 | ||
|
|
da1b430200 | ||
|
|
bef76f0118 | ||
|
|
dd6baa72fa | ||
|
|
519b832d8c | ||
|
|
131326675e | ||
|
|
73dfa232f9 | ||
|
|
3f64173905 | ||
|
|
5cc4260d8e | ||
|
|
7f33a3847c | ||
|
|
c9be74ce80 | ||
|
|
f5d29eafe0 | ||
|
|
274e2c1cc2 | ||
|
|
c2233f7474 | ||
|
|
75f22f44a1 | ||
|
|
d60a8a71b7 | ||
|
|
f8bfd6fd3f | ||
|
|
f2bc735bc0 | ||
|
|
9e471ac389 | ||
|
|
ca34cbd180 | ||
|
|
9f4ea662da | ||
|
|
3172a4d030 | ||
|
|
c58315e2ee | ||
|
|
dc0ea5b3f6 | ||
|
|
b50cf1cf4c | ||
|
|
a7a4499dd4 | ||
|
|
f78261ee32 | ||
|
|
b69898d624 | ||
|
|
cfa8702654 | ||
|
|
dd9c65db4c | ||
|
|
9e7e29f03f | ||
|
|
2066625bf0 | ||
|
|
7af1d00ea2 | ||
|
|
5b279a48cb | ||
|
|
3d6e84e11c | ||
|
|
527d6ab7ff | ||
|
|
8a295c72ba | ||
|
|
d2b82a37b2 | ||
|
|
ec4e680d62 | ||
|
|
aa80ed3d4b | ||
|
|
1d4a7894e8 | ||
|
|
2d28cb6897 | ||
|
|
89e561c991 | ||
|
|
d16dd7c75d | ||
|
|
e51e6b2171 | ||
|
|
3eaf5c9bc0 | ||
|
|
4625c920c3 | ||
|
|
2ce0f7beb4 | ||
|
|
81ba95f540 | ||
|
|
b25e0ea393 | ||
|
|
2632b34438 | ||
|
|
ff60013335 | ||
|
|
a0bcb03dde | ||
|
|
f0044a9342 | ||
|
|
04fb81e071 | ||
|
|
642b665418 | ||
|
|
a5fa0681ed | ||
|
|
22e922e860 | ||
|
|
0126f5ae84 | ||
|
|
cfb246fa84 | ||
|
|
651119b7dd | ||
|
|
f944a70a9c | ||
|
|
9056f5ae59 | ||
|
|
ed62c981cc | ||
|
|
2a7ba28903 | ||
|
|
e8ec2ef8d1 | ||
|
|
864e8654c3 | ||
|
|
e11453aafb | ||
|
|
c1596f098c | ||
|
|
e57620f67c | ||
|
|
e481763967 | ||
|
|
2b3d755c64 | ||
|
|
fc2db36820 | ||
|
|
4fdea782f3 | ||
|
|
188a1a3b03 | ||
|
|
8e2014f2d4 | ||
|
|
fb95d7923f | ||
|
|
fe5ca69f36 | ||
|
|
1086b808dc | ||
|
|
0050e5cdfc | ||
|
|
6b357674d0 | ||
|
|
8f44dfcde5 | ||
|
|
82c0592e49 | ||
|
|
28ab4576d5 | ||
|
|
2debd5598f | ||
|
|
d86d44e2d4 | ||
|
|
3d85460dc8 | ||
|
|
52ce85fbf2 | ||
|
|
6d70533e0b | ||
|
|
4d8338e829 | ||
|
|
a4e48e1f6b | ||
|
|
790a74f9e4 | ||
|
|
893dde9958 | ||
|
|
d448b8cd99 | ||
|
|
ca2c4a0b3e | ||
|
|
499b8193ab | ||
|
|
1f18d2b09c | ||
|
|
a92d6fd35c | ||
|
|
12adde8f80 | ||
|
|
6437079be3 | ||
|
|
8b747dff9b | ||
|
|
027ff43dfd | ||
|
|
f50c8504cf | ||
|
|
30f5a3944c | ||
|
|
a1c0b000a4 | ||
|
|
f22778aacb | ||
|
|
888ee07f65 | ||
|
|
aa5937c278 | ||
|
|
4831c80598 | ||
|
|
886a271d19 | ||
|
|
8dfb0d1d5c | ||
|
|
1f982b7999 | ||
|
|
eb1556f3e8 | ||
|
|
061b79eac0 | ||
|
|
3bbeec513a | ||
|
|
a6be59bbea | ||
|
|
96e8a909d8 | ||
|
|
8724076601 | ||
|
|
4b356aba3e | ||
|
|
a13e0a75e8 | ||
|
|
ecf91d616b | ||
|
|
0d4d0f3126 | ||
|
|
b9a8ca14c3 | ||
|
|
f7e1a2a5eb | ||
|
|
c3bc9c8591 | ||
|
|
3380e20e3a | ||
|
|
a2c87e1b7d | ||
|
|
a609b401c4 | ||
|
|
9098e44513 | ||
|
|
62524e01e1 | ||
|
|
78bf1d274e | ||
|
|
461e469f28 | ||
|
|
99252074be | ||
|
|
e4e7ae3621 | ||
|
|
63743dd2b6 | ||
|
|
a254886bad | ||
|
|
aab10fb599 | ||
|
|
9d55ecffe9 | ||
|
|
00b613d2e0 | ||
|
|
fe24322f7c | ||
|
|
660e20dada | ||
|
|
18c8e803a4 | ||
|
|
15a19949b8 | ||
|
|
ebc5a66375 | ||
|
|
1120b4ab51 | ||
|
|
f91081e39c | ||
|
|
9e991e1595 | ||
|
|
afac06081c | ||
|
|
b773228719 | ||
|
|
7001ed476d | ||
|
|
31c39650a9 | ||
|
|
fbae706b0f | ||
|
|
88c328af8e | ||
|
|
cbd8981ee2 | ||
|
|
3101926e9b | ||
|
|
c9e0910c55 | ||
|
|
d65667ce16 | ||
|
|
7d7251862c | ||
|
|
4d02a969c2 | ||
|
|
e20c776364 | ||
|
|
c55404699e | ||
|
|
6240e4eba0 | ||
|
|
cf86719607 | ||
|
|
76943b6529 | ||
|
|
ca8bbdc293 | ||
|
|
8e6f12a897 | ||
|
|
52c64c1a6a | ||
|
|
ca94d48f8b | ||
|
|
d860680823 | ||
|
|
d1dbf3745a | ||
|
|
4b1151bda1 | ||
|
|
18c64e493b | ||
|
|
fc6839b441 | ||
|
|
405b63acdd | ||
|
|
f3dee50448 | ||
|
|
04e550ebe7 | ||
|
|
05b58819d6 | ||
|
|
63c72853f4 | ||
|
|
f20cce0176 | ||
|
|
723cbcd8bd | ||
|
|
dfbb84caae | ||
|
|
009d6cafaf | ||
|
|
bd9a4289d1 | ||
|
|
29a34fef8c | ||
|
|
08e2a3a883 | ||
|
|
2d37022525 | ||
|
|
bb3faaf2cd | ||
|
|
2c43b9a926 | ||
|
|
964ed5f497 | ||
|
|
b47a94852a | ||
|
|
f318524070 | ||
|
|
04539edb45 | ||
|
|
5cf21452c1 | ||
|
|
799299c7cc | ||
|
|
458330d325 | ||
|
|
973bec9e6a | ||
|
|
1f941a5105 | ||
|
|
8217fecb33 | ||
|
|
1dda7edf1c | ||
|
|
3c03e400f0 | ||
|
|
6388d97c5c | ||
|
|
161e3086fa | ||
|
|
b3f1f938be | ||
|
|
082da6e3a6 | ||
|
|
d9b9447242 | ||
|
|
d743282578 | ||
|
|
7eee6f0b96 | ||
|
|
dda3fca4b4 | ||
|
|
8648b2f948 | ||
|
|
f52cbd24f8 | ||
|
|
5ea13eeffd | ||
|
|
6cc802952f | ||
|
|
190b9db645 | ||
|
|
81949b9cad | ||
|
|
894e419f40 | ||
|
|
cdc6c036aa | ||
|
|
1e9168f682 | ||
|
|
790415dd4f | ||
|
|
679e0ea2c3 | ||
|
|
bd167403c3 | ||
|
|
13abe62bed | ||
|
|
4147c5b870 | ||
|
|
37d4755aae | ||
|
|
a9f416c4c5 | ||
|
|
8a11f246b1 | ||
|
|
8d44577dca | ||
|
|
72457d8d10 | ||
|
|
3bb44f8d9f | ||
|
|
279297b8fa | ||
|
|
c71e661daf | ||
|
|
f8820c06fe | ||
|
|
907b40e3c6 | ||
|
|
d318e163bb | ||
|
|
6e9c36a503 | ||
|
|
c9e9fe86aa | ||
|
|
c4f4e2b524 | ||
|
|
95246b90f6 | ||
|
|
2fad29df51 | ||
|
|
a95320e162 | ||
|
|
31b8805b5e | ||
|
|
9e69d4e153 | ||
|
|
aa5ecd7b42 | ||
|
|
15f90aa503 | ||
|
|
ec86bc4a38 | ||
|
|
a3efc64901 | ||
|
|
c929ecbac0 | ||
|
|
cc32e49060 | ||
|
|
05c41460c2 | ||
|
|
794efaa209 | ||
|
|
b0e93ee18c | ||
|
|
0393b51db6 | ||
|
|
464c8ad71c | ||
|
|
9df0e01874 | ||
|
|
bf2beb2530 | ||
|
|
f0b096d41a | ||
|
|
c948f38469 | ||
|
|
190e1d2c4f | ||
|
|
8a822e35e2 | ||
|
|
64a196f21d | ||
|
|
a24d4a9e3b | ||
|
|
138a3b1f3c | ||
|
|
9a55961786 | ||
|
|
4eaddadf8c | ||
|
|
9dd98b29be | ||
|
|
732946d38a | ||
|
|
ca070e67e7 | ||
|
|
b468048d95 | ||
|
|
029ae20573 | ||
|
|
fdcddaaffc | ||
|
|
b4275639f5 | ||
|
|
d6709469f6 | ||
|
|
3e43e3fc4c | ||
|
|
e622e68701 | ||
|
|
a90a4d1bc2 | ||
|
|
165676407a | ||
|
|
5131cb0ae1 | ||
|
|
d023eb8f1f | ||
|
|
d6fa5c97db | ||
|
|
3717443e85 | ||
|
|
a1ba39b3d3 | ||
|
|
b4ad7b459f | ||
|
|
5af8fd0b21 | ||
|
|
83e7a8d765 | ||
|
|
4bdd4eab64 | ||
|
|
5e683b5a48 | ||
|
|
f178825d21 | ||
|
|
fe2290fccb | ||
|
|
e4d67645b7 | ||
|
|
324415be15 | ||
|
|
7c44f9ab13 | ||
|
|
628fda2097 | ||
|
|
da6d749072 | ||
|
|
bef2b28acc | ||
|
|
302f571837 | ||
|
|
41dde209d5 | ||
|
|
5d350ef5ac | ||
|
|
4fd1d986dd | ||
|
|
55d57bc07b | ||
|
|
a81a262fb6 | ||
|
|
c37360f848 | ||
|
|
d7700900db | ||
|
|
faa136a365 | ||
|
|
bd73b94ea4 | ||
|
|
d764d0f096 | ||
|
|
fc8db130e0 | ||
|
|
682d678f91 | ||
|
|
06a211a24a | ||
|
|
55af696b7c | ||
|
|
636e9514e8 | ||
|
|
47e649643f | ||
|
|
bf59d2f357 | ||
|
|
fb90f6591b | ||
|
|
d66722e737 | ||
|
|
47d37c2ec9 | ||
|
|
40324ee89f | ||
|
|
9db1f3430e | ||
|
|
ec19932eef | ||
|
|
daf31870f3 | ||
|
|
35d49f6a5e | ||
|
|
5c0d8a7fef | ||
|
|
b2ab114b6d | ||
|
|
a8523e6d01 | ||
|
|
f946389d60 | ||
|
|
0a749ce913 | ||
|
|
dfd2c33657 | ||
|
|
7aad27c3d2 | ||
|
|
7a5588d5de | ||
|
|
f1dde5c925 | ||
|
|
0eff4f0096 | ||
|
|
4d7fa08805 | ||
|
|
f0af184262 | ||
|
|
5a23be2224 | ||
|
|
7f87b255f9 | ||
|
|
5ac1118db3 | ||
|
|
2c46279617 | ||
|
|
5d6a9ad2d0 | ||
|
|
b9c2b42725 | ||
|
|
a8369b4e93 | ||
|
|
fab8e66fe1 | ||
|
|
4db1b57c70 | ||
|
|
b06dbd3069 | ||
|
|
f84aa8c638 | ||
|
|
8e07dfc730 | ||
|
|
a49a00a25f | ||
|
|
673843fb66 | ||
|
|
811f35b028 | ||
|
|
ec6e2c240f | ||
|
|
3187a0f820 | ||
|
|
f86b9299c4 | ||
|
|
d27d0abeb0 | ||
|
|
7c59348138 | ||
|
|
ab53f44157 | ||
|
|
b35f325d94 | ||
|
|
393c14de54 | ||
|
|
bff17c0b95 | ||
|
|
d172828ac5 | ||
|
|
9500ac73fc | ||
|
|
e2cf7e4421 | ||
|
|
7e6234298d | ||
|
|
d4da206f93 | ||
|
|
985a168724 | ||
|
|
173c6194ed | ||
|
|
bcd23ad10c | ||
|
|
898e6f487d | ||
|
|
6618c3927c | ||
|
|
4b58b40226 | ||
|
|
3ecc826629 | ||
|
|
32fe3796e4 | ||
|
|
359d1aaafa | ||
|
|
fb5d336351 | ||
|
|
eb30dff986 | ||
|
|
9312336962 | ||
|
|
ade4338ea6 | ||
|
|
55b20324c0 | ||
|
|
c0fb28301d | ||
|
|
f9c2503f81 | ||
|
|
5b4cdf05b1 | ||
|
|
6f25a6bdfd | ||
|
|
23427e95f7 | ||
|
|
90a09e573b | ||
|
|
e1d7440b9d |
@@ -1,4 +1,5 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
from __future__ import print_function
|
||||||
from logging import handlers
|
from logging import handlers
|
||||||
from os.path import dirname
|
from os.path import dirname
|
||||||
import logging
|
import logging
|
||||||
@@ -9,7 +10,6 @@ import socket
|
|||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
import time
|
|
||||||
|
|
||||||
# Root path
|
# Root path
|
||||||
base_path = dirname(os.path.abspath(__file__))
|
base_path = dirname(os.path.abspath(__file__))
|
||||||
@@ -18,7 +18,12 @@ base_path = dirname(os.path.abspath(__file__))
|
|||||||
sys.path.insert(0, os.path.join(base_path, 'libs'))
|
sys.path.insert(0, os.path.join(base_path, 'libs'))
|
||||||
|
|
||||||
from couchpotato.environment import Env
|
from couchpotato.environment import Env
|
||||||
from couchpotato.core.helpers.variable import getDataDir
|
from couchpotato.core.helpers.variable import getDataDir, removePyc
|
||||||
|
|
||||||
|
|
||||||
|
# Remove pyc files before dynamic load (sees .pyc files regular .py modules)
|
||||||
|
removePyc(base_path)
|
||||||
|
|
||||||
|
|
||||||
class Loader(object):
|
class Loader(object):
|
||||||
|
|
||||||
@@ -28,7 +33,7 @@ class Loader(object):
|
|||||||
|
|
||||||
# Get options via arg
|
# Get options via arg
|
||||||
from couchpotato.runner import getOptions
|
from couchpotato.runner import getOptions
|
||||||
self.options = getOptions(base_path, sys.argv[1:])
|
self.options = getOptions(sys.argv[1:])
|
||||||
|
|
||||||
# Load settings
|
# Load settings
|
||||||
settings = Env.get('settings')
|
settings = Env.get('settings')
|
||||||
@@ -49,7 +54,7 @@ class Loader(object):
|
|||||||
# Create logging dir
|
# Create logging dir
|
||||||
self.log_dir = os.path.join(self.data_dir, 'logs');
|
self.log_dir = os.path.join(self.data_dir, 'logs');
|
||||||
if not os.path.isdir(self.log_dir):
|
if not os.path.isdir(self.log_dir):
|
||||||
os.mkdir(self.log_dir)
|
os.makedirs(self.log_dir)
|
||||||
|
|
||||||
# Logging
|
# Logging
|
||||||
from couchpotato.core.logger import CPLog
|
from couchpotato.core.logger import CPLog
|
||||||
@@ -66,10 +71,11 @@ class Loader(object):
|
|||||||
signal.signal(signal.SIGTERM, lambda signum, stack_frame: sys.exit(1))
|
signal.signal(signal.SIGTERM, lambda signum, stack_frame: sys.exit(1))
|
||||||
|
|
||||||
from couchpotato.core.event import addEvent
|
from couchpotato.core.event import addEvent
|
||||||
addEvent('app.after_shutdown', self.afterShutdown)
|
addEvent('app.do_shutdown', self.setRestart)
|
||||||
|
|
||||||
def afterShutdown(self, restart):
|
def setRestart(self, restart):
|
||||||
self.do_restart = restart
|
self.do_restart = restart
|
||||||
|
return True
|
||||||
|
|
||||||
def onExit(self, signal, frame):
|
def onExit(self, signal, frame):
|
||||||
from couchpotato.core.event import fireEvent
|
from couchpotato.core.event import fireEvent
|
||||||
@@ -97,7 +103,6 @@ class Loader(object):
|
|||||||
|
|
||||||
# Release log files and shutdown logger
|
# Release log files and shutdown logger
|
||||||
logging.shutdown()
|
logging.shutdown()
|
||||||
time.sleep(3)
|
|
||||||
|
|
||||||
args = [sys.executable] + [os.path.join(base_path, os.path.basename(__file__))] + sys.argv[1:]
|
args = [sys.executable] + [os.path.join(base_path, os.path.basename(__file__))] + sys.argv[1:]
|
||||||
subprocess.Popen(args)
|
subprocess.Popen(args)
|
||||||
@@ -132,14 +137,15 @@ if __name__ == '__main__':
|
|||||||
pass
|
pass
|
||||||
except SystemExit:
|
except SystemExit:
|
||||||
raise
|
raise
|
||||||
except socket.error as (nr, msg):
|
except socket.error as e:
|
||||||
# log when socket receives SIGINT, but continue.
|
# log when socket receives SIGINT, but continue.
|
||||||
# previous code would have skipped over other types of IO errors too.
|
# previous code would have skipped over other types of IO errors too.
|
||||||
|
nr, msg = e
|
||||||
if nr != 4:
|
if nr != 4:
|
||||||
try:
|
try:
|
||||||
l.log.critical(traceback.format_exc())
|
l.log.critical(traceback.format_exc())
|
||||||
except:
|
except:
|
||||||
print traceback.format_exc()
|
print(traceback.format_exc())
|
||||||
raise
|
raise
|
||||||
except:
|
except:
|
||||||
try:
|
try:
|
||||||
@@ -148,7 +154,7 @@ if __name__ == '__main__':
|
|||||||
if l:
|
if l:
|
||||||
l.log.critical(traceback.format_exc())
|
l.log.critical(traceback.format_exc())
|
||||||
else:
|
else:
|
||||||
print traceback.format_exc()
|
print(traceback.format_exc())
|
||||||
except:
|
except:
|
||||||
print traceback.format_exc()
|
print(traceback.format_exc())
|
||||||
raise
|
raise
|
||||||
|
|||||||
35
README.md
35
README.md
@@ -1,4 +1,4 @@
|
|||||||
CouchPotato Server
|
CouchPotato
|
||||||
=====
|
=====
|
||||||
|
|
||||||
CouchPotato (CP) is an automatic NZB and torrent downloader. You can keep a "movies I want"-list and it will search for NZBs/torrents of these movies every X hours.
|
CouchPotato (CP) is an automatic NZB and torrent downloader. You can keep a "movies I want"-list and it will search for NZBs/torrents of these movies every X hours.
|
||||||
@@ -7,7 +7,7 @@ Once a movie is found, it will send it to SABnzbd or download the torrent to a s
|
|||||||
|
|
||||||
## Running from Source
|
## Running from Source
|
||||||
|
|
||||||
CouchPotatoServer can be run from source. This will use *git* as updater, so make sure that is installed also.
|
CouchPotatoServer can be run from source. This will use *git* as updater, so make sure that is installed.
|
||||||
|
|
||||||
Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for more details:
|
Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for more details:
|
||||||
|
|
||||||
@@ -17,9 +17,9 @@ Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for
|
|||||||
* Open up `Git Bash` (or CMD) and go to the folder you want to install CP. Something like Program Files.
|
* Open up `Git Bash` (or CMD) and go to the folder you want to install CP. Something like Program Files.
|
||||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`.
|
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`.
|
||||||
* You can now start CP via `CouchPotatoServer\CouchPotato.py` to start
|
* You can now start CP via `CouchPotatoServer\CouchPotato.py` to start
|
||||||
* Your browser should open up, but if it doesn't go to: `http://localhost:5050/`
|
* Your browser should open up, but if it doesn't go to `http://localhost:5050/`
|
||||||
|
|
||||||
OSx:
|
OS X:
|
||||||
|
|
||||||
* If you're on Leopard (10.5) install Python 2.6+: [Python 2.6.5](http://www.python.org/download/releases/2.6.5/)
|
* If you're on Leopard (10.5) install Python 2.6+: [Python 2.6.5](http://www.python.org/download/releases/2.6.5/)
|
||||||
* Install [GIT](http://git-scm.com/)
|
* Install [GIT](http://git-scm.com/)
|
||||||
@@ -27,20 +27,27 @@ OSx:
|
|||||||
* Go to your App folder `cd /Applications`
|
* Go to your App folder `cd /Applications`
|
||||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||||
* Then do `python CouchPotatoServer/CouchPotato.py`
|
* Then do `python CouchPotatoServer/CouchPotato.py`
|
||||||
* Your browser should open up, but if it doesn't go to: `http://localhost:5050/`
|
* Your browser should open up, but if it doesn't go to `http://localhost:5050/`
|
||||||
|
|
||||||
Linux (ubuntu / debian):
|
Linux:
|
||||||
|
|
||||||
* Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
* (Ubuntu / Debian) Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
||||||
|
* (Fedora / CentOS) Install [GIT](http://git-scm.com/) with `yum install git`
|
||||||
* 'cd' to the folder of your choosing.
|
* 'cd' to the folder of your choosing.
|
||||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||||
* Then do `python CouchPotatoServer/CouchPotato.py` to start
|
* Then do `python CouchPotatoServer/CouchPotato.py` to start
|
||||||
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
* (Ubuntu / Debian) To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
||||||
* Change the paths inside the init script. `sudo nano /etc/init.d/couchpotato`
|
* (Ubuntu / Debian) Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato`
|
||||||
* Make it executable. `sudo chmod +x /etc/init.d/couchpotato`
|
* (Ubuntu / Debian) Change the paths inside the default file `sudo nano /etc/default/couchpotato`
|
||||||
* Add it to defaults. `sudo update-rc.d couchpotato defaults`
|
* (Ubuntu / Debian) Make it executable `sudo chmod +x /etc/init.d/couchpotato`
|
||||||
* Open your browser and go to: `http://localhost:5050/`
|
* (Ubuntu / Debian) Add it to defaults `sudo update-rc.d couchpotato defaults`
|
||||||
|
* (systemd) To run on boot copy the systemd config `sudo cp CouchPotatoServer/init/couchpotato.fedora.service /etc/systemd/system/couchpotato.service`
|
||||||
|
* (systemd) Update the systemd config file with your user and path to CouchPotato.py
|
||||||
|
* (systemd) Enable it at boot with `sudo systemctl enable couchpotato`
|
||||||
|
* Open your browser and go to `http://localhost:5050/`
|
||||||
|
|
||||||
|
Docker:
|
||||||
|
* You can use [razorgirl's Dockerfile](https://github.com/razorgirl/docker-couchpotato) to quickly build your own isolated app container. It's based on the Linux instructions above. For more info about Docker check out the [official website](https://www.docker.com).
|
||||||
|
|
||||||
FreeBSD :
|
FreeBSD :
|
||||||
|
|
||||||
@@ -56,7 +63,7 @@ FreeBSD :
|
|||||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||||
* Then run `sudo python CouchPotatoServer/CouchPotato.py` to start for the first time
|
* Then run `sudo python CouchPotatoServer/CouchPotato.py` to start for the first time
|
||||||
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/freebsd /etc/rc.d/couchpotato`
|
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/freebsd /etc/rc.d/couchpotato`
|
||||||
* Change the paths inside the init script. `sudo vim /etc/init.d/couchpotato`
|
* Change the paths inside the init script. `sudo vim /etc/rc.d/couchpotato`
|
||||||
* Make init script executable. `sudo chmod +x /etc/rc.d/couchpotato`
|
* Make init script executable. `sudo chmod +x /etc/rc.d/couchpotato`
|
||||||
* Add init to startup. `sudo echo 'couchpotato_enable="YES"' >> /etc/rc.conf`
|
* Add init to startup. `sudo echo 'couchpotato_enable="YES"' >> /etc/rc.conf`
|
||||||
* Open your browser and go to: `http://server:5050/`
|
* Open your browser and go to: `http://server:5050/`
|
||||||
|
|||||||
@@ -1,25 +1,40 @@
|
|||||||
## Got a issue/feature request or submitting a pull request?
|
# Contributing to CouchPotatoServer
|
||||||
|
|
||||||
Make sure you think of the following things:
|
1. [Contributing](#contributing)
|
||||||
|
2. [Submitting an Issue](#issues)
|
||||||
|
3. [Submitting a Pull Request](#pull-requests)
|
||||||
|
|
||||||
## Issue
|
## Contributing
|
||||||
* Search through the existing (and closed) issues first, see if you can get your answer there.
|
Thank you for your interest in contributing to CouchPotato. There are several ways to help out, even if you've never worked on an open source project before.
|
||||||
* Double check the result manually, because it could be an external issue.
|
If you've found a bug or want to request a feature, you can report it by [posting an issue](https://github.com/RuudBurger/CouchPotatoServer/issues/new) - be sure to read the [guidelines](#issues) first!
|
||||||
* Post logs! Without seeing what is going on, I can't reproduce the error.
|
If you want to contribute your own work, please read the [guidelines](#pull-requests) for submitting a pull request.
|
||||||
* Also check the logs before submitting, obvious errors like permission or http errors are often not related to CP.
|
Lastly, for anything related to CouchPotato, feel free to stop by the [forum](http://couchpota.to/forum/) or the [#couchpotato](http://webchat.freenode.net/?channels=couchpotato) IRC channel at irc.freenode.net.
|
||||||
* What is the movie + quality you are searching for?
|
|
||||||
* What are you're settings for the specific problem?
|
## Issues
|
||||||
* What providers are you using? (While you're logs include these, scanning through hundred of lines of log isn't our hobby)
|
Issues are intended for reporting bugs and weird behaviour or suggesting improvements to CouchPotatoServer.
|
||||||
* Post the logs from config directory, please do not copy paste the UI. Use pastebin to store these logs!
|
Before you submit an issue, please go through the following checklist:
|
||||||
|
* **FILL IN ALL THE FIELDS ASKED FOR**
|
||||||
|
* **POST MORE THAN A SINGLE LINE LOG**, if you do, you'd better have a easy reproducable bug
|
||||||
|
* Search through existing issues (*including closed issues!*) first: you might be able to get your answer there.
|
||||||
|
* Double check your issue manually, because it could be an external issue.
|
||||||
|
* Post logs with your issue: Without seeing what is going on, the developers can't reproduce the error.
|
||||||
|
* Check the logs yourself before submitting them. Obvious errors like permission or HTTP errors are often not related to CouchPotato.
|
||||||
|
* What movie and quality are you searching for?
|
||||||
|
* What are your settings for the specific problem?
|
||||||
|
* What providers are you using? (While your logs include these, scanning through hundreds of lines of logs isn't our hobby)
|
||||||
|
* Post the logs from the *config* directory, please do not copy paste the UI. Use pastebin to store these logs!
|
||||||
* Give a short step by step of how to reproduce the error.
|
* Give a short step by step of how to reproduce the error.
|
||||||
* What hardware / OS are you using and what are the limits? NAS can be slow and maybe have a different python installed then when you use CP on OSX or Windows for example.
|
* What hardware / OS are you using and what are its limitations? For example: NAS can be slow and maybe have a different version of python installed than when you use CP on OS X or Windows.
|
||||||
* I will mark issues with the "can't reproduce" tag. Don't go asking "why closed" if it clearly says the issue in the tag ;)
|
* Your issue might be marked with the "can't reproduce" tag. Don't ask why your issue was closed if it says so in the tag.
|
||||||
* If you're running on a NAS (QNAP, Austor etc..) with pre-made packages, make sure these are setup to use our source repo (RuudBurger/CouchPotatoServer) and nothing else!!
|
* If you're running on a NAS (QNAP, Austor, Synology etc.) with pre-made packages, make sure these are set up to use our source repository (RuudBurger/CouchPotatoServer) and nothing else!
|
||||||
|
* Do not "bump" issues with "Any updates on this" or whatever. Yes I've seen it, you don't have to remind me of it. There will be an update when the code is done or I need information. If you feel the need to do so, you'd better have more info on the issue.
|
||||||
|
|
||||||
## Pull Request
|
The more relevant information you provide, the more likely that your issue will be resolved.
|
||||||
* Make sure you're pull request is made for develop branch (or relevant feature branch)
|
If you don't follow any of the checks above, I'll close the issue. If you are wondering why (and ask) I'll block you from posting new issues and the repo.
|
||||||
|
|
||||||
|
## Pull Requests
|
||||||
|
Pull requests are intended for contributing code or documentation to the project. Before you submit a pull request, consider the following:
|
||||||
|
* Make sure your pull request is made for the *develop* branch (or relevant feature branch).
|
||||||
* Have you tested your PR? If not, why?
|
* Have you tested your PR? If not, why?
|
||||||
* Are there any limitations of your PR we should know of?
|
* Does your PR have any limitations I should know of?
|
||||||
* Make sure to keep you're PR up-to-date with the branch you're trying to push into.
|
* Is your PR up-to-date with the branch you're trying to push into?
|
||||||
|
|
||||||
**If we don't get enough info, the chance of the issue getting closed is a lot bigger ;)**
|
|
||||||
|
|||||||
@@ -1,3 +1,7 @@
|
|||||||
|
import os
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
|
||||||
from couchpotato.api import api_docs, api_docs_missing, api
|
from couchpotato.api import api_docs, api_docs_missing, api
|
||||||
from couchpotato.core.event import fireEvent
|
from couchpotato.core.event import fireEvent
|
||||||
from couchpotato.core.helpers.variable import md5, tryInt
|
from couchpotato.core.helpers.variable import md5, tryInt
|
||||||
@@ -5,13 +9,10 @@ from couchpotato.core.logger import CPLog
|
|||||||
from couchpotato.environment import Env
|
from couchpotato.environment import Env
|
||||||
from tornado import template
|
from tornado import template
|
||||||
from tornado.web import RequestHandler, authenticated
|
from tornado.web import RequestHandler, authenticated
|
||||||
import os
|
|
||||||
import time
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
views = {}
|
views = {}
|
||||||
template_loader = template.Loader(os.path.join(os.path.dirname(__file__), 'templates'))
|
template_loader = template.Loader(os.path.join(os.path.dirname(__file__), 'templates'))
|
||||||
|
|
||||||
@@ -24,9 +25,10 @@ class BaseHandler(RequestHandler):
|
|||||||
|
|
||||||
if username and password:
|
if username and password:
|
||||||
return self.get_secure_cookie('user')
|
return self.get_secure_cookie('user')
|
||||||
else: # Login when no username or password are set
|
else: # Login when no username or password are set
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
# Main web handler
|
# Main web handler
|
||||||
class WebHandler(BaseHandler):
|
class WebHandler(BaseHandler):
|
||||||
|
|
||||||
@@ -38,16 +40,20 @@ class WebHandler(BaseHandler):
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if route == 'robots.txt':
|
||||||
|
self.set_header('Content-Type', 'text/plain')
|
||||||
self.write(views[route]())
|
self.write(views[route]())
|
||||||
except:
|
except:
|
||||||
log.error("Failed doing web request '%s': %s", (route, traceback.format_exc()))
|
log.error("Failed doing web request '%s': %s", (route, traceback.format_exc()))
|
||||||
self.write({'success': False, 'error': 'Failed returning results'})
|
self.write({'success': False, 'error': 'Failed returning results'})
|
||||||
|
|
||||||
def addView(route, func, static = False):
|
|
||||||
|
def addView(route, func):
|
||||||
views[route] = func
|
views[route] = func
|
||||||
|
|
||||||
def get_session(engine = None):
|
|
||||||
return Env.getSession(engine)
|
def get_db():
|
||||||
|
return Env.get('db')
|
||||||
|
|
||||||
|
|
||||||
# Web view
|
# Web view
|
||||||
@@ -55,12 +61,17 @@ def index():
|
|||||||
return template_loader.load('index.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env)
|
return template_loader.load('index.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env)
|
||||||
addView('', index)
|
addView('', index)
|
||||||
|
|
||||||
|
|
||||||
|
# Web view
|
||||||
|
def robots():
|
||||||
|
return 'User-agent: * \n' \
|
||||||
|
'Disallow: /'
|
||||||
|
addView('robots.txt', robots)
|
||||||
|
|
||||||
|
|
||||||
# API docs
|
# API docs
|
||||||
def apiDocs():
|
def apiDocs():
|
||||||
routes = []
|
routes = list(api.keys())
|
||||||
|
|
||||||
for route in api.iterkeys():
|
|
||||||
routes.append(route)
|
|
||||||
|
|
||||||
if api_docs.get(''):
|
if api_docs.get(''):
|
||||||
del api_docs['']
|
del api_docs['']
|
||||||
@@ -70,21 +81,30 @@ def apiDocs():
|
|||||||
|
|
||||||
addView('docs', apiDocs)
|
addView('docs', apiDocs)
|
||||||
|
|
||||||
|
|
||||||
|
# Database debug manager
|
||||||
|
def databaseManage():
|
||||||
|
return template_loader.load('database.html').generate(fireEvent = fireEvent, Env = Env)
|
||||||
|
|
||||||
|
addView('database', databaseManage)
|
||||||
|
|
||||||
|
|
||||||
# Make non basic auth option to get api key
|
# Make non basic auth option to get api key
|
||||||
class KeyHandler(RequestHandler):
|
class KeyHandler(RequestHandler):
|
||||||
|
|
||||||
def get(self, *args, **kwargs):
|
def get(self, *args, **kwargs):
|
||||||
api = None
|
api_key = None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
username = Env.setting('username')
|
username = Env.setting('username')
|
||||||
password = Env.setting('password')
|
password = Env.setting('password')
|
||||||
|
|
||||||
if (self.get_argument('u') == md5(username) or not username) and (self.get_argument('p') == password or not password):
|
if (self.get_argument('u') == md5(username) or not username) and (self.get_argument('p') == password or not password):
|
||||||
api = Env.setting('api_key')
|
api_key = Env.setting('api_key')
|
||||||
|
|
||||||
self.write({
|
self.write({
|
||||||
'success': api is not None,
|
'success': api_key is not None,
|
||||||
'api_key': api
|
'api_key': api_key
|
||||||
})
|
})
|
||||||
except:
|
except:
|
||||||
log.error('Failed doing key request: %s', (traceback.format_exc()))
|
log.error('Failed doing key request: %s', (traceback.format_exc()))
|
||||||
@@ -102,20 +122,21 @@ class LoginHandler(BaseHandler):
|
|||||||
|
|
||||||
def post(self, *args, **kwargs):
|
def post(self, *args, **kwargs):
|
||||||
|
|
||||||
api = None
|
api_key = None
|
||||||
|
|
||||||
username = Env.setting('username')
|
username = Env.setting('username')
|
||||||
password = Env.setting('password')
|
password = Env.setting('password')
|
||||||
|
|
||||||
if (self.get_argument('username') == username or not username) and (md5(self.get_argument('password')) == password or not password):
|
if (self.get_argument('username') == username or not username) and (md5(self.get_argument('password')) == password or not password):
|
||||||
api = Env.setting('api_key')
|
api_key = Env.setting('api_key')
|
||||||
|
|
||||||
if api:
|
if api_key:
|
||||||
remember_me = tryInt(self.get_argument('remember_me', default = 0))
|
remember_me = tryInt(self.get_argument('remember_me', default = 0))
|
||||||
self.set_secure_cookie('user', api, expires_days = 30 if remember_me > 0 else None)
|
self.set_secure_cookie('user', api_key, expires_days = 30 if remember_me > 0 else None)
|
||||||
|
|
||||||
self.redirect(Env.get('web_base'))
|
self.redirect(Env.get('web_base'))
|
||||||
|
|
||||||
|
|
||||||
class LogoutHandler(BaseHandler):
|
class LogoutHandler(BaseHandler):
|
||||||
|
|
||||||
def get(self, *args, **kwargs):
|
def get(self, *args, **kwargs):
|
||||||
@@ -136,4 +157,3 @@ def page_not_found(rh):
|
|||||||
|
|
||||||
rh.set_status(404)
|
rh.set_status(404)
|
||||||
rh.write('Wrong API key used')
|
rh.write('Wrong API key used')
|
||||||
|
|
||||||
|
|||||||
@@ -1,15 +1,16 @@
|
|||||||
from couchpotato.core.helpers.request import getParams
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
from threading import Thread
|
from threading import Thread
|
||||||
from tornado.gen import coroutine
|
|
||||||
from tornado.web import RequestHandler, asynchronous
|
|
||||||
import json
|
import json
|
||||||
import threading
|
import threading
|
||||||
import tornado
|
|
||||||
import traceback
|
import traceback
|
||||||
import urllib
|
import urllib
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.request import getParams
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from tornado.ioloop import IOLoop
|
||||||
|
from tornado.web import RequestHandler, asynchronous
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -20,15 +21,25 @@ api_nonblock = {}
|
|||||||
api_docs = {}
|
api_docs = {}
|
||||||
api_docs_missing = []
|
api_docs_missing = []
|
||||||
|
|
||||||
|
|
||||||
def run_async(func):
|
def run_async(func):
|
||||||
@wraps(func)
|
@wraps(func)
|
||||||
def async_func(*args, **kwargs):
|
def async_func(*args, **kwargs):
|
||||||
func_hl = Thread(target = func, args = args, kwargs = kwargs)
|
func_hl = Thread(target = func, args = args, kwargs = kwargs)
|
||||||
func_hl.start()
|
func_hl.start()
|
||||||
return func_hl
|
|
||||||
|
|
||||||
return async_func
|
return async_func
|
||||||
|
|
||||||
|
@run_async
|
||||||
|
def run_handler(route, kwargs, callback = None):
|
||||||
|
try:
|
||||||
|
res = api[route](**kwargs)
|
||||||
|
callback(res, route)
|
||||||
|
except:
|
||||||
|
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
||||||
|
callback({'success': False, 'error': 'Failed returning results'}, route)
|
||||||
|
|
||||||
|
|
||||||
# NonBlock API handler
|
# NonBlock API handler
|
||||||
class NonBlockHandler(RequestHandler):
|
class NonBlockHandler(RequestHandler):
|
||||||
|
|
||||||
@@ -40,27 +51,26 @@ class NonBlockHandler(RequestHandler):
|
|||||||
start, stop = api_nonblock[route]
|
start, stop = api_nonblock[route]
|
||||||
self.stopper = stop
|
self.stopper = stop
|
||||||
|
|
||||||
start(self.onNewMessage, last_id = self.get_argument('last_id', None))
|
start(self.sendData, last_id = self.get_argument('last_id', None))
|
||||||
|
|
||||||
def onNewMessage(self, response):
|
def sendData(self, response):
|
||||||
if self.request.connection.stream.closed():
|
if not self.request.connection.stream.closed():
|
||||||
self.on_connection_close()
|
try:
|
||||||
return
|
self.finish(response)
|
||||||
|
except:
|
||||||
|
log.debug('Failed doing nonblock request, probably already closed: %s', (traceback.format_exc()))
|
||||||
|
try: self.finish({'success': False, 'error': 'Failed returning results'})
|
||||||
|
except: pass
|
||||||
|
|
||||||
try:
|
self.removeStopper()
|
||||||
self.finish(response)
|
|
||||||
except:
|
|
||||||
log.debug('Failed doing nonblock request, probably already closed: %s', (traceback.format_exc()))
|
|
||||||
try: self.finish({'success': False, 'error': 'Failed returning results'})
|
|
||||||
except: pass
|
|
||||||
|
|
||||||
def on_connection_close(self):
|
|
||||||
|
|
||||||
|
def removeStopper(self):
|
||||||
if self.stopper:
|
if self.stopper:
|
||||||
self.stopper(self.onNewMessage)
|
self.stopper(self.sendData)
|
||||||
|
|
||||||
self.stopper = None
|
self.stopper = None
|
||||||
|
|
||||||
|
|
||||||
def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
|
def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
|
||||||
api_nonblock[route] = func_tuple
|
api_nonblock[route] = func_tuple
|
||||||
|
|
||||||
@@ -69,16 +79,23 @@ def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
|
|||||||
else:
|
else:
|
||||||
api_docs_missing.append(route)
|
api_docs_missing.append(route)
|
||||||
|
|
||||||
|
|
||||||
# Blocking API handler
|
# Blocking API handler
|
||||||
class ApiHandler(RequestHandler):
|
class ApiHandler(RequestHandler):
|
||||||
|
route = None
|
||||||
|
|
||||||
@coroutine
|
@asynchronous
|
||||||
def get(self, route, *args, **kwargs):
|
def get(self, route, *args, **kwargs):
|
||||||
route = route.strip('/')
|
self.route = route = route.strip('/')
|
||||||
if not api.get(route):
|
if not api.get(route):
|
||||||
self.write('API call doesn\'t seem to exist')
|
self.write('API call doesn\'t seem to exist')
|
||||||
|
self.finish()
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# Create lock if it doesn't exist
|
||||||
|
if route in api_locks and not api_locks.get(route):
|
||||||
|
api_locks[route] = threading.Lock()
|
||||||
|
|
||||||
api_locks[route].acquire()
|
api_locks[route].acquire()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -89,38 +106,56 @@ class ApiHandler(RequestHandler):
|
|||||||
|
|
||||||
# Split array arguments
|
# Split array arguments
|
||||||
kwargs = getParams(kwargs)
|
kwargs = getParams(kwargs)
|
||||||
|
kwargs['_request'] = self
|
||||||
|
|
||||||
# Remove t random string
|
# Remove t random string
|
||||||
try: del kwargs['t']
|
try: del kwargs['t']
|
||||||
except: pass
|
except: pass
|
||||||
|
|
||||||
# Add async callback handler
|
# Add async callback handler
|
||||||
@run_async
|
run_handler(route, kwargs, callback = self.taskFinished)
|
||||||
def run_handler(callback):
|
|
||||||
try:
|
|
||||||
result = api[route](**kwargs)
|
|
||||||
callback(result)
|
|
||||||
except:
|
|
||||||
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
|
||||||
callback({'success': False, 'error': 'Failed returning results'})
|
|
||||||
result = yield tornado.gen.Task(run_handler)
|
|
||||||
|
|
||||||
# Check JSONP callback
|
|
||||||
jsonp_callback = self.get_argument('callback_func', default = None)
|
|
||||||
|
|
||||||
if jsonp_callback:
|
|
||||||
self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')')
|
|
||||||
self.set_header("Content-Type", "text/javascript")
|
|
||||||
elif isinstance(result, tuple) and result[0] == 'redirect':
|
|
||||||
self.redirect(result[1])
|
|
||||||
else:
|
|
||||||
self.write(result)
|
|
||||||
|
|
||||||
except:
|
except:
|
||||||
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
||||||
self.write({'success': False, 'error': 'Failed returning results'})
|
try:
|
||||||
|
self.write({'success': False, 'error': 'Failed returning results'})
|
||||||
|
self.finish()
|
||||||
|
except:
|
||||||
|
log.error('Failed write error "%s": %s', (route, traceback.format_exc()))
|
||||||
|
|
||||||
|
self.unlock()
|
||||||
|
|
||||||
|
post = get
|
||||||
|
|
||||||
|
def taskFinished(self, result, route):
|
||||||
|
IOLoop.current().add_callback(self.sendData, result, route)
|
||||||
|
self.unlock()
|
||||||
|
|
||||||
|
def sendData(self, result, route):
|
||||||
|
|
||||||
|
if not self.request.connection.stream.closed():
|
||||||
|
try:
|
||||||
|
# Check JSONP callback
|
||||||
|
jsonp_callback = self.get_argument('callback_func', default = None)
|
||||||
|
|
||||||
|
if jsonp_callback:
|
||||||
|
self.set_header('Content-Type', 'text/javascript')
|
||||||
|
self.finish(str(jsonp_callback) + '(' + json.dumps(result) + ')')
|
||||||
|
elif isinstance(result, tuple) and result[0] == 'redirect':
|
||||||
|
self.redirect(result[1])
|
||||||
|
else:
|
||||||
|
self.finish(result)
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
log.error('Failed proper encode: %s', traceback.format_exc())
|
||||||
|
except:
|
||||||
|
log.debug('Failed doing request, probably already closed: %s', (traceback.format_exc()))
|
||||||
|
try: self.finish({'success': False, 'error': 'Failed returning results'})
|
||||||
|
except: pass
|
||||||
|
|
||||||
|
def unlock(self):
|
||||||
|
try: api_locks[self.route].release()
|
||||||
|
except: pass
|
||||||
|
|
||||||
api_locks[route].release()
|
|
||||||
|
|
||||||
def addApiView(route, func, static = False, docs = None, **kwargs):
|
def addApiView(route, func, static = False, docs = None, **kwargs):
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,3 @@
|
|||||||
from couchpotato.api import addApiView
|
|
||||||
from couchpotato.core.event import fireEvent, addEvent
|
|
||||||
from couchpotato.core.helpers.variable import cleanHost, md5
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from couchpotato.core.plugins.base import Plugin
|
|
||||||
from couchpotato.environment import Env
|
|
||||||
from tornado.ioloop import IOLoop
|
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
@@ -13,8 +6,19 @@ import time
|
|||||||
import traceback
|
import traceback
|
||||||
import webbrowser
|
import webbrowser
|
||||||
|
|
||||||
|
from couchpotato.api import addApiView
|
||||||
|
from couchpotato.core.event import fireEvent, addEvent
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost, md5, isSubFolder
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.plugins.base import Plugin
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
from tornado.ioloop import IOLoop
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Core'
|
||||||
|
|
||||||
|
|
||||||
class Core(Plugin):
|
class Core(Plugin):
|
||||||
|
|
||||||
@@ -47,6 +51,7 @@ class Core(Plugin):
|
|||||||
addEvent('app.api_url', self.createApiUrl)
|
addEvent('app.api_url', self.createApiUrl)
|
||||||
addEvent('app.version', self.version)
|
addEvent('app.version', self.version)
|
||||||
addEvent('app.load', self.checkDataDir)
|
addEvent('app.load', self.checkDataDir)
|
||||||
|
addEvent('app.load', self.cleanUpFolders)
|
||||||
|
|
||||||
addEvent('setting.save.core.password', self.md5Password)
|
addEvent('setting.save.core.password', self.md5Password)
|
||||||
addEvent('setting.save.core.api_key', self.checkApikey)
|
addEvent('setting.save.core.api_key', self.checkApikey)
|
||||||
@@ -66,11 +71,15 @@ class Core(Plugin):
|
|||||||
return value if value and len(value) > 3 else uuid4().hex
|
return value if value and len(value) > 3 else uuid4().hex
|
||||||
|
|
||||||
def checkDataDir(self):
|
def checkDataDir(self):
|
||||||
if Env.get('app_dir') in Env.get('data_dir'):
|
if isSubFolder(Env.get('data_dir'), Env.get('app_dir')):
|
||||||
log.error('You should NOT use your CouchPotato directory to save your settings in. Files will get overwritten or be deleted.')
|
log.error('You should NOT use your CouchPotato directory to save your settings in. Files will get overwritten or be deleted.')
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def cleanUpFolders(self):
|
||||||
|
only_clean = ['couchpotato', 'libs', 'init']
|
||||||
|
self.deleteEmptyFolder(Env.get('app_dir'), show_error = False, only_clean = only_clean)
|
||||||
|
|
||||||
def available(self, **kwargs):
|
def available(self, **kwargs):
|
||||||
return {
|
return {
|
||||||
'success': True
|
'success': True
|
||||||
@@ -82,7 +91,11 @@ class Core(Plugin):
|
|||||||
|
|
||||||
def shutdown():
|
def shutdown():
|
||||||
self.initShutdown()
|
self.initShutdown()
|
||||||
IOLoop.current().add_callback(shutdown)
|
|
||||||
|
if IOLoop.current()._closing:
|
||||||
|
shutdown()
|
||||||
|
else:
|
||||||
|
IOLoop.current().add_callback(shutdown)
|
||||||
|
|
||||||
return 'shutdown'
|
return 'shutdown'
|
||||||
|
|
||||||
@@ -105,7 +118,7 @@ class Core(Plugin):
|
|||||||
|
|
||||||
self.shutdown_started = True
|
self.shutdown_started = True
|
||||||
|
|
||||||
fireEvent('app.do_shutdown')
|
fireEvent('app.do_shutdown', restart = restart)
|
||||||
log.debug('Every plugin got shutdown event')
|
log.debug('Every plugin got shutdown event')
|
||||||
|
|
||||||
loop = True
|
loop = True
|
||||||
@@ -117,7 +130,7 @@ class Core(Plugin):
|
|||||||
|
|
||||||
if len(still_running) == 0:
|
if len(still_running) == 0:
|
||||||
break
|
break
|
||||||
elif starttime < time.time() - 30: # Always force break after 30s wait
|
elif starttime < time.time() - 30: # Always force break after 30s wait
|
||||||
break
|
break
|
||||||
|
|
||||||
running = list(set(still_running) - set(self.ignore_restart))
|
running = list(set(still_running) - set(self.ignore_restart))
|
||||||
@@ -130,8 +143,11 @@ class Core(Plugin):
|
|||||||
|
|
||||||
log.debug('Safe to shutdown/restart')
|
log.debug('Safe to shutdown/restart')
|
||||||
|
|
||||||
|
loop = IOLoop.current()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
IOLoop.current().stop()
|
if not loop._closing:
|
||||||
|
loop.stop()
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
pass
|
pass
|
||||||
except:
|
except:
|
||||||
@@ -165,13 +181,13 @@ class Core(Plugin):
|
|||||||
return '%sapi/%s' % (self.createBaseUrl(), Env.setting('api_key'))
|
return '%sapi/%s' % (self.createBaseUrl(), Env.setting('api_key'))
|
||||||
|
|
||||||
def version(self):
|
def version(self):
|
||||||
ver = fireEvent('updater.info', single = True)
|
ver = fireEvent('updater.info', single = True) or {'version': {}}
|
||||||
|
|
||||||
if os.name == 'nt': platf = 'windows'
|
if os.name == 'nt': platf = 'windows'
|
||||||
elif 'Darwin' in platform.platform(): platf = 'osx'
|
elif 'Darwin' in platform.platform(): platf = 'osx'
|
||||||
else: platf = 'linux'
|
else: platf = 'linux'
|
||||||
|
|
||||||
return '%s - %s-%s - v2' % (platf, ver.get('version')['type'], ver.get('version')['hash'])
|
return '%s - %s-%s - v2' % (platf, ver.get('version').get('type') or 'unknown', ver.get('version').get('hash') or 'unknown')
|
||||||
|
|
||||||
def versionView(self, **kwargs):
|
def versionView(self, **kwargs):
|
||||||
return {
|
return {
|
||||||
@@ -181,8 +197,104 @@ class Core(Plugin):
|
|||||||
def signalHandler(self):
|
def signalHandler(self):
|
||||||
if Env.get('daemonized'): return
|
if Env.get('daemonized'): return
|
||||||
|
|
||||||
def signal_handler(signal, frame):
|
def signal_handler(*args, **kwargs):
|
||||||
fireEvent('app.shutdown', single = True)
|
fireEvent('app.shutdown', single = True)
|
||||||
|
|
||||||
signal.signal(signal.SIGINT, signal_handler)
|
signal.signal(signal.SIGINT, signal_handler)
|
||||||
signal.signal(signal.SIGTERM, signal_handler)
|
signal.signal(signal.SIGTERM, signal_handler)
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'core',
|
||||||
|
'order': 1,
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'general',
|
||||||
|
'name': 'basics',
|
||||||
|
'description': 'Needs restart before changes take effect.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'default': '',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'port',
|
||||||
|
'default': 5050,
|
||||||
|
'type': 'int',
|
||||||
|
'description': 'The port I should listen to.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'ssl_cert',
|
||||||
|
'description': 'Path to SSL server.crt',
|
||||||
|
'advanced': True,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'ssl_key',
|
||||||
|
'description': 'Path to SSL server.key',
|
||||||
|
'advanced': True,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'launch_browser',
|
||||||
|
'default': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Launch the browser when I start.',
|
||||||
|
'wizard': True,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'tab': 'general',
|
||||||
|
'name': 'advanced',
|
||||||
|
'description': "For those who know what they're doing",
|
||||||
|
'advanced': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'api_key',
|
||||||
|
'default': uuid4().hex,
|
||||||
|
'readonly': 1,
|
||||||
|
'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'debug',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Enable debugging.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'development',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'data_dir',
|
||||||
|
'type': 'directory',
|
||||||
|
'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'url_base',
|
||||||
|
'default': '',
|
||||||
|
'description': 'When using mod_proxy use this to append the url with this.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'permission_folder',
|
||||||
|
'default': '0755',
|
||||||
|
'label': 'Folder CHMOD',
|
||||||
|
'description': 'Can be either decimal (493) or octal (leading zero: 0755). <a target="_blank" href="http://permissions-calculator.org/">Calculate the correct value</a>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'permission_file',
|
||||||
|
'default': '0644',
|
||||||
|
'label': 'File CHMOD',
|
||||||
|
'description': 'See Folder CHMOD description, but for files',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,100 +0,0 @@
|
|||||||
from .main import Core
|
|
||||||
from uuid import uuid4
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Core()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'core',
|
|
||||||
'order': 1,
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'general',
|
|
||||||
'name': 'basics',
|
|
||||||
'description': 'Needs restart before changes take effect.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'username',
|
|
||||||
'default': '',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'password',
|
|
||||||
'default': '',
|
|
||||||
'type': 'password',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'port',
|
|
||||||
'default': 5050,
|
|
||||||
'type': 'int',
|
|
||||||
'description': 'The port I should listen to.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'ssl_cert',
|
|
||||||
'description': 'Path to SSL server.crt',
|
|
||||||
'advanced': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'ssl_key',
|
|
||||||
'description': 'Path to SSL server.key',
|
|
||||||
'advanced': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'launch_browser',
|
|
||||||
'default': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Launch the browser when I start.',
|
|
||||||
'wizard': True,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'tab': 'general',
|
|
||||||
'name': 'advanced',
|
|
||||||
'description': "For those who know what they're doing",
|
|
||||||
'advanced': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'api_key',
|
|
||||||
'default': uuid4().hex,
|
|
||||||
'readonly': 1,
|
|
||||||
'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'debug',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Enable debugging.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'development',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'data_dir',
|
|
||||||
'type': 'directory',
|
|
||||||
'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'url_base',
|
|
||||||
'default': '',
|
|
||||||
'description': 'When using mod_proxy use this to append the url with this.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'permission_folder',
|
|
||||||
'default': '0755',
|
|
||||||
'label': 'Folder CHMOD',
|
|
||||||
'description': 'Can be either decimal (493) or octal (leading zero: 0755)',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'permission_file',
|
|
||||||
'default': '0755',
|
|
||||||
'label': 'File CHMOD',
|
|
||||||
'description': 'Same as Folder CHMOD but for files',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
@@ -1,3 +1,7 @@
|
|||||||
|
import os
|
||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
from couchpotato.core.event import addEvent
|
from couchpotato.core.event import addEvent
|
||||||
from couchpotato.core.helpers.encoding import ss
|
from couchpotato.core.helpers.encoding import ss
|
||||||
from couchpotato.core.helpers.variable import tryInt
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
@@ -7,12 +11,12 @@ from couchpotato.environment import Env
|
|||||||
from minify.cssmin import cssmin
|
from minify.cssmin import cssmin
|
||||||
from minify.jsmin import jsmin
|
from minify.jsmin import jsmin
|
||||||
from tornado.web import StaticFileHandler
|
from tornado.web import StaticFileHandler
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'ClientScript'
|
||||||
|
|
||||||
|
|
||||||
class ClientScript(Plugin):
|
class ClientScript(Plugin):
|
||||||
|
|
||||||
@@ -45,20 +49,17 @@ class ClientScript(Plugin):
|
|||||||
'scripts/block/footer.js',
|
'scripts/block/footer.js',
|
||||||
'scripts/block/menu.js',
|
'scripts/block/menu.js',
|
||||||
'scripts/page/home.js',
|
'scripts/page/home.js',
|
||||||
'scripts/page/wanted.js',
|
|
||||||
'scripts/page/settings.js',
|
'scripts/page/settings.js',
|
||||||
'scripts/page/about.js',
|
'scripts/page/about.js',
|
||||||
'scripts/page/manage.js',
|
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
urls = {'style': {}, 'script': {}}
|
||||||
urls = {'style': {}, 'script': {}, }
|
minified = {'style': {}, 'script': {}}
|
||||||
minified = {'style': {}, 'script': {}, }
|
paths = {'style': {}, 'script': {}}
|
||||||
paths = {'style': {}, 'script': {}, }
|
|
||||||
comment = {
|
comment = {
|
||||||
'style': '/*** %s:%d ***/\n',
|
'style': '/*** %s:%d ***/\n',
|
||||||
'script': '// %s:%d\n'
|
'script': '// %s:%d\n'
|
||||||
}
|
}
|
||||||
|
|
||||||
html = {
|
html = {
|
||||||
@@ -90,7 +91,6 @@ class ClientScript(Plugin):
|
|||||||
else:
|
else:
|
||||||
self.registerStyle(core_url, file_path, position = 'front')
|
self.registerStyle(core_url, file_path, position = 'front')
|
||||||
|
|
||||||
|
|
||||||
def minify(self):
|
def minify(self):
|
||||||
|
|
||||||
# Create cache dir
|
# Create cache dir
|
||||||
@@ -124,7 +124,7 @@ class ClientScript(Plugin):
|
|||||||
data = cssmin(data)
|
data = cssmin(data)
|
||||||
data = data.replace('../images/', '../static/images/')
|
data = data.replace('../images/', '../static/images/')
|
||||||
data = data.replace('../fonts/', '../static/fonts/')
|
data = data.replace('../fonts/', '../static/fonts/')
|
||||||
data = data.replace('../../static/', '../static/') # Replace inside plugins
|
data = data.replace('../../static/', '../static/') # Replace inside plugins
|
||||||
|
|
||||||
raw.append({'file': file_path, 'date': int(os.path.getmtime(file_path)), 'data': data})
|
raw.append({'file': file_path, 'date': int(os.path.getmtime(file_path)), 'data': data})
|
||||||
|
|
||||||
@@ -187,6 +187,7 @@ class ClientScript(Plugin):
|
|||||||
|
|
||||||
prefix_properties = ['border-radius', 'transform', 'transition', 'box-shadow']
|
prefix_properties = ['border-radius', 'transform', 'transition', 'box-shadow']
|
||||||
prefix_tags = ['ms', 'moz', 'webkit']
|
prefix_tags = ['ms', 'moz', 'webkit']
|
||||||
|
|
||||||
def prefix(self, data):
|
def prefix(self, data):
|
||||||
|
|
||||||
trimmed_data = re.sub('(\t|\n|\r)+', '', data)
|
trimmed_data = re.sub('(\t|\n|\r)+', '', data)
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
from .main import ClientScript
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return ClientScript()
|
|
||||||
|
|
||||||
config = []
|
|
||||||
@@ -5,6 +5,9 @@ from couchpotato.environment import Env
|
|||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Desktop'
|
||||||
|
|
||||||
|
|
||||||
if Env.get('desktop'):
|
if Env.get('desktop'):
|
||||||
|
|
||||||
class Desktop(Plugin):
|
class Desktop(Plugin):
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
from .main import Desktop
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Desktop()
|
|
||||||
|
|
||||||
config = []
|
|
||||||
20
couchpotato/core/_base/downloader/__init__.py
Normal file
20
couchpotato/core/_base/downloader/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
from .main import Downloader
|
||||||
|
|
||||||
|
|
||||||
|
def autoload():
|
||||||
|
return Downloader()
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'download_providers',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'label': 'Downloaders',
|
||||||
|
'description': 'You can select different downloaders for each type (usenet / torrent)',
|
||||||
|
'type': 'list',
|
||||||
|
'name': 'download_providers',
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'options': [],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,37 +1,51 @@
|
|||||||
from base64 import b32decode, b16encode
|
from base64 import b32decode, b16encode
|
||||||
|
import random
|
||||||
|
import re
|
||||||
|
|
||||||
|
from couchpotato.api import addApiView
|
||||||
from couchpotato.core.event import addEvent
|
from couchpotato.core.event import addEvent
|
||||||
from couchpotato.core.helpers.variable import mergeDicts
|
from couchpotato.core.helpers.variable import mergeDicts
|
||||||
from couchpotato.core.logger import CPLog
|
from couchpotato.core.logger import CPLog
|
||||||
from couchpotato.core.providers.base import Provider
|
from couchpotato.core.media._base.providers.base import Provider
|
||||||
import random
|
from couchpotato.core.plugins.base import Plugin
|
||||||
import re
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Downloader(Provider):
|
## This is here to load the static files
|
||||||
|
class Downloader(Plugin):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DownloaderBase(Provider):
|
||||||
|
|
||||||
protocol = []
|
protocol = []
|
||||||
http_time_between_calls = 0
|
http_time_between_calls = 0
|
||||||
status_support = True
|
status_support = True
|
||||||
|
|
||||||
torrent_sources = [
|
torrent_sources = [
|
||||||
|
'https://zoink.it/torrent/%s.torrent',
|
||||||
'http://torrage.com/torrent/%s.torrent',
|
'http://torrage.com/torrent/%s.torrent',
|
||||||
'https://torcache.net/torrent/%s.torrent',
|
'https://torcache.net/torrent/%s.torrent',
|
||||||
]
|
]
|
||||||
|
|
||||||
torrent_trackers = [
|
torrent_trackers = [
|
||||||
'http://tracker.publicbt.com/announce',
|
|
||||||
'udp://tracker.istole.it:80/announce',
|
'udp://tracker.istole.it:80/announce',
|
||||||
'udp://fr33domtracker.h33t.com:3310/announce',
|
|
||||||
'http://tracker.istole.it/announce',
|
'http://tracker.istole.it/announce',
|
||||||
'http://tracker.ccc.de/announce',
|
'udp://fr33domtracker.h33t.com:3310/announce',
|
||||||
|
'http://tracker.publicbt.com/announce',
|
||||||
'udp://tracker.publicbt.com:80/announce',
|
'udp://tracker.publicbt.com:80/announce',
|
||||||
|
'http://tracker.ccc.de/announce',
|
||||||
'udp://tracker.ccc.de:80/announce',
|
'udp://tracker.ccc.de:80/announce',
|
||||||
'http://exodus.desync.com/announce',
|
'http://exodus.desync.com/announce',
|
||||||
'http://exodus.desync.com:6969/announce',
|
'http://exodus.desync.com:6969/announce',
|
||||||
'http://tracker.publichd.eu/announce',
|
'http://tracker.publichd.eu/announce',
|
||||||
|
'udp://tracker.publichd.eu:80/announce',
|
||||||
'http://tracker.openbittorrent.com/announce',
|
'http://tracker.openbittorrent.com/announce',
|
||||||
|
'udp://tracker.openbittorrent.com/announce',
|
||||||
|
'udp://tracker.openbittorrent.com:80/announce',
|
||||||
|
'udp://open.demonii.com:1337/announce',
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@@ -42,6 +56,7 @@ class Downloader(Provider):
|
|||||||
addEvent('download.remove_failed', self._removeFailed)
|
addEvent('download.remove_failed', self._removeFailed)
|
||||||
addEvent('download.pause', self._pause)
|
addEvent('download.pause', self._pause)
|
||||||
addEvent('download.process_complete', self._processComplete)
|
addEvent('download.process_complete', self._processComplete)
|
||||||
|
addApiView('download.%s.test' % self.getName().lower(), self._test)
|
||||||
|
|
||||||
def getEnabledProtocol(self):
|
def getEnabledProtocol(self):
|
||||||
for download_protocol in self.protocol:
|
for download_protocol in self.protocol:
|
||||||
@@ -58,6 +73,9 @@ class Downloader(Provider):
|
|||||||
return
|
return
|
||||||
return self.download(data = data, media = media, filedata = filedata)
|
return self.download(data = data, media = media, filedata = filedata)
|
||||||
|
|
||||||
|
def download(self, *args, **kwargs):
|
||||||
|
return False
|
||||||
|
|
||||||
def _getAllDownloadStatus(self, download_ids):
|
def _getAllDownloadStatus(self, download_ids):
|
||||||
if self.isDisabled(manual = True, data = {}):
|
if self.isDisabled(manual = True, data = {}):
|
||||||
return
|
return
|
||||||
@@ -154,10 +172,19 @@ class Downloader(Provider):
|
|||||||
if not data: data = {}
|
if not data: data = {}
|
||||||
|
|
||||||
d_manual = self.conf('manual', default = False)
|
d_manual = self.conf('manual', default = False)
|
||||||
return super(Downloader, self).isEnabled() and \
|
return super(DownloaderBase, self).isEnabled() and \
|
||||||
(d_manual and manual or d_manual is False) and \
|
(d_manual and manual or d_manual is False) and \
|
||||||
(not data or self.isCorrectProtocol(data.get('protocol')))
|
(not data or self.isCorrectProtocol(data.get('protocol')))
|
||||||
|
|
||||||
|
def _test(self, **kwargs):
|
||||||
|
t = self.test()
|
||||||
|
if isinstance(t, tuple):
|
||||||
|
return {'success': t[0], 'msg': t[1]}
|
||||||
|
return {'success': t}
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
return False
|
||||||
|
|
||||||
def _pause(self, release_download, pause = True):
|
def _pause(self, release_download, pause = True):
|
||||||
if self.isDisabled(manual = True, data = {}):
|
if self.isDisabled(manual = True, data = {}):
|
||||||
return
|
return
|
||||||
@@ -171,6 +198,7 @@ class Downloader(Provider):
|
|||||||
def pause(self, release_download, pause):
|
def pause(self, release_download, pause):
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
class ReleaseDownloadList(list):
|
class ReleaseDownloadList(list):
|
||||||
|
|
||||||
provider = None
|
provider = None
|
||||||
@@ -197,7 +225,7 @@ class ReleaseDownloadList(list):
|
|||||||
'status': 'busy',
|
'status': 'busy',
|
||||||
'downloader': self.provider.getName(),
|
'downloader': self.provider.getName(),
|
||||||
'folder': '',
|
'folder': '',
|
||||||
'files': '',
|
'files': [],
|
||||||
}
|
}
|
||||||
|
|
||||||
return mergeDicts(defaults, result)
|
return mergeDicts(defaults, result)
|
||||||
76
couchpotato/core/_base/downloader/static/downloaders.js
Normal file
76
couchpotato/core/_base/downloader/static/downloaders.js
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
var DownloadersBase = new Class({
|
||||||
|
|
||||||
|
Implements: [Events],
|
||||||
|
|
||||||
|
initialize: function(){
|
||||||
|
var self = this;
|
||||||
|
|
||||||
|
// Add test buttons to settings page
|
||||||
|
App.addEvent('loadSettings', self.addTestButtons.bind(self));
|
||||||
|
|
||||||
|
},
|
||||||
|
|
||||||
|
// Downloaders setting tests
|
||||||
|
addTestButtons: function(){
|
||||||
|
var self = this;
|
||||||
|
|
||||||
|
var setting_page = App.getPage('Settings');
|
||||||
|
setting_page.addEvent('create', function(){
|
||||||
|
Object.each(setting_page.tabs.downloaders.groups, self.addTestButton.bind(self))
|
||||||
|
})
|
||||||
|
|
||||||
|
},
|
||||||
|
|
||||||
|
addTestButton: function(fieldset, plugin_name){
|
||||||
|
var self = this,
|
||||||
|
button_name = self.testButtonName(fieldset);
|
||||||
|
|
||||||
|
if(button_name.contains('Downloaders')) return;
|
||||||
|
|
||||||
|
new Element('.ctrlHolder.test_button').adopt(
|
||||||
|
new Element('a.button', {
|
||||||
|
'text': button_name,
|
||||||
|
'events': {
|
||||||
|
'click': function(){
|
||||||
|
var button = fieldset.getElement('.test_button .button');
|
||||||
|
button.set('text', 'Connecting...');
|
||||||
|
|
||||||
|
Api.request('download.'+plugin_name+'.test', {
|
||||||
|
'onComplete': function(json){
|
||||||
|
|
||||||
|
button.set('text', button_name);
|
||||||
|
|
||||||
|
var message;
|
||||||
|
if(json.success){
|
||||||
|
message = new Element('span.success', {
|
||||||
|
'text': 'Connection successful'
|
||||||
|
}).inject(button, 'after')
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
var msg_text = 'Connection failed. Check logs for details.';
|
||||||
|
if(json.hasOwnProperty('msg')) msg_text = json.msg;
|
||||||
|
message = new Element('span.failed', {
|
||||||
|
'text': msg_text
|
||||||
|
}).inject(button, 'after')
|
||||||
|
}
|
||||||
|
|
||||||
|
(function(){
|
||||||
|
message.destroy();
|
||||||
|
}).delay(3000)
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
).inject(fieldset);
|
||||||
|
|
||||||
|
},
|
||||||
|
|
||||||
|
testButtonName: function(fieldset){
|
||||||
|
var name = String(fieldset.getElement('h2').innerHTML).substring(0,String(fieldset.getElement('h2').innerHTML).indexOf("<span"));
|
||||||
|
return 'Test '+name;
|
||||||
|
}
|
||||||
|
|
||||||
|
});
|
||||||
|
|
||||||
|
var Downloaders = new DownloadersBase();
|
||||||
@@ -5,6 +5,8 @@ from couchpotato.core.plugins.base import Plugin
|
|||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Scheduler'
|
||||||
|
|
||||||
|
|
||||||
class Scheduler(Plugin):
|
class Scheduler(Plugin):
|
||||||
|
|
||||||
@@ -17,6 +19,7 @@ class Scheduler(Plugin):
|
|||||||
addEvent('schedule.cron', self.cron)
|
addEvent('schedule.cron', self.cron)
|
||||||
addEvent('schedule.interval', self.interval)
|
addEvent('schedule.interval', self.interval)
|
||||||
addEvent('schedule.remove', self.remove)
|
addEvent('schedule.remove', self.remove)
|
||||||
|
addEvent('schedule.queue', self.queue)
|
||||||
|
|
||||||
self.sched = Sched(misfire_grace_time = 60)
|
self.sched = Sched(misfire_grace_time = 60)
|
||||||
self.sched.start()
|
self.sched.start()
|
||||||
@@ -30,9 +33,9 @@ class Scheduler(Plugin):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def doShutdown(self):
|
def doShutdown(self, *args, **kwargs):
|
||||||
self.stop()
|
self.stop()
|
||||||
return super(Scheduler, self).doShutdown()
|
return super(Scheduler, self).doShutdown(*args, **kwargs)
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
if self.started:
|
if self.started:
|
||||||
@@ -64,3 +67,16 @@ class Scheduler(Plugin):
|
|||||||
'seconds': seconds,
|
'seconds': seconds,
|
||||||
'job': self.sched.add_interval_job(handle, hours = hours, minutes = minutes, seconds = seconds)
|
'job': self.sched.add_interval_job(handle, hours = hours, minutes = minutes, seconds = seconds)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def queue(self, handlers = None):
|
||||||
|
if not handlers: handlers = []
|
||||||
|
|
||||||
|
for h in handlers:
|
||||||
|
h()
|
||||||
|
|
||||||
|
if self.shuttingDown():
|
||||||
|
break
|
||||||
|
|
||||||
|
return True
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
from .main import Scheduler
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Scheduler()
|
|
||||||
|
|
||||||
config = []
|
|
||||||
@@ -1,8 +1,10 @@
|
|||||||
from .main import Updater
|
|
||||||
from couchpotato.environment import Env
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
def start():
|
from .main import Updater
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
|
||||||
|
|
||||||
|
def autoload():
|
||||||
return Updater()
|
return Updater()
|
||||||
|
|
||||||
config = [{
|
config = [{
|
||||||
|
|||||||
@@ -1,20 +1,25 @@
|
|||||||
from couchpotato.api import addApiView
|
|
||||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
|
||||||
from couchpotato.core.helpers.encoding import ss
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from couchpotato.core.plugins.base import Plugin
|
|
||||||
from couchpotato.environment import Env
|
|
||||||
from datetime import datetime
|
|
||||||
from dateutil.parser import parse
|
|
||||||
from git.repository import LocalRepository
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import tarfile
|
import tarfile
|
||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
import version
|
|
||||||
import zipfile
|
import zipfile
|
||||||
|
from datetime import datetime
|
||||||
|
from threading import RLock
|
||||||
|
|
||||||
|
from couchpotato.api import addApiView
|
||||||
|
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||||
|
from couchpotato.core.helpers.encoding import sp
|
||||||
|
from couchpotato.core.helpers.variable import removePyc
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.plugins.base import Plugin
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
from dateutil.parser import parse
|
||||||
|
from git.repository import LocalRepository
|
||||||
|
import version
|
||||||
|
from six.moves import filter
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
@@ -22,6 +27,7 @@ log = CPLog(__name__)
|
|||||||
class Updater(Plugin):
|
class Updater(Plugin):
|
||||||
|
|
||||||
available_notified = False
|
available_notified = False
|
||||||
|
_lock = RLock()
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
|
||||||
@@ -32,6 +38,7 @@ class Updater(Plugin):
|
|||||||
else:
|
else:
|
||||||
self.updater = SourceUpdater()
|
self.updater = SourceUpdater()
|
||||||
|
|
||||||
|
addEvent('app.load', self.logVersion, priority = 10000)
|
||||||
addEvent('app.load', self.setCrons)
|
addEvent('app.load', self.setCrons)
|
||||||
addEvent('updater.info', self.info)
|
addEvent('updater.info', self.info)
|
||||||
|
|
||||||
@@ -53,12 +60,16 @@ class Updater(Plugin):
|
|||||||
|
|
||||||
addEvent('setting.save.updater.enabled.after', self.setCrons)
|
addEvent('setting.save.updater.enabled.after', self.setCrons)
|
||||||
|
|
||||||
|
def logVersion(self):
|
||||||
|
info = self.info()
|
||||||
|
log.info('=== VERSION %s, using %s ===', (info.get('version', {}).get('repr', 'UNKNOWN'), self.updater.getName()))
|
||||||
|
|
||||||
def setCrons(self):
|
def setCrons(self):
|
||||||
|
|
||||||
fireEvent('schedule.remove', 'updater.check', single = True)
|
fireEvent('schedule.remove', 'updater.check', single = True)
|
||||||
if self.isEnabled():
|
if self.isEnabled():
|
||||||
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
|
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
|
||||||
self.autoUpdate() # Check after enabling
|
self.autoUpdate() # Check after enabling
|
||||||
|
|
||||||
def autoUpdate(self):
|
def autoUpdate(self):
|
||||||
if self.isEnabled() and self.check() and self.conf('automatic') and not self.updater.update_failed:
|
if self.isEnabled() and self.check() and self.conf('automatic') and not self.updater.update_failed:
|
||||||
@@ -94,7 +105,17 @@ class Updater(Plugin):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
def info(self, **kwargs):
|
def info(self, **kwargs):
|
||||||
return self.updater.info()
|
self._lock.acquire()
|
||||||
|
|
||||||
|
info = {}
|
||||||
|
try:
|
||||||
|
info = self.updater.info()
|
||||||
|
except:
|
||||||
|
log.error('Failed getting updater info: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
|
return info
|
||||||
|
|
||||||
def checkView(self, **kwargs):
|
def checkView(self, **kwargs):
|
||||||
return {
|
return {
|
||||||
@@ -121,6 +142,12 @@ class Updater(Plugin):
|
|||||||
'success': success
|
'success': success
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def doShutdown(self, *args, **kwargs):
|
||||||
|
if not Env.get('dev') and not Env.get('desktop'):
|
||||||
|
removePyc(Env.get('app_dir'), show_logs = False)
|
||||||
|
|
||||||
|
return super(Updater, self).doShutdown(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
class BaseUpdater(Plugin):
|
class BaseUpdater(Plugin):
|
||||||
|
|
||||||
@@ -138,42 +165,23 @@ class BaseUpdater(Plugin):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def info(self):
|
def info(self):
|
||||||
|
|
||||||
|
current_version = self.getVersion()
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'last_check': self.last_check,
|
'last_check': self.last_check,
|
||||||
'update_version': self.update_version,
|
'update_version': self.update_version,
|
||||||
'version': self.getVersion(),
|
'version': current_version,
|
||||||
'repo_name': '%s/%s' % (self.repo_user, self.repo_name),
|
'repo_name': '%s/%s' % (self.repo_user, self.repo_name),
|
||||||
'branch': self.branch,
|
'branch': current_version.get('branch', self.branch),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def getVersion(self):
|
||||||
|
pass
|
||||||
|
|
||||||
def check(self):
|
def check(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def deletePyc(self, only_excess = True):
|
|
||||||
|
|
||||||
for root, dirs, files in os.walk(ss(Env.get('app_dir'))):
|
|
||||||
|
|
||||||
pyc_files = filter(lambda filename: filename.endswith('.pyc'), files)
|
|
||||||
py_files = set(filter(lambda filename: filename.endswith('.py'), files))
|
|
||||||
excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
|
|
||||||
|
|
||||||
for excess_pyc_file in excess_pyc_files:
|
|
||||||
full_path = os.path.join(root, excess_pyc_file)
|
|
||||||
log.debug('Removing old PYC file: %s', full_path)
|
|
||||||
try:
|
|
||||||
os.remove(full_path)
|
|
||||||
except:
|
|
||||||
log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))
|
|
||||||
|
|
||||||
for dir_name in dirs:
|
|
||||||
full_path = os.path.join(root, dir_name)
|
|
||||||
if len(os.listdir(full_path)) == 0:
|
|
||||||
try:
|
|
||||||
os.rmdir(full_path)
|
|
||||||
except:
|
|
||||||
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class GitUpdater(BaseUpdater):
|
class GitUpdater(BaseUpdater):
|
||||||
|
|
||||||
@@ -186,9 +194,6 @@ class GitUpdater(BaseUpdater):
|
|||||||
log.info('Updating to latest version')
|
log.info('Updating to latest version')
|
||||||
self.repo.pull()
|
self.repo.pull()
|
||||||
|
|
||||||
# Delete leftover .pyc files
|
|
||||||
self.deletePyc()
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
log.error('Failed updating via GIT: %s', traceback.format_exc())
|
log.error('Failed updating via GIT: %s', traceback.format_exc())
|
||||||
@@ -200,17 +205,28 @@ class GitUpdater(BaseUpdater):
|
|||||||
def getVersion(self):
|
def getVersion(self):
|
||||||
|
|
||||||
if not self.version:
|
if not self.version:
|
||||||
|
|
||||||
|
hash = None
|
||||||
|
date = None
|
||||||
|
branch = self.branch
|
||||||
|
|
||||||
try:
|
try:
|
||||||
output = self.repo.getHead() # Yes, please
|
output = self.repo.getHead() # Yes, please
|
||||||
log.debug('Git version output: %s', output.hash)
|
log.debug('Git version output: %s', output.hash)
|
||||||
self.version = {
|
|
||||||
'hash': output.hash[:8],
|
hash = output.hash[:8]
|
||||||
'date': output.getDate(),
|
date = output.getDate()
|
||||||
'type': 'git',
|
branch = self.repo.getCurrentBranch().name
|
||||||
}
|
except Exception as e:
|
||||||
except Exception, e:
|
|
||||||
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
|
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
|
||||||
return 'No GIT'
|
|
||||||
|
self.version = {
|
||||||
|
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, branch, hash or 'unknown_hash', datetime.fromtimestamp(date) if date else 'unknown_date'),
|
||||||
|
'hash': hash,
|
||||||
|
'date': date,
|
||||||
|
'type': 'git',
|
||||||
|
'branch': branch
|
||||||
|
}
|
||||||
|
|
||||||
return self.version
|
return self.version
|
||||||
|
|
||||||
@@ -231,7 +247,7 @@ class GitUpdater(BaseUpdater):
|
|||||||
local = self.repo.getHead()
|
local = self.repo.getHead()
|
||||||
remote = branch.getHead()
|
remote = branch.getHead()
|
||||||
|
|
||||||
log.info('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
|
log.debug('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
|
||||||
|
|
||||||
if local.getDate() < remote.getDate():
|
if local.getDate() < remote.getDate():
|
||||||
self.update_version = {
|
self.update_version = {
|
||||||
@@ -244,7 +260,6 @@ class GitUpdater(BaseUpdater):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class SourceUpdater(BaseUpdater):
|
class SourceUpdater(BaseUpdater):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@@ -270,9 +285,9 @@ class SourceUpdater(BaseUpdater):
|
|||||||
|
|
||||||
# Extract
|
# Extract
|
||||||
if download_data.get('type') == 'zip':
|
if download_data.get('type') == 'zip':
|
||||||
zip = zipfile.ZipFile(destination)
|
zip_file = zipfile.ZipFile(destination)
|
||||||
zip.extractall(extracted_path)
|
zip_file.extractall(extracted_path)
|
||||||
zip.close()
|
zip_file.close()
|
||||||
else:
|
else:
|
||||||
tar = tarfile.open(destination)
|
tar = tarfile.open(destination)
|
||||||
tar.extractall(path = extracted_path)
|
tar.extractall(path = extracted_path)
|
||||||
@@ -294,11 +309,12 @@ class SourceUpdater(BaseUpdater):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
def replaceWith(self, path):
|
def replaceWith(self, path):
|
||||||
app_dir = ss(Env.get('app_dir'))
|
path = sp(path)
|
||||||
data_dir = ss(Env.get('data_dir'))
|
app_dir = Env.get('app_dir')
|
||||||
|
data_dir = Env.get('data_dir')
|
||||||
|
|
||||||
# Get list of files we want to overwrite
|
# Get list of files we want to overwrite
|
||||||
self.deletePyc()
|
removePyc(app_dir)
|
||||||
existing_files = []
|
existing_files = []
|
||||||
for root, subfiles, filenames in os.walk(app_dir):
|
for root, subfiles, filenames in os.walk(app_dir):
|
||||||
for filename in filenames:
|
for filename in filenames:
|
||||||
@@ -339,13 +355,12 @@ class SourceUpdater(BaseUpdater):
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def removeDir(self, path):
|
def removeDir(self, path):
|
||||||
try:
|
try:
|
||||||
if os.path.isdir(path):
|
if os.path.isdir(path):
|
||||||
shutil.rmtree(path)
|
shutil.rmtree(path)
|
||||||
except OSError, inst:
|
except OSError as inst:
|
||||||
os.chmod(inst.filename, 0777)
|
os.chmod(inst.filename, 0o777)
|
||||||
self.removeDir(path)
|
self.removeDir(path)
|
||||||
|
|
||||||
def getVersion(self):
|
def getVersion(self):
|
||||||
@@ -359,7 +374,8 @@ class SourceUpdater(BaseUpdater):
|
|||||||
log.debug('Source version output: %s', output)
|
log.debug('Source version output: %s', output)
|
||||||
self.version = output
|
self.version = output
|
||||||
self.version['type'] = 'source'
|
self.version['type'] = 'source'
|
||||||
except Exception, e:
|
self.version['repr'] = 'source:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.branch, output.get('hash', '')[:8], datetime.fromtimestamp(output.get('date', 0)))
|
||||||
|
except Exception as e:
|
||||||
log.error('Failed using source updater. %s', e)
|
log.error('Failed using source updater. %s', e)
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
@@ -389,7 +405,7 @@ class SourceUpdater(BaseUpdater):
|
|||||||
|
|
||||||
return {
|
return {
|
||||||
'hash': commit['sha'],
|
'hash': commit['sha'],
|
||||||
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
|
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
|
||||||
}
|
}
|
||||||
except:
|
except:
|
||||||
log.error('Failed getting latest request from github: %s', traceback.format_exc())
|
log.error('Failed getting latest request from github: %s', traceback.format_exc())
|
||||||
@@ -434,7 +450,7 @@ class DesktopUpdater(BaseUpdater):
|
|||||||
if latest and latest != current_version.get('hash'):
|
if latest and latest != current_version.get('hash'):
|
||||||
self.update_version = {
|
self.update_version = {
|
||||||
'hash': latest,
|
'hash': latest,
|
||||||
'date': None,
|
'date': None,
|
||||||
'changelog': self.desktop._changelogURL,
|
'changelog': self.desktop._changelogURL,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -446,6 +462,7 @@ class DesktopUpdater(BaseUpdater):
|
|||||||
|
|
||||||
def getVersion(self):
|
def getVersion(self):
|
||||||
return {
|
return {
|
||||||
|
'repr': 'desktop: %s' % self.desktop._esky.active_version,
|
||||||
'hash': self.desktop._esky.active_version,
|
'hash': self.desktop._esky.active_version,
|
||||||
'date': None,
|
'date': None,
|
||||||
'type': 'desktop',
|
'type': 'desktop',
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ var UpdaterBase = new Class({
|
|||||||
initialize: function(){
|
initialize: function(){
|
||||||
var self = this;
|
var self = this;
|
||||||
|
|
||||||
App.addEvent('load', self.info.bind(self, 2000))
|
App.addEvent('load', self.info.bind(self, 2000));
|
||||||
App.addEvent('unload', function(){
|
App.addEvent('unload', function(){
|
||||||
if(self.timer)
|
if(self.timer)
|
||||||
clearTimeout(self.timer);
|
clearTimeout(self.timer);
|
||||||
@@ -24,7 +24,7 @@ var UpdaterBase = new Class({
|
|||||||
self.doUpdate();
|
self.doUpdate();
|
||||||
else {
|
else {
|
||||||
App.unBlockPage();
|
App.unBlockPage();
|
||||||
App.on('message', 'No updates available');
|
App.trigger('message', ['No updates available']);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@@ -66,7 +66,7 @@ var UpdaterBase = new Class({
|
|||||||
|
|
||||||
var changelog = 'https://github.com/'+data.repo_name+'/compare/'+data.version.hash+'...'+data.branch;
|
var changelog = 'https://github.com/'+data.repo_name+'/compare/'+data.version.hash+'...'+data.branch;
|
||||||
if(data.update_version.changelog)
|
if(data.update_version.changelog)
|
||||||
changelog = data.update_version.changelog + '#' + data.version.hash+'...'+data.update_version.hash
|
changelog = data.update_version.changelog + '#' + data.version.hash+'...'+data.update_version.hash;
|
||||||
|
|
||||||
self.message = new Element('div.message.update').adopt(
|
self.message = new Element('div.message.update').adopt(
|
||||||
new Element('span', {
|
new Element('span', {
|
||||||
|
|||||||
639
couchpotato/core/database.py
Normal file
639
couchpotato/core/database.py
Normal file
@@ -0,0 +1,639 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
from sqlite3 import OperationalError
|
||||||
|
|
||||||
|
from CodernityDB.database import RecordNotFound
|
||||||
|
from CodernityDB.index import IndexException, IndexNotFoundException, IndexConflict
|
||||||
|
from couchpotato import CPLog
|
||||||
|
from couchpotato.api import addApiView
|
||||||
|
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode, sp
|
||||||
|
from couchpotato.core.helpers.variable import getImdb, tryInt, randomString
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Database(object):
|
||||||
|
|
||||||
|
indexes = None
|
||||||
|
db = None
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
|
||||||
|
self.indexes = {}
|
||||||
|
|
||||||
|
addApiView('database.list_documents', self.listDocuments)
|
||||||
|
addApiView('database.reindex', self.reindex)
|
||||||
|
addApiView('database.compact', self.compact)
|
||||||
|
addApiView('database.document.update', self.updateDocument)
|
||||||
|
addApiView('database.document.delete', self.deleteDocument)
|
||||||
|
|
||||||
|
addEvent('database.setup.after', self.startup_compact)
|
||||||
|
addEvent('database.setup_index', self.setupIndex)
|
||||||
|
addEvent('database.delete_corrupted', self.deleteCorrupted)
|
||||||
|
|
||||||
|
addEvent('app.migrate', self.migrate)
|
||||||
|
addEvent('app.after_shutdown', self.close)
|
||||||
|
|
||||||
|
def getDB(self):
|
||||||
|
|
||||||
|
if not self.db:
|
||||||
|
from couchpotato import get_db
|
||||||
|
self.db = get_db()
|
||||||
|
|
||||||
|
return self.db
|
||||||
|
|
||||||
|
def close(self, **kwargs):
|
||||||
|
self.getDB().close()
|
||||||
|
|
||||||
|
def setupIndex(self, index_name, klass):
|
||||||
|
|
||||||
|
self.indexes[index_name] = klass
|
||||||
|
|
||||||
|
db = self.getDB()
|
||||||
|
|
||||||
|
# Category index
|
||||||
|
index_instance = klass(db.path, index_name)
|
||||||
|
try:
|
||||||
|
|
||||||
|
# Make sure store and bucket don't exist
|
||||||
|
exists = []
|
||||||
|
for x in ['buck', 'stor']:
|
||||||
|
full_path = os.path.join(db.path, '%s_%s' % (index_name, x))
|
||||||
|
if os.path.exists(full_path):
|
||||||
|
exists.append(full_path)
|
||||||
|
|
||||||
|
if index_name not in db.indexes_names:
|
||||||
|
|
||||||
|
# Remove existing buckets if index isn't there
|
||||||
|
for x in exists:
|
||||||
|
os.unlink(x)
|
||||||
|
|
||||||
|
# Add index (will restore buckets)
|
||||||
|
db.add_index(index_instance)
|
||||||
|
db.reindex_index(index_name)
|
||||||
|
else:
|
||||||
|
# Previous info
|
||||||
|
previous = db.indexes_names[index_name]
|
||||||
|
previous_version = previous._version
|
||||||
|
current_version = klass._version
|
||||||
|
|
||||||
|
# Only edit index if versions are different
|
||||||
|
if previous_version < current_version:
|
||||||
|
log.debug('Index "%s" already exists, updating and reindexing', index_name)
|
||||||
|
db.destroy_index(previous)
|
||||||
|
db.add_index(index_instance)
|
||||||
|
db.reindex_index(index_name)
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed adding index %s: %s', (index_name, traceback.format_exc()))
|
||||||
|
|
||||||
|
def deleteDocument(self, **kwargs):
|
||||||
|
|
||||||
|
db = self.getDB()
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
document_id = kwargs.get('_request').get_argument('id')
|
||||||
|
document = db.get('id', document_id)
|
||||||
|
db.delete(document)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'success': True
|
||||||
|
}
|
||||||
|
except:
|
||||||
|
return {
|
||||||
|
'success': False,
|
||||||
|
'error': traceback.format_exc()
|
||||||
|
}
|
||||||
|
|
||||||
|
def updateDocument(self, **kwargs):
|
||||||
|
|
||||||
|
db = self.getDB()
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
document = json.loads(kwargs.get('_request').get_argument('document'))
|
||||||
|
d = db.update(document)
|
||||||
|
document.update(d)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'success': True,
|
||||||
|
'document': document
|
||||||
|
}
|
||||||
|
except:
|
||||||
|
return {
|
||||||
|
'success': False,
|
||||||
|
'error': traceback.format_exc()
|
||||||
|
}
|
||||||
|
|
||||||
|
def listDocuments(self, **kwargs):
|
||||||
|
db = self.getDB()
|
||||||
|
|
||||||
|
results = {
|
||||||
|
'unknown': []
|
||||||
|
}
|
||||||
|
|
||||||
|
for document in db.all('id'):
|
||||||
|
key = document.get('_t', 'unknown')
|
||||||
|
|
||||||
|
if kwargs.get('show') and key != kwargs.get('show'):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not results.get(key):
|
||||||
|
results[key] = []
|
||||||
|
results[key].append(document)
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def deleteCorrupted(self, _id, traceback_error = ''):
|
||||||
|
|
||||||
|
db = self.getDB()
|
||||||
|
|
||||||
|
try:
|
||||||
|
log.debug('Deleted corrupted document "%s": %s', (_id, traceback_error))
|
||||||
|
corrupted = db.get('id', _id, with_storage = False)
|
||||||
|
db._delete_id_index(corrupted.get('_id'), corrupted.get('_rev'), None)
|
||||||
|
except:
|
||||||
|
log.debug('Failed deleting corrupted: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
def reindex(self, **kwargs):
|
||||||
|
|
||||||
|
success = True
|
||||||
|
try:
|
||||||
|
db = self.getDB()
|
||||||
|
db.reindex()
|
||||||
|
except:
|
||||||
|
log.error('Failed index: %s', traceback.format_exc())
|
||||||
|
success = False
|
||||||
|
|
||||||
|
return {
|
||||||
|
'success': success
|
||||||
|
}
|
||||||
|
|
||||||
|
def compact(self, try_repair = True, **kwargs):
|
||||||
|
|
||||||
|
success = False
|
||||||
|
db = self.getDB()
|
||||||
|
|
||||||
|
# Removing left over compact files
|
||||||
|
db_path = sp(db.path)
|
||||||
|
for f in os.listdir(sp(db.path)):
|
||||||
|
for x in ['_compact_buck', '_compact_stor']:
|
||||||
|
if f[-len(x):] == x:
|
||||||
|
os.unlink(os.path.join(db_path, f))
|
||||||
|
|
||||||
|
try:
|
||||||
|
start = time.time()
|
||||||
|
size = float(db.get_db_details().get('size', 0))
|
||||||
|
log.debug('Compacting database, current size: %sMB', round(size/1048576, 2))
|
||||||
|
|
||||||
|
db.compact()
|
||||||
|
new_size = float(db.get_db_details().get('size', 0))
|
||||||
|
log.debug('Done compacting database in %ss, new size: %sMB, saved: %sMB', (round(time.time()-start, 2), round(new_size/1048576, 2), round((size-new_size)/1048576, 2)))
|
||||||
|
success = True
|
||||||
|
except (IndexException, AttributeError):
|
||||||
|
if try_repair:
|
||||||
|
log.error('Something wrong with indexes, trying repair')
|
||||||
|
|
||||||
|
# Remove all indexes
|
||||||
|
old_indexes = self.indexes.keys()
|
||||||
|
for index_name in old_indexes:
|
||||||
|
try:
|
||||||
|
db.destroy_index(index_name)
|
||||||
|
except IndexNotFoundException:
|
||||||
|
pass
|
||||||
|
except:
|
||||||
|
log.error('Failed removing old index %s', index_name)
|
||||||
|
|
||||||
|
# Add them again
|
||||||
|
for index_name in self.indexes:
|
||||||
|
klass = self.indexes[index_name]
|
||||||
|
|
||||||
|
# Category index
|
||||||
|
index_instance = klass(db.path, index_name)
|
||||||
|
try:
|
||||||
|
db.add_index(index_instance)
|
||||||
|
db.reindex_index(index_name)
|
||||||
|
except IndexConflict:
|
||||||
|
pass
|
||||||
|
except:
|
||||||
|
log.error('Failed adding index %s', index_name)
|
||||||
|
raise
|
||||||
|
|
||||||
|
self.compact(try_repair = False)
|
||||||
|
else:
|
||||||
|
log.error('Failed compact: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed compact: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
return {
|
||||||
|
'success': success
|
||||||
|
}
|
||||||
|
|
||||||
|
# Compact on start
|
||||||
|
def startup_compact(self):
|
||||||
|
from couchpotato import Env
|
||||||
|
|
||||||
|
db = self.getDB()
|
||||||
|
|
||||||
|
# Try fix for migration failures on desktop
|
||||||
|
if Env.get('desktop'):
|
||||||
|
try:
|
||||||
|
list(db.all('profile', with_doc = True))
|
||||||
|
except RecordNotFound:
|
||||||
|
|
||||||
|
failed_location = '%s_failed' % db.path
|
||||||
|
old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db.old')
|
||||||
|
|
||||||
|
if not os.path.isdir(failed_location) and os.path.isfile(old_db):
|
||||||
|
log.error('Corrupt database, trying migrate again')
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
# Rename database folder
|
||||||
|
os.rename(db.path, '%s_failed' % db.path)
|
||||||
|
|
||||||
|
# Rename .old database to try another migrate
|
||||||
|
os.rename(old_db, old_db[:-4])
|
||||||
|
|
||||||
|
fireEventAsync('app.restart')
|
||||||
|
else:
|
||||||
|
log.error('Migration failed and couldn\'t recover database. Please report on GitHub, with this message.')
|
||||||
|
db.reindex()
|
||||||
|
|
||||||
|
return
|
||||||
|
|
||||||
|
# Check size and compact if needed
|
||||||
|
size = db.get_db_details().get('size')
|
||||||
|
prop_name = 'last_db_compact'
|
||||||
|
last_check = int(Env.prop(prop_name, default = 0))
|
||||||
|
|
||||||
|
if size > 26214400 and last_check < time.time()-604800: # 25MB / 7 days
|
||||||
|
self.compact()
|
||||||
|
Env.prop(prop_name, value = int(time.time()))
|
||||||
|
|
||||||
|
def migrate(self):
|
||||||
|
|
||||||
|
from couchpotato import Env
|
||||||
|
old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db')
|
||||||
|
if not os.path.isfile(old_db): return
|
||||||
|
|
||||||
|
log.info('=' * 30)
|
||||||
|
log.info('Migrating database, hold on..')
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
if os.path.isfile(old_db):
|
||||||
|
|
||||||
|
migrate_start = time.time()
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
conn = sqlite3.connect(old_db)
|
||||||
|
|
||||||
|
migrate_list = {
|
||||||
|
'category': ['id', 'label', 'order', 'required', 'preferred', 'ignored', 'destination'],
|
||||||
|
'profile': ['id', 'label', 'order', 'core', 'hide'],
|
||||||
|
'profiletype': ['id', 'order', 'finish', 'wait_for', 'quality_id', 'profile_id'],
|
||||||
|
'quality': ['id', 'identifier', 'order', 'size_min', 'size_max'],
|
||||||
|
'movie': ['id', 'last_edit', 'library_id', 'status_id', 'profile_id', 'category_id'],
|
||||||
|
'library': ['id', 'identifier', 'info'],
|
||||||
|
'librarytitle': ['id', 'title', 'default', 'libraries_id'],
|
||||||
|
'library_files__file_library': ['library_id', 'file_id'],
|
||||||
|
'release': ['id', 'identifier', 'movie_id', 'status_id', 'quality_id', 'last_edit'],
|
||||||
|
'releaseinfo': ['id', 'identifier', 'value', 'release_id'],
|
||||||
|
'release_files__file_release': ['release_id', 'file_id'],
|
||||||
|
'status': ['id', 'identifier'],
|
||||||
|
'properties': ['id', 'identifier', 'value'],
|
||||||
|
'file': ['id', 'path', 'type_id'],
|
||||||
|
'filetype': ['identifier', 'id']
|
||||||
|
}
|
||||||
|
|
||||||
|
migrate_data = {}
|
||||||
|
rename_old = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
c = conn.cursor()
|
||||||
|
|
||||||
|
for ml in migrate_list:
|
||||||
|
migrate_data[ml] = {}
|
||||||
|
rows = migrate_list[ml]
|
||||||
|
|
||||||
|
try:
|
||||||
|
c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
|
||||||
|
except:
|
||||||
|
# ignore faulty destination_id database
|
||||||
|
if ml == 'category':
|
||||||
|
migrate_data[ml] = {}
|
||||||
|
else:
|
||||||
|
rename_old = True
|
||||||
|
raise
|
||||||
|
|
||||||
|
for p in c.fetchall():
|
||||||
|
columns = {}
|
||||||
|
for row in migrate_list[ml]:
|
||||||
|
columns[row] = p[rows.index(row)]
|
||||||
|
|
||||||
|
if not migrate_data[ml].get(p[0]):
|
||||||
|
migrate_data[ml][p[0]] = columns
|
||||||
|
else:
|
||||||
|
if not isinstance(migrate_data[ml][p[0]], list):
|
||||||
|
migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
|
||||||
|
migrate_data[ml][p[0]].append(columns)
|
||||||
|
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
log.info('Getting data took %s', time.time() - migrate_start)
|
||||||
|
|
||||||
|
db = self.getDB()
|
||||||
|
if not db.opened:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Use properties
|
||||||
|
properties = migrate_data['properties']
|
||||||
|
log.info('Importing %s properties', len(properties))
|
||||||
|
for x in properties:
|
||||||
|
property = properties[x]
|
||||||
|
Env.prop(property.get('identifier'), property.get('value'))
|
||||||
|
|
||||||
|
# Categories
|
||||||
|
categories = migrate_data.get('category', [])
|
||||||
|
log.info('Importing %s categories', len(categories))
|
||||||
|
category_link = {}
|
||||||
|
for x in categories:
|
||||||
|
c = categories[x]
|
||||||
|
|
||||||
|
new_c = db.insert({
|
||||||
|
'_t': 'category',
|
||||||
|
'order': c.get('order', 999),
|
||||||
|
'label': toUnicode(c.get('label', '')),
|
||||||
|
'ignored': toUnicode(c.get('ignored', '')),
|
||||||
|
'preferred': toUnicode(c.get('preferred', '')),
|
||||||
|
'required': toUnicode(c.get('required', '')),
|
||||||
|
'destination': toUnicode(c.get('destination', '')),
|
||||||
|
})
|
||||||
|
|
||||||
|
category_link[x] = new_c.get('_id')
|
||||||
|
|
||||||
|
# Profiles
|
||||||
|
log.info('Importing profiles')
|
||||||
|
new_profiles = db.all('profile', with_doc = True)
|
||||||
|
new_profiles_by_label = {}
|
||||||
|
for x in new_profiles:
|
||||||
|
|
||||||
|
# Remove default non core profiles
|
||||||
|
if not x['doc'].get('core'):
|
||||||
|
db.delete(x['doc'])
|
||||||
|
else:
|
||||||
|
new_profiles_by_label[x['doc']['label']] = x['_id']
|
||||||
|
|
||||||
|
profiles = migrate_data['profile']
|
||||||
|
profile_link = {}
|
||||||
|
for x in profiles:
|
||||||
|
p = profiles[x]
|
||||||
|
|
||||||
|
exists = new_profiles_by_label.get(p.get('label'))
|
||||||
|
|
||||||
|
# Update existing with order only
|
||||||
|
if exists and p.get('core'):
|
||||||
|
profile = db.get('id', exists)
|
||||||
|
profile['order'] = tryInt(p.get('order'))
|
||||||
|
profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
|
||||||
|
db.update(profile)
|
||||||
|
|
||||||
|
profile_link[x] = profile.get('_id')
|
||||||
|
else:
|
||||||
|
|
||||||
|
new_profile = {
|
||||||
|
'_t': 'profile',
|
||||||
|
'label': p.get('label'),
|
||||||
|
'order': int(p.get('order', 999)),
|
||||||
|
'core': p.get('core', False),
|
||||||
|
'qualities': [],
|
||||||
|
'wait_for': [],
|
||||||
|
'finish': []
|
||||||
|
}
|
||||||
|
|
||||||
|
types = migrate_data['profiletype']
|
||||||
|
for profile_type in types:
|
||||||
|
p_type = types[profile_type]
|
||||||
|
if types[profile_type]['profile_id'] == p['id']:
|
||||||
|
if p_type['quality_id']:
|
||||||
|
new_profile['finish'].append(p_type['finish'])
|
||||||
|
new_profile['wait_for'].append(p_type['wait_for'])
|
||||||
|
new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])
|
||||||
|
|
||||||
|
if len(new_profile['qualities']) > 0:
|
||||||
|
new_profile.update(db.insert(new_profile))
|
||||||
|
profile_link[x] = new_profile.get('_id')
|
||||||
|
else:
|
||||||
|
log.error('Corrupt profile list for "%s", using default.', p.get('label'))
|
||||||
|
|
||||||
|
# Qualities
|
||||||
|
log.info('Importing quality sizes')
|
||||||
|
new_qualities = db.all('quality', with_doc = True)
|
||||||
|
new_qualities_by_identifier = {}
|
||||||
|
for x in new_qualities:
|
||||||
|
new_qualities_by_identifier[x['doc']['identifier']] = x['_id']
|
||||||
|
|
||||||
|
qualities = migrate_data['quality']
|
||||||
|
quality_link = {}
|
||||||
|
for x in qualities:
|
||||||
|
q = qualities[x]
|
||||||
|
q_id = new_qualities_by_identifier[q.get('identifier')]
|
||||||
|
|
||||||
|
quality = db.get('id', q_id)
|
||||||
|
quality['order'] = q.get('order')
|
||||||
|
quality['size_min'] = tryInt(q.get('size_min'))
|
||||||
|
quality['size_max'] = tryInt(q.get('size_max'))
|
||||||
|
db.update(quality)
|
||||||
|
|
||||||
|
quality_link[x] = quality
|
||||||
|
|
||||||
|
# Titles
|
||||||
|
titles = migrate_data['librarytitle']
|
||||||
|
titles_by_library = {}
|
||||||
|
for x in titles:
|
||||||
|
title = titles[x]
|
||||||
|
if title.get('default'):
|
||||||
|
titles_by_library[title.get('libraries_id')] = title.get('title')
|
||||||
|
|
||||||
|
# Releases
|
||||||
|
releaseinfos = migrate_data['releaseinfo']
|
||||||
|
for x in releaseinfos:
|
||||||
|
info = releaseinfos[x]
|
||||||
|
|
||||||
|
# Skip if release doesn't exist for this info
|
||||||
|
if not migrate_data['release'].get(info.get('release_id')):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not migrate_data['release'][info.get('release_id')].get('info'):
|
||||||
|
migrate_data['release'][info.get('release_id')]['info'] = {}
|
||||||
|
|
||||||
|
migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')
|
||||||
|
|
||||||
|
releases = migrate_data['release']
|
||||||
|
releases_by_media = {}
|
||||||
|
for x in releases:
|
||||||
|
release = releases[x]
|
||||||
|
if not releases_by_media.get(release.get('movie_id')):
|
||||||
|
releases_by_media[release.get('movie_id')] = []
|
||||||
|
|
||||||
|
releases_by_media[release.get('movie_id')].append(release)
|
||||||
|
|
||||||
|
# Type ids
|
||||||
|
types = migrate_data['filetype']
|
||||||
|
type_by_id = {}
|
||||||
|
for t in types:
|
||||||
|
type = types[t]
|
||||||
|
type_by_id[type.get('id')] = type
|
||||||
|
|
||||||
|
# Media
|
||||||
|
log.info('Importing %s media items', len(migrate_data['movie']))
|
||||||
|
statuses = migrate_data['status']
|
||||||
|
libraries = migrate_data['library']
|
||||||
|
library_files = migrate_data['library_files__file_library']
|
||||||
|
releases_files = migrate_data['release_files__file_release']
|
||||||
|
all_files = migrate_data['file']
|
||||||
|
poster_type = migrate_data['filetype']['poster']
|
||||||
|
medias = migrate_data['movie']
|
||||||
|
for x in medias:
|
||||||
|
m = medias[x]
|
||||||
|
|
||||||
|
status = statuses.get(m['status_id']).get('identifier')
|
||||||
|
l = libraries.get(m['library_id'])
|
||||||
|
|
||||||
|
# Only migrate wanted movies, Skip if no identifier present
|
||||||
|
if not l or not getImdb(l.get('identifier')): continue
|
||||||
|
|
||||||
|
profile_id = profile_link.get(m['profile_id'])
|
||||||
|
category_id = category_link.get(m['category_id'])
|
||||||
|
title = titles_by_library.get(m['library_id'])
|
||||||
|
releases = releases_by_media.get(x, [])
|
||||||
|
info = json.loads(l.get('info', ''))
|
||||||
|
|
||||||
|
files = library_files.get(m['library_id'], [])
|
||||||
|
if not isinstance(files, list):
|
||||||
|
files = [files]
|
||||||
|
|
||||||
|
added_media = fireEvent('movie.add', {
|
||||||
|
'info': info,
|
||||||
|
'identifier': l.get('identifier'),
|
||||||
|
'profile_id': profile_id,
|
||||||
|
'category_id': category_id,
|
||||||
|
'title': title
|
||||||
|
}, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)
|
||||||
|
|
||||||
|
if not added_media:
|
||||||
|
log.error('Failed adding media %s: %s', (l.get('identifier'), info))
|
||||||
|
continue
|
||||||
|
|
||||||
|
added_media['files'] = added_media.get('files', {})
|
||||||
|
for f in files:
|
||||||
|
ffile = all_files[f.get('file_id')]
|
||||||
|
|
||||||
|
# Only migrate posters
|
||||||
|
if ffile.get('type_id') == poster_type.get('id'):
|
||||||
|
if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
|
||||||
|
added_media['files']['image_poster'] = [ffile.get('path')]
|
||||||
|
break
|
||||||
|
|
||||||
|
if 'image_poster' in added_media['files']:
|
||||||
|
db.update(added_media)
|
||||||
|
|
||||||
|
for rel in releases:
|
||||||
|
|
||||||
|
empty_info = False
|
||||||
|
if not rel.get('info'):
|
||||||
|
empty_info = True
|
||||||
|
rel['info'] = {}
|
||||||
|
|
||||||
|
quality = quality_link.get(rel.get('quality_id'))
|
||||||
|
if not quality:
|
||||||
|
continue
|
||||||
|
|
||||||
|
release_status = statuses.get(rel.get('status_id')).get('identifier')
|
||||||
|
|
||||||
|
if rel['info'].get('download_id'):
|
||||||
|
status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
|
||||||
|
rel['info']['download_info'] = {
|
||||||
|
'id': rel['info'].get('download_id'),
|
||||||
|
'downloader': rel['info'].get('download_downloader'),
|
||||||
|
'status_support': status_support,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add status to keys
|
||||||
|
rel['info']['status'] = release_status
|
||||||
|
if not empty_info:
|
||||||
|
fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
|
||||||
|
else:
|
||||||
|
release = {
|
||||||
|
'_t': 'release',
|
||||||
|
'identifier': rel.get('identifier'),
|
||||||
|
'media_id': added_media.get('_id'),
|
||||||
|
'quality': quality.get('identifier'),
|
||||||
|
'status': release_status,
|
||||||
|
'last_edit': int(time.time()),
|
||||||
|
'files': {}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add downloader info if provided
|
||||||
|
try:
|
||||||
|
release['download_info'] = rel['info']['download_info']
|
||||||
|
del rel['download_info']
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Add files
|
||||||
|
release_files = releases_files.get(rel.get('id'), [])
|
||||||
|
if not isinstance(release_files, list):
|
||||||
|
release_files = [release_files]
|
||||||
|
|
||||||
|
if len(release_files) == 0:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for f in release_files:
|
||||||
|
rfile = all_files.get(f.get('file_id'))
|
||||||
|
if not rfile:
|
||||||
|
continue
|
||||||
|
|
||||||
|
file_type = type_by_id.get(rfile.get('type_id')).get('identifier')
|
||||||
|
|
||||||
|
if not release['files'].get(file_type):
|
||||||
|
release['files'][file_type] = []
|
||||||
|
|
||||||
|
release['files'][file_type].append(rfile.get('path'))
|
||||||
|
|
||||||
|
try:
|
||||||
|
rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
|
||||||
|
rls.update(release)
|
||||||
|
db.update(rls)
|
||||||
|
except:
|
||||||
|
db.insert(release)
|
||||||
|
|
||||||
|
log.info('Total migration took %s', time.time() - migrate_start)
|
||||||
|
log.info('=' * 30)
|
||||||
|
|
||||||
|
rename_old = True
|
||||||
|
|
||||||
|
except OperationalError:
|
||||||
|
log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
rename_old = True
|
||||||
|
except:
|
||||||
|
log.error('Migration failed: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
|
||||||
|
# rename old database
|
||||||
|
if rename_old:
|
||||||
|
random = randomString()
|
||||||
|
log.info('Renaming old database to %s ', '%s.%s_old' % (old_db, random))
|
||||||
|
os.rename(old_db, '%s.%s_old' % (old_db, random))
|
||||||
|
|
||||||
|
if os.path.isfile(old_db + '-wal'):
|
||||||
|
os.rename(old_db + '-wal', '%s-wal.%s_old' % (old_db, random))
|
||||||
|
if os.path.isfile(old_db + '-shm'):
|
||||||
|
os.rename(old_db + '-shm', '%s-shm.%s_old' % (old_db, random))
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
config = [{
|
|
||||||
'name': 'download_providers',
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'label': 'Downloaders',
|
|
||||||
'description': 'You can select different downloaders for each type (usenet / torrent)',
|
|
||||||
'type': 'list',
|
|
||||||
'name': 'download_providers',
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'options': [],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
|
|||||||
194
couchpotato/core/downloaders/blackhole.py
Normal file
194
couchpotato/core/downloaders/blackhole.py
Normal file
@@ -0,0 +1,194 @@
|
|||||||
|
from __future__ import with_statement
|
||||||
|
import os
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||||
|
from couchpotato.core.helpers.encoding import sp
|
||||||
|
from couchpotato.core.helpers.variable import getDownloadDir
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Blackhole'
|
||||||
|
|
||||||
|
|
||||||
|
class Blackhole(DownloaderBase):
|
||||||
|
|
||||||
|
protocol = ['nzb', 'torrent', 'torrent_magnet']
|
||||||
|
status_support = False
|
||||||
|
|
||||||
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
""" Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not media: media = {}
|
||||||
|
if not data: data = {}
|
||||||
|
|
||||||
|
directory = self.conf('directory')
|
||||||
|
|
||||||
|
# The folder needs to exist
|
||||||
|
if not directory or not os.path.isdir(directory):
|
||||||
|
log.error('No directory set for blackhole %s download.', data.get('protocol'))
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
# Filedata can be empty, which probably means it a magnet link
|
||||||
|
if not filedata or len(filedata) < 50:
|
||||||
|
try:
|
||||||
|
if data.get('protocol') == 'torrent_magnet':
|
||||||
|
filedata = self.magnetToTorrent(data.get('url'))
|
||||||
|
data['protocol'] = 'torrent'
|
||||||
|
except:
|
||||||
|
log.error('Failed download torrent via magnet url: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
# If it's still empty, don't know what to do!
|
||||||
|
if not filedata or len(filedata) < 50:
|
||||||
|
log.error('No nzb/torrent available: %s', data.get('url'))
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Create filename with imdb id and other nice stuff
|
||||||
|
file_name = self.createFileName(data, filedata, media)
|
||||||
|
full_path = os.path.join(directory, file_name)
|
||||||
|
|
||||||
|
# People want thinks nice and tidy, create a subdir
|
||||||
|
if self.conf('create_subdir'):
|
||||||
|
try:
|
||||||
|
new_path = os.path.splitext(full_path)[0]
|
||||||
|
if not os.path.exists(new_path):
|
||||||
|
os.makedirs(new_path)
|
||||||
|
full_path = os.path.join(new_path, file_name)
|
||||||
|
except:
|
||||||
|
log.error('Couldnt create sub dir, reverting to old one: %s', full_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
|
||||||
|
# Make sure the file doesn't exist yet, no need in overwriting it
|
||||||
|
if not os.path.isfile(full_path):
|
||||||
|
log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
|
||||||
|
with open(full_path, 'wb') as f:
|
||||||
|
f.write(filedata)
|
||||||
|
os.chmod(full_path, Env.getPermission('file'))
|
||||||
|
return self.downloadReturnId('')
|
||||||
|
else:
|
||||||
|
log.info('File %s already exists.', full_path)
|
||||||
|
return self.downloadReturnId('')
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed to download to blackhole %s', traceback.format_exc())
|
||||||
|
pass
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
|
||||||
|
return False
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Test and see if the directory is writable
|
||||||
|
:return: boolean
|
||||||
|
"""
|
||||||
|
|
||||||
|
directory = self.conf('directory')
|
||||||
|
if directory and os.path.isdir(directory):
|
||||||
|
|
||||||
|
test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))
|
||||||
|
|
||||||
|
# Check if folder is writable
|
||||||
|
self.createFile(test_file, 'This is a test file')
|
||||||
|
if os.path.isfile(test_file):
|
||||||
|
os.remove(test_file)
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def getEnabledProtocol(self):
|
||||||
|
""" What protocols is this downloaded used for
|
||||||
|
:return: list with protocols
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.conf('use_for') == 'both':
|
||||||
|
return super(Blackhole, self).getEnabledProtocol()
|
||||||
|
elif self.conf('use_for') == 'torrent':
|
||||||
|
return ['torrent', 'torrent_magnet']
|
||||||
|
else:
|
||||||
|
return ['nzb']
|
||||||
|
|
||||||
|
def isEnabled(self, manual = False, data = None):
|
||||||
|
""" Check if protocol is used (and enabled)
|
||||||
|
:param manual: The user has clicked to download a link through the webUI
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:return: boolean
|
||||||
|
"""
|
||||||
|
if not data: data = {}
|
||||||
|
for_protocol = ['both']
|
||||||
|
if data and 'torrent' in data.get('protocol'):
|
||||||
|
for_protocol.append('torrent')
|
||||||
|
elif data:
|
||||||
|
for_protocol.append(data.get('protocol'))
|
||||||
|
|
||||||
|
return super(Blackhole, self).isEnabled(manual, data) and \
|
||||||
|
((self.conf('use_for') in for_protocol))
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'blackhole',
|
||||||
|
'order': 30,
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'blackhole',
|
||||||
|
'label': 'Black hole',
|
||||||
|
'description': 'Download the NZB/Torrent to a specific folder. <em>Note: Seeding and copying/linking features do <strong>not</strong> work with Black hole</em>.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': True,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'nzb,torrent',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'directory',
|
||||||
|
'type': 'directory',
|
||||||
|
'description': 'Directory where the .nzb (or .torrent) file is saved to.',
|
||||||
|
'default': getDownloadDir()
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'use_for',
|
||||||
|
'label': 'Use for',
|
||||||
|
'default': 'both',
|
||||||
|
'type': 'dropdown',
|
||||||
|
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'create_subdir',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Create a sub directory when saving the .nzb (or .torrent).',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'manual',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
from .main import Blackhole
|
|
||||||
from couchpotato.core.helpers.variable import getDownloadDir
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Blackhole()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'blackhole',
|
|
||||||
'order': 30,
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'list': 'download_providers',
|
|
||||||
'name': 'blackhole',
|
|
||||||
'label': 'Black hole',
|
|
||||||
'description': 'Download the NZB/Torrent to a specific folder. <em>Note: Seeding and copying/linking features do <strong>not</strong> work with Black hole</em>.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'enabled',
|
|
||||||
'default': True,
|
|
||||||
'type': 'enabler',
|
|
||||||
'radio_group': 'nzb,torrent',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'directory',
|
|
||||||
'type': 'directory',
|
|
||||||
'description': 'Directory where the .nzb (or .torrent) file is saved to.',
|
|
||||||
'default': getDownloadDir()
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'use_for',
|
|
||||||
'label': 'Use for',
|
|
||||||
'default': 'both',
|
|
||||||
'type': 'dropdown',
|
|
||||||
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'create_subdir',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Create a sub directory when saving the .nzb (or .torrent).',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'manual',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
@@ -1,87 +0,0 @@
|
|||||||
from __future__ import with_statement
|
|
||||||
from couchpotato.core.downloaders.base import Downloader
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from couchpotato.environment import Env
|
|
||||||
import os
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Blackhole(Downloader):
|
|
||||||
|
|
||||||
protocol = ['nzb', 'torrent', 'torrent_magnet']
|
|
||||||
status_support = False
|
|
||||||
|
|
||||||
def download(self, data = None, media = None, filedata = None):
|
|
||||||
if not media: media = {}
|
|
||||||
if not data: data = {}
|
|
||||||
|
|
||||||
directory = self.conf('directory')
|
|
||||||
if not directory or not os.path.isdir(directory):
|
|
||||||
log.error('No directory set for blackhole %s download.', data.get('protocol'))
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
if not filedata or len(filedata) < 50:
|
|
||||||
try:
|
|
||||||
if data.get('protocol') == 'torrent_magnet':
|
|
||||||
filedata = self.magnetToTorrent(data.get('url'))
|
|
||||||
data['protocol'] = 'torrent'
|
|
||||||
except:
|
|
||||||
log.error('Failed download torrent via magnet url: %s', traceback.format_exc())
|
|
||||||
|
|
||||||
if not filedata or len(filedata) < 50:
|
|
||||||
log.error('No nzb/torrent available: %s', data.get('url'))
|
|
||||||
return False
|
|
||||||
|
|
||||||
file_name = self.createFileName(data, filedata, media)
|
|
||||||
full_path = os.path.join(directory, file_name)
|
|
||||||
|
|
||||||
if self.conf('create_subdir'):
|
|
||||||
try:
|
|
||||||
new_path = os.path.splitext(full_path)[0]
|
|
||||||
if not os.path.exists(new_path):
|
|
||||||
os.makedirs(new_path)
|
|
||||||
full_path = os.path.join(new_path, file_name)
|
|
||||||
except:
|
|
||||||
log.error('Couldnt create sub dir, reverting to old one: %s', full_path)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if not os.path.isfile(full_path):
|
|
||||||
log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
|
|
||||||
with open(full_path, 'wb') as f:
|
|
||||||
f.write(filedata)
|
|
||||||
os.chmod(full_path, Env.getPermission('file'))
|
|
||||||
return self.downloadReturnId('')
|
|
||||||
else:
|
|
||||||
log.info('File %s already exists.', full_path)
|
|
||||||
return self.downloadReturnId('')
|
|
||||||
|
|
||||||
except:
|
|
||||||
log.error('Failed to download to blackhole %s', traceback.format_exc())
|
|
||||||
pass
|
|
||||||
|
|
||||||
except:
|
|
||||||
log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
|
|
||||||
return False
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def getEnabledProtocol(self):
|
|
||||||
if self.conf('use_for') == 'both':
|
|
||||||
return super(Blackhole, self).getEnabledProtocol()
|
|
||||||
elif self.conf('use_for') == 'torrent':
|
|
||||||
return ['torrent', 'torrent_magnet']
|
|
||||||
else:
|
|
||||||
return ['nzb']
|
|
||||||
|
|
||||||
def isEnabled(self, manual = False, data = None):
|
|
||||||
if not data: data = {}
|
|
||||||
for_protocol = ['both']
|
|
||||||
if data and 'torrent' in data.get('protocol'):
|
|
||||||
for_protocol.append('torrent')
|
|
||||||
elif data:
|
|
||||||
for_protocol.append(data.get('protocol'))
|
|
||||||
|
|
||||||
return super(Blackhole, self).isEnabled(manual, data) and \
|
|
||||||
((self.conf('use_for') in for_protocol))
|
|
||||||
@@ -1,38 +1,66 @@
|
|||||||
from base64 import b64encode, b16encode, b32decode
|
from base64 import b64encode, b16encode, b32decode
|
||||||
from bencode import bencode as benc, bdecode
|
|
||||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
|
||||||
from couchpotato.core.helpers.encoding import isInt, sp
|
|
||||||
from couchpotato.core.helpers.variable import tryFloat
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from hashlib import sha1
|
from hashlib import sha1
|
||||||
from synchronousdeluge import DelugeClient
|
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from bencode import bencode as benc, bdecode
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import isInt, sp
|
||||||
|
from couchpotato.core.helpers.variable import tryFloat, cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from synchronousdeluge import DelugeClient
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Deluge'
|
||||||
|
|
||||||
class Deluge(Downloader):
|
|
||||||
|
class Deluge(DownloaderBase):
|
||||||
|
|
||||||
protocol = ['torrent', 'torrent_magnet']
|
protocol = ['torrent', 'torrent_magnet']
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
drpc = None
|
drpc = None
|
||||||
|
|
||||||
def connect(self):
|
def connect(self, reconnect = False):
|
||||||
|
""" Connect to the delugeRPC, re-use connection when already available
|
||||||
|
:param reconnect: force reconnect
|
||||||
|
:return: DelugeRPC instance
|
||||||
|
"""
|
||||||
|
|
||||||
# Load host from config and split out port.
|
# Load host from config and split out port.
|
||||||
host = self.conf('host').split(':')
|
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||||
|
|
||||||
|
# Force host assignment
|
||||||
|
if len(host) == 1:
|
||||||
|
host.append(80)
|
||||||
|
|
||||||
if not isInt(host[1]):
|
if not isInt(host[1]):
|
||||||
log.error('Config properties are not filled in correctly, port is missing.')
|
log.error('Config properties are not filled in correctly, port is missing.')
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not self.drpc:
|
if not self.drpc or reconnect:
|
||||||
self.drpc = DelugeRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
|
self.drpc = DelugeRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
|
||||||
|
|
||||||
return self.drpc
|
return self.drpc
|
||||||
|
|
||||||
def download(self, data = None, media = None, filedata = None):
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
""" Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
if not media: media = {}
|
if not media: media = {}
|
||||||
if not data: data = {}
|
if not data: data = {}
|
||||||
|
|
||||||
@@ -86,7 +114,22 @@ class Deluge(Downloader):
|
|||||||
log.info('Torrent sent to Deluge successfully.')
|
log.info('Torrent sent to Deluge successfully.')
|
||||||
return self.downloadReturnId(remote_torrent)
|
return self.downloadReturnId(remote_torrent)
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Check if connection works
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
if self.connect(True) and self.drpc.test():
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
def getAllDownloadStatus(self, ids):
|
def getAllDownloadStatus(self, ids):
|
||||||
|
""" Get status of all active downloads
|
||||||
|
|
||||||
|
:param ids: list of (mixed) downloader ids
|
||||||
|
Used to match the releases for this downloader as there could be
|
||||||
|
other downloaders active that it should ignore
|
||||||
|
:return: list of releases
|
||||||
|
"""
|
||||||
|
|
||||||
log.debug('Checking Deluge download status.')
|
log.debug('Checking Deluge download status.')
|
||||||
|
|
||||||
@@ -103,8 +146,13 @@ class Deluge(Downloader):
|
|||||||
|
|
||||||
for torrent_id in queue:
|
for torrent_id in queue:
|
||||||
torrent = queue[torrent_id]
|
torrent = queue[torrent_id]
|
||||||
log.debug('name=%s / id=%s / save_path=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
|
|
||||||
|
if not 'hash' in torrent:
|
||||||
|
# When given a list of ids, deluge will return an empty item for a non-existant torrent.
|
||||||
|
continue
|
||||||
|
|
||||||
|
log.debug('name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_on_completed'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
|
||||||
|
|
||||||
# Deluge has no easy way to work out if a torrent is stalled or failing.
|
# Deluge has no easy way to work out if a torrent is stalled or failing.
|
||||||
#status = 'failed'
|
#status = 'failed'
|
||||||
status = 'busy'
|
status = 'busy'
|
||||||
@@ -120,11 +168,11 @@ class Deluge(Downloader):
|
|||||||
download_dir = sp(torrent['save_path'])
|
download_dir = sp(torrent['save_path'])
|
||||||
if torrent['move_on_completed']:
|
if torrent['move_on_completed']:
|
||||||
download_dir = torrent['move_completed_path']
|
download_dir = torrent['move_completed_path']
|
||||||
|
|
||||||
torrent_files = []
|
torrent_files = []
|
||||||
for file_item in torrent['files']:
|
for file_item in torrent['files']:
|
||||||
torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))
|
torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))
|
||||||
|
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
'id': torrent['hash'],
|
'id': torrent['hash'],
|
||||||
'name': torrent['name'],
|
'name': torrent['name'],
|
||||||
@@ -133,7 +181,7 @@ class Deluge(Downloader):
|
|||||||
'seed_ratio': torrent['ratio'],
|
'seed_ratio': torrent['ratio'],
|
||||||
'timeleft': str(timedelta(seconds = torrent['eta'])),
|
'timeleft': str(timedelta(seconds = torrent['eta'])),
|
||||||
'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
|
'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
|
||||||
'files': '|'.join(torrent_files),
|
'files': torrent_files,
|
||||||
})
|
})
|
||||||
|
|
||||||
return release_downloads
|
return release_downloads
|
||||||
@@ -152,6 +200,7 @@ class Deluge(Downloader):
|
|||||||
log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
||||||
return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
|
return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
|
||||||
|
|
||||||
|
|
||||||
class DelugeRPC(object):
|
class DelugeRPC(object):
|
||||||
|
|
||||||
host = 'localhost'
|
host = 'localhost'
|
||||||
@@ -172,6 +221,13 @@ class DelugeRPC(object):
|
|||||||
self.client = DelugeClient()
|
self.client = DelugeClient()
|
||||||
self.client.connect(self.host, int(self.port), self.username, self.password)
|
self.client.connect(self.host, int(self.port), self.username, self.password)
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
try:
|
||||||
|
self.connect()
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
def add_torrent_magnet(self, torrent, options):
|
def add_torrent_magnet(self, torrent, options):
|
||||||
torrent_id = False
|
torrent_id = False
|
||||||
try:
|
try:
|
||||||
@@ -182,7 +238,7 @@ class DelugeRPC(object):
|
|||||||
|
|
||||||
if torrent_id and options['label']:
|
if torrent_id and options['label']:
|
||||||
self.client.label.set_torrent(torrent_id, options['label']).get()
|
self.client.label.set_torrent(torrent_id, options['label']).get()
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
|
log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
|
||||||
finally:
|
finally:
|
||||||
if self.client:
|
if self.client:
|
||||||
@@ -200,7 +256,7 @@ class DelugeRPC(object):
|
|||||||
|
|
||||||
if torrent_id and options['label']:
|
if torrent_id and options['label']:
|
||||||
self.client.label.set_torrent(torrent_id, options['label']).get()
|
self.client.label.set_torrent(torrent_id, options['label']).get()
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
|
log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
|
||||||
finally:
|
finally:
|
||||||
if self.client:
|
if self.client:
|
||||||
@@ -212,8 +268,8 @@ class DelugeRPC(object):
|
|||||||
ret = False
|
ret = False
|
||||||
try:
|
try:
|
||||||
self.connect()
|
self.connect()
|
||||||
ret = self.client.core.get_torrents_status({'id': ids}, {}).get()
|
ret = self.client.core.get_torrents_status({'id': ids}, ('name', 'hash', 'save_path', 'move_completed_path', 'progress', 'state', 'eta', 'ratio', 'stop_ratio', 'is_seed', 'is_finished', 'paused', 'move_on_completed', 'files')).get()
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Failed to get all torrents: %s %s', (err, traceback.format_exc()))
|
log.error('Failed to get all torrents: %s %s', (err, traceback.format_exc()))
|
||||||
finally:
|
finally:
|
||||||
if self.client:
|
if self.client:
|
||||||
@@ -224,7 +280,7 @@ class DelugeRPC(object):
|
|||||||
try:
|
try:
|
||||||
self.connect()
|
self.connect()
|
||||||
self.client.core.pause_torrent(torrent_ids).get()
|
self.client.core.pause_torrent(torrent_ids).get()
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Failed to pause torrent: %s %s', (err, traceback.format_exc()))
|
log.error('Failed to pause torrent: %s %s', (err, traceback.format_exc()))
|
||||||
finally:
|
finally:
|
||||||
if self.client:
|
if self.client:
|
||||||
@@ -234,7 +290,7 @@ class DelugeRPC(object):
|
|||||||
try:
|
try:
|
||||||
self.connect()
|
self.connect()
|
||||||
self.client.core.resume_torrent(torrent_ids).get()
|
self.client.core.resume_torrent(torrent_ids).get()
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Failed to resume torrent: %s %s', (err, traceback.format_exc()))
|
log.error('Failed to resume torrent: %s %s', (err, traceback.format_exc()))
|
||||||
finally:
|
finally:
|
||||||
if self.client:
|
if self.client:
|
||||||
@@ -245,7 +301,7 @@ class DelugeRPC(object):
|
|||||||
try:
|
try:
|
||||||
self.connect()
|
self.connect()
|
||||||
ret = self.client.core.remove_torrent(torrent_id, remove_local_data).get()
|
ret = self.client.core.remove_torrent(torrent_id, remove_local_data).get()
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Failed to remove torrent: %s %s', (err, traceback.format_exc()))
|
log.error('Failed to remove torrent: %s %s', (err, traceback.format_exc()))
|
||||||
finally:
|
finally:
|
||||||
if self.client:
|
if self.client:
|
||||||
@@ -273,3 +329,90 @@ class DelugeRPC(object):
|
|||||||
return torrent_hash
|
return torrent_hash
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'deluge',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'deluge',
|
||||||
|
'label': 'Deluge',
|
||||||
|
'description': 'Use <a href="http://www.deluge-torrent.org/" target="_blank">Deluge</a> to download torrents.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'torrent',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'localhost:58846',
|
||||||
|
'description': 'Hostname with port. Usually <strong>localhost:58846</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'directory',
|
||||||
|
'type': 'directory',
|
||||||
|
'description': 'Download to this directory. Keep empty for default Deluge download directory.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'completed_directory',
|
||||||
|
'type': 'directory',
|
||||||
|
'description': 'Move completed torrent to this directory. Keep empty for default Deluge options.',
|
||||||
|
'advanced': True,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'label',
|
||||||
|
'description': 'Label to add to torrents in the Deluge UI.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'remove_complete',
|
||||||
|
'label': 'Remove torrent',
|
||||||
|
'type': 'bool',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Remove the torrent from Deluge after it has finished seeding.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_files',
|
||||||
|
'label': 'Remove files',
|
||||||
|
'default': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Also remove the leftover files.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'paused',
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'default': False,
|
||||||
|
'description': 'Add the torrent paused.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'manual',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_failed',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Delete a release after the download has failed.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,90 +0,0 @@
|
|||||||
from .main import Deluge
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Deluge()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'deluge',
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'list': 'download_providers',
|
|
||||||
'name': 'deluge',
|
|
||||||
'label': 'Deluge',
|
|
||||||
'description': 'Use <a href="http://www.deluge-torrent.org/" target="_blank">Deluge</a> to download torrents.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'enabled',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'enabler',
|
|
||||||
'radio_group': 'torrent',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'host',
|
|
||||||
'default': 'localhost:58846',
|
|
||||||
'description': 'Hostname with port. Usually <strong>localhost:58846</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'username',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'password',
|
|
||||||
'type': 'password',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'directory',
|
|
||||||
'type': 'directory',
|
|
||||||
'description': 'Download to this directory. Keep empty for default Deluge download directory.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'completed_directory',
|
|
||||||
'type': 'directory',
|
|
||||||
'description': 'Move completed torrent to this directory. Keep empty for default Deluge options.',
|
|
||||||
'advanced': True,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'label',
|
|
||||||
'description': 'Label to add to torrents in the Deluge UI.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'remove_complete',
|
|
||||||
'label': 'Remove torrent',
|
|
||||||
'type': 'bool',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Remove the torrent from Deluge after it has finished seeding.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_files',
|
|
||||||
'label': 'Remove files',
|
|
||||||
'default': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Also remove the leftover files.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'paused',
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'default': False,
|
|
||||||
'description': 'Add the torrent paused.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'manual',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_failed',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Delete a release after the download has failed.',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
427
couchpotato/core/downloaders/hadouken.py
Normal file
427
couchpotato/core/downloaders/hadouken.py
Normal file
@@ -0,0 +1,427 @@
|
|||||||
|
from base64 import b16encode, b32decode, b64encode
|
||||||
|
from distutils.version import LooseVersion
|
||||||
|
from hashlib import sha1
|
||||||
|
import httplib
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import urllib2
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import isInt, sp
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from bencode import bencode as benc, bdecode
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Hadouken'
|
||||||
|
|
||||||
|
|
||||||
|
class Hadouken(DownloaderBase):
|
||||||
|
protocol = ['torrent', 'torrent_magnet']
|
||||||
|
hadouken_api = None
|
||||||
|
|
||||||
|
def connect(self):
|
||||||
|
# Load host from config and split out port.
|
||||||
|
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||||
|
|
||||||
|
if not isInt(host[1]):
|
||||||
|
log.error('Config properties are not filled in correctly, port is missing.')
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not self.conf('api_key'):
|
||||||
|
log.error('Config properties are not filled in correctly, API key is missing.')
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.hadouken_api = HadoukenAPI(host[0], port = host[1], api_key = self.conf('api_key'))
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
""" Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not media: media = {}
|
||||||
|
if not data: data = {}
|
||||||
|
|
||||||
|
log.debug("Sending '%s' (%s) to Hadouken.", (data.get('name'), data.get('protocol')))
|
||||||
|
|
||||||
|
if not self.connect():
|
||||||
|
return False
|
||||||
|
|
||||||
|
torrent_params = {}
|
||||||
|
|
||||||
|
if self.conf('label'):
|
||||||
|
torrent_params['label'] = self.conf('label')
|
||||||
|
|
||||||
|
torrent_filename = self.createFileName(data, filedata, media)
|
||||||
|
|
||||||
|
if data.get('protocol') == 'torrent_magnet':
|
||||||
|
torrent_hash = re.findall('urn:btih:([\w]{32,40})', data.get('url'))[0].upper()
|
||||||
|
torrent_params['trackers'] = self.torrent_trackers
|
||||||
|
torrent_params['name'] = torrent_filename
|
||||||
|
else:
|
||||||
|
info = bdecode(filedata)['info']
|
||||||
|
torrent_hash = sha1(benc(info)).hexdigest().upper()
|
||||||
|
|
||||||
|
# Convert base 32 to hex
|
||||||
|
if len(torrent_hash) == 32:
|
||||||
|
torrent_hash = b16encode(b32decode(torrent_hash))
|
||||||
|
|
||||||
|
# Send request to Hadouken
|
||||||
|
if data.get('protocol') == 'torrent_magnet':
|
||||||
|
self.hadouken_api.add_magnet_link(data.get('url'), torrent_params)
|
||||||
|
else:
|
||||||
|
self.hadouken_api.add_file(filedata, torrent_params)
|
||||||
|
|
||||||
|
return self.downloadReturnId(torrent_hash)
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Tests the given host:port and API key """
|
||||||
|
|
||||||
|
if not self.connect():
|
||||||
|
return False
|
||||||
|
|
||||||
|
version = self.hadouken_api.get_version()
|
||||||
|
|
||||||
|
if not version:
|
||||||
|
log.error('Could not get Hadouken version.')
|
||||||
|
return False
|
||||||
|
|
||||||
|
# The minimum required version of Hadouken is 4.5.6.
|
||||||
|
if LooseVersion(version) >= LooseVersion('4.5.6'):
|
||||||
|
return True
|
||||||
|
|
||||||
|
log.error('Hadouken v4.5.6 (or newer) required. Found v%s', version)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def getAllDownloadStatus(self, ids):
|
||||||
|
""" Get status of all active downloads
|
||||||
|
|
||||||
|
:param ids: list of (mixed) downloader ids
|
||||||
|
Used to match the releases for this downloader as there could be
|
||||||
|
other downloaders active that it should ignore
|
||||||
|
:return: list of releases
|
||||||
|
"""
|
||||||
|
|
||||||
|
log.debug('Checking Hadouken download status.')
|
||||||
|
|
||||||
|
if not self.connect():
|
||||||
|
return []
|
||||||
|
|
||||||
|
release_downloads = ReleaseDownloadList(self)
|
||||||
|
queue = self.hadouken_api.get_by_hash_list(ids)
|
||||||
|
|
||||||
|
if not queue:
|
||||||
|
return []
|
||||||
|
|
||||||
|
for torrent in queue:
|
||||||
|
if torrent is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
torrent_filelist = self.hadouken_api.get_files_by_hash(torrent['InfoHash'])
|
||||||
|
torrent_files = []
|
||||||
|
|
||||||
|
save_path = torrent['SavePath']
|
||||||
|
|
||||||
|
# The 'Path' key for each file_item contains
|
||||||
|
# the full path to the single file relative to the
|
||||||
|
# torrents save path.
|
||||||
|
|
||||||
|
# For a single file torrent the result would be,
|
||||||
|
# - Save path: "C:\Downloads"
|
||||||
|
# - file_item['Path'] = "file1.iso"
|
||||||
|
# Resulting path: "C:\Downloads\file1.iso"
|
||||||
|
|
||||||
|
# For a multi file torrent the result would be,
|
||||||
|
# - Save path: "C:\Downloads"
|
||||||
|
# - file_item['Path'] = "dirname/file1.iso"
|
||||||
|
# Resulting path: "C:\Downloads\dirname/file1.iso"
|
||||||
|
|
||||||
|
for file_item in torrent_filelist:
|
||||||
|
torrent_files.append(sp(os.path.join(save_path, file_item['Path'])))
|
||||||
|
|
||||||
|
release_downloads.append({
|
||||||
|
'id': torrent['InfoHash'].upper(),
|
||||||
|
'name': torrent['Name'],
|
||||||
|
'status': self.get_torrent_status(torrent),
|
||||||
|
'seed_ratio': self.get_seed_ratio(torrent),
|
||||||
|
'original_status': torrent['State'],
|
||||||
|
'timeleft': -1,
|
||||||
|
'folder': sp(save_path if len(torrent_files == 1) else os.path.join(save_path, torrent['Name'])),
|
||||||
|
'files': torrent_files
|
||||||
|
})
|
||||||
|
|
||||||
|
return release_downloads
|
||||||
|
|
||||||
|
def get_seed_ratio(self, torrent):
|
||||||
|
""" Returns the seed ratio for a given torrent.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
torrent -- The torrent to calculate seed ratio for.
|
||||||
|
"""
|
||||||
|
|
||||||
|
up = torrent['TotalUploadedBytes']
|
||||||
|
down = torrent['TotalDownloadedBytes']
|
||||||
|
|
||||||
|
if up > 0 and down > 0:
|
||||||
|
return up / down
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def get_torrent_status(self, torrent):
|
||||||
|
""" Returns the CouchPotato status for a given torrent.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
torrent -- The torrent to translate status for.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if torrent['IsSeeding'] and torrent['IsFinished'] and torrent['Paused']:
|
||||||
|
return 'completed'
|
||||||
|
|
||||||
|
if torrent['IsSeeding']:
|
||||||
|
return 'seeding'
|
||||||
|
|
||||||
|
return 'busy'
|
||||||
|
|
||||||
|
def pause(self, release_download, pause = True):
|
||||||
|
""" Pauses or resumes the torrent specified by the ID field
|
||||||
|
in release_download.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
release_download -- The CouchPotato release_download to pause/resume.
|
||||||
|
pause -- Boolean indicating whether to pause or resume.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not self.connect():
|
||||||
|
return False
|
||||||
|
|
||||||
|
return self.hadouken_api.pause(release_download['id'], pause)
|
||||||
|
|
||||||
|
def removeFailed(self, release_download):
|
||||||
|
""" Removes a failed torrent and also remove the data associated with it.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
release_download -- The CouchPotato release_download to remove.
|
||||||
|
"""
|
||||||
|
|
||||||
|
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||||
|
|
||||||
|
if not self.connect():
|
||||||
|
return False
|
||||||
|
|
||||||
|
return self.hadouken_api.remove(release_download['id'], remove_data = True)
|
||||||
|
|
||||||
|
def processComplete(self, release_download, delete_files = False):
|
||||||
|
""" Removes the completed torrent from Hadouken and optionally removes the data
|
||||||
|
associated with it.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
release_download -- The CouchPotato release_download to remove.
|
||||||
|
delete_files: Boolean indicating whether to remove the associated data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
log.debug('Requesting Hadouken to remove the torrent %s%s.',
|
||||||
|
(release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
||||||
|
|
||||||
|
if not self.connect():
|
||||||
|
return False
|
||||||
|
|
||||||
|
return self.hadouken_api.remove(release_download['id'], remove_data = delete_files)
|
||||||
|
|
||||||
|
|
||||||
|
class HadoukenAPI(object):
|
||||||
|
def __init__(self, host = 'localhost', port = 7890, api_key = None):
|
||||||
|
self.url = 'http://' + str(host) + ':' + str(port)
|
||||||
|
self.api_key = api_key
|
||||||
|
self.requestId = 0;
|
||||||
|
|
||||||
|
self.opener = urllib2.build_opener()
|
||||||
|
self.opener.addheaders = [('User-agent', 'couchpotato-hadouken-client/1.0'), ('Accept', 'application/json')]
|
||||||
|
|
||||||
|
if not api_key:
|
||||||
|
log.error('API key missing.')
|
||||||
|
|
||||||
|
def add_file(self, filedata, torrent_params):
|
||||||
|
""" Add a file to Hadouken with the specified parameters.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
filedata -- The binary torrent data.
|
||||||
|
torrent_params -- Additional parameters for the file.
|
||||||
|
"""
|
||||||
|
data = {
|
||||||
|
'method': 'torrents.addFile',
|
||||||
|
'params': [b64encode(filedata), torrent_params]
|
||||||
|
}
|
||||||
|
|
||||||
|
return self._request(data)
|
||||||
|
|
||||||
|
def add_magnet_link(self, magnetLink, torrent_params):
|
||||||
|
""" Add a magnet link to Hadouken with the specified parameters.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
magnetLink -- The magnet link to send.
|
||||||
|
torrent_params -- Additional parameters for the magnet link.
|
||||||
|
"""
|
||||||
|
data = {
|
||||||
|
'method': 'torrents.addUrl',
|
||||||
|
'params': [magnetLink, torrent_params]
|
||||||
|
}
|
||||||
|
|
||||||
|
return self._request(data)
|
||||||
|
|
||||||
|
def get_by_hash_list(self, infoHashList):
|
||||||
|
""" Gets a list of torrents filtered by the given info hash list.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
infoHashList -- A list of info hashes.
|
||||||
|
"""
|
||||||
|
data = {
|
||||||
|
'method': 'torrents.getByInfoHashList',
|
||||||
|
'params': [infoHashList]
|
||||||
|
}
|
||||||
|
|
||||||
|
return self._request(data)
|
||||||
|
|
||||||
|
def get_files_by_hash(self, infoHash):
|
||||||
|
""" Gets a list of files for the torrent identified by the
|
||||||
|
given info hash.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
infoHash -- The info hash of the torrent to return files for.
|
||||||
|
"""
|
||||||
|
data = {
|
||||||
|
'method': 'torrents.getFiles',
|
||||||
|
'params': [infoHash]
|
||||||
|
}
|
||||||
|
|
||||||
|
return self._request(data)
|
||||||
|
|
||||||
|
def get_version(self):
|
||||||
|
""" Gets the version, commitish and build date of Hadouken. """
|
||||||
|
data = {
|
||||||
|
'method': 'core.getVersion',
|
||||||
|
'params': None
|
||||||
|
}
|
||||||
|
|
||||||
|
result = self._request(data)
|
||||||
|
|
||||||
|
if not result:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return result['Version']
|
||||||
|
|
||||||
|
def pause(self, infoHash, pause):
|
||||||
|
""" Pauses/unpauses the torrent identified by the given info hash.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
infoHash -- The info hash of the torrent to operate on.
|
||||||
|
pause -- If true, pauses the torrent. Otherwise resumes.
|
||||||
|
"""
|
||||||
|
data = {
|
||||||
|
'method': 'torrents.pause',
|
||||||
|
'params': [infoHash]
|
||||||
|
}
|
||||||
|
|
||||||
|
if not pause:
|
||||||
|
data['method'] = 'torrents.resume'
|
||||||
|
|
||||||
|
return self._request(data)
|
||||||
|
|
||||||
|
def remove(self, infoHash, remove_data = False):
|
||||||
|
""" Removes the torrent identified by the given info hash and
|
||||||
|
optionally removes the data as well.
|
||||||
|
|
||||||
|
Keyword arguments:
|
||||||
|
infoHash -- The info hash of the torrent to remove.
|
||||||
|
remove_data -- If true, removes the data associated with the torrent.
|
||||||
|
"""
|
||||||
|
data = {
|
||||||
|
'method': 'torrents.remove',
|
||||||
|
'params': [infoHash, remove_data]
|
||||||
|
}
|
||||||
|
|
||||||
|
return self._request(data)
|
||||||
|
|
||||||
|
|
||||||
|
def _request(self, data):
|
||||||
|
self.requestId += 1
|
||||||
|
|
||||||
|
data['jsonrpc'] = '2.0'
|
||||||
|
data['id'] = self.requestId
|
||||||
|
|
||||||
|
request = urllib2.Request(self.url + '/jsonrpc', data = json.dumps(data))
|
||||||
|
request.add_header('Authorization', 'Token ' + self.api_key)
|
||||||
|
request.add_header('Content-Type', 'application/json')
|
||||||
|
|
||||||
|
try:
|
||||||
|
f = self.opener.open(request)
|
||||||
|
response = f.read()
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
obj = json.loads(response)
|
||||||
|
|
||||||
|
if not 'error' in obj.keys():
|
||||||
|
return obj['result']
|
||||||
|
|
||||||
|
log.error('JSONRPC error, %s: %s', obj['error']['code'], obj['error']['message'])
|
||||||
|
except httplib.InvalidURL as err:
|
||||||
|
log.error('Invalid Hadouken host, check your config %s', err)
|
||||||
|
except urllib2.HTTPError as err:
|
||||||
|
if err.code == 401:
|
||||||
|
log.error('Invalid Hadouken API key, check your config')
|
||||||
|
else:
|
||||||
|
log.error('Hadouken HTTPError: %s', err)
|
||||||
|
except urllib2.URLError as err:
|
||||||
|
log.error('Unable to connect to Hadouken %s', err)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'hadouken',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'hadouken',
|
||||||
|
'label': 'Hadouken',
|
||||||
|
'description': 'Use <a href="http://www.hdkn.net">Hadouken</a> (>= v4.5.6) to download torrents.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'torrent'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'localhost:7890'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'api_key',
|
||||||
|
'label': 'API key',
|
||||||
|
'type': 'password'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'label',
|
||||||
|
'description': 'Label to add torrent as.'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}]
|
||||||
@@ -1,8 +1,4 @@
|
|||||||
from base64 import standard_b64encode
|
from base64 import standard_b64encode
|
||||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
|
||||||
from couchpotato.core.helpers.encoding import ss, sp
|
|
||||||
from couchpotato.core.helpers.variable import tryInt, md5
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
@@ -10,16 +6,37 @@ import socket
|
|||||||
import traceback
|
import traceback
|
||||||
import xmlrpclib
|
import xmlrpclib
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import ss, sp
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, md5, cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'NZBGet'
|
||||||
|
|
||||||
class NZBGet(Downloader):
|
|
||||||
|
class NZBGet(DownloaderBase):
|
||||||
|
|
||||||
protocol = ['nzb']
|
protocol = ['nzb']
|
||||||
|
rpc = 'xmlrpc'
|
||||||
url = '%(protocol)s://%(username)s:%(password)s@%(host)s/xmlrpc'
|
|
||||||
|
|
||||||
def download(self, data = None, media = None, filedata = None):
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
""" Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
if not media: media = {}
|
if not media: media = {}
|
||||||
if not data: data = {}
|
if not data: data = {}
|
||||||
|
|
||||||
@@ -29,10 +46,10 @@ class NZBGet(Downloader):
|
|||||||
|
|
||||||
log.info('Sending "%s" to NZBGet.', data.get('name'))
|
log.info('Sending "%s" to NZBGet.', data.get('name'))
|
||||||
|
|
||||||
url = self.url % {'protocol': 'https' if self.conf('ssl') else 'http', 'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
|
|
||||||
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
|
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
|
||||||
|
|
||||||
rpc = xmlrpclib.ServerProxy(url)
|
rpc = self.getRPC()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
|
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
|
||||||
log.debug('Successfully connected to NZBGet')
|
log.debug('Successfully connected to NZBGet')
|
||||||
@@ -41,7 +58,7 @@ class NZBGet(Downloader):
|
|||||||
except socket.error:
|
except socket.error:
|
||||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||||
return False
|
return False
|
||||||
except xmlrpclib.ProtocolError, e:
|
except xmlrpclib.ProtocolError as e:
|
||||||
if e.errcode == 401:
|
if e.errcode == 401:
|
||||||
log.error('Password is incorrect.')
|
log.error('Password is incorrect.')
|
||||||
else:
|
else:
|
||||||
@@ -55,7 +72,7 @@ class NZBGet(Downloader):
|
|||||||
|
|
||||||
if xml_response:
|
if xml_response:
|
||||||
log.info('NZB sent successfully to NZBGet')
|
log.info('NZB sent successfully to NZBGet')
|
||||||
nzb_id = md5(data['url']) # about as unique as they come ;)
|
nzb_id = md5(data['url']) # about as unique as they come ;)
|
||||||
couchpotato_id = "couchpotato=" + nzb_id
|
couchpotato_id = "couchpotato=" + nzb_id
|
||||||
groups = rpc.listgroups()
|
groups = rpc.listgroups()
|
||||||
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
|
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
|
||||||
@@ -67,13 +84,43 @@ class NZBGet(Downloader):
|
|||||||
log.error('NZBGet could not add %s to the queue.', nzb_name)
|
log.error('NZBGet could not add %s to the queue.', nzb_name)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Check if connection works
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
rpc = self.getRPC()
|
||||||
|
|
||||||
|
try:
|
||||||
|
if rpc.writelog('INFO', 'CouchPotato connected to test connection'):
|
||||||
|
log.debug('Successfully connected to NZBGet')
|
||||||
|
else:
|
||||||
|
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||||
|
except socket.error:
|
||||||
|
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||||
|
return False
|
||||||
|
except xmlrpclib.ProtocolError as e:
|
||||||
|
if e.errcode == 401:
|
||||||
|
log.error('Password is incorrect.')
|
||||||
|
else:
|
||||||
|
log.error('Protocol Error: %s', e)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
def getAllDownloadStatus(self, ids):
|
def getAllDownloadStatus(self, ids):
|
||||||
|
""" Get status of all active downloads
|
||||||
|
|
||||||
|
:param ids: list of (mixed) downloader ids
|
||||||
|
Used to match the releases for this downloader as there could be
|
||||||
|
other downloaders active that it should ignore
|
||||||
|
:return: list of releases
|
||||||
|
"""
|
||||||
|
|
||||||
log.debug('Checking NZBGet download status.')
|
log.debug('Checking NZBGet download status.')
|
||||||
|
|
||||||
url = self.url % {'protocol': 'https' if self.conf('ssl') else 'http', 'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
|
rpc = self.getRPC()
|
||||||
|
|
||||||
rpc = xmlrpclib.ServerProxy(url)
|
|
||||||
try:
|
try:
|
||||||
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
|
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
|
||||||
log.debug('Successfully connected to NZBGet')
|
log.debug('Successfully connected to NZBGet')
|
||||||
@@ -82,7 +129,7 @@ class NZBGet(Downloader):
|
|||||||
except socket.error:
|
except socket.error:
|
||||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||||
return []
|
return []
|
||||||
except xmlrpclib.ProtocolError, e:
|
except xmlrpclib.ProtocolError as e:
|
||||||
if e.errcode == 401:
|
if e.errcode == 401:
|
||||||
log.error('Password is incorrect.')
|
log.error('Password is incorrect.')
|
||||||
else:
|
else:
|
||||||
@@ -115,7 +162,7 @@ class NZBGet(Downloader):
|
|||||||
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
|
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
'id': nzb_id,
|
'id': nzb_id,
|
||||||
'name': nzb['NZBFilename'],
|
'name': nzb['NZBFilename'],
|
||||||
@@ -124,7 +171,7 @@ class NZBGet(Downloader):
|
|||||||
'timeleft': timeleft,
|
'timeleft': timeleft,
|
||||||
})
|
})
|
||||||
|
|
||||||
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
|
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
|
||||||
if nzb['NZBID'] in ids:
|
if nzb['NZBID'] in ids:
|
||||||
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
|
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
@@ -141,12 +188,12 @@ class NZBGet(Downloader):
|
|||||||
nzb_id = nzb['NZBID']
|
nzb_id = nzb['NZBID']
|
||||||
|
|
||||||
if nzb_id in ids:
|
if nzb_id in ids:
|
||||||
log.debug('Found %s in NZBGet history. ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
|
log.debug('Found %s in NZBGet history. TotalStatus: %s, ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['Status'], nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
'id': nzb_id,
|
'id': nzb_id,
|
||||||
'name': nzb['NZBFilename'],
|
'name': nzb['NZBFilename'],
|
||||||
'status': 'completed' if nzb['ParStatus'] in ['SUCCESS', 'NONE'] and nzb['ScriptStatus'] in ['SUCCESS', 'NONE'] else 'failed',
|
'status': 'completed' if 'SUCCESS' in nzb['Status'] else 'failed',
|
||||||
'original_status': nzb['ParStatus'] + ', ' + nzb['ScriptStatus'],
|
'original_status': nzb['Status'],
|
||||||
'timeleft': str(timedelta(seconds = 0)),
|
'timeleft': str(timedelta(seconds = 0)),
|
||||||
'folder': sp(nzb['DestDir'])
|
'folder': sp(nzb['DestDir'])
|
||||||
})
|
})
|
||||||
@@ -157,9 +204,8 @@ class NZBGet(Downloader):
|
|||||||
|
|
||||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||||
|
|
||||||
url = self.url % {'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
|
rpc = self.getRPC()
|
||||||
|
|
||||||
rpc = xmlrpclib.ServerProxy(url)
|
|
||||||
try:
|
try:
|
||||||
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
|
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
|
||||||
log.debug('Successfully connected to NZBGet')
|
log.debug('Successfully connected to NZBGet')
|
||||||
@@ -168,7 +214,7 @@ class NZBGet(Downloader):
|
|||||||
except socket.error:
|
except socket.error:
|
||||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||||
return False
|
return False
|
||||||
except xmlrpclib.ProtocolError, e:
|
except xmlrpclib.ProtocolError as e:
|
||||||
if e.errcode == 401:
|
if e.errcode == 401:
|
||||||
log.error('Password is incorrect.')
|
log.error('Password is incorrect.')
|
||||||
else:
|
else:
|
||||||
@@ -193,3 +239,80 @@ class NZBGet(Downloader):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def getRPC(self):
|
||||||
|
url = cleanHost(host = self.conf('host'), ssl = self.conf('ssl'), username = self.conf('username'), password = self.conf('password')) + self.rpc
|
||||||
|
return xmlrpclib.ServerProxy(url)
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'nzbget',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'nzbget',
|
||||||
|
'label': 'NZBGet',
|
||||||
|
'description': 'Use <a href="http://nzbget.sourceforge.net/Main_Page" target="_blank">NZBGet</a> to download NZBs.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'nzb',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'localhost:6789',
|
||||||
|
'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'ssl',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': 'nzbget',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Set a different username to connect. Default: nzbget',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'type': 'password',
|
||||||
|
'description': 'Default NZBGet password is <i>tegbzn6789</i>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'category',
|
||||||
|
'default': 'Movies',
|
||||||
|
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'priority',
|
||||||
|
'advanced': True,
|
||||||
|
'default': '0',
|
||||||
|
'type': 'dropdown',
|
||||||
|
'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100)],
|
||||||
|
'description': 'Only change this if you are using NZBget 9.0 or higher',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'manual',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_failed',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Delete a release after the download has failed.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,76 +0,0 @@
|
|||||||
from .main import NZBGet
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return NZBGet()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'nzbget',
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'list': 'download_providers',
|
|
||||||
'name': 'nzbget',
|
|
||||||
'label': 'NZBGet',
|
|
||||||
'description': 'Use <a href="http://nzbget.sourceforge.net/Main_Page" target="_blank">NZBGet</a> to download NZBs.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'enabled',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'enabler',
|
|
||||||
'radio_group': 'nzb',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'host',
|
|
||||||
'default': 'localhost:6789',
|
|
||||||
'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'ssl',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'username',
|
|
||||||
'default': 'nzbget',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Set a different username to connect. Default: nzbget',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'password',
|
|
||||||
'type': 'password',
|
|
||||||
'description': 'Default NZBGet password is <i>tegbzn6789</i>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'category',
|
|
||||||
'default': 'Movies',
|
|
||||||
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'priority',
|
|
||||||
'advanced': True,
|
|
||||||
'default': '0',
|
|
||||||
'type': 'dropdown',
|
|
||||||
'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100)],
|
|
||||||
'description': 'Only change this if you are using NZBget 9.0 or higher',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'manual',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_failed',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Delete a release after the download has failed.',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
245
couchpotato/core/downloaders/nzbvortex.py
Normal file
245
couchpotato/core/downloaders/nzbvortex.py
Normal file
@@ -0,0 +1,245 @@
|
|||||||
|
from base64 import b64encode
|
||||||
|
import os
|
||||||
|
from uuid import uuid4
|
||||||
|
import hashlib
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from requests import HTTPError
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import tryUrlencode, sp
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'NZBVortex'
|
||||||
|
|
||||||
|
|
||||||
|
class NZBVortex(DownloaderBase):
|
||||||
|
|
||||||
|
protocol = ['nzb']
|
||||||
|
api_level = None
|
||||||
|
session_id = None
|
||||||
|
|
||||||
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
""" Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not media: media = {}
|
||||||
|
if not data: data = {}
|
||||||
|
|
||||||
|
# Send the nzb
|
||||||
|
try:
|
||||||
|
nzb_filename = self.createFileName(data, filedata, media, unique_tag = True)
|
||||||
|
response = self.call('nzb/add', files = {'file': (nzb_filename, filedata, 'application/octet-stream')}, parameters = {
|
||||||
|
'name': nzb_filename,
|
||||||
|
'groupname': self.conf('group')
|
||||||
|
})
|
||||||
|
|
||||||
|
if response and response.get('result', '').lower() == 'ok':
|
||||||
|
return self.downloadReturnId(nzb_filename)
|
||||||
|
|
||||||
|
log.error('Something went wrong sending the NZB file. Response: %s', response)
|
||||||
|
return False
|
||||||
|
except:
|
||||||
|
log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
|
||||||
|
return False
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Check if connection works
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
login_result = self.login()
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return login_result
|
||||||
|
|
||||||
|
def getAllDownloadStatus(self, ids):
|
||||||
|
""" Get status of all active downloads
|
||||||
|
|
||||||
|
:param ids: list of (mixed) downloader ids
|
||||||
|
Used to match the releases for this downloader as there could be
|
||||||
|
other downloaders active that it should ignore
|
||||||
|
:return: list of releases
|
||||||
|
"""
|
||||||
|
|
||||||
|
raw_statuses = self.call('nzb')
|
||||||
|
|
||||||
|
release_downloads = ReleaseDownloadList(self)
|
||||||
|
for nzb in raw_statuses.get('nzbs', []):
|
||||||
|
nzb_id = os.path.basename(nzb['nzbFileName'])
|
||||||
|
if nzb_id in ids:
|
||||||
|
|
||||||
|
# Check status
|
||||||
|
status = 'busy'
|
||||||
|
if nzb['state'] == 20:
|
||||||
|
status = 'completed'
|
||||||
|
elif nzb['state'] in [21, 22, 24]:
|
||||||
|
status = 'failed'
|
||||||
|
|
||||||
|
release_downloads.append({
|
||||||
|
'temp_id': nzb['id'],
|
||||||
|
'id': nzb_id,
|
||||||
|
'name': nzb['uiTitle'],
|
||||||
|
'status': status,
|
||||||
|
'original_status': nzb['state'],
|
||||||
|
'timeleft': -1,
|
||||||
|
'folder': sp(nzb['destinationPath']),
|
||||||
|
})
|
||||||
|
|
||||||
|
return release_downloads
|
||||||
|
|
||||||
|
def removeFailed(self, release_download):
|
||||||
|
|
||||||
|
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.call('nzb/%s/cancel' % release_download['temp_id'])
|
||||||
|
except:
|
||||||
|
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def login(self):
|
||||||
|
|
||||||
|
nonce = self.call('auth/nonce', auth = False).get('authNonce')
|
||||||
|
cnonce = uuid4().hex
|
||||||
|
hashed = b64encode(hashlib.sha256('%s:%s:%s' % (nonce, cnonce, self.conf('api_key'))).digest())
|
||||||
|
|
||||||
|
params = {
|
||||||
|
'nonce': nonce,
|
||||||
|
'cnonce': cnonce,
|
||||||
|
'hash': hashed
|
||||||
|
}
|
||||||
|
|
||||||
|
login_data = self.call('auth/login', parameters = params, auth = False)
|
||||||
|
|
||||||
|
# Save for later
|
||||||
|
if login_data.get('loginResult') == 'successful':
|
||||||
|
self.session_id = login_data.get('sessionID')
|
||||||
|
return True
|
||||||
|
|
||||||
|
log.error('Login failed, please check you api-key')
|
||||||
|
return False
|
||||||
|
|
||||||
|
def call(self, call, parameters = None, is_repeat = False, auth = True, *args, **kwargs):
    """Do a request against the NZBVortex API.

    :param call: API endpoint path, e.g. 'nzb' or 'auth/login'
    :param parameters: query parameters (not mutated)
    :param is_repeat: internal flag, True when this is the one-shot retry after a 403
    :param auth: log in first when no session exists yet
    :return: parsed JSON dict, or {} on failure
    """
    if not parameters: parameters = {}

    # Login first when we don't have a session yet
    if not self.session_id and auth:
        self.login()

    # Always add the session id to the request; copy so the caller's dict isn't mutated
    if self.session_id:
        parameters = dict(parameters, sessionid = self.session_id)

    params = tryUrlencode(parameters)
    url = cleanHost(self.conf('host')) + 'api/' + call

    try:
        data = self.getJsonData('%s%s' % (url, '?' + params if params else ''), *args, cache_timeout = 0, show_error = False, **kwargs)

        if data:
            return data
    except HTTPError as e:
        sc = e.response.status_code
        if sc == 403 and not is_repeat:
            # Session expired: login again and retry once.
            # Bug fix: the retry previously dropped `auth` and *args.
            self.login()
            return self.call(call, parameters = parameters, is_repeat = True, auth = auth, *args, **kwargs)

        log.error('Failed to parse %s: %s', (self.getName(), traceback.format_exc()))
    except:
        log.error('Failed to parse %s: %s', (self.getName(), traceback.format_exc()))

    return {}
|
||||||
|
|
||||||
|
def getApiLevel(self):
    """Return the NZBVortex API level, fetching and caching it on first use.

    :return: float api level, or None when the daemon can't be reached
    """
    if self.api_level:
        return self.api_level

    try:
        data = self.call('app/apilevel', auth = False)
        self.api_level = float(data.get('apilevel'))
    except HTTPError as e:
        if e.response.status_code == 403:
            log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
        else:
            log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))

    return self.api_level
|
||||||
|
|
||||||
|
def isEnabled(self, manual = False, data = None):
    """Enabled only when the base downloader is enabled AND the API is reachable."""
    return super(NZBVortex, self).isEnabled(manual, data or {}) and self.getApiLevel()
|
||||||
|
|
||||||
|
|
||||||
|
# Settings UI definition for the NZBVortex downloader.
config = [{
    'name': 'nzbvortex',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'nzbvortex',
            'label': 'NZBVortex',
            'description': 'Use <a href="http://www.nzbvortex.com/landing/" target="_blank">NZBVortex</a> to download NZBs.',
            'wizard': True,
            'options': [
                {'name': 'enabled', 'default': 0, 'type': 'enabler', 'radio_group': 'nzb'},
                {
                    'name': 'host',
                    'default': 'https://localhost:4321',
                    'description': 'Hostname with port. Usually <strong>https://localhost:4321</strong>',
                },
                {'name': 'api_key', 'label': 'Api Key'},
                {
                    'name': 'group',
                    'label': 'Group',
                    'description': 'The group CP places the nzb in. Make sure to create it in NZBVortex.',
                },
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        },
    ],
}]
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
from .main import NZBVortex
|
|
||||||
|
|
||||||
def start():
    """Plugin entry point: build and return the downloader instance."""
    return NZBVortex()
|
|
||||||
|
|
||||||
# Settings UI definition for the (old) NZBVortex downloader plugin.
config = [{
    'name': 'nzbvortex',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'nzbvortex',
            'label': 'NZBVortex',
            'description': 'Use <a href="http://www.nzbvortex.com/landing/" target="_blank">NZBVortex</a> to download NZBs.',
            'wizard': True,
            'options': [
                {'name': 'enabled', 'default': 0, 'type': 'enabler', 'radio_group': 'nzb'},
                {'name': 'host', 'default': 'https://localhost:4321'},
                {'name': 'api_key', 'label': 'Api Key'},
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        },
    ],
}]
|
|
||||||
@@ -1,180 +0,0 @@
|
|||||||
from base64 import b64encode
|
|
||||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
|
||||||
from couchpotato.core.helpers.encoding import tryUrlencode, sp
|
|
||||||
from couchpotato.core.helpers.variable import cleanHost
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from urllib2 import URLError
|
|
||||||
from uuid import uuid4
|
|
||||||
import hashlib
|
|
||||||
import httplib
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import socket
|
|
||||||
import ssl
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
import traceback
|
|
||||||
import urllib2
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class NZBVortex(Downloader):
    """Old urllib2-based NZBVortex downloader."""

    protocol = ['nzb']
    api_level = None   # cached float API level, fetched lazily
    session_id = None  # session id returned by login()

    def download(self, data = None, media = None, filedata = None):
        """Send the nzb to NZBVortex, then look its id up by filename."""
        if not media: media = {}
        if not data: data = {}

        # Send the nzb
        try:
            nzb_filename = self.createFileName(data, filedata, media)
            self.call('nzb/add', files = {'file': (nzb_filename, filedata)})

            time.sleep(10)
            raw_statuses = self.call('nzb')
            # Bug fix: the comprehension referenced undefined `item`; the loop variable is `nzb`
            nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if os.path.basename(nzb['nzbFileName']) == nzb_filename][0]
            return self.downloadReturnId(nzb_id)
        except:
            log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
            return False

    def getAllDownloadStatus(self, ids):
        """Map NZBVortex queue states onto CP statuses for the given ids."""
        raw_statuses = self.call('nzb')

        release_downloads = ReleaseDownloadList(self)
        for nzb in raw_statuses.get('nzbs', []):
            if nzb['id'] in ids:

                # Check status: 20 = done, 21/22/24 = failure states
                status = 'busy'
                if nzb['state'] == 20:
                    status = 'completed'
                elif nzb['state'] in [21, 22, 24]:
                    status = 'failed'

                release_downloads.append({
                    'id': nzb['id'],
                    'name': nzb['uiTitle'],
                    'status': status,
                    'original_status': nzb['state'],
                    'timeleft': -1,
                    'folder': sp(nzb['destinationPath']),
                })

        return release_downloads

    def removeFailed(self, release_download):
        """Cancel a failed download; return True on success."""
        log.info('%s failed downloading, deleting...', release_download['name'])

        try:
            self.call('nzb/%s/cancel' % release_download['id'])
        except:
            log.error('Failed deleting: %s', traceback.format_exc(0))
            return False

        return True

    def login(self):
        """Authenticate using nonce + api-key hash; store the session id."""
        nonce = self.call('auth/nonce', auth = False).get('authNonce')
        cnonce = uuid4().hex
        hashed = b64encode(hashlib.sha256('%s:%s:%s' % (nonce, cnonce, self.conf('api_key'))).digest())

        params = {
            'nonce': nonce,
            'cnonce': cnonce,
            'hash': hashed
        }

        login_data = self.call('auth/login', parameters = params, auth = False)

        # Save for later
        if login_data.get('loginResult') == 'successful':
            self.session_id = login_data.get('sessionID')
            return True

        log.error('Login failed, please check you api-key')
        return False

    def call(self, call, parameters = None, repeat = False, auth = True, *args, **kwargs):
        """Do a request to the API; retries once after a 403 (expired session)."""
        # Login first
        if not parameters: parameters = {}
        if not self.session_id and auth:
            self.login()

        # Always add session id to request
        if self.session_id:
            parameters['sessionid'] = self.session_id

        params = tryUrlencode(parameters)
        url = cleanHost(self.conf('host')) + 'api/' + call

        try:
            data = self.urlopen('%s?%s' % (url, params), *args, **kwargs)
            if data:
                return json.loads(data)
        except URLError as e:  # `as` form works on py2.6+ and py3
            if hasattr(e, 'code') and e.code == 403:
                # Try login and do again
                if not repeat:
                    self.login()
                    return self.call(call, parameters = parameters, repeat = True, **kwargs)

            log.error('Failed to parse %s: %s', (self.getName(), traceback.format_exc()))
        except:
            log.error('Failed to parse %s: %s', (self.getName(), traceback.format_exc()))

        return {}

    def getApiLevel(self):
        """Fetch and cache the API level; None when the daemon is unreachable."""
        if not self.api_level:
            url = cleanHost(self.conf('host')) + 'api/app/apilevel'

            try:
                data = self.urlopen(url, show_error = False)
                self.api_level = float(json.loads(data).get('apilevel'))
            except URLError as e:
                if hasattr(e, 'code') and e.code == 403:
                    log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
                else:
                    log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))

        return self.api_level

    def isEnabled(self, manual = False, data = None):
        """Enabled only when the base downloader is enabled AND the API answers."""
        if not data: data = {}
        return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPSConnection(httplib.HTTPSConnection):
    """HTTPSConnection that forces the TLSv1 protocol on the wrapped socket."""

    def __init__(self, *args, **kwargs):
        httplib.HTTPSConnection.__init__(self, *args, **kwargs)

    def connect(self):
        sock = socket.create_connection((self.host, self.port), self.timeout)

        # _tunnel_host is only guaranteed to exist from Python 2.6.7 on,
        # so older interpreters need the hasattr guard
        if sys.version_info < (2, 6, 7):
            if hasattr(self, '_tunnel_host'):
                self.sock = sock
                self._tunnel()
        elif self._tunnel_host:
            self.sock = sock
            self._tunnel()

        self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version = ssl.PROTOCOL_TLSv1)
|
|
||||||
|
|
||||||
class HTTPSHandler(urllib2.HTTPSHandler):
    """urllib2 handler that routes https requests through the TLSv1 HTTPSConnection above."""

    def https_open(self, req):
        return self.do_open(HTTPSConnection, req)
|
|
||||||
129
couchpotato/core/downloaders/pneumatic.py
Normal file
129
couchpotato/core/downloaders/pneumatic.py
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
from __future__ import with_statement
|
||||||
|
import os
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||||
|
from couchpotato.core.helpers.encoding import sp
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Pneumatic'
|
||||||
|
|
||||||
|
|
||||||
|
class Pneumatic(DownloaderBase):
    """Downloader that writes .strm files for the XBMC Pneumatic plugin."""

    protocol = ['nzb']
    strm_syntax = 'plugin://plugin.program.pneumatic/?mode=strm&type=add_file&nzb=%s&nzbname=%s'
    status_support = False  # Pneumatic gives no download-status feedback

    def download(self, data = None, media = None, filedata = None):
        """ Send a torrent/nzb file to the downloader

        :param data: dict returned from provider
            Contains the release information
        :param media: media dict with information
            Used for creating the filename when possible
        :param filedata: downloaded torrent/nzb filedata
            The file gets downloaded in the searcher and send to this function
            This is done to have failed checking before using the downloader, so the downloader
            doesn't need to worry about that
        :return: boolean
            One failure returns False, but the downloader should log its own errors
        """

        if not media: media = {}
        if not data: data = {}

        directory = self.conf('directory')
        if not directory or not os.path.isdir(directory):
            log.error('No directory set for .strm downloads.')
        else:
            try:
                if not filedata or len(filedata) < 50:
                    log.error('No nzb available!')
                    return False

                full_path = os.path.join(directory, self.createFileName(data, filedata, media))

                try:
                    if not os.path.isfile(full_path):
                        log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
                        with open(full_path, 'wb') as f:
                            f.write(filedata)

                        nzb_name = self.createNzbName(data, media)
                        strm_path = os.path.join(directory, nzb_name)

                        # Bug fix: use a context manager so the .strm handle is
                        # closed even when the write raises (was open()/close())
                        with open(strm_path + '.strm', 'wb') as strm_file:
                            strm_file.write(self.strm_syntax % (full_path, nzb_name))

                        return self.downloadReturnId('')

                    else:
                        log.info('File %s already exists.', full_path)
                        return self.downloadReturnId('')

                except:
                    log.error('Failed to download .strm: %s', traceback.format_exc())

            except:
                log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
                return False
        return False

    def test(self):
        """ Check if connection works
        :return: bool
        """

        directory = self.conf('directory')
        if directory and os.path.isdir(directory):

            test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))

            # Check if folder is writable
            self.createFile(test_file, 'This is a test file')
            if os.path.isfile(test_file):
                os.remove(test_file)
                return True

        return False
|
||||||
|
|
||||||
|
|
||||||
|
# Settings UI definition for the Pneumatic downloader.
config = [{
    'name': 'pneumatic',
    'order': 30,
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'pneumatic',
            'label': 'Pneumatic',
            'description': 'Use <a href="http://forum.xbmc.org/showthread.php?tid=97657" target="_blank">Pneumatic</a> to download .strm files.',
            'options': [
                {'name': 'enabled', 'default': 0, 'type': 'enabler'},
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Directory where the .strm file is saved to.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        },
    ],
}]
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
from .main import Pneumatic
|
|
||||||
|
|
||||||
def start():
    """Plugin entry point: build and return the downloader instance."""
    return Pneumatic()
|
|
||||||
|
|
||||||
# Settings UI definition for the (old) Pneumatic downloader plugin.
config = [{
    'name': 'pneumatic',
    'order': 30,
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'pneumatic',
            'label': 'Pneumatic',
            'description': 'Use <a href="http://forum.xbmc.org/showthread.php?tid=97657" target="_blank">Pneumatic</a> to download .strm files.',
            'options': [
                {'name': 'enabled', 'default': 0, 'type': 'enabler'},
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Directory where the .strm file is saved to.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        },
    ],
}]
|
|
||||||
@@ -1,58 +0,0 @@
|
|||||||
from __future__ import with_statement
|
|
||||||
from couchpotato.core.downloaders.base import Downloader
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
import os
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Pneumatic(Downloader):
    """Old Pneumatic downloader: writes .strm files for the XBMC plugin."""

    protocol = ['nzb']
    strm_syntax = 'plugin://plugin.program.pneumatic/?mode=strm&type=add_file&nzb=%s&nzbname=%s'
    status_support = False  # no status feedback from Pneumatic

    def download(self, data = None, media = None, filedata = None):
        """Write the nzb plus a matching .strm file into the configured directory."""
        if not media: media = {}
        if not data: data = {}

        directory = self.conf('directory')
        if not directory or not os.path.isdir(directory):
            log.error('No directory set for .strm downloads.')
        else:
            try:
                if not filedata or len(filedata) < 50:
                    log.error('No nzb available!')
                    return False

                fullPath = os.path.join(directory, self.createFileName(data, filedata, media))

                try:
                    if not os.path.isfile(fullPath):
                        log.info('Downloading %s to %s.', (data.get('protocol'), fullPath))
                        with open(fullPath, 'wb') as f:
                            f.write(filedata)

                        nzb_name = self.createNzbName(data, media)
                        strm_path = os.path.join(directory, nzb_name)

                        # Bug fix: context manager instead of open()/close(), so the
                        # handle is released even when the write raises
                        with open(strm_path + '.strm', 'wb') as strm_file:
                            strm_file.write(self.strm_syntax % (fullPath, nzb_name))

                        return self.downloadReturnId('')

                    else:
                        log.info('File %s already exists.', fullPath)
                        return self.downloadReturnId('')

                except:
                    log.error('Failed to download .strm: %s', traceback.format_exc())

            except:
                log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
                return False
        return False
|
|
||||||
68
couchpotato/core/downloaders/putio/__init__.py
Normal file
68
couchpotato/core/downloaders/putio/__init__.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
from .main import PutIO
|
||||||
|
|
||||||
|
|
||||||
|
def autoload():
    """Plugin entry point: build and return the downloader instance."""
    return PutIO()
|
||||||
|
|
||||||
|
|
||||||
|
# Settings UI definition for the put.io downloader.
config = [{
    'name': 'putio',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'putio',
            'label': 'put.io',
            'description': 'This will start a torrent download on <a href="http://put.io">Put.io</a>.',
            'wizard': True,
            'options': [
                {'name': 'enabled', 'default': 0, 'type': 'enabler', 'radio_group': 'torrent'},
                {
                    'name': 'oauth_token',
                    'label': 'oauth_token',
                    'description': 'This is the OAUTH_TOKEN from your putio API',
                    'advanced': True,
                },
                {
                    'name': 'folder',
                    'description': ('The folder on putio where you want the upload to go','Will find the first first folder that matches this name'),
                    'default': 0,
                },
                {
                    'name': 'callback_host',
                    'description': 'External reachable url to CP so put.io can do it\'s thing',
                },
                {
                    'name': 'download',
                    'description': 'Set this to have CouchPotato download the file from Put.io',
                    'type': 'bool',
                    'default': 0,
                },
                {
                    'name': 'delete_file',
                    'description': ('Set this to remove the file from putio after sucessful download','Does nothing if you don\'t select download'),
                    'type': 'bool',
                    'default': 0,
                },
                {
                    'name': 'download_dir',
                    'type': 'directory',
                    'label': 'Download Directory',
                    'description': 'The Directory to download files to, does nothing if you don\'t select download',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        },
    ],
}]
|
||||||
181
couchpotato/core/downloaders/putio/main.py
Normal file
181
couchpotato/core/downloaders/putio/main.py
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
from couchpotato.api import addApiView
|
||||||
|
from couchpotato.core.event import addEvent, fireEventAsync
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
from pio import api as pio
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Putiodownload'
|
||||||
|
|
||||||
|
|
||||||
|
class PutIO(DownloaderBase):
    """Downloader that starts torrents on put.io and optionally fetches the result."""

    protocol = ['torrent', 'torrent_magnet']
    downloading_list = []  # file ids currently being fetched from put.io
    oauth_authenticate = 'https://api.couchpota.to/authorize/putio/'

    def __init__(self):
        addApiView('downloader.putio.getfrom', self.getFromPutio, docs = {
            'desc': 'Allows you to download file from prom Put.io',
        })
        addApiView('downloader.putio.auth_url', self.getAuthorizationUrl)
        addApiView('downloader.putio.credentials', self.getCredentials)
        addEvent('putio.download', self.putioDownloader)

        return super(PutIO, self).__init__()

    # This is a recursive function to check for the folders
    def recursionFolder(self, client, folder = 0, tfolder = ''):
        """Depth-first search for a folder named `tfolder`; returns its id or 0."""
        files = client.File.list(folder)
        for f in files:
            if f.content_type == 'application/x-directory':
                if f.name == tfolder:
                    return f.id
                result = self.recursionFolder(client, f.id, tfolder)
                if result != 0:
                    return result
        return 0

    # This will check the root for the folder, and kick off recursively checking sub folders
    def convertFolder(self, client, folder):
        """Translate a configured folder name into a put.io folder id (0 = root)."""
        if folder == 0:
            return 0
        return self.recursionFolder(client, 0, folder)

    def download(self, data = None, media = None, filedata = None):
        """Add the release url as a transfer on put.io."""
        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" to put.io', data.get('name'))
        url = data.get('url')
        client = pio.Client(self.conf('oauth_token'))
        putioFolder = self.convertFolder(client, self.conf('folder'))
        log.debug('putioFolder ID is %s', putioFolder)

        # It might be possible to call getFromPutio from the renamer; then this isn't needed.
        # Note callback_host is NOT our address, it's the internet host put.io can call to.
        callbackurl = None
        if self.conf('download'):
            # Bug fix: strip('/') was applied to the literal 'api_base' (a no-op),
            # not to the configured value. NOTE(review): api_base likely starts
            # with '/', which callback_host appears to rely on — confirm format.
            callbackurl = 'http://' + self.conf('callback_host') + '%sdownloader.putio.getfrom/' % Env.get('api_base')
        resp = client.Transfer.add_url(url, callback_url = callbackurl, parent_id = putioFolder)
        log.debug('resp is %s', resp.id)
        return self.downloadReturnId(resp.id)

    def test(self):
        """True when the oauth token can list files on put.io."""
        try:
            client = pio.Client(self.conf('oauth_token'))
            if client.File.list():
                return True
        except:
            log.info('Failed to get file listing, check OAUTH_TOKEN')
        return False

    def getAuthorizationUrl(self, host = None, **kwargs):
        """Build the couchpota.to OAuth bounce url that redirects back to us."""
        callback_url = cleanHost(host) + '%sdownloader.putio.credentials/' % (Env.get('api_base').lstrip('/'))
        log.debug('callback_url is %s', callback_url)

        target_url = self.oauth_authenticate + "?target=" + callback_url
        log.debug('target_url is %s', target_url)

        return {
            'success': True,
            'url': target_url,
        }

    def getCredentials(self, **kwargs):
        """Store the oauth token handed back by the authorize redirect."""
        try:
            oauth_token = kwargs.get('oauth')
        except:
            return 'redirect', Env.get('web_base') + 'settings/downloaders/'
        log.debug('oauth_token is: %s', oauth_token)
        self.conf('oauth_token', value = oauth_token)
        return 'redirect', Env.get('web_base') + 'settings/downloaders/'

    def getAllDownloadStatus(self, ids):
        """Report transfer statuses for the given put.io transfer ids."""
        log.debug('Checking putio download status.')
        client = pio.Client(self.conf('oauth_token'))

        transfers = client.Transfer.list()
        log.debug(transfers)

        release_downloads = ReleaseDownloadList(self)
        for t in transfers:
            if t.id in ids:

                log.debug('downloading list is %s', self.downloading_list)
                if t.status == "COMPLETED" and not self.conf('download'):
                    status = 'completed'

                # So check if we are trying to download something
                elif t.status == "COMPLETED" and self.conf('download'):
                    # Assume we are done
                    status = 'completed'
                    if not self.downloading_list:
                        now = datetime.datetime.utcnow()
                        date_time = datetime.datetime.strptime(t.finished_at, "%Y-%m-%dT%H:%M:%S")
                        # We need to make sure a race condition didn't happen
                        if (now - date_time) < datetime.timedelta(minutes = 5):
                            # 5 minutes haven't passed so we wait
                            status = 'busy'
                    else:
                        # If we have the file_id in the downloading_list mark it as busy
                        if str(t.file_id) in self.downloading_list:
                            status = 'busy'
                else:
                    status = 'busy'

                release_downloads.append({
                    'id': t.id,
                    'name': t.name,
                    'status': status,
                    'timeleft': t.estimated_time,
                })

        return release_downloads

    def putioDownloader(self, fid):
        """Fetch a completed put.io file into the configured download directory."""
        log.info('Put.io Real downloader called with file_id: %s', fid)
        client = pio.Client(self.conf('oauth_token'))

        log.debug('About to get file List')
        putioFolder = self.convertFolder(client, self.conf('folder'))
        log.debug('PutioFolderID is %s', putioFolder)
        files = client.File.list(parent_id = putioFolder)
        downloaddir = self.conf('download_dir')

        for f in files:
            if str(f.id) == str(fid):
                client.File.download(f, dest = downloaddir, delete_after_download = self.conf('delete_file'))
                # Once the download is complete we need to remove it from the running list.
                self.downloading_list.remove(fid)

        return True

    def getFromPutio(self, **kwargs):
        """API endpoint put.io calls back into; queues the real download."""
        try:
            file_id = str(kwargs.get('file_id'))
        except:
            return {
                'success': False,
            }

        log.info('Put.io Download has been called file_id is %s', file_id)
        if file_id not in self.downloading_list:
            self.downloading_list.append(file_id)
            fireEventAsync('putio.download', fid = file_id)
            return {
                'success': True,
            }

        return {
            'success': False,
        }
|
||||||
|
|
||||||
68
couchpotato/core/downloaders/putio/static/putio.js
Normal file
68
couchpotato/core/downloaders/putio/static/putio.js
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
// Settings-page widget that adds put.io register/unregister buttons.
var PutIODownloader = new Class({

	initialize: function(){
		var self = this;

		App.addEvent('loadSettings', self.addRegisterButton.bind(self));
	},

	addRegisterButton: function(){
		var self = this,
			setting_page = App.getPage('Settings');

		setting_page.addEvent('create', function(){

			var fieldset = setting_page.tabs.downloaders.groups.putio,
				loc = window.location;

			// Count how many text inputs already hold a value (token registered?)
			var putio_set = 0;
			fieldset.getElements('input[type=text]').each(function(input){
				putio_set += +(input.get('value') != '');
			});

			new Element('.ctrlHolder').adopt(

				// Unregister button: only shown when a token is already stored
				(putio_set > 0) ?
					[
						self.unregister = new Element('a.button.red', {
							'text': 'Unregister "'+fieldset.getElement('input[name*=oauth_token]').get('value')+'"',
							'events': {
								'click': function(){
									// Clear the stored token and remove the button pair
									fieldset.getElements('input[name*=oauth_token]').set('value', '').fireEvent('change');

									self.unregister.destroy();
									self.unregister_or.destroy();
								}
							}
						}),
						self.unregister_or = new Element('span[text=or]')
					]
				: null,

				// Register button: bounce through the OAuth flow
				new Element('a.button', {
					'text': putio_set > 0 ? 'Register a different account' : 'Register your put.io account',
					'events': {
						'click': function(){
							Api.request('downloader.putio.auth_url', {
								'data': {
									'host': loc.protocol + '//' + loc.hostname + (loc.port ? ':' + loc.port : '')
								},
								'onComplete': function(json){
									window.location = json.url;
								}
							});
						}
					}
				})

			).inject(fieldset.getElement('.test_button'), 'before');
		})

	}

});

window.addEvent('domready', function(){
	new PutIODownloader();
});
|
||||||
271
couchpotato/core/downloaders/qbittorrent_.py
Normal file
271
couchpotato/core/downloaders/qbittorrent_.py
Normal file
@@ -0,0 +1,271 @@
|
|||||||
|
from base64 import b16encode, b32decode
|
||||||
|
from hashlib import sha1
|
||||||
|
import os
|
||||||
|
|
||||||
|
from bencode import bencode, bdecode
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import sp
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from qbittorrent.client import QBittorrentClient
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'qBittorrent'
|
||||||
|
|
||||||
|
|
||||||
|
class qBittorrent(DownloaderBase):
    """Downloader plugin that sends releases to a qBittorrent instance via its web API."""

    protocol = ['torrent', 'torrent_magnet']
    qb = None  # cached QBittorrentClient connection, created lazily by connect()

    def __init__(self):
        super(qBittorrent, self).__init__()

    def connect(self):
        """Return a (cached) QBittorrentClient for the configured host.

        Credentials are only passed along when both username and password
        are configured.

        :return: QBittorrentClient
        """
        if self.qb is not None:
            return self.qb

        url = cleanHost(self.conf('host'), protocol = True, ssl = False)

        if self.conf('username') and self.conf('password'):
            self.qb = QBittorrentClient(
                url,
                username = self.conf('username'),
                password = self.conf('password')
            )
        else:
            self.qb = QBittorrentClient(url)

        return self.qb

    def test(self):
        """ Check if connection works
        :return: bool
        """
        return bool(self.connect())

    def download(self, data = None, media = None, filedata = None):
        """ Send a torrent/nzb file to the downloader

        :param data: dict returned from provider
            Contains the release information
        :param media: media dict with information
            Used for creating the filename when possible
        :param filedata: downloaded torrent/nzb filedata
            The file gets downloaded in the searcher and send to this function
            This is done to have failed checking before using the downloader, so the downloader
            doesn't need to worry about that
        :return: boolean
            One failure returns false, but the downloader should log its own errors
        """

        if not media: media = {}
        if not data: data = {}

        log.debug('Sending "%s" to qBittorrent.', (data.get('name')))

        if not self.connect():
            return False

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Magnet links are converted to a real .torrent file first
        if data.get('protocol') == 'torrent_magnet':
            filedata = self.magnetToTorrent(data.get('url'))

            if filedata is False:
                return False

            data['protocol'] = 'torrent'

        # The info-hash is used later in getAllDownloadStatus() to match this
        # release against qBittorrent's torrent list.
        # NOTE: the former "convert base 32 to hex" branch was removed as dead
        # code: hexdigest() always yields 40 lowercase hex characters, so the
        # len == 32 condition could never hold (and b32decode of a hex digest
        # would raise anyway).
        info = bdecode(filedata)["info"]
        torrent_hash = sha1(bencode(info)).hexdigest()

        # Send request to qBittorrent
        try:
            self.qb.add_file(filedata)

            return self.downloadReturnId(torrent_hash)
        except Exception as e:
            log.error('Failed to send torrent to qBittorrent: %s', e)
            return False

    def getTorrentStatus(self, torrent):
        """Map a qBittorrent torrent state to CouchPotato's status vocabulary.

        :return: one of 'seeding', 'completed', 'busy'
        """
        if torrent.state in ('uploading', 'queuedUP', 'stalledUP'):
            return 'seeding'

        if torrent.progress == 1:
            return 'completed'

        return 'busy'

    def getAllDownloadStatus(self, ids):
        """ Get status of all active downloads

        :param ids: list of (mixed) downloader ids
            Used to match the releases for this downloader as there could be
            other downloaders active that it should ignore
        :return: list of releases
        """

        log.debug('Checking qBittorrent download status.')

        if not self.connect():
            return []

        try:
            torrents = self.qb.get_torrents()

            release_downloads = ReleaseDownloadList(self)

            for torrent in torrents:
                if torrent.hash in ids:
                    torrent.update_general()  # get extra info
                    torrent_filelist = torrent.get_files()

                    torrent_files = []
                    torrent_dir = os.path.join(torrent.save_path, torrent.name)

                    if os.path.isdir(torrent_dir):
                        torrent.save_path = torrent_dir

                    if len(torrent_filelist) > 1 and os.path.isdir(torrent_dir):  # multi file torrent, path.isdir check makes sure we're not in the root download folder
                        for root, _, files in os.walk(torrent.save_path):
                            for f in files:
                                torrent_files.append(sp(os.path.join(root, f)))

                    else:  # multi or single file placed directly in torrent.save_path
                        for f in torrent_filelist:
                            file_path = os.path.join(torrent.save_path, f.name)
                            if os.path.isfile(file_path):
                                torrent_files.append(sp(file_path))

                    release_downloads.append({
                        'id': torrent.hash,
                        'name': torrent.name,
                        'status': self.getTorrentStatus(torrent),
                        'seed_ratio': torrent.ratio,
                        'original_status': torrent.state,
                        'timeleft': torrent.progress * 100 if torrent.progress else -1,  # percentage
                        'folder': sp(torrent.save_path),
                        'files': torrent_files
                    })

            return release_downloads

        except Exception as e:
            log.error('Failed to get status from qBittorrent: %s', e)
            return []

    def pause(self, release_download, pause = True):
        """Pause (or resume when pause is False) the matching torrent."""
        if not self.connect():
            return False

        # NOTE(review): uses get_torrent() here while processComplete() uses
        # find_torrent() — verify against the qbittorrent client library.
        torrent = self.qb.get_torrent(release_download['id'])
        if torrent is None:
            return False

        if pause:
            return torrent.pause()
        return torrent.resume()

    def removeFailed(self, release_download):
        """Remove a failed download, including its files."""
        log.info('%s failed downloading, deleting...', release_download['name'])
        return self.processComplete(release_download, delete_files = True)

    def processComplete(self, release_download, delete_files):
        """Remove the torrent from qBittorrent, optionally deleting its data.

        :return: bool, True when the torrent was found and removed
        """
        log.debug('Requesting qBittorrent to remove the torrent %s%s.',
                  (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

        if not self.connect():
            return False

        torrent = self.qb.find_torrent(release_download['id'])

        if torrent is None:
            return False

        if delete_files:
            torrent.delete()  # deletes torrent with data
        else:
            torrent.remove()  # just removes the torrent, doesn't delete data

        return True
|
||||||
|
|
||||||
|
|
||||||
|
# Settings definition for the qBittorrent downloader tab.
config = [{
    'name': 'qbittorrent',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'qbittorrent',
            'label': 'qbittorrent',
            'description': '',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'http://localhost:8080/',
                    'description': 'RPC Communication URI. Usually <strong>http://localhost:8080/</strong>'
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'default': False,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Remove the torrent after it finishes seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
from .main import rTorrent
|
|
||||||
|
|
||||||
def start():
    """Plugin entry point: build and return a new rTorrent downloader."""
    return rTorrent()
|
|
||||||
|
|
||||||
# Settings definition for the rTorrent downloader tab (legacy url-based layout).
config = [{
    'name': 'rtorrent',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'rtorrent',
            'label': 'rTorrent',
            'description': '',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'url',
                    'default': 'http://localhost:80/RPC2',
                    'description': 'XML-RPC Endpoint URI. Usually <strong>scgi://localhost:5000</strong> '
                                   'or <strong>http://localhost:80/RPC2</strong>'
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'label',
                    'description': 'Label to apply on added torrents.',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Download to this directory. Keep empty for default rTorrent download directory.',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'default': False,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Remove the torrent after it finishes seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
from base64 import b16encode, b32decode
|
|
||||||
from bencode import bencode, bdecode
|
|
||||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
|
||||||
from couchpotato.core.helpers.encoding import sp
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from datetime import timedelta
|
|
||||||
from hashlib import sha1
|
|
||||||
from rtorrent import RTorrent
|
|
||||||
from rtorrent.err import MethodError
|
|
||||||
import os
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class rTorrent(Downloader):
    """Downloader plugin that talks to rTorrent over XML-RPC.

    Uses per-provider ratio groups ('cp_<provider>') to enforce seed ratios.
    """

    protocol = ['torrent', 'torrent_magnet']
    rt = None  # cached RTorrent connection, created lazily by connect()

    def connect(self):
        """Return a (cached) RTorrent connection, or False when not configured."""
        # Already connected?
        if self.rt is not None:
            return self.rt

        # Ensure url is set
        if not self.conf('url'):
            log.error('Config properties are not filled in correctly, url is missing.')
            return False

        if self.conf('username') and self.conf('password'):
            self.rt = RTorrent(
                self.conf('url'),
                self.conf('username'),
                self.conf('password')
            )
        else:
            self.rt = RTorrent(self.conf('url'))

        return self.rt

    def _update_provider_group(self, name, data):
        """Create/update the rTorrent ratio group for a provider.

        :param name: group name, e.g. 'cp_torrentleech'
        :param data: release dict; 'seed_ratio' enables the group,
            'seed_time' is not supported and is ignored
        :return: bool, False on any failure
        """
        if data.get('seed_time'):
            log.info('seeding time ignored, not supported')

        if not name:
            return False

        if not self.connect():
            return False

        views = self.rt.get_views()

        if name not in views:
            self.rt.create_group(name)

        group = self.rt.get_group(name)

        try:
            if data.get('seed_ratio'):
                # rTorrent expresses ratios in percent (e.g. 2.0 -> 200)
                ratio = int(float(data.get('seed_ratio')) * 100)
                log.debug('Updating provider ratio to %s, group name: %s', (ratio, name))

                # Explicitly set all group options to ensure it is setup correctly
                group.set_upload('1M')
                group.set_min(ratio)
                group.set_max(ratio)
                group.set_command('d.stop')
                group.enable()
            else:
                # Reset group action and disable it
                group.set_command()
                group.disable()
        except MethodError as err:
            # was `except MethodError, err:` — py2-only comma syntax,
            # normalized to the `as` form used elsewhere in the codebase
            log.error('Unable to set group options: %s', err.msg)
            return False

        return True

    def download(self, data = None, media = None, filedata = None):
        """Send a torrent (or magnet, converted first) to rTorrent.

        :return: download id on success, False on failure
        """
        if not media: media = {}
        if not data: data = {}

        log.debug('Sending "%s" to rTorrent.', (data.get('name')))

        if not self.connect():
            return False

        group_name = 'cp_' + data.get('provider').lower()
        if not self._update_provider_group(group_name, data):
            return False

        torrent_params = {}
        if self.conf('label'):
            torrent_params['label'] = self.conf('label')

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Try download magnet torrents
        if data.get('protocol') == 'torrent_magnet':
            filedata = self.magnetToTorrent(data.get('url'))

            if filedata is False:
                return False

            data['protocol'] = 'torrent'

        info = bdecode(filedata)["info"]
        torrent_hash = sha1(bencode(info)).hexdigest().upper()

        # Convert base 32 to hex
        # NOTE(review): hexdigest() is always 40 chars, so this branch looks
        # unreachable — kept for parity with the original behavior.
        if len(torrent_hash) == 32:
            torrent_hash = b16encode(b32decode(torrent_hash))

        # Send request to rTorrent
        try:
            # Send torrent to rTorrent
            torrent = self.rt.load_torrent(filedata)

            if not torrent:
                log.error('Unable to find the torrent, did it fail to load?')
                return False

            # Set label
            if self.conf('label'):
                torrent.set_custom(1, self.conf('label'))

            if self.conf('directory'):
                torrent.set_directory(self.conf('directory'))

            # Set Ratio Group
            torrent.set_visible(group_name)

            # Start torrent
            if not self.conf('paused', default = 0):
                torrent.start()

            return self.downloadReturnId(torrent_hash)
        except Exception as err:
            log.error('Failed to send torrent to rTorrent: %s', err)
            return False

    def getAllDownloadStatus(self, ids):
        """Return ReleaseDownloadList entries for torrents whose hash is in ids."""
        log.debug('Checking rTorrent download status.')

        if not self.connect():
            return []

        try:
            torrents = self.rt.get_torrents()

            release_downloads = ReleaseDownloadList(self)

            for torrent in torrents:
                if torrent.info_hash in ids:
                    torrent_files = []
                    for file_item in torrent.get_files():
                        torrent_files.append(sp(os.path.join(torrent.directory, file_item.path)))

                    status = 'busy'
                    if torrent.complete:
                        if torrent.active:
                            status = 'seeding'
                        else:
                            status = 'completed'

                    release_downloads.append({
                        'id': torrent.info_hash,
                        'name': torrent.name,
                        'status': status,
                        'seed_ratio': torrent.ratio,
                        'original_status': torrent.state,
                        'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
                        'folder': sp(torrent.directory),
                        'files': '|'.join(torrent_files)
                    })

            return release_downloads

        except Exception as err:
            log.error('Failed to get status from rTorrent: %s', err)
            return []

    def pause(self, release_download, pause = True):
        """Pause (or resume when pause is False) the matching torrent."""
        if not self.connect():
            return False

        torrent = self.rt.find_torrent(release_download['id'])
        if torrent is None:
            return False

        if pause:
            return torrent.pause()
        return torrent.resume()

    def removeFailed(self, release_download):
        """Remove a failed download, including its files."""
        log.info('%s failed downloading, deleting...', release_download['name'])
        return self.processComplete(release_download, delete_files = True)

    def processComplete(self, release_download, delete_files):
        """Remove the torrent from rTorrent, optionally deleting its data.

        :return: bool, True when the torrent was found and removed
        """
        log.debug('Requesting rTorrent to remove the torrent %s%s.',
                  (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

        if not self.connect():
            return False

        torrent = self.rt.find_torrent(release_download['id'])

        if torrent is None:
            return False

        if delete_files:
            for file_item in torrent.get_files():  # will only delete files, not dir/sub-dir
                os.unlink(os.path.join(torrent.directory, file_item.path))

            if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
                # Remove empty directories bottom up
                try:
                    for path, _, _ in os.walk(torrent.directory, topdown = False):
                        os.rmdir(path)
                except OSError:
                    log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)

        torrent.erase()  # just removes the torrent, doesn't delete data

        return True
|
|
||||||
361
couchpotato/core/downloaders/rtorrent_.py
Normal file
361
couchpotato/core/downloaders/rtorrent_.py
Normal file
@@ -0,0 +1,361 @@
|
|||||||
|
from base64 import b16encode, b32decode
|
||||||
|
from datetime import timedelta
|
||||||
|
from hashlib import sha1
|
||||||
|
from urlparse import urlparse
|
||||||
|
import os
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.event import addEvent
|
||||||
|
from couchpotato.core.helpers.encoding import sp
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost, splitString
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from bencode import bencode, bdecode
|
||||||
|
from rtorrent import RTorrent
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'rTorrent'
|
||||||
|
|
||||||
|
|
||||||
|
class rTorrent(DownloaderBase):
    """Downloader plugin that talks to rTorrent over XML-RPC / SCGI / httprpc."""

    protocol = ['torrent', 'torrent_magnet']
    rt = None         # cached RTorrent connection, created lazily by connect()
    error_msg = ''    # last connection error, surfaced by test()

    # Migration url to host options
    def __init__(self):
        super(rTorrent, self).__init__()

        addEvent('app.load', self.migrate)
        addEvent('setting.save.rtorrent.*.after', self.settingsChanged)

    def migrate(self):
        """One-time migration of the legacy 'url' setting to host/ssl/rpc_url."""

        url = self.conf('url')
        if url:
            host_split = splitString(url.split('://')[-1], split_on = '/')

            self.conf('ssl', value = url.startswith('https'))
            self.conf('host', value = host_split[0].strip())
            self.conf('rpc_url', value = '/'.join(host_split[1:]))

            self.deleteConf('url')

    def settingsChanged(self):
        """Drop the cached connection so changed settings take effect."""
        # Reset active connection if settings have changed
        if self.rt:
            log.debug('Settings have changed, closing active connection')

        self.rt = None
        return True

    def connect(self, reconnect = False):
        """Return a (cached) RTorrent connection, or None on failure.

        :param reconnect: force a fresh connection even when one is cached
        """
        # Already connected?
        if not reconnect and self.rt is not None:
            return self.rt

        url = cleanHost(self.conf('host'), protocol = True, ssl = self.conf('ssl'))

        # Automatically add '+https' to 'httprpc' protocol if SSL is enabled
        if self.conf('ssl') and url.startswith('httprpc://'):
            url = url.replace('httprpc://', 'httprpc+https://')

        parsed = urlparse(url)

        # rpc_url is only used on http/https scgi pass-through
        if parsed.scheme in ['http', 'https']:
            url += self.conf('rpc_url')

        self.rt = RTorrent(
            url,
            self.conf('username'),
            self.conf('password')
        )

        self.error_msg = ''
        try:
            self.rt._verify_conn()
        except AssertionError as e:
            self.error_msg = e.message  # py2-only attribute; file imports urlparse (py2)
            self.rt = None

        return self.rt

    def test(self):
        """ Check if connection works
        :return: bool (optionally with an error message tuple on failure)
        """

        if self.connect(True):
            return True

        if self.error_msg:
            return False, 'Connection failed: ' + self.error_msg

        return False

    def download(self, data = None, media = None, filedata = None):
        """ Send a torrent/nzb file to the downloader

        :param data: dict returned from provider
            Contains the release information
        :param media: media dict with information
            Used for creating the filename when possible
        :param filedata: downloaded torrent/nzb filedata
            The file gets downloaded in the searcher and send to this function
            This is done to have failed checking before using the downloader, so the downloader
            doesn't need to worry about that
        :return: boolean
            One failure returns false, but the downloader should log its own errors
        """

        if not media: media = {}
        if not data: data = {}

        log.debug('Sending "%s" to rTorrent.', (data.get('name')))

        if not self.connect():
            return False

        torrent_params = {}
        if self.conf('label'):
            torrent_params['label'] = self.conf('label')

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Try download magnet torrents
        if data.get('protocol') == 'torrent_magnet':
            filedata = self.magnetToTorrent(data.get('url'))

            if filedata is False:
                return False

            data['protocol'] = 'torrent'

        info = bdecode(filedata)["info"]
        torrent_hash = sha1(bencode(info)).hexdigest().upper()

        # Convert base 32 to hex
        # NOTE(review): hexdigest() is always 40 chars, so this branch looks
        # unreachable — kept for parity with the original behavior.
        if len(torrent_hash) == 32:
            torrent_hash = b16encode(b32decode(torrent_hash))

        # Send request to rTorrent
        try:
            # Send torrent to rTorrent
            torrent = self.rt.load_torrent(filedata, verify_retries=10)

            if not torrent:
                log.error('Unable to find the torrent, did it fail to load?')
                return False

            # Set label
            if self.conf('label'):
                torrent.set_custom(1, self.conf('label'))

            if self.conf('directory'):
                torrent.set_directory(self.conf('directory'))

            # Start torrent
            if not self.conf('paused', default = 0):
                torrent.start()

            return self.downloadReturnId(torrent_hash)
        except Exception as err:
            log.error('Failed to send torrent to rTorrent: %s', err)
            return False

    def getTorrentStatus(self, torrent):
        """Map an rTorrent torrent to CouchPotato's status vocabulary.

        :return: one of 'busy', 'seeding', 'completed'
        """
        if not torrent.complete:
            return 'busy'

        if torrent.open:
            return 'seeding'

        return 'completed'

    def getAllDownloadStatus(self, ids):
        """ Get status of all active downloads

        :param ids: list of (mixed) downloader ids
            Used to match the releases for this downloader as there could be
            other downloaders active that it should ignore
        :return: list of releases
        """

        log.debug('Checking rTorrent download status.')

        if not self.connect():
            return []

        try:
            torrents = self.rt.get_torrents()

            release_downloads = ReleaseDownloadList(self)

            for torrent in torrents:
                if torrent.info_hash in ids:
                    torrent_directory = os.path.normpath(torrent.directory)
                    torrent_files = []

                    # renamed from `file` to avoid shadowing the builtin
                    for file_item in torrent.get_files():
                        if not os.path.normpath(file_item.path).startswith(torrent_directory):
                            file_path = os.path.join(torrent_directory, file_item.path.lstrip('/'))
                        else:
                            file_path = file_item.path

                        torrent_files.append(sp(file_path))

                    release_downloads.append({
                        'id': torrent.info_hash,
                        'name': torrent.name,
                        'status': self.getTorrentStatus(torrent),
                        'seed_ratio': torrent.ratio,
                        'original_status': torrent.state,
                        'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
                        'folder': sp(torrent.directory),
                        'files': torrent_files
                    })

            return release_downloads

        except Exception as err:
            log.error('Failed to get status from rTorrent: %s', err)
            return []

    def pause(self, release_download, pause = True):
        """Pause (or resume when pause is False) the matching torrent."""
        if not self.connect():
            return False

        torrent = self.rt.find_torrent(release_download['id'])
        if torrent is None:
            return False

        if pause:
            return torrent.pause()
        return torrent.resume()

    def removeFailed(self, release_download):
        """Remove a failed download, including its files."""
        log.info('%s failed downloading, deleting...', release_download['name'])
        return self.processComplete(release_download, delete_files = True)

    def processComplete(self, release_download, delete_files):
        """Remove the torrent from rTorrent, optionally deleting its data.

        :return: bool, True when the torrent was found and removed
        """
        log.debug('Requesting rTorrent to remove the torrent %s%s.',
                  (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

        if not self.connect():
            return False

        torrent = self.rt.find_torrent(release_download['id'])

        if torrent is None:
            return False

        if delete_files:
            for file_item in torrent.get_files():  # will only delete files, not dir/sub-dir
                os.unlink(os.path.join(torrent.directory, file_item.path))

            if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
                # Remove empty directories bottom up
                try:
                    for path, _, _ in os.walk(sp(torrent.directory), topdown = False):
                        os.rmdir(path)
                except OSError:
                    log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)

        torrent.erase()  # just removes the torrent, doesn't delete data

        return True
|
||||||
|
|
||||||
|
|
||||||
|
# Settings definition for the rTorrent downloader tab (host/ssl/rpc_url layout).
config = [{
    'name': 'rtorrent',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'rtorrent',
            'label': 'rTorrent',
            'description': '',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'localhost:80',
                    'description': 'RPC Communication URI. Usually <strong>scgi://localhost:5000</strong>, '
                                   '<strong>httprpc://localhost/rutorrent</strong> or <strong>localhost:80</strong>'
                },
                {
                    'name': 'ssl',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
                },
                {
                    'name': 'rpc_url',
                    'type': 'string',
                    'default': 'RPC2',
                    'advanced': True,
                    'description': 'Change if your RPC mount is at a different path.',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'label',
                    'description': 'Label to apply on added torrents.',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Download to this directory. Keep empty for default rTorrent download directory.',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'default': False,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Remove the torrent after it finishes seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||||
@@ -1,22 +1,41 @@
|
|||||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
|
||||||
from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
|
|
||||||
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from couchpotato.environment import Env
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from urllib2 import URLError
|
from urllib2 import URLError
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Sabnzbd'
|
||||||
|
|
||||||
class Sabnzbd(Downloader):
|
|
||||||
|
class Sabnzbd(DownloaderBase):
|
||||||
|
|
||||||
protocol = ['nzb']
|
protocol = ['nzb']
|
||||||
|
|
||||||
def download(self, data = None, media = None, filedata = None):
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
"""
|
||||||
|
Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
if not media: media = {}
|
if not media: media = {}
|
||||||
if not data: data = {}
|
if not data: data = {}
|
||||||
|
|
||||||
@@ -64,7 +83,39 @@ class Sabnzbd(Downloader):
|
|||||||
log.error('Error getting data from SABNZBd: %s', sab_data)
|
log.error('Error getting data from SABNZBd: %s', sab_data)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Check if connection works
|
||||||
|
Return message if an old version of SAB is used
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
sab_data = self.call({
|
||||||
|
'mode': 'version',
|
||||||
|
})
|
||||||
|
v = sab_data.split('.')
|
||||||
|
if int(v[0]) == 0 and int(v[1]) < 7:
|
||||||
|
return False, 'Your Sabnzbd client is too old, please update to newest version.'
|
||||||
|
|
||||||
|
# the version check will work even with wrong api key, so we need the next check as well
|
||||||
|
sab_data = self.call({
|
||||||
|
'mode': 'qstatus',
|
||||||
|
})
|
||||||
|
if not sab_data:
|
||||||
|
return False
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
def getAllDownloadStatus(self, ids):
|
def getAllDownloadStatus(self, ids):
|
||||||
|
""" Get status of all active downloads
|
||||||
|
|
||||||
|
:param ids: list of (mixed) downloader ids
|
||||||
|
Used to match the releases for this downloader as there could be
|
||||||
|
other downloaders active that it should ignore
|
||||||
|
:return: list of releases
|
||||||
|
"""
|
||||||
|
|
||||||
log.debug('Checking SABnzbd download status.')
|
log.debug('Checking SABnzbd download status.')
|
||||||
|
|
||||||
@@ -95,7 +146,7 @@ class Sabnzbd(Downloader):
|
|||||||
status = 'busy'
|
status = 'busy'
|
||||||
if 'ENCRYPTED / ' in nzb['filename']:
|
if 'ENCRYPTED / ' in nzb['filename']:
|
||||||
status = 'failed'
|
status = 'failed'
|
||||||
|
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
'id': nzb['nzo_id'],
|
'id': nzb['nzo_id'],
|
||||||
'name': nzb['filename'],
|
'name': nzb['filename'],
|
||||||
@@ -112,7 +163,7 @@ class Sabnzbd(Downloader):
|
|||||||
status = 'failed'
|
status = 'failed'
|
||||||
elif nzb['status'] == 'Completed':
|
elif nzb['status'] == 'Completed':
|
||||||
status = 'completed'
|
status = 'completed'
|
||||||
|
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
'id': nzb['nzo_id'],
|
'id': nzb['nzo_id'],
|
||||||
'name': nzb['name'],
|
'name': nzb['name'],
|
||||||
@@ -165,9 +216,9 @@ class Sabnzbd(Downloader):
|
|||||||
|
|
||||||
def call(self, request_params, use_json = True, **kwargs):
|
def call(self, request_params, use_json = True, **kwargs):
|
||||||
|
|
||||||
url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
|
url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
|
||||||
'apikey': self.conf('api_key'),
|
'apikey': self.conf('api_key'),
|
||||||
'output': 'json'
|
'output': 'json'
|
||||||
}))
|
}))
|
||||||
|
|
||||||
data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
|
data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
|
||||||
@@ -181,3 +232,77 @@ class Sabnzbd(Downloader):
|
|||||||
else:
|
else:
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'sabnzbd',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'sabnzbd',
|
||||||
|
'label': 'Sabnzbd',
|
||||||
|
'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> (0.7+) to download NZBs.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'nzb',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'localhost:8080',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'ssl',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'api_key',
|
||||||
|
'label': 'Api Key',
|
||||||
|
'description': 'Used for all calls to Sabnzbd.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'category',
|
||||||
|
'label': 'Category',
|
||||||
|
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'priority',
|
||||||
|
'label': 'Priority',
|
||||||
|
'type': 'dropdown',
|
||||||
|
'default': '0',
|
||||||
|
'advanced': True,
|
||||||
|
'values': [('Paused', -2), ('Low', -1), ('Normal', 0), ('High', 1), ('Forced', 2)],
|
||||||
|
'description': 'Add to the queue with this priority.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'manual',
|
||||||
|
'default': False,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'remove_complete',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Remove NZB',
|
||||||
|
'default': False,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Remove the NZB from history after it completed.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_failed',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Delete a release after the download has failed.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
from .main import Sabnzbd
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Sabnzbd()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'sabnzbd',
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'list': 'download_providers',
|
|
||||||
'name': 'sabnzbd',
|
|
||||||
'label': 'Sabnzbd',
|
|
||||||
'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> (0.7+) to download NZBs.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'enabled',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'enabler',
|
|
||||||
'radio_group': 'nzb',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'host',
|
|
||||||
'default': 'localhost:8080',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'api_key',
|
|
||||||
'label': 'Api Key',
|
|
||||||
'description': 'Used for all calls to Sabnzbd.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'category',
|
|
||||||
'label': 'Category',
|
|
||||||
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'priority',
|
|
||||||
'label': 'Priority',
|
|
||||||
'type': 'dropdown',
|
|
||||||
'default': '0',
|
|
||||||
'advanced': True,
|
|
||||||
'values': [('Paused', -2), ('Low', -1), ('Normal', 0), ('High', 1), ('Forced', 2)],
|
|
||||||
'description': 'Add to the queue with this priority.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'manual',
|
|
||||||
'default': False,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'remove_complete',
|
|
||||||
'advanced': True,
|
|
||||||
'label': 'Remove NZB',
|
|
||||||
'default': False,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Remove the NZB from history after it completed.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_failed',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Delete a release after the download has failed.',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
@@ -1,19 +1,39 @@
|
|||||||
from couchpotato.core.downloaders.base import Downloader
|
|
||||||
from couchpotato.core.helpers.encoding import isInt
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
import json
|
import json
|
||||||
import requests
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||||
|
from couchpotato.core.helpers.encoding import isInt
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
import requests
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Synology'
|
||||||
|
|
||||||
class Synology(Downloader):
|
|
||||||
|
class Synology(DownloaderBase):
|
||||||
|
|
||||||
protocol = ['nzb', 'torrent', 'torrent_magnet']
|
protocol = ['nzb', 'torrent', 'torrent_magnet']
|
||||||
status_support = False
|
status_support = False
|
||||||
|
|
||||||
def download(self, data = None, media = None, filedata = None):
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
"""
|
||||||
|
Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
if not media: media = {}
|
if not media: media = {}
|
||||||
if not data: data = {}
|
if not data: data = {}
|
||||||
|
|
||||||
@@ -21,14 +41,14 @@ class Synology(Downloader):
|
|||||||
log.error('Sending "%s" (%s) to Synology.', (data['name'], data['protocol']))
|
log.error('Sending "%s" (%s) to Synology.', (data['name'], data['protocol']))
|
||||||
|
|
||||||
# Load host from config and split out port.
|
# Load host from config and split out port.
|
||||||
host = self.conf('host').split(':')
|
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||||
if not isInt(host[1]):
|
if not isInt(host[1]):
|
||||||
log.error('Config properties are not filled in correctly, port is missing.')
|
log.error('Config properties are not filled in correctly, port is missing.')
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Send request to Synology
|
# Send request to Synology
|
||||||
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
|
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'), self.conf('destination'))
|
||||||
if data['protocol'] == 'torrent_magnet':
|
if data['protocol'] == 'torrent_magnet':
|
||||||
log.info('Adding torrent URL %s', data['url'])
|
log.info('Adding torrent URL %s', data['url'])
|
||||||
response = srpc.create_task(url = data['url'])
|
response = srpc.create_task(url = data['url'])
|
||||||
@@ -44,6 +64,20 @@ class Synology(Downloader):
|
|||||||
finally:
|
finally:
|
||||||
return self.downloadReturnId('') if response else False
|
return self.downloadReturnId('') if response else False
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Check if connection works
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||||
|
try:
|
||||||
|
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
|
||||||
|
test_result = srpc.test()
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return test_result
|
||||||
|
|
||||||
def getEnabledProtocol(self):
|
def getEnabledProtocol(self):
|
||||||
if self.conf('use_for') == 'both':
|
if self.conf('use_for') == 'both':
|
||||||
return super(Synology, self).getEnabledProtocol()
|
return super(Synology, self).getEnabledProtocol()
|
||||||
@@ -64,18 +98,21 @@ class Synology(Downloader):
|
|||||||
return super(Synology, self).isEnabled(manual, data) and\
|
return super(Synology, self).isEnabled(manual, data) and\
|
||||||
((self.conf('use_for') in for_protocol))
|
((self.conf('use_for') in for_protocol))
|
||||||
|
|
||||||
|
|
||||||
class SynologyRPC(object):
|
class SynologyRPC(object):
|
||||||
|
|
||||||
"""SynologyRPC lite library"""
|
"""SynologyRPC lite library"""
|
||||||
|
|
||||||
def __init__(self, host = 'localhost', port = 5000, username = None, password = None):
|
def __init__(self, host = 'localhost', port = 5000, username = None, password = None, destination = None):
|
||||||
|
|
||||||
super(SynologyRPC, self).__init__()
|
super(SynologyRPC, self).__init__()
|
||||||
|
|
||||||
self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port)
|
self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port)
|
||||||
self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port)
|
self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port)
|
||||||
|
self.sid = None
|
||||||
self.username = username
|
self.username = username
|
||||||
self.password = password
|
self.password = password
|
||||||
|
self.destination = destination
|
||||||
self.session_name = 'DownloadStation'
|
self.session_name = 'DownloadStation'
|
||||||
|
|
||||||
def _login(self):
|
def _login(self):
|
||||||
@@ -100,17 +137,17 @@ class SynologyRPC(object):
|
|||||||
def _req(self, url, args, files = None):
|
def _req(self, url, args, files = None):
|
||||||
response = {'success': False}
|
response = {'success': False}
|
||||||
try:
|
try:
|
||||||
req = requests.post(url, data = args, files = files)
|
req = requests.post(url, data = args, files = files, verify = False)
|
||||||
req.raise_for_status()
|
req.raise_for_status()
|
||||||
response = json.loads(req.text)
|
response = json.loads(req.text)
|
||||||
if response['success']:
|
if response['success']:
|
||||||
log.info('Synology action successfull')
|
log.info('Synology action successfull')
|
||||||
return response
|
return response
|
||||||
except requests.ConnectionError, err:
|
except requests.ConnectionError as err:
|
||||||
log.error('Synology connection error, check your config %s', err)
|
log.error('Synology connection error, check your config %s', err)
|
||||||
except requests.HTTPError, err:
|
except requests.HTTPError as err:
|
||||||
log.error('SynologyRPC HTTPError: %s', err)
|
log.error('SynologyRPC HTTPError: %s', err)
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Exception: %s', err)
|
log.error('Exception: %s', err)
|
||||||
finally:
|
finally:
|
||||||
return response
|
return response
|
||||||
@@ -128,6 +165,10 @@ class SynologyRPC(object):
|
|||||||
'version': '1',
|
'version': '1',
|
||||||
'method': 'create',
|
'method': 'create',
|
||||||
'_sid': self.sid}
|
'_sid': self.sid}
|
||||||
|
|
||||||
|
if self.destination and len(self.destination) > 0:
|
||||||
|
args['destination'] = self.destination
|
||||||
|
|
||||||
if url:
|
if url:
|
||||||
log.info('Login success, adding torrent URI')
|
log.info('Login success, adding torrent URI')
|
||||||
args['uri'] = url
|
args['uri'] = url
|
||||||
@@ -145,3 +186,60 @@ class SynologyRPC(object):
|
|||||||
self._logout()
|
self._logout()
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
return bool(self._login())
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'synology',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'synology',
|
||||||
|
'label': 'Synology',
|
||||||
|
'description': 'Use <a href="http://www.synology.com/dsm/home_home_applications_download_station.php" target="_blank">Synology Download Station</a> to download.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'nzb,torrent',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'localhost:5000',
|
||||||
|
'description': 'Hostname with port. Usually <strong>localhost:5000</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'destination',
|
||||||
|
'description': 'Specify <strong>existing</strong> destination share to where your files will be downloaded, usually <strong>Downloads</strong>',
|
||||||
|
'advanced': True,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'use_for',
|
||||||
|
'label': 'Use for',
|
||||||
|
'default': 'both',
|
||||||
|
'type': 'dropdown',
|
||||||
|
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'manual',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
from .main import Synology
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Synology()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'synology',
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'list': 'download_providers',
|
|
||||||
'name': 'synology',
|
|
||||||
'label': 'Synology',
|
|
||||||
'description': 'Use <a href="http://www.synology.com/dsm/home_home_applications_download_station.php" target="_blank">Synology Download Station</a> to download.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'enabled',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'enabler',
|
|
||||||
'radio_group': 'nzb,torrent',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'host',
|
|
||||||
'default': 'localhost:5000',
|
|
||||||
'description': 'Hostname with port. Usually <strong>localhost:5000</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'username',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'password',
|
|
||||||
'type': 'password',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'use_for',
|
|
||||||
'label': 'Use for',
|
|
||||||
'default': 'both',
|
|
||||||
'type': 'dropdown',
|
|
||||||
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'manual',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
@@ -1,8 +1,4 @@
|
|||||||
from base64 import b64encode
|
from base64 import b64encode
|
||||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
|
||||||
from couchpotato.core.helpers.encoding import isInt, sp
|
|
||||||
from couchpotato.core.helpers.variable import tryInt, tryFloat
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
import httplib
|
import httplib
|
||||||
import json
|
import json
|
||||||
@@ -10,10 +6,18 @@ import os.path
|
|||||||
import re
|
import re
|
||||||
import urllib2
|
import urllib2
|
||||||
|
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import isInt, sp
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'Transmission'
|
||||||
|
|
||||||
class Transmission(Downloader):
|
|
||||||
|
class Transmission(DownloaderBase):
|
||||||
|
|
||||||
protocol = ['torrent', 'torrent_magnet']
|
protocol = ['torrent', 'torrent_magnet']
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
@@ -21,17 +25,30 @@ class Transmission(Downloader):
|
|||||||
|
|
||||||
def connect(self):
|
def connect(self):
|
||||||
# Load host from config and split out port.
|
# Load host from config and split out port.
|
||||||
host = self.conf('host').split(':')
|
host = cleanHost(self.conf('host')).rstrip('/').rsplit(':', 1)
|
||||||
if not isInt(host[1]):
|
if not isInt(host[1]):
|
||||||
log.error('Config properties are not filled in correctly, port is missing.')
|
log.error('Config properties are not filled in correctly, port is missing.')
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not self.trpc:
|
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))
|
||||||
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url'), username = self.conf('username'), password = self.conf('password'))
|
|
||||||
|
|
||||||
return self.trpc
|
return self.trpc
|
||||||
|
|
||||||
def download(self, data = None, media = None, filedata = None):
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
"""
|
||||||
|
Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
if not media: media = {}
|
if not media: media = {}
|
||||||
if not data: data = {}
|
if not data: data = {}
|
||||||
|
|
||||||
@@ -76,14 +93,32 @@ class Transmission(Downloader):
|
|||||||
log.error('Failed sending torrent to Transmission')
|
log.error('Failed sending torrent to Transmission')
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
data = remote_torrent.get('torrent-added') or remote_torrent.get('torrent-duplicate')
|
||||||
|
|
||||||
# Change settings of added torrents
|
# Change settings of added torrents
|
||||||
if torrent_params:
|
if torrent_params:
|
||||||
self.trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)
|
self.trpc.set_torrent(data['hashString'], torrent_params)
|
||||||
|
|
||||||
log.info('Torrent sent to Transmission successfully.')
|
log.info('Torrent sent to Transmission successfully.')
|
||||||
return self.downloadReturnId(remote_torrent['torrent-added']['hashString'])
|
return self.downloadReturnId(data['hashString'])
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Check if connection works
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.connect() and self.trpc.get_session():
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
def getAllDownloadStatus(self, ids):
|
def getAllDownloadStatus(self, ids):
|
||||||
|
""" Get status of all active downloads
|
||||||
|
|
||||||
|
:param ids: list of (mixed) downloader ids
|
||||||
|
Used to match the releases for this downloader as there could be
|
||||||
|
other downloaders active that it should ignore
|
||||||
|
:return: list of releases
|
||||||
|
"""
|
||||||
|
|
||||||
log.debug('Checking Transmission download status.')
|
log.debug('Checking Transmission download status.')
|
||||||
|
|
||||||
@@ -96,6 +131,7 @@ class Transmission(Downloader):
|
|||||||
'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
|
'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
|
||||||
}
|
}
|
||||||
|
|
||||||
|
session = self.trpc.get_session()
|
||||||
queue = self.trpc.get_alltorrents(return_params)
|
queue = self.trpc.get_alltorrents(return_params)
|
||||||
if not (queue and queue.get('torrents')):
|
if not (queue and queue.get('torrents')):
|
||||||
log.debug('Nothing in queue or error')
|
log.debug('Nothing in queue or error')
|
||||||
@@ -103,21 +139,28 @@ class Transmission(Downloader):
|
|||||||
|
|
||||||
for torrent in queue['torrents']:
|
for torrent in queue['torrents']:
|
||||||
if torrent['hashString'] in ids:
|
if torrent['hashString'] in ids:
|
||||||
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s',
|
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s',
|
||||||
(torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished']))
|
(torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished'], session['incomplete-dir-enabled'], session['incomplete-dir']))
|
||||||
|
|
||||||
torrent_files = []
|
|
||||||
for file_item in torrent['files']:
|
|
||||||
torrent_files.append(sp(os.path.join(torrent['downloadDir'], file_item['name'])))
|
|
||||||
|
|
||||||
status = 'busy'
|
status = 'busy'
|
||||||
if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'):
|
if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'):
|
||||||
status = 'failed'
|
status = 'failed'
|
||||||
elif torrent['status'] == 0 and torrent['percentDone'] == 1:
|
elif torrent['status'] == 0 and torrent['percentDone'] == 1:
|
||||||
status = 'completed'
|
status = 'completed'
|
||||||
|
elif torrent['status'] == 16 and torrent['percentDone'] == 1:
|
||||||
|
status = 'completed'
|
||||||
elif torrent['status'] in [5, 6]:
|
elif torrent['status'] in [5, 6]:
|
||||||
status = 'seeding'
|
status = 'seeding'
|
||||||
|
|
||||||
|
if session['incomplete-dir-enabled'] and status == 'busy':
|
||||||
|
torrent_folder = session['incomplete-dir']
|
||||||
|
else:
|
||||||
|
torrent_folder = torrent['downloadDir']
|
||||||
|
|
||||||
|
torrent_files = []
|
||||||
|
for file_item in torrent['files']:
|
||||||
|
torrent_files.append(sp(os.path.join(torrent_folder, file_item['name'])))
|
||||||
|
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
'id': torrent['hashString'],
|
'id': torrent['hashString'],
|
||||||
'name': torrent['name'],
|
'name': torrent['name'],
|
||||||
@@ -125,8 +168,8 @@ class Transmission(Downloader):
|
|||||||
'original_status': torrent['status'],
|
'original_status': torrent['status'],
|
||||||
'seed_ratio': torrent['uploadRatio'],
|
'seed_ratio': torrent['uploadRatio'],
|
||||||
'timeleft': str(timedelta(seconds = torrent['eta'])),
|
'timeleft': str(timedelta(seconds = torrent['eta'])),
|
||||||
'folder': sp(torrent['downloadDir'] if len(torrent_files) == 1 else os.path.join(torrent['downloadDir'], torrent['name'])),
|
'folder': sp(torrent_folder if len(torrent_files) == 1 else os.path.join(torrent_folder, torrent['name'])),
|
||||||
'files': '|'.join(torrent_files)
|
'files': torrent_files
|
||||||
})
|
})
|
||||||
|
|
||||||
return release_downloads
|
return release_downloads
|
||||||
@@ -145,21 +188,22 @@ class Transmission(Downloader):
|
|||||||
log.debug('Requesting Transmission to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
log.debug('Requesting Transmission to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
||||||
return self.trpc.remove_torrent(release_download['id'], delete_files)
|
return self.trpc.remove_torrent(release_download['id'], delete_files)
|
||||||
|
|
||||||
|
|
||||||
class TransmissionRPC(object):
|
class TransmissionRPC(object):
|
||||||
|
|
||||||
"""TransmissionRPC lite library"""
|
"""TransmissionRPC lite library"""
|
||||||
def __init__(self, host = 'localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
|
def __init__(self, host = 'http://localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
|
||||||
|
|
||||||
super(TransmissionRPC, self).__init__()
|
super(TransmissionRPC, self).__init__()
|
||||||
|
|
||||||
self.url = 'http://' + host + ':' + str(port) + '/' + rpc_url + '/rpc'
|
self.url = host + ':' + str(port) + '/' + rpc_url + '/rpc'
|
||||||
self.tag = 0
|
self.tag = 0
|
||||||
self.session_id = 0
|
self.session_id = 0
|
||||||
self.session = {}
|
self.session = {}
|
||||||
if username and password:
|
if username and password:
|
||||||
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||||
password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
|
password_manager.add_password(realm = 'Transmission', uri = self.url, user = username, passwd = password)
|
||||||
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager), urllib2.HTTPDigestAuthHandler(password_manager))
|
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager))
|
||||||
opener.addheaders = [('User-agent', 'couchpotato-transmission-client/1.0')]
|
opener.addheaders = [('User-agent', 'couchpotato-transmission-client/1.0')]
|
||||||
urllib2.install_opener(opener)
|
urllib2.install_opener(opener)
|
||||||
elif username or password:
|
elif username or password:
|
||||||
@@ -181,10 +225,10 @@ class TransmissionRPC(object):
|
|||||||
else:
|
else:
|
||||||
log.debug('Unknown failure sending command to Transmission. Return text is: %s', response['result'])
|
log.debug('Unknown failure sending command to Transmission. Return text is: %s', response['result'])
|
||||||
return False
|
return False
|
||||||
except httplib.InvalidURL, err:
|
except httplib.InvalidURL as err:
|
||||||
log.error('Invalid Transmission host, check your config %s', err)
|
log.error('Invalid Transmission host, check your config %s', err)
|
||||||
return False
|
return False
|
||||||
except urllib2.HTTPError, err:
|
except urllib2.HTTPError as err:
|
||||||
if err.code == 401:
|
if err.code == 401:
|
||||||
log.error('Invalid Transmission Username or Password, check your config')
|
log.error('Invalid Transmission Username or Password, check your config')
|
||||||
return False
|
return False
|
||||||
@@ -202,7 +246,7 @@ class TransmissionRPC(object):
|
|||||||
log.error('Unable to get Transmission Session-Id %s', err)
|
log.error('Unable to get Transmission Session-Id %s', err)
|
||||||
else:
|
else:
|
||||||
log.error('TransmissionRPC HTTPError: %s', err)
|
log.error('TransmissionRPC HTTPError: %s', err)
|
||||||
except urllib2.URLError, err:
|
except urllib2.URLError as err:
|
||||||
log.error('Unable to connect to Transmission %s', err)
|
log.error('Unable to connect to Transmission %s', err)
|
||||||
|
|
||||||
def get_session(self):
|
def get_session(self):
|
||||||
@@ -240,3 +284,93 @@ class TransmissionRPC(object):
|
|||||||
post_data = {'arguments': {'ids': torrent_id, 'delete-local-data': delete_local_data}, 'method': 'torrent-remove', 'tag': self.tag}
|
post_data = {'arguments': {'ids': torrent_id, 'delete-local-data': delete_local_data}, 'method': 'torrent-remove', 'tag': self.tag}
|
||||||
return self._request(post_data)
|
return self._request(post_data)
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'transmission',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'transmission',
|
||||||
|
'label': 'Transmission',
|
||||||
|
'description': 'Use <a href="http://www.transmissionbt.com/" target="_blank">Transmission</a> to download torrents.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'torrent',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'http://localhost:9091',
|
||||||
|
'description': 'Hostname with port. Usually <strong>http://localhost:9091</strong>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'rpc_url',
|
||||||
|
'type': 'string',
|
||||||
|
'default': 'transmission',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Change if you don\'t run Transmission RPC at the default url.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'directory',
|
||||||
|
'type': 'directory',
|
||||||
|
'description': 'Download to this directory. Keep empty for default Transmission download directory.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'remove_complete',
|
||||||
|
'label': 'Remove torrent',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Remove the torrent from Transmission after it finished seeding.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_files',
|
||||||
|
'label': 'Remove files',
|
||||||
|
'default': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Also remove the leftover files.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'paused',
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'default': False,
|
||||||
|
'description': 'Add the torrent paused.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'manual',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'stalled_as_failed',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Consider a stalled torrent as failed',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_failed',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Delete a release after the download has failed.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
from .main import Transmission
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return Transmission()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'transmission',
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'list': 'download_providers',
|
|
||||||
'name': 'transmission',
|
|
||||||
'label': 'Transmission',
|
|
||||||
'description': 'Use <a href="http://www.transmissionbt.com/" target="_blank">Transmission</a> to download torrents.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'enabled',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'enabler',
|
|
||||||
'radio_group': 'torrent',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'host',
|
|
||||||
'default': 'localhost:9091',
|
|
||||||
'description': 'Hostname with port. Usually <strong>localhost:9091</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'rpc_url',
|
|
||||||
'type': 'string',
|
|
||||||
'default': 'transmission',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Change if you don\'t run Transmission RPC at the default url.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'username',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'password',
|
|
||||||
'type': 'password',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'directory',
|
|
||||||
'type': 'directory',
|
|
||||||
'description': 'Download to this directory. Keep empty for default Transmission download directory.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'remove_complete',
|
|
||||||
'label': 'Remove torrent',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Remove the torrent from Transmission after it finished seeding.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_files',
|
|
||||||
'label': 'Remove files',
|
|
||||||
'default': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Also remove the leftover files.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'paused',
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'default': False,
|
|
||||||
'description': 'Add the torrent paused.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'manual',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'stalled_as_failed',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Consider a stalled torrent as failed',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_failed',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Delete a release after the download has failed.',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
@@ -1,12 +1,6 @@
|
|||||||
from base64 import b16encode, b32decode
|
from base64 import b16encode, b32decode
|
||||||
from bencode import bencode as benc, bdecode
|
|
||||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
|
||||||
from couchpotato.core.helpers.encoding import isInt, ss, sp
|
|
||||||
from couchpotato.core.helpers.variable import tryInt, tryFloat
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from hashlib import sha1
|
from hashlib import sha1
|
||||||
from multipartpost import MultipartPostHandler
|
|
||||||
import cookielib
|
import cookielib
|
||||||
import httplib
|
import httplib
|
||||||
import json
|
import json
|
||||||
@@ -17,27 +11,37 @@ import time
|
|||||||
import urllib
|
import urllib
|
||||||
import urllib2
|
import urllib2
|
||||||
|
|
||||||
|
from bencode import bencode as benc, bdecode
|
||||||
|
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||||
|
from couchpotato.core.helpers.encoding import isInt, ss, sp
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from multipartpost import MultipartPostHandler
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
autoload = 'uTorrent'
|
||||||
|
|
||||||
class uTorrent(Downloader):
|
|
||||||
|
class uTorrent(DownloaderBase):
|
||||||
|
|
||||||
protocol = ['torrent', 'torrent_magnet']
|
protocol = ['torrent', 'torrent_magnet']
|
||||||
utorrent_api = None
|
utorrent_api = None
|
||||||
status_flags = {
|
status_flags = {
|
||||||
'STARTED' : 1,
|
'STARTED': 1,
|
||||||
'CHECKING' : 2,
|
'CHECKING': 2,
|
||||||
'CHECK-START' : 4,
|
'CHECK-START': 4,
|
||||||
'CHECKED' : 8,
|
'CHECKED': 8,
|
||||||
'ERROR' : 16,
|
'ERROR': 16,
|
||||||
'PAUSED' : 32,
|
'PAUSED': 32,
|
||||||
'QUEUED' : 64,
|
'QUEUED': 64,
|
||||||
'LOADED' : 128
|
'LOADED': 128
|
||||||
}
|
}
|
||||||
|
|
||||||
def connect(self):
|
def connect(self):
|
||||||
# Load host from config and split out port.
|
# Load host from config and split out port.
|
||||||
host = self.conf('host').split(':')
|
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||||
if not isInt(host[1]):
|
if not isInt(host[1]):
|
||||||
log.error('Config properties are not filled in correctly, port is missing.')
|
log.error('Config properties are not filled in correctly, port is missing.')
|
||||||
return False
|
return False
|
||||||
@@ -47,6 +51,21 @@ class uTorrent(Downloader):
|
|||||||
return self.utorrent_api
|
return self.utorrent_api
|
||||||
|
|
||||||
def download(self, data = None, media = None, filedata = None):
|
def download(self, data = None, media = None, filedata = None):
|
||||||
|
"""
|
||||||
|
Send a torrent/nzb file to the downloader
|
||||||
|
|
||||||
|
:param data: dict returned from provider
|
||||||
|
Contains the release information
|
||||||
|
:param media: media dict with information
|
||||||
|
Used for creating the filename when possible
|
||||||
|
:param filedata: downloaded torrent/nzb filedata
|
||||||
|
The file gets downloaded in the searcher and send to this function
|
||||||
|
This is done to have failed checking before using the downloader, so the downloader
|
||||||
|
doesn't need to worry about that
|
||||||
|
:return: boolean
|
||||||
|
One faile returns false, but the downloaded should log his own errors
|
||||||
|
"""
|
||||||
|
|
||||||
if not media: media = {}
|
if not media: media = {}
|
||||||
if not data: data = {}
|
if not data: data = {}
|
||||||
|
|
||||||
@@ -66,7 +85,7 @@ class uTorrent(Downloader):
|
|||||||
new_settings['seed_prio_limitul_flag'] = True
|
new_settings['seed_prio_limitul_flag'] = True
|
||||||
log.info('Updated uTorrent settings to set a torrent to complete after it the seeding requirements are met.')
|
log.info('Updated uTorrent settings to set a torrent to complete after it the seeding requirements are met.')
|
||||||
|
|
||||||
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
|
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
|
||||||
new_settings['bt.read_only_on_complete'] = False
|
new_settings['bt.read_only_on_complete'] = False
|
||||||
log.info('Updated uTorrent settings to not set the files to read only after completing.')
|
log.info('Updated uTorrent settings to not set the files to read only after completing.')
|
||||||
|
|
||||||
@@ -115,7 +134,29 @@ class uTorrent(Downloader):
|
|||||||
|
|
||||||
return self.downloadReturnId(torrent_hash)
|
return self.downloadReturnId(torrent_hash)
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
""" Check if connection works
|
||||||
|
:return: bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.connect():
|
||||||
|
build_version = self.utorrent_api.get_build()
|
||||||
|
if not build_version:
|
||||||
|
return False
|
||||||
|
if build_version < 25406: # This build corresponds to version 3.0.0 stable
|
||||||
|
return False, 'Your uTorrent client is too old, please update to newest version.'
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
def getAllDownloadStatus(self, ids):
|
def getAllDownloadStatus(self, ids):
|
||||||
|
""" Get status of all active downloads
|
||||||
|
|
||||||
|
:param ids: list of (mixed) downloader ids
|
||||||
|
Used to match the releases for this downloader as there could be
|
||||||
|
other downloaders active that it should ignore
|
||||||
|
:return: list of releases
|
||||||
|
"""
|
||||||
|
|
||||||
log.debug('Checking uTorrent download status.')
|
log.debug('Checking uTorrent download status.')
|
||||||
|
|
||||||
@@ -149,18 +190,18 @@ class uTorrent(Downloader):
|
|||||||
torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
|
torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
|
||||||
except:
|
except:
|
||||||
log.debug('Failed getting files from torrent: %s', torrent[2])
|
log.debug('Failed getting files from torrent: %s', torrent[2])
|
||||||
|
|
||||||
status = 'busy'
|
status = 'busy'
|
||||||
if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
|
if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
|
||||||
status = 'seeding'
|
status = 'seeding'
|
||||||
elif (torrent[1] & self.status_flags['ERROR']):
|
elif torrent[1] & self.status_flags['ERROR']:
|
||||||
status = 'failed'
|
status = 'failed'
|
||||||
elif torrent[4] == 1000:
|
elif torrent[4] == 1000:
|
||||||
status = 'completed'
|
status = 'completed'
|
||||||
|
|
||||||
if not status == 'busy':
|
if not status == 'busy':
|
||||||
self.removeReadOnly(torrent_files)
|
self.removeReadOnly(torrent_files)
|
||||||
|
|
||||||
release_downloads.append({
|
release_downloads.append({
|
||||||
'id': torrent[0],
|
'id': torrent[0],
|
||||||
'name': torrent[2],
|
'name': torrent[2],
|
||||||
@@ -169,7 +210,7 @@ class uTorrent(Downloader):
|
|||||||
'original_status': torrent[1],
|
'original_status': torrent[1],
|
||||||
'timeleft': str(timedelta(seconds = torrent[10])),
|
'timeleft': str(timedelta(seconds = torrent[10])),
|
||||||
'folder': sp(torrent[26]),
|
'folder': sp(torrent[26]),
|
||||||
'files': '|'.join(torrent_files)
|
'files': torrent_files
|
||||||
})
|
})
|
||||||
|
|
||||||
return release_downloads
|
return release_downloads
|
||||||
@@ -214,7 +255,6 @@ class uTorrentAPI(object):
|
|||||||
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||||
password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
|
password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
|
||||||
self.opener.add_handler(urllib2.HTTPBasicAuthHandler(password_manager))
|
self.opener.add_handler(urllib2.HTTPBasicAuthHandler(password_manager))
|
||||||
self.opener.add_handler(urllib2.HTTPDigestAuthHandler(password_manager))
|
|
||||||
elif username or password:
|
elif username or password:
|
||||||
log.debug('User or password missing, not using authentication.')
|
log.debug('User or password missing, not using authentication.')
|
||||||
self.token = self.get_token()
|
self.token = self.get_token()
|
||||||
@@ -231,14 +271,14 @@ class uTorrentAPI(object):
|
|||||||
return response
|
return response
|
||||||
else:
|
else:
|
||||||
log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
|
log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
|
||||||
except httplib.InvalidURL, err:
|
except httplib.InvalidURL as err:
|
||||||
log.error('Invalid uTorrent host, check your config %s', err)
|
log.error('Invalid uTorrent host, check your config %s', err)
|
||||||
except urllib2.HTTPError, err:
|
except urllib2.HTTPError as err:
|
||||||
if err.code == 401:
|
if err.code == 401:
|
||||||
log.error('Invalid uTorrent Username or Password, check your config')
|
log.error('Invalid uTorrent Username or Password, check your config')
|
||||||
else:
|
else:
|
||||||
log.error('uTorrent HTTPError: %s', err)
|
log.error('uTorrent HTTPError: %s', err)
|
||||||
except urllib2.URLError, err:
|
except urllib2.URLError as err:
|
||||||
log.error('Unable to connect to uTorrent %s', err)
|
log.error('Unable to connect to uTorrent %s', err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -261,7 +301,7 @@ class uTorrentAPI(object):
|
|||||||
|
|
||||||
def set_torrent(self, hash, params):
|
def set_torrent(self, hash, params):
|
||||||
action = 'action=setprops&hash=%s' % hash
|
action = 'action=setprops&hash=%s' % hash
|
||||||
for k, v in params.iteritems():
|
for k, v in params.items():
|
||||||
action += '&s=%s&v=%s' % (k, v)
|
action += '&s=%s&v=%s' % (k, v)
|
||||||
return self._request(action)
|
return self._request(action)
|
||||||
|
|
||||||
@@ -304,7 +344,7 @@ class uTorrentAPI(object):
|
|||||||
|
|
||||||
#log.debug('uTorrent settings: %s', settings_dict)
|
#log.debug('uTorrent settings: %s', settings_dict)
|
||||||
|
|
||||||
except Exception, err:
|
except Exception as err:
|
||||||
log.error('Failed to get settings from uTorrent: %s', err)
|
log.error('Failed to get settings from uTorrent: %s', err)
|
||||||
|
|
||||||
return settings_dict
|
return settings_dict
|
||||||
@@ -322,3 +362,86 @@ class uTorrentAPI(object):
|
|||||||
def get_files(self, hash):
|
def get_files(self, hash):
|
||||||
action = 'action=getfiles&hash=%s' % hash
|
action = 'action=getfiles&hash=%s' % hash
|
||||||
return self._request(action)
|
return self._request(action)
|
||||||
|
|
||||||
|
def get_build(self):
|
||||||
|
data = self._request('')
|
||||||
|
if not data:
|
||||||
|
return False
|
||||||
|
response = json.loads(data)
|
||||||
|
return int(response.get('build'))
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'utorrent',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'downloaders',
|
||||||
|
'list': 'download_providers',
|
||||||
|
'name': 'utorrent',
|
||||||
|
'label': 'uTorrent',
|
||||||
|
'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> (3.0+) to download torrents.',
|
||||||
|
'wizard': True,
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'enabler',
|
||||||
|
'radio_group': 'torrent',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'localhost:8000',
|
||||||
|
'description': 'Port can be found in settings when enabling WebUI.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'label',
|
||||||
|
'description': 'Label to add torrent as.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'remove_complete',
|
||||||
|
'label': 'Remove torrent',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Remove the torrent from uTorrent after it finished seeding.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_files',
|
||||||
|
'label': 'Remove files',
|
||||||
|
'default': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Also remove the leftover files.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'paused',
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'default': False,
|
||||||
|
'description': 'Add the torrent paused.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'manual',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'delete_failed',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Delete a release after the download has failed.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,79 +0,0 @@
|
|||||||
from .main import uTorrent
|
|
||||||
|
|
||||||
def start():
|
|
||||||
return uTorrent()
|
|
||||||
|
|
||||||
config = [{
|
|
||||||
'name': 'utorrent',
|
|
||||||
'groups': [
|
|
||||||
{
|
|
||||||
'tab': 'downloaders',
|
|
||||||
'list': 'download_providers',
|
|
||||||
'name': 'utorrent',
|
|
||||||
'label': 'uTorrent',
|
|
||||||
'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> (3.0+) to download torrents.',
|
|
||||||
'wizard': True,
|
|
||||||
'options': [
|
|
||||||
{
|
|
||||||
'name': 'enabled',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'enabler',
|
|
||||||
'radio_group': 'torrent',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'host',
|
|
||||||
'default': 'localhost:8000',
|
|
||||||
'description': 'Hostname with port. Usually <strong>localhost:8000</strong>',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'username',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'password',
|
|
||||||
'type': 'password',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'label',
|
|
||||||
'description': 'Label to add torrent as.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'remove_complete',
|
|
||||||
'label': 'Remove torrent',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Remove the torrent from uTorrent after it finished seeding.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_files',
|
|
||||||
'label': 'Remove files',
|
|
||||||
'default': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Also remove the leftover files.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'paused',
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'default': False,
|
|
||||||
'description': 'Add the torrent paused.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'manual',
|
|
||||||
'default': 0,
|
|
||||||
'type': 'bool',
|
|
||||||
'advanced': True,
|
|
||||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
'name': 'delete_failed',
|
|
||||||
'default': True,
|
|
||||||
'advanced': True,
|
|
||||||
'type': 'bool',
|
|
||||||
'description': 'Delete a release after the download has failed.',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}]
|
|
||||||
@@ -1,12 +1,15 @@
|
|||||||
from axl.axel import Event
|
|
||||||
from couchpotato.core.helpers.variable import mergeDicts, natcmp
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
import threading
|
import threading
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from axl.axel import Event
|
||||||
|
from couchpotato.core.helpers.variable import mergeDicts, natsortKey
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
events = {}
|
events = {}
|
||||||
|
|
||||||
|
|
||||||
def runHandler(name, handler, *args, **kwargs):
|
def runHandler(name, handler, *args, **kwargs):
|
||||||
try:
|
try:
|
||||||
return handler(*args, **kwargs)
|
return handler(*args, **kwargs)
|
||||||
@@ -14,6 +17,7 @@ def runHandler(name, handler, *args, **kwargs):
|
|||||||
from couchpotato.environment import Env
|
from couchpotato.environment import Env
|
||||||
log.error('Error in event "%s", that wasn\'t caught: %s%s', (name, traceback.format_exc(), Env.all() if not Env.get('dev') else ''))
|
log.error('Error in event "%s", that wasn\'t caught: %s%s', (name, traceback.format_exc(), Env.all() if not Env.get('dev') else ''))
|
||||||
|
|
||||||
|
|
||||||
def addEvent(name, handler, priority = 100):
|
def addEvent(name, handler, priority = 100):
|
||||||
|
|
||||||
if not events.get(name):
|
if not events.get(name):
|
||||||
@@ -27,7 +31,7 @@ def addEvent(name, handler, priority = 100):
|
|||||||
has_parent = hasattr(handler, 'im_self')
|
has_parent = hasattr(handler, 'im_self')
|
||||||
parent = None
|
parent = None
|
||||||
if has_parent:
|
if has_parent:
|
||||||
parent = handler.im_self
|
parent = handler.__self__
|
||||||
bc = hasattr(parent, 'beforeCall')
|
bc = hasattr(parent, 'beforeCall')
|
||||||
if bc: parent.beforeCall(handler)
|
if bc: parent.beforeCall(handler)
|
||||||
|
|
||||||
@@ -48,22 +52,19 @@ def addEvent(name, handler, priority = 100):
|
|||||||
'priority': priority,
|
'priority': priority,
|
||||||
})
|
})
|
||||||
|
|
||||||
def removeEvent(name, handler):
|
|
||||||
e = events[name]
|
|
||||||
e -= handler
|
|
||||||
|
|
||||||
def fireEvent(name, *args, **kwargs):
|
def fireEvent(name, *args, **kwargs):
|
||||||
if not events.has_key(name): return
|
if name not in events: return
|
||||||
|
|
||||||
#log.debug('Firing event %s', name)
|
#log.debug('Firing event %s', name)
|
||||||
try:
|
try:
|
||||||
|
|
||||||
options = {
|
options = {
|
||||||
'is_after_event': False, # Fire after event
|
'is_after_event': False, # Fire after event
|
||||||
'on_complete': False, # onComplete event
|
'on_complete': False, # onComplete event
|
||||||
'single': False, # Return single handler
|
'single': False, # Return single handler
|
||||||
'merge': False, # Merge items
|
'merge': False, # Merge items
|
||||||
'in_order': False, # Fire them in specific order, waits for the other to finish
|
'in_order': False, # Fire them in specific order, waits for the other to finish
|
||||||
}
|
}
|
||||||
|
|
||||||
# Do options
|
# Do options
|
||||||
@@ -89,7 +90,7 @@ def fireEvent(name, *args, **kwargs):
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
|
|
||||||
e = Event(name = name, threads = 10, exc_info = True, traceback = True, lock = threading.RLock())
|
e = Event(name = name, threads = 10, exc_info = True, traceback = True)
|
||||||
|
|
||||||
for event in events[name]:
|
for event in events[name]:
|
||||||
e.handle(event['handler'], priority = event['priority'])
|
e.handle(event['handler'], priority = event['priority'])
|
||||||
@@ -101,11 +102,14 @@ def fireEvent(name, *args, **kwargs):
|
|||||||
# Fire
|
# Fire
|
||||||
result = e(*args, **kwargs)
|
result = e(*args, **kwargs)
|
||||||
|
|
||||||
|
result_keys = result.keys()
|
||||||
|
result_keys.sort(key = natsortKey)
|
||||||
|
|
||||||
if options['single'] and not options['merge']:
|
if options['single'] and not options['merge']:
|
||||||
results = None
|
results = None
|
||||||
|
|
||||||
# Loop over results, stop when first not None result is found.
|
# Loop over results, stop when first not None result is found.
|
||||||
for r_key in sorted(result.iterkeys(), cmp = natcmp):
|
for r_key in result_keys:
|
||||||
r = result[r_key]
|
r = result[r_key]
|
||||||
if r[0] is True and r[1] is not None:
|
if r[0] is True and r[1] is not None:
|
||||||
results = r[1]
|
results = r[1]
|
||||||
@@ -117,7 +121,7 @@ def fireEvent(name, *args, **kwargs):
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
results = []
|
results = []
|
||||||
for r_key in sorted(result.iterkeys(), cmp = natcmp):
|
for r_key in result_keys:
|
||||||
r = result[r_key]
|
r = result[r_key]
|
||||||
if r[0] == True and r[1]:
|
if r[0] == True and r[1]:
|
||||||
results.append(r[1])
|
results.append(r[1])
|
||||||
@@ -160,18 +164,21 @@ def fireEvent(name, *args, **kwargs):
|
|||||||
except Exception:
|
except Exception:
|
||||||
log.error('%s: %s', (name, traceback.format_exc()))
|
log.error('%s: %s', (name, traceback.format_exc()))
|
||||||
|
|
||||||
|
|
||||||
def fireEventAsync(*args, **kwargs):
|
def fireEventAsync(*args, **kwargs):
|
||||||
try:
|
try:
|
||||||
t = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
|
t = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
|
||||||
t.setDaemon(True)
|
t.setDaemon(True)
|
||||||
t.start()
|
t.start()
|
||||||
return True
|
return True
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
log.error('%s: %s', (args[0], e))
|
log.error('%s: %s', (args[0], e))
|
||||||
|
|
||||||
|
|
||||||
def errorHandler(error):
|
def errorHandler(error):
|
||||||
etype, value, tb = error
|
etype, value, tb = error
|
||||||
log.error(''.join(traceback.format_exception(etype, value, tb)))
|
log.error(''.join(traceback.format_exception(etype, value, tb)))
|
||||||
|
|
||||||
|
|
||||||
def getEvent(name):
|
def getEvent(name):
|
||||||
return events[name]
|
return events[name]
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from string import ascii_letters, digits
|
from string import ascii_letters, digits
|
||||||
from urllib import quote_plus
|
from urllib import quote_plus
|
||||||
import os
|
import os
|
||||||
@@ -6,37 +5,51 @@ import re
|
|||||||
import traceback
|
import traceback
|
||||||
import unicodedata
|
import unicodedata
|
||||||
|
|
||||||
|
from chardet import detect
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
def toSafeString(original):
|
def toSafeString(original):
|
||||||
valid_chars = "-_.() %s%s" % (ascii_letters, digits)
|
valid_chars = "-_.() %s%s" % (ascii_letters, digits)
|
||||||
cleanedFilename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
|
cleaned_filename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
|
||||||
valid_string = ''.join(c for c in cleanedFilename if c in valid_chars)
|
valid_string = ''.join(c for c in cleaned_filename if c in valid_chars)
|
||||||
return ' '.join(valid_string.split())
|
return ' '.join(valid_string.split())
|
||||||
|
|
||||||
|
|
||||||
def simplifyString(original):
|
def simplifyString(original):
|
||||||
string = stripAccents(original.lower())
|
string = stripAccents(original.lower())
|
||||||
string = toSafeString(' '.join(re.split('\W+', string)))
|
string = toSafeString(' '.join(re.split('\W+', string)))
|
||||||
split = re.split('\W+|_', string.lower())
|
split = re.split('\W+|_', string.lower())
|
||||||
return toUnicode(' '.join(split))
|
return toUnicode(' '.join(split))
|
||||||
|
|
||||||
|
|
||||||
def toUnicode(original, *args):
|
def toUnicode(original, *args):
|
||||||
try:
|
try:
|
||||||
if isinstance(original, unicode):
|
if isinstance(original, unicode):
|
||||||
return original
|
return original
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
return unicode(original, *args)
|
return six.text_type(original, *args)
|
||||||
except:
|
except:
|
||||||
try:
|
try:
|
||||||
|
detected = detect(original)
|
||||||
|
try:
|
||||||
|
if detected.get('confidence') > 0.8:
|
||||||
|
return original.decode(detected.get('encoding'))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
return ek(original, *args)
|
return ek(original, *args)
|
||||||
except:
|
except:
|
||||||
raise
|
raise
|
||||||
except:
|
except:
|
||||||
log.error('Unable to decode value "%s..." : %s ', (repr(original)[:20], traceback.format_exc()))
|
log.error('Unable to decode value "%s..." : %s ', (repr(original)[:20], traceback.format_exc()))
|
||||||
ascii_text = str(original).encode('string_escape')
|
return 'ERROR DECODING STRING'
|
||||||
return toUnicode(ascii_text)
|
|
||||||
|
|
||||||
def ss(original, *args):
|
def ss(original, *args):
|
||||||
|
|
||||||
@@ -44,9 +57,13 @@ def ss(original, *args):
|
|||||||
try:
|
try:
|
||||||
from couchpotato.environment import Env
|
from couchpotato.environment import Env
|
||||||
return u_original.encode(Env.get('encoding'))
|
return u_original.encode(Env.get('encoding'))
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
log.debug('Failed ss encoding char, force UTF8: %s', e)
|
log.debug('Failed ss encoding char, force UTF8: %s', e)
|
||||||
return u_original.encode('UTF-8')
|
try:
|
||||||
|
return u_original.encode(Env.get('encoding'), 'replace')
|
||||||
|
except:
|
||||||
|
return u_original.encode('utf-8', 'replace')
|
||||||
|
|
||||||
|
|
||||||
def sp(path, *args):
|
def sp(path, *args):
|
||||||
|
|
||||||
@@ -58,7 +75,7 @@ def sp(path, *args):
|
|||||||
if os.path.sep == '/' and '\\' in path:
|
if os.path.sep == '/' and '\\' in path:
|
||||||
path = '/' + path.replace(':', '').replace('\\', '/')
|
path = '/' + path.replace(':', '').replace('\\', '/')
|
||||||
|
|
||||||
path = os.path.normcase(os.path.normpath(ss(path, *args)))
|
path = os.path.normpath(ss(path, *args))
|
||||||
|
|
||||||
# Remove any trailing path separators
|
# Remove any trailing path separators
|
||||||
if path != os.path.sep:
|
if path != os.path.sep:
|
||||||
@@ -73,16 +90,18 @@ def sp(path, *args):
|
|||||||
|
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
def ek(original, *args):
|
def ek(original, *args):
|
||||||
if isinstance(original, (str, unicode)):
|
if isinstance(original, (str, unicode)):
|
||||||
try:
|
try:
|
||||||
from couchpotato.environment import Env
|
from couchpotato.environment import Env
|
||||||
return original.decode(Env.get('encoding'))
|
return original.decode(Env.get('encoding'), 'ignore')
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
return original
|
return original
|
||||||
|
|
||||||
|
|
||||||
def isInt(value):
|
def isInt(value):
|
||||||
try:
|
try:
|
||||||
int(value)
|
int(value)
|
||||||
@@ -90,14 +109,16 @@ def isInt(value):
|
|||||||
except ValueError:
|
except ValueError:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def stripAccents(s):
|
def stripAccents(s):
|
||||||
return ''.join((c for c in unicodedata.normalize('NFD', toUnicode(s)) if unicodedata.category(c) != 'Mn'))
|
return ''.join((c for c in unicodedata.normalize('NFD', toUnicode(s)) if unicodedata.category(c) != 'Mn'))
|
||||||
|
|
||||||
|
|
||||||
def tryUrlencode(s):
|
def tryUrlencode(s):
|
||||||
new = u''
|
new = six.u('')
|
||||||
if isinstance(s, dict):
|
if isinstance(s, dict):
|
||||||
for key, value in s.iteritems():
|
for key, value in s.items():
|
||||||
new += u'&%s=%s' % (key, tryUrlencode(value))
|
new += six.u('&%s=%s') % (key, tryUrlencode(value))
|
||||||
|
|
||||||
return new[1:]
|
return new[1:]
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -1,15 +1,21 @@
|
|||||||
from couchpotato.core.helpers.encoding import toUnicode
|
|
||||||
from couchpotato.core.helpers.variable import natcmp
|
|
||||||
from urllib import unquote
|
from urllib import unquote
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
|
from couchpotato.core.helpers.variable import natsortKey
|
||||||
|
|
||||||
|
|
||||||
def getParams(params):
|
def getParams(params):
|
||||||
|
|
||||||
reg = re.compile('^[a-z0-9_\.]+$')
|
reg = re.compile('^[a-z0-9_\.]+$')
|
||||||
|
|
||||||
|
# Sort keys
|
||||||
|
param_keys = params.keys()
|
||||||
|
param_keys.sort(key = natsortKey)
|
||||||
|
|
||||||
temp = {}
|
temp = {}
|
||||||
for param, value in sorted(params.iteritems()):
|
for param in param_keys:
|
||||||
|
value = params[param]
|
||||||
|
|
||||||
nest = re.split("([\[\]]+)", param)
|
nest = re.split("([\[\]]+)", param)
|
||||||
if len(nest) > 1:
|
if len(nest) > 1:
|
||||||
@@ -37,13 +43,26 @@ def getParams(params):
|
|||||||
|
|
||||||
return dictToList(temp)
|
return dictToList(temp)
|
||||||
|
|
||||||
|
non_decimal = re.compile(r'[^\d.]+')
|
||||||
|
|
||||||
def dictToList(params):
|
def dictToList(params):
|
||||||
|
|
||||||
if type(params) is dict:
|
if type(params) is dict:
|
||||||
new = {}
|
new = {}
|
||||||
for x, value in params.iteritems():
|
for x, value in params.items():
|
||||||
try:
|
try:
|
||||||
new_value = [dictToList(value[k]) for k in sorted(value.iterkeys(), cmp = natcmp)]
|
convert = lambda text: int(text) if text.isdigit() else text.lower()
|
||||||
|
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
|
||||||
|
sorted_keys = sorted(value.keys(), key = alphanum_key)
|
||||||
|
|
||||||
|
all_ints = 0
|
||||||
|
for pnr in sorted_keys:
|
||||||
|
all_ints += 1 if non_decimal.sub('', pnr) == pnr else 0
|
||||||
|
|
||||||
|
if all_ints == len(sorted_keys):
|
||||||
|
new_value = [dictToList(value[k]) for k in sorted_keys]
|
||||||
|
else:
|
||||||
|
new_value = value
|
||||||
except:
|
except:
|
||||||
new_value = value
|
new_value = value
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,11 @@
|
|||||||
from couchpotato.core.logger import CPLog
|
|
||||||
import xml.etree.ElementTree as XMLTree
|
import xml.etree.ElementTree as XMLTree
|
||||||
|
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class RSS(object):
|
class RSS(object):
|
||||||
|
|
||||||
def getTextElements(self, xml, path):
|
def getTextElements(self, xml, path):
|
||||||
@@ -46,6 +49,6 @@ class RSS(object):
|
|||||||
def getItems(self, data, path = 'channel/item'):
|
def getItems(self, data, path = 'channel/item'):
|
||||||
try:
|
try:
|
||||||
return XMLTree.parse(data).findall(path)
|
return XMLTree.parse(data).findall(path)
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
log.error('Error parsing RSS. %s', e)
|
log.error('Error parsing RSS. %s', e)
|
||||||
return []
|
return []
|
||||||
|
|||||||
259
couchpotato/core/helpers/variable.py
Normal file → Executable file
259
couchpotato/core/helpers/variable.py
Normal file → Executable file
@@ -1,6 +1,5 @@
|
|||||||
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
import collections
|
import collections
|
||||||
|
import ctypes
|
||||||
import hashlib
|
import hashlib
|
||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
@@ -8,34 +7,46 @@ import random
|
|||||||
import re
|
import re
|
||||||
import string
|
import string
|
||||||
import sys
|
import sys
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss, sp
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
import six
|
||||||
|
from six.moves import map, zip, filter
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
def fnEscape(pattern):
|
def fnEscape(pattern):
|
||||||
return pattern.replace('[','[[').replace(']','[]]').replace('[[','[[]')
|
return pattern.replace('[', '[[').replace(']', '[]]').replace('[[', '[[]')
|
||||||
|
|
||||||
|
|
||||||
def link(src, dst):
|
def link(src, dst):
|
||||||
if os.name == 'nt':
|
if os.name == 'nt':
|
||||||
import ctypes
|
import ctypes
|
||||||
if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
|
if ctypes.windll.kernel32.CreateHardLinkW(six.text_type(dst), six.text_type(src), 0) == 0: raise ctypes.WinError()
|
||||||
else:
|
else:
|
||||||
os.link(src, dst)
|
os.link(src, dst)
|
||||||
|
|
||||||
|
|
||||||
def symlink(src, dst):
|
def symlink(src, dst):
|
||||||
if os.name == 'nt':
|
if os.name == 'nt':
|
||||||
import ctypes
|
import ctypes
|
||||||
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
|
if ctypes.windll.kernel32.CreateSymbolicLinkW(six.text_type(dst), six.text_type(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
|
||||||
else:
|
else:
|
||||||
os.symlink(src, dst)
|
os.symlink(src, dst)
|
||||||
|
|
||||||
|
|
||||||
def getUserDir():
|
def getUserDir():
|
||||||
try:
|
try:
|
||||||
import pwd
|
import pwd
|
||||||
os.environ['HOME'] = pwd.getpwuid(os.geteuid()).pw_dir
|
os.environ['HOME'] = sp(pwd.getpwuid(os.geteuid()).pw_dir)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return os.path.expanduser('~')
|
return sp(os.path.expanduser('~'))
|
||||||
|
|
||||||
|
|
||||||
def getDownloadDir():
|
def getDownloadDir():
|
||||||
user_dir = getUserDir()
|
user_dir = getUserDir()
|
||||||
@@ -49,6 +60,7 @@ def getDownloadDir():
|
|||||||
|
|
||||||
return user_dir
|
return user_dir
|
||||||
|
|
||||||
|
|
||||||
def getDataDir():
|
def getDataDir():
|
||||||
|
|
||||||
# Windows
|
# Windows
|
||||||
@@ -68,8 +80,10 @@ def getDataDir():
|
|||||||
# Linux
|
# Linux
|
||||||
return os.path.join(user_dir, '.couchpotato')
|
return os.path.join(user_dir, '.couchpotato')
|
||||||
|
|
||||||
def isDict(object):
|
|
||||||
return isinstance(object, dict)
|
def isDict(obj):
|
||||||
|
return isinstance(obj, dict)
|
||||||
|
|
||||||
|
|
||||||
def mergeDicts(a, b, prepend_list = False):
|
def mergeDicts(a, b, prepend_list = False):
|
||||||
assert isDict(a), isDict(b)
|
assert isDict(a), isDict(b)
|
||||||
@@ -91,6 +105,7 @@ def mergeDicts(a, b, prepend_list = False):
|
|||||||
current_dst[key] = current_src[key]
|
current_dst[key] = current_src[key]
|
||||||
return dst
|
return dst
|
||||||
|
|
||||||
|
|
||||||
def removeListDuplicates(seq):
|
def removeListDuplicates(seq):
|
||||||
checked = []
|
checked = []
|
||||||
for e in seq:
|
for e in seq:
|
||||||
@@ -98,35 +113,73 @@ def removeListDuplicates(seq):
|
|||||||
checked.append(e)
|
checked.append(e)
|
||||||
return checked
|
return checked
|
||||||
|
|
||||||
|
|
||||||
def flattenList(l):
|
def flattenList(l):
|
||||||
if isinstance(l, list):
|
if isinstance(l, list):
|
||||||
return sum(map(flattenList, l))
|
return sum(map(flattenList, l))
|
||||||
else:
|
else:
|
||||||
return l
|
return l
|
||||||
|
|
||||||
|
|
||||||
def md5(text):
|
def md5(text):
|
||||||
return hashlib.md5(ss(text)).hexdigest()
|
return hashlib.md5(ss(text)).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
def sha1(text):
|
def sha1(text):
|
||||||
return hashlib.sha1(text).hexdigest()
|
return hashlib.sha1(text).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
def isLocalIP(ip):
|
def isLocalIP(ip):
|
||||||
ip = ip.lstrip('htps:/')
|
ip = ip.lstrip('htps:/')
|
||||||
regex = '/(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1)$/'
|
regex = '/(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1)$/'
|
||||||
return re.search(regex, ip) is not None or 'localhost' in ip or ip[:4] == '127.'
|
return re.search(regex, ip) is not None or 'localhost' in ip or ip[:4] == '127.'
|
||||||
|
|
||||||
|
|
||||||
def getExt(filename):
|
def getExt(filename):
|
||||||
return os.path.splitext(filename)[1][1:]
|
return os.path.splitext(filename)[1][1:]
|
||||||
|
|
||||||
def cleanHost(host):
|
|
||||||
if not host.startswith(('http://', 'https://')):
|
|
||||||
host = 'http://' + host
|
|
||||||
|
|
||||||
host = host.rstrip('/')
|
def cleanHost(host, protocol = True, ssl = False, username = None, password = None):
|
||||||
host += '/'
|
"""Return a cleaned up host with given url options set
|
||||||
|
|
||||||
|
Changes protocol to https if ssl is set to True and http if ssl is set to false.
|
||||||
|
>>> cleanHost("localhost:80", ssl=True)
|
||||||
|
'https://localhost:80/'
|
||||||
|
>>> cleanHost("localhost:80", ssl=False)
|
||||||
|
'http://localhost:80/'
|
||||||
|
|
||||||
|
Username and password is managed with the username and password variables
|
||||||
|
>>> cleanHost("localhost:80", username="user", password="passwd")
|
||||||
|
'http://user:passwd@localhost:80/'
|
||||||
|
|
||||||
|
Output without scheme (protocol) can be forced with protocol=False
|
||||||
|
>>> cleanHost("localhost:80", protocol=False)
|
||||||
|
'localhost:80'
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not '://' in host and protocol:
|
||||||
|
host = ('https://' if ssl else 'http://') + host
|
||||||
|
|
||||||
|
if not protocol:
|
||||||
|
host = host.split('://', 1)[-1]
|
||||||
|
|
||||||
|
if protocol and username and password:
|
||||||
|
try:
|
||||||
|
auth = re.findall('^(?:.+?//)(.+?):(.+?)@(?:.+)$', host)
|
||||||
|
if auth:
|
||||||
|
log.error('Cleanhost error: auth already defined in url: %s, please remove BasicAuth from url.', host)
|
||||||
|
else:
|
||||||
|
host = host.replace('://', '://%s:%s@' % (username, password), 1)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
host = host.rstrip('/ ')
|
||||||
|
if protocol:
|
||||||
|
host += '/'
|
||||||
|
|
||||||
return host
|
return host
|
||||||
|
|
||||||
|
|
||||||
def getImdb(txt, check_inside = False, multiple = False):
|
def getImdb(txt, check_inside = False, multiple = False):
|
||||||
|
|
||||||
if not check_inside:
|
if not check_inside:
|
||||||
@@ -143,7 +196,7 @@ def getImdb(txt, check_inside = False, multiple = False):
|
|||||||
ids = re.findall('(tt\d{4,7})', txt)
|
ids = re.findall('(tt\d{4,7})', txt)
|
||||||
|
|
||||||
if multiple:
|
if multiple:
|
||||||
return list(set(['tt%07d' % tryInt(x[2:]) for x in ids])) if len(ids) > 0 else []
|
return removeDuplicate(['tt%07d' % tryInt(x[2:]) for x in ids]) if len(ids) > 0 else []
|
||||||
|
|
||||||
return 'tt%07d' % tryInt(ids[0][2:])
|
return 'tt%07d' % tryInt(ids[0][2:])
|
||||||
except IndexError:
|
except IndexError:
|
||||||
@@ -151,10 +204,12 @@ def getImdb(txt, check_inside = False, multiple = False):
|
|||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def tryInt(s, default = 0):
|
def tryInt(s, default = 0):
|
||||||
try: return int(s)
|
try: return int(s)
|
||||||
except: return default
|
except: return default
|
||||||
|
|
||||||
|
|
||||||
def tryFloat(s):
|
def tryFloat(s):
|
||||||
try:
|
try:
|
||||||
if isinstance(s, str):
|
if isinstance(s, str):
|
||||||
@@ -163,39 +218,43 @@ def tryFloat(s):
|
|||||||
return float(s)
|
return float(s)
|
||||||
except: return 0
|
except: return 0
|
||||||
|
|
||||||
def natsortKey(s):
|
|
||||||
return map(tryInt, re.findall(r'(\d+|\D+)', s))
|
|
||||||
|
|
||||||
def natcmp(a, b):
|
def natsortKey(string_):
|
||||||
return cmp(natsortKey(a), natsortKey(b))
|
"""See http://www.codinghorror.com/blog/archives/001018.html"""
|
||||||
|
return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_)]
|
||||||
|
|
||||||
|
|
||||||
def toIterable(value):
|
def toIterable(value):
|
||||||
if isinstance(value, collections.Iterable):
|
if isinstance(value, collections.Iterable):
|
||||||
return value
|
return value
|
||||||
return [value]
|
return [value]
|
||||||
|
|
||||||
def getTitle(library_dict):
|
|
||||||
|
def getIdentifier(media):
|
||||||
|
return media.get('identifier') or media.get('identifiers', {}).get('imdb')
|
||||||
|
|
||||||
|
|
||||||
|
def getTitle(media_dict):
|
||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
return library_dict['titles'][0]['title']
|
return media_dict['title']
|
||||||
except:
|
except:
|
||||||
try:
|
try:
|
||||||
for title in library_dict.titles:
|
return media_dict['titles'][0]
|
||||||
if title.default:
|
|
||||||
return title.title
|
|
||||||
except:
|
except:
|
||||||
try:
|
try:
|
||||||
return library_dict['info']['titles'][0]
|
return media_dict['info']['titles'][0]
|
||||||
except:
|
except:
|
||||||
log.error('Could not get title for %s', library_dict.identifier)
|
try:
|
||||||
return None
|
return media_dict['media']['info']['titles'][0]
|
||||||
|
except:
|
||||||
log.error('Could not get title for %s', library_dict['identifier'])
|
log.error('Could not get title for %s', getIdentifier(media_dict))
|
||||||
return None
|
return None
|
||||||
except:
|
except:
|
||||||
log.error('Could not get title for library item: %s', library_dict)
|
log.error('Could not get title for library item: %s', media_dict)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def possibleTitles(raw_title):
|
def possibleTitles(raw_title):
|
||||||
|
|
||||||
titles = [
|
titles = [
|
||||||
@@ -208,18 +267,146 @@ def possibleTitles(raw_title):
|
|||||||
new_title = raw_title.replace('&', 'and')
|
new_title = raw_title.replace('&', 'and')
|
||||||
titles.append(simplifyString(new_title))
|
titles.append(simplifyString(new_title))
|
||||||
|
|
||||||
return list(set(titles))
|
return removeDuplicate(titles)
|
||||||
|
|
||||||
|
|
||||||
def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
|
def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
|
||||||
return ''.join(random.choice(chars) for x in range(size))
|
return ''.join(random.choice(chars) for x in range(size))
|
||||||
|
|
||||||
|
|
||||||
def splitString(str, split_on = ',', clean = True):
|
def splitString(str, split_on = ',', clean = True):
|
||||||
list = [x.strip() for x in str.split(split_on)] if str else []
|
l = [x.strip() for x in str.split(split_on)] if str else []
|
||||||
return filter(None, list) if clean else list
|
return removeEmpty(l) if clean else l
|
||||||
|
|
||||||
|
|
||||||
|
def removeEmpty(l):
|
||||||
|
return list(filter(None, l))
|
||||||
|
|
||||||
|
|
||||||
|
def removeDuplicate(l):
|
||||||
|
seen = set()
|
||||||
|
return [x for x in l if x not in seen and not seen.add(x)]
|
||||||
|
|
||||||
|
|
||||||
def dictIsSubset(a, b):
|
def dictIsSubset(a, b):
|
||||||
return all([k in b and b[k] == v for k, v in a.items()])
|
return all([k in b and b[k] == v for k, v in a.items()])
|
||||||
|
|
||||||
|
|
||||||
|
# Returns True if sub_folder is the same as or inside base_folder
|
||||||
def isSubFolder(sub_folder, base_folder):
|
def isSubFolder(sub_folder, base_folder):
|
||||||
# Returns True is sub_folder is the same as or in base_folder
|
if base_folder and sub_folder:
|
||||||
return base_folder.rstrip(os.path.sep) + os.path.sep in sub_folder.rstrip(os.path.sep) + os.path.sep
|
base = sp(os.path.realpath(base_folder)) + os.path.sep
|
||||||
|
subfolder = sp(os.path.realpath(sub_folder)) + os.path.sep
|
||||||
|
return os.path.commonprefix([subfolder, base]) == base
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
# From SABNZBD
|
||||||
|
re_password = [re.compile(r'(.+){{([^{}]+)}}$'), re.compile(r'(.+)\s+password\s*=\s*(.+)$', re.I)]
|
||||||
|
|
||||||
|
|
||||||
|
def scanForPassword(name):
|
||||||
|
m = None
|
||||||
|
for reg in re_password:
|
||||||
|
m = reg.search(name)
|
||||||
|
if m: break
|
||||||
|
|
||||||
|
if m:
|
||||||
|
return m.group(1).strip('. '), m.group(2).strip()
|
||||||
|
|
||||||
|
|
||||||
|
under_pat = re.compile(r'_([a-z])')
|
||||||
|
|
||||||
|
def underscoreToCamel(name):
|
||||||
|
return under_pat.sub(lambda x: x.group(1).upper(), name)
|
||||||
|
|
||||||
|
|
||||||
|
def removePyc(folder, only_excess = True, show_logs = True):
|
||||||
|
|
||||||
|
folder = sp(folder)
|
||||||
|
|
||||||
|
for root, dirs, files in os.walk(folder):
|
||||||
|
|
||||||
|
pyc_files = filter(lambda filename: filename.endswith('.pyc'), files)
|
||||||
|
py_files = set(filter(lambda filename: filename.endswith('.py'), files))
|
||||||
|
excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
|
||||||
|
|
||||||
|
for excess_pyc_file in excess_pyc_files:
|
||||||
|
full_path = os.path.join(root, excess_pyc_file)
|
||||||
|
if show_logs: log.debug('Removing old PYC file: %s', full_path)
|
||||||
|
try:
|
||||||
|
os.remove(full_path)
|
||||||
|
except:
|
||||||
|
log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))
|
||||||
|
|
||||||
|
for dir_name in dirs:
|
||||||
|
full_path = os.path.join(root, dir_name)
|
||||||
|
if len(os.listdir(full_path)) == 0:
|
||||||
|
try:
|
||||||
|
os.rmdir(full_path)
|
||||||
|
except:
|
||||||
|
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
|
||||||
|
|
||||||
|
|
||||||
|
def getFreeSpace(directories):
|
||||||
|
|
||||||
|
single = not isinstance(directories, (tuple, list))
|
||||||
|
if single:
|
||||||
|
directories = [directories]
|
||||||
|
|
||||||
|
free_space = {}
|
||||||
|
for folder in directories:
|
||||||
|
|
||||||
|
size = None
|
||||||
|
if os.path.isdir(folder):
|
||||||
|
if os.name == 'nt':
|
||||||
|
_, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
|
||||||
|
ctypes.c_ulonglong()
|
||||||
|
if sys.version_info >= (3,) or isinstance(folder, unicode):
|
||||||
|
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW #@UndefinedVariable
|
||||||
|
else:
|
||||||
|
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA #@UndefinedVariable
|
||||||
|
ret = fun(folder, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
|
||||||
|
if ret == 0:
|
||||||
|
raise ctypes.WinError()
|
||||||
|
return [total.value, free.value]
|
||||||
|
else:
|
||||||
|
s = os.statvfs(folder)
|
||||||
|
size = [s.f_blocks * s.f_frsize / (1024 * 1024), (s.f_bavail * s.f_frsize) / (1024 * 1024)]
|
||||||
|
|
||||||
|
if single: return size
|
||||||
|
|
||||||
|
free_space[folder] = size
|
||||||
|
|
||||||
|
return free_space
|
||||||
|
|
||||||
|
|
||||||
|
def getSize(paths):
|
||||||
|
|
||||||
|
single = not isinstance(paths, (tuple, list))
|
||||||
|
if single:
|
||||||
|
paths = [paths]
|
||||||
|
|
||||||
|
total_size = 0
|
||||||
|
for path in paths:
|
||||||
|
path = sp(path)
|
||||||
|
|
||||||
|
if os.path.isdir(path):
|
||||||
|
total_size = 0
|
||||||
|
for dirpath, _, filenames in os.walk(path):
|
||||||
|
for f in filenames:
|
||||||
|
total_size += os.path.getsize(sp(os.path.join(dirpath, f)))
|
||||||
|
|
||||||
|
elif os.path.isfile(path):
|
||||||
|
total_size += os.path.getsize(path)
|
||||||
|
|
||||||
|
return total_size / 1048576 # MB
|
||||||
|
|
||||||
|
|
||||||
|
def find(func, iterable):
|
||||||
|
for item in iterable:
|
||||||
|
if func(item):
|
||||||
|
return item
|
||||||
|
|
||||||
|
return None
|
||||||
|
|||||||
@@ -1,30 +1,33 @@
|
|||||||
from couchpotato.core.event import fireEvent
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from importlib import import_module
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core.event import fireEvent
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from importhelper import import_module
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Loader(object):
|
class Loader(object):
|
||||||
plugins = {}
|
|
||||||
providers = {}
|
def __init__(self):
|
||||||
modules = {}
|
self.plugins = {}
|
||||||
|
self.providers = {}
|
||||||
|
self.modules = {}
|
||||||
|
self.paths = {}
|
||||||
|
|
||||||
def preload(self, root = ''):
|
def preload(self, root = ''):
|
||||||
core = os.path.join(root, 'couchpotato', 'core')
|
core = os.path.join(root, 'couchpotato', 'core')
|
||||||
|
|
||||||
self.paths = {
|
self.paths.update({
|
||||||
'core': (0, 'couchpotato.core._base', os.path.join(core, '_base')),
|
'core': (0, 'couchpotato.core._base', os.path.join(core, '_base')),
|
||||||
'plugin': (1, 'couchpotato.core.plugins', os.path.join(core, 'plugins')),
|
'plugin': (1, 'couchpotato.core.plugins', os.path.join(core, 'plugins')),
|
||||||
'notifications': (20, 'couchpotato.core.notifications', os.path.join(core, 'notifications')),
|
'notifications': (20, 'couchpotato.core.notifications', os.path.join(core, 'notifications')),
|
||||||
'downloaders': (20, 'couchpotato.core.downloaders', os.path.join(core, 'downloaders')),
|
'downloaders': (20, 'couchpotato.core.downloaders', os.path.join(core, 'downloaders')),
|
||||||
}
|
})
|
||||||
|
|
||||||
# Add providers to loader
|
|
||||||
self.addPath(root, ['couchpotato', 'core', 'providers'], 25, recursive = False)
|
|
||||||
|
|
||||||
# Add media to loader
|
# Add media to loader
|
||||||
self.addPath(root, ['couchpotato', 'core', 'media'], 25, recursive = True)
|
self.addPath(root, ['couchpotato', 'core', 'media'], 25, recursive = True)
|
||||||
@@ -37,7 +40,7 @@ class Loader(object):
|
|||||||
self.paths['custom_plugins'] = (30, '', custom_plugin_dir)
|
self.paths['custom_plugins'] = (30, '', custom_plugin_dir)
|
||||||
|
|
||||||
# Loop over all paths and add to module list
|
# Loop over all paths and add to module list
|
||||||
for plugin_type, plugin_tuple in self.paths.iteritems():
|
for plugin_type, plugin_tuple in self.paths.items():
|
||||||
priority, module, dir_name = plugin_tuple
|
priority, module, dir_name = plugin_tuple
|
||||||
self.addFromDir(plugin_type, priority, module, dir_name)
|
self.addFromDir(plugin_type, priority, module, dir_name)
|
||||||
|
|
||||||
@@ -45,7 +48,7 @@ class Loader(object):
|
|||||||
did_save = 0
|
did_save = 0
|
||||||
|
|
||||||
for priority in sorted(self.modules):
|
for priority in sorted(self.modules):
|
||||||
for module_name, plugin in sorted(self.modules[priority].iteritems()):
|
for module_name, plugin in sorted(self.modules[priority].items()):
|
||||||
|
|
||||||
# Load module
|
# Load module
|
||||||
try:
|
try:
|
||||||
@@ -56,12 +59,10 @@ class Loader(object):
|
|||||||
if m is None:
|
if m is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
log.info('Loading %s: %s', (plugin['type'], plugin['name']))
|
|
||||||
|
|
||||||
# Save default settings for plugin/provider
|
# Save default settings for plugin/provider
|
||||||
did_save += self.loadSettings(m, module_name, save = False)
|
did_save += self.loadSettings(m, module_name, save = False)
|
||||||
|
|
||||||
self.loadPlugins(m, plugin.get('name'))
|
self.loadPlugins(m, plugin.get('type'), plugin.get('name'))
|
||||||
except ImportError as e:
|
except ImportError as e:
|
||||||
# todo:: subclass ImportError for missing requirements.
|
# todo:: subclass ImportError for missing requirements.
|
||||||
if e.message.lower().startswith("missing"):
|
if e.message.lower().startswith("missing"):
|
||||||
@@ -81,7 +82,7 @@ class Loader(object):
|
|||||||
for filename in os.listdir(root_path):
|
for filename in os.listdir(root_path):
|
||||||
path = os.path.join(root_path, filename)
|
path = os.path.join(root_path, filename)
|
||||||
if os.path.isdir(path) and filename[:2] != '__':
|
if os.path.isdir(path) and filename[:2] != '__':
|
||||||
if u'__init__.py' in os.listdir(path):
|
if six.u('__init__.py') in os.listdir(path):
|
||||||
new_base_path = ''.join(s + '.' for s in base_path) + filename
|
new_base_path = ''.join(s + '.' for s in base_path) + filename
|
||||||
self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)
|
self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)
|
||||||
|
|
||||||
@@ -95,14 +96,19 @@ class Loader(object):
|
|||||||
self.addModule(priority, plugin_type, module, os.path.basename(dir_name))
|
self.addModule(priority, plugin_type, module, os.path.basename(dir_name))
|
||||||
|
|
||||||
for name in os.listdir(dir_name):
|
for name in os.listdir(dir_name):
|
||||||
if os.path.isdir(os.path.join(dir_name, name)) and name != 'static' and os.path.isfile(os.path.join(dir_name, name, '__init__.py')):
|
path = os.path.join(dir_name, name)
|
||||||
|
ext = os.path.splitext(path)[1]
|
||||||
|
ext_length = len(ext)
|
||||||
|
if name != 'static' and ((os.path.isdir(path) and os.path.isfile(os.path.join(path, '__init__.py')))
|
||||||
|
or (os.path.isfile(path) and ext == '.py')):
|
||||||
|
name = name[:-ext_length] if ext_length > 0 else name
|
||||||
module_name = '%s.%s' % (module, name)
|
module_name = '%s.%s' % (module, name)
|
||||||
self.addModule(priority, plugin_type, module_name, name)
|
self.addModule(priority, plugin_type, module_name, name)
|
||||||
|
|
||||||
def loadSettings(self, module, name, save = True):
|
def loadSettings(self, module, name, save = True):
|
||||||
|
|
||||||
if not hasattr(module, 'config'):
|
if not hasattr(module, 'config'):
|
||||||
log.debug('Skip loading settings for plugin %s as it has no config section' % module.__file__)
|
#log.debug('Skip loading settings for plugin %s as it has no config section' % module.__file__)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -118,13 +124,20 @@ class Loader(object):
|
|||||||
log.debug('Failed loading settings for "%s": %s', (name, traceback.format_exc()))
|
log.debug('Failed loading settings for "%s": %s', (name, traceback.format_exc()))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def loadPlugins(self, module, name):
|
def loadPlugins(self, module, type, name):
|
||||||
|
|
||||||
if not hasattr(module, 'start'):
|
if not hasattr(module, 'autoload'):
|
||||||
log.debug('Skip startup for plugin %s as it has no start section' % module.__file__)
|
#log.debug('Skip startup for plugin %s as it has no start section' % module.__file__)
|
||||||
return False
|
return False
|
||||||
try:
|
try:
|
||||||
module.start()
|
# Load single file plugin
|
||||||
|
if isinstance(module.autoload, (str, unicode)):
|
||||||
|
getattr(module, module.autoload)()
|
||||||
|
# Load folder plugin
|
||||||
|
else:
|
||||||
|
module.autoload()
|
||||||
|
|
||||||
|
log.info('Loaded %s: %s', (type, name))
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
log.error('Failed loading plugin "%s": %s', (module.__file__, traceback.format_exc()))
|
log.error('Failed loading plugin "%s": %s', (module.__file__, traceback.format_exc()))
|
||||||
@@ -136,6 +149,9 @@ class Loader(object):
|
|||||||
self.modules[priority] = {}
|
self.modules[priority] = {}
|
||||||
|
|
||||||
module = module.lstrip('.')
|
module = module.lstrip('.')
|
||||||
|
if plugin_type.startswith('couchpotato_core'):
|
||||||
|
plugin_type = plugin_type[17:]
|
||||||
|
|
||||||
self.modules[priority][module] = {
|
self.modules[priority][module] = {
|
||||||
'priority': priority,
|
'priority': priority,
|
||||||
'module': module,
|
'module': module,
|
||||||
|
|||||||
@@ -1,11 +1,15 @@
|
|||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
|
||||||
class CPLog(object):
|
class CPLog(object):
|
||||||
|
|
||||||
context = ''
|
context = ''
|
||||||
replace_private = ['api', 'apikey', 'api_key', 'password', 'username', 'h', 'uid', 'key', 'passkey']
|
replace_private = ['api', 'apikey', 'api_key', 'password', 'username', 'h', 'uid', 'key', 'passkey']
|
||||||
|
|
||||||
|
Env = None
|
||||||
|
is_develop = False
|
||||||
|
|
||||||
def __init__(self, context = ''):
|
def __init__(self, context = ''):
|
||||||
if context.endswith('.main'):
|
if context.endswith('.main'):
|
||||||
context = context[:-5]
|
context = context[:-5]
|
||||||
@@ -13,6 +17,20 @@ class CPLog(object):
|
|||||||
self.context = context
|
self.context = context
|
||||||
self.logger = logging.getLogger()
|
self.logger = logging.getLogger()
|
||||||
|
|
||||||
|
def setup(self):
|
||||||
|
|
||||||
|
if not self.Env:
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
|
||||||
|
self.Env = Env
|
||||||
|
self.is_develop = Env.get('dev')
|
||||||
|
|
||||||
|
from couchpotato.core.event import addEvent
|
||||||
|
addEvent('app.after_shutdown', self.close)
|
||||||
|
|
||||||
|
def close(self, *args, **kwargs):
|
||||||
|
logging.shutdown()
|
||||||
|
|
||||||
def info(self, msg, replace_tuple = ()):
|
def info(self, msg, replace_tuple = ()):
|
||||||
self.logger.info(self.addContext(msg, replace_tuple))
|
self.logger.info(self.addContext(msg, replace_tuple))
|
||||||
|
|
||||||
@@ -36,23 +54,22 @@ class CPLog(object):
|
|||||||
|
|
||||||
def safeMessage(self, msg, replace_tuple = ()):
|
def safeMessage(self, msg, replace_tuple = ()):
|
||||||
|
|
||||||
from couchpotato.environment import Env
|
from couchpotato.core.helpers.encoding import ss, toUnicode
|
||||||
from couchpotato.core.helpers.encoding import ss
|
|
||||||
|
|
||||||
msg = ss(msg)
|
msg = ss(msg)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
msg = msg % replace_tuple
|
if isinstance(replace_tuple, tuple):
|
||||||
except:
|
msg = msg % tuple([ss(x) if not isinstance(x, (int, float)) else x for x in list(replace_tuple)])
|
||||||
try:
|
elif isinstance(replace_tuple, dict):
|
||||||
if isinstance(replace_tuple, tuple):
|
msg = msg % dict((k, ss(v) if not isinstance(v, (int, float)) else v) for k, v in replace_tuple.iteritems())
|
||||||
msg = msg % tuple([ss(x) for x in list(replace_tuple)])
|
else:
|
||||||
else:
|
msg = msg % ss(replace_tuple)
|
||||||
msg = msg % ss(replace_tuple)
|
except Exception as e:
|
||||||
except Exception, e:
|
self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, e))
|
||||||
self.logger.error(u'Failed encoding stuff to log "%s": %s' % (msg, e))
|
|
||||||
|
|
||||||
if not Env.get('dev'):
|
self.setup()
|
||||||
|
if not self.is_develop:
|
||||||
|
|
||||||
for replace in self.replace_private:
|
for replace in self.replace_private:
|
||||||
msg = re.sub('(\?%s=)[^\&]+' % replace, '?%s=xxx' % replace, msg)
|
msg = re.sub('(\?%s=)[^\&]+' % replace, '?%s=xxx' % replace, msg)
|
||||||
@@ -60,10 +77,10 @@ class CPLog(object):
|
|||||||
|
|
||||||
# Replace api key
|
# Replace api key
|
||||||
try:
|
try:
|
||||||
api_key = Env.setting('api_key')
|
api_key = self.Env.setting('api_key')
|
||||||
if api_key:
|
if api_key:
|
||||||
msg = msg.replace(api_key, 'API_KEY')
|
msg = msg.replace(api_key, 'API_KEY')
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return msg
|
return toUnicode(msg)
|
||||||
|
|||||||
108
couchpotato/core/media/__init__.py
Normal file → Executable file
108
couchpotato/core/media/__init__.py
Normal file → Executable file
@@ -1,44 +1,110 @@
|
|||||||
from couchpotato import get_session
|
import os
|
||||||
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato import CPLog, md5
|
||||||
|
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
|
from couchpotato.core.helpers.variable import getExt
|
||||||
from couchpotato.core.plugins.base import Plugin
|
from couchpotato.core.plugins.base import Plugin
|
||||||
from couchpotato.core.settings.model import Media
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class MediaBase(Plugin):
|
class MediaBase(Plugin):
|
||||||
|
|
||||||
_type = None
|
_type = None
|
||||||
|
|
||||||
default_dict = {
|
|
||||||
'profile': {'types': {'quality': {}}},
|
|
||||||
'releases': {'status': {}, 'quality': {}, 'files':{}, 'info': {}},
|
|
||||||
'library': {'titles': {}, 'files':{}},
|
|
||||||
'files': {},
|
|
||||||
'status': {},
|
|
||||||
'category': {},
|
|
||||||
}
|
|
||||||
|
|
||||||
def initType(self):
|
def initType(self):
|
||||||
addEvent('media.types', self.getType)
|
addEvent('media.types', self.getType)
|
||||||
|
|
||||||
def getType(self):
|
def getType(self):
|
||||||
return self._type
|
return self._type
|
||||||
|
|
||||||
def createOnComplete(self, id):
|
def createOnComplete(self, media_id):
|
||||||
|
|
||||||
def onComplete():
|
def onComplete():
|
||||||
db = get_session()
|
try:
|
||||||
media = db.query(Media).filter_by(id = id).first()
|
media = fireEvent('media.get', media_id, single = True)
|
||||||
fireEventAsync('%s.searcher.single' % media.type, media.to_dict(self.default_dict), on_complete = self.createNotifyFront(id))
|
if media:
|
||||||
db.expire_all()
|
event_name = '%s.searcher.single' % media.get('type')
|
||||||
|
fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True)
|
||||||
|
except:
|
||||||
|
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||||
|
|
||||||
return onComplete
|
return onComplete
|
||||||
|
|
||||||
def createNotifyFront(self, media_id):
|
def createNotifyFront(self, media_id):
|
||||||
|
|
||||||
def notifyFront():
|
def notifyFront():
|
||||||
db = get_session()
|
try:
|
||||||
media = db.query(Media).filter_by(id = media_id).first()
|
media = fireEvent('media.get', media_id, single = True)
|
||||||
fireEvent('notify.frontend', type = '%s.update' % media.type, data = media.to_dict(self.default_dict))
|
if media:
|
||||||
db.expire_all()
|
event_name = '%s.update' % media.get('type')
|
||||||
|
fireEvent('notify.frontend', type = event_name, data = media)
|
||||||
|
except:
|
||||||
|
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||||
|
|
||||||
return notifyFront
|
return notifyFront
|
||||||
|
|
||||||
|
def getDefaultTitle(self, info, ):
|
||||||
|
|
||||||
|
# Set default title
|
||||||
|
default_title = toUnicode(info.get('title'))
|
||||||
|
titles = info.get('titles', [])
|
||||||
|
counter = 0
|
||||||
|
def_title = None
|
||||||
|
for title in titles:
|
||||||
|
if (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == six.u('') and toUnicode(titles[0]) == title):
|
||||||
|
def_title = toUnicode(title)
|
||||||
|
break
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
if not def_title:
|
||||||
|
def_title = toUnicode(titles[0])
|
||||||
|
|
||||||
|
return def_title or 'UNKNOWN'
|
||||||
|
|
||||||
|
def getPoster(self, media, image_urls):
|
||||||
|
if 'files' not in media:
|
||||||
|
media['files'] = {}
|
||||||
|
|
||||||
|
existing_files = media['files']
|
||||||
|
|
||||||
|
image_type = 'poster'
|
||||||
|
file_type = 'image_%s' % image_type
|
||||||
|
|
||||||
|
# Make existing unique
|
||||||
|
unique_files = list(set(existing_files.get(file_type, [])))
|
||||||
|
|
||||||
|
# Remove files that can't be found
|
||||||
|
for ef in unique_files:
|
||||||
|
if not os.path.isfile(ef):
|
||||||
|
unique_files.remove(ef)
|
||||||
|
|
||||||
|
# Replace new files list
|
||||||
|
existing_files[file_type] = unique_files
|
||||||
|
if len(existing_files) == 0:
|
||||||
|
del existing_files[file_type]
|
||||||
|
|
||||||
|
# Loop over type
|
||||||
|
for image in image_urls.get(image_type, []):
|
||||||
|
if not isinstance(image, (str, unicode)):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Check if it has top image
|
||||||
|
filename = '%s.%s' % (md5(image), getExt(image))
|
||||||
|
existing = existing_files.get(file_type, [])
|
||||||
|
has_latest = False
|
||||||
|
for x in existing:
|
||||||
|
if filename in x:
|
||||||
|
has_latest = True
|
||||||
|
|
||||||
|
if not has_latest or file_type not in existing_files or len(existing_files.get(file_type, [])) == 0:
|
||||||
|
file_path = fireEvent('file.download', url = image, single = True)
|
||||||
|
if file_path:
|
||||||
|
existing_files[file_type] = [toUnicode(file_path)]
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
|||||||
@@ -1,13 +1,7 @@
|
|||||||
from couchpotato.core.event import addEvent
|
from .main import Library
|
||||||
from couchpotato.core.plugins.base import Plugin
|
|
||||||
|
|
||||||
|
|
||||||
class LibraryBase(Plugin):
|
def autoload():
|
||||||
|
return Library()
|
||||||
|
|
||||||
_type = None
|
config = []
|
||||||
|
|
||||||
def initType(self):
|
|
||||||
addEvent('library.types', self.getType)
|
|
||||||
|
|
||||||
def getType(self):
|
|
||||||
return self._type
|
|
||||||
|
|||||||
13
couchpotato/core/media/_base/library/base.py
Normal file
13
couchpotato/core/media/_base/library/base.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from couchpotato.core.event import addEvent
|
||||||
|
from couchpotato.core.plugins.base import Plugin
|
||||||
|
|
||||||
|
|
||||||
|
class LibraryBase(Plugin):
|
||||||
|
|
||||||
|
_type = None
|
||||||
|
|
||||||
|
def initType(self):
|
||||||
|
addEvent('library.types', self.getType)
|
||||||
|
|
||||||
|
def getType(self):
|
||||||
|
return self._type
|
||||||
128
couchpotato/core/media/_base/library/main.py
Executable file
128
couchpotato/core/media/_base/library/main.py
Executable file
@@ -0,0 +1,128 @@
|
|||||||
|
from couchpotato import get_db
|
||||||
|
from couchpotato.api import addApiView
|
||||||
|
from couchpotato.core.event import addEvent, fireEvent
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.library.base import LibraryBase
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Library(LibraryBase):
|
||||||
|
def __init__(self):
|
||||||
|
addEvent('library.title', self.title)
|
||||||
|
addEvent('library.related', self.related)
|
||||||
|
addEvent('library.tree', self.tree)
|
||||||
|
|
||||||
|
addEvent('library.root', self.root)
|
||||||
|
|
||||||
|
addApiView('library.query', self.queryView)
|
||||||
|
addApiView('library.related', self.relatedView)
|
||||||
|
addApiView('library.tree', self.treeView)
|
||||||
|
|
||||||
|
def queryView(self, media_id, **kwargs):
|
||||||
|
db = get_db()
|
||||||
|
media = db.get('id', media_id)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'result': fireEvent('library.query', media, single = True)
|
||||||
|
}
|
||||||
|
|
||||||
|
def relatedView(self, media_id, **kwargs):
|
||||||
|
db = get_db()
|
||||||
|
media = db.get('id', media_id)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'result': fireEvent('library.related', media, single = True)
|
||||||
|
}
|
||||||
|
|
||||||
|
def treeView(self, media_id, **kwargs):
|
||||||
|
db = get_db()
|
||||||
|
media = db.get('id', media_id)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'result': fireEvent('library.tree', media, single = True)
|
||||||
|
}
|
||||||
|
|
||||||
|
def title(self, library):
|
||||||
|
return fireEvent(
|
||||||
|
'library.query',
|
||||||
|
library,
|
||||||
|
|
||||||
|
condense = False,
|
||||||
|
include_year = False,
|
||||||
|
include_identifier = False,
|
||||||
|
single = True
|
||||||
|
)
|
||||||
|
|
||||||
|
def related(self, media):
|
||||||
|
result = {self.key(media['type']): media}
|
||||||
|
|
||||||
|
db = get_db()
|
||||||
|
cur = media
|
||||||
|
|
||||||
|
while cur and cur.get('parent_id'):
|
||||||
|
cur = db.get('id', cur['parent_id'])
|
||||||
|
|
||||||
|
result[self.key(cur['type'])] = cur
|
||||||
|
|
||||||
|
children = db.get_many('media_children', media['_id'], with_doc = True)
|
||||||
|
|
||||||
|
for item in children:
|
||||||
|
key = self.key(item['doc']['type']) + 's'
|
||||||
|
|
||||||
|
if key not in result:
|
||||||
|
result[key] = []
|
||||||
|
|
||||||
|
result[key].append(item['doc'])
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def root(self, media):
|
||||||
|
db = get_db()
|
||||||
|
cur = media
|
||||||
|
|
||||||
|
while cur and cur.get('parent_id'):
|
||||||
|
cur = db.get('id', cur['parent_id'])
|
||||||
|
|
||||||
|
return cur
|
||||||
|
|
||||||
|
def tree(self, media = None, media_id = None):
|
||||||
|
db = get_db()
|
||||||
|
|
||||||
|
if media:
|
||||||
|
result = media
|
||||||
|
elif media_id:
|
||||||
|
result = db.get('id', media_id, with_doc = True)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Find children
|
||||||
|
items = db.get_many('media_children', result['_id'], with_doc = True)
|
||||||
|
keys = []
|
||||||
|
|
||||||
|
# Build children arrays
|
||||||
|
for item in items:
|
||||||
|
key = self.key(item['doc']['type']) + 's'
|
||||||
|
|
||||||
|
if key not in result:
|
||||||
|
result[key] = {}
|
||||||
|
elif type(result[key]) is not dict:
|
||||||
|
result[key] = {}
|
||||||
|
|
||||||
|
if key not in keys:
|
||||||
|
keys.append(key)
|
||||||
|
|
||||||
|
result[key][item['_id']] = fireEvent('library.tree', item['doc'], single = True)
|
||||||
|
|
||||||
|
# Unique children
|
||||||
|
for key in keys:
|
||||||
|
result[key] = result[key].values()
|
||||||
|
|
||||||
|
# Include releases
|
||||||
|
result['releases'] = fireEvent('release.for_media', result['_id'], single = True)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def key(self, media_type):
|
||||||
|
parts = media_type.split('.')
|
||||||
|
return parts[-1]
|
||||||
7
couchpotato/core/media/_base/matcher/__init__.py
Normal file
7
couchpotato/core/media/_base/matcher/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from .main import Matcher
|
||||||
|
|
||||||
|
|
||||||
|
def autoload():
|
||||||
|
return Matcher()
|
||||||
|
|
||||||
|
config = []
|
||||||
84
couchpotato/core/media/_base/matcher/base.py
Normal file
84
couchpotato/core/media/_base/matcher/base.py
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
from couchpotato.core.event import addEvent
|
||||||
|
from couchpotato.core.helpers.encoding import simplifyString
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.plugins.base import Plugin
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MatcherBase(Plugin):
|
||||||
|
type = None
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
if self.type:
|
||||||
|
addEvent('%s.matcher.correct' % self.type, self.correct)
|
||||||
|
|
||||||
|
def correct(self, chain, release, media, quality):
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def flattenInfo(self, info):
|
||||||
|
# Flatten dictionary of matches (chain info)
|
||||||
|
if isinstance(info, dict):
|
||||||
|
return dict([(key, self.flattenInfo(value)) for key, value in info.items()])
|
||||||
|
|
||||||
|
# Flatten matches
|
||||||
|
result = None
|
||||||
|
|
||||||
|
for match in info:
|
||||||
|
if isinstance(match, dict):
|
||||||
|
if result is None:
|
||||||
|
result = {}
|
||||||
|
|
||||||
|
for key, value in match.items():
|
||||||
|
if key not in result:
|
||||||
|
result[key] = []
|
||||||
|
|
||||||
|
result[key].append(value)
|
||||||
|
else:
|
||||||
|
if result is None:
|
||||||
|
result = []
|
||||||
|
|
||||||
|
result.append(match)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def constructFromRaw(self, match):
|
||||||
|
if not match:
|
||||||
|
return None
|
||||||
|
|
||||||
|
parts = [
|
||||||
|
''.join([
|
||||||
|
y for y in x[1:] if y
|
||||||
|
]) for x in match
|
||||||
|
]
|
||||||
|
|
||||||
|
return ''.join(parts)[:-1].strip()
|
||||||
|
|
||||||
|
def simplifyValue(self, value):
|
||||||
|
if not value:
|
||||||
|
return value
|
||||||
|
|
||||||
|
if isinstance(value, basestring):
|
||||||
|
return simplifyString(value)
|
||||||
|
|
||||||
|
if isinstance(value, list):
|
||||||
|
return [self.simplifyValue(x) for x in value]
|
||||||
|
|
||||||
|
raise ValueError("Unsupported value type")
|
||||||
|
|
||||||
|
def chainMatch(self, chain, group, tags):
|
||||||
|
info = self.flattenInfo(chain.info[group])
|
||||||
|
|
||||||
|
found_tags = []
|
||||||
|
for tag, accepted in tags.items():
|
||||||
|
values = [self.simplifyValue(x) for x in info.get(tag, [None])]
|
||||||
|
|
||||||
|
if any([val in accepted for val in values]):
|
||||||
|
found_tags.append(tag)
|
||||||
|
|
||||||
|
log.debug('tags found: %s, required: %s' % (found_tags, tags.keys()))
|
||||||
|
|
||||||
|
if set(tags.keys()) == set(found_tags):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return all([key in found_tags for key, value in tags.items()])
|
||||||
89
couchpotato/core/media/_base/matcher/main.py
Normal file
89
couchpotato/core/media/_base/matcher/main.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
from couchpotato.core.event import addEvent, fireEvent
|
||||||
|
from couchpotato.core.helpers.variable import possibleTitles
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.matcher.base import MatcherBase
|
||||||
|
from caper import Caper
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Matcher(MatcherBase):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
super(Matcher, self).__init__()
|
||||||
|
|
||||||
|
self.caper = Caper()
|
||||||
|
|
||||||
|
addEvent('matcher.parse', self.parse)
|
||||||
|
addEvent('matcher.match', self.match)
|
||||||
|
|
||||||
|
addEvent('matcher.flatten_info', self.flattenInfo)
|
||||||
|
addEvent('matcher.construct_from_raw', self.constructFromRaw)
|
||||||
|
|
||||||
|
addEvent('matcher.correct_title', self.correctTitle)
|
||||||
|
addEvent('matcher.correct_quality', self.correctQuality)
|
||||||
|
|
||||||
|
def parse(self, name, parser='scene'):
|
||||||
|
return self.caper.parse(name, parser)
|
||||||
|
|
||||||
|
def match(self, release, media, quality):
|
||||||
|
match = fireEvent('matcher.parse', release['name'], single = True)
|
||||||
|
|
||||||
|
if len(match.chains) < 1:
|
||||||
|
log.info2('Wrong: %s, unable to parse release name (no chains)', release['name'])
|
||||||
|
return False
|
||||||
|
|
||||||
|
for chain in match.chains:
|
||||||
|
if fireEvent('%s.matcher.correct' % media['type'], chain, release, media, quality, single = True):
|
||||||
|
return chain
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def correctTitle(self, chain, media):
|
||||||
|
root = fireEvent('library.root', media, single = True)
|
||||||
|
|
||||||
|
if 'show_name' not in chain.info or not len(chain.info['show_name']):
|
||||||
|
log.info('Wrong: missing show name in parsed result')
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Get the lower-case parsed show name from the chain
|
||||||
|
chain_words = [x.lower() for x in chain.info['show_name']]
|
||||||
|
|
||||||
|
# Build a list of possible titles of the media we are searching for
|
||||||
|
titles = root['info']['titles']
|
||||||
|
|
||||||
|
# Add year suffix titles (will result in ['<name_one>', '<name_one> <suffix_one>', '<name_two>', ...])
|
||||||
|
suffixes = [None, root['info']['year']]
|
||||||
|
|
||||||
|
titles = [
|
||||||
|
title + ((' %s' % suffix) if suffix else '')
|
||||||
|
for title in titles
|
||||||
|
for suffix in suffixes
|
||||||
|
]
|
||||||
|
|
||||||
|
# Check show titles match
|
||||||
|
# TODO check xem names
|
||||||
|
for title in titles:
|
||||||
|
for valid_words in [x.split(' ') for x in possibleTitles(title)]:
|
||||||
|
|
||||||
|
if valid_words == chain_words:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def correctQuality(self, chain, quality, quality_map):
|
||||||
|
if quality['identifier'] not in quality_map:
|
||||||
|
log.info2('Wrong: unknown preferred quality %s', quality['identifier'])
|
||||||
|
return False
|
||||||
|
|
||||||
|
if 'video' not in chain.info:
|
||||||
|
log.info2('Wrong: no video tags found')
|
||||||
|
return False
|
||||||
|
|
||||||
|
video_tags = quality_map[quality['identifier']]
|
||||||
|
|
||||||
|
if not self.chainMatch(chain, 'video', video_tags):
|
||||||
|
log.info2('Wrong: %s tags not in chain', video_tags)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
@@ -1,6 +1,5 @@
|
|||||||
from .main import MediaPlugin
|
from .main import MediaPlugin
|
||||||
|
|
||||||
def start():
|
|
||||||
return MediaPlugin()
|
|
||||||
|
|
||||||
config = []
|
def autoload():
|
||||||
|
return MediaPlugin()
|
||||||
|
|||||||
199
couchpotato/core/media/_base/media/index.py
Normal file
199
couchpotato/core/media/_base/media/index.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
from string import ascii_letters
|
||||||
|
from hashlib import md5
|
||||||
|
|
||||||
|
from CodernityDB.tree_index import MultiTreeBasedIndex, TreeBasedIndex
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode, simplifyString
|
||||||
|
|
||||||
|
|
||||||
|
class MediaIndex(MultiTreeBasedIndex):
|
||||||
|
_version = 3
|
||||||
|
|
||||||
|
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '32s'
|
||||||
|
super(MediaIndex, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return md5(key).hexdigest()
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
if data.get('_t') == 'media' and (data.get('identifier') or data.get('identifiers')):
|
||||||
|
|
||||||
|
identifiers = data.get('identifiers', {})
|
||||||
|
if data.get('identifier') and 'imdb' not in identifiers:
|
||||||
|
identifiers['imdb'] = data.get('identifier')
|
||||||
|
|
||||||
|
ids = []
|
||||||
|
for x in identifiers:
|
||||||
|
ids.append(md5('%s-%s' % (x, identifiers[x])).hexdigest())
|
||||||
|
|
||||||
|
return ids, None
|
||||||
|
|
||||||
|
|
||||||
|
class MediaStatusIndex(TreeBasedIndex):
|
||||||
|
_version = 1
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '32s'
|
||||||
|
super(MediaStatusIndex, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return md5(key).hexdigest()
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
if data.get('_t') == 'media' and data.get('status'):
|
||||||
|
return md5(data.get('status')).hexdigest(), None
|
||||||
|
|
||||||
|
|
||||||
|
class MediaTypeIndex(TreeBasedIndex):
|
||||||
|
_version = 1
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '32s'
|
||||||
|
super(MediaTypeIndex, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return md5(key).hexdigest()
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
if data.get('_t') == 'media' and data.get('type'):
|
||||||
|
return md5(data.get('type')).hexdigest(), None
|
||||||
|
|
||||||
|
|
||||||
|
class TitleSearchIndex(MultiTreeBasedIndex):
|
||||||
|
_version = 1
|
||||||
|
|
||||||
|
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex
|
||||||
|
from itertools import izip
|
||||||
|
from couchpotato.core.helpers.encoding import simplifyString"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '32s'
|
||||||
|
super(TitleSearchIndex, self).__init__(*args, **kwargs)
|
||||||
|
self.__l = kwargs.get('w_len', 2)
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
|
||||||
|
if data.get('_t') == 'media' and len(data.get('title', '')) > 0:
|
||||||
|
|
||||||
|
out = set()
|
||||||
|
title = str(simplifyString(data.get('title').lower()))
|
||||||
|
l = self.__l
|
||||||
|
title_split = title.split()
|
||||||
|
|
||||||
|
for x in range(len(title_split)):
|
||||||
|
combo = ' '.join(title_split[x:])[:32].strip()
|
||||||
|
out.add(combo.rjust(32, '_'))
|
||||||
|
combo_range = max(l, min(len(combo), 32))
|
||||||
|
|
||||||
|
for cx in range(1, combo_range):
|
||||||
|
ccombo = combo[:-cx].strip()
|
||||||
|
if len(ccombo) > l:
|
||||||
|
out.add(ccombo.rjust(32, '_'))
|
||||||
|
|
||||||
|
return out, None
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return key.rjust(32, '_').lower()
|
||||||
|
|
||||||
|
|
||||||
|
class TitleIndex(TreeBasedIndex):
|
||||||
|
_version = 4
|
||||||
|
|
||||||
|
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
|
||||||
|
from string import ascii_letters
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '32s'
|
||||||
|
super(TitleIndex, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return self.simplify(key)
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
if data.get('_t') == 'media' and data.get('title') is not None and len(data.get('title')) > 0:
|
||||||
|
return self.simplify(data['title']), None
|
||||||
|
|
||||||
|
def simplify(self, title):
|
||||||
|
|
||||||
|
title = toUnicode(title)
|
||||||
|
|
||||||
|
nr_prefix = '' if title and len(title) > 0 and title[0] in ascii_letters else '#'
|
||||||
|
title = simplifyString(title)
|
||||||
|
|
||||||
|
for prefix in ['the ', 'an ', 'a ']:
|
||||||
|
if prefix == title[:len(prefix)]:
|
||||||
|
title = title[len(prefix):]
|
||||||
|
break
|
||||||
|
|
||||||
|
return str(nr_prefix + title).ljust(32, ' ')[:32]
|
||||||
|
|
||||||
|
|
||||||
|
class StartsWithIndex(TreeBasedIndex):
|
||||||
|
_version = 3
|
||||||
|
|
||||||
|
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
|
||||||
|
from string import ascii_letters
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '1s'
|
||||||
|
super(StartsWithIndex, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return self.first(key)
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
if data.get('_t') == 'media' and data.get('title') is not None:
|
||||||
|
return self.first(data['title']), None
|
||||||
|
|
||||||
|
def first(self, title):
|
||||||
|
title = toUnicode(title)
|
||||||
|
title = simplifyString(title)
|
||||||
|
|
||||||
|
for prefix in ['the ', 'an ', 'a ']:
|
||||||
|
if prefix == title[:len(prefix)]:
|
||||||
|
title = title[len(prefix):]
|
||||||
|
break
|
||||||
|
|
||||||
|
return str(title[0] if title and len(title) > 0 and title[0] in ascii_letters else '#').lower()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class MediaChildrenIndex(TreeBasedIndex):
|
||||||
|
_version = 1
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '32s'
|
||||||
|
super(MediaChildrenIndex, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return key
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
if data.get('_t') == 'media' and data.get('parent_id'):
|
||||||
|
return data.get('parent_id'), None
|
||||||
|
|
||||||
|
|
||||||
|
class MediaTagIndex(MultiTreeBasedIndex):
|
||||||
|
_version = 2
|
||||||
|
|
||||||
|
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['key_format'] = '32s'
|
||||||
|
super(MediaTagIndex, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def make_key_value(self, data):
|
||||||
|
if data.get('_t') == 'media' and data.get('tags') and len(data.get('tags', [])) > 0:
|
||||||
|
|
||||||
|
tags = set()
|
||||||
|
for tag in data.get('tags', []):
|
||||||
|
tags.add(self.make_key(tag))
|
||||||
|
|
||||||
|
return list(tags), None
|
||||||
|
|
||||||
|
def make_key(self, key):
|
||||||
|
return md5(key).hexdigest()
|
||||||
620
couchpotato/core/media/_base/media/main.py
Normal file → Executable file
620
couchpotato/core/media/_base/media/main.py
Normal file → Executable file
@@ -1,21 +1,35 @@
|
|||||||
from couchpotato import get_session
|
from datetime import timedelta
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
from string import ascii_lowercase
|
||||||
|
|
||||||
|
from CodernityDB.database import RecordNotFound, RecordDeleted
|
||||||
|
from couchpotato import tryInt, get_db
|
||||||
from couchpotato.api import addApiView
|
from couchpotato.api import addApiView
|
||||||
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
||||||
from couchpotato.core.helpers.encoding import toUnicode
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
from couchpotato.core.helpers.variable import mergeDicts, splitString, getImdb
|
from couchpotato.core.helpers.variable import splitString, getImdb, getTitle
|
||||||
from couchpotato.core.logger import CPLog
|
from couchpotato.core.logger import CPLog
|
||||||
from couchpotato.core.media import MediaBase
|
from couchpotato.core.media import MediaBase
|
||||||
from couchpotato.core.settings.model import Library, LibraryTitle, Release, \
|
from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex, MediaTagIndex
|
||||||
Media
|
|
||||||
from sqlalchemy.orm import joinedload_all
|
|
||||||
from sqlalchemy.sql.expression import or_, asc, not_, desc
|
|
||||||
from string import ascii_lowercase
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class MediaPlugin(MediaBase):
|
class MediaPlugin(MediaBase):
|
||||||
|
|
||||||
|
_database = {
|
||||||
|
'media': MediaIndex,
|
||||||
|
'media_search_title': TitleSearchIndex,
|
||||||
|
'media_status': MediaStatusIndex,
|
||||||
|
'media_tag': MediaTagIndex,
|
||||||
|
'media_by_type': MediaTypeIndex,
|
||||||
|
'media_title': TitleIndex,
|
||||||
|
'media_startswith': StartsWithIndex,
|
||||||
|
'media_children': MediaChildrenIndex,
|
||||||
|
}
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
|
||||||
addApiView('media.refresh', self.refresh, docs = {
|
addApiView('media.refresh', self.refresh, docs = {
|
||||||
@@ -29,15 +43,15 @@ class MediaPlugin(MediaBase):
|
|||||||
'desc': 'List media',
|
'desc': 'List media',
|
||||||
'params': {
|
'params': {
|
||||||
'type': {'type': 'string', 'desc': 'Media type to filter on.'},
|
'type': {'type': 'string', 'desc': 'Media type to filter on.'},
|
||||||
'status': {'type': 'array or csv', 'desc': 'Filter movie by status. Example:"active,done"'},
|
'status': {'type': 'array or csv', 'desc': 'Filter media by status. Example:"active,done"'},
|
||||||
'release_status': {'type': 'array or csv', 'desc': 'Filter movie by status of its releases. Example:"snatched,available"'},
|
'release_status': {'type': 'array or csv', 'desc': 'Filter media by status of its releases. Example:"snatched,available"'},
|
||||||
'limit_offset': {'desc': 'Limit and offset the movie list. Examples: "50" or "50,30"'},
|
'limit_offset': {'desc': 'Limit and offset the media list. Examples: "50" or "50,30"'},
|
||||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all movies starting with the letter "a"'},
|
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all media starting with the letter "a"'},
|
||||||
'search': {'desc': 'Search movie title'},
|
'search': {'desc': 'Search media title'},
|
||||||
},
|
},
|
||||||
'return': {'type': 'object', 'example': """{
|
'return': {'type': 'object', 'example': """{
|
||||||
'success': True,
|
'success': True,
|
||||||
'empty': bool, any movies returned or not,
|
'empty': bool, any media returned or not,
|
||||||
'media': array, media found,
|
'media': array, media found,
|
||||||
}"""}
|
}"""}
|
||||||
})
|
})
|
||||||
@@ -59,37 +73,64 @@ class MediaPlugin(MediaBase):
|
|||||||
|
|
||||||
addApiView('media.available_chars', self.charView)
|
addApiView('media.available_chars', self.charView)
|
||||||
|
|
||||||
addEvent('app.load', self.addSingleRefreshView)
|
addEvent('app.load', self.addSingleRefreshView, priority = 100)
|
||||||
addEvent('app.load', self.addSingleListView)
|
addEvent('app.load', self.addSingleListView, priority = 100)
|
||||||
addEvent('app.load', self.addSingleCharView)
|
addEvent('app.load', self.addSingleCharView, priority = 100)
|
||||||
addEvent('app.load', self.addSingleDeleteView)
|
addEvent('app.load', self.addSingleDeleteView, priority = 100)
|
||||||
|
addEvent('app.load', self.cleanupFaults)
|
||||||
|
|
||||||
addEvent('media.get', self.get)
|
addEvent('media.get', self.get)
|
||||||
|
addEvent('media.with_status', self.withStatus)
|
||||||
|
addEvent('media.with_identifiers', self.withIdentifiers)
|
||||||
addEvent('media.list', self.list)
|
addEvent('media.list', self.list)
|
||||||
addEvent('media.delete', self.delete)
|
addEvent('media.delete', self.delete)
|
||||||
addEvent('media.restatus', self.restatus)
|
addEvent('media.restatus', self.restatus)
|
||||||
|
addEvent('media.tag', self.tag)
|
||||||
|
addEvent('media.untag', self.unTag)
|
||||||
|
|
||||||
|
# Wrongly tagged media files
|
||||||
|
def cleanupFaults(self):
|
||||||
|
medias = fireEvent('media.with_status', 'ignored', single = True) or []
|
||||||
|
|
||||||
|
db = get_db()
|
||||||
|
for media in medias:
|
||||||
|
try:
|
||||||
|
media['status'] = 'done'
|
||||||
|
db.update(media)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
def refresh(self, id = '', **kwargs):
|
def refresh(self, id = '', **kwargs):
|
||||||
db = get_session()
|
handlers = []
|
||||||
|
ids = splitString(id)
|
||||||
|
|
||||||
for x in splitString(id):
|
for x in ids:
|
||||||
media = db.query(Media).filter_by(id = x).first()
|
|
||||||
|
|
||||||
if media:
|
refresh_handler = self.createRefreshHandler(x)
|
||||||
# Get current selected title
|
if refresh_handler:
|
||||||
default_title = ''
|
handlers.append(refresh_handler)
|
||||||
for title in media.library.titles:
|
|
||||||
if title.default: default_title = title.title
|
|
||||||
|
|
||||||
fireEvent('notify.frontend', type = '%s.busy' % media.type, data = {'id': x})
|
fireEvent('notify.frontend', type = 'media.busy', data = {'_id': ids})
|
||||||
fireEventAsync('library.update.%s' % media.type, identifier = media.library.identifier, default_title = default_title, force = True, on_complete = self.createOnComplete(x))
|
fireEventAsync('schedule.queue', handlers = handlers)
|
||||||
|
|
||||||
db.expire_all()
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'success': True,
|
'success': True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def createRefreshHandler(self, media_id):
|
||||||
|
|
||||||
|
try:
|
||||||
|
media = get_db().get('id', media_id)
|
||||||
|
event = '%s.update' % media.get('type')
|
||||||
|
|
||||||
|
def handler():
|
||||||
|
fireEvent(event, media_id = media_id, on_complete = self.createOnComplete(media_id))
|
||||||
|
|
||||||
|
return handler
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Refresh handler for non existing media: %s', traceback.format_exc())
|
||||||
|
|
||||||
def addSingleRefreshView(self):
|
def addSingleRefreshView(self):
|
||||||
|
|
||||||
for media_type in fireEvent('media.types', merge = True):
|
for media_type in fireEvent('media.types', merge = True):
|
||||||
@@ -97,21 +138,30 @@ class MediaPlugin(MediaBase):
|
|||||||
|
|
||||||
def get(self, media_id):
|
def get(self, media_id):
|
||||||
|
|
||||||
db = get_session()
|
try:
|
||||||
|
db = get_db()
|
||||||
|
|
||||||
imdb_id = getImdb(str(media_id))
|
imdb_id = getImdb(str(media_id))
|
||||||
|
|
||||||
if imdb_id:
|
if imdb_id:
|
||||||
m = db.query(Media).filter(Media.library.has(identifier = imdb_id)).first()
|
media = db.get('media', 'imdb-%s' % imdb_id, with_doc = True)['doc']
|
||||||
else:
|
else:
|
||||||
m = db.query(Media).filter_by(id = media_id).first()
|
media = db.get('id', media_id)
|
||||||
|
|
||||||
results = None
|
if media:
|
||||||
if m:
|
|
||||||
results = m.to_dict(self.default_dict)
|
|
||||||
|
|
||||||
db.expire_all()
|
# Attach category
|
||||||
return results
|
try: media['category'] = db.get('id', media.get('category_id'))
|
||||||
|
except: pass
|
||||||
|
|
||||||
|
media['releases'] = fireEvent('release.for_media', media['_id'], single = True)
|
||||||
|
|
||||||
|
return media
|
||||||
|
|
||||||
|
except (RecordNotFound, RecordDeleted):
|
||||||
|
log.error('Media with id "%s" not found', media_id)
|
||||||
|
except:
|
||||||
|
raise
|
||||||
|
|
||||||
def getView(self, id = None, **kwargs):
|
def getView(self, id = None, **kwargs):
|
||||||
|
|
||||||
@@ -122,9 +172,47 @@ class MediaPlugin(MediaBase):
|
|||||||
'media': media,
|
'media': media,
|
||||||
}
|
}
|
||||||
|
|
||||||
def list(self, types = None, status = None, release_status = None, limit_offset = None, starts_with = None, search = None, order = None):
|
def withStatus(self, status, types = None, with_doc = True):
|
||||||
|
|
||||||
db = get_session()
|
db = get_db()
|
||||||
|
|
||||||
|
if types and not isinstance(types, (list, tuple)):
|
||||||
|
types = [types]
|
||||||
|
|
||||||
|
status = list(status if isinstance(status, (list, tuple)) else [status])
|
||||||
|
|
||||||
|
for s in status:
|
||||||
|
for ms in db.get_many('media_status', s):
|
||||||
|
if with_doc:
|
||||||
|
try:
|
||||||
|
doc = db.get('id', ms['_id'])
|
||||||
|
|
||||||
|
if types and doc.get('type') not in types:
|
||||||
|
continue
|
||||||
|
|
||||||
|
yield doc
|
||||||
|
except (RecordDeleted, RecordNotFound):
|
||||||
|
log.debug('Record not found, skipping: %s', ms['_id'])
|
||||||
|
except (ValueError, EOFError):
|
||||||
|
fireEvent('database.delete_corrupted', ms.get('_id'), traceback_error = traceback.format_exc(0))
|
||||||
|
else:
|
||||||
|
yield ms
|
||||||
|
|
||||||
|
def withIdentifiers(self, identifiers, with_doc = False):
|
||||||
|
db = get_db()
|
||||||
|
|
||||||
|
for x in identifiers:
|
||||||
|
try:
|
||||||
|
return db.get('media', '%s-%s' % (x, identifiers[x]), with_doc = with_doc)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
log.debug('No media found with identifiers: %s', identifiers)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, with_tags = None, starts_with = None, search = None):
|
||||||
|
|
||||||
|
db = get_db()
|
||||||
|
|
||||||
# Make a list from string
|
# Make a list from string
|
||||||
if status and not isinstance(status, (list, tuple)):
|
if status and not isinstance(status, (list, tuple)):
|
||||||
@@ -133,140 +221,104 @@ class MediaPlugin(MediaBase):
|
|||||||
release_status = [release_status]
|
release_status = [release_status]
|
||||||
if types and not isinstance(types, (list, tuple)):
|
if types and not isinstance(types, (list, tuple)):
|
||||||
types = [types]
|
types = [types]
|
||||||
|
if with_tags and not isinstance(with_tags, (list, tuple)):
|
||||||
|
with_tags = [with_tags]
|
||||||
|
|
||||||
# query movie ids
|
# query media ids
|
||||||
q = db.query(Media) \
|
if types:
|
||||||
.with_entities(Media.id) \
|
all_media_ids = set()
|
||||||
.group_by(Media.id)
|
for media_type in types:
|
||||||
|
all_media_ids = all_media_ids.union(set([x['_id'] for x in db.get_many('media_by_type', media_type)]))
|
||||||
|
else:
|
||||||
|
all_media_ids = set([x['_id'] for x in db.all('media')])
|
||||||
|
|
||||||
|
media_ids = list(all_media_ids)
|
||||||
|
filter_by = {}
|
||||||
|
|
||||||
# Filter on movie status
|
# Filter on movie status
|
||||||
if status and len(status) > 0:
|
if status and len(status) > 0:
|
||||||
statuses = fireEvent('status.get', status, single = len(status) > 1)
|
filter_by['media_status'] = set()
|
||||||
statuses = [s.get('id') for s in statuses]
|
for media_status in fireEvent('media.with_status', status, with_doc = False, single = True):
|
||||||
|
filter_by['media_status'].add(media_status.get('_id'))
|
||||||
q = q.filter(Media.status_id.in_(statuses))
|
|
||||||
|
|
||||||
# Filter on release status
|
# Filter on release status
|
||||||
if release_status and len(release_status) > 0:
|
if release_status and len(release_status) > 0:
|
||||||
q = q.join(Media.releases)
|
filter_by['release_status'] = set()
|
||||||
|
for release_status in fireEvent('release.with_status', release_status, with_doc = False, single = True):
|
||||||
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
|
filter_by['release_status'].add(release_status.get('media_id'))
|
||||||
statuses = [s.get('id') for s in statuses]
|
|
||||||
|
|
||||||
q = q.filter(Release.status_id.in_(statuses))
|
|
||||||
|
|
||||||
# Filter on type
|
|
||||||
if types and len(types) > 0:
|
|
||||||
try: q = q.filter(Media.type.in_(types))
|
|
||||||
except: pass
|
|
||||||
|
|
||||||
# Only join when searching / ordering
|
|
||||||
if starts_with or search or order != 'release_order':
|
|
||||||
q = q.join(Media.library, Library.titles) \
|
|
||||||
.filter(LibraryTitle.default == True)
|
|
||||||
|
|
||||||
# Add search filters
|
# Add search filters
|
||||||
filter_or = []
|
|
||||||
if starts_with:
|
if starts_with:
|
||||||
starts_with = toUnicode(starts_with.lower())
|
starts_with = toUnicode(starts_with.lower())[0]
|
||||||
if starts_with in ascii_lowercase:
|
starts_with = starts_with if starts_with in ascii_lowercase else '#'
|
||||||
filter_or.append(LibraryTitle.simple_title.startswith(starts_with))
|
filter_by['starts_with'] = [x['_id'] for x in db.get_many('media_startswith', starts_with)]
|
||||||
else:
|
|
||||||
ignore = []
|
|
||||||
for letter in ascii_lowercase:
|
|
||||||
ignore.append(LibraryTitle.simple_title.startswith(toUnicode(letter)))
|
|
||||||
filter_or.append(not_(or_(*ignore)))
|
|
||||||
|
|
||||||
|
# Add tag filter
|
||||||
|
if with_tags:
|
||||||
|
filter_by['with_tags'] = set()
|
||||||
|
for tag in with_tags:
|
||||||
|
for x in db.get_many('media_tag', tag):
|
||||||
|
filter_by['with_tags'].add(x['_id'])
|
||||||
|
|
||||||
|
# Filter with search query
|
||||||
if search:
|
if search:
|
||||||
filter_or.append(LibraryTitle.simple_title.like('%%' + search + '%%'))
|
filter_by['search'] = [x['_id'] for x in db.get_many('media_search_title', search)]
|
||||||
|
|
||||||
if len(filter_or) > 0:
|
if status_or and 'media_status' in filter_by and 'release_status' in filter_by:
|
||||||
q = q.filter(or_(*filter_or))
|
filter_by['status'] = list(filter_by['media_status']) + list(filter_by['release_status'])
|
||||||
|
del filter_by['media_status']
|
||||||
|
del filter_by['release_status']
|
||||||
|
|
||||||
total_count = q.count()
|
# Filter by combining ids
|
||||||
|
for x in filter_by:
|
||||||
|
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||||
|
|
||||||
|
total_count = len(media_ids)
|
||||||
if total_count == 0:
|
if total_count == 0:
|
||||||
return 0, []
|
return 0, []
|
||||||
|
|
||||||
if order == 'release_order':
|
offset = 0
|
||||||
q = q.order_by(desc(Release.last_edit))
|
limit = -1
|
||||||
else:
|
|
||||||
q = q.order_by(asc(LibraryTitle.simple_title))
|
|
||||||
|
|
||||||
if limit_offset:
|
if limit_offset:
|
||||||
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
|
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
|
||||||
limit = splt[0]
|
limit = tryInt(splt[0])
|
||||||
offset = 0 if len(splt) is 1 else splt[1]
|
offset = tryInt(0 if len(splt) is 1 else splt[1])
|
||||||
q = q.limit(limit).offset(offset)
|
|
||||||
|
|
||||||
# Get all media_ids in sorted order
|
# List movies based on title order
|
||||||
media_ids = [m.id for m in q.all()]
|
medias = []
|
||||||
|
for m in db.all('media_title'):
|
||||||
|
media_id = m['_id']
|
||||||
|
if media_id not in media_ids: continue
|
||||||
|
if offset > 0:
|
||||||
|
offset -= 1
|
||||||
|
continue
|
||||||
|
|
||||||
# List release statuses
|
media = fireEvent('media.get', media_id, single = True)
|
||||||
releases = db.query(Release) \
|
|
||||||
.filter(Release.movie_id.in_(media_ids)) \
|
|
||||||
.all()
|
|
||||||
|
|
||||||
release_statuses = dict((m, set()) for m in media_ids)
|
# Skip if no media has been found
|
||||||
releases_count = dict((m, 0) for m in media_ids)
|
if not media:
|
||||||
for release in releases:
|
continue
|
||||||
release_statuses[release.movie_id].add('%d,%d' % (release.status_id, release.quality_id))
|
|
||||||
releases_count[release.movie_id] += 1
|
|
||||||
|
|
||||||
# Get main movie data
|
|
||||||
q2 = db.query(Media) \
|
|
||||||
.options(joinedload_all('library.titles')) \
|
|
||||||
.options(joinedload_all('library.files')) \
|
|
||||||
.options(joinedload_all('status')) \
|
|
||||||
.options(joinedload_all('files'))
|
|
||||||
|
|
||||||
q2 = q2.filter(Media.id.in_(media_ids))
|
|
||||||
|
|
||||||
results = q2.all()
|
|
||||||
|
|
||||||
# Create dict by movie id
|
|
||||||
movie_dict = {}
|
|
||||||
for movie in results:
|
|
||||||
movie_dict[movie.id] = movie
|
|
||||||
|
|
||||||
# List movies based on media_ids order
|
|
||||||
movies = []
|
|
||||||
for media_id in media_ids:
|
|
||||||
|
|
||||||
releases = []
|
|
||||||
for r in release_statuses.get(media_id):
|
|
||||||
x = splitString(r)
|
|
||||||
releases.append({'status_id': x[0], 'quality_id': x[1]})
|
|
||||||
|
|
||||||
# Merge releases with movie dict
|
# Merge releases with movie dict
|
||||||
movies.append(mergeDicts(movie_dict[media_id].to_dict({
|
medias.append(media)
|
||||||
'library': {'titles': {}, 'files':{}},
|
|
||||||
'files': {},
|
|
||||||
}), {
|
|
||||||
'releases': releases,
|
|
||||||
'releases_count': releases_count.get(media_id),
|
|
||||||
}))
|
|
||||||
|
|
||||||
db.expire_all()
|
# remove from media ids
|
||||||
return total_count, movies
|
media_ids.remove(media_id)
|
||||||
|
if len(media_ids) == 0 or len(medias) == limit: break
|
||||||
|
|
||||||
|
return total_count, medias
|
||||||
|
|
||||||
def listView(self, **kwargs):
|
def listView(self, **kwargs):
|
||||||
|
|
||||||
types = splitString(kwargs.get('types'))
|
|
||||||
status = splitString(kwargs.get('status'))
|
|
||||||
release_status = splitString(kwargs.get('release_status'))
|
|
||||||
limit_offset = kwargs.get('limit_offset')
|
|
||||||
starts_with = kwargs.get('starts_with')
|
|
||||||
search = kwargs.get('search')
|
|
||||||
order = kwargs.get('order')
|
|
||||||
|
|
||||||
total_movies, movies = self.list(
|
total_movies, movies = self.list(
|
||||||
types = types,
|
types = splitString(kwargs.get('type')),
|
||||||
status = status,
|
status = splitString(kwargs.get('status')),
|
||||||
release_status = release_status,
|
release_status = splitString(kwargs.get('release_status')),
|
||||||
limit_offset = limit_offset,
|
status_or = kwargs.get('status_or') is not None,
|
||||||
starts_with = starts_with,
|
limit_offset = kwargs.get('limit_offset'),
|
||||||
search = search,
|
with_tags = splitString(kwargs.get('with_tags')),
|
||||||
order = order
|
starts_with = kwargs.get('starts_with'),
|
||||||
|
search = kwargs.get('search')
|
||||||
)
|
)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -279,69 +331,71 @@ class MediaPlugin(MediaBase):
|
|||||||
def addSingleListView(self):
|
def addSingleListView(self):
|
||||||
|
|
||||||
for media_type in fireEvent('media.types', merge = True):
|
for media_type in fireEvent('media.types', merge = True):
|
||||||
def tempList(*args, **kwargs):
|
tempList = lambda *args, **kwargs : self.listView(type = media_type, **kwargs)
|
||||||
return self.listView(types = media_type, *args, **kwargs)
|
addApiView('%s.list' % media_type, tempList, docs = {
|
||||||
addApiView('%s.list' % media_type, tempList)
|
'desc': 'List media',
|
||||||
|
'params': {
|
||||||
|
'status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status. Example:"active,done"'},
|
||||||
|
'release_status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status of its releases. Example:"snatched,available"'},
|
||||||
|
'limit_offset': {'desc': 'Limit and offset the ' + media_type + ' list. Examples: "50" or "50,30"'},
|
||||||
|
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all ' + media_type + 's starting with the letter "a"'},
|
||||||
|
'search': {'desc': 'Search ' + media_type + ' title'},
|
||||||
|
},
|
||||||
|
'return': {'type': 'object', 'example': """{
|
||||||
|
'success': True,
|
||||||
|
'empty': bool, any """ + media_type + """s returned or not,
|
||||||
|
'media': array, media found,
|
||||||
|
}"""}
|
||||||
|
})
|
||||||
|
|
||||||
def availableChars(self, types = None, status = None, release_status = None):
|
def availableChars(self, types = None, status = None, release_status = None):
|
||||||
|
|
||||||
types = types or []
|
db = get_db()
|
||||||
status = status or []
|
|
||||||
release_status = release_status or []
|
|
||||||
|
|
||||||
db = get_session()
|
|
||||||
|
|
||||||
# Make a list from string
|
# Make a list from string
|
||||||
if not isinstance(status, (list, tuple)):
|
if status and not isinstance(status, (list, tuple)):
|
||||||
status = [status]
|
status = [status]
|
||||||
if release_status and not isinstance(release_status, (list, tuple)):
|
if release_status and not isinstance(release_status, (list, tuple)):
|
||||||
release_status = [release_status]
|
release_status = [release_status]
|
||||||
if types and not isinstance(types, (list, tuple)):
|
if types and not isinstance(types, (list, tuple)):
|
||||||
types = [types]
|
types = [types]
|
||||||
|
|
||||||
q = db.query(Media)
|
# query media ids
|
||||||
|
if types:
|
||||||
|
all_media_ids = set()
|
||||||
|
for media_type in types:
|
||||||
|
all_media_ids = all_media_ids.union(set([x['_id'] for x in db.get_many('media_by_type', media_type)]))
|
||||||
|
else:
|
||||||
|
all_media_ids = set([x['_id'] for x in db.all('media')])
|
||||||
|
|
||||||
|
media_ids = all_media_ids
|
||||||
|
filter_by = {}
|
||||||
|
|
||||||
# Filter on movie status
|
# Filter on movie status
|
||||||
if status and len(status) > 0:
|
if status and len(status) > 0:
|
||||||
statuses = fireEvent('status.get', status, single = len(release_status) > 1)
|
filter_by['media_status'] = set()
|
||||||
statuses = [s.get('id') for s in statuses]
|
for media_status in fireEvent('media.with_status', status, with_doc = False, single = True):
|
||||||
|
filter_by['media_status'].add(media_status.get('_id'))
|
||||||
q = q.filter(Media.status_id.in_(statuses))
|
|
||||||
|
|
||||||
# Filter on release status
|
# Filter on release status
|
||||||
if release_status and len(release_status) > 0:
|
if release_status and len(release_status) > 0:
|
||||||
|
filter_by['release_status'] = set()
|
||||||
|
for release_status in fireEvent('release.with_status', release_status, with_doc = False, single = True):
|
||||||
|
filter_by['release_status'].add(release_status.get('media_id'))
|
||||||
|
|
||||||
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
|
# Filter by combining ids
|
||||||
statuses = [s.get('id') for s in statuses]
|
for x in filter_by:
|
||||||
|
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||||
q = q.join(Media.releases) \
|
|
||||||
.filter(Release.status_id.in_(statuses))
|
|
||||||
|
|
||||||
# Filter on type
|
|
||||||
if types and len(types) > 0:
|
|
||||||
try: q = q.filter(Media.type.in_(types))
|
|
||||||
except: pass
|
|
||||||
|
|
||||||
q = q.join(Library, LibraryTitle) \
|
|
||||||
.with_entities(LibraryTitle.simple_title) \
|
|
||||||
.filter(LibraryTitle.default == True)
|
|
||||||
|
|
||||||
titles = q.all()
|
|
||||||
|
|
||||||
chars = set()
|
chars = set()
|
||||||
for title in titles:
|
for x in db.all('media_startswith'):
|
||||||
try:
|
if x['_id'] in media_ids:
|
||||||
char = title[0][0]
|
chars.add(x['key'])
|
||||||
char = char if char in ascii_lowercase else '#'
|
|
||||||
chars.add(str(char))
|
|
||||||
except:
|
|
||||||
log.error('Failed getting title for %s', title.libraries_id)
|
|
||||||
|
|
||||||
if len(chars) == 25:
|
if len(chars) == 27:
|
||||||
break
|
break
|
||||||
|
|
||||||
db.expire_all()
|
return list(chars)
|
||||||
return ''.join(sorted(chars))
|
|
||||||
|
|
||||||
def charView(self, **kwargs):
|
def charView(self, **kwargs):
|
||||||
|
|
||||||
@@ -359,56 +413,65 @@ class MediaPlugin(MediaBase):
|
|||||||
def addSingleCharView(self):
|
def addSingleCharView(self):
|
||||||
|
|
||||||
for media_type in fireEvent('media.types', merge = True):
|
for media_type in fireEvent('media.types', merge = True):
|
||||||
def tempChar(*args, **kwargs):
|
tempChar = lambda *args, **kwargs : self.charView(type = media_type, **kwargs)
|
||||||
return self.charView(types = media_type, *args, **kwargs)
|
|
||||||
addApiView('%s.available_chars' % media_type, tempChar)
|
addApiView('%s.available_chars' % media_type, tempChar)
|
||||||
|
|
||||||
def delete(self, media_id, delete_from = None):
|
def delete(self, media_id, delete_from = None):
|
||||||
|
|
||||||
db = get_session()
|
try:
|
||||||
|
db = get_db()
|
||||||
|
|
||||||
media = db.query(Media).filter_by(id = media_id).first()
|
media = db.get('id', media_id)
|
||||||
if media:
|
if media:
|
||||||
deleted = False
|
deleted = False
|
||||||
if delete_from == 'all':
|
|
||||||
db.delete(media)
|
|
||||||
db.commit()
|
|
||||||
deleted = True
|
|
||||||
else:
|
|
||||||
done_status = fireEvent('status.get', 'done', single = True)
|
|
||||||
|
|
||||||
total_releases = len(media.releases)
|
media_releases = fireEvent('release.for_media', media['_id'], single = True)
|
||||||
total_deleted = 0
|
|
||||||
new_movie_status = None
|
if delete_from == 'all':
|
||||||
for release in media.releases:
|
# Delete connected releases
|
||||||
if delete_from in ['wanted', 'snatched', 'late']:
|
for release in media_releases:
|
||||||
if release.status_id != done_status.get('id'):
|
db.delete(release)
|
||||||
db.delete(release)
|
|
||||||
total_deleted += 1
|
|
||||||
new_movie_status = 'done'
|
|
||||||
elif delete_from == 'manage':
|
|
||||||
if release.status_id == done_status.get('id'):
|
|
||||||
db.delete(release)
|
|
||||||
total_deleted += 1
|
|
||||||
new_movie_status = 'active'
|
|
||||||
db.commit()
|
|
||||||
|
|
||||||
if total_releases == total_deleted:
|
|
||||||
db.delete(media)
|
db.delete(media)
|
||||||
db.commit()
|
|
||||||
deleted = True
|
deleted = True
|
||||||
elif new_movie_status:
|
|
||||||
new_status = fireEvent('status.get', new_movie_status, single = True)
|
|
||||||
media.profile_id = None
|
|
||||||
media.status_id = new_status.get('id')
|
|
||||||
db.commit()
|
|
||||||
else:
|
else:
|
||||||
fireEvent('media.restatus', media.id, single = True)
|
|
||||||
|
|
||||||
if deleted:
|
total_releases = len(media_releases)
|
||||||
fireEvent('notify.frontend', type = 'movie.deleted', data = media.to_dict())
|
total_deleted = 0
|
||||||
|
new_media_status = None
|
||||||
|
|
||||||
|
for release in media_releases:
|
||||||
|
if delete_from in ['wanted', 'snatched', 'late']:
|
||||||
|
if release.get('status') != 'done':
|
||||||
|
db.delete(release)
|
||||||
|
total_deleted += 1
|
||||||
|
new_media_status = 'done'
|
||||||
|
elif delete_from == 'manage':
|
||||||
|
if release.get('status') == 'done' or media.get('status') == 'done':
|
||||||
|
db.delete(release)
|
||||||
|
total_deleted += 1
|
||||||
|
|
||||||
|
if (total_releases == total_deleted) or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'):
|
||||||
|
db.delete(media)
|
||||||
|
deleted = True
|
||||||
|
elif new_media_status:
|
||||||
|
media['status'] = new_media_status
|
||||||
|
|
||||||
|
# Remove profile (no use for in manage)
|
||||||
|
if new_media_status == 'done':
|
||||||
|
media['profile_id'] = None
|
||||||
|
|
||||||
|
db.update(media)
|
||||||
|
|
||||||
|
fireEvent('media.untag', media['_id'], 'recent', single = True)
|
||||||
|
else:
|
||||||
|
fireEvent('media.restatus', media.get('_id'), single = True)
|
||||||
|
|
||||||
|
if deleted:
|
||||||
|
fireEvent('notify.frontend', type = 'media.deleted', data = media)
|
||||||
|
except:
|
||||||
|
log.error('Failed deleting media: %s', traceback.format_exc())
|
||||||
|
|
||||||
db.expire_all()
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def deleteView(self, id = '', **kwargs):
|
def deleteView(self, id = '', **kwargs):
|
||||||
@@ -424,35 +487,98 @@ class MediaPlugin(MediaBase):
|
|||||||
def addSingleDeleteView(self):
|
def addSingleDeleteView(self):
|
||||||
|
|
||||||
for media_type in fireEvent('media.types', merge = True):
|
for media_type in fireEvent('media.types', merge = True):
|
||||||
def tempDelete(*args, **kwargs):
|
tempDelete = lambda *args, **kwargs : self.deleteView(type = media_type, **kwargs)
|
||||||
return self.deleteView(types = media_type, *args, **kwargs)
|
addApiView('%s.delete' % media_type, tempDelete, docs = {
|
||||||
addApiView('%s.delete' % media_type, tempDelete)
|
'desc': 'Delete a ' + media_type + ' from the wanted list',
|
||||||
|
'params': {
|
||||||
|
'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
|
||||||
|
'delete_from': {'desc': 'Delete ' + media_type + ' from this page', 'type': 'string: all (default), wanted, manage'},
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
def restatus(self, media_id):
|
def restatus(self, media_id, tag_recent = True, allowed_restatus = None):
|
||||||
|
|
||||||
active_status, done_status = fireEvent('status.get', ['active', 'done'], single = True)
|
try:
|
||||||
|
db = get_db()
|
||||||
|
|
||||||
db = get_session()
|
m = db.get('id', media_id)
|
||||||
|
previous_status = m['status']
|
||||||
|
|
||||||
m = db.query(Media).filter_by(id = media_id).first()
|
log.debug('Changing status for %s', getTitle(m))
|
||||||
if not m or len(m.library.titles) == 0:
|
if not m['profile_id']:
|
||||||
log.debug('Can\'t restatus movie, doesn\'t seem to exist.')
|
m['status'] = 'done'
|
||||||
return False
|
else:
|
||||||
|
m['status'] = 'active'
|
||||||
|
|
||||||
log.debug('Changing status for %s', m.library.titles[0].title)
|
try:
|
||||||
if not m.profile:
|
profile = db.get('id', m['profile_id'])
|
||||||
m.status_id = done_status.get('id')
|
media_releases = fireEvent('release.for_media', m['_id'], single = True)
|
||||||
else:
|
done_releases = [release for release in media_releases if release.get('status') == 'done']
|
||||||
move_to_wanted = True
|
|
||||||
|
|
||||||
for t in m.profile.types:
|
if done_releases:
|
||||||
for release in m.releases:
|
|
||||||
if t.quality.identifier is release.quality.identifier and (release.status_id is done_status.get('id') and t.finish):
|
|
||||||
move_to_wanted = False
|
|
||||||
|
|
||||||
m.status_id = active_status.get('id') if move_to_wanted else done_status.get('id')
|
# Check if we are finished with the media
|
||||||
|
for release in done_releases:
|
||||||
|
if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True):
|
||||||
|
m['status'] = 'done'
|
||||||
|
break
|
||||||
|
|
||||||
db.commit()
|
elif previous_status == 'done':
|
||||||
|
m['status'] = 'done'
|
||||||
|
|
||||||
return True
|
except RecordNotFound:
|
||||||
|
log.debug('Failed restatus, keeping previous: %s', traceback.format_exc())
|
||||||
|
m['status'] = previous_status
|
||||||
|
|
||||||
|
# Only update when status has changed
|
||||||
|
if previous_status != m['status'] and (not allowed_restatus or m['status'] in allowed_restatus):
|
||||||
|
db.update(m)
|
||||||
|
|
||||||
|
# Tag media as recent
|
||||||
|
if tag_recent:
|
||||||
|
self.tag(media_id, 'recent', update_edited = True)
|
||||||
|
|
||||||
|
return m['status']
|
||||||
|
except:
|
||||||
|
log.error('Failed restatus: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
def tag(self, media_id, tag, update_edited = False):
|
||||||
|
|
||||||
|
try:
|
||||||
|
db = get_db()
|
||||||
|
m = db.get('id', media_id)
|
||||||
|
|
||||||
|
if update_edited:
|
||||||
|
m['last_edit'] = int(time.time())
|
||||||
|
|
||||||
|
tags = m.get('tags') or []
|
||||||
|
if tag not in tags:
|
||||||
|
tags.append(tag)
|
||||||
|
m['tags'] = tags
|
||||||
|
db.update(m)
|
||||||
|
|
||||||
|
return True
|
||||||
|
except:
|
||||||
|
log.error('Failed tagging: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def unTag(self, media_id, tag):
|
||||||
|
|
||||||
|
try:
|
||||||
|
db = get_db()
|
||||||
|
m = db.get('id', media_id)
|
||||||
|
|
||||||
|
tags = m.get('tags') or []
|
||||||
|
if tag in tags:
|
||||||
|
new_tags = list(set(tags))
|
||||||
|
new_tags.remove(tag)
|
||||||
|
|
||||||
|
m['tags'] = new_tags
|
||||||
|
db.update(m)
|
||||||
|
|
||||||
|
return True
|
||||||
|
except:
|
||||||
|
log.error('Failed untagging: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
return False
|
||||||
|
|||||||
0
couchpotato/core/migration/__init__.py → couchpotato/core/media/_base/providers/automation/__init__.py
Executable file → Normal file
0
couchpotato/core/migration/__init__.py → couchpotato/core/media/_base/providers/automation/__init__.py
Executable file → Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.base import Provider
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AutomationBase(Provider):
|
||||||
|
pass
|
||||||
@@ -1,9 +1,3 @@
|
|||||||
from couchpotato.core.event import addEvent, fireEvent
|
|
||||||
from couchpotato.core.helpers.variable import tryFloat, mergeDicts, md5, \
|
|
||||||
possibleTitles, getTitle
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from couchpotato.core.plugins.base import Plugin
|
|
||||||
from couchpotato.environment import Env
|
|
||||||
from urlparse import urlparse
|
from urlparse import urlparse
|
||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
@@ -11,8 +5,23 @@ import time
|
|||||||
import traceback
|
import traceback
|
||||||
import xml.etree.ElementTree as XMLTree
|
import xml.etree.ElementTree as XMLTree
|
||||||
|
|
||||||
|
try:
|
||||||
|
from xml.etree.ElementTree import ParseError as XmlParseError
|
||||||
|
except ImportError:
|
||||||
|
from xml.parsers.expat import ExpatError as XmlParseError
|
||||||
|
|
||||||
|
from couchpotato.core.event import addEvent, fireEvent
|
||||||
|
from couchpotato.core.helpers.encoding import ss
|
||||||
|
from couchpotato.core.helpers.variable import tryFloat, mergeDicts, md5, \
|
||||||
|
possibleTitles
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.plugins.base import Plugin
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class MultiProvider(Plugin):
|
class MultiProvider(Plugin):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@@ -35,8 +44,8 @@ class MultiProvider(Plugin):
|
|||||||
|
|
||||||
class Provider(Plugin):
|
class Provider(Plugin):
|
||||||
|
|
||||||
type = None # movie, show, subtitle, trailer, ...
|
type = None # movie, show, subtitle, trailer, ...
|
||||||
http_time_between_calls = 10 # Default timeout for url requests
|
http_time_between_calls = 10 # Default timeout for url requests
|
||||||
|
|
||||||
last_available_check = {}
|
last_available_check = {}
|
||||||
is_available = {}
|
is_available = {}
|
||||||
@@ -62,7 +71,7 @@ class Provider(Plugin):
|
|||||||
|
|
||||||
def getJsonData(self, url, decode_from = None, **kwargs):
|
def getJsonData(self, url, decode_from = None, **kwargs):
|
||||||
|
|
||||||
cache_key = '%s%s' % (md5(url), md5('%s' % kwargs.get('params', {})))
|
cache_key = md5(url)
|
||||||
data = self.getCache(cache_key, url, **kwargs)
|
data = self.getCache(cache_key, url, **kwargs)
|
||||||
|
|
||||||
if data:
|
if data:
|
||||||
@@ -79,7 +88,7 @@ class Provider(Plugin):
|
|||||||
|
|
||||||
def getRSSData(self, url, item_path = 'channel/item', **kwargs):
|
def getRSSData(self, url, item_path = 'channel/item', **kwargs):
|
||||||
|
|
||||||
cache_key = '%s%s' % (md5(url), md5('%s' % kwargs.get('params', {})))
|
cache_key = md5(url)
|
||||||
data = self.getCache(cache_key, url, **kwargs)
|
data = self.getCache(cache_key, url, **kwargs)
|
||||||
|
|
||||||
if data and len(data) > 0:
|
if data and len(data) > 0:
|
||||||
@@ -87,27 +96,32 @@ class Provider(Plugin):
|
|||||||
data = XMLTree.fromstring(data)
|
data = XMLTree.fromstring(data)
|
||||||
return self.getElements(data, item_path)
|
return self.getElements(data, item_path)
|
||||||
except:
|
except:
|
||||||
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
try:
|
||||||
|
data = XMLTree.fromstring(ss(data))
|
||||||
|
return self.getElements(data, item_path)
|
||||||
|
except XmlParseError:
|
||||||
|
log.error('Invalid XML returned, check "%s" manually for issues', url)
|
||||||
|
except:
|
||||||
|
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def getHTMLData(self, url, **kwargs):
|
def getHTMLData(self, url, **kwargs):
|
||||||
|
|
||||||
cache_key = '%s%s' % (md5(url), md5('%s' % kwargs.get('data', {})))
|
cache_key = md5(url)
|
||||||
return self.getCache(cache_key, url, **kwargs)
|
return self.getCache(cache_key, url, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
class YarrProvider(Provider):
|
class YarrProvider(Provider):
|
||||||
|
|
||||||
protocol = None # nzb, torrent, torrent_magnet
|
protocol = None # nzb, torrent, torrent_magnet
|
||||||
type = 'movie'
|
|
||||||
|
|
||||||
cat_ids = {}
|
cat_ids = {}
|
||||||
cat_backup_id = None
|
cat_backup_id = None
|
||||||
|
|
||||||
sizeGb = ['gb', 'gib']
|
size_gb = ['gb', 'gib']
|
||||||
sizeMb = ['mb', 'mib']
|
size_mb = ['mb', 'mib']
|
||||||
sizeKb = ['kb', 'kib']
|
size_kb = ['kb', 'kib']
|
||||||
|
|
||||||
last_login_check = None
|
last_login_check = None
|
||||||
|
|
||||||
@@ -122,6 +136,9 @@ class YarrProvider(Provider):
|
|||||||
else:
|
else:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
def buildUrl(self, *args, **kwargs):
|
||||||
|
pass
|
||||||
|
|
||||||
def login(self):
|
def login(self):
|
||||||
|
|
||||||
# Check if we are still logged in every hour
|
# Check if we are still logged in every hour
|
||||||
@@ -174,11 +191,11 @@ class YarrProvider(Provider):
|
|||||||
try:
|
try:
|
||||||
return self.urlopen(url, headers = {'User-Agent': Env.getIdentifier()}, show_error = False)
|
return self.urlopen(url, headers = {'User-Agent': Env.getIdentifier()}, show_error = False)
|
||||||
except:
|
except:
|
||||||
log.error('Failed getting nzb from %s: %s', (self.getName(), traceback.format_exc()))
|
log.error('Failed getting release from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
return 'try_next'
|
return 'try_next'
|
||||||
|
|
||||||
def search(self, movie, quality):
|
def search(self, media, quality):
|
||||||
|
|
||||||
if self.isDisabled():
|
if self.isDisabled():
|
||||||
return []
|
return []
|
||||||
@@ -190,15 +207,17 @@ class YarrProvider(Provider):
|
|||||||
|
|
||||||
# Create result container
|
# Create result container
|
||||||
imdb_results = hasattr(self, '_search')
|
imdb_results = hasattr(self, '_search')
|
||||||
results = ResultList(self, movie, quality, imdb_results = imdb_results)
|
results = ResultList(self, media, quality, imdb_results = imdb_results)
|
||||||
|
|
||||||
# Do search based on imdb id
|
# Do search based on imdb id
|
||||||
if imdb_results:
|
if imdb_results:
|
||||||
self._search(movie, quality, results)
|
self._search(media, quality, results)
|
||||||
# Search possible titles
|
# Search possible titles
|
||||||
else:
|
else:
|
||||||
for title in possibleTitles(getTitle(movie['library'])):
|
media_title = fireEvent('library.query', media, include_year = False, single = True)
|
||||||
self._searchOnTitle(title, movie, quality, results)
|
|
||||||
|
for title in possibleTitles(media_title):
|
||||||
|
self._searchOnTitle(title, media, quality, results)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
@@ -222,28 +241,33 @@ class YarrProvider(Provider):
|
|||||||
|
|
||||||
def parseSize(self, size):
|
def parseSize(self, size):
|
||||||
|
|
||||||
sizeRaw = size.lower()
|
size_raw = size.lower()
|
||||||
size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())
|
size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())
|
||||||
|
|
||||||
for s in self.sizeGb:
|
for s in self.size_gb:
|
||||||
if s in sizeRaw:
|
if s in size_raw:
|
||||||
return size * 1024
|
return size * 1024
|
||||||
|
|
||||||
for s in self.sizeMb:
|
for s in self.size_mb:
|
||||||
if s in sizeRaw:
|
if s in size_raw:
|
||||||
return size
|
return size
|
||||||
|
|
||||||
for s in self.sizeKb:
|
for s in self.size_kb:
|
||||||
if s in sizeRaw:
|
if s in size_raw:
|
||||||
return size / 1024
|
return size / 1024
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
def getCatId(self, identifier):
|
def getCatId(self, quality = None):
|
||||||
|
if not quality: quality = {}
|
||||||
|
identifier = quality.get('identifier')
|
||||||
|
|
||||||
for cats in self.cat_ids:
|
want_3d = False
|
||||||
ids, qualities = cats
|
if quality.get('custom'):
|
||||||
if identifier in qualities:
|
want_3d = quality['custom'].get('3d')
|
||||||
|
|
||||||
|
for ids, qualities in self.cat_ids:
|
||||||
|
if identifier in qualities or (want_3d and '3d' in qualities):
|
||||||
return ids
|
return ids
|
||||||
|
|
||||||
if self.cat_backup_id:
|
if self.cat_backup_id:
|
||||||
@@ -278,7 +302,7 @@ class ResultList(list):
|
|||||||
new_result = self.fillResult(result)
|
new_result = self.fillResult(result)
|
||||||
|
|
||||||
is_correct = fireEvent('searcher.correct_release', new_result, self.media, self.quality,
|
is_correct = fireEvent('searcher.correct_release', new_result, self.media, self.quality,
|
||||||
imdb_results = self.kwargs.get('imdb_results', False), single = True)
|
imdb_results = self.kwargs.get('imdb_results', False), single = True)
|
||||||
|
|
||||||
if is_correct and new_result['id'] not in self.result_ids:
|
if is_correct and new_result['id'] not in self.result_ids:
|
||||||
is_correct_weight = float(is_correct)
|
is_correct_weight = float(is_correct)
|
||||||
@@ -288,7 +312,7 @@ class ResultList(list):
|
|||||||
old_score = new_result['score']
|
old_score = new_result['score']
|
||||||
new_result['score'] = int(old_score * is_correct_weight)
|
new_result['score'] = int(old_score * is_correct_weight)
|
||||||
|
|
||||||
log.info('Found correct release with weight %.02f, old_score(%d) now scaled to score(%d)', (
|
log.info2('Found correct release with weight %.02f, old_score(%d) now scaled to score(%d)', (
|
||||||
is_correct_weight,
|
is_correct_weight,
|
||||||
old_score,
|
old_score,
|
||||||
new_result['score']
|
new_result['score']
|
||||||
5
couchpotato/core/media/_base/providers/info/base.py
Normal file
5
couchpotato/core/media/_base/providers/info/base.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from couchpotato.core.media._base.providers.base import Provider
|
||||||
|
|
||||||
|
|
||||||
|
class BaseInfoProvider(Provider):
|
||||||
|
type = 'unknown'
|
||||||
8
couchpotato/core/media/_base/providers/metadata/base.py
Normal file
8
couchpotato/core/media/_base/providers/metadata/base.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.plugins.base import Plugin
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MetaDataBase(Plugin):
|
||||||
|
pass
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
from couchpotato.core.providers.base import YarrProvider
|
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
from couchpotato.core.media._base.providers.base import YarrProvider
|
||||||
|
|
||||||
|
|
||||||
class NZBProvider(YarrProvider):
|
class NZBProvider(YarrProvider):
|
||||||
|
|
||||||
@@ -1,16 +1,16 @@
|
|||||||
from bs4 import BeautifulSoup
|
|
||||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
|
||||||
from couchpotato.core.helpers.variable import tryInt
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from couchpotato.core.providers.nzb.base import NZBProvider
|
|
||||||
from couchpotato.environment import Env
|
|
||||||
import re
|
import re
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, simplifyString
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class BinSearch(NZBProvider):
|
class Base(NZBProvider):
|
||||||
|
|
||||||
urls = {
|
urls = {
|
||||||
'download': 'https://www.binsearch.info/fcgi/nzb.fcgi?q=%s',
|
'download': 'https://www.binsearch.info/fcgi/nzb.fcgi?q=%s',
|
||||||
@@ -18,29 +18,17 @@ class BinSearch(NZBProvider):
|
|||||||
'search': 'https://www.binsearch.info/index.php?%s',
|
'search': 'https://www.binsearch.info/index.php?%s',
|
||||||
}
|
}
|
||||||
|
|
||||||
http_time_between_calls = 4 # Seconds
|
http_time_between_calls = 4 # Seconds
|
||||||
|
|
||||||
def _search(self, movie, quality, results):
|
def _search(self, media, quality, results):
|
||||||
|
|
||||||
arguments = tryUrlencode({
|
data = self.getHTMLData(self.urls['search'] % self.buildUrl(media, quality))
|
||||||
'q': movie['library']['identifier'],
|
|
||||||
'm': 'n',
|
|
||||||
'max': 400,
|
|
||||||
'adv_age': Env.setting('retention', 'nzb'),
|
|
||||||
'adv_sort': 'date',
|
|
||||||
'adv_col': 'on',
|
|
||||||
'adv_nfo': 'on',
|
|
||||||
'minsize': quality.get('size_min'),
|
|
||||||
'maxsize': quality.get('size_max'),
|
|
||||||
})
|
|
||||||
|
|
||||||
data = self.getHTMLData(self.urls['search'] % arguments)
|
|
||||||
|
|
||||||
if data:
|
if data:
|
||||||
try:
|
try:
|
||||||
|
|
||||||
html = BeautifulSoup(data)
|
html = BeautifulSoup(data)
|
||||||
main_table = html.find('table', attrs = {'id':'r2'})
|
main_table = html.find('table', attrs = {'id': 'r2'})
|
||||||
|
|
||||||
if not main_table:
|
if not main_table:
|
||||||
return
|
return
|
||||||
@@ -48,11 +36,11 @@ class BinSearch(NZBProvider):
|
|||||||
items = main_table.find_all('tr')
|
items = main_table.find_all('tr')
|
||||||
|
|
||||||
for row in items:
|
for row in items:
|
||||||
title = row.find('span', attrs = {'class':'s'})
|
title = row.find('span', attrs = {'class': 's'})
|
||||||
|
|
||||||
if not title: continue
|
if not title: continue
|
||||||
|
|
||||||
nzb_id = row.find('input', attrs = {'type':'checkbox'})['name']
|
nzb_id = row.find('input', attrs = {'type': 'checkbox'})['name']
|
||||||
info = row.find('span', attrs = {'class':'d'})
|
info = row.find('span', attrs = {'class':'d'})
|
||||||
size_match = re.search('size:.(?P<size>[0-9\.]+.[GMB]+)', info.text)
|
size_match = re.search('size:.(?P<size>[0-9\.]+.[GMB]+)', info.text)
|
||||||
|
|
||||||
@@ -62,10 +50,10 @@ class BinSearch(NZBProvider):
|
|||||||
|
|
||||||
def extra_check(item):
|
def extra_check(item):
|
||||||
parts = re.search('available:.(?P<parts>\d+)./.(?P<total>\d+)', info.text)
|
parts = re.search('available:.(?P<parts>\d+)./.(?P<total>\d+)', info.text)
|
||||||
total = tryInt(parts.group('total'))
|
total = float(tryInt(parts.group('total')))
|
||||||
parts = tryInt(parts.group('parts'))
|
parts = float(tryInt(parts.group('parts')))
|
||||||
|
|
||||||
if (total / parts) < 0.95 or ((total / parts) >= 0.95 and not ('par2' in info.text.lower() or 'pa3' in info.text.lower())):
|
if (total / parts) < 1 and ((total / parts) < 0.95 or ((total / parts) >= 0.95 and not ('par2' in info.text.lower() or 'pa3' in info.text.lower()))):
|
||||||
log.info2('Wrong: \'%s\', not complete: %s out of %s', (item['name'], parts, total))
|
log.info2('Wrong: \'%s\', not complete: %s out of %s', (item['name'], parts, total))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -77,7 +65,7 @@ class BinSearch(NZBProvider):
|
|||||||
|
|
||||||
results.append({
|
results.append({
|
||||||
'id': nzb_id,
|
'id': nzb_id,
|
||||||
'name': title.text,
|
'name': simplifyString(title.text),
|
||||||
'age': tryInt(age),
|
'age': tryInt(age),
|
||||||
'size': self.parseSize(size_match.group('size')),
|
'size': self.parseSize(size_match.group('size')),
|
||||||
'url': self.urls['download'] % nzb_id,
|
'url': self.urls['download'] % nzb_id,
|
||||||
@@ -102,3 +90,31 @@ class BinSearch(NZBProvider):
|
|||||||
|
|
||||||
return 'try_next'
|
return 'try_next'
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'binsearch',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'nzb_providers',
|
||||||
|
'name': 'binsearch',
|
||||||
|
'description': 'Free provider, less accurate. See <a href="https://www.binsearch.info/">BinSearch</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAATklEQVQY02NwQAMMWAXOnz+PKvD//3/CAvM//z+fgiwAAs+RBab4PP//vwbFjPlAffgEChzOo2r5fBuIfRAC5w8D+QUofkkp8MHjOWQAAM3Sbogztg2wAAAAAElFTkSuQmCC',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
271
couchpotato/core/media/_base/providers/nzb/newznab.py
Normal file
271
couchpotato/core/media/_base/providers/nzb/newznab.py
Normal file
@@ -0,0 +1,271 @@
|
|||||||
|
from urlparse import urlparse
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
import re
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode
|
||||||
|
from couchpotato.core.helpers.rss import RSS
|
||||||
|
from couchpotato.core.helpers.variable import cleanHost, splitString, tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.base import ResultList
|
||||||
|
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||||
|
from couchpotato.environment import Env
|
||||||
|
from dateutil.parser import parse
|
||||||
|
from requests import HTTPError
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(NZBProvider, RSS):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'detail': 'details/%s',
|
||||||
|
'download': 't=get&id=%s'
|
||||||
|
}
|
||||||
|
|
||||||
|
passwords_regex = 'password|wachtwoord'
|
||||||
|
limits_reached = {}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
|
||||||
|
def search(self, media, quality):
|
||||||
|
hosts = self.getHosts()
|
||||||
|
|
||||||
|
results = ResultList(self, media, quality, imdb_results = True)
|
||||||
|
|
||||||
|
for host in hosts:
|
||||||
|
if self.isDisabled(host):
|
||||||
|
continue
|
||||||
|
|
||||||
|
self._searchOnHost(host, media, quality, results)
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def _searchOnHost(self, host, media, quality, results):
|
||||||
|
|
||||||
|
query = self.buildUrl(media, host)
|
||||||
|
url = '%s%s' % (self.getUrl(host['host']), query)
|
||||||
|
nzbs = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})
|
||||||
|
|
||||||
|
for nzb in nzbs:
|
||||||
|
|
||||||
|
date = None
|
||||||
|
spotter = None
|
||||||
|
for item in nzb:
|
||||||
|
if date and spotter:
|
||||||
|
break
|
||||||
|
if item.attrib.get('name') == 'usenetdate':
|
||||||
|
date = item.attrib.get('value')
|
||||||
|
break
|
||||||
|
|
||||||
|
# Get the name of the person who posts the spot
|
||||||
|
if item.attrib.get('name') == 'poster':
|
||||||
|
if "@spot.net" in item.attrib.get('value'):
|
||||||
|
spotter = item.attrib.get('value').split("@")[0]
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not date:
|
||||||
|
date = self.getTextElement(nzb, 'pubDate')
|
||||||
|
|
||||||
|
name = self.getTextElement(nzb, 'title')
|
||||||
|
detail_url = self.getTextElement(nzb, 'guid')
|
||||||
|
nzb_id = detail_url.split('/')[-1:].pop()
|
||||||
|
|
||||||
|
if '://' not in detail_url:
|
||||||
|
detail_url = (cleanHost(host['host']) + self.urls['detail']) % tryUrlencode(nzb_id)
|
||||||
|
|
||||||
|
if not name:
|
||||||
|
continue
|
||||||
|
|
||||||
|
name_extra = ''
|
||||||
|
if spotter:
|
||||||
|
name_extra = spotter
|
||||||
|
|
||||||
|
description = ''
|
||||||
|
if "@spot.net" in nzb_id:
|
||||||
|
try:
|
||||||
|
# Get details for extended description to retrieve passwords
|
||||||
|
query = self.buildDetailsUrl(nzb_id, host['api_key'])
|
||||||
|
url = '%s%s' % (self.getUrl(host['host']), query)
|
||||||
|
nzb_details = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})[0]
|
||||||
|
|
||||||
|
description = self.getTextElement(nzb_details, 'description')
|
||||||
|
|
||||||
|
# Extract a password from the description
|
||||||
|
password = re.search('(?:' + self.passwords_regex + ')(?: *)(?:\:|\=)(?: *)(.*?)\<br\>|\n|$', description, flags = re.I).group(1)
|
||||||
|
if password:
|
||||||
|
name += ' {{%s}}' % password.strip()
|
||||||
|
except:
|
||||||
|
log.debug('Error getting details of "%s": %s', (name, traceback.format_exc()))
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': nzb_id,
|
||||||
|
'provider_extra': urlparse(host['host']).hostname or host['host'],
|
||||||
|
'name': toUnicode(name),
|
||||||
|
'name_extra': name_extra,
|
||||||
|
'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
|
||||||
|
'size': int(self.getElement(nzb, 'enclosure').attrib['length']) / 1024 / 1024,
|
||||||
|
'url': ((self.getUrl(host['host']) + self.urls['download']) % tryUrlencode(nzb_id)) + self.getApiExt(host),
|
||||||
|
'detail_url': detail_url,
|
||||||
|
'content': self.getTextElement(nzb, 'description'),
|
||||||
|
'description': description,
|
||||||
|
'score': host['extra_score'],
|
||||||
|
})
|
||||||
|
|
||||||
|
def getHosts(self):
|
||||||
|
|
||||||
|
uses = splitString(str(self.conf('use')), clean = False)
|
||||||
|
hosts = splitString(self.conf('host'), clean = False)
|
||||||
|
api_keys = splitString(self.conf('api_key'), clean = False)
|
||||||
|
extra_score = splitString(self.conf('extra_score'), clean = False)
|
||||||
|
custom_tags = splitString(self.conf('custom_tag'), clean = False)
|
||||||
|
|
||||||
|
list = []
|
||||||
|
for nr in range(len(hosts)):
|
||||||
|
|
||||||
|
try: key = api_keys[nr]
|
||||||
|
except: key = ''
|
||||||
|
|
||||||
|
try: host = hosts[nr]
|
||||||
|
except: host = ''
|
||||||
|
|
||||||
|
try: score = tryInt(extra_score[nr])
|
||||||
|
except: score = 0
|
||||||
|
|
||||||
|
try: custom_tag = custom_tags[nr]
|
||||||
|
except: custom_tag = ''
|
||||||
|
|
||||||
|
list.append({
|
||||||
|
'use': uses[nr],
|
||||||
|
'host': host,
|
||||||
|
'api_key': key,
|
||||||
|
'extra_score': score,
|
||||||
|
'custom_tag': custom_tag
|
||||||
|
})
|
||||||
|
|
||||||
|
return list
|
||||||
|
|
||||||
|
def belongsTo(self, url, provider = None, host = None):
|
||||||
|
|
||||||
|
hosts = self.getHosts()
|
||||||
|
|
||||||
|
for host in hosts:
|
||||||
|
result = super(Base, self).belongsTo(url, host = host['host'], provider = provider)
|
||||||
|
if result:
|
||||||
|
return result
|
||||||
|
|
||||||
|
def getUrl(self, host):
|
||||||
|
if '?page=newznabapi' in host:
|
||||||
|
return cleanHost(host)[:-1] + '&'
|
||||||
|
|
||||||
|
return cleanHost(host) + 'api?'
|
||||||
|
|
||||||
|
def isDisabled(self, host = None):
|
||||||
|
return not self.isEnabled(host)
|
||||||
|
|
||||||
|
def isEnabled(self, host = None):
|
||||||
|
|
||||||
|
# Return true if at least one is enabled and no host is given
|
||||||
|
if host is None:
|
||||||
|
for host in self.getHosts():
|
||||||
|
if self.isEnabled(host):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
return NZBProvider.isEnabled(self) and host['host'] and host['api_key'] and int(host['use'])
|
||||||
|
|
||||||
|
def getApiExt(self, host):
|
||||||
|
return '&apikey=%s' % host['api_key']
|
||||||
|
|
||||||
|
def download(self, url = '', nzb_id = ''):
|
||||||
|
host = urlparse(url).hostname
|
||||||
|
|
||||||
|
if self.limits_reached.get(host):
|
||||||
|
# Try again in 3 hours
|
||||||
|
if self.limits_reached[host] > time.time() - 10800:
|
||||||
|
return 'try_next'
|
||||||
|
|
||||||
|
try:
|
||||||
|
data = self.urlopen(url, show_error = False, headers = {'User-Agent': Env.getIdentifier()})
|
||||||
|
self.limits_reached[host] = False
|
||||||
|
return data
|
||||||
|
except HTTPError as e:
|
||||||
|
sc = e.response.status_code
|
||||||
|
if sc in [503, 429]:
|
||||||
|
response = e.read().lower()
|
||||||
|
if sc == 429 or 'maximum api' in response or 'download limit' in response:
|
||||||
|
if not self.limits_reached.get(host):
|
||||||
|
log.error('Limit reached / to many requests for newznab provider: %s', host)
|
||||||
|
self.limits_reached[host] = time.time()
|
||||||
|
return 'try_next'
|
||||||
|
|
||||||
|
log.error('Failed download from %s: %s', (host, traceback.format_exc()))
|
||||||
|
|
||||||
|
return 'try_next'
|
||||||
|
|
||||||
|
def buildDetailsUrl(self, nzb_id, api_key):
|
||||||
|
query = tryUrlencode({
|
||||||
|
't': 'details',
|
||||||
|
'id': nzb_id,
|
||||||
|
'apikey': api_key,
|
||||||
|
})
|
||||||
|
return query
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'newznab',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'nzb_providers',
|
||||||
|
'name': 'newznab',
|
||||||
|
'order': 10,
|
||||||
|
'description': 'Enable <a href="http://newznab.com/" target="_blank">NewzNab</a> such as <a href="https://nzb.su" target="_blank">NZB.su</a>, \
|
||||||
|
<a href="https://nzbs.org" target="_blank">NZBs.org</a>, <a href="http://dognzb.cr/" target="_blank">DOGnzb.cr</a>, \
|
||||||
|
<a href="https://github.com/spotweb/spotweb" target="_blank">Spotweb</a>, <a href="https://nzbgeek.info/" target="_blank">NZBGeek</a>, \
|
||||||
|
<a href="https://www.nzbfinder.ws" target="_blank">NZBFinder</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAgMAAABinRfyAAAACVBMVEVjhwD///86aRovd/sBAAAAMklEQVQI12NgAIPQUCCRmQkjssDEShiRuRIqwZqZGcDAGBrqANUhGgIkWAOABKMDxCAA24UK50b26SAAAAAASUVORK5CYII=',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': True,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'use',
|
||||||
|
'default': '0,0,0,0,0'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'host',
|
||||||
|
'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://api.nzbgeek.info,https://www.nzbfinder.ws',
|
||||||
|
'description': 'The hostname of your newznab provider',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'default': '0,0,0,0,0',
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'custom_tag',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Custom tag',
|
||||||
|
'default': ',,,,',
|
||||||
|
'description': 'Add custom tags, for example add rls=1 to get only scene releases from nzbs.org',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'api_key',
|
||||||
|
'default': ',,,,',
|
||||||
|
'label': 'Api Key',
|
||||||
|
'description': 'Can be found on your profile page',
|
||||||
|
'type': 'combined',
|
||||||
|
'combine': ['use', 'host', 'api_key', 'extra_score', 'custom_tag'],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,37 +1,28 @@
|
|||||||
|
import time
|
||||||
|
|
||||||
from bs4 import BeautifulSoup
|
from bs4 import BeautifulSoup
|
||||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
from couchpotato.core.helpers.rss import RSS
|
from couchpotato.core.helpers.rss import RSS
|
||||||
from couchpotato.core.helpers.variable import tryInt
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
from couchpotato.core.logger import CPLog
|
from couchpotato.core.logger import CPLog
|
||||||
from couchpotato.core.providers.nzb.base import NZBProvider
|
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||||
from dateutil.parser import parse
|
from dateutil.parser import parse
|
||||||
import time
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class NZBClub(NZBProvider, RSS):
|
class Base(NZBProvider, RSS):
|
||||||
|
|
||||||
urls = {
|
urls = {
|
||||||
'search': 'http://www.nzbclub.com/nzbfeed.aspx?%s',
|
'search': 'https://www.nzbclub.com/nzbfeeds.aspx?%s',
|
||||||
}
|
}
|
||||||
|
|
||||||
http_time_between_calls = 4 #seconds
|
http_time_between_calls = 4 # seconds
|
||||||
|
|
||||||
def _searchOnTitle(self, title, movie, quality, results):
|
def _search(self, media, quality, results):
|
||||||
|
|
||||||
q = '"%s %s"' % (title, movie['library']['year'])
|
nzbs = self.getRSSData(self.urls['search'] % self.buildUrl(media))
|
||||||
|
|
||||||
params = tryUrlencode({
|
|
||||||
'q': q,
|
|
||||||
'ig': 1,
|
|
||||||
'rpp': 200,
|
|
||||||
'st': 5,
|
|
||||||
'sp': 1,
|
|
||||||
'ns': 1,
|
|
||||||
})
|
|
||||||
|
|
||||||
nzbs = self.getRSSData(self.urls['search'] % params)
|
|
||||||
|
|
||||||
for nzb in nzbs:
|
for nzb in nzbs:
|
||||||
|
|
||||||
@@ -64,7 +55,7 @@ class NZBClub(NZBProvider, RSS):
|
|||||||
def getMoreInfo(self, item):
|
def getMoreInfo(self, item):
|
||||||
full_description = self.getCache('nzbclub.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
|
full_description = self.getCache('nzbclub.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
|
||||||
html = BeautifulSoup(full_description)
|
html = BeautifulSoup(full_description)
|
||||||
nfo_pre = html.find('pre', attrs = {'class':'nfo'})
|
nfo_pre = html.find('pre', attrs = {'class': 'nfo'})
|
||||||
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||||
|
|
||||||
item['description'] = description
|
item['description'] = description
|
||||||
@@ -78,3 +69,32 @@ class NZBClub(NZBProvider, RSS):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'nzbclub',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'nzb_providers',
|
||||||
|
'name': 'NZBClub',
|
||||||
|
'description': 'Free provider, less accurate. See <a href="https://www.nzbclub.com/">NZBClub</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACEUlEQVQ4y3VSMWgUQRR9/8/s7OzeJSdnTsVGghLEYBNQjBpQiRBFhIB2EcHG1kbs0murhZAmVocExEZQ0c7CxkLINYcJJpoYj9wZcnu72fF21uJSXMzuhyne58/j/fcf4b+KokgBIOSU53lxP5b9oNVqDT36dH+5UjoiKvIwPFEEgWBshGZ3E7/NOupL9fMjx0e+ZhKsrq+c/FPZKJi0w4FsQXMBDEJsd7BNW9h2tuyP9vfTALIJkMIu1hYRtINM+dpzcWc0sbkreK4fUEogyraAmKGF3+7vcT/wtR9QwkCabSAzQQuvk0uglAo5YaQ5DASGYjfMXcHVOqKu6NmR7iehlKAdHWUqWPv1c3i+9uwVdRlEBGaGEAJCCrDo9ShhvF6qPq8tL57bp+DbRn2sHtUuCY9YphLMu5921VhrwYJ5tbt0tt6sjQP4vEfB2Ikz7/ytwbeR6ljHkXCUA6UcOLtPOg4MYhtH8ZcLw5er+xQMDAwEURRNl96X596Y6oxFwsw9fmtTOAr2Ik19nL365FZpsLSdnQPPM8aYewc+lDcX4rkHqbQMAGTJXulOLzycmr1bKBTi3DOGYagajcahiaOT89fbM0/dxEsUu3aidfPljWO3HzebzYNBELi5Z5RSJlrrHd/3w8lT114MrVTWOn875fHRiYVisRhorWMpZXdvNnLKGCOstb0AMlulVJI19w/+nceU4D0aCwAAAABJRU5ErkJggg==',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
89
couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py
Normal file
89
couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
from couchpotato.core.event import fireEvent
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||||
|
from couchpotato.core.helpers.rss import RSS
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(NZBProvider, RSS):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'search': 'https://api.omgwtfnzbs.org/json/?%s',
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
|
||||||
|
cat_ids = [
|
||||||
|
([15], ['dvdrip', 'scr', 'r5', 'tc', 'ts', 'cam']),
|
||||||
|
([15, 16], ['brrip']),
|
||||||
|
([16], ['720p', '1080p', 'bd50']),
|
||||||
|
([17], ['dvdr']),
|
||||||
|
]
|
||||||
|
cat_backup_id = 'movie'
|
||||||
|
|
||||||
|
def _searchOnTitle(self, title, movie, quality, results):
|
||||||
|
|
||||||
|
q = '%s %s' % (title, movie['info']['year'])
|
||||||
|
params = tryUrlencode({
|
||||||
|
'search': q,
|
||||||
|
'catid': ','.join([str(x) for x in self.getCatId(quality)]),
|
||||||
|
'user': self.conf('username', default = ''),
|
||||||
|
'api': self.conf('api_key', default = ''),
|
||||||
|
})
|
||||||
|
|
||||||
|
nzbs = self.getJsonData(self.urls['search'] % params)
|
||||||
|
|
||||||
|
if isinstance(nzbs, list):
|
||||||
|
for nzb in nzbs:
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': nzb.get('nzbid'),
|
||||||
|
'name': toUnicode(nzb.get('release')),
|
||||||
|
'age': self.calculateAge(tryInt(nzb.get('usenetage'))),
|
||||||
|
'size': tryInt(nzb.get('sizebytes')) / 1024 / 1024,
|
||||||
|
'url': nzb.get('getnzb'),
|
||||||
|
'detail_url': nzb.get('details'),
|
||||||
|
'description': nzb.get('weblink')
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'omgwtfnzbs',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'nzb_providers',
|
||||||
|
'name': 'OMGWTFNZBs',
|
||||||
|
'description': 'See <a href="http://omgwtfnzbs.org/">OMGWTFNZBs</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQEAIAAADAAbR1AAADbElEQVR4AZ2UW0ybZRiAy/OvdHaLYvB0YTRIFi7GkM44zRLmIfNixkWdiRMyYoxRE8/TC7MYvXCGEBmr3mxLwVMwY0wYA7e6Wso4lB6h/U9taSlMGIfBXLYlJMyo0S///2dJI5lxN8/F2/f9nu9737e/jYmXr6KTbN9BGG9HE/NotQ76UWziNzrXFiETk/5ARUNH+7+0kW7fSgTl0VKGOLZzidOkmuuIo7q2oTArNLPIzhdIkqXkerFOm2CaD/5bcKrjIL2c3fkhPxOq93Kcb91v46fV9TQKF4TgV/TbUsQtzfCaK6jMOd5DJrguSIIhexmqqVxN0FXbRR8/ND/LYTTj6J7nl2gnL47OkDW4KJhnQHCa6JpKVNJGA3OC58nwBJoZ//ebbIyKpBxjrr0o1q1FMRkrKXZnHWF85VvxMrJxibwhGyd0f5bLnKzqJs1k0Sfo+EU8hdAUvkbcwKEgs2D0OiV4jmmD1zb+Tp6er0JMMvDxPo5xev9zTBF683NS+N56n1YiB95B5crr93KRuKhKI0tb0Kw2mgLLqTjLEWO8424i9IvURaYeOckwf3+/yCC9e3bQQ/MuD+Monk0k+XFXMUfx7z5EEP+XlXi5tLlMxH8zLppw7idJrugcus30kC86gc7UrQqjLIukM8zWHOACeU+TiMxXN6ExVOkgz4lvPEzice1GIVhxhG4CrZvpl6TH55giKWqXGLy9hZh5aUtgDSew/msSyCKpl+DDNfxJc8NBIsxUxUnz14O/oONu+IIIvso9TLBQ1SY5rUhuSzUhAqJ2mRXBLDOCeUtgUZXsaObT8BffhUJPqWgiV+3zKKzYH0ClvTRLhD77HIqVkyh5jThnivehoG+qJctIRSPn6bxvO4FCgTl9c1DmbpjLajbQFE8aW5SU3rg+zOPGUjTUF9NFpLEbH2c/KmGYlY69/GQJVtGMSUcEp9eCbB1nctbxHTLRdTUkGDf+B02uGWRG3OvpJ/zSMwzif+oxVBID3cQKBavLCiPmB2PM2UuSCUPgrX4VDb97AwEG67bh4+KTOlncvu3M31BwA5rLHbCfEjwkNDky9e/SSbSxnD46Pg0RJtpXRvhmBSZHpRjWtKwFybjuQeXaKxto4WjLZZZvVmC17pZLJFkwxm5++PS2Mrwc7nyIMYZe/IzoP5d6QgEybqTXAAAAAElFTkSuQmCC',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'api_key',
|
||||||
|
'label': 'Api Key',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'default': 20,
|
||||||
|
'type': 'int',
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
141
couchpotato/core/media/_base/providers/torrent/awesomehd.py
Normal file
141
couchpotato/core/media/_base/providers/torrent/awesomehd.py
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://awesome-hd.net/',
|
||||||
|
'detail': 'https://awesome-hd.net/torrents.php?torrentid=%s',
|
||||||
|
'search': 'https://awesome-hd.net/searchapi.php?action=imdbsearch&passkey=%s&imdb=%s&internal=%s',
|
||||||
|
'download': 'https://awesome-hd.net/torrents.php?action=download&id=%s&authkey=%s&torrent_pass=%s',
|
||||||
|
}
|
||||||
|
http_time_between_calls = 1
|
||||||
|
|
||||||
|
def _search(self, movie, quality, results):
|
||||||
|
|
||||||
|
data = self.getHTMLData(self.urls['search'] % (self.conf('passkey'), getIdentifier(movie), self.conf('only_internal')))
|
||||||
|
|
||||||
|
if data:
|
||||||
|
try:
|
||||||
|
soup = BeautifulSoup(data)
|
||||||
|
|
||||||
|
if soup.find('error'):
|
||||||
|
log.error(soup.find('error').get_text())
|
||||||
|
return
|
||||||
|
|
||||||
|
authkey = soup.find('authkey').get_text()
|
||||||
|
entries = soup.find_all('torrent')
|
||||||
|
|
||||||
|
for entry in entries:
|
||||||
|
|
||||||
|
torrentscore = 0
|
||||||
|
torrent_id = entry.find('id').get_text()
|
||||||
|
name = entry.find('name').get_text()
|
||||||
|
year = entry.find('year').get_text()
|
||||||
|
releasegroup = entry.find('releasegroup').get_text()
|
||||||
|
resolution = entry.find('resolution').get_text()
|
||||||
|
encoding = entry.find('encoding').get_text()
|
||||||
|
freeleech = entry.find('freeleech').get_text()
|
||||||
|
torrent_desc = '/ %s / %s / %s ' % (releasegroup, resolution, encoding)
|
||||||
|
|
||||||
|
if freeleech == '0.25' and self.conf('prefer_internal'):
|
||||||
|
torrent_desc += '/ Internal'
|
||||||
|
torrentscore += 200
|
||||||
|
|
||||||
|
if encoding == 'x264' and self.conf('favor') in ['encode', 'both']:
|
||||||
|
torrentscore += 300
|
||||||
|
if re.search('Remux', encoding) and self.conf('favor') in ['remux', 'both']:
|
||||||
|
torrentscore += 200
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': torrent_id,
|
||||||
|
'name': re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) %s' % (name, year, torrent_desc)),
|
||||||
|
'url': self.urls['download'] % (torrent_id, authkey, self.conf('passkey')),
|
||||||
|
'detail_url': self.urls['detail'] % torrent_id,
|
||||||
|
'size': tryInt(entry.find('size').get_text()) / 1048576,
|
||||||
|
'seeders': tryInt(entry.find('seeders').get_text()),
|
||||||
|
'leechers': tryInt(entry.find('leechers').get_text()),
|
||||||
|
'score': torrentscore
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'awesomehd',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'Awesome-HD',
|
||||||
|
'description': '<a href="https://awesome-hd.net">AHD</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC+UlEQVR4AV1SO0y6dxQ9H4g8CoIoohZ5NA0aR2UgkYpNB5uocTSaLlrDblMH09Gt8d90r3YpJkanxjA4GGkbO7RNxSABq8jDGnkpD+UD5NV7Bxvbk9wvv+/3uPece66A/yEWi42FQqHVfD7/cbPZtIEglUpjOp3uZHR0dBvAn3gDIRqNgjE4OKj0+Xzf3NzcfD4wMCCjf5TLZbTbbajVatzf3+Pu7q5uNpt35ufnvwBQAScQRREEldfr9RWLxan+/n5YrVa+jFarhVfQQyQSCU4EhULhX15engEgSrjC0dHRVqlUmjQYDBgaGgKtuTqz4mTgIoVCASaTCX19fajVapOHh4dbFJBks9mxcDi8qtFoJEajkfVyJWi1WkxMTMDhcIAT8x6D7/Dd6+vr1fHx8TGp2+3+iqo5+YCzBwIBToK5ubl/mQwPDyMSibAs2Gw2UHNRrValz8/PDUk8Hv9EqVRCr9fj4uICTNflcqFer+Pg4AB7e3uoVCq8x9Rxfn6O7u5uqFQq8FspZXxHTekggByA3W4Hr9PpNDeRL3I1cMhkMrBrnZ2dyGQyvNYIs7OzVbJNPjIyAraLwYdcjR8wXl5eIJfLwRIFQQDLYkm3t7c1CdGPPT4+cpOImp4PODMeaK+n10As2jBbrHifHOjS6qAguVFimkqlwAMmIQnHV1dX4NDQhVwuhyZTV6pgIktzDzkkk0lEwhEEzs7ASQr5Ai4vL1nuccfCwsLO/v6+p9FoyJhF6ekJro/cPCzIZLNQa7rQoK77/SdgWWpKkCaJ5EB9aWnpe6nH40nRMBnJV4f5gw+FX3/5GX/8/htXRZdOzzqhJWn6nl6YbTZqqhrhULD16fT0d8FgcFtYW1vD5uamfGVl5cd4IjldKhZACdkJvKfWUANrxEaJV4hiGVaL1b+7653hXzwRZQr2X76xsfG1xWIRaZzbNPv/CdrjEL9cX/+WXFBSgEPgzxuwG3Yans9OT0+naBZMIJDNfzudzp8WFxd/APAX3uAf9WOTxOPLdosAAAAASUVORK5CYII=',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'passkey',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'only_internal',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Only search for internal releases.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'prefer_internal',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Favors internal releases over non-internal releases.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'favor',
|
||||||
|
'advanced': True,
|
||||||
|
'default': 'both',
|
||||||
|
'type': 'dropdown',
|
||||||
|
'values': [('Encodes & Remuxes', 'both'), ('Encodes', 'encode'), ('Remuxes', 'remux'), ('None', 'none')],
|
||||||
|
'description': 'Give extra scoring to encodes or remuxes.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'int',
|
||||||
|
'default': 20,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
|
|
||||||
@@ -1,8 +1,11 @@
|
|||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
|
||||||
from couchpotato.core.helpers.variable import getImdb, md5, cleanHost
|
from couchpotato.core.helpers.variable import getImdb, md5, cleanHost
|
||||||
from couchpotato.core.logger import CPLog
|
from couchpotato.core.logger import CPLog
|
||||||
from couchpotato.core.providers.base import YarrProvider
|
from couchpotato.core.media._base.providers.base import YarrProvider
|
||||||
from couchpotato.environment import Env
|
from couchpotato.environment import Env
|
||||||
import time
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
@@ -41,14 +44,15 @@ class TorrentProvider(YarrProvider):
|
|||||||
|
|
||||||
prop_name = 'proxy.%s' % proxy
|
prop_name = 'proxy.%s' % proxy
|
||||||
last_check = float(Env.prop(prop_name, default = 0))
|
last_check = float(Env.prop(prop_name, default = 0))
|
||||||
if last_check > time.time() - 1209600:
|
|
||||||
|
if last_check > time.time() - 86400:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
data = ''
|
data = ''
|
||||||
try:
|
try:
|
||||||
data = self.urlopen(proxy, timeout = 3, show_error = False)
|
data = self.urlopen(proxy, timeout = 3, show_error = False)
|
||||||
except:
|
except:
|
||||||
log.debug('Failed %s proxy %s', (self.getName(), proxy))
|
log.debug('Failed %s proxy %s: %s', (self.getName(), proxy, traceback.format_exc()))
|
||||||
|
|
||||||
if self.correctProxy(data):
|
if self.correctProxy(data):
|
||||||
log.debug('Using proxy for %s: %s', (self.getName(), proxy))
|
log.debug('Using proxy for %s: %s', (self.getName(), proxy))
|
||||||
@@ -63,9 +67,10 @@ class TorrentProvider(YarrProvider):
|
|||||||
|
|
||||||
return cleanHost(self.proxy_domain).rstrip('/') + url
|
return cleanHost(self.proxy_domain).rstrip('/') + url
|
||||||
|
|
||||||
def correctProxy(self):
|
def correctProxy(self, data):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
class TorrentMagnetProvider(TorrentProvider):
|
class TorrentMagnetProvider(TorrentProvider):
|
||||||
|
|
||||||
protocol = 'torrent_magnet'
|
protocol = 'torrent_magnet'
|
||||||
139
couchpotato/core/media/_base/providers/torrent/bithdtv.py
Normal file
139
couchpotato/core/media/_base/providers/torrent/bithdtv.py
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://www.bit-hdtv.com/',
|
||||||
|
'login': 'https://www.bit-hdtv.com/takelogin.php',
|
||||||
|
'login_check': 'https://www.bit-hdtv.com/messages.php',
|
||||||
|
'detail': 'https://www.bit-hdtv.com/details.php?id=%s',
|
||||||
|
'search': 'https://www.bit-hdtv.com/torrents.php?',
|
||||||
|
}
|
||||||
|
|
||||||
|
# Searches for movies only - BiT-HDTV's subcategory and resolution search filters appear to be broken
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
|
||||||
|
def _search(self, media, quality, results):
|
||||||
|
|
||||||
|
query = self.buildUrl(media, quality)
|
||||||
|
|
||||||
|
url = "%s&%s" % (self.urls['search'], query)
|
||||||
|
|
||||||
|
data = self.getHTMLData(url)
|
||||||
|
|
||||||
|
if data:
|
||||||
|
# Remove BiT-HDTV's output garbage so outdated BS4 versions successfully parse the HTML
|
||||||
|
split_data = data.partition('-->')
|
||||||
|
if '## SELECT COUNT(' in split_data[0]:
|
||||||
|
data = split_data[2]
|
||||||
|
|
||||||
|
html = BeautifulSoup(data)
|
||||||
|
|
||||||
|
try:
|
||||||
|
result_table = html.find('table', attrs = {'width': '750', 'class': ''})
|
||||||
|
if result_table is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
entries = result_table.find_all('tr')
|
||||||
|
for result in entries[1:]:
|
||||||
|
|
||||||
|
cells = result.find_all('td')
|
||||||
|
link = cells[2].find('a')
|
||||||
|
torrent_id = link['href'].replace('/details.php?id=', '')
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': torrent_id,
|
||||||
|
'name': link.contents[0].get_text(),
|
||||||
|
'url': cells[0].find('a')['href'],
|
||||||
|
'detail_url': self.urls['detail'] % torrent_id,
|
||||||
|
'size': self.parseSize(cells[6].get_text()),
|
||||||
|
'seeders': tryInt(cells[8].string),
|
||||||
|
'leechers': tryInt(cells[9].string),
|
||||||
|
'get_more_info': self.getMoreInfo,
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
def getLoginParams(self):
|
||||||
|
return {
|
||||||
|
'username': self.conf('username'),
|
||||||
|
'password': self.conf('password'),
|
||||||
|
}
|
||||||
|
|
||||||
|
def getMoreInfo(self, item):
|
||||||
|
full_description = self.getCache('bithdtv.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
|
||||||
|
html = BeautifulSoup(full_description)
|
||||||
|
nfo_pre = html.find('table', attrs = {'class': 'detail'})
|
||||||
|
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||||
|
|
||||||
|
item['description'] = description
|
||||||
|
return item
|
||||||
|
|
||||||
|
def loginSuccess(self, output):
|
||||||
|
return 'logout.php' in output.lower()
|
||||||
|
|
||||||
|
loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'bithdtv',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'BiT-HDTV',
|
||||||
|
'description': '<a href="https://bit-hdtv.com">BiT-HDTV</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABMklEQVR4AZ3Qu0ojcQCF8W9MJcQbJNgEEQUbQVIqWgnaWfkIvoCgggixEAmIhRtY2GV3w7KwU61B0EYIxmiw0YCik84ipaCuc0nmP5dcjIUgOjqDvxf4OAdf9mnMLcUJyPyGSCP+YRdC+Kp8iagJKhuS+InYRhTGgDbeV2uEMand4ZRxizjXHQEimxhraAnUr73BNqQxMiNeV2SwcjTLEVtb4Zl10mXutvOWm2otw5Sxz6TGTbdd6ncuYvVLXAXrvM+ruyBpy1S3JLGDfUQ1O6jn5vTsrJXvqSt4UNfj6vxTRPxBHER5QeSirhLGk/5rWN+ffB1XZuxjnDy1q87m7TS+xOGA+Iv4gfkbaw+nOMXHDHnITGEk0VfRFnn4Po4vNYm6RGukmggR0L08+l+e4HMeASo/i6AJUjLgAAAAAElFTkSuQmCC',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'default': '',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 20,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
137
couchpotato/core/media/_base/providers/torrent/bitsoup.py
Normal file
137
couchpotato/core/media/_base/providers/torrent/bitsoup.py
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup, SoupStrainer
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://www.bitsoup.me/',
|
||||||
|
'login': 'https://www.bitsoup.me/takelogin.php',
|
||||||
|
'login_check': 'https://www.bitsoup.me/my.php',
|
||||||
|
'search': 'https://www.bitsoup.me/browse.php?%s',
|
||||||
|
'baseurl': 'https://www.bitsoup.me/%s',
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
only_tables_tags = SoupStrainer('table')
|
||||||
|
|
||||||
|
torrent_name_cell = 1
|
||||||
|
torrent_download_cell = 2
|
||||||
|
|
||||||
|
def _searchOnTitle(self, title, movie, quality, results):
|
||||||
|
|
||||||
|
url = self.urls['search'] % self.buildUrl(title, movie, quality)
|
||||||
|
data = self.getHTMLData(url)
|
||||||
|
|
||||||
|
if data:
|
||||||
|
html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)
|
||||||
|
|
||||||
|
try:
|
||||||
|
result_table = html.find('table', attrs = {'class': 'koptekst'})
|
||||||
|
if not result_table or 'nothing found!' in data.lower():
|
||||||
|
return
|
||||||
|
|
||||||
|
entries = result_table.find_all('tr')
|
||||||
|
for result in entries[1:]:
|
||||||
|
|
||||||
|
all_cells = result.find_all('td')
|
||||||
|
|
||||||
|
torrent = all_cells[self.torrent_name_cell].find('a')
|
||||||
|
download = all_cells[self.torrent_download_cell].find('a')
|
||||||
|
|
||||||
|
torrent_id = torrent['href']
|
||||||
|
torrent_id = torrent_id.replace('details.php?id=', '')
|
||||||
|
torrent_id = torrent_id.replace('&hit=1', '')
|
||||||
|
|
||||||
|
torrent_name = torrent.getText()
|
||||||
|
|
||||||
|
torrent_size = self.parseSize(all_cells[8].getText())
|
||||||
|
torrent_seeders = tryInt(all_cells[10].getText())
|
||||||
|
torrent_leechers = tryInt(all_cells[11].getText())
|
||||||
|
torrent_url = self.urls['baseurl'] % download['href']
|
||||||
|
torrent_detail_url = self.urls['baseurl'] % torrent['href']
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': torrent_id,
|
||||||
|
'name': torrent_name,
|
||||||
|
'size': torrent_size,
|
||||||
|
'seeders': torrent_seeders,
|
||||||
|
'leechers': torrent_leechers,
|
||||||
|
'url': torrent_url,
|
||||||
|
'detail_url': torrent_detail_url,
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
def getLoginParams(self):
|
||||||
|
return {
|
||||||
|
'username': self.conf('username'),
|
||||||
|
'password': self.conf('password'),
|
||||||
|
'ssl': 'yes',
|
||||||
|
}
|
||||||
|
|
||||||
|
def loginSuccess(self, output):
|
||||||
|
return 'logout.php' in output.lower()
|
||||||
|
|
||||||
|
loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'bitsoup',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'Bitsoup',
|
||||||
|
'description': '<a href="https://bitsoup.me">Bitsoup</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAB8ElEQVR4AbWSS2sTURiGz3euk0mswaE37HhNhIrajQheFgF3rgR/lAt/gOBCXNZlo6AbqfUWRVCxi04wqUnTRibpJLaJzdzOOZ6WUumyC5/VHOb9eN/FA91uFx0FjI4IPfgiGLTWH73tn348GKmN7ijD0d2b41fO5qJEaX24AWNIUrVQCTTJ3Llx6vbV6Vtzk7Gi9+ebi996guFDDYAQAVj4FExP5qdOZB49W62t/zH3hECcwsPnbWeMXz6Xi2K1f0ApeK3hMCHHbP5gvvoriBgFAAQJEAxhjJ4u+YWTNsVI6b1JgtPWZkoIefKy4fcii2OTw2BABs7wj3bYDlLL4rvjGWOdTser1j5Xf7c3Q/MbHQYApxItvnm31mhQQ71eX2vUB76/vsWB2hg0QuogrMwLIG8P3InM2/eVGXeDViqVwWB79vRU2lgJYmdHcgXCTAXQFJTN5HguvDCR2Hxsxe8EvT54nlcul5vNpqDIEgwRQanAhAAABgRIyiQcjpIkkTOuWyqVoN/vSylX67XXH74uV1vHRUyxxFqbLBCSmBpiXSq6xcL5QrGYzWZ3XQIAwdlOJB+/aL764ucdmncYs0WsCI7kvTnn+qyDMEnTVCn1Tz5KsBFg6fvWcmsUAcnYNC/g2hnromvvqbHvxv+39S+MX+bWkFXwAgAAAABJRU5ErkJggg==',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'default': '',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 20,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
130
couchpotato/core/media/_base/providers/torrent/hdaccess.py
Normal file
130
couchpotato/core/media/_base/providers/torrent/hdaccess.py
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://hdaccess.net/',
|
||||||
|
'detail': 'https://hdaccess.net/details.php?id=%s',
|
||||||
|
'search': 'https://hdaccess.net/searchapi.php?apikey=%s&username=%s&imdbid=%s&internal=%s',
|
||||||
|
'download': 'https://hdaccess.net/grab.php?torrent=%s&apikey=%s',
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
|
||||||
|
def _search(self, movie, quality, results):
|
||||||
|
data = self.getJsonData(self.urls['search'] % (self.conf('apikey'), self.conf('username'), getIdentifier(movie), self.conf('internal_only')))
|
||||||
|
|
||||||
|
if data:
|
||||||
|
try:
|
||||||
|
#for result in data[]:
|
||||||
|
for key, result in data.iteritems():
|
||||||
|
if tryInt(result['total_results']) == 0:
|
||||||
|
return
|
||||||
|
torrentscore = self.conf('extra_score')
|
||||||
|
releasegroup = result['releasegroup']
|
||||||
|
resolution = result['resolution']
|
||||||
|
encoding = result['encoding']
|
||||||
|
freeleech = tryInt(result['freeleech'])
|
||||||
|
seeders = tryInt(result['seeders'])
|
||||||
|
torrent_desc = '/ %s / %s / %s / %s seeders' % (releasegroup, resolution, encoding, seeders)
|
||||||
|
|
||||||
|
if freeleech > 0 and self.conf('prefer_internal'):
|
||||||
|
torrent_desc += '/ Internal'
|
||||||
|
torrentscore += 200
|
||||||
|
|
||||||
|
if seeders == 0:
|
||||||
|
torrentscore = 0
|
||||||
|
|
||||||
|
name = result['release_name']
|
||||||
|
year = tryInt(result['year'])
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': tryInt(result['torrentid']),
|
||||||
|
'name': re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) %s' % (name, year, torrent_desc)),
|
||||||
|
'url': self.urls['download'] % (result['torrentid'], self.conf('apikey')),
|
||||||
|
'detail_url': self.urls['detail'] % result['torrentid'],
|
||||||
|
'size': tryInt(result['size']),
|
||||||
|
'seeders': tryInt(result['seeders']),
|
||||||
|
'leechers': tryInt(result['leechers']),
|
||||||
|
'age': tryInt(result['age']),
|
||||||
|
'score': torrentscore
|
||||||
|
})
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
config = [{
|
||||||
|
'name': 'hdaccess',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'HDAccess',
|
||||||
|
'wizard': True,
|
||||||
|
'description': '<a href="https://hdaccess.net">HDAccess</a>',
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABmJLR0QA/wD/AP+gvaeTAAADuUlEQVQ4yz3T209bdQAH8O/vnNNzWno5FIpAKZdSLi23gWMDtumWuSXOyzJj9M1kyIOPS1xiYuKe9GUPezZZnGIiMTqTxS1bdIuYkG2MWKBAKYVszOgKFkrbA+259HfO+fli/PwPHzI+Pg5CCEAI2VcUlEsl1tHdU7P5bGOkWChEaaUCwvHpmkD93POn6bwgCMQGAMYYYwyCruuQnE7SPzjIstvb8l+bm5fXkokJSmlQEkUQAIpSRH5vd0tyum7I/sA1Z5VH2ctmiGWZjHw4McE1NAZtQ9fD25kXt1VN7es7dNjuGRjiJFeVpWo6slsZPhF/Ys/PPeIs2056ff7zIOS5rpU5/viJEwwEnu3Mi18dojjw0aWP6amz57h9RSE/35zinq2nuGjvIQwOj7K2SKeZWkk0auXSSZ+/ZopSy+CbW1pQKpWu6Jr2/qVPPqWRjm6HWi6Tm999g3RyGbndLCqGgVBrO3F7fHykK0YX47NNtGLYlBq/c+H2iD+3k704dHQUDcFmQVXLyP6zhfTqCl45fQYjx17FemoJunoAk1bQFGoVhkdPwNC0ix2dMT+3llodM02rKdo7gN3dHAEhuH/vNgDg3Pl3cPaNt2GZJpYX5lBbFwClBukfGobL5WrayW6NccVCISY4HIQxYts2Q3J5CXOPHuLlo6NoCoXQ2hbG0JFRpJYWcVDIQ5ZlyL5qW5b9hNlWjKsYBgzDgKppMCoGHty7A0orOHbyNNweL+obGnDm9TdhWSYS8Vn4a2shOZ0QJRGSKIHjeGGtWNhjqqpyG+k04k8eozPai9ZwByavf4kfpyZxZGwMfYOHsbwQx34hB5dL4syKweRq/xpXHwzNapqWSSYWMDszzYqFPEaOn4KiKJiZfoCZ6d8Am+GtC++iXCpjaf4P9vefT8HzfKarp3eWRKMxCILwuWXSz977YIK2RTodDoGH1+OG1+tDlbsKkuiAJEngeWBjNUUnv7rucIiOLyzTvMKJTgnVtbVXLctK3L31g+NAUajL5bEptaDpOnTdgGkzVHl9drms0ju3fnJIkphoaQtfbQiFwAcCAY5wnCE5Xff3i8XX4o9nGksH+8zl9hAGZlWMCivkc9z0L3fZ999+LTCGZKi55YJTFHfye3sc6e/vB88LpK6+iWlqSS4WcpcNXZtwOp3B6mo/REmCSSkEgd+qq3vpRkt75Fp9Y1BZWZwnhq4zEovF/u/MATAti4U7umvyu9kR27aikihC9vvTnV2xufVUMu/2uIksy/9tZvgX49fLmAMx3bsAAAAASUVORK5CYII=',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
'description': 'Enter your site username.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'apikey',
|
||||||
|
'default': '',
|
||||||
|
'label': 'API Key',
|
||||||
|
'description': 'Enter your site api key. This can be find on <a href="https://hdaccess.net/usercp.php?action=security">Profile Security</a>',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met. HDAccess minimum is 1:1.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met. HDAccess minimum is 48 hours.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'prefer_internal',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Favors internal releases over non-internal releases.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'internal_only',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Internal Only',
|
||||||
|
'type': 'bool',
|
||||||
|
'default': False,
|
||||||
|
'description': 'Only download releases marked as HDAccess internal',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
127
couchpotato/core/media/_base/providers/torrent/hdbits.py
Normal file
127
couchpotato/core/media/_base/providers/torrent/hdbits.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
import re
|
||||||
|
import json
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://hdbits.org/',
|
||||||
|
'detail': 'https://hdbits.org/details.php?id=%s',
|
||||||
|
'download': 'https://hdbits.org/download.php?id=%s&passkey=%s',
|
||||||
|
'api': 'https://hdbits.org/api/torrents'
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
|
||||||
|
def _post_query(self, **params):
|
||||||
|
|
||||||
|
post_data = {
|
||||||
|
'username': self.conf('username'),
|
||||||
|
'passkey': self.conf('passkey')
|
||||||
|
}
|
||||||
|
post_data.update(params)
|
||||||
|
|
||||||
|
if self.conf('internal_only'):
|
||||||
|
post_data.update({'origin': [1]})
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = self.getJsonData(self.urls['api'], data = json.dumps(post_data))
|
||||||
|
|
||||||
|
if result:
|
||||||
|
if result['status'] != 0:
|
||||||
|
log.error('Error searching hdbits: %s' % result['message'])
|
||||||
|
else:
|
||||||
|
return result['data']
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _search(self, movie, quality, results):
|
||||||
|
|
||||||
|
match = re.match(r'tt(\d{7})', getIdentifier(movie))
|
||||||
|
|
||||||
|
data = self._post_query(imdb = {'id': match.group(1)})
|
||||||
|
|
||||||
|
if data:
|
||||||
|
try:
|
||||||
|
for result in data:
|
||||||
|
results.append({
|
||||||
|
'id': result['id'],
|
||||||
|
'name': result['name'],
|
||||||
|
'url': self.urls['download'] % (result['id'], self.conf('passkey')),
|
||||||
|
'detail_url': self.urls['detail'] % result['id'],
|
||||||
|
'size': tryInt(result['size']) / 1024 / 1024,
|
||||||
|
'seeders': tryInt(result['seeders']),
|
||||||
|
'leechers': tryInt(result['leechers'])
|
||||||
|
})
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'hdbits',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'HDBits',
|
||||||
|
'wizard': True,
|
||||||
|
'description': '<a href="http://hdbits.org">HDBits</a>',
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABi0lEQVR4AZWSzUsbQRjGdyabTcvSNPTSHlpQQeMHJApC8CJRvHgQQU969+LJP8G7f4N3DwpeFRQvRr0EKaUl0ATSpkigUNFsMl/r9NmZLCEHA/nNO5PfvMPDm0DI6fV3ZxiolEICe1oZCBVCCmBPKwOh2ErKBHGE4KYEXBpSLkUlqO4LcM7f+6nVhRnOhSkOz/hexk+tL+YL0yPF2YmN4tynD++4gTLGkNNac9YFLoREBR1+cnF3dFY6v/m6PD+FaXiNJtgA4xYbABxiGrz6+6HWaI5/+Qh37YS0/3Znc8UxwNGBIIBX22z+/ZdJ+4wzyjpR4PEpODg8tgUXBv2iWUzSpa12B0IR6n6lvt8Aek2lZHb084+fdRNgrwY8z81PjhVy2d2ttUrtV/lbBa+JXGEpDMPnoF2tN1QYRqVUtf6nFbThb7wk7le395elcqhASLb39okDiHY00VCtCTEHwSiH4AI0lkOiT1dwMeSfT3SRxiQWNO7Zwj1egkoVIQFMKvSiC3bcjXq9Jf8DcDIRT3hh10kAAAAASUVORK5CYII=',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'passkey',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'internal_only',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Internal Only',
|
||||||
|
'type': 'bool',
|
||||||
|
'default': False,
|
||||||
|
'description': 'Only download releases marked as HDBits internal'
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
195
couchpotato/core/media/_base/providers/torrent/ilovetorrents.py
Normal file
195
couchpotato/core/media/_base/providers/torrent/ilovetorrents.py
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, splitString
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'download': 'https://www.ilovetorrents.me/%s',
|
||||||
|
'detail': 'https://www.ilovetorrents.me/%s',
|
||||||
|
'search': 'https://www.ilovetorrents.me/browse.php?search=%s&page=%s&cat=%s',
|
||||||
|
'test': 'https://www.ilovetorrents.me/',
|
||||||
|
'login': 'https://www.ilovetorrents.me/takelogin.php',
|
||||||
|
'login_check': 'https://www.ilovetorrents.me'
|
||||||
|
}
|
||||||
|
|
||||||
|
cat_ids = [
|
||||||
|
(['41'], ['720p', '1080p', 'brrip']),
|
||||||
|
(['19'], ['cam', 'ts', 'dvdrip', 'tc', 'r5', 'scr']),
|
||||||
|
(['20'], ['dvdr'])
|
||||||
|
]
|
||||||
|
|
||||||
|
cat_backup_id = 200
|
||||||
|
disable_provider = False
|
||||||
|
http_time_between_calls = 1
|
||||||
|
|
||||||
|
def _searchOnTitle(self, title, movie, quality, results):
|
||||||
|
|
||||||
|
page = 0
|
||||||
|
total_pages = 1
|
||||||
|
cats = self.getCatId(quality)
|
||||||
|
|
||||||
|
while page < total_pages:
|
||||||
|
|
||||||
|
movieTitle = tryUrlencode('"%s" %s' % (title, movie['info']['year']))
|
||||||
|
search_url = self.urls['search'] % (movieTitle, page, cats[0])
|
||||||
|
page += 1
|
||||||
|
|
||||||
|
data = self.getHTMLData(search_url)
|
||||||
|
if data:
|
||||||
|
try:
|
||||||
|
|
||||||
|
results_table = None
|
||||||
|
|
||||||
|
data_split = splitString(data, '<table')
|
||||||
|
soup = None
|
||||||
|
for x in data_split:
|
||||||
|
soup = BeautifulSoup(x)
|
||||||
|
results_table = soup.find('table', attrs = {'class': 'koptekst'})
|
||||||
|
if results_table:
|
||||||
|
break
|
||||||
|
|
||||||
|
if not results_table:
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
pagelinks = soup.findAll(href = re.compile('page'))
|
||||||
|
page_numbers = [int(re.search('page=(?P<page_number>.+'')', i['href']).group('page_number')) for i in pagelinks]
|
||||||
|
total_pages = max(page_numbers)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
entries = results_table.find_all('tr')
|
||||||
|
|
||||||
|
for result in entries[1:]:
|
||||||
|
prelink = result.find(href = re.compile('details.php'))
|
||||||
|
link = prelink['href']
|
||||||
|
download = result.find('a', href = re.compile('download.php'))['href']
|
||||||
|
|
||||||
|
if link and download:
|
||||||
|
|
||||||
|
def extra_score(item):
|
||||||
|
trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
|
||||||
|
vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
|
||||||
|
confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
|
||||||
|
moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]
|
||||||
|
|
||||||
|
return confirmed + trusted + vip + moderated
|
||||||
|
|
||||||
|
id = re.search('id=(?P<id>\d+)&', link).group('id')
|
||||||
|
url = self.urls['download'] % download
|
||||||
|
|
||||||
|
fileSize = self.parseSize(result.select('td.rowhead')[5].text)
|
||||||
|
results.append({
|
||||||
|
'id': id,
|
||||||
|
'name': toUnicode(prelink.find('b').text),
|
||||||
|
'url': url,
|
||||||
|
'detail_url': self.urls['detail'] % link,
|
||||||
|
'size': fileSize,
|
||||||
|
'seeders': tryInt(result.find_all('td')[2].string),
|
||||||
|
'leechers': tryInt(result.find_all('td')[3].string),
|
||||||
|
'extra_score': extra_score,
|
||||||
|
'get_more_info': self.getMoreInfo
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
def getLoginParams(self):
|
||||||
|
return {
|
||||||
|
'username': self.conf('username'),
|
||||||
|
'password': self.conf('password'),
|
||||||
|
'submit': 'Welcome to ILT',
|
||||||
|
}
|
||||||
|
|
||||||
|
def getMoreInfo(self, item):
|
||||||
|
cache_key = 'ilt.%s' % item['id']
|
||||||
|
description = self.getCache(cache_key)
|
||||||
|
|
||||||
|
if not description:
|
||||||
|
|
||||||
|
try:
|
||||||
|
full_description = self.getHTMLData(item['detail_url'])
|
||||||
|
html = BeautifulSoup(full_description)
|
||||||
|
nfo_pre = html.find('td', attrs = {'class': 'main'}).findAll('table')[1]
|
||||||
|
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||||
|
except:
|
||||||
|
log.error('Failed getting more info for %s', item['name'])
|
||||||
|
description = ''
|
||||||
|
|
||||||
|
self.setCache(cache_key, description, timeout = 25920000)
|
||||||
|
|
||||||
|
item['description'] = description
|
||||||
|
return item
|
||||||
|
|
||||||
|
def loginSuccess(self, output):
|
||||||
|
return 'logout.php' in output.lower()
|
||||||
|
|
||||||
|
loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'ilovetorrents',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'ILoveTorrents',
|
||||||
|
'description': 'Where the Love of Torrents is Born',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACPUlEQVR4AYWM0U9SbxjH3+v266I/oNvWZTfd2J1d0ZqbZEFwWrUImOKs4YwtumFKZvvlJJADR2TCQQlMPKg5NmpREgaekAPnBATKgmK1LqQlx6awHnZWF1Tr2Xfvvs+7z+dB0mlO7StpAh+M4S/2jbo3w8+xvJvlnSneEt+10zwer5ujNUOoChjALWFw5XOwdCAk/P57cGvPl+Oht0W7VJHN5NC1uW1BON4hGjXbwpVWMZhsy9v7sEIXAsDNYBXgdkEoIKyWD2CF8ut/aOXTZc/fBSgLWw1BgA4BDHOV0GkT90cBQpXahU5TFomsb38XhJC5/Tbh1P8c6rJlBeGfAeyMhUFwNVcs9lxV9Ot0dwmyd+mrNvRtbJ2fSPC6Z3Vsvub2z3sDFACAAYzk0+kUyxEkyfN7PopqNBro55A+P6yPKIrL5zF1HwjdeBJJCObIsZO79bo3sHhWhglo5WMV3mazuVPb4fLvSL8/FAkB1hK6rXQPwYhMyROK8VK5LAiH/jsMt0HQjxiN4/ePdoilllcqDyt3Mkg8mRBNbIhMb8RERkowQA/p76g0/UDDdCoNmDminM0qSK5vlpE5kugCHhNPxntwWmJPYTMZtYcFR6ABHQsVRlYLukVORaaULvqKI46keFSCv77kSPS6kxrPptLNDHgz16fWBtyxe6v5h08LUy+KI8ushqTPWWIX8Sg6b45IrGtyW6zXFb/hpQf9m3oqfWuB0fpSw0uZ4WB69En69uOk2rmO2V52PXj+A/mI4ESKpb2HAAAAAElFTkSuQmCC',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'label': 'Username',
|
||||||
|
'type': 'string',
|
||||||
|
'default': '',
|
||||||
|
'description': 'The user name for your ILT account',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'label': 'Password',
|
||||||
|
'type': 'password',
|
||||||
|
'default': '',
|
||||||
|
'description': 'The password for your ILT account.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}]
|
||||||
172
couchpotato/core/media/_base/providers/torrent/iptorrents.py
Normal file
172
couchpotato/core/media/_base/providers/torrent/iptorrents.py
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://iptorrents.eu/',
|
||||||
|
'base_url': 'https://iptorrents.eu',
|
||||||
|
'login': 'https://iptorrents.eu/torrents/',
|
||||||
|
'login_check': 'https://iptorrents.eu/inbox.php',
|
||||||
|
'search': 'https://iptorrents.eu/torrents/?%s%%s&q=%s&qf=ti&p=%%d',
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
cat_backup_id = None
|
||||||
|
|
||||||
|
def buildUrl(self, title, media, quality):
|
||||||
|
return self._buildUrl(title.replace(':', ''), quality)
|
||||||
|
|
||||||
|
def _buildUrl(self, query, quality):
|
||||||
|
|
||||||
|
cat_ids = self.getCatId(quality)
|
||||||
|
|
||||||
|
if not cat_ids:
|
||||||
|
log.warning('Unable to find category ids for identifier "%s"', quality.get('identifier'))
|
||||||
|
return None
|
||||||
|
|
||||||
|
return self.urls['search'] % ("&".join(("l%d=" % x) for x in cat_ids), tryUrlencode(query).replace('%', '%%'))
|
||||||
|
|
||||||
|
def _searchOnTitle(self, title, media, quality, results):
|
||||||
|
|
||||||
|
freeleech = '' if not self.conf('freeleech') else '&free=on'
|
||||||
|
|
||||||
|
base_url = self.buildUrl(title, media, quality)
|
||||||
|
if not base_url: return
|
||||||
|
|
||||||
|
pages = 1
|
||||||
|
current_page = 1
|
||||||
|
while current_page <= pages and not self.shuttingDown():
|
||||||
|
data = self.getHTMLData(base_url % (freeleech, current_page))
|
||||||
|
|
||||||
|
if data:
|
||||||
|
html = BeautifulSoup(data)
|
||||||
|
|
||||||
|
try:
|
||||||
|
page_nav = html.find('span', attrs = {'class': 'page_nav'})
|
||||||
|
if page_nav:
|
||||||
|
next_link = page_nav.find("a", text = "Next")
|
||||||
|
if next_link:
|
||||||
|
final_page_link = next_link.previous_sibling.previous_sibling
|
||||||
|
pages = int(final_page_link.string)
|
||||||
|
|
||||||
|
result_table = html.find('table', attrs = {'class': 'torrents'})
|
||||||
|
|
||||||
|
if not result_table or 'nothing found!' in data.lower():
|
||||||
|
return
|
||||||
|
|
||||||
|
entries = result_table.find_all('tr')
|
||||||
|
|
||||||
|
for result in entries[1:]:
|
||||||
|
|
||||||
|
torrent = result.find_all('td')
|
||||||
|
if len(torrent) <= 1:
|
||||||
|
break
|
||||||
|
|
||||||
|
torrent = torrent[1].find('a')
|
||||||
|
|
||||||
|
torrent_id = torrent['href'].replace('/details.php?id=', '')
|
||||||
|
torrent_name = six.text_type(torrent.string)
|
||||||
|
torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href'].replace(' ', '.')
|
||||||
|
torrent_details_url = self.urls['base_url'] + torrent['href']
|
||||||
|
torrent_size = self.parseSize(result.find_all('td')[5].string)
|
||||||
|
torrent_seeders = tryInt(result.find('td', attrs = {'class': 'ac t_seeders'}).string)
|
||||||
|
torrent_leechers = tryInt(result.find('td', attrs = {'class': 'ac t_leechers'}).string)
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': torrent_id,
|
||||||
|
'name': torrent_name,
|
||||||
|
'url': torrent_download_url,
|
||||||
|
'detail_url': torrent_details_url,
|
||||||
|
'size': torrent_size,
|
||||||
|
'seeders': torrent_seeders,
|
||||||
|
'leechers': torrent_leechers,
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
break
|
||||||
|
|
||||||
|
current_page += 1
|
||||||
|
|
||||||
|
def getLoginParams(self):
|
||||||
|
return {
|
||||||
|
'username': self.conf('username'),
|
||||||
|
'password': self.conf('password'),
|
||||||
|
'login': 'submit',
|
||||||
|
}
|
||||||
|
|
||||||
|
def loginSuccess(self, output):
|
||||||
|
return 'don\'t have an account' not in output.lower()
|
||||||
|
|
||||||
|
def loginCheckSuccess(self, output):
|
||||||
|
return '/logout.php' in output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'iptorrents',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'IPTorrents',
|
||||||
|
'description': '<a href="https://iptorrents.eu">IPTorrents</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABRklEQVR42qWQO0vDUBiG8zeKY3EqQUtNO7g0J6ZJ1+ifKIIFQXAqDYKCyaaYxM3udrZLHdRFhXrZ6liCW6mubfk874EESgqaeOCF7/Y8hEh41aq6yZi2nyZgBGya9XKtZs4No05pAkZV2YbEmyMMsoSxLQeC46wCTdPPY4HruPQyGIhF97qLWsS78Miydn4XdK46NJ9OsQAYBzMIMf8MQ9wtCnTdWCaIDx/u7uljOIQEe0hiIWPamSTLay3+RxOCSPI9+RJAo7Er9r2bnqjBFAqyK+VyK4f5/Cr5ni8OFKVCz49PFI5GdNvvU7ttE1M1zMU+8AMqFksEhrMnQsBDzqmDAwzx2ehRLwT7yyCI+vSC99c3mozH1NxrJgWWtR1BOECfEJSVCm6WCzJGCA7+IWhBsM4zywDPwEp4vCjx2DzBH2ODAfsDb33Ps6dQwJgAAAAASUVORK5CYII=',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'default': '',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'freeleech',
|
||||||
|
'default': 0,
|
||||||
|
'type': 'bool',
|
||||||
|
'description': 'Only search for [FreeLeech] torrents.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -0,0 +1,181 @@
|
|||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentMagnetProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'detail': '%s/%s',
|
||||||
|
'search': '%s/%s-i%s/',
|
||||||
|
}
|
||||||
|
|
||||||
|
cat_ids = [
|
||||||
|
(['cam'], ['cam']),
|
||||||
|
(['telesync'], ['ts', 'tc']),
|
||||||
|
(['screener', 'tvrip'], ['screener']),
|
||||||
|
(['x264', '720p', '1080p', 'blu-ray', 'hdrip'], ['bd50', '1080p', '720p', 'brrip']),
|
||||||
|
(['dvdrip'], ['dvdrip']),
|
||||||
|
(['dvd'], ['dvdr']),
|
||||||
|
]
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
cat_backup_id = None
|
||||||
|
|
||||||
|
proxy_list = [
|
||||||
|
'https://kickass.to',
|
||||||
|
'http://kickass.pw',
|
||||||
|
'http://kickassto.come.in',
|
||||||
|
'http://katproxy.ws',
|
||||||
|
'http://kickass.bitproxy.eu',
|
||||||
|
'http://katph.eu',
|
||||||
|
'http://kickassto.come.in',
|
||||||
|
]
|
||||||
|
|
||||||
|
def _search(self, media, quality, results):
|
||||||
|
|
||||||
|
data = self.getHTMLData(self.urls['search'] % (self.getDomain(), 'm', getIdentifier(media).replace('tt', '')))
|
||||||
|
|
||||||
|
if data:
|
||||||
|
|
||||||
|
cat_ids = self.getCatId(quality)
|
||||||
|
table_order = ['name', 'size', None, 'age', 'seeds', 'leechers']
|
||||||
|
|
||||||
|
try:
|
||||||
|
html = BeautifulSoup(data)
|
||||||
|
resultdiv = html.find('div', attrs = {'class': 'tabs'})
|
||||||
|
for result in resultdiv.find_all('div', recursive = False):
|
||||||
|
if result.get('id').lower().strip('tab-') not in cat_ids:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
for temp in result.find_all('tr'):
|
||||||
|
if temp['class'] is 'firstr' or not temp.get('id'):
|
||||||
|
continue
|
||||||
|
|
||||||
|
new = {}
|
||||||
|
|
||||||
|
nr = 0
|
||||||
|
for td in temp.find_all('td'):
|
||||||
|
column_name = table_order[nr]
|
||||||
|
if column_name:
|
||||||
|
|
||||||
|
if column_name == 'name':
|
||||||
|
link = td.find('div', {'class': 'torrentname'}).find_all('a')[2]
|
||||||
|
new['id'] = temp.get('id')[-7:]
|
||||||
|
new['name'] = link.text
|
||||||
|
new['url'] = td.find('a', 'imagnet')['href']
|
||||||
|
new['detail_url'] = self.urls['detail'] % (self.getDomain(), link['href'][1:])
|
||||||
|
new['verified'] = True if td.find('a', 'iverify') else False
|
||||||
|
new['score'] = 100 if new['verified'] else 0
|
||||||
|
elif column_name is 'size':
|
||||||
|
new['size'] = self.parseSize(td.text)
|
||||||
|
elif column_name is 'age':
|
||||||
|
new['age'] = self.ageToDays(td.text)
|
||||||
|
elif column_name is 'seeds':
|
||||||
|
new['seeders'] = tryInt(td.text)
|
||||||
|
elif column_name is 'leechers':
|
||||||
|
new['leechers'] = tryInt(td.text)
|
||||||
|
|
||||||
|
nr += 1
|
||||||
|
|
||||||
|
# Only store verified torrents
|
||||||
|
if self.conf('only_verified') and not new['verified']:
|
||||||
|
continue
|
||||||
|
|
||||||
|
results.append(new)
|
||||||
|
except:
|
||||||
|
log.error('Failed parsing KickAssTorrents: %s', traceback.format_exc())
|
||||||
|
|
||||||
|
except AttributeError:
|
||||||
|
log.debug('No search results found.')
|
||||||
|
|
||||||
|
def ageToDays(self, age_str):
|
||||||
|
age = 0
|
||||||
|
age_str = age_str.replace(' ', ' ')
|
||||||
|
|
||||||
|
regex = '(\d*.?\d+).(sec|hour|day|week|month|year)+'
|
||||||
|
matches = re.findall(regex, age_str)
|
||||||
|
for match in matches:
|
||||||
|
nr, size = match
|
||||||
|
mult = 1
|
||||||
|
if size == 'week':
|
||||||
|
mult = 7
|
||||||
|
elif size == 'month':
|
||||||
|
mult = 30.5
|
||||||
|
elif size == 'year':
|
||||||
|
mult = 365
|
||||||
|
|
||||||
|
age += tryInt(nr) * mult
|
||||||
|
|
||||||
|
return tryInt(age)
|
||||||
|
|
||||||
|
def isEnabled(self):
|
||||||
|
return super(Base, self).isEnabled() and self.getDomain()
|
||||||
|
|
||||||
|
def correctProxy(self, data):
|
||||||
|
return 'search query' in data.lower()
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'kickasstorrents',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'KickAssTorrents',
|
||||||
|
'description': '<a href="https://kat.ph/">KickAssTorrents</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACD0lEQVR42pXK20uTcRjA8d/fsJsuap0orBuFlm3hir3JJvQOVmuwllN20Lb2isI2nVHKjBqrCWYaNnNuBrkSWxglhDVJOkBdSWUOq5FgoiOrMdRJ2xPPxW+8OUf1ge/FcyCUSVe2qedK5U/OxNTTXRNXEQ52Glb4O6dNEfK1auJkvRY7+/zxnQbA/D596laXcY3OWOiaIX2393SGznUmxkUo/YkDgqHemuzobQ7+NV+reo5Q1mqp68GABdY3+/EloO+JeN4tEqiFU8f3CwhyWo9E7wfMgI0ELTDx0AvjIxcgvZoC9P7NMN7yMmrFeoKa68rfDfmrARsNN0Ihr55cx59ctZWSiwS5bLKpwW4dYJH+M/B6/CYszE0BFZ+egG+Ln+HRoBN/cpl1pV6COIMkOnBVA/w+fXgGKJVM4LxhumMleoL06hJ3wKcCfl+/TAKKx17gnFePRwkqxR4BQSpFkbCrrQJueI7mWpyfATQ9OQY43+uv/+PutBycJ3y2qn2x7jY50GJvnwLKZjOwspyE5I8F4N+1yr1uwqcs3ym63Hwo29EiAyzUWQVr6WVAS4lZCPutQG/2GtES2YiW3d3XflYKtL72kzAcdEDHeSa3czeIMyyz/TApRKvcFfE0isHbJMnrHCf6xTLb1ORvWNlWo91cvHrJUQo0o6ZoRi7dIiT/g2WEDi27Iyov21xMCvgNfXvtwIACfHwAAAAASUVORK5CYII=',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': True,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'domain',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Proxy server',
|
||||||
|
'description': 'Domain for requests, keep empty to let CouchPotato pick.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'only_verified',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'default': False,
|
||||||
|
'description': 'Only search for verified releases.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
@@ -1,67 +1,42 @@
|
|||||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
|
||||||
from couchpotato.core.helpers.variable import getTitle, tryInt, mergeDicts
|
|
||||||
from couchpotato.core.logger import CPLog
|
|
||||||
from couchpotato.core.providers.torrent.base import TorrentProvider
|
|
||||||
from dateutil.parser import parse
|
|
||||||
import htmlentitydefs
|
import htmlentitydefs
|
||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||||
|
from couchpotato.core.helpers.variable import getTitle, tryInt, mergeDicts, getIdentifier
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
from dateutil.parser import parse
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
log = CPLog(__name__)
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PassThePopcorn(TorrentProvider):
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
urls = {
|
urls = {
|
||||||
'domain': 'https://tls.passthepopcorn.me',
|
'domain': 'https://tls.passthepopcorn.me',
|
||||||
'detail': 'https://tls.passthepopcorn.me/torrents.php?torrentid=%s',
|
'detail': 'https://tls.passthepopcorn.me/torrents.php?torrentid=%s',
|
||||||
'torrent': 'https://tls.passthepopcorn.me/torrents.php',
|
'torrent': 'https://tls.passthepopcorn.me/torrents.php',
|
||||||
'login': 'https://tls.passthepopcorn.me/ajax.php?action=login',
|
'login': 'https://tls.passthepopcorn.me/ajax.php?action=login',
|
||||||
'login_check': 'https://tls.passthepopcorn.me/ajax.php?action=login',
|
'login_check': 'https://tls.passthepopcorn.me/ajax.php?action=login',
|
||||||
'search': 'https://tls.passthepopcorn.me/search/%s/0/7/%d'
|
'search': 'https://tls.passthepopcorn.me/search/%s/0/7/%d'
|
||||||
}
|
}
|
||||||
|
|
||||||
http_time_between_calls = 2
|
http_time_between_calls = 2
|
||||||
|
|
||||||
quality_search_params = {
|
def _search(self, media, quality, results):
|
||||||
'bd50': {'media': 'Blu-ray', 'format': 'BD50'},
|
|
||||||
'1080p': {'resolution': '1080p'},
|
|
||||||
'720p': {'resolution': '720p'},
|
|
||||||
'brrip': {'media': 'Blu-ray'},
|
|
||||||
'dvdr': {'resolution': 'anysd'},
|
|
||||||
'dvdrip': {'media': 'DVD'},
|
|
||||||
'scr': {'media': 'DVD-Screener'},
|
|
||||||
'r5': {'media': 'R5'},
|
|
||||||
'tc': {'media': 'TC'},
|
|
||||||
'ts': {'media': 'TS'},
|
|
||||||
'cam': {'media': 'CAM'}
|
|
||||||
}
|
|
||||||
|
|
||||||
post_search_filters = {
|
movie_title = getTitle(media)
|
||||||
'bd50': {'Codec': ['BD50']},
|
|
||||||
'1080p': {'Resolution': ['1080p']},
|
|
||||||
'720p': {'Resolution': ['720p']},
|
|
||||||
'brrip': {'Source': ['Blu-ray'], 'Quality': ['High Definition'], 'Container': ['!ISO']},
|
|
||||||
'dvdr': {'Codec': ['DVD5', 'DVD9']},
|
|
||||||
'dvdrip': {'Source': ['DVD'], 'Codec': ['!DVD5', '!DVD9']},
|
|
||||||
'scr': {'Source': ['DVD-Screener']},
|
|
||||||
'r5': {'Source': ['R5']},
|
|
||||||
'tc': {'Source': ['TC']},
|
|
||||||
'ts': {'Source': ['TS']},
|
|
||||||
'cam': {'Source': ['CAM']}
|
|
||||||
}
|
|
||||||
|
|
||||||
def _search(self, movie, quality, results):
|
|
||||||
|
|
||||||
movie_title = getTitle(movie['library'])
|
|
||||||
quality_id = quality['identifier']
|
quality_id = quality['identifier']
|
||||||
|
|
||||||
params = mergeDicts(self.quality_search_params[quality_id].copy(), {
|
params = mergeDicts(self.quality_search_params[quality_id].copy(), {
|
||||||
'order_by': 'relevance',
|
'order_by': 'relevance',
|
||||||
'order_way': 'descending',
|
'order_way': 'descending',
|
||||||
'searchstr': movie['library']['identifier']
|
'searchstr': getIdentifier(media)
|
||||||
})
|
})
|
||||||
|
|
||||||
url = '%s?json=noredirect&%s' % (self.urls['torrent'], tryUrlencode(params))
|
url = '%s?json=noredirect&%s' % (self.urls['torrent'], tryUrlencode(params))
|
||||||
@@ -88,11 +63,15 @@ class PassThePopcorn(TorrentProvider):
|
|||||||
if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']:
|
if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']:
|
||||||
torrentdesc += ' HQ'
|
torrentdesc += ' HQ'
|
||||||
if self.conf('prefer_golden'):
|
if self.conf('prefer_golden'):
|
||||||
torrentscore += 200
|
torrentscore += 5000
|
||||||
|
if 'FreeleechType' in torrent:
|
||||||
|
torrentdesc += ' Freeleech'
|
||||||
|
if self.conf('prefer_freeleech'):
|
||||||
|
torrentscore += 7000
|
||||||
if 'Scene' in torrent and torrent['Scene']:
|
if 'Scene' in torrent and torrent['Scene']:
|
||||||
torrentdesc += ' Scene'
|
torrentdesc += ' Scene'
|
||||||
if self.conf('prefer_scene'):
|
if self.conf('prefer_scene'):
|
||||||
torrentscore += 50
|
torrentscore += 2000
|
||||||
if 'RemasterTitle' in torrent and torrent['RemasterTitle']:
|
if 'RemasterTitle' in torrent and torrent['RemasterTitle']:
|
||||||
torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle'])
|
torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle'])
|
||||||
|
|
||||||
@@ -161,24 +140,24 @@ class PassThePopcorn(TorrentProvider):
|
|||||||
|
|
||||||
def htmlToUnicode(self, text):
|
def htmlToUnicode(self, text):
|
||||||
def fixup(m):
|
def fixup(m):
|
||||||
text = m.group(0)
|
txt = m.group(0)
|
||||||
if text[:2] == "&#":
|
if txt[:2] == "&#":
|
||||||
# character reference
|
# character reference
|
||||||
try:
|
try:
|
||||||
if text[:3] == "&#x":
|
if txt[:3] == "&#x":
|
||||||
return unichr(int(text[3:-1], 16))
|
return unichr(int(txt[3:-1], 16))
|
||||||
else:
|
else:
|
||||||
return unichr(int(text[2:-1]))
|
return unichr(int(txt[2:-1]))
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
# named entity
|
# named entity
|
||||||
try:
|
try:
|
||||||
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
|
txt = unichr(htmlentitydefs.name2codepoint[txt[1:-1]])
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
return text # leave as is
|
return txt # leave as is
|
||||||
return re.sub("&#?\w+;", fixup, u'%s' % text)
|
return re.sub("&#?\w+;", fixup, six.u('%s') % text)
|
||||||
|
|
||||||
def unicodeToASCII(self, text):
|
def unicodeToASCII(self, text):
|
||||||
import unicodedata
|
import unicodedata
|
||||||
@@ -189,11 +168,11 @@ class PassThePopcorn(TorrentProvider):
|
|||||||
|
|
||||||
def getLoginParams(self):
|
def getLoginParams(self):
|
||||||
return {
|
return {
|
||||||
'username': self.conf('username'),
|
'username': self.conf('username'),
|
||||||
'password': self.conf('password'),
|
'password': self.conf('password'),
|
||||||
'passkey': self.conf('passkey'),
|
'passkey': self.conf('passkey'),
|
||||||
'keeplogged': '1',
|
'keeplogged': '1',
|
||||||
'login': 'Login'
|
'login': 'Login'
|
||||||
}
|
}
|
||||||
|
|
||||||
def loginSuccess(self, output):
|
def loginSuccess(self, output):
|
||||||
@@ -203,3 +182,98 @@ class PassThePopcorn(TorrentProvider):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
loginCheckSuccess = loginSuccess
|
loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'passthepopcorn',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'PassThePopcorn',
|
||||||
|
'description': '<a href="https://passthepopcorn.me">PassThePopcorn.me</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAARklEQVQoz2NgIAP8BwMiGWRpIN1JNWn/t6T9f532+W8GkNt7vzz9UkfarZVpb68BuWlbnqW1nU7L2DMx7eCoBlpqGOppCQB83zIgIg+wWQAAAABJRU5ErkJggg==',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'domain',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Proxy server',
|
||||||
|
'description': 'Domain for requests (HTTPS only!), keep empty to use default (tls.passthepopcorn.me).',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'default': '',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'passkey',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'prefer_golden',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'label': 'Prefer golden',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Favors Golden Popcorn-releases over all other releases.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'prefer_freeleech',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'label': 'Prefer Freeleech',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Favors torrents marked as freeleech over all other releases.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'prefer_scene',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'label': 'Prefer scene',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Favors scene-releases over non-scene releases.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'require_approval',
|
||||||
|
'advanced': True,
|
||||||
|
'type': 'bool',
|
||||||
|
'label': 'Require approval',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Require staff-approval for releases to be accepted.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 20,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}]
|
||||||
136
couchpotato/core/media/_base/providers/torrent/sceneaccess.py
Normal file
136
couchpotato/core/media/_base/providers/torrent/sceneaccess.py
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://www.sceneaccess.eu/',
|
||||||
|
'login': 'https://www.sceneaccess.eu/login',
|
||||||
|
'login_check': 'https://www.sceneaccess.eu/inbox',
|
||||||
|
'detail': 'https://www.sceneaccess.eu/details?id=%s',
|
||||||
|
'search': 'https://www.sceneaccess.eu/browse?c%d=%d',
|
||||||
|
'archive': 'https://www.sceneaccess.eu/archive?&c%d=%d',
|
||||||
|
'download': 'https://www.sceneaccess.eu/%s',
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
|
||||||
|
def _searchOnTitle(self, title, media, quality, results):
|
||||||
|
|
||||||
|
url = self.buildUrl(title, media, quality)
|
||||||
|
data = self.getHTMLData(url)
|
||||||
|
|
||||||
|
if data:
|
||||||
|
html = BeautifulSoup(data)
|
||||||
|
|
||||||
|
try:
|
||||||
|
resultsTable = html.find('table', attrs = {'id': 'torrents-table'})
|
||||||
|
if resultsTable is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
entries = resultsTable.find_all('tr', attrs = {'class': 'tt_row'})
|
||||||
|
for result in entries:
|
||||||
|
|
||||||
|
link = result.find('td', attrs = {'class': 'ttr_name'}).find('a')
|
||||||
|
url = result.find('td', attrs = {'class': 'td_dl'}).find('a')
|
||||||
|
seeders = result.find('td', attrs = {'class': 'ttr_seeders'}).find('a')
|
||||||
|
leechers = result.find('td', attrs = {'class': 'ttr_leechers'}).find('a')
|
||||||
|
torrent_id = link['href'].replace('details?id=', '')
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': torrent_id,
|
||||||
|
'name': link['title'],
|
||||||
|
'url': self.urls['download'] % url['href'],
|
||||||
|
'detail_url': self.urls['detail'] % torrent_id,
|
||||||
|
'size': self.parseSize(result.find('td', attrs = {'class': 'ttr_size'}).contents[0]),
|
||||||
|
'seeders': tryInt(seeders.string) if seeders else 0,
|
||||||
|
'leechers': tryInt(leechers.string) if leechers else 0,
|
||||||
|
'get_more_info': self.getMoreInfo,
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
def getMoreInfo(self, item):
|
||||||
|
full_description = self.getCache('sceneaccess.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
|
||||||
|
html = BeautifulSoup(full_description)
|
||||||
|
nfo_pre = html.find('div', attrs = {'id': 'details_table'})
|
||||||
|
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||||
|
|
||||||
|
item['description'] = description
|
||||||
|
return item
|
||||||
|
|
||||||
|
# Login
|
||||||
|
def getLoginParams(self):
|
||||||
|
return {
|
||||||
|
'username': self.conf('username'),
|
||||||
|
'password': self.conf('password'),
|
||||||
|
'submit': 'come on in',
|
||||||
|
}
|
||||||
|
|
||||||
|
def loginSuccess(self, output):
|
||||||
|
return '/inbox' in output.lower()
|
||||||
|
|
||||||
|
loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
|
||||||
|
# Settings block rendered in the "searcher" tab for the SceneAccess provider.
config = [{
    'name': 'sceneaccess',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'SceneAccess',
            'description': '<a href="https://sceneaccess.eu/">SceneAccess</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAACT0lEQVR4AYVQS0sbURidO3OTmajJ5FElTTOkPmZ01GhHrIq0aoWAj1Vc+A/cuRMXbl24V9SlCGqrLhVFCrooEhCp2BAx0mobTY2kaR7qmOm87EXL1EWxh29xL+c7nPMdgGHYO5bF/gdbefnr6WlbWRnxluMwAB4Z0uEgXa7nwaDL7+/RNPzxbYvb/XJ0FBYVfd/ayh0fQ4qCGEHcm0KLRZUk7Pb2YRJPRwcsKMidnKD3t9VVT3s7BDh+z5FOZ3Vfn3h+Hltfx00mRRSRWFcUmmVNhYVqPn8dj3va2oh+txvcQRVF9ebm1fi4k+dRFbosY5rm4Hk7xxULQnJnx93S4g0EIEEQRoDLo6PrWEw8Pc0eHLwYGopMTDirqlJ7eyhYYGHhfgfHCcKYksZGVB/NcXI2mw6HhZERqrjYTNPHi4tFPh8aJIYIhgPlcCRDoZLW1s75+Z/7+59nZ/OJhLWigqAoKZX6Mjf3dXkZ3pydGYLc4aEoCCkInzQ1fRobS2xuvllaonkedfArnY5OTdGVldBkOADgqq2Nr6z8CIWaJietDHOhKB+HhwFKC6Gnq4ukKJvP9zcSbjYDXbeVlkKzuZBhnnV3e3t6UOmaJO0ODibW1hB1GYkg8R/gup7Z3TVZLJ5AILW9LcZiVpYtYBhw16O3t7cauckyeF9Tgz0ATpL2+nopmWycmbnY2LiKRjFk6/d7+/vRJfl4HGzV1T0UIM43MGBvaIBWK/YvwM5w+IMgGH8tkyEgvIpE7M3Nt6qqZrNyOq1kMmouh455Ggz+BhKY4GEc2CfwAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||||
172
couchpotato/core/media/_base/providers/torrent/thepiratebay.py
Normal file
172
couchpotato/core/media/_base/providers/torrent/thepiratebay.py
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentMagnetProvider):
    """ThePirateBay provider.

    Scrapes paginated search result pages from one of a list of proxy
    mirrors (chosen via getDomain()) and yields magnet-link releases.
    """

    urls = {
        'detail': '%s/torrent/%s',
        'search': '%s/search/%%s/%%s/7/%%s'
    }

    cat_backup_id = 200
    disable_provider = False
    http_time_between_calls = 0

    # Candidate mirrors; getDomain() picks a working one (see correctProxy).
    proxy_list = [
        'https://dieroschtibay.org',
        'https://thebay.al',
        'https://thepiratebay.se',
        'http://thepiratebay.se.net',
        'http://thebootlegbay.com',
        'http://tpb.ninja.so',
        'http://proxybay.fr',
        'http://pirateproxy.in',
        'http://piratebay.skey.sk',
        'http://pirateproxy.be',
        'http://bayproxy.li',
        'http://proxybay.pw',
    ]

    def _search(self, media, quality, results):
        """Search all result pages for `media` and append parsed releases.

        Follows pagination (the page count is read from the pager div).
        Parse failures are logged and swallowed.
        """

        page = 0
        total_pages = 1
        cats = self.getCatId(quality)

        base_search_url = self.urls['search'] % self.getDomain()

        while page < total_pages:

            search_url = base_search_url % self.buildUrl(media, page, cats)

            page += 1

            data = self.getHTMLData(search_url)

            if data:
                try:
                    soup = BeautifulSoup(data)
                    results_table = soup.find('table', attrs = {'id': 'searchResult'})

                    if not results_table:
                        return

                    try:
                        # The pager's link count equals the number of result pages.
                        total_pages = len(soup.find('div', attrs = {'align': 'center'}).find_all('a'))
                    except:
                        pass

                    entries = results_table.find_all('tr')
                    for result in entries[1:]:  # first row is the table header
                        # raw strings: '\d' in a plain literal is an invalid escape
                        link = result.find(href = re.compile(r'torrent/\d+/'))
                        download = result.find(href = re.compile('magnet:'))

                        try:
                            size = re.search('Size (?P<size>.+),', six.text_type(result.select('font.detDesc')[0])).group('size')
                        except:
                            continue

                        if link and download:

                            def extra_score(item):
                                # Bonus points for uploader status badges on the row.
                                trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
                                vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
                                confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
                                moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]

                                return confirmed + trusted + vip + moderated

                            results.append({
                                'id': re.search(r'/(?P<id>\d+)/', link['href']).group('id'),
                                'name': six.text_type(link.string),
                                'url': download['href'],
                                'detail_url': self.getDomain(link['href']),
                                'size': self.parseSize(size),
                                'seeders': tryInt(result.find_all('td')[2].string),
                                'leechers': tryInt(result.find_all('td')[3].string),
                                'extra_score': extra_score,
                                'get_more_info': self.getMoreInfo
                            })

                except:
                    log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def isEnabled(self):
        """Enabled only when configured on AND a working mirror is known."""
        return super(Base, self).isEnabled() and self.getDomain()

    def correctProxy(self, data):
        """Heuristic mirror check: a real TPB page contains the search-box title."""
        return 'title="Pirate Search"' in data

    def getMoreInfo(self, item):
        """Attach the NFO text from the cached detail page as item['description']."""
        full_description = self.getCache('tpb.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
        html = BeautifulSoup(full_description)
        nfo_pre = html.find('div', attrs = {'class': 'nfo'})
        description = ''
        try:
            description = toUnicode(nfo_pre.text)
        except:
            pass

        item['description'] = description
        return item
|
||||||
|
|
||||||
|
|
||||||
|
# Settings block rendered in the "searcher" tab for the ThePirateBay provider.
config = [{
    'name': 'thepiratebay',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'ThePirateBay',
            'description': 'The world\'s largest bittorrent tracker. <a href="http://fucktimkuik.org/">ThePirateBay</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAA3UlEQVQY02P4DwT/YADIZvj//7qnozMYODmtAAusZoCDELDAegYGViZhAWZmRoYoqIDupfhNN1M3dTBEggXWMZg9jZRXV77YxhAOFpjDwMAPMoCXmcHsF1SAQZ6bQY2VgUEbKHClcAYzg3mINEO8jSCD478/DPsZmvqWblu1bOmStes3Pp0ezVDF4Gif0Hfx9///74/ObRZ2YNiZ47C8XIRBxFJR0jbSSUud4f9zAQWn8NTuziAt2zy5xIMM/z8LFX0E+fD/x0MRDCeA1v7Z++Y/FDzyvAtyBxIA+h8A8ZKLeT+lJroAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False
                },
                {
                    'name': 'domain',
                    'advanced': True,
                    'label': 'Proxy server',
                    'description': 'Domain for requests, keep empty to let CouchPotato pick.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        }
    ]
}]
|
||||||
137
couchpotato/core/media/_base/providers/torrent/torrentbytes.py
Normal file
137
couchpotato/core/media/_base/providers/torrent/torrentbytes.py
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
    """TorrentBytes provider: logs in via takelogin.php and scrapes browse.php."""

    urls = {
        'test': 'https://www.torrentbytes.net/',
        'login': 'https://www.torrentbytes.net/takelogin.php',
        'login_check': 'https://www.torrentbytes.net/inbox.php',
        'detail': 'https://www.torrentbytes.net/details.php?id=%s',
        'search': 'https://www.torrentbytes.net/browse.php?search=%s&cat=%d',
        'download': 'https://www.torrentbytes.net/download.php?id=%s&name=%s',
    }

    # (site category ids, quality identifiers)
    cat_ids = [
        ([5], ['720p', '1080p', 'bd50']),
        ([19], ['cam']),
        ([19], ['ts', 'tc']),
        ([19], ['r5', 'scr']),
        ([19], ['dvdrip']),
        ([19], ['brrip']),
        ([20], ['dvdr']),
    ]

    http_time_between_calls = 1  # Seconds
    cat_backup_id = None

    def _searchOnTitle(self, title, movie, quality, results):
        """Search for `title` (plus release year) and append parsed releases.

        Parses the bordered results table on browse.php; parse failures
        are logged and swallowed.
        """

        url = self.urls['search'] % (tryUrlencode('%s %s' % (title.replace(':', ''), movie['info']['year'])), self.getCatId(quality)[0])
        data = self.getHTMLData(url)

        if data:
            html = BeautifulSoup(data)

            try:
                result_table = html.find('table', attrs = {'border': '1'})
                if not result_table:
                    return

                entries = result_table.find_all('tr')

                for result in entries[1:]:  # skip the header row
                    cells = result.find_all('td')

                    link = cells[1].find('a', attrs = {'class': 'index'})

                    full_id = link['href'].replace('details.php?id=', '')
                    torrent_id = full_id[:6]
                    name = toUnicode(link.get('title', link.contents[0]).encode('ISO-8859-1')).strip()

                    results.append({
                        'id': torrent_id,
                        'name': name,
                        'url': self.urls['download'] % (torrent_id, name),
                        'detail_url': self.urls['detail'] % torrent_id,
                        # the size value is split across two text nodes
                        'size': self.parseSize(cells[6].contents[0] + cells[6].contents[2]),
                        'seeders': tryInt(cells[8].find('span').contents[0]),
                        'leechers': tryInt(cells[9].find('span').contents[0]),
                    })

            except:
                log.error('Failed parsing %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """POST payload for takelogin.php."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'login': 'submit',
        }

    def loginSuccess(self, output):
        """Logged-in pages link to logout.php or greet the user.

        Bug fix: the original tested `'Welcome' in output.lower()`, which
        can never match a lower-cased string; compare lower-case terms.
        """
        output = output.lower()
        return 'logout.php' in output or 'welcome' in output

    loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
|
||||||
|
# Settings block rendered in the "searcher" tab for the TorrentBytes provider.
config = [{
    'name': 'torrentbytes',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentBytes',
            'description': '<a href="http://torrentbytes.net">TorrentBytes</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAeFBMVEUAAAAAAEQAA1QAEmEAKnQALHYAMoEAOokAQpIASYsASZgAS5UATZwATosATpgAVJ0AWZwAYZ4AZKAAaZ8Ab7IAcbMAfccAgcQAgcsAhM4AiscAjMkAmt0AoOIApecAp/EAqvQAs+kAt+wA3P8A4f8A//8VAAAfDbiaAl08AAAAjUlEQVQYGQXBO04DQRAFwHqz7Z8sECIl5f73ISRD5GBs7UxTlWfg9vYXnvJRQJqOL88D6BAwJtMMumHUVCl60aa6H93IrIv0b+157f1lpk+fm87lMWrZH0vncKbXdRUQrRmrh9C6Iwkq6rg4PXZcyXmbizzeV/g+rDra0rGve8jPKLSOJNi2AQAwAGjwD7ApPkEHdtPQAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||||
119
couchpotato/core/media/_base/providers/torrent/torrentday.py
Normal file
119
couchpotato/core/media/_base/providers/torrent/torrentday.py
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
import re
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
    """TorrentDay provider: queries the site's JSON search API."""

    urls = {
        'test': 'https://torrentday.eu/',
        'login': 'https://torrentday.eu/torrents/',
        'login_check': 'https://torrentday.eu/userdetails.php',
        'detail': 'https://torrentday.eu/details.php?id=%s',
        'search': 'https://torrentday.eu/V3/API/API.php',
        'download': 'https://torrentday.eu/download.php/%s/%s',
    }

    http_time_between_calls = 1  # Seconds

    def _searchOnTitle(self, title, media, quality, results):
        """Query the JSON API for `title` (quoted, plus year) and append results."""

        query = '"%s" %s' % (title, media['info']['year'])

        post_data = {
            '/browse.php?': None,
            'cata': 'yes',
            'jxt': 8,
            'jxw': 'b',
            'search': query,
        }

        response = self.getJsonData(self.urls['search'], data = post_data)

        # The API nests torrents under Fs[0].Cn.torrents; bail on any shape mismatch.
        try:
            torrents = response.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
        except:
            return

        for torrent in torrents:
            results.append({
                'id': torrent['id'],
                'name': torrent['name'],
                'url': self.urls['download'] % (torrent['id'], torrent['fname']),
                'detail_url': self.urls['detail'] % torrent['id'],
                'size': self.parseSize(torrent.get('size')),
                'seeders': tryInt(torrent.get('seed')),
                'leechers': tryInt(torrent.get('leech')),
            })

    def getLoginParams(self):
        """POST payload mimicking the site's login form submit button."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'submit.x': 18,
            'submit.y': 11,
            'submit': 'submit',
        }

    def loginSuccess(self, output):
        """Raise on the site's rate-limit message; otherwise succeed
        unless the password was rejected."""
        often = re.search('You tried too often, please wait .*</div>', output)
        if often:
            # Drop the trailing '</div>' from the matched message.
            raise Exception(often.group(0)[:-6].strip())

        return 'Password not correct' not in output

    def loginCheckSuccess(self, output):
        """The session is still valid while a logout link is present."""
        return 'logout.php' in output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
# Settings block rendered in the "searcher" tab for the TorrentDay provider.
config = [{
    'name': 'torrentday',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentDay',
            'description': '<a href="https://torrentday.eu/">TorrentDay</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC5ElEQVQ4y12TXUgUURTH//fO7Di7foeQJH6gEEEIZZllVohfSG/6UA+RSFAQQj74VA8+Bj30lmAlRVSEvZRfhNhaka5ZUG1paKaW39tq5O6Ou+PM3M4o6m6X+XPPzD3zm/+dcy574r515WfIW8CZBM4YAA5Gc/aQC3yd7oXYEONcsISE5dTDh91HS0t7FEWhBUAeN9ynV/d9qJAgE4AECURAcVsGlCCnly26LMA0IQwTa52dje3d3e3hcPi8qqrrMjcVYI3EHCQZlkFOHBwR2QHh2ASAAIJxWGAQEDxjePhs3527XjJwnb37OHBq0T+Tyyjh+9KnEzNJ7nouc1Q/3A3HGsOvnJy+PSUlj81w2Lny9WuJ6+3AmTjD4HOcrdR2dWXLRQePvyaSLfQOPMPC8mC9iHCsOxSyzJCelzdSXlNzD5ujpb25Wbfc/XXJemTXF4+nnCNq+AMLe50uFfEJTiw4GXSFtiHL0SnIq66+p0kSArqO+eH3RdsAv9+f5vW7L7GICq6rmM8XBCAXlBw90rOyxibn5yzfkg/L09M52/jxqdESaIrBXHYZZbB1GX8cEpySxKIB8S5XcOnvqpli1zuwmrTtoLjw5LOK/eeuWsE4JH5IRPaPZKiKigmPp+5pa+u1aEjIMhEgrRkmi9mgxGUhM7LNJSzOzsE3+cOeExovXOjdytE0LV4zqNZUtV0uZzAGoGkhDH/2YHZiErmv4uyWQnZZWc+hoqL3WzlTExN5hhA8IEwkZWZOxwB++30YG/9GkYCPvqAaHAW5uWPROW86OmqCprUR7z1yZDAGQNuCvkoB/baIKUBWMTYymv+gra3eJNvjXu+B562tFyXqTJ6YuHK8rKwvBmC3vR7cOCPQLWFz8LnfXWUrJo9U19BwMyUlJRjTSMJ2ENxUiGxq9KXQfwqYlnWstvbR5aamG9g0uzM8Q4OFt++3NNixQ2NgYmeN03FOTUv7XVpV9aKisvLl1vN/WVhNc/Fi1NEAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||||
126
couchpotato/core/media/_base/providers/torrent/torrentleech.py
Normal file
126
couchpotato/core/media/_base/providers/torrent/torrentleech.py
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
    """TorrentLeech provider: logs in and scrapes the torrent browse table."""

    urls = {
        'test': 'https://www.torrentleech.org/',
        'login': 'https://www.torrentleech.org/user/account/login/',
        'login_check': 'https://torrentleech.org/user/messages',
        'detail': 'https://www.torrentleech.org/torrent/%s',
        'search': 'https://www.torrentleech.org/torrents/browse/index/query/%s/categories/%s',
        'download': 'https://www.torrentleech.org%s',
    }

    http_time_between_calls = 1  # Seconds
    cat_backup_id = None

    def _searchOnTitle(self, title, media, quality, results):
        """Search for `title` and append parsed releases to `results`.

        Parses the 'torrenttable' HTML table; parse failures are logged
        and swallowed.
        """

        url = self.urls['search'] % self.buildUrl(title, media, quality)

        data = self.getHTMLData(url)

        if data:
            html = BeautifulSoup(data)

            try:
                result_table = html.find('table', attrs = {'id': 'torrenttable'})
                if not result_table:
                    return

                entries = result_table.find_all('tr')

                for result in entries[1:]:  # skip the header row

                    link = result.find('td', attrs = {'class': 'name'}).find('a')
                    url = result.find('td', attrs = {'class': 'quickdownload'}).find('a')

                    results.append({
                        'id': link['href'].replace('/torrent/', ''),
                        'name': six.text_type(link.string),
                        'url': self.urls['download'] % url['href'],
                        # the name-cell link doubles as the detail page link
                        # (the original looked it up a second time needlessly)
                        'detail_url': self.urls['download'] % link['href'],
                        'size': self.parseSize(result.find_all('td')[4].string),
                        'seeders': tryInt(result.find('td', attrs = {'class': 'seeders'}).string),
                        'leechers': tryInt(result.find('td', attrs = {'class': 'leechers'}).string),
                    })

            except:
                log.error('Failed parsing %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """POST payload for the login form (requests a persistent session)."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'remember_me': 'on',
            'login': 'submit',
        }

    def loginSuccess(self, output):
        """Logged-in pages contain a logout link or a welcome-back message."""
        return '/user/account/logout' in output.lower() or 'welcome back' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
|
||||||
|
# Settings block rendered in the "searcher" tab for the TorrentLeech provider.
config = [{
    'name': 'torrentleech',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentLeech',
            'description': '<a href="http://torrentleech.org">TorrentLeech</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACHUlEQVR4AZVSO48SYRSdGTCBEMKzILLAWiybkKAGMZRUUJEoDZX7B9zsbuQPYEEjNLTQkYgJDwsoSaxspEBsCITXjjNAIKi8AkzceXgmbHQ1NJ5iMufmO9/9zrmXlCSJ+B8o75J8Pp/NZj0eTzweBy0Wi4PBYD6f12o1r9ebTCZx+22HcrnMsuxms7m6urTZ7LPZDMVYLBZ8ZV3yo8aq9Pq0wzCMTqe77dDv9y8uLyAWBH6xWOyL0K/56fcb+rrPgPZ6PZfLRe1fsl6vCUmGKIqoqNXqdDr9Dbjps9znUV0uTqdTjuPkDoVCIfcuJ4gizjMMm8u9vW+1nr04czqdK56c37CbKY9j2+1WEARZ0Gq1RFHAz2q1qlQqXxoN69HRcDjUarW8ZD6QUigUOnY8uKYH8N1sNkul9yiGw+F6vS4Rxn8EsodEIqHRaOSnq9T7ajQazWQycEIR1AEBYDabSZJyHDucJyegwWBQr9ebTCaKvHd4cCQANUU9evwQ1Ofz4YvUKUI43GE8HouSiFiNRhOowWBIpVLyHITJkuW3PwgAEf3pgIwxF5r+OplMEsk3CPT5szCMnY7EwUdhwUh/CXiej0Qi3idPz89fdrpdbsfBzH7S3Q9K5pP4c0sAKpVKoVAQGO1ut+t0OoFAQHkH2Da/3/+but3uarWK0ZMQoNdyucRutdttmqZxMTzY7XaYxsrgtUjEZrNhkSwWyy/0NCatZumrNQAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||||
188
couchpotato/core/media/_base/providers/torrent/torrentpotato.py
Normal file
188
couchpotato/core/media/_base/providers/torrent/torrentpotato.py
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
from urlparse import urlparse
|
||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.encoding import toUnicode
|
||||||
|
from couchpotato.core.helpers.variable import splitString, tryInt, tryFloat
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.base import ResultList
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
    """TorrentPotato provider: queries user-configured TorrentPotato API hosts."""

    urls = {}
    limits_reached = {}

    http_time_between_calls = 1  # Seconds

    def search(self, media, quality):
        """Query every enabled host and merge everything into one ResultList."""
        results = ResultList(self, media, quality, imdb_results = True)

        for host in self.getHosts():
            if self.isDisabled(host):
                continue
            self._searchOnHost(host, media, quality, results)

        return results

    def _searchOnHost(self, host, media, quality, results):
        """Fetch JSON results from a single host and append them to `results`."""

        torrents = self.getJsonData(self.buildUrl(media, host), cache_timeout = 1800)

        if not torrents:
            return

        try:
            if torrents.get('error'):
                log.error('%s: %s', (torrents.get('error'), host['host']))
            elif torrents.get('results'):
                for torrent in torrents.get('results', []):
                    results.append({
                        'id': torrent.get('torrent_id'),
                        # plain http(s)/ftp URLs are .torrent files, anything else is a magnet
                        'protocol': 'torrent' if re.match('^(http|https|ftp)://.*$', torrent.get('download_url')) else 'torrent_magnet',
                        'provider_extra': urlparse(host['host']).hostname or host['host'],
                        'name': toUnicode(torrent.get('release_name')),
                        'url': torrent.get('download_url'),
                        'detail_url': torrent.get('details_url'),
                        'size': torrent.get('size'),
                        'score': host['extra_score'],
                        'seeders': torrent.get('seeders'),
                        'leechers': torrent.get('leechers'),
                        'seed_ratio': host['seed_ratio'],
                        'seed_time': host['seed_time'],
                    })

        except:
            log.error('Failed getting results from %s: %s', (host['host'], traceback.format_exc()))

    def getHosts(self):
        """Assemble per-host config dicts from the parallel comma-separated settings."""

        uses = splitString(str(self.conf('use')), clean = False)
        hosts = splitString(self.conf('host'), clean = False)
        names = splitString(self.conf('name'), clean = False)
        seed_times = splitString(self.conf('seed_time'), clean = False)
        seed_ratios = splitString(self.conf('seed_ratio'), clean = False)
        pass_keys = splitString(self.conf('pass_key'), clean = False)
        extra_score = splitString(self.conf('extra_score'), clean = False)

        def pick(values, idx):
            # The parallel lists may be shorter than `hosts`; fall back to ''.
            try:
                return values[idx]
            except:
                return ''

        host_list = []
        for nr in range(len(hosts)):
            host_list.append({
                'use': uses[nr],
                'host': pick(hosts, nr),
                'name': pick(names, nr),
                'seed_ratio': tryFloat(pick(seed_ratios, nr)),
                'seed_time': tryInt(pick(seed_times, nr)),
                'pass_key': pick(pass_keys, nr),
                'extra_score': tryInt(extra_score[nr]) if len(extra_score) > nr else 0
            })

        return host_list

    def belongsTo(self, url, provider = None, host = None):
        """Return the first configured host that the download `url` belongs to."""

        for configured in self.getHosts():
            result = super(Base, self).belongsTo(url, host = configured['host'], provider = provider)
            if result:
                return result

    def isDisabled(self, host = None):
        """Inverse of isEnabled()."""
        return not self.isEnabled(host)

    def isEnabled(self, host = None):
        """With no `host`: true if at least one configured host is enabled.
        With a `host`: true when the provider is on and the host entry is usable."""

        if host is None:
            for candidate in self.getHosts():
                if self.isEnabled(candidate):
                    return True
            return False

        return TorrentProvider.isEnabled(self) and host['host'] and host['pass_key'] and int(host['use'])
|
||||||
|
|
||||||
|
|
||||||
|
# Settings block rendered in the "searcher" tab for the TorrentPotato provider.
config = [{
    'name': 'torrentpotato',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentPotato',
            'order': 10,
            'description': 'CouchPotato torrent provider. Checkout <a href="https://github.com/RuudBurger/CouchPotatoServer/wiki/CouchPotato-Torrent-Provider">the wiki page about this provider</a> for more info.',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABSElEQVR4AZ2Nz0oCURTGv8t1YMpqUxt9ARFxoQ/gQtppgvUKcu/sxB5iBJkogspaBC6iVUplEC6kv+oiiKDNhAtt16roP0HQgdsMLgaxfvy4nHP4Pi48qE2g4v91JOqT1CH/UnA7w7icUlLawyEdj+ZI/7h6YluWbRiddHonHh9M70aj7VTKzuXuikUMci/EO/ACnAI15599oAk8AR/AgxBQNCzreD7bmpl+FOIVuAHqQDUcJo+AK+CZFKLt95/MpSmMt0TiW9POxse6UvYZ6zB2wFgjFiNpOGesR0rZ0PVPXf8KhUCl22CwClz4eN8weoZBb9c0bdPsOWvHx/cYu9Y0CoNoZTJrwAbn5DrnZc6XOV+igVbnsgo0IxEomlJuA1vUIYGyq3PZBChwmExCUSmVZgMBDIUCK4UCFIv5vHIhm/XUDeAf/ADbcpd5+aXSWQAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'use',
                    'default': ''
                },
                {
                    'name': 'host',
                    'default': '',
                    'description': 'The url path of your TorrentPotato provider.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': '0',
                    'description': 'Starting score for each release found via this provider.',
                },
                {
                    'name': 'name',
                    'label': 'Username',
                    'default': '',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'default': '1',
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'default': '40',
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'pass_key',
                    'default': ',',
                    'label': 'Pass Key',
                    'description': 'Can be found on your profile page',
                    'type': 'combined',
                    'combine': ['use', 'host', 'pass_key', 'name', 'seed_ratio', 'seed_time', 'extra_score'],
                },
            ],
        },
    ],
}]
|
||||||
134
couchpotato/core/media/_base/providers/torrent/torrentshack.py
Normal file
134
couchpotato/core/media/_base/providers/torrent/torrentshack.py
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
import traceback
|
||||||
|
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from couchpotato.core.helpers.variable import tryInt
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentProvider):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'test': 'https://torrentshack.me/',
|
||||||
|
'login': 'https://torrentshack.me/login.php',
|
||||||
|
'login_check': 'https://torrentshack.me/inbox.php',
|
||||||
|
'detail': 'https://torrentshack.me/torrent/%s',
|
||||||
|
'search': 'https://torrentshack.me/torrents.php?action=advanced&searchstr=%s&scene=%s&filter_cat[%d]=1',
|
||||||
|
'download': 'https://torrentshack.me/%s',
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 1 # Seconds
|
||||||
|
|
||||||
|
def _search(self, media, quality, results):
|
||||||
|
|
||||||
|
url = self.urls['search'] % self.buildUrl(media, quality)
|
||||||
|
data = self.getHTMLData(url)
|
||||||
|
|
||||||
|
if data:
|
||||||
|
html = BeautifulSoup(data)
|
||||||
|
|
||||||
|
try:
|
||||||
|
result_table = html.find('table', attrs = {'id': 'torrent_table'})
|
||||||
|
if not result_table:
|
||||||
|
return
|
||||||
|
|
||||||
|
entries = result_table.find_all('tr', attrs = {'class': 'torrent'})
|
||||||
|
|
||||||
|
for result in entries:
|
||||||
|
|
||||||
|
link = result.find('span', attrs = {'class': 'torrent_name_link'}).parent
|
||||||
|
url = result.find('td', attrs = {'class': 'torrent_td'}).find('a')
|
||||||
|
size = result.find('td', attrs = {'class': 'size'}).contents[0].strip('\n ')
|
||||||
|
tds = result.find_all('td')
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': link['href'].replace('torrents.php?torrentid=', ''),
|
||||||
|
'name': six.text_type(link.span.string).translate({ord(six.u('\xad')): None}),
|
||||||
|
'url': self.urls['download'] % url['href'],
|
||||||
|
'detail_url': self.urls['download'] % link['href'],
|
||||||
|
'size': self.parseSize(size),
|
||||||
|
'seeders': tryInt(tds[len(tds)-2].string),
|
||||||
|
'leechers': tryInt(tds[len(tds)-1].string),
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
def getLoginParams(self):
|
||||||
|
return {
|
||||||
|
'username': self.conf('username'),
|
||||||
|
'password': self.conf('password'),
|
||||||
|
'keeplogged': '1',
|
||||||
|
'login': 'Login',
|
||||||
|
}
|
||||||
|
|
||||||
|
def loginSuccess(self, output):
|
||||||
|
return 'logout.php' in output.lower()
|
||||||
|
|
||||||
|
loginCheckSuccess = loginSuccess
|
||||||
|
|
||||||
|
def getSceneOnly(self):
|
||||||
|
return '1' if self.conf('scene_only') else ''
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'torrentshack',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'TorrentShack',
|
||||||
|
'description': '<a href="https://torrentshack.me/">TorrentShack</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABmElEQVQoFQXBzY2cVRiE0afqvd84CQiAnxWWtyxsS6ThINBYg2Dc7mZBMEjE4mzs6e9WcY5+ePNuVFJJodQAoLo+SaWCy9rcV8cmjah3CI6iYu7oRU30kE5xxELRfamklY3k1NL19sSm7vPzP/ZdNZzKVDaY2sPZJBh9fv5ITrmG2+Vp4e1sPchVqTCQZJnVXi+/L4uuAJGly1+Pw8CprLbi8Om7tbT19/XRqJUk11JP9uHj9ulxhXbvJbI9qJvr5YkGXFG2IBT8tXczt+sfzDZCp3765f3t9tHEHGEDACma77+8o4oATKk+/PfW9YmHruRFjWoVSFsVsGu1YSKq6Oc37+n98unPZSRlY7vsKDqN+92X3yR9+PdXee3iJNKMStqdcZqoTJbUSi5JOkpfRlhSI0mSpEmCFKoU7FqSNOLAk54uGwCStMUCgLrVic62g7oDoFmmdI+P3S0pDe1xvDqb6XrZqbtzShWNoh9fv/XQHaDdM9OqrZi2M7M3UrB2vlkPS1IbdEBk7UiSoD6VlZ6aKWer4aH4f/AvKoHUTjuyAAAAAElFTkSuQmCC',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': False,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'username',
|
||||||
|
'default': '',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'password',
|
||||||
|
'default': '',
|
||||||
|
'type': 'password',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'scene_only',
|
||||||
|
'type': 'bool',
|
||||||
|
'default': False,
|
||||||
|
'description': 'Only allow scene releases.'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}]
|
||||||
129
couchpotato/core/media/_base/providers/torrent/torrentz.py
Normal file
129
couchpotato/core/media/_base/providers/torrent/torrentz.py
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||||
|
from couchpotato.core.helpers.rss import RSS
|
||||||
|
from couchpotato.core.helpers.variable import tryInt, splitString
|
||||||
|
from couchpotato.core.logger import CPLog
|
||||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
log = CPLog(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Base(TorrentMagnetProvider, RSS):
|
||||||
|
|
||||||
|
urls = {
|
||||||
|
'detail': 'https://torrentz.eu/%s',
|
||||||
|
'search': 'https://torrentz.eu/feed?q=%s',
|
||||||
|
'verified_search': 'https://torrentz.eu/feed_verified?q=%s'
|
||||||
|
}
|
||||||
|
|
||||||
|
http_time_between_calls = 0
|
||||||
|
|
||||||
|
def _searchOnTitle(self, title, media, quality, results):
|
||||||
|
|
||||||
|
search_url = self.urls['verified_search'] if self.conf('verified_only') else self.urls['search']
|
||||||
|
|
||||||
|
# Create search parameters
|
||||||
|
search_params = self.buildUrl(title, media, quality)
|
||||||
|
|
||||||
|
smin = quality.get('size_min')
|
||||||
|
smax = quality.get('size_max')
|
||||||
|
if smin and smax:
|
||||||
|
search_params += ' size %sm - %sm' % (smin, smax)
|
||||||
|
|
||||||
|
min_seeds = tryInt(self.conf('minimal_seeds'))
|
||||||
|
if min_seeds:
|
||||||
|
search_params += ' seed > %s' % (min_seeds - 1)
|
||||||
|
|
||||||
|
rss_data = self.getRSSData(search_url % search_params)
|
||||||
|
|
||||||
|
if rss_data:
|
||||||
|
try:
|
||||||
|
|
||||||
|
for result in rss_data:
|
||||||
|
|
||||||
|
name = self.getTextElement(result, 'title')
|
||||||
|
detail_url = self.getTextElement(result, 'link')
|
||||||
|
description = self.getTextElement(result, 'description')
|
||||||
|
|
||||||
|
magnet = splitString(detail_url, '/')[-1]
|
||||||
|
magnet_url = 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (magnet.upper(), tryUrlencode(name), tryUrlencode('udp://tracker.openbittorrent.com/announce'))
|
||||||
|
|
||||||
|
reg = re.search('Size: (?P<size>\d+) MB Seeds: (?P<seeds>[\d,]+) Peers: (?P<peers>[\d,]+)', six.text_type(description))
|
||||||
|
size = reg.group('size')
|
||||||
|
seeds = reg.group('seeds').replace(',', '')
|
||||||
|
peers = reg.group('peers').replace(',', '')
|
||||||
|
|
||||||
|
results.append({
|
||||||
|
'id': magnet,
|
||||||
|
'name': six.text_type(name),
|
||||||
|
'url': magnet_url,
|
||||||
|
'detail_url': detail_url,
|
||||||
|
'size': tryInt(size),
|
||||||
|
'seeders': tryInt(seeds),
|
||||||
|
'leechers': tryInt(peers),
|
||||||
|
})
|
||||||
|
|
||||||
|
except:
|
||||||
|
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||||
|
|
||||||
|
|
||||||
|
config = [{
|
||||||
|
'name': 'torrentz',
|
||||||
|
'groups': [
|
||||||
|
{
|
||||||
|
'tab': 'searcher',
|
||||||
|
'list': 'torrent_providers',
|
||||||
|
'name': 'Torrentz',
|
||||||
|
'description': 'Torrentz is a free, fast and powerful meta-search engine. <a href="https://torrentz.eu/">Torrentz</a>',
|
||||||
|
'wizard': True,
|
||||||
|
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAQklEQVQ4y2NgAALjtJn/ycEMlGiGG0IVAxiwAKzOxaKGARcgxgC8YNSAwWoAzuRMjgsIugqfAUR5CZcBRIcHsWEAADSA96Ig020yAAAAAElFTkSuQmCC',
|
||||||
|
'options': [
|
||||||
|
{
|
||||||
|
'name': 'enabled',
|
||||||
|
'type': 'enabler',
|
||||||
|
'default': True
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'verified_only',
|
||||||
|
'type': 'bool',
|
||||||
|
'default': True,
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Only search verified releases',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'minimal_seeds',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 1,
|
||||||
|
'advanced': True,
|
||||||
|
'description': 'Only return releases with minimal X seeds',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_ratio',
|
||||||
|
'label': 'Seed ratio',
|
||||||
|
'type': 'float',
|
||||||
|
'default': 1,
|
||||||
|
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'seed_time',
|
||||||
|
'label': 'Seed time',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 40,
|
||||||
|
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'name': 'extra_score',
|
||||||
|
'advanced': True,
|
||||||
|
'label': 'Extra Score',
|
||||||
|
'type': 'int',
|
||||||
|
'default': 0,
|
||||||
|
'description': 'Starting score for each release found via this provider.',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}]
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user