Compare commits
2253 Commits
build/2.0.
...
tv
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
147e565249 | ||
|
|
4e568ff515 | ||
|
|
02b6659235 | ||
|
|
dacc3d8f47 | ||
|
|
4f140bb1ac | ||
|
|
3dffaa7075 | ||
|
|
51c8de0fc3 | ||
|
|
4f23ccc284 | ||
|
|
a6ff34a47f | ||
|
|
f1a2d960bc | ||
|
|
4e7069e0c6 | ||
|
|
477a47e45e | ||
|
|
a3264240ab | ||
|
|
f9d9fffedb | ||
|
|
6b4e9a3fac | ||
|
|
6787289846 | ||
|
|
d31a2e2768 | ||
|
|
c992680209 | ||
|
|
65f0dc25d2 | ||
|
|
b616af3a83 | ||
|
|
ca13107330 | ||
|
|
c7ce18f8c2 | ||
|
|
b6f288a522 | ||
|
|
cb48ca03df | ||
|
|
7b6641d709 | ||
|
|
3c12a2c4bf | ||
|
|
259e2bc61c | ||
|
|
9f6e4cc2fa | ||
|
|
a763957334 | ||
|
|
06293dc0a2 | ||
|
|
38a5d967dd | ||
|
|
4cdb9bc81d | ||
|
|
2104cb2839 | ||
|
|
d4a4bd40a8 | ||
|
|
ba47d7eea7 | ||
|
|
c9638ec3fa | ||
|
|
14d636d098 | ||
|
|
e1d4df7937 | ||
|
|
e08d06ba31 | ||
|
|
984ee7580d | ||
|
|
ab118ea580 | ||
|
|
f897eebb41 | ||
|
|
755873c5e7 | ||
|
|
dbc254efbe | ||
|
|
9de8ed2dee | ||
|
|
230b7f47cc | ||
|
|
58878d8a0f | ||
|
|
d9bb1bfbfb | ||
|
|
f8674f9baa | ||
|
|
20f1076037 | ||
|
|
e84f2aa04c | ||
|
|
01f70051f8 | ||
|
|
492f69b149 | ||
|
|
2270b2a28b | ||
|
|
b5a0418a36 | ||
|
|
e595722139 | ||
|
|
78ba855c68 | ||
|
|
158f638fb9 | ||
|
|
5bea9dd04f | ||
|
|
910393d00e | ||
|
|
4b66b0ea07 | ||
|
|
543226450c | ||
|
|
b9dbadda0b | ||
|
|
7cb214d8a2 | ||
|
|
f6d4ddbe80 | ||
|
|
faefd7a5b5 | ||
|
|
8f02b0eea0 | ||
|
|
39d0f91de2 | ||
|
|
b3d75cb485 | ||
|
|
17b940a271 | ||
|
|
3338b72d1f | ||
|
|
70ca31a265 | ||
|
|
d7f43c2cf8 | ||
|
|
b1f88c1c48 | ||
|
|
6fa6d530ec | ||
|
|
11e7fb23ca | ||
|
|
da9d2b5ed8 | ||
|
|
2599bac1a4 | ||
|
|
0bae509311 | ||
|
|
2fa7834e6e | ||
|
|
2deb6ee6a7 | ||
|
|
0d166025d0 | ||
|
|
7861416dc5 | ||
|
|
2639c5e9ad | ||
|
|
c4db4ace13 | ||
|
|
db367a80d1 | ||
|
|
3093b21555 | ||
|
|
9b62e32da8 | ||
|
|
a0b3ee8186 | ||
|
|
d70da1edce | ||
|
|
7c674b3aab | ||
|
|
98540f2fcd | ||
|
|
2f0e197320 | ||
|
|
db49585818 | ||
|
|
160bc1a5c4 | ||
|
|
8e23b02653 | ||
|
|
41e69aeac3 | ||
|
|
be30200a18 | ||
|
|
387650d040 | ||
|
|
052d64eb39 | ||
|
|
a3a8a820fe | ||
|
|
1b724b5606 | ||
|
|
5fc9d7182c | ||
|
|
c948216e33 | ||
|
|
035b99bc8a | ||
|
|
f74b837faa | ||
|
|
4c198f7116 | ||
|
|
76322c0145 | ||
|
|
12150c5efc | ||
|
|
4a9452672a | ||
|
|
f7eeaf3eda | ||
|
|
002ce4d4e1 | ||
|
|
80df57f2b6 | ||
|
|
0358378cae | ||
|
|
fa054b6b34 | ||
|
|
4b9e226cc6 | ||
|
|
6c586f8b19 | ||
|
|
bb609e073b | ||
|
|
02571d0f5d | ||
|
|
ca24bf031c | ||
|
|
af8806e292 | ||
|
|
4f646094b5 | ||
|
|
6e8503cfc5 | ||
|
|
4879bc6251 | ||
|
|
ab253f9030 | ||
|
|
bc6d197004 | ||
|
|
1de457fa8d | ||
|
|
9e564c49b3 | ||
|
|
50a150f570 | ||
|
|
8d55b0c92a | ||
|
|
5a2df62462 | ||
|
|
9d21dd9196 | ||
|
|
3b34196901 | ||
|
|
bad26026ae | ||
|
|
6e455e62d5 | ||
|
|
c97bd38c83 | ||
|
|
356322c5b1 | ||
|
|
9dbb477dd8 | ||
|
|
089609d5d2 | ||
|
|
487ddf1c25 | ||
|
|
83b4c17969 | ||
|
|
08c381cf0d | ||
|
|
286f14a6d2 | ||
|
|
0b14fe5454 | ||
|
|
c5a0d521d1 | ||
|
|
4a1f70da09 | ||
|
|
87e97cd8a5 | ||
|
|
e0dffe20a4 | ||
|
|
73d37584ad | ||
|
|
5fd3e86624 | ||
|
|
b0ff526c95 | ||
|
|
3cfe90d581 | ||
|
|
1d60d9caf1 | ||
|
|
8e0d1520e8 | ||
|
|
b07f91d6a5 | ||
|
|
43af091b02 | ||
|
|
5f0543ba42 | ||
|
|
ef8cd1aa40 | ||
|
|
e01fe51b9e | ||
|
|
afa782194d | ||
|
|
77e602f359 | ||
|
|
a6063b0665 | ||
|
|
9a7e4ea500 | ||
|
|
1daedb7259 | ||
|
|
8e82e976f1 | ||
|
|
8b445ac9f9 | ||
|
|
91c24105cc | ||
|
|
13df26851e | ||
|
|
ca58d25785 | ||
|
|
42d728f71e | ||
|
|
659960899e | ||
|
|
d40b052cbc | ||
|
|
282f6fb73a | ||
|
|
416c9eabde | ||
|
|
b4a15f344d | ||
|
|
c545c9aab1 | ||
|
|
c0492a41d9 | ||
|
|
60e8c3ad9b | ||
|
|
cfd92b8268 | ||
|
|
436883a96d | ||
|
|
c381b719b1 | ||
|
|
81d4d9a4e2 | ||
|
|
e2df3a4dfd | ||
|
|
7df92f2882 | ||
|
|
072b6d09fa | ||
|
|
3869e350bf | ||
|
|
058846f54f | ||
|
|
cd836f3660 | ||
|
|
d75f58f5ec | ||
|
|
f2b0d3f80b | ||
|
|
a366d57278 | ||
|
|
a821d85bf2 | ||
|
|
a1ce3e0d6b | ||
|
|
e7be5c7809 | ||
|
|
894f46a741 | ||
|
|
7d5efad20c | ||
|
|
ba14c95e82 | ||
|
|
2ad249b195 | ||
|
|
89f3b6624e | ||
|
|
3546f29caf | ||
|
|
e3414fe91f | ||
|
|
bdadd00d93 | ||
|
|
dd7de31e9f | ||
|
|
deb7943203 | ||
|
|
4e78b0cac1 | ||
|
|
c8f0cdc90f | ||
|
|
ce80ac5a33 | ||
|
|
5e438e5343 | ||
|
|
6897dab647 | ||
|
|
12dd9c6b14 | ||
|
|
478dc0f242 | ||
|
|
5d886ccf1f | ||
|
|
7f466f9c08 | ||
|
|
7fbd89a317 | ||
|
|
6f620f451b | ||
|
|
dea5bbbf1c | ||
|
|
68bde6086d | ||
|
|
34bb8c7993 | ||
|
|
74c7cf4381 | ||
|
|
efe0a4af53 | ||
|
|
b9c6d983e1 | ||
|
|
3d6ce1c2e2 | ||
|
|
accf19bb26 | ||
|
|
4126007cac | ||
|
|
9f12fe2636 | ||
|
|
9fb348f3a4 | ||
|
|
e749d132cd | ||
|
|
bed9458604 | ||
|
|
7984ee9fcf | ||
|
|
69e3e36fae | ||
|
|
456563eab0 | ||
|
|
a06bfcb3bf | ||
|
|
fe2e508e4c | ||
|
|
72cb53bcc0 | ||
|
|
90be6ec38b | ||
|
|
212d5c5432 | ||
|
|
b10e25ab8c | ||
|
|
5c4f8186df | ||
|
|
02d4a7625b | ||
|
|
8018ef979f | ||
|
|
482f5f82e6 | ||
|
|
88f8cd708b | ||
|
|
aa92d76eb4 | ||
|
|
3e05bc8d78 | ||
|
|
7b6fa4f0e5 | ||
|
|
cd1dc39ef2 | ||
|
|
4de9879927 | ||
|
|
479e20d8f3 | ||
|
|
f7ed5d4b2f | ||
|
|
bda44848a1 | ||
|
|
f3ae8a05cc | ||
|
|
43275297e9 | ||
|
|
d79556f36f | ||
|
|
8fe3d6f58f | ||
|
|
a1ca367037 | ||
|
|
bfdf565a0d | ||
|
|
c77eaabbff | ||
|
|
44063dfcc5 | ||
|
|
c2c98f644b | ||
|
|
74caecbe89 | ||
|
|
a721a40d5e | ||
|
|
338e645579 | ||
|
|
5f2dd0aac3 | ||
|
|
0f434afd33 | ||
|
|
364527b0b2 | ||
|
|
ac857301ac | ||
|
|
c038c66dc9 | ||
|
|
c81891683c | ||
|
|
d787cb0cdb | ||
|
|
2d5a3e7564 | ||
|
|
7ae178e2a6 | ||
|
|
e885ade131 | ||
|
|
0925dd08bc | ||
|
|
050d8ccfda | ||
|
|
4efdca91d5 | ||
|
|
0d128a3525 | ||
|
|
0f97e57307 | ||
|
|
0771aeac3b | ||
|
|
cd0afd20e5 | ||
|
|
324920cd8c | ||
|
|
12cda35494 | ||
|
|
1c1af9f90c | ||
|
|
687221f035 | ||
|
|
a99d52392f | ||
|
|
bd6690b159 | ||
|
|
b13df16b53 | ||
|
|
06f49be090 | ||
|
|
0b48ad5084 | ||
|
|
32ce93d2e9 | ||
|
|
e0479e79bd | ||
|
|
04e22b3966 | ||
|
|
40a5ce087b | ||
|
|
330e15bbcb | ||
|
|
d201d9fff9 | ||
|
|
f765794c99 | ||
|
|
34320e617d | ||
|
|
169ddeef5d | ||
|
|
33ad4c22c7 | ||
|
|
265f90fe69 | ||
|
|
099b72ed27 | ||
|
|
d20c0ee37e | ||
|
|
f6030a333a | ||
|
|
4cbc089de2 | ||
|
|
c45c04659f | ||
|
|
61a9037835 | ||
|
|
ad33c0bcca | ||
|
|
7afc524a9f | ||
|
|
c5a4bc9a1b | ||
|
|
1c0178dbaf | ||
|
|
dbf7feca3e | ||
|
|
d92de8ec4e | ||
|
|
8347da5a58 | ||
|
|
59e248d7de | ||
|
|
12e556e1d1 | ||
|
|
14d3ab93da | ||
|
|
e27ece512f | ||
|
|
b88d8efc8d | ||
|
|
9ec4c2837e | ||
|
|
ffc3fc9ec4 | ||
|
|
a566b4f428 | ||
|
|
69819460f3 | ||
|
|
24a8cb41fe | ||
|
|
1de0443492 | ||
|
|
bb19b380b4 | ||
|
|
b6b936ddf3 | ||
|
|
b00b6acba8 | ||
|
|
3941076c06 | ||
|
|
7401201af2 | ||
|
|
5c586fbf30 | ||
|
|
5c891b7e8e | ||
|
|
5425fcae9e | ||
|
|
4008cce12f | ||
|
|
d227105527 | ||
|
|
508649e6b6 | ||
|
|
b4e25d4345 | ||
|
|
733f925c75 | ||
|
|
40e910192e | ||
|
|
424a3cd892 | ||
|
|
9f6036c8d6 | ||
|
|
5af5749d4a | ||
|
|
f01449f14c | ||
|
|
03dff14ee9 | ||
|
|
e55302592a | ||
|
|
dbeaab052d | ||
|
|
9f07dd5a21 | ||
|
|
b933cd8718 | ||
|
|
8d85dde2c6 | ||
|
|
eaaa8dc834 | ||
|
|
5350dbf0ce | ||
|
|
28ffad10ab | ||
|
|
a37517bf6a | ||
|
|
fab9b96c8e | ||
|
|
50d6882a98 | ||
|
|
94064ac7da | ||
|
|
1c5f19a68a | ||
|
|
a26abd0dbb | ||
|
|
fb9080c18a | ||
|
|
15980471b0 | ||
|
|
b11bb9cdac | ||
|
|
474cd45fc5 | ||
|
|
0b6843a1b9 | ||
|
|
fdcdf07fa6 | ||
|
|
5617953d39 | ||
|
|
964144996f | ||
|
|
37214dd413 | ||
|
|
5a08fed0b6 | ||
|
|
443866ef04 | ||
|
|
96275adaff | ||
|
|
33884deb6c | ||
|
|
7db291fc93 | ||
|
|
9df14bd55a | ||
|
|
1e183625c9 | ||
|
|
643be19711 | ||
|
|
21a1770f3f | ||
|
|
07063d855a | ||
|
|
cf95e417f1 | ||
|
|
3f92ed0ea0 | ||
|
|
578b74f2c0 | ||
|
|
8e17b9aea5 | ||
|
|
6f766aae8c | ||
|
|
5797348bb3 | ||
|
|
57ca5067ff | ||
|
|
e8ff8a41de | ||
|
|
0b5dfe826a | ||
|
|
67fbcc8238 | ||
|
|
dd61c7dc21 | ||
|
|
3786b5435f | ||
|
|
1857e047b0 | ||
|
|
648ac7793f | ||
|
|
664ce6421f | ||
|
|
cfb77a1076 | ||
|
|
f65ddbbb9e | ||
|
|
76126271fc | ||
|
|
3faece0b4c | ||
|
|
530d3cd91e | ||
|
|
e659aba176 | ||
|
|
a196a499ae | ||
|
|
58bd9cd7a1 | ||
|
|
9dd9f850c6 | ||
|
|
cbecb74307 | ||
|
|
6833e78546 | ||
|
|
30c56f29d0 | ||
|
|
8ae1e58614 | ||
|
|
83e8ae392d | ||
|
|
c0297f10cb | ||
|
|
41052ae508 | ||
|
|
2d243d51e4 | ||
|
|
fdec80f676 | ||
|
|
5d3b0deb4d | ||
|
|
f68c356944 | ||
|
|
553f8d6ccd | ||
|
|
60fb3e33ae | ||
|
|
9b7c1db509 | ||
|
|
963ce356fb | ||
|
|
dcd0364ecc | ||
|
|
a2da428777 | ||
|
|
876c602710 | ||
|
|
79cb716ced | ||
|
|
ba9c975335 | ||
|
|
ef407bcb3c | ||
|
|
2898a066fe | ||
|
|
7950c4bdb4 | ||
|
|
2499012d88 | ||
|
|
7788669de1 | ||
|
|
d7f6fad3dd | ||
|
|
699c562d34 | ||
|
|
36d8225389 | ||
|
|
17ba9ee96b | ||
|
|
2769fc28d3 | ||
|
|
f5f3cfba50 | ||
|
|
1b1c77d225 | ||
|
|
cfc49e286b | ||
|
|
a2b3677c59 | ||
|
|
e5cfafdb00 | ||
|
|
bff05925e8 | ||
|
|
05f4b2b8ce | ||
|
|
2eac294643 | ||
|
|
f6789f79ea | ||
|
|
0b5976bdb1 | ||
|
|
7d2b2b9809 | ||
|
|
cce92dc1f8 | ||
|
|
fa7e59e842 | ||
|
|
8635f0ddb2 | ||
|
|
c90a423012 | ||
|
|
f0daee669b | ||
|
|
d252b660f5 | ||
|
|
e717a49c0c | ||
|
|
426155e65c | ||
|
|
6b9b446e3d | ||
|
|
ab2b2cfe6e | ||
|
|
4b236c6ed6 | ||
|
|
2396fadf04 | ||
|
|
a3bffb5867 | ||
|
|
1b44fc40af | ||
|
|
b894139ca1 | ||
|
|
daa0662869 | ||
|
|
81de9529c3 | ||
|
|
6b06caf00d | ||
|
|
9370366112 | ||
|
|
32bcf6e615 | ||
|
|
aa804471a7 | ||
|
|
681d8b1ddc | ||
|
|
c82b1f51e3 | ||
|
|
6d048e0003 | ||
|
|
0314910bbe | ||
|
|
3bd831782c | ||
|
|
40f01dca6f | ||
|
|
8dead66b58 | ||
|
|
18807191c0 | ||
|
|
9d9630a27a | ||
|
|
8ac851555d | ||
|
|
27f331a1fc | ||
|
|
e6b4d32506 | ||
|
|
a28ee58a1f | ||
|
|
47749c2d73 | ||
|
|
d6d0ff724a | ||
|
|
ba65700aad | ||
|
|
84a7cfe07d | ||
|
|
9ccd4a5e84 | ||
|
|
616434a00f | ||
|
|
4cf62f73da | ||
|
|
0145aecab4 | ||
|
|
6c4184d1f5 | ||
|
|
9d011b42a9 | ||
|
|
bf81b5cacc | ||
|
|
8d2b6e4097 | ||
|
|
50d8399f09 | ||
|
|
bc99b77dbe | ||
|
|
1c7edc9487 | ||
|
|
90c06fb3c9 | ||
|
|
10a04c16ba | ||
|
|
90a618bd7e | ||
|
|
b630b84ab0 | ||
|
|
a5ee362fc0 | ||
|
|
7c0870b6b8 | ||
|
|
a42264b280 | ||
|
|
e714604ec0 | ||
|
|
c094120f04 | ||
|
|
6691c8ddd7 | ||
|
|
013705c318 | ||
|
|
bda6f92a4d | ||
|
|
7ceb8dc79c | ||
|
|
7f48210c97 | ||
|
|
23c440cd58 | ||
|
|
0097167dec | ||
|
|
21e5f156bb | ||
|
|
08f55314d5 | ||
|
|
577bf09859 | ||
|
|
c446cd2fb0 | ||
|
|
06a8414f12 | ||
|
|
1ac01456a9 | ||
|
|
b86853f06f | ||
|
|
311a2798dd | ||
|
|
fe9998fb9d | ||
|
|
ce648c5d35 | ||
|
|
5a2a9bbf9a | ||
|
|
0f8ab05fd4 | ||
|
|
b87c00c041 | ||
|
|
8999f51dc9 | ||
|
|
d5e19db5e6 | ||
|
|
675bee83ca | ||
|
|
33e5dd1fdb | ||
|
|
4ff2794c83 | ||
|
|
81f9302da1 | ||
|
|
93f4b8b537 | ||
|
|
0587d2f8db | ||
|
|
6ba25b5468 | ||
|
|
cc10969506 | ||
|
|
c2eb50a7ee | ||
|
|
33d24068fd | ||
|
|
3a4c191b11 | ||
|
|
e06b4ccb3f | ||
|
|
3c6b86ea28 | ||
|
|
c4a9a13d6c | ||
|
|
c0f1a3c603 | ||
|
|
9d3425061a | ||
|
|
c2dcd2f67d | ||
|
|
24b822aecd | ||
|
|
a7d3de766f | ||
|
|
b56c897e4b | ||
|
|
df14032107 | ||
|
|
66b4821f7f | ||
|
|
d301cde266 | ||
|
|
0590a0d722 | ||
|
|
fc71a03a12 | ||
|
|
923c794e39 | ||
|
|
e7fbff5b3f | ||
|
|
1bd556fbb3 | ||
|
|
18a870f8c3 | ||
|
|
3e2a2c3bee | ||
|
|
73e74881a6 | ||
|
|
b37112600e | ||
|
|
6172ce4960 | ||
|
|
3d277e1c01 | ||
|
|
b3b13899f1 | ||
|
|
7c4a59539a | ||
|
|
e6dfb3da16 | ||
|
|
8e220ededa | ||
|
|
11126f8083 | ||
|
|
ac8a13db22 | ||
|
|
5ab10ff97a | ||
|
|
f3b0346ba2 | ||
|
|
96c94f97f4 | ||
|
|
192c0200e5 | ||
|
|
03ae8f459c | ||
|
|
377fdd9e5e | ||
|
|
daec7d20fe | ||
|
|
66a149590b | ||
|
|
1b6f010df2 | ||
|
|
7e4bc29b59 | ||
|
|
0284fa9b0a | ||
|
|
e5bcea59b5 | ||
|
|
16f603ced2 | ||
|
|
bdcb3b7e33 | ||
|
|
0def6fcfe3 | ||
|
|
75a352fef3 | ||
|
|
07eb1f7f4c | ||
|
|
8e35c02763 | ||
|
|
c1f6d9a858 | ||
|
|
3e20a3bac7 | ||
|
|
818570fd2d | ||
|
|
bcd2d22fbf | ||
|
|
ffc99cd4f4 | ||
|
|
bb56750c1a | ||
|
|
b08d587a22 | ||
|
|
47f4132b39 | ||
|
|
faefab5554 | ||
|
|
243a033055 | ||
|
|
db1eeaae38 | ||
|
|
8c2960e891 | ||
|
|
d6a86e8616 | ||
|
|
5260f42378 | ||
|
|
84f28f3c54 | ||
|
|
860b6793fb | ||
|
|
df03409d7a | ||
|
|
6a81f2241d | ||
|
|
5ce817cee6 | ||
|
|
7cdf124f9d | ||
|
|
ff46aa0226 | ||
|
|
669e331f6c | ||
|
|
4179ba642b | ||
|
|
00954d98f7 | ||
|
|
037e77860b | ||
|
|
47e187449d | ||
|
|
06e9afbe69 | ||
|
|
bfe8aa5f5f | ||
|
|
e51ddd7a50 | ||
|
|
442552c024 | ||
|
|
ce4806df64 | ||
|
|
0c2e65c92b | ||
|
|
b01aa2b385 | ||
|
|
2e04890756 | ||
|
|
1657857b4a | ||
|
|
72383592ba | ||
|
|
d093f935f9 | ||
|
|
8cc7d101aa | ||
|
|
f39eebbd22 | ||
|
|
3ac8bc738a | ||
|
|
0eac041a26 | ||
|
|
ab0f5daaf3 | ||
|
|
b59a0f82ab | ||
|
|
9b75e6af5c | ||
|
|
aa37f2b0ef | ||
|
|
d22237a5cc | ||
|
|
26f5e8aa4b | ||
|
|
9072c6cae0 | ||
|
|
8739c1197f | ||
|
|
a477973862 | ||
|
|
95ce26d261 | ||
|
|
8c934c1ca8 | ||
|
|
349d7d4866 | ||
|
|
f1ea8fa693 | ||
|
|
7ed0c6f099 | ||
|
|
685210aee3 | ||
|
|
af64961502 | ||
|
|
342e61da48 | ||
|
|
8ce30f0aad | ||
|
|
ae42b62b3c | ||
|
|
63b8e3ff1a | ||
|
|
7faa7c3dba | ||
|
|
91c3df7c46 | ||
|
|
eba36b6d57 | ||
|
|
ae3d9c0a0a | ||
|
|
090eb6f14d | ||
|
|
44de06f518 | ||
|
|
84a2afe08f | ||
|
|
b23db7541d | ||
|
|
7410288781 | ||
|
|
98a85f6950 | ||
|
|
c89c99b272 | ||
|
|
3f16dbd09c | ||
|
|
e547851905 | ||
|
|
cbb0462948 | ||
|
|
a185292578 | ||
|
|
cec1f54cdd | ||
|
|
0112a3141b | ||
|
|
5f93b08c23 | ||
|
|
ff0de896c4 | ||
|
|
6d98f67668 | ||
|
|
5d5cf5cf29 | ||
|
|
610edea20e | ||
|
|
8f4219a93c | ||
|
|
9540ae5a19 | ||
|
|
0f7c3f5d0f | ||
|
|
39fb3a1107 | ||
|
|
e609931d2c | ||
|
|
70d94cda8c | ||
|
|
5c89a52f23 | ||
|
|
686e0a9441 | ||
|
|
e8dcf5ee02 | ||
|
|
95369e79a5 | ||
|
|
eb0a8454bc | ||
|
|
4f059c2549 | ||
|
|
fb7dbd5716 | ||
|
|
07fc4b3728 | ||
|
|
2d5b02baf9 | ||
|
|
f8b2547a45 | ||
|
|
f8cc8acfec | ||
|
|
17787c5a4f | ||
|
|
304de5adb6 | ||
|
|
46db38c5bf | ||
|
|
99e77e409a | ||
|
|
6152ddbd5f | ||
|
|
f99a94d685 | ||
|
|
47f58ff45f | ||
|
|
f225066130 | ||
|
|
83c5d701b3 | ||
|
|
ffb3359e66 | ||
|
|
0861b21532 | ||
|
|
e7420367f1 | ||
|
|
1998c779c7 | ||
|
|
93eb33811a | ||
|
|
d7bf9dba01 | ||
|
|
d5c6942266 | ||
|
|
e870fab277 | ||
|
|
f3ae63c7a9 | ||
|
|
3df1f1b153 | ||
|
|
74fd7c684e | ||
|
|
745b262800 | ||
|
|
72f6516a1c | ||
|
|
7bb723d6b3 | ||
|
|
2ccdc8ffdc | ||
|
|
1cabf64993 | ||
|
|
c55bd5a35d | ||
|
|
77a3552797 | ||
|
|
81efd4bce7 | ||
|
|
98183ccc1e | ||
|
|
09df863b6c | ||
|
|
4e70c1882b | ||
|
|
d38d581d1d | ||
|
|
61c95240c2 | ||
|
|
59347400c3 | ||
|
|
f976e04597 | ||
|
|
1602fe88e6 | ||
|
|
bb4252363d | ||
|
|
d4eca60b1d | ||
|
|
0a0a1704be | ||
|
|
b13b32952f | ||
|
|
0978ac33bc | ||
|
|
5a4467adb9 | ||
|
|
6e8b7d25e5 | ||
|
|
0f555dbb85 | ||
|
|
f50852fee0 | ||
|
|
1f647b3cc7 | ||
|
|
caf4eab104 | ||
|
|
334078fc34 | ||
|
|
25b1d86c50 | ||
|
|
78e2ff4870 | ||
|
|
ad94cce283 | ||
|
|
b4610e5c23 | ||
|
|
e12dcc2fb8 | ||
|
|
a818276b6d | ||
|
|
269d779df7 | ||
|
|
b63f7b7e5d | ||
|
|
b4a3ac8081 | ||
|
|
bbaaaa72fb | ||
|
|
89c83001ca | ||
|
|
61f1fdabd1 | ||
|
|
28062eacb6 | ||
|
|
8bdbf8df2e | ||
|
|
27e4800ed2 | ||
|
|
37bc54e01e | ||
|
|
6115f83a09 | ||
|
|
a8159c9e55 | ||
|
|
f734e27d23 | ||
|
|
8a118df636 | ||
|
|
a19b75760f | ||
|
|
1224b98745 | ||
|
|
6243ed3bd5 | ||
|
|
41e94e1e22 | ||
|
|
8c6940c351 | ||
|
|
384a2e0e15 | ||
|
|
a691841756 | ||
|
|
ff94bd6a90 | ||
|
|
00419910b4 | ||
|
|
21c9d7fcc3 | ||
|
|
e314c605f1 | ||
|
|
8316b5cb29 | ||
|
|
be46ed12ac | ||
|
|
a2d22b6feb | ||
|
|
f4e373447e | ||
|
|
5b2dfffe0f | ||
|
|
43e4ed6e2d | ||
|
|
b347f761a7 | ||
|
|
445724573d | ||
|
|
8c5e0cf0a7 | ||
|
|
c6016a25df | ||
|
|
2e50eb487c | ||
|
|
70e5f1a6d8 | ||
|
|
5a0a5ad83b | ||
|
|
3f0a0f552b | ||
|
|
63fd35a95c | ||
|
|
9cfa7fa2a3 | ||
|
|
cfc9f524a7 | ||
|
|
db163e7bd1 | ||
|
|
f3cd569e77 | ||
|
|
a95671491d | ||
|
|
95295e47ab | ||
|
|
a54e9ddd9c | ||
|
|
0e5f89d7d6 | ||
|
|
742d5cbfb3 | ||
|
|
e3fa695ad4 | ||
|
|
d6675f3311 | ||
|
|
64850a45da | ||
|
|
6aa0b7c748 | ||
|
|
fd80728857 | ||
|
|
9618a8d543 | ||
|
|
8281fdc08b | ||
|
|
949f76cd50 | ||
|
|
ec3a6e65ae | ||
|
|
d74578ec66 | ||
|
|
865dd24901 | ||
|
|
e063028c7d | ||
|
|
db11c1b7a8 | ||
|
|
89d7a924fb | ||
|
|
fe16115b20 | ||
|
|
07db34beb9 | ||
|
|
f42fb2fdd2 | ||
|
|
5853a373f3 | ||
|
|
951fbdccbd | ||
|
|
c6d20eb91f | ||
|
|
b68cea3921 | ||
|
|
36125f1067 | ||
|
|
eee16c7a3d | ||
|
|
bf1d93f256 | ||
|
|
42e3c95f87 | ||
|
|
ee702d92e6 | ||
|
|
f5aae23111 | ||
|
|
178f770b16 | ||
|
|
1b2d72531f | ||
|
|
7e08454edd | ||
|
|
72d318323e | ||
|
|
b611a98bae | ||
|
|
8b2eb50f29 | ||
|
|
9631be1ee4 | ||
|
|
988d0d6e35 | ||
|
|
48ec6fc757 | ||
|
|
0921c5e160 | ||
|
|
83843ae210 | ||
|
|
e5e768c56f | ||
|
|
f775c9da0b | ||
|
|
6a9c3dac77 | ||
|
|
11aaaecb7b | ||
|
|
12c08154c5 | ||
|
|
79d6a6f85f | ||
|
|
4513f03e8f | ||
|
|
f3adfca9c5 | ||
|
|
0b61ec1e13 | ||
|
|
8492c9b214 | ||
|
|
2a60c52483 | ||
|
|
917e813607 | ||
|
|
c20f64685f | ||
|
|
471229216a | ||
|
|
28661ab11a | ||
|
|
11c348a3d7 | ||
|
|
ffe6b7dd70 | ||
|
|
720af9085a | ||
|
|
8916ea5299 | ||
|
|
bf9a43b3d1 | ||
|
|
bca597c4e2 | ||
|
|
33e2f63ed5 | ||
|
|
e3f6df7120 | ||
|
|
58f198ddad | ||
|
|
61edcfe4f3 | ||
|
|
85bc3ddde6 | ||
|
|
488b631c38 | ||
|
|
da1b430200 | ||
|
|
bef76f0118 | ||
|
|
dd6baa72fa | ||
|
|
519b832d8c | ||
|
|
131326675e | ||
|
|
73dfa232f9 | ||
|
|
3f64173905 | ||
|
|
5cc4260d8e | ||
|
|
7f33a3847c | ||
|
|
c9be74ce80 | ||
|
|
f5d29eafe0 | ||
|
|
274e2c1cc2 | ||
|
|
c2233f7474 | ||
|
|
75f22f44a1 | ||
|
|
d60a8a71b7 | ||
|
|
f8bfd6fd3f | ||
|
|
f2bc735bc0 | ||
|
|
9e471ac389 | ||
|
|
ca34cbd180 | ||
|
|
9f4ea662da | ||
|
|
3172a4d030 | ||
|
|
c58315e2ee | ||
|
|
dc0ea5b3f6 | ||
|
|
b50cf1cf4c | ||
|
|
a7a4499dd4 | ||
|
|
f78261ee32 | ||
|
|
b69898d624 | ||
|
|
cfa8702654 | ||
|
|
dd9c65db4c | ||
|
|
9e7e29f03f | ||
|
|
2066625bf0 | ||
|
|
7af1d00ea2 | ||
|
|
5b279a48cb | ||
|
|
3d6e84e11c | ||
|
|
527d6ab7ff | ||
|
|
8a295c72ba | ||
|
|
d2b82a37b2 | ||
|
|
ec4e680d62 | ||
|
|
aa80ed3d4b | ||
|
|
1d4a7894e8 | ||
|
|
2d28cb6897 | ||
|
|
89e561c991 | ||
|
|
d16dd7c75d | ||
|
|
e51e6b2171 | ||
|
|
3eaf5c9bc0 | ||
|
|
4625c920c3 | ||
|
|
2ce0f7beb4 | ||
|
|
81ba95f540 | ||
|
|
b25e0ea393 | ||
|
|
2632b34438 | ||
|
|
ff60013335 | ||
|
|
a0bcb03dde | ||
|
|
f0044a9342 | ||
|
|
04fb81e071 | ||
|
|
642b665418 | ||
|
|
a5fa0681ed | ||
|
|
22e922e860 | ||
|
|
0126f5ae84 | ||
|
|
cfb246fa84 | ||
|
|
651119b7dd | ||
|
|
f944a70a9c | ||
|
|
9056f5ae59 | ||
|
|
ed62c981cc | ||
|
|
2a7ba28903 | ||
|
|
e8ec2ef8d1 | ||
|
|
864e8654c3 | ||
|
|
e11453aafb | ||
|
|
c1596f098c | ||
|
|
e57620f67c | ||
|
|
e481763967 | ||
|
|
2b3d755c64 | ||
|
|
fc2db36820 | ||
|
|
4fdea782f3 | ||
|
|
188a1a3b03 | ||
|
|
8e2014f2d4 | ||
|
|
fb95d7923f | ||
|
|
fe5ca69f36 | ||
|
|
1086b808dc | ||
|
|
0050e5cdfc | ||
|
|
6b357674d0 | ||
|
|
8f44dfcde5 | ||
|
|
82c0592e49 | ||
|
|
28ab4576d5 | ||
|
|
2debd5598f | ||
|
|
d86d44e2d4 | ||
|
|
3d85460dc8 | ||
|
|
52ce85fbf2 | ||
|
|
6d70533e0b | ||
|
|
4d8338e829 | ||
|
|
a4e48e1f6b | ||
|
|
790a74f9e4 | ||
|
|
893dde9958 | ||
|
|
d448b8cd99 | ||
|
|
ca2c4a0b3e | ||
|
|
499b8193ab | ||
|
|
1f18d2b09c | ||
|
|
a92d6fd35c | ||
|
|
12adde8f80 | ||
|
|
6437079be3 | ||
|
|
8b747dff9b | ||
|
|
027ff43dfd | ||
|
|
f50c8504cf | ||
|
|
30f5a3944c | ||
|
|
a1c0b000a4 | ||
|
|
f22778aacb | ||
|
|
888ee07f65 | ||
|
|
aa5937c278 | ||
|
|
4831c80598 | ||
|
|
886a271d19 | ||
|
|
8dfb0d1d5c | ||
|
|
1f982b7999 | ||
|
|
eb1556f3e8 | ||
|
|
061b79eac0 | ||
|
|
3bbeec513a | ||
|
|
a6be59bbea | ||
|
|
96e8a909d8 | ||
|
|
8724076601 | ||
|
|
4b356aba3e | ||
|
|
a13e0a75e8 | ||
|
|
ecf91d616b | ||
|
|
0d4d0f3126 | ||
|
|
b9a8ca14c3 | ||
|
|
f7e1a2a5eb | ||
|
|
c3bc9c8591 | ||
|
|
3380e20e3a | ||
|
|
a2c87e1b7d | ||
|
|
a609b401c4 | ||
|
|
9098e44513 | ||
|
|
62524e01e1 | ||
|
|
78bf1d274e | ||
|
|
461e469f28 | ||
|
|
99252074be | ||
|
|
e4e7ae3621 | ||
|
|
63743dd2b6 | ||
|
|
a254886bad | ||
|
|
aab10fb599 | ||
|
|
9d55ecffe9 | ||
|
|
00b613d2e0 | ||
|
|
fe24322f7c | ||
|
|
660e20dada | ||
|
|
18c8e803a4 | ||
|
|
15a19949b8 | ||
|
|
ebc5a66375 | ||
|
|
1120b4ab51 | ||
|
|
f91081e39c | ||
|
|
9e991e1595 | ||
|
|
afac06081c | ||
|
|
b773228719 | ||
|
|
7001ed476d | ||
|
|
31c39650a9 | ||
|
|
fbae706b0f | ||
|
|
88c328af8e | ||
|
|
cbd8981ee2 | ||
|
|
3101926e9b | ||
|
|
c9e0910c55 | ||
|
|
d65667ce16 | ||
|
|
7d7251862c | ||
|
|
4d02a969c2 | ||
|
|
e20c776364 | ||
|
|
c55404699e | ||
|
|
6240e4eba0 | ||
|
|
cf86719607 | ||
|
|
76943b6529 | ||
|
|
ca8bbdc293 | ||
|
|
8e6f12a897 | ||
|
|
52c64c1a6a | ||
|
|
ca94d48f8b | ||
|
|
d860680823 | ||
|
|
d1dbf3745a | ||
|
|
4b1151bda1 | ||
|
|
18c64e493b | ||
|
|
fc6839b441 | ||
|
|
405b63acdd | ||
|
|
f3dee50448 | ||
|
|
04e550ebe7 | ||
|
|
05b58819d6 | ||
|
|
63c72853f4 | ||
|
|
f20cce0176 | ||
|
|
723cbcd8bd | ||
|
|
dfbb84caae | ||
|
|
009d6cafaf | ||
|
|
bd9a4289d1 | ||
|
|
29a34fef8c | ||
|
|
08e2a3a883 | ||
|
|
2d37022525 | ||
|
|
bb3faaf2cd | ||
|
|
2c43b9a926 | ||
|
|
964ed5f497 | ||
|
|
b47a94852a | ||
|
|
f318524070 | ||
|
|
04539edb45 | ||
|
|
5cf21452c1 | ||
|
|
799299c7cc | ||
|
|
458330d325 | ||
|
|
973bec9e6a | ||
|
|
1f941a5105 | ||
|
|
8217fecb33 | ||
|
|
1dda7edf1c | ||
|
|
3c03e400f0 | ||
|
|
6388d97c5c | ||
|
|
161e3086fa | ||
|
|
b3f1f938be | ||
|
|
082da6e3a6 | ||
|
|
d9b9447242 | ||
|
|
d743282578 | ||
|
|
7eee6f0b96 | ||
|
|
dda3fca4b4 | ||
|
|
8648b2f948 | ||
|
|
f52cbd24f8 | ||
|
|
5ea13eeffd | ||
|
|
6cc802952f | ||
|
|
190b9db645 | ||
|
|
81949b9cad | ||
|
|
894e419f40 | ||
|
|
cdc6c036aa | ||
|
|
1e9168f682 | ||
|
|
790415dd4f | ||
|
|
679e0ea2c3 | ||
|
|
bd167403c3 | ||
|
|
13abe62bed | ||
|
|
4147c5b870 | ||
|
|
37d4755aae | ||
|
|
a9f416c4c5 | ||
|
|
8a11f246b1 | ||
|
|
8d44577dca | ||
|
|
72457d8d10 | ||
|
|
3bb44f8d9f | ||
|
|
279297b8fa | ||
|
|
c71e661daf | ||
|
|
f8820c06fe | ||
|
|
907b40e3c6 | ||
|
|
d318e163bb | ||
|
|
6e9c36a503 | ||
|
|
c9e9fe86aa | ||
|
|
c4f4e2b524 | ||
|
|
95246b90f6 | ||
|
|
2fad29df51 | ||
|
|
a95320e162 | ||
|
|
31b8805b5e | ||
|
|
9e69d4e153 | ||
|
|
aa5ecd7b42 | ||
|
|
15f90aa503 | ||
|
|
ec86bc4a38 | ||
|
|
a3efc64901 | ||
|
|
c929ecbac0 | ||
|
|
cc32e49060 | ||
|
|
05c41460c2 | ||
|
|
794efaa209 | ||
|
|
b0e93ee18c | ||
|
|
0393b51db6 | ||
|
|
464c8ad71c | ||
|
|
9df0e01874 | ||
|
|
bf2beb2530 | ||
|
|
f0b096d41a | ||
|
|
c948f38469 | ||
|
|
516cbd73bd | ||
|
|
680ae53cf4 | ||
|
|
99b99a992d | ||
|
|
fb9d52c2b9 | ||
|
|
5cc471cc87 | ||
|
|
07c7171fbb | ||
|
|
c15dd2dec9 | ||
|
|
a408cc0246 | ||
|
|
c2568432e7 | ||
|
|
91f3cda995 | ||
|
|
28aa908513 | ||
|
|
5e24b11c21 | ||
|
|
4cdf71513f | ||
|
|
7e6d9c02f6 | ||
|
|
afc4f73e36 | ||
|
|
5ef0c52277 | ||
|
|
c23b014cff | ||
|
|
f13cddfb26 | ||
|
|
623f6f3ed0 | ||
|
|
a158716c8b | ||
|
|
9df7f7b22c | ||
|
|
8e5c24282e | ||
|
|
266429311b | ||
|
|
d74342adee | ||
|
|
4408d99524 | ||
|
|
0168b9cbea | ||
|
|
e69421226b | ||
|
|
f08d34b816 | ||
|
|
4a36c3b6a8 | ||
|
|
be0b708d32 | ||
|
|
1cea50bcfb | ||
|
|
55483cf736 | ||
|
|
16f8a1159f | ||
|
|
d4d03a846e | ||
|
|
7bccc46583 | ||
|
|
dc61e9916f | ||
|
|
cf2b5f72ae | ||
|
|
fe397caafc | ||
|
|
787405ae62 | ||
|
|
0b01bbc52e | ||
|
|
190e1d2c4f | ||
|
|
8a822e35e2 | ||
|
|
64a196f21d | ||
|
|
a24d4a9e3b | ||
|
|
dafa70b7e3 | ||
|
|
32b9bc3345 | ||
|
|
a7b8f992d3 | ||
|
|
0c66b8067e | ||
|
|
7b3645ea7c | ||
|
|
69569758d9 | ||
|
|
55777531d5 | ||
|
|
99ce8dacbf | ||
|
|
138a3b1f3c | ||
|
|
d49c663c64 | ||
|
|
e9a457e263 | ||
|
|
3b0e07100f | ||
|
|
74561500b5 | ||
|
|
3b519aeac9 | ||
|
|
9a55961786 | ||
|
|
ea5d274f4d | ||
|
|
f57f2444fe | ||
|
|
fd768df9e5 | ||
|
|
4db68e4887 | ||
|
|
6d4297a5fb | ||
|
|
ab413f2f3e | ||
|
|
574255c4b6 | ||
|
|
008ba39856 | ||
|
|
cff1b3abdb | ||
|
|
231c5b8ca1 | ||
|
|
640664494e | ||
|
|
951b7b8425 | ||
|
|
c9980539f0 | ||
|
|
7eb802b42a | ||
|
|
2f4f3ce0fe | ||
|
|
824ac86d18 | ||
|
|
4553726423 | ||
|
|
f0bde7316d | ||
|
|
4eaddadf8c | ||
|
|
9dd98b29be | ||
|
|
732946d38a | ||
|
|
966f8c36b1 | ||
|
|
ca070e67e7 | ||
|
|
b468048d95 | ||
|
|
50c5044fe8 | ||
|
|
46b2d6ba6e | ||
|
|
8aec5cf605 | ||
|
|
54af80d5ad | ||
|
|
8b2cd62211 | ||
|
|
2fc4809821 | ||
|
|
bde6de1789 | ||
|
|
029ae20573 | ||
|
|
c72cca4ea2 | ||
|
|
0f071be762 | ||
|
|
fdcddaaffc | ||
|
|
cddf47f113 | ||
|
|
76f3f5253a | ||
|
|
d833a04293 | ||
|
|
2e96860380 | ||
|
|
3e2e6385cf | ||
|
|
ccc2028690 | ||
|
|
81dbc1ca79 | ||
|
|
e9a3059be2 | ||
|
|
3d5b33856f | ||
|
|
8d2e3a1919 | ||
|
|
f3380c4fed | ||
|
|
8a58d7f973 | ||
|
|
37b98cb835 | ||
|
|
50262112b8 | ||
|
|
4b9f9862fc | ||
|
|
df60d70592 | ||
|
|
1b5bc1fa05 | ||
|
|
e4993eac24 | ||
|
|
bd1bb1ee91 | ||
|
|
2c1c57333c | ||
|
|
a466cbcf16 | ||
|
|
379f62a339 | ||
|
|
eaf2974f8d | ||
|
|
99e641a30d | ||
|
|
88d6148500 | ||
|
|
f53364eb6c | ||
|
|
b8f78e311d | ||
|
|
bb6e1e2909 | ||
|
|
c62c6664ce | ||
|
|
8ae4e3be18 | ||
|
|
0065ff5086 | ||
|
|
28d073f934 | ||
|
|
df1cb0ae08 | ||
|
|
31a1af43d5 | ||
|
|
8951e9fc90 | ||
|
|
357166414c | ||
|
|
e1a311de40 | ||
|
|
ab923cc592 | ||
|
|
99947fb135 | ||
|
|
185cb0196a | ||
|
|
309ec50691 | ||
|
|
f865484182 | ||
|
|
ed19fd0254 | ||
|
|
cec88319fe | ||
|
|
d31b7eb72d | ||
|
|
b7d93b84dd | ||
|
|
4008774908 | ||
|
|
b4275639f5 | ||
|
|
accce789ba | ||
|
|
091b1fefd2 | ||
|
|
899b1f9b96 | ||
|
|
0ce5c51c67 | ||
|
|
d6709469f6 | ||
|
|
da760db340 | ||
|
|
4242a5cedb | ||
|
|
8c41046836 | ||
|
|
3e43e3fc4c | ||
|
|
c5e6ce0e48 | ||
|
|
3ad527eb62 | ||
|
|
e622e68701 | ||
|
|
af2a6bf031 | ||
|
|
731419b61f | ||
|
|
0fafd83d76 | ||
|
|
003b78a66e | ||
|
|
4ade857f01 | ||
|
|
a90a4d1bc2 | ||
|
|
658596659f | ||
|
|
165676407a | ||
|
|
59e6d68416 | ||
|
|
e6d76db250 | ||
|
|
3b3288c53d | ||
|
|
16cf220741 | ||
|
|
5131cb0ae1 | ||
|
|
db4f7a216a | ||
|
|
3f8b97feb9 | ||
|
|
a27673eaa4 | ||
|
|
8e3291a1b0 | ||
|
|
89c04902e8 | ||
|
|
e29b100374 | ||
|
|
941d4414ce | ||
|
|
dc830324ae | ||
|
|
3f37fc1e11 | ||
|
|
3442129610 | ||
|
|
e9d29f10c1 | ||
|
|
8996dd34c2 | ||
|
|
e2c5be0fcd | ||
|
|
3d42c55560 | ||
|
|
9d287f140b | ||
|
|
5a8f28764d | ||
|
|
a2c5074d66 | ||
|
|
6acc125d4f | ||
|
|
7b9ebc2f34 | ||
|
|
4e0d6ec980 | ||
|
|
c1944c987d | ||
|
|
cdb889a985 | ||
|
|
f6281c6dcc | ||
|
|
c832a9e2b2 | ||
|
|
0c4851e436 | ||
|
|
ce1b205993 | ||
|
|
b771aa303f | ||
|
|
81178b4c8b | ||
|
|
0317681597 | ||
|
|
ddba0e318f | ||
|
|
3ef9591abd | ||
|
|
0d3c0c4077 | ||
|
|
22b32364b6 | ||
|
|
db8fd20d67 | ||
|
|
3c061095e9 | ||
|
|
05853bca89 | ||
|
|
aa489bb709 | ||
|
|
0b70465578 | ||
|
|
5c64ba3c9e | ||
|
|
e119020016 | ||
|
|
9b92a3d396 | ||
|
|
c73dc10aeb | ||
|
|
c5ee0a576e | ||
|
|
3e2ede585a | ||
|
|
ba3dd263ac | ||
|
|
7c955ecc80 | ||
|
|
48193b38c5 | ||
|
|
2f5a233e63 | ||
|
|
7b86fe5587 | ||
|
|
5396343940 | ||
|
|
fa1baa73e8 | ||
|
|
d984f11cbf | ||
|
|
ae666bd9b6 | ||
|
|
d023eb8f1f | ||
|
|
9fa62de6dd | ||
|
|
7c5748ac87 | ||
|
|
d6fa5c97db | ||
|
|
47de84259d | ||
|
|
f2b483b16e | ||
|
|
98efe89833 | ||
|
|
f8872e2803 | ||
|
|
3717443e85 | ||
|
|
a1fd581bca | ||
|
|
6a4bc1eb08 | ||
|
|
94d1f99315 | ||
|
|
7c51bdbdaf | ||
|
|
d275dfd8cc | ||
|
|
82b879fbb4 | ||
|
|
cc32bd7050 | ||
|
|
4f4ba470e0 | ||
|
|
ce47429701 | ||
|
|
550051b3f6 | ||
|
|
a1ba39b3d3 | ||
|
|
b4ad7b459f | ||
|
|
b149528406 | ||
|
|
22c257618d | ||
|
|
e1c3c334d9 | ||
|
|
c5e7159952 | ||
|
|
fe8946e3b5 | ||
|
|
c354d3c6d5 | ||
|
|
53cd907db1 | ||
|
|
605f340be5 | ||
|
|
e014ce7a47 | ||
|
|
579c1fa53c | ||
|
|
4bfb5c6397 | ||
|
|
639d635913 | ||
|
|
37e5f2c48b | ||
|
|
583bb1d0d9 | ||
|
|
d0cffb5863 | ||
|
|
548686ebfe | ||
|
|
0635c571e4 | ||
|
|
5af8fd0b21 | ||
|
|
4764925ae6 | ||
|
|
80e9831c03 | ||
|
|
f7e1fa1406 | ||
|
|
dc73e5c58f | ||
|
|
526d383929 | ||
|
|
89f7cfb896 | ||
|
|
6abc4cc549 | ||
|
|
6aa7cfc0fe | ||
|
|
345d0b8211 | ||
|
|
eb17afc368 | ||
|
|
c12b189f5f | ||
|
|
5edc745727 | ||
|
|
bc877df513 | ||
|
|
57cb22c9aa | ||
|
|
719aca88b7 | ||
|
|
b1e66478f0 | ||
|
|
25f0462c15 | ||
|
|
caded0694c | ||
|
|
39190495be | ||
|
|
1cc998bc95 | ||
|
|
54c7aad57a | ||
|
|
1c8fed5457 | ||
|
|
8e51513ee0 | ||
|
|
1788440a5c | ||
|
|
f467e4d75a | ||
|
|
1e3f8410c0 | ||
|
|
cbb7b96391 | ||
|
|
5f24338bd2 | ||
|
|
56f049cd7d | ||
|
|
a09e8b63ae | ||
|
|
400643cbcd | ||
|
|
ce68a37441 | ||
|
|
1377b6315c | ||
|
|
83e7a8d765 | ||
|
|
0e18dcb8a1 | ||
|
|
7277ef3bd8 | ||
|
|
4bdd4eab64 | ||
|
|
5bf3b929a2 | ||
|
|
66967f8326 | ||
|
|
e9abf982fe | ||
|
|
3535f44db9 | ||
|
|
c772758683 | ||
|
|
2fc097c0e8 | ||
|
|
c9d7418899 | ||
|
|
1317a4c6b7 | ||
|
|
4b0a5bdd9b | ||
|
|
2b57bdcd03 | ||
|
|
65f039e9ed | ||
|
|
3be6389fbf | ||
|
|
9bf01e3a0b | ||
|
|
1305327564 | ||
|
|
97b6cf013f | ||
|
|
e1a6b813a5 | ||
|
|
b0e30921ae | ||
|
|
f4c4f013da | ||
|
|
43ef982d95 | ||
|
|
d930bc4afd | ||
|
|
6dbdd4c0be | ||
|
|
93bd75acc8 | ||
|
|
bdeace8a68 | ||
|
|
efdf70acb2 | ||
|
|
d31ca2677e | ||
|
|
3a117b6077 | ||
|
|
6d2889f88d | ||
|
|
213b03589a | ||
|
|
79fd5fe332 | ||
|
|
25a5b72d26 | ||
|
|
8970e7fbba | ||
|
|
e96724beaf | ||
|
|
73d7d01ae4 | ||
|
|
34c69786de | ||
|
|
8587b9b780 | ||
|
|
b9f88f431b | ||
|
|
df90ee0a55 | ||
|
|
32a4075979 | ||
|
|
99606e22d6 | ||
|
|
5fd0253089 | ||
|
|
a46241bb9f | ||
|
|
a8087c8ce9 | ||
|
|
0a90ad5db7 | ||
|
|
75bda46f64 | ||
|
|
a0d2a64e57 | ||
|
|
d1c3f0c241 | ||
|
|
107606ce65 | ||
|
|
32646d0608 | ||
|
|
eabd2b6c41 | ||
|
|
b8ac093182 | ||
|
|
bac3055726 | ||
|
|
5e683b5a48 | ||
|
|
955814397a | ||
|
|
10fe175ff5 | ||
|
|
bca4a2e241 | ||
|
|
3925d4c215 | ||
|
|
8ca5c62575 | ||
|
|
f178825d21 | ||
|
|
95d0dacd28 | ||
|
|
b6f850dc27 | ||
|
|
38ce63795c | ||
|
|
bbf42da875 | ||
|
|
8df0ecc223 | ||
|
|
c37bf12c8a | ||
|
|
83051b2576 | ||
|
|
75360f734c | ||
|
|
87754047fa | ||
|
|
f121db059e | ||
|
|
c9e693287c | ||
|
|
0876d1ff8e | ||
|
|
6bbcc5af77 | ||
|
|
6a9f6a6fc8 | ||
|
|
1da3546f2d | ||
|
|
d233425a77 | ||
|
|
8883d505ba | ||
|
|
c51d806840 | ||
|
|
13a0c4607d | ||
|
|
fd8e50b533 | ||
|
|
682216dcf4 | ||
|
|
6bda5f5b03 | ||
|
|
6174f121c8 | ||
|
|
89daa836e7 | ||
|
|
7c5616cc79 | ||
|
|
27fdbff619 | ||
|
|
516447a104 | ||
|
|
0c6c172d6a | ||
|
|
d11f9d26c0 | ||
|
|
a2cb0ec8ad | ||
|
|
1bddadf3a4 | ||
|
|
f0f843f746 | ||
|
|
317a1f119b | ||
|
|
b128ef17c9 | ||
|
|
cc4350b0f9 | ||
|
|
fe2290fccb | ||
|
|
0b00f2d9e6 | ||
|
|
e7aa91b3e1 | ||
|
|
333abd2486 | ||
|
|
226835e3d0 | ||
|
|
48db4c8b8e | ||
|
|
ae4e15286a | ||
|
|
1b96489656 | ||
|
|
99c899ea3a | ||
|
|
8f76dd7a2e | ||
|
|
1f2c2269e6 | ||
|
|
201185f7e7 | ||
|
|
e38d68c019 | ||
|
|
91332e06e5 | ||
|
|
96b4af1fea | ||
|
|
e4d67645b7 | ||
|
|
b4bccc9be2 | ||
|
|
d6ddee236a | ||
|
|
364e355114 | ||
|
|
7d4f9d60b1 | ||
|
|
116bc839fc | ||
|
|
153d4b2b1d | ||
|
|
2f4f140662 | ||
|
|
475ac1bb9c | ||
|
|
49015b7d64 | ||
|
|
99efcce4d0 | ||
|
|
c3c971db23 | ||
|
|
8011634b7a | ||
|
|
ededfcb822 | ||
|
|
92a0af5ce3 | ||
|
|
ffaffbc66f | ||
|
|
2596bbe2bc | ||
|
|
3310bdf551 | ||
|
|
19d357b866 | ||
|
|
871aecb689 | ||
|
|
00bb055474 | ||
|
|
f10d182468 | ||
|
|
74a4e7d19d | ||
|
|
c7c64c6002 | ||
|
|
8474d0d95d | ||
|
|
4a5c878c36 | ||
|
|
2b0a70355a | ||
|
|
9b5166826f | ||
|
|
3b1efb2c30 | ||
|
|
324415be15 | ||
|
|
b5d2a41d60 | ||
|
|
cc3aad49ed | ||
|
|
7c44f9ab13 | ||
|
|
2365e1859f | ||
|
|
03700e0a04 | ||
|
|
1ff4901846 | ||
|
|
d70a71a12e | ||
|
|
866d9621cb | ||
|
|
2d3fc03a00 | ||
|
|
19f782e4a5 | ||
|
|
fdd851d29a | ||
|
|
6cd38a3469 | ||
|
|
bfa3b87188 | ||
|
|
628fda2097 | ||
|
|
69a9fa1193 | ||
|
|
9e0805ec89 | ||
|
|
f67c6fe8be | ||
|
|
8d38fa87a4 | ||
|
|
7c79c6d1f3 | ||
|
|
b0781b45f8 | ||
|
|
ee53539906 | ||
|
|
c8ab6a06fb | ||
|
|
c75ac51eb7 | ||
|
|
33d7d994d4 | ||
|
|
da6d749072 | ||
|
|
96291f63da | ||
|
|
bef2b28acc | ||
|
|
6464bb065d | ||
|
|
8b45b6f1a0 | ||
|
|
302f571837 | ||
|
|
70ba5d80cd | ||
|
|
ac30152930 | ||
|
|
ad01a3da4d | ||
|
|
5f5f17112a | ||
|
|
156da670e8 | ||
|
|
821c26f35b | ||
|
|
a092f394fa | ||
|
|
18e3194e27 | ||
|
|
08a1e1e582 | ||
|
|
074005ed02 | ||
|
|
7660a3d78f | ||
|
|
9211e60804 | ||
|
|
87f295be28 | ||
|
|
cfa89c8921 | ||
|
|
70f834d925 | ||
|
|
41dde209d5 | ||
|
|
6b4e4fd440 | ||
|
|
b83b2453a0 | ||
|
|
82d31d996d | ||
|
|
4faa617039 | ||
|
|
a1d2276668 | ||
|
|
19c50f728e | ||
|
|
a94307c59f | ||
|
|
c6403e87f1 | ||
|
|
5d350ef5ac | ||
|
|
b56cd3439e | ||
|
|
4fd1d986dd | ||
|
|
25693d44eb | ||
|
|
43af25a30e | ||
|
|
023278e0c0 | ||
|
|
55d57bc07b | ||
|
|
a81a262fb6 | ||
|
|
c37360f848 | ||
|
|
d7700900db | ||
|
|
0634c79f74 | ||
|
|
31b3c2ef64 | ||
|
|
4a71f2c556 | ||
|
|
9783409756 | ||
|
|
faa136a365 | ||
|
|
c7e85c00ca | ||
|
|
94647bbb57 | ||
|
|
bd73b94ea4 | ||
|
|
1aa26a5a6c | ||
|
|
d764d0f096 | ||
|
|
df13a0edc2 | ||
|
|
52a0de3b59 | ||
|
|
38886b28f7 | ||
|
|
226cf6fc38 | ||
|
|
203a52bfd1 | ||
|
|
1b6bf13619 | ||
|
|
bc94e90994 | ||
|
|
347125365f | ||
|
|
59a718be20 | ||
|
|
c41b3a612a | ||
|
|
23f77df911 | ||
|
|
117b952455 | ||
|
|
7714504831 | ||
|
|
5c61c24c04 | ||
|
|
b11e1d48e0 | ||
|
|
a6ce114284 | ||
|
|
88d512eacc | ||
|
|
f4d5366c93 | ||
|
|
ac9aaec7b8 | ||
|
|
0c5b950c87 | ||
|
|
47141f8e4f | ||
|
|
ec302fe665 | ||
|
|
7f304b0c28 | ||
|
|
8f88f7d89b | ||
|
|
400fd461ab | ||
|
|
cd8d2d4808 | ||
|
|
4cfa79488f | ||
|
|
b5993bcc21 | ||
|
|
6af00bf026 | ||
|
|
97c456c9e1 | ||
|
|
08f44197f3 | ||
|
|
779c7d2942 | ||
|
|
7fd14e0283 | ||
|
|
7d32a8750d | ||
|
|
110e0b78fc | ||
|
|
bc77812488 | ||
|
|
3e28cd5c95 | ||
|
|
2715dbaaa5 | ||
|
|
3baf12d3e4 | ||
|
|
a428d36604 | ||
|
|
b5207bc88c | ||
|
|
910578a2ac | ||
|
|
88176997e7 | ||
|
|
233e6f9be0 | ||
|
|
1fd11fb547 | ||
|
|
8bfd206578 | ||
|
|
62c6fd2e40 | ||
|
|
ac2d2a0463 | ||
|
|
c1e4b47b99 | ||
|
|
32b479467a | ||
|
|
6cab2b34d6 | ||
|
|
9e744199fe | ||
|
|
b22021e7f0 | ||
|
|
68bdf47ea4 | ||
|
|
af2876bd71 | ||
|
|
1e5d6bad2a | ||
|
|
f6c836157d | ||
|
|
d10874f216 | ||
|
|
700713abcf | ||
|
|
5180426fc1 | ||
|
|
e1c8a08f2f | ||
|
|
16f0bcc3ac | ||
|
|
9c98a38604 | ||
|
|
1b03c7e474 | ||
|
|
689feb78d0 | ||
|
|
336b15b199 | ||
|
|
4a4bb819ec | ||
|
|
48be010f33 | ||
|
|
104e21b314 | ||
|
|
aaf5cab138 | ||
|
|
22b744340a | ||
|
|
2954558004 | ||
|
|
b797590a4e | ||
|
|
9d71fe1724 | ||
|
|
9ad0ed642d | ||
|
|
cbd217271d | ||
|
|
65896497fb | ||
|
|
54a37b577d | ||
|
|
f1948ffb6a | ||
|
|
7dd3b0ed15 | ||
|
|
11fcfa8202 | ||
|
|
199e61ea14 | ||
|
|
0daa6c8eff | ||
|
|
b1b5f97f03 | ||
|
|
32d5587669 | ||
|
|
c13c0f24e5 | ||
|
|
7eb1d72333 | ||
|
|
3d6ec1feba | ||
|
|
c267232160 | ||
|
|
48f4b008df | ||
|
|
ae1f181fbf | ||
|
|
cbfee72d51 | ||
|
|
ee709054f2 | ||
|
|
ee60ec962b | ||
|
|
e013e38c5e | ||
|
|
20aa78105f | ||
|
|
770590e4f2 | ||
|
|
8e9e7b49ea | ||
|
|
08554889fd | ||
|
|
8ac2869de3 | ||
|
|
bb8e8a0df5 | ||
|
|
e2bd6a91cd | ||
|
|
ed0e5ef497 | ||
|
|
e1e475e605 | ||
|
|
cef5b04eb1 | ||
|
|
7e44af936d | ||
|
|
6aec5a9a60 | ||
|
|
79c75c886b | ||
|
|
bf6bcaed72 | ||
|
|
70bc2a6656 | ||
|
|
695cdea447 | ||
|
|
d0735a6d58 | ||
|
|
175c26bea9 | ||
|
|
8a298edd4e | ||
|
|
9860a1c138 | ||
|
|
3dff598d03 | ||
|
|
62b571d5f1 | ||
|
|
3af6623a91 | ||
|
|
c73ed8a4c5 | ||
|
|
4d5ba65254 | ||
|
|
91856f1159 | ||
|
|
f7da408f83 | ||
|
|
2824c55231 | ||
|
|
874655846c | ||
|
|
1620acedb1 | ||
|
|
6395e5dbbb | ||
|
|
251d9cdb8a | ||
|
|
623571acbb | ||
|
|
250f07ffa7 | ||
|
|
8917d7c16c | ||
|
|
d759280c18 | ||
|
|
67bc3903d4 | ||
|
|
cf6f83a44b | ||
|
|
4b15563ba3 | ||
|
|
dc36e15448 | ||
|
|
0b6330e98b | ||
|
|
2e93687bb4 | ||
|
|
0f925a466a | ||
|
|
16eeeda787 | ||
|
|
52f1df98bb | ||
|
|
a0ccff23a3 | ||
|
|
b8bed627a8 | ||
|
|
8d058d9dc8 | ||
|
|
57e92ff8d3 | ||
|
|
6eff724f97 | ||
|
|
55c3fe503b | ||
|
|
7f1ac63c58 | ||
|
|
2bb2e28f91 | ||
|
|
0bdffc5036 | ||
|
|
7202fbf084 | ||
|
|
317c3afb7a | ||
|
|
577baeca59 | ||
|
|
7c680cac10 | ||
|
|
0fadbd52a3 | ||
|
|
38e204dfe8 | ||
|
|
bf62653531 | ||
|
|
d851be41d3 | ||
|
|
3bd1875321 | ||
|
|
448c1d69a7 | ||
|
|
c99a5cb535 | ||
|
|
b824ef93bd | ||
|
|
0492e90d6f | ||
|
|
4ffda9f705 | ||
|
|
b32d4fc42d | ||
|
|
c92aa91aa7 | ||
|
|
a6c32a7e30 | ||
|
|
4330dc39bf | ||
|
|
da50b19b6b | ||
|
|
797018fb8a | ||
|
|
3a8f891c7d | ||
|
|
56a788286c | ||
|
|
fd95364d5f | ||
|
|
470fde0890 | ||
|
|
f12d878c0b | ||
|
|
e8993932c1 | ||
|
|
e3933e4ddc | ||
|
|
dd67239b6e | ||
|
|
1ea0d3bd8b | ||
|
|
8b952d4be6 | ||
|
|
9e8a3bc701 | ||
|
|
76807176fb | ||
|
|
3650624e4b | ||
|
|
585c509aba | ||
|
|
fc8db130e0 | ||
|
|
046c7e732f | ||
|
|
564a27461d | ||
|
|
682d678f91 | ||
|
|
4ebbc1a01d | ||
|
|
4ec32a6403 | ||
|
|
412627aab0 | ||
|
|
2584abda0e | ||
|
|
7692322fba | ||
|
|
954018fea2 | ||
|
|
ebf37f7310 | ||
|
|
f22b836ede | ||
|
|
1cea786d66 | ||
|
|
9be10f7b79 | ||
|
|
1f35d0ec2f | ||
|
|
9fcf36a2ff | ||
|
|
30f5a66487 | ||
|
|
60e0ad1f5d | ||
|
|
ed60b4670e | ||
|
|
318daaf083 | ||
|
|
182987218b | ||
|
|
5ff8c7302f | ||
|
|
398712403b | ||
|
|
63f72eb23b | ||
|
|
9dea6d7200 | ||
|
|
36f63bdf99 | ||
|
|
a09fc14625 | ||
|
|
71e280238d | ||
|
|
e20bb13649 | ||
|
|
ed8108a9d8 | ||
|
|
c0b3c9a330 | ||
|
|
8a252bff64 | ||
|
|
d3d3106fc9 | ||
|
|
1ebb09226d | ||
|
|
52163428e9 | ||
|
|
da9dda2c2b | ||
|
|
a4a14cae96 | ||
|
|
989d6c55c4 | ||
|
|
06a211a24a | ||
|
|
1c3e6ba930 | ||
|
|
55af696b7c | ||
|
|
99123ad1c3 | ||
|
|
636e9514e8 | ||
|
|
cdf9cf5cf4 | ||
|
|
797dedfcbb | ||
|
|
b61de4866c | ||
|
|
47e649643f | ||
|
|
931951ff37 | ||
|
|
6f42b4c316 | ||
|
|
58c446de2d | ||
|
|
74bf6bc411 | ||
|
|
ad3c24f950 | ||
|
|
998e487fe8 | ||
|
|
93346b0c63 | ||
|
|
7d9920691f | ||
|
|
b1942678b4 | ||
|
|
8c77d0d775 | ||
|
|
3e667ee39a | ||
|
|
52b2858ac2 | ||
|
|
6fcb4c2058 | ||
|
|
7411670e22 | ||
|
|
cfd23c395a | ||
|
|
2e8f670e94 | ||
|
|
18a88eab51 | ||
|
|
84e9f9794d | ||
|
|
628c0e5dcc | ||
|
|
cdee08bd36 | ||
|
|
7ed43da425 | ||
|
|
461a0b3645 | ||
|
|
bd56539103 | ||
|
|
9bcd3de69b | ||
|
|
d8f57963a1 | ||
|
|
bf59d2f357 | ||
|
|
5328f7fe69 | ||
|
|
fb90f6591b | ||
|
|
9eea42b121 | ||
|
|
d66722e737 | ||
|
|
374f8ba1de | ||
|
|
74c984dec3 | ||
|
|
52ea0215f0 | ||
|
|
ea3d719b32 | ||
|
|
fd1e655075 | ||
|
|
47d37c2ec9 | ||
|
|
9f8d439780 | ||
|
|
7e1bdc99eb | ||
|
|
dac36d7f55 | ||
|
|
9d495a10ec | ||
|
|
9bb99319ba | ||
|
|
bc8d8dcd04 | ||
|
|
b2d9a7675d | ||
|
|
2477197656 | ||
|
|
171083b2f1 | ||
|
|
e592eb969f | ||
|
|
db1493f138 | ||
|
|
57c270f8fa | ||
|
|
bfe8bc89c0 | ||
|
|
0a00862495 | ||
|
|
7dd53d93cd | ||
|
|
abe65d4064 | ||
|
|
4977b31ba6 | ||
|
|
c1beb85ba5 | ||
|
|
ca9a78eea4 | ||
|
|
9bf006f4d3 | ||
|
|
3bb2a082b7 | ||
|
|
92d11522d2 | ||
|
|
44cfdc1503 | ||
|
|
2fdcbedea8 | ||
|
|
787c7fd966 | ||
|
|
09b4ad6937 | ||
|
|
580d43aeaf | ||
|
|
a1a7fec15f | ||
|
|
6dcd74d116 | ||
|
|
187f5a8a93 | ||
|
|
2eb938147a | ||
|
|
deffb75c14 | ||
|
|
f91707bfbe | ||
|
|
8aba7825dc | ||
|
|
b8b5b2fef2 | ||
|
|
f4d6d69184 | ||
|
|
a5b1c685e1 | ||
|
|
609805b84d | ||
|
|
00d1da7c01 | ||
|
|
7335726c7d | ||
|
|
02779939f0 | ||
|
|
6c6f015f40 | ||
|
|
f087d38b86 | ||
|
|
c78957f55c | ||
|
|
9ce0c47cd4 | ||
|
|
60034f2c96 | ||
|
|
c9a4af218e | ||
|
|
c5c2e61e06 | ||
|
|
b2930dd6a7 | ||
|
|
4aa6700ceb | ||
|
|
267ecfacab | ||
|
|
007597239f | ||
|
|
5699abf1be | ||
|
|
a6ccd037e2 | ||
|
|
009991ce4c | ||
|
|
6ef788a8f4 | ||
|
|
fa37f7d40a | ||
|
|
b195cebac7 | ||
|
|
8aeea60888 | ||
|
|
6e0857c6c1 | ||
|
|
260fdbe3b3 | ||
|
|
2f30c6c781 | ||
|
|
d5b4da655a | ||
|
|
1694ed7758 | ||
|
|
ee6cc6d319 | ||
|
|
7670e320ba | ||
|
|
15ab745bd0 | ||
|
|
7468b33991 | ||
|
|
750e02f38a | ||
|
|
40324ee89f | ||
|
|
95d146fea2 | ||
|
|
dc20b68a37 | ||
|
|
563e3072a5 | ||
|
|
b3ba4db00b | ||
|
|
9db1f3430e | ||
|
|
ec19932eef | ||
|
|
a4c1480a1a | ||
|
|
91e0452320 | ||
|
|
ad80ea7885 | ||
|
|
daf31870f3 | ||
|
|
1c20cda389 | ||
|
|
631759d833 | ||
|
|
ca02c66f26 | ||
|
|
3ac095d359 | ||
|
|
35d49f6a5e | ||
|
|
e1bc223de0 | ||
|
|
e065ead9b3 | ||
|
|
f9471f9b9b | ||
|
|
2612b50d06 | ||
|
|
d9ce2906a0 | ||
|
|
b76397f98e | ||
|
|
fcad9e0be5 | ||
|
|
2934347865 | ||
|
|
315f1b0207 | ||
|
|
965bd79a86 | ||
|
|
c18563e34b | ||
|
|
161e0de8d5 | ||
|
|
40aeca0740 | ||
|
|
63dd7fa7c0 | ||
|
|
5c0d8a7fef | ||
|
|
509b49caf1 | ||
|
|
38c51cf79c | ||
|
|
0b693bba4e | ||
|
|
1258f34c78 | ||
|
|
510c0d5f56 | ||
|
|
cdb630e580 | ||
|
|
65fbd38105 | ||
|
|
1570132a55 | ||
|
|
7b5b748d23 | ||
|
|
041601c4a5 | ||
|
|
f692fd0202 | ||
|
|
e7b4de56f2 | ||
|
|
4a616a0c04 | ||
|
|
b2ab114b6d | ||
|
|
0814675d2a | ||
|
|
13df35462b | ||
|
|
899868f51e | ||
|
|
ee466aebce | ||
|
|
687ef2662e | ||
|
|
5aa29acbd3 | ||
|
|
a8523e6d01 | ||
|
|
1c2b3d063b | ||
|
|
551a000893 | ||
|
|
0d82d425cc | ||
|
|
0e1cea1034 | ||
|
|
2b75153148 | ||
|
|
c170615fb3 | ||
|
|
f6e84b6a35 | ||
|
|
6144f09a1f | ||
|
|
de142e8050 | ||
|
|
d0c1a119fd | ||
|
|
8fd80d3185 | ||
|
|
ae28c82858 | ||
|
|
1766764c7d | ||
|
|
129f8d72bd | ||
|
|
f946389d60 | ||
|
|
7314b5ecae | ||
|
|
7b0806355f | ||
|
|
49cf72e058 | ||
|
|
a11cad619d | ||
|
|
c1d35e8a57 | ||
|
|
fede348fbd | ||
|
|
f3c60e8fa6 | ||
|
|
00e53439ed | ||
|
|
368fced0c4 | ||
|
|
666771fb0f | ||
|
|
9e3f978677 | ||
|
|
f467d1c4f7 | ||
|
|
d8fc9d937e | ||
|
|
0a749ce913 | ||
|
|
e6db505cf7 | ||
|
|
9e8d6aaaa1 | ||
|
|
e814b551b4 | ||
|
|
080da48223 | ||
|
|
897330e646 | ||
|
|
c4c7b5b1a9 | ||
|
|
b90861bc63 | ||
|
|
6d1297a85f | ||
|
|
dfd2c33657 | ||
|
|
f5af551325 | ||
|
|
7aad27c3d2 | ||
|
|
60ff3b08d4 | ||
|
|
7a5588d5de | ||
|
|
56b6fbbe7f | ||
|
|
46c408befb | ||
|
|
6f808fc25a | ||
|
|
4cba44fbb1 | ||
|
|
91c45bad71 | ||
|
|
a30caefc04 | ||
|
|
eb20fda878 | ||
|
|
4a5aa02e6c | ||
|
|
25b37ad915 | ||
|
|
bbcceb982a | ||
|
|
c41f5eb84d | ||
|
|
89dc9e90b2 | ||
|
|
39b1dedf12 | ||
|
|
be28820fb2 | ||
|
|
0654c8cf07 | ||
|
|
2f5cb81029 | ||
|
|
067d6e8514 | ||
|
|
42e19e1e2b | ||
|
|
6b846b91b4 | ||
|
|
5838a41813 | ||
|
|
3cd5513c0c | ||
|
|
bfdc8d1053 | ||
|
|
30ec8216e1 | ||
|
|
12c3fc6ce3 | ||
|
|
7b3a1409d5 | ||
|
|
924bed06cb | ||
|
|
8b0aa7a6b3 | ||
|
|
367c385fff | ||
|
|
840efb1571 | ||
|
|
9ba19d27a6 | ||
|
|
1d603e1ec2 | ||
|
|
7818b43045 | ||
|
|
3936100000 | ||
|
|
1a846b04ee | ||
|
|
384a355a53 | ||
|
|
58ad5c3938 | ||
|
|
6ee68d1418 | ||
|
|
6e45c14ac5 | ||
|
|
e786c9c79a | ||
|
|
518ac16814 | ||
|
|
1d07eafa83 | ||
|
|
1600b6d0ea | ||
|
|
6de3a7246e | ||
|
|
cbd29df52a | ||
|
|
92998bafc8 | ||
|
|
1022753213 | ||
|
|
b85942989d | ||
|
|
f2f43a2231 | ||
|
|
2979a8edec | ||
|
|
0e90739786 | ||
|
|
185a530b59 | ||
|
|
4f6b31d14a | ||
|
|
f1dde5c925 | ||
|
|
64afa3701a | ||
|
|
cb0b6614c6 | ||
|
|
177063d39c | ||
|
|
be595aba91 | ||
|
|
66d9d853af | ||
|
|
95a68af795 | ||
|
|
c1937ea71f | ||
|
|
a7bd8c822a | ||
|
|
0eff4f0096 | ||
|
|
4d7fa08805 | ||
|
|
5fd4312ff8 | ||
|
|
a600430be4 | ||
|
|
f77b598899 | ||
|
|
ac045539d1 | ||
|
|
5b0fa9054b | ||
|
|
3c2a00b17b | ||
|
|
47ddf31f76 | ||
|
|
57ae06e139 | ||
|
|
7f4373e000 | ||
|
|
63609bb52c | ||
|
|
f0af184262 | ||
|
|
72cc3576d3 | ||
|
|
3fe7d2ea15 | ||
|
|
8eed54f1f7 | ||
|
|
c7ee8a0635 | ||
|
|
33a6a7d3a0 | ||
|
|
2851781a72 | ||
|
|
45b9919f67 | ||
|
|
207e846ae6 | ||
|
|
a83c276aa2 | ||
|
|
4cdb99a383 | ||
|
|
8fe60a893c | ||
|
|
0c44c48628 | ||
|
|
6a18e546ca | ||
|
|
4cedccb178 | ||
|
|
eab9a735a9 | ||
|
|
1df05cf344 | ||
|
|
843ff0eabc | ||
|
|
5a23be2224 | ||
|
|
45c8817c62 | ||
|
|
7f87b255f9 | ||
|
|
665c84c6de | ||
|
|
b91a077c91 | ||
|
|
59b924efe7 | ||
|
|
730718a396 | ||
|
|
3c0edc0d6a | ||
|
|
7c234ab7e9 | ||
|
|
b82319cb54 | ||
|
|
6685495400 | ||
|
|
b216589e88 | ||
|
|
744aa153f6 | ||
|
|
67612fce98 | ||
|
|
72ba1a173c | ||
|
|
989e217775 | ||
|
|
b0d556c8eb | ||
|
|
1a54d8fad9 | ||
|
|
f9ace29cab | ||
|
|
a97570027d | ||
|
|
de36faa0a7 | ||
|
|
19641bd897 | ||
|
|
2c64641a1b | ||
|
|
5ac1118db3 | ||
|
|
717b88b5fe | ||
|
|
158a7fc311 | ||
|
|
2c46279617 | ||
|
|
b843d5f13b | ||
|
|
4aff3f0495 | ||
|
|
4406f133b9 | ||
|
|
572dfd529e | ||
|
|
2cb6ddfe9a | ||
|
|
250236bd25 | ||
|
|
7f24563bba | ||
|
|
5d6a9ad2d0 | ||
|
|
0115bf254e | ||
|
|
b9c2b42725 | ||
|
|
e54928720a | ||
|
|
f8f22cdef7 | ||
|
|
e694276a8d | ||
|
|
a8369b4e93 | ||
|
|
73b7bcc6ce | ||
|
|
fab8e66fe1 | ||
|
|
1cd8040692 | ||
|
|
4db1b57c70 | ||
|
|
7268e02386 | ||
|
|
805aa3ca9f | ||
|
|
29cb34551c | ||
|
|
d267be4455 | ||
|
|
92f4ade371 | ||
|
|
9235eda73b | ||
|
|
1fe23afd1b | ||
|
|
09637c3069 | ||
|
|
2cafd509fc | ||
|
|
62cc570ab2 | ||
|
|
1ec9370e68 | ||
|
|
5b4c60ecba | ||
|
|
7b7488ece8 | ||
|
|
4ba7ff9f27 | ||
|
|
df2d1aca4b | ||
|
|
4fcba70c9a | ||
|
|
d0fc20ca6e | ||
|
|
9402b54f9b | ||
|
|
f0e7795b9b | ||
|
|
bba18d8bc9 | ||
|
|
0494e5fc8f | ||
|
|
df1b46272d | ||
|
|
b06dbd3069 | ||
|
|
ed068f09b0 | ||
|
|
5e852d05ee | ||
|
|
d111393bd6 | ||
|
|
f84aa8c638 | ||
|
|
89bff73431 | ||
|
|
8e07dfc730 | ||
|
|
cd16dddf13 | ||
|
|
25605c45b9 | ||
|
|
b6d0d54609 | ||
|
|
98981dac27 | ||
|
|
ddf03cbcf2 | ||
|
|
1e1abf407c | ||
|
|
1267cdac4d | ||
|
|
05bcee12ae | ||
|
|
fc3f15e0cf | ||
|
|
0a7765f639 | ||
|
|
c214458770 | ||
|
|
bfe501c84a | ||
|
|
e034465df8 | ||
|
|
a7b78d4131 | ||
|
|
3eed34c710 | ||
|
|
9cb3bef156 | ||
|
|
46c7e3fbed | ||
|
|
a49a00a25f | ||
|
|
eed0382b41 | ||
|
|
673843fb66 | ||
|
|
4e45c94fc3 | ||
|
|
0a11dc6673 | ||
|
|
4ede2c20a1 | ||
|
|
af0cf523e3 | ||
|
|
3908e00650 | ||
|
|
f9bdf6da1c | ||
|
|
811f35b028 | ||
|
|
87cdf9222d | ||
|
|
2ca2cc9597 | ||
|
|
edb232df60 | ||
|
|
af113c0ffd | ||
|
|
856b495995 | ||
|
|
a56bbf0b3b | ||
|
|
4b54113f08 | ||
|
|
52371b7705 | ||
|
|
629bead919 | ||
|
|
c7cd72787f | ||
|
|
ec6e2c240f | ||
|
|
3187a0f820 | ||
|
|
f86b9299c4 | ||
|
|
d27d0abeb0 | ||
|
|
7c59348138 | ||
|
|
ab53f44157 | ||
|
|
b35f325d94 | ||
|
|
393c14de54 | ||
|
|
bff17c0b95 | ||
|
|
d172828ac5 | ||
|
|
9500ac73fc | ||
|
|
e2cf7e4421 | ||
|
|
c087a6b49b |
@@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import print_function
|
||||
from logging import handlers
|
||||
from os.path import dirname
|
||||
import logging
|
||||
@@ -9,7 +10,6 @@ import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
import time
|
||||
|
||||
# Root path
|
||||
base_path = dirname(os.path.abspath(__file__))
|
||||
@@ -18,7 +18,12 @@ base_path = dirname(os.path.abspath(__file__))
|
||||
sys.path.insert(0, os.path.join(base_path, 'libs'))
|
||||
|
||||
from couchpotato.environment import Env
|
||||
from couchpotato.core.helpers.variable import getDataDir
|
||||
from couchpotato.core.helpers.variable import getDataDir, removePyc
|
||||
|
||||
|
||||
# Remove pyc files before dynamic load (sees .pyc files regular .py modules)
|
||||
removePyc(base_path)
|
||||
|
||||
|
||||
class Loader(object):
|
||||
|
||||
@@ -28,7 +33,7 @@ class Loader(object):
|
||||
|
||||
# Get options via arg
|
||||
from couchpotato.runner import getOptions
|
||||
self.options = getOptions(base_path, sys.argv[1:])
|
||||
self.options = getOptions(sys.argv[1:])
|
||||
|
||||
# Load settings
|
||||
settings = Env.get('settings')
|
||||
@@ -49,7 +54,7 @@ class Loader(object):
|
||||
# Create logging dir
|
||||
self.log_dir = os.path.join(self.data_dir, 'logs');
|
||||
if not os.path.isdir(self.log_dir):
|
||||
os.mkdir(self.log_dir)
|
||||
os.makedirs(self.log_dir)
|
||||
|
||||
# Logging
|
||||
from couchpotato.core.logger import CPLog
|
||||
@@ -66,10 +71,11 @@ class Loader(object):
|
||||
signal.signal(signal.SIGTERM, lambda signum, stack_frame: sys.exit(1))
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
addEvent('app.after_shutdown', self.afterShutdown)
|
||||
addEvent('app.do_shutdown', self.setRestart)
|
||||
|
||||
def afterShutdown(self, restart):
|
||||
def setRestart(self, restart):
|
||||
self.do_restart = restart
|
||||
return True
|
||||
|
||||
def onExit(self, signal, frame):
|
||||
from couchpotato.core.event import fireEvent
|
||||
@@ -97,7 +103,6 @@ class Loader(object):
|
||||
|
||||
# Release log files and shutdown logger
|
||||
logging.shutdown()
|
||||
time.sleep(3)
|
||||
|
||||
args = [sys.executable] + [os.path.join(base_path, os.path.basename(__file__))] + sys.argv[1:]
|
||||
subprocess.Popen(args)
|
||||
@@ -132,14 +137,15 @@ if __name__ == '__main__':
|
||||
pass
|
||||
except SystemExit:
|
||||
raise
|
||||
except socket.error as (nr, msg):
|
||||
except socket.error as e:
|
||||
# log when socket receives SIGINT, but continue.
|
||||
# previous code would have skipped over other types of IO errors too.
|
||||
nr, msg = e
|
||||
if nr != 4:
|
||||
try:
|
||||
l.log.critical(traceback.format_exc())
|
||||
except:
|
||||
print traceback.format_exc()
|
||||
print(traceback.format_exc())
|
||||
raise
|
||||
except:
|
||||
try:
|
||||
@@ -148,7 +154,7 @@ if __name__ == '__main__':
|
||||
if l:
|
||||
l.log.critical(traceback.format_exc())
|
||||
else:
|
||||
print traceback.format_exc()
|
||||
print(traceback.format_exc())
|
||||
except:
|
||||
print traceback.format_exc()
|
||||
print(traceback.format_exc())
|
||||
raise
|
||||
|
||||
231
Desktop.py
231
Desktop.py
@@ -1,231 +0,0 @@
|
||||
from esky.util import appdir_from_executable #@UnresolvedImport
|
||||
from threading import Thread
|
||||
from version import VERSION
|
||||
from wx.lib.softwareupdate import SoftwareUpdate
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import webbrowser
|
||||
import wx
|
||||
|
||||
# Include proper dirs
|
||||
if hasattr(sys, 'frozen'):
|
||||
import libs
|
||||
base_path = os.path.dirname(os.path.dirname(os.path.abspath(libs.__file__)))
|
||||
else:
|
||||
base_path = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
lib_dir = os.path.join(base_path, 'libs')
|
||||
|
||||
sys.path.insert(0, base_path)
|
||||
sys.path.insert(0, lib_dir)
|
||||
|
||||
from couchpotato.environment import Env
|
||||
|
||||
class TaskBarIcon(wx.TaskBarIcon):
|
||||
|
||||
TBMENU_OPEN = wx.NewId()
|
||||
TBMENU_SETTINGS = wx.NewId()
|
||||
TBMENU_EXIT = wx.ID_EXIT
|
||||
|
||||
closed = False
|
||||
menu = False
|
||||
enabled = False
|
||||
|
||||
def __init__(self, frame):
|
||||
wx.TaskBarIcon.__init__(self)
|
||||
self.frame = frame
|
||||
|
||||
icon = wx.Icon('icon.png', wx.BITMAP_TYPE_PNG)
|
||||
self.SetIcon(icon)
|
||||
|
||||
self.Bind(wx.EVT_TASKBAR_LEFT_UP, self.OnTaskBarClick)
|
||||
self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.OnTaskBarClick)
|
||||
|
||||
self.Bind(wx.EVT_MENU, self.onOpen, id = self.TBMENU_OPEN)
|
||||
self.Bind(wx.EVT_MENU, self.onSettings, id = self.TBMENU_SETTINGS)
|
||||
self.Bind(wx.EVT_MENU, self.onTaskBarClose, id = self.TBMENU_EXIT)
|
||||
|
||||
def OnTaskBarClick(self, evt):
|
||||
menu = self.CreatePopupMenu()
|
||||
self.PopupMenu(menu)
|
||||
menu.Destroy()
|
||||
|
||||
def enable(self):
|
||||
self.enabled = True
|
||||
|
||||
if self.menu:
|
||||
self.open_menu.Enable(True)
|
||||
self.setting_menu.Enable(True)
|
||||
|
||||
self.open_menu.SetText('Open')
|
||||
|
||||
def CreatePopupMenu(self):
|
||||
|
||||
if not self.menu:
|
||||
self.menu = wx.Menu()
|
||||
self.open_menu = self.menu.Append(self.TBMENU_OPEN, 'Open')
|
||||
self.setting_menu = self.menu.Append(self.TBMENU_SETTINGS, 'About')
|
||||
self.exit_menu = self.menu.Append(self.TBMENU_EXIT, 'Quit')
|
||||
|
||||
if not self.enabled:
|
||||
self.open_menu.Enable(False)
|
||||
self.setting_menu.Enable(False)
|
||||
|
||||
self.open_menu.SetText('Loading...')
|
||||
|
||||
return self.menu
|
||||
|
||||
def onOpen(self, event):
|
||||
url = self.frame.parent.getSetting('base_url')
|
||||
webbrowser.open(url)
|
||||
|
||||
def onSettings(self, event):
|
||||
url = self.frame.parent.getSetting('base_url') + '/settings/'
|
||||
webbrowser.open(url)
|
||||
|
||||
def onTaskBarClose(self, evt):
|
||||
if self.closed:
|
||||
return
|
||||
|
||||
self.closed = True
|
||||
|
||||
self.RemoveIcon()
|
||||
wx.CallAfter(self.frame.Close)
|
||||
|
||||
|
||||
def makeIcon(self, img):
|
||||
if "wxMSW" in wx.PlatformInfo:
|
||||
img = img.Scale(16, 16)
|
||||
elif "wxGTK" in wx.PlatformInfo:
|
||||
img = img.Scale(22, 22)
|
||||
|
||||
icon = wx.IconFromBitmap(img.CopyFromBitmap())
|
||||
return icon
|
||||
|
||||
|
||||
class MainFrame(wx.Frame):
|
||||
|
||||
def __init__(self, parent):
|
||||
wx.Frame.__init__(self, None, style = wx.FRAME_NO_TASKBAR)
|
||||
|
||||
self.parent = parent
|
||||
self.tbicon = TaskBarIcon(self)
|
||||
|
||||
|
||||
class WorkerThread(Thread):
|
||||
|
||||
def __init__(self, desktop):
|
||||
Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self._desktop = desktop
|
||||
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
|
||||
# Get options via arg
|
||||
from couchpotato.runner import getOptions
|
||||
args = ['--quiet']
|
||||
self.options = getOptions(base_path, args)
|
||||
|
||||
# Load settings
|
||||
settings = Env.get('settings')
|
||||
settings.setFile(self.options.config_file)
|
||||
|
||||
# Create data dir if needed
|
||||
self.data_dir = os.path.expanduser(Env.setting('data_dir'))
|
||||
if self.data_dir == '':
|
||||
from couchpotato.core.helpers.variable import getDataDir
|
||||
self.data_dir = getDataDir()
|
||||
|
||||
if not os.path.isdir(self.data_dir):
|
||||
os.makedirs(self.data_dir)
|
||||
|
||||
# Create logging dir
|
||||
self.log_dir = os.path.join(self.data_dir, 'logs');
|
||||
if not os.path.isdir(self.log_dir):
|
||||
os.mkdir(self.log_dir)
|
||||
|
||||
try:
|
||||
from couchpotato.runner import runCouchPotato
|
||||
runCouchPotato(self.options, base_path, args, data_dir = self.data_dir, log_dir = self.log_dir, Env = Env, desktop = self._desktop)
|
||||
except:
|
||||
pass
|
||||
|
||||
self._desktop.frame.Close()
|
||||
|
||||
|
||||
class CouchPotatoApp(wx.App, SoftwareUpdate):
|
||||
|
||||
settings = {}
|
||||
events = {}
|
||||
restart = False
|
||||
closing = False
|
||||
|
||||
def OnInit(self):
|
||||
|
||||
# Updater
|
||||
base_url = 'http://couchpota.to/updates/%s/' % VERSION
|
||||
self.InitUpdates(base_url, base_url + 'changelog.html',
|
||||
icon = wx.Icon('icon.png'))
|
||||
|
||||
self.frame = MainFrame(self)
|
||||
self.frame.Bind(wx.EVT_CLOSE, self.onClose)
|
||||
|
||||
# CouchPotato thread
|
||||
self.worker = WorkerThread(self)
|
||||
|
||||
return True
|
||||
|
||||
def onAppLoad(self):
|
||||
self.frame.tbicon.enable()
|
||||
|
||||
def setSettings(self, settings = {}):
|
||||
self.settings = settings
|
||||
|
||||
def getSetting(self, name):
|
||||
return self.settings.get(name)
|
||||
|
||||
def addEvents(self, events = {}):
|
||||
for name in events.iterkeys():
|
||||
self.events[name] = events[name]
|
||||
|
||||
def onClose(self, event):
|
||||
|
||||
if not self.closing:
|
||||
self.closing = True
|
||||
self.frame.tbicon.onTaskBarClose(event)
|
||||
|
||||
onClose = self.events.get('onClose')
|
||||
onClose(event)
|
||||
|
||||
def afterShutdown(self, restart = False):
|
||||
self.frame.Destroy()
|
||||
self.restart = restart
|
||||
self.ExitMainLoop()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
app = CouchPotatoApp(redirect = False)
|
||||
app.MainLoop()
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
if app.restart:
|
||||
|
||||
def appexe_from_executable(exepath):
|
||||
appdir = appdir_from_executable(exepath)
|
||||
exename = os.path.basename(exepath)
|
||||
|
||||
if sys.platform == "darwin":
|
||||
if os.path.isdir(os.path.join(appdir, "Contents", "MacOS")):
|
||||
return os.path.join(appdir, "Contents", "MacOS", exename)
|
||||
|
||||
return os.path.join(appdir, exename)
|
||||
|
||||
exe = appexe_from_executable(sys.executable)
|
||||
os.chdir(os.path.dirname(exe))
|
||||
|
||||
os.execv(exe, [exe] + sys.argv[1:])
|
||||
51
README.md
51
README.md
@@ -1,4 +1,4 @@
|
||||
CouchPotato Server
|
||||
CouchPotato
|
||||
=====
|
||||
|
||||
CouchPotato (CP) is an automatic NZB and torrent downloader. You can keep a "movies I want"-list and it will search for NZBs/torrents of these movies every X hours.
|
||||
@@ -7,7 +7,7 @@ Once a movie is found, it will send it to SABnzbd or download the torrent to a s
|
||||
|
||||
## Running from Source
|
||||
|
||||
CouchPotatoServer can be run from source. This will use *git* as updater, so make sure that is installed also.
|
||||
CouchPotatoServer can be run from source. This will use *git* as updater, so make sure that is installed.
|
||||
|
||||
Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for more details:
|
||||
|
||||
@@ -17,9 +17,9 @@ Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for
|
||||
* Open up `Git Bash` (or CMD) and go to the folder you want to install CP. Something like Program Files.
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`.
|
||||
* You can now start CP via `CouchPotatoServer\CouchPotato.py` to start
|
||||
* Your browser should open up, but if it doesn't go to: `http://localhost:5050/`
|
||||
* Your browser should open up, but if it doesn't go to `http://localhost:5050/`
|
||||
|
||||
OSx:
|
||||
OS X:
|
||||
|
||||
* If you're on Leopard (10.5) install Python 2.6+: [Python 2.6.5](http://www.python.org/download/releases/2.6.5/)
|
||||
* Install [GIT](http://git-scm.com/)
|
||||
@@ -27,16 +27,43 @@ OSx:
|
||||
* Go to your App folder `cd /Applications`
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||
* Then do `python CouchPotatoServer/CouchPotato.py`
|
||||
* Your browser should open up, but if it doesn't go to: `http://localhost:5050/`
|
||||
* Your browser should open up, but if it doesn't go to `http://localhost:5050/`
|
||||
|
||||
Linux (ubuntu / debian):
|
||||
Linux:
|
||||
|
||||
* Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
||||
* (Ubuntu / Debian) Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
||||
* (Fedora / CentOS) Install [GIT](http://git-scm.com/) with `yum install git`
|
||||
* 'cd' to the folder of your choosing.
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||
* Then do `python CouchPotatoServer/CouchPotato.py` to start
|
||||
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
||||
* Change the paths inside the init script. `sudo nano /etc/init.d/couchpotato`
|
||||
* Make it executable. `sudo chmod +x /etc/init.d/couchpotato`
|
||||
* Add it to defaults. `sudo update-rc.d couchpotato defaults`
|
||||
* Open your browser and go to: `http://localhost:5050/`
|
||||
* (Ubuntu / Debian) To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
||||
* (Ubuntu / Debian) Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato`
|
||||
* (Ubuntu / Debian) Change the paths inside the default file `sudo nano /etc/default/couchpotato`
|
||||
* (Ubuntu / Debian) Make it executable `sudo chmod +x /etc/init.d/couchpotato`
|
||||
* (Ubuntu / Debian) Add it to defaults `sudo update-rc.d couchpotato defaults`
|
||||
* (systemd) To run on boot copy the systemd config `sudo cp CouchPotatoServer/init/couchpotato.fedora.service /etc/systemd/system/couchpotato.service`
|
||||
* (systemd) Update the systemd config file with your user and path to CouchPotato.py
|
||||
* (systemd) Enable it at boot with `sudo systemctl enable couchpotato`
|
||||
* Open your browser and go to `http://localhost:5050/`
|
||||
|
||||
Docker:
|
||||
* You can use [razorgirl's Dockerfile](https://github.com/razorgirl/docker-couchpotato) to quickly build your own isolated app container. It's based on the Linux instructions above. For more info about Docker check out the [official website](https://www.docker.com).
|
||||
|
||||
FreeBSD :
|
||||
|
||||
* Update your ports tree `sudo portsnap fetch update`
|
||||
* Install Python 2.6+ [lang/python](http://www.freshports.org/lang/python) with `cd /usr/ports/lang/python; sudo make install clean`
|
||||
* Install port [databases/py-sqlite3](http://www.freshports.org/databases/py-sqlite3) with `cd /usr/ports/databases/py-sqlite3; sudo make install clean`
|
||||
* Add a symlink to 'python2' `sudo ln -s /usr/local/bin/python /usr/local/bin/python2`
|
||||
* Install port [ftp/libcurl](http://www.freshports.org/ftp/libcurl) with `cd /usr/ports/ftp/fpc-libcurl; sudo make install clean`
|
||||
* Install port [ftp/curl](http://www.freshports.org/ftp/bcurl), deselect 'Asynchronous DNS resolution via c-ares' when prompted as part of config `cd /usr/ports/ftp/fpc-libcurl; sudo make install clean`
|
||||
* Install port [textproc/docbook-xml-450](http://www.freshports.org/textproc/docbook-xml-450) with `cd /usr/ports/textproc/docbook-xml-450; sudo make install clean`
|
||||
* Install port [GIT](http://git-scm.com/) with `cd /usr/ports/devel/git; sudo make install clean`
|
||||
* 'cd' to the folder of your choosing.
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||
* Then run `sudo python CouchPotatoServer/CouchPotato.py` to start for the first time
|
||||
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/freebsd /etc/rc.d/couchpotato`
|
||||
* Change the paths inside the init script. `sudo vim /etc/rc.d/couchpotato`
|
||||
* Make init script executable. `sudo chmod +x /etc/rc.d/couchpotato`
|
||||
* Add init to startup. `sudo echo 'couchpotato_enable="YES"' >> /etc/rc.conf`
|
||||
* Open your browser and go to: `http://server:5050/`
|
||||
|
||||
@@ -1,15 +1,40 @@
|
||||
#So you feel like posting a bug, sending me a pull request or just telling me how awesome I am. No problem!
|
||||
# Contributing to CouchPotatoServer
|
||||
|
||||
##Just make sure you think of the following things:
|
||||
1. [Contributing](#contributing)
|
||||
2. [Submitting an Issue](#issues)
|
||||
3. [Submitting a Pull Request](#pull-requests)
|
||||
|
||||
* Search through the existing (and closed) issues first. See if you can get your answer there.
|
||||
* Double check the result manually, because it could be an external issue.
|
||||
* Post logs! Without seeing what is going on, I can't reproduce the error.
|
||||
* What is the movie + quality you are searching for.
|
||||
* What are you settings for the specific problem.
|
||||
* What providers are you using. (While your logs include these, scanning through hundred of lines of log isn't my hobby).
|
||||
* Give me a short step by step of how to reproduce.
|
||||
* What hardware / OS are you using and what are the limits? NAS can be slow and maybe have a different python installed then when you use CP on OSX or Windows for example.
|
||||
* I will mark issues with the "can't reproduce" tag. Don't go asking me "why closed" if it clearly says the issue in the tag ;)
|
||||
## Contributing
|
||||
Thank you for your interest in contributing to CouchPotato. There are several ways to help out, even if you've never worked on an open source project before.
|
||||
If you've found a bug or want to request a feature, you can report it by [posting an issue](https://github.com/RuudBurger/CouchPotatoServer/issues/new) - be sure to read the [guidelines](#issues) first!
|
||||
If you want to contribute your own work, please read the [guidelines](#pull-requests) for submitting a pull request.
|
||||
Lastly, for anything related to CouchPotato, feel free to stop by the [forum](http://couchpota.to/forum/) or the [#couchpotato](http://webchat.freenode.net/?channels=couchpotato) IRC channel at irc.freenode.net.
|
||||
|
||||
**If I don't get enough info, the change of the issue getting closed is a lot bigger ;)**
|
||||
## Issues
|
||||
Issues are intended for reporting bugs and weird behaviour or suggesting improvements to CouchPotatoServer.
|
||||
Before you submit an issue, please go through the following checklist:
|
||||
* **FILL IN ALL THE FIELDS ASKED FOR**
|
||||
* **POST MORE THAN A SINGLE LINE LOG**, if you do, you'd better have a easy reproducable bug
|
||||
* Search through existing issues (*including closed issues!*) first: you might be able to get your answer there.
|
||||
* Double check your issue manually, because it could be an external issue.
|
||||
* Post logs with your issue: Without seeing what is going on, the developers can't reproduce the error.
|
||||
* Check the logs yourself before submitting them. Obvious errors like permission or HTTP errors are often not related to CouchPotato.
|
||||
* What movie and quality are you searching for?
|
||||
* What are your settings for the specific problem?
|
||||
* What providers are you using? (While your logs include these, scanning through hundreds of lines of logs isn't our hobby)
|
||||
* Post the logs from the *config* directory, please do not copy paste the UI. Use pastebin to store these logs!
|
||||
* Give a short step by step of how to reproduce the error.
|
||||
* What hardware / OS are you using and what are its limitations? For example: NAS can be slow and maybe have a different version of python installed than when you use CP on OS X or Windows.
|
||||
* Your issue might be marked with the "can't reproduce" tag. Don't ask why your issue was closed if it says so in the tag.
|
||||
* If you're running on a NAS (QNAP, Austor, Synology etc.) with pre-made packages, make sure these are set up to use our source repository (RuudBurger/CouchPotatoServer) and nothing else!
|
||||
* Do not "bump" issues with "Any updates on this" or whatever. Yes I've seen it, you don't have to remind me of it. There will be an update when the code is done or I need information. If you feel the need to do so, you'd better have more info on the issue.
|
||||
|
||||
The more relevant information you provide, the more likely that your issue will be resolved.
|
||||
If you don't follow any of the checks above, I'll close the issue. If you are wondering why (and ask) I'll block you from posting new issues and the repo.
|
||||
|
||||
## Pull Requests
|
||||
Pull requests are intended for contributing code or documentation to the project. Before you submit a pull request, consider the following:
|
||||
* Make sure your pull request is made for the *develop* branch (or relevant feature branch).
|
||||
* Have you tested your PR? If not, why?
|
||||
* Does your PR have any limitations I should know of?
|
||||
* Is your PR up-to-date with the branch you're trying to push into?
|
||||
|
||||
@@ -1,83 +1,150 @@
|
||||
from couchpotato.api import api_docs, api_docs_missing
|
||||
from couchpotato.core.auth import requires_auth
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.request import getParams, jsonified
|
||||
from couchpotato.core.helpers.variable import md5
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
from flask.app import Flask
|
||||
from flask.blueprints import Blueprint
|
||||
from flask.globals import request
|
||||
from flask.helpers import url_for
|
||||
from flask.templating import render_template
|
||||
from sqlalchemy.engine import create_engine
|
||||
from sqlalchemy.orm import scoped_session
|
||||
from sqlalchemy.orm.session import sessionmaker
|
||||
from werkzeug.utils import redirect
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from couchpotato.api import api_docs, api_docs_missing, api
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.variable import md5, tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
from tornado import template
|
||||
from tornado.web import RequestHandler, authenticated
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
app = Flask(__name__, static_folder = 'nope')
|
||||
web = Blueprint('web', __name__)
|
||||
views = {}
|
||||
template_loader = template.Loader(os.path.join(os.path.dirname(__file__), 'templates'))
|
||||
|
||||
|
||||
def get_session(engine = None):
|
||||
return Env.getSession(engine)
|
||||
class BaseHandler(RequestHandler):
|
||||
|
||||
def addView(route, func, static = False):
|
||||
web.add_url_rule(route + ('' if static else '/'), endpoint = route if route else 'index', view_func = func)
|
||||
def get_current_user(self):
|
||||
username = Env.setting('username')
|
||||
password = Env.setting('password')
|
||||
|
||||
""" Web view """
|
||||
@web.route('/')
|
||||
@requires_auth
|
||||
if username and password:
|
||||
return self.get_secure_cookie('user')
|
||||
else: # Login when no username or password are set
|
||||
return True
|
||||
|
||||
|
||||
# Main web handler
|
||||
class WebHandler(BaseHandler):
|
||||
|
||||
@authenticated
|
||||
def get(self, route, *args, **kwargs):
|
||||
route = route.strip('/')
|
||||
if not views.get(route):
|
||||
page_not_found(self)
|
||||
return
|
||||
|
||||
try:
|
||||
self.write(views[route]())
|
||||
except:
|
||||
log.error("Failed doing web request '%s': %s", (route, traceback.format_exc()))
|
||||
self.write({'success': False, 'error': 'Failed returning results'})
|
||||
|
||||
|
||||
def addView(route, func):
|
||||
views[route] = func
|
||||
|
||||
|
||||
def get_db():
|
||||
return Env.get('db')
|
||||
|
||||
|
||||
# Web view
|
||||
def index():
|
||||
return render_template('index.html', sep = os.sep, fireEvent = fireEvent, env = Env)
|
||||
return template_loader.load('index.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env)
|
||||
addView('', index)
|
||||
|
||||
""" Api view """
|
||||
@web.route('docs/')
|
||||
@requires_auth
|
||||
|
||||
# API docs
|
||||
def apiDocs():
|
||||
from couchpotato import app
|
||||
routes = []
|
||||
for route, x in sorted(app.view_functions.iteritems()):
|
||||
if route[0:4] == 'api.':
|
||||
routes += [route[4:].replace('::', '.')]
|
||||
routes = list(api.keys())
|
||||
|
||||
if api_docs.get(''):
|
||||
del api_docs['']
|
||||
del api_docs_missing['']
|
||||
return render_template('api.html', fireEvent = fireEvent, routes = sorted(routes), api_docs = api_docs, api_docs_missing = sorted(api_docs_missing))
|
||||
|
||||
@web.route('getkey/')
|
||||
def getApiKey():
|
||||
return template_loader.load('api.html').generate(fireEvent = fireEvent, routes = sorted(routes), api_docs = api_docs, api_docs_missing = sorted(api_docs_missing), Env = Env)
|
||||
|
||||
api = None
|
||||
params = getParams()
|
||||
username = Env.setting('username')
|
||||
password = Env.setting('password')
|
||||
addView('docs', apiDocs)
|
||||
|
||||
if (params.get('u') == md5(username) or not username) and (params.get('p') == password or not password):
|
||||
api = Env.setting('api_key')
|
||||
|
||||
return jsonified({
|
||||
'success': api is not None,
|
||||
'api_key': api
|
||||
})
|
||||
# Database debug manager
|
||||
def databaseManage():
|
||||
return template_loader.load('database.html').generate(fireEvent = fireEvent, Env = Env)
|
||||
|
||||
@app.errorhandler(404)
|
||||
def page_not_found(error):
|
||||
index_url = url_for('web.index')
|
||||
url = request.path[len(index_url):]
|
||||
addView('database', databaseManage)
|
||||
|
||||
|
||||
# Make non basic auth option to get api key
|
||||
class KeyHandler(RequestHandler):
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
api_key = None
|
||||
|
||||
try:
|
||||
username = Env.setting('username')
|
||||
password = Env.setting('password')
|
||||
|
||||
if (self.get_argument('u') == md5(username) or not username) and (self.get_argument('p') == password or not password):
|
||||
api_key = Env.setting('api_key')
|
||||
|
||||
self.write({
|
||||
'success': api_key is not None,
|
||||
'api_key': api_key
|
||||
})
|
||||
except:
|
||||
log.error('Failed doing key request: %s', (traceback.format_exc()))
|
||||
self.write({'success': False, 'error': 'Failed returning results'})
|
||||
|
||||
|
||||
class LoginHandler(BaseHandler):
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
|
||||
if self.get_current_user():
|
||||
self.redirect(Env.get('web_base'))
|
||||
else:
|
||||
self.write(template_loader.load('login.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env))
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
|
||||
api_key = None
|
||||
|
||||
username = Env.setting('username')
|
||||
password = Env.setting('password')
|
||||
|
||||
if (self.get_argument('username') == username or not username) and (md5(self.get_argument('password')) == password or not password):
|
||||
api_key = Env.setting('api_key')
|
||||
|
||||
if api_key:
|
||||
remember_me = tryInt(self.get_argument('remember_me', default = 0))
|
||||
self.set_secure_cookie('user', api_key, expires_days = 30 if remember_me > 0 else None)
|
||||
|
||||
self.redirect(Env.get('web_base'))
|
||||
|
||||
|
||||
class LogoutHandler(BaseHandler):
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
self.clear_cookie('user')
|
||||
self.redirect('%slogin/' % Env.get('web_base'))
|
||||
|
||||
|
||||
def page_not_found(rh):
|
||||
index_url = Env.get('web_base')
|
||||
url = rh.request.uri[len(index_url):]
|
||||
|
||||
if url[:3] != 'api':
|
||||
if request.path != '/':
|
||||
r = request.url.replace(request.path, index_url + '#' + url)
|
||||
else:
|
||||
r = '%s%s' % (request.url.rstrip('/'), index_url + '#' + url)
|
||||
return redirect(r)
|
||||
r = index_url + '#' + url.lstrip('/')
|
||||
rh.redirect(r)
|
||||
else:
|
||||
time.sleep(0.1)
|
||||
return 'Wrong API key used', 404
|
||||
if not Env.get('dev'):
|
||||
time.sleep(0.1)
|
||||
|
||||
rh.set_status(404)
|
||||
rh.write('Wrong API key used')
|
||||
|
||||
@@ -1,50 +1,77 @@
|
||||
from flask.blueprints import Blueprint
|
||||
from flask.helpers import url_for
|
||||
from tornado.web import RequestHandler, asynchronous
|
||||
from werkzeug.utils import redirect
|
||||
from functools import wraps
|
||||
from threading import Thread
|
||||
import json
|
||||
import threading
|
||||
import traceback
|
||||
import urllib
|
||||
|
||||
api = Blueprint('api', __name__)
|
||||
api_docs = {}
|
||||
api_docs_missing = []
|
||||
from couchpotato.core.helpers.request import getParams
|
||||
from couchpotato.core.logger import CPLog
|
||||
from tornado.web import RequestHandler, asynchronous
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
api = {}
|
||||
api_locks = {}
|
||||
api_nonblock = {}
|
||||
|
||||
api_docs = {}
|
||||
api_docs_missing = []
|
||||
|
||||
|
||||
def run_async(func):
|
||||
@wraps(func)
|
||||
def async_func(*args, **kwargs):
|
||||
func_hl = Thread(target = func, args = args, kwargs = kwargs)
|
||||
func_hl.start()
|
||||
|
||||
return async_func
|
||||
|
||||
@run_async
|
||||
def run_handler(route, kwargs, callback = None):
|
||||
try:
|
||||
res = api[route](**kwargs)
|
||||
callback(res, route)
|
||||
except:
|
||||
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
||||
callback({'success': False, 'error': 'Failed returning results'}, route)
|
||||
|
||||
|
||||
# NonBlock API handler
|
||||
class NonBlockHandler(RequestHandler):
|
||||
|
||||
def __init__(self, application, request, **kwargs):
|
||||
cls = NonBlockHandler
|
||||
cls.stoppers = []
|
||||
super(NonBlockHandler, self).__init__(application, request, **kwargs)
|
||||
stopper = None
|
||||
|
||||
@asynchronous
|
||||
def get(self, route):
|
||||
cls = NonBlockHandler
|
||||
def get(self, route, *args, **kwargs):
|
||||
route = route.strip('/')
|
||||
start, stop = api_nonblock[route]
|
||||
cls.stoppers.append(stop)
|
||||
self.stopper = stop
|
||||
|
||||
start(self.onNewMessage, last_id = self.get_argument("last_id", None))
|
||||
start(self.onNewMessage, last_id = self.get_argument('last_id', None))
|
||||
|
||||
def onNewMessage(self, response):
|
||||
if self.request.connection.stream.closed():
|
||||
self.on_connection_close()
|
||||
return
|
||||
self.finish(response)
|
||||
|
||||
try:
|
||||
self.finish(response)
|
||||
except:
|
||||
log.debug('Failed doing nonblock request, probably already closed: %s', (traceback.format_exc()))
|
||||
try: self.finish({'success': False, 'error': 'Failed returning results'})
|
||||
except: pass
|
||||
|
||||
def on_connection_close(self):
|
||||
cls = NonBlockHandler
|
||||
|
||||
for stop in cls.stoppers:
|
||||
stop(self.onNewMessage)
|
||||
if self.stopper:
|
||||
self.stopper(self.onNewMessage)
|
||||
|
||||
cls.stoppers = []
|
||||
self.stopper = None
|
||||
|
||||
|
||||
def addApiView(route, func, static = False, docs = None, **kwargs):
|
||||
api.add_url_rule(route + ('' if static else '/'), endpoint = route.replace('.', '::') if route else 'index', view_func = func, **kwargs)
|
||||
if docs:
|
||||
api_docs[route[4:] if route[0:4] == 'api.' else route] = docs
|
||||
else:
|
||||
api_docs_missing.append(route)
|
||||
|
||||
def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
|
||||
api_nonblock[route] = func_tuple
|
||||
|
||||
@@ -53,9 +80,87 @@ def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
|
||||
else:
|
||||
api_docs_missing.append(route)
|
||||
|
||||
""" Api view """
|
||||
def index():
|
||||
index_url = url_for('web.index')
|
||||
return redirect(index_url + 'docs/')
|
||||
|
||||
addApiView('', index)
|
||||
# Blocking API handler
|
||||
class ApiHandler(RequestHandler):
|
||||
|
||||
@asynchronous
|
||||
def get(self, route, *args, **kwargs):
|
||||
route = route.strip('/')
|
||||
if not api.get(route):
|
||||
self.write('API call doesn\'t seem to exist')
|
||||
self.finish()
|
||||
return
|
||||
|
||||
# Create lock if it doesn't exist
|
||||
if route in api_locks and not api_locks.get(route):
|
||||
api_locks[route] = threading.Lock()
|
||||
|
||||
api_locks[route].acquire()
|
||||
|
||||
try:
|
||||
|
||||
kwargs = {}
|
||||
for x in self.request.arguments:
|
||||
kwargs[x] = urllib.unquote(self.get_argument(x))
|
||||
|
||||
# Split array arguments
|
||||
kwargs = getParams(kwargs)
|
||||
kwargs['_request'] = self
|
||||
|
||||
# Remove t random string
|
||||
try: del kwargs['t']
|
||||
except: pass
|
||||
|
||||
# Add async callback handler
|
||||
run_handler(route, kwargs, callback = self.taskFinished)
|
||||
|
||||
except:
|
||||
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
||||
try:
|
||||
self.write({'success': False, 'error': 'Failed returning results'})
|
||||
self.finish()
|
||||
except:
|
||||
log.error('Failed write error "%s": %s', (route, traceback.format_exc()))
|
||||
|
||||
api_locks[route].release()
|
||||
|
||||
post = get
|
||||
|
||||
def taskFinished(self, result, route):
|
||||
|
||||
if not self.request.connection.stream.closed():
|
||||
try:
|
||||
# Check JSONP callback
|
||||
jsonp_callback = self.get_argument('callback_func', default = None)
|
||||
|
||||
if jsonp_callback:
|
||||
self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')')
|
||||
self.set_header("Content-Type", "text/javascript")
|
||||
self.finish()
|
||||
elif isinstance(result, tuple) and result[0] == 'redirect':
|
||||
self.redirect(result[1])
|
||||
else:
|
||||
self.write(result)
|
||||
self.finish()
|
||||
except UnicodeDecodeError:
|
||||
log.error('Failed proper encode: %s', traceback.format_exc())
|
||||
except:
|
||||
log.debug('Failed doing request, probably already closed: %s', (traceback.format_exc()))
|
||||
try: self.finish({'success': False, 'error': 'Failed returning results'})
|
||||
except: pass
|
||||
|
||||
api_locks[route].release()
|
||||
|
||||
|
||||
def addApiView(route, func, static = False, docs = None, **kwargs):
|
||||
|
||||
if static: func(route)
|
||||
else:
|
||||
api[route] = func
|
||||
api_locks[route] = threading.Lock()
|
||||
|
||||
if docs:
|
||||
api_docs[route[4:] if route[0:4] == 'api.' else route] = docs
|
||||
else:
|
||||
api_docs_missing.append(route)
|
||||
|
||||
300
couchpotato/core/_base/_core.py
Normal file
300
couchpotato/core/_base/_core.py
Normal file
@@ -0,0 +1,300 @@
|
||||
from uuid import uuid4
|
||||
import os
|
||||
import platform
|
||||
import signal
|
||||
import time
|
||||
import traceback
|
||||
import webbrowser
|
||||
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, addEvent
|
||||
from couchpotato.core.helpers.variable import cleanHost, md5, isSubFolder
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from tornado.ioloop import IOLoop
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Core'
|
||||
|
||||
|
||||
class Core(Plugin):
|
||||
|
||||
ignore_restart = [
|
||||
'Core.restart', 'Core.shutdown',
|
||||
'Updater.check', 'Updater.autoUpdate',
|
||||
]
|
||||
shutdown_started = False
|
||||
|
||||
def __init__(self):
|
||||
addApiView('app.shutdown', self.shutdown, docs = {
|
||||
'desc': 'Shutdown the app.',
|
||||
'return': {'type': 'string: shutdown'}
|
||||
})
|
||||
addApiView('app.restart', self.restart, docs = {
|
||||
'desc': 'Restart the app.',
|
||||
'return': {'type': 'string: restart'}
|
||||
})
|
||||
addApiView('app.available', self.available, docs = {
|
||||
'desc': 'Check if app available.'
|
||||
})
|
||||
addApiView('app.version', self.versionView, docs = {
|
||||
'desc': 'Get version.'
|
||||
})
|
||||
|
||||
addEvent('app.shutdown', self.shutdown)
|
||||
addEvent('app.restart', self.restart)
|
||||
addEvent('app.load', self.launchBrowser, priority = 1)
|
||||
addEvent('app.base_url', self.createBaseUrl)
|
||||
addEvent('app.api_url', self.createApiUrl)
|
||||
addEvent('app.version', self.version)
|
||||
addEvent('app.load', self.checkDataDir)
|
||||
addEvent('app.load', self.cleanUpFolders)
|
||||
|
||||
addEvent('setting.save.core.password', self.md5Password)
|
||||
addEvent('setting.save.core.api_key', self.checkApikey)
|
||||
|
||||
# Make sure we can close-down with ctrl+c properly
|
||||
if not Env.get('desktop'):
|
||||
self.signalHandler()
|
||||
|
||||
# Set default urlopen timeout
|
||||
import socket
|
||||
socket.setdefaulttimeout(30)
|
||||
|
||||
def md5Password(self, value):
|
||||
return md5(value) if value else ''
|
||||
|
||||
def checkApikey(self, value):
|
||||
return value if value and len(value) > 3 else uuid4().hex
|
||||
|
||||
def checkDataDir(self):
|
||||
if isSubFolder(Env.get('data_dir'), Env.get('app_dir')):
|
||||
log.error('You should NOT use your CouchPotato directory to save your settings in. Files will get overwritten or be deleted.')
|
||||
|
||||
return True
|
||||
|
||||
def cleanUpFolders(self):
|
||||
only_clean = ['couchpotato', 'libs', 'init']
|
||||
self.deleteEmptyFolder(Env.get('app_dir'), show_error = False, only_clean = only_clean)
|
||||
|
||||
def available(self, **kwargs):
|
||||
return {
|
||||
'success': True
|
||||
}
|
||||
|
||||
def shutdown(self, **kwargs):
|
||||
if self.shutdown_started:
|
||||
return False
|
||||
|
||||
def shutdown():
|
||||
self.initShutdown()
|
||||
|
||||
if IOLoop.current()._closing:
|
||||
shutdown()
|
||||
else:
|
||||
IOLoop.current().add_callback(shutdown)
|
||||
|
||||
return 'shutdown'
|
||||
|
||||
def restart(self, **kwargs):
|
||||
if self.shutdown_started:
|
||||
return False
|
||||
|
||||
def restart():
|
||||
self.initShutdown(restart = True)
|
||||
IOLoop.current().add_callback(restart)
|
||||
|
||||
return 'restarting'
|
||||
|
||||
def initShutdown(self, restart = False):
|
||||
if self.shutdown_started:
|
||||
log.info('Already shutting down')
|
||||
return
|
||||
|
||||
log.info('Shutting down' if not restart else 'Restarting')
|
||||
|
||||
self.shutdown_started = True
|
||||
|
||||
fireEvent('app.do_shutdown', restart = restart)
|
||||
log.debug('Every plugin got shutdown event')
|
||||
|
||||
loop = True
|
||||
starttime = time.time()
|
||||
while loop:
|
||||
log.debug('Asking who is running')
|
||||
still_running = fireEvent('plugin.running', merge = True)
|
||||
log.debug('Still running: %s', still_running)
|
||||
|
||||
if len(still_running) == 0:
|
||||
break
|
||||
elif starttime < time.time() - 30: # Always force break after 30s wait
|
||||
break
|
||||
|
||||
running = list(set(still_running) - set(self.ignore_restart))
|
||||
if len(running) > 0:
|
||||
log.info('Waiting on plugins to finish: %s', running)
|
||||
else:
|
||||
loop = False
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
log.debug('Safe to shutdown/restart')
|
||||
|
||||
loop = IOLoop.current()
|
||||
|
||||
try:
|
||||
if not loop._closing:
|
||||
loop.stop()
|
||||
except RuntimeError:
|
||||
pass
|
||||
except:
|
||||
log.error('Failed shutting down the server: %s', traceback.format_exc())
|
||||
|
||||
fireEvent('app.after_shutdown', restart = restart)
|
||||
|
||||
def launchBrowser(self):
|
||||
|
||||
if Env.setting('launch_browser'):
|
||||
log.info('Launching browser')
|
||||
|
||||
url = self.createBaseUrl()
|
||||
try:
|
||||
webbrowser.open(url, 2, 1)
|
||||
except:
|
||||
try:
|
||||
webbrowser.open(url, 1, 1)
|
||||
except:
|
||||
log.error('Could not launch a browser.')
|
||||
|
||||
def createBaseUrl(self):
|
||||
host = Env.setting('host')
|
||||
if host == '0.0.0.0' or host == '':
|
||||
host = 'localhost'
|
||||
port = Env.setting('port')
|
||||
|
||||
return '%s:%d%s' % (cleanHost(host).rstrip('/'), int(port), Env.get('web_base'))
|
||||
|
||||
def createApiUrl(self):
|
||||
return '%sapi/%s' % (self.createBaseUrl(), Env.setting('api_key'))
|
||||
|
||||
def version(self):
|
||||
ver = fireEvent('updater.info', single = True) or {'version': {}}
|
||||
|
||||
if os.name == 'nt': platf = 'windows'
|
||||
elif 'Darwin' in platform.platform(): platf = 'osx'
|
||||
else: platf = 'linux'
|
||||
|
||||
return '%s - %s-%s - v2' % (platf, ver.get('version').get('type') or 'unknown', ver.get('version').get('hash') or 'unknown')
|
||||
|
||||
def versionView(self, **kwargs):
|
||||
return {
|
||||
'version': self.version()
|
||||
}
|
||||
|
||||
def signalHandler(self):
|
||||
if Env.get('daemonized'): return
|
||||
|
||||
def signal_handler(*args, **kwargs):
|
||||
fireEvent('app.shutdown', single = True)
|
||||
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'core',
|
||||
'order': 1,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'basics',
|
||||
'description': 'Needs restart before changes take effect.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'port',
|
||||
'default': 5050,
|
||||
'type': 'int',
|
||||
'description': 'The port I should listen to.',
|
||||
},
|
||||
{
|
||||
'name': 'ssl_cert',
|
||||
'description': 'Path to SSL server.crt',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'ssl_key',
|
||||
'description': 'Path to SSL server.key',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'launch_browser',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'description': 'Launch the browser when I start.',
|
||||
'wizard': True,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'advanced',
|
||||
'description': "For those who know what they're doing",
|
||||
'advanced': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'api_key',
|
||||
'default': uuid4().hex,
|
||||
'readonly': 1,
|
||||
'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
|
||||
},
|
||||
{
|
||||
'name': 'debug',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Enable debugging.',
|
||||
},
|
||||
{
|
||||
'name': 'development',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
|
||||
},
|
||||
{
|
||||
'name': 'data_dir',
|
||||
'type': 'directory',
|
||||
'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
|
||||
},
|
||||
{
|
||||
'name': 'url_base',
|
||||
'default': '',
|
||||
'description': 'When using mod_proxy use this to append the url with this.',
|
||||
},
|
||||
{
|
||||
'name': 'permission_folder',
|
||||
'default': '0755',
|
||||
'label': 'Folder CHMOD',
|
||||
'description': 'Can be either decimal (493) or octal (leading zero: 0755). <a target="_blank" href="http://permissions-calculator.org/">Calculate the correct value</a>',
|
||||
},
|
||||
{
|
||||
'name': 'permission_file',
|
||||
'default': '0644',
|
||||
'label': 'File CHMOD',
|
||||
'description': 'See Folder CHMOD description, but for files',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,100 +0,0 @@
|
||||
from .main import Core
|
||||
from uuid import uuid4
|
||||
|
||||
def start():
|
||||
return Core()
|
||||
|
||||
config = [{
|
||||
'name': 'core',
|
||||
'order': 1,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'basics',
|
||||
'description': 'Needs restart before changes take effect.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'port',
|
||||
'default': 5050,
|
||||
'type': 'int',
|
||||
'description': 'The port I should listen to.',
|
||||
},
|
||||
{
|
||||
'name': 'ssl_cert',
|
||||
'description': 'Path to SSL server.crt',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'ssl_key',
|
||||
'description': 'Path to SSL server.key',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'launch_browser',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'description': 'Launch the browser when I start.',
|
||||
'wizard': True,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'advanced',
|
||||
'description': "For those who know what they're doing",
|
||||
'advanced': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'api_key',
|
||||
'default': uuid4().hex,
|
||||
'readonly': 1,
|
||||
'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
|
||||
},
|
||||
{
|
||||
'name': 'debug',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Enable debugging.',
|
||||
},
|
||||
{
|
||||
'name': 'development',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Disables some checks/downloads for faster reloading.',
|
||||
},
|
||||
{
|
||||
'name': 'data_dir',
|
||||
'type': 'directory',
|
||||
'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
|
||||
},
|
||||
{
|
||||
'name': 'url_base',
|
||||
'default': '',
|
||||
'description': 'When using mod_proxy use this to append the url with this.',
|
||||
},
|
||||
{
|
||||
'name': 'permission_folder',
|
||||
'default': '0755',
|
||||
'label': 'Folder CHMOD',
|
||||
'description': 'Can be either decimal (493) or octal (leading zero: 0755)',
|
||||
},
|
||||
{
|
||||
'name': 'permission_file',
|
||||
'default': '0755',
|
||||
'label': 'File CHMOD',
|
||||
'description': 'Same as Folder CHMOD but for files',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,185 +0,0 @@
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, addEvent
|
||||
from couchpotato.core.helpers.request import jsonified
|
||||
from couchpotato.core.helpers.variable import cleanHost, md5
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from tornado.ioloop import IOLoop
|
||||
from uuid import uuid4
|
||||
import os
|
||||
import platform
|
||||
import signal
|
||||
import time
|
||||
import traceback
|
||||
import webbrowser
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Core(Plugin):
|
||||
|
||||
ignore_restart = [
|
||||
'Core.restart', 'Core.shutdown',
|
||||
'Updater.check', 'Updater.autoUpdate',
|
||||
]
|
||||
shutdown_started = False
|
||||
|
||||
def __init__(self):
|
||||
addApiView('app.shutdown', self.shutdown, docs = {
|
||||
'desc': 'Shutdown the app.',
|
||||
'return': {'type': 'string: shutdown'}
|
||||
})
|
||||
addApiView('app.restart', self.restart, docs = {
|
||||
'desc': 'Restart the app.',
|
||||
'return': {'type': 'string: restart'}
|
||||
})
|
||||
addApiView('app.available', self.available, docs = {
|
||||
'desc': 'Check if app available.'
|
||||
})
|
||||
addApiView('app.version', self.versionView, docs = {
|
||||
'desc': 'Get version.'
|
||||
})
|
||||
|
||||
addEvent('app.shutdown', self.shutdown)
|
||||
addEvent('app.restart', self.restart)
|
||||
addEvent('app.load', self.launchBrowser, priority = 1)
|
||||
addEvent('app.base_url', self.createBaseUrl)
|
||||
addEvent('app.api_url', self.createApiUrl)
|
||||
addEvent('app.version', self.version)
|
||||
addEvent('app.load', self.checkDataDir)
|
||||
|
||||
addEvent('setting.save.core.password', self.md5Password)
|
||||
addEvent('setting.save.core.api_key', self.checkApikey)
|
||||
|
||||
# Make sure we can close-down with ctrl+c properly
|
||||
if not Env.get('desktop'):
|
||||
self.signalHandler()
|
||||
|
||||
def md5Password(self, value):
|
||||
return md5(value.encode(Env.get('encoding'))) if value else ''
|
||||
|
||||
def checkApikey(self, value):
|
||||
return value if value and len(value) > 3 else uuid4().hex
|
||||
|
||||
def checkDataDir(self):
|
||||
if Env.get('app_dir') in Env.get('data_dir'):
|
||||
log.error('You should NOT use your CouchPotato directory to save your settings in. Files will get overwritten or be deleted.')
|
||||
|
||||
return True
|
||||
|
||||
def available(self):
|
||||
return jsonified({
|
||||
'success': True
|
||||
})
|
||||
|
||||
def shutdown(self):
|
||||
if self.shutdown_started:
|
||||
return False
|
||||
|
||||
def shutdown():
|
||||
self.initShutdown()
|
||||
IOLoop.instance().add_callback(shutdown)
|
||||
|
||||
return 'shutdown'
|
||||
|
||||
def restart(self):
|
||||
if self.shutdown_started:
|
||||
return False
|
||||
|
||||
def restart():
|
||||
self.initShutdown(restart = True)
|
||||
IOLoop.instance().add_callback(restart)
|
||||
|
||||
return 'restarting'
|
||||
|
||||
def initShutdown(self, restart = False):
|
||||
if self.shutdown_started:
|
||||
log.info('Already shutting down')
|
||||
return
|
||||
|
||||
log.info('Shutting down' if not restart else 'Restarting')
|
||||
|
||||
self.shutdown_started = True
|
||||
|
||||
fireEvent('app.do_shutdown')
|
||||
log.debug('Every plugin got shutdown event')
|
||||
|
||||
loop = True
|
||||
starttime = time.time()
|
||||
while loop:
|
||||
log.debug('Asking who is running')
|
||||
still_running = fireEvent('plugin.running', merge = True)
|
||||
log.debug('Still running: %s', still_running)
|
||||
|
||||
if len(still_running) == 0:
|
||||
break
|
||||
elif starttime < time.time() - 30: # Always force break after 30s wait
|
||||
break
|
||||
|
||||
running = list(set(still_running) - set(self.ignore_restart))
|
||||
if len(running) > 0:
|
||||
log.info('Waiting on plugins to finish: %s', running)
|
||||
else:
|
||||
loop = False
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
log.debug('Save to shutdown/restart')
|
||||
|
||||
try:
|
||||
IOLoop.instance().stop()
|
||||
except RuntimeError:
|
||||
pass
|
||||
except:
|
||||
log.error('Failed shutting down the server: %s', traceback.format_exc())
|
||||
|
||||
fireEvent('app.after_shutdown', restart = restart)
|
||||
|
||||
def launchBrowser(self):
|
||||
|
||||
if Env.setting('launch_browser'):
|
||||
log.info('Launching browser')
|
||||
|
||||
url = self.createBaseUrl()
|
||||
try:
|
||||
webbrowser.open(url, 2, 1)
|
||||
except:
|
||||
try:
|
||||
webbrowser.open(url, 1, 1)
|
||||
except:
|
||||
log.error('Could not launch a browser.')
|
||||
|
||||
def createBaseUrl(self):
|
||||
host = Env.setting('host')
|
||||
if host == '0.0.0.0' or host == '':
|
||||
host = 'localhost'
|
||||
port = Env.setting('port')
|
||||
|
||||
return '%s:%d%s' % (cleanHost(host).rstrip('/'), int(port), '/' + Env.setting('url_base').lstrip('/') if Env.setting('url_base') else '')
|
||||
|
||||
def createApiUrl(self):
|
||||
return '%s/api/%s' % (self.createBaseUrl(), Env.setting('api_key'))
|
||||
|
||||
def version(self):
|
||||
ver = fireEvent('updater.info', single = True)
|
||||
|
||||
if os.name == 'nt': platf = 'windows'
|
||||
elif 'Darwin' in platform.platform(): platf = 'osx'
|
||||
else: platf = 'linux'
|
||||
|
||||
return '%s - %s-%s - v2' % (platf, ver.get('version')['type'], ver.get('version')['hash'])
|
||||
|
||||
def versionView(self):
|
||||
return jsonified({
|
||||
'version': self.version()
|
||||
})
|
||||
|
||||
def signalHandler(self):
|
||||
if Env.get('daemonized'): return
|
||||
|
||||
def signal_handler(signal, frame):
|
||||
fireEvent('app.shutdown')
|
||||
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
212
couchpotato/core/_base/clientscript.py
Normal file
212
couchpotato/core/_base/clientscript.py
Normal file
@@ -0,0 +1,212 @@
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import ss
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from minify.cssmin import cssmin
|
||||
from minify.jsmin import jsmin
|
||||
from tornado.web import StaticFileHandler
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ClientScript'
|
||||
|
||||
|
||||
class ClientScript(Plugin):
    """Registers, combines and minifies the webinterface CSS/JS assets.

    Styles and scripts are registered per "position" (e.g. 'front', 'head');
    in non-dev mode everything is concatenated and minified into one file per
    type/position, served from the cache dir under /minified/.
    """

    # Core assets shipped with CouchPotato itself, registered before plugin assets.
    core_static = {
        'style': [
            'style/main.css',
            'style/uniform.generic.css',
            'style/uniform.css',
            'style/settings.css',
        ],
        'script': [
            'scripts/library/mootools.js',
            'scripts/library/mootools_more.js',
            'scripts/library/uniform.js',
            'scripts/library/form_replacement/form_check.js',
            'scripts/library/form_replacement/form_radio.js',
            'scripts/library/form_replacement/form_dropdown.js',
            'scripts/library/form_replacement/form_selectoption.js',
            'scripts/library/question.js',
            'scripts/library/scrollspy.js',
            'scripts/library/spin.js',
            'scripts/library/Array.stableSort.js',
            'scripts/library/async.js',
            'scripts/couchpotato.js',
            'scripts/api.js',
            'scripts/library/history.js',
            'scripts/page.js',
            'scripts/block.js',
            'scripts/block/navigation.js',
            'scripts/block/footer.js',
            'scripts/block/menu.js',
            'scripts/page/home.js',
            'scripts/page/settings.js',
            'scripts/page/about.js',
        ],
    }

    # type -> position -> list of public urls / minified urls / on-disk paths
    urls = {'style': {}, 'script': {}}
    minified = {'style': {}, 'script': {}}
    paths = {'style': {}, 'script': {}}

    # Header written above each file in the combined output: "<path>:<mtime>"
    comment = {
        'style': '/*** %s:%d ***/\n',
        'script': '// %s:%d\n'
    }

    html = {
        'style': '<link rel="stylesheet" href="%s" type="text/css">',
        'script': '<script type="text/javascript" src="%s"></script>',
    }

    def __init__(self):
        addEvent('register_style', self.registerStyle)
        addEvent('register_script', self.registerScript)

        addEvent('clientscript.get_styles', self.getStyles)
        addEvent('clientscript.get_scripts', self.getScripts)

        # Only minify for production; dev mode serves the raw files.
        if not Env.get('dev'):
            addEvent('app.load', self.minify)

        self.addCore()

    def addCore(self):
        """Register the bundled core styles/scripts at the 'front' position."""
        for static_type in self.core_static:
            for rel_path in self.core_static.get(static_type):
                file_path = os.path.join(Env.get('app_dir'), 'couchpotato', 'static', rel_path)
                core_url = 'static/%s' % rel_path

                if static_type == 'script':
                    self.registerScript(core_url, file_path, position = 'front')
                else:
                    self.registerStyle(core_url, file_path, position = 'front')

    def minify(self):
        """Minify all registered files into one combined file per type/position."""

        # Create cache dir
        cache = Env.get('cache_dir')
        parent_dir = os.path.join(cache, 'minified')
        self.makeDir(parent_dir)

        # Serve the minified results as static files.
        Env.get('app').add_handlers(".*$", [(Env.get('web_base') + 'minified/(.*)', StaticFileHandler, {'path': parent_dir})])

        for file_type in ['style', 'script']:
            # Fixed: compare strings with '==', not identity ('is') — identity
            # only worked by accident via CPython string interning.
            ext = 'js' if file_type == 'script' else 'css'
            positions = self.paths.get(file_type, {})
            for position in positions:
                files = positions.get(position)
                self._minify(file_type, files, position, position + '.' + ext)

    def _minify(self, file_type, files, position, out):
        """Minify and concatenate `files` into cache_dir/minified/`out`."""

        cache = Env.get('cache_dir')
        out_name = out
        out = os.path.join(cache, 'minified', out_name)

        raw = []
        for file_path in files:
            # Fixed: close the file handle instead of leaking it.
            handle = open(file_path, 'r')
            try:
                content = handle.read()
            finally:
                handle.close()

            if file_type == 'script':
                data = jsmin(content)
            else:
                data = self.prefix(content)
                data = cssmin(data)
                data = data.replace('../images/', '../static/images/')
                data = data.replace('../fonts/', '../static/fonts/')
                data = data.replace('../../static/', '../static/')  # Replace inside plugins

            raw.append({'file': file_path, 'date': int(os.path.getmtime(file_path)), 'data': data})

        # Combine all files together with some comments
        data = ''
        for r in raw:
            data += self.comment.get(file_type) % (ss(r.get('file')), r.get('date'))
            data += r.get('data') + '\n\n'

        self.createFile(out, data.strip())

        if not self.minified.get(file_type):
            self.minified[file_type] = {}
        if not self.minified[file_type].get(position):
            self.minified[file_type][position] = []

        # The mtime querystring busts browser caches after an update.
        minified_url = 'minified/%s?%s' % (out_name, tryInt(os.path.getmtime(out)))
        self.minified[file_type][position].append(minified_url)

    def getStyles(self, *args, **kwargs):
        return self.get('style', *args, **kwargs)

    def getScripts(self, *args, **kwargs):
        return self.get('script', *args, **kwargs)

    def get(self, type, as_html = False, location = 'head'):
        """Return the registered urls for `type` at `location`.

        Prefers the minified bundle in production; falls back to the raw
        registered urls, and finally to an empty result on error.
        """
        data = '' if as_html else []

        try:
            try:
                if not Env.get('dev'):
                    return self.minified[type][location]
            except:
                # Nothing minified for this type/location yet; use raw urls.
                pass

            return self.urls[type][location]
        except:
            log.error('Error getting minified %s, %s: %s', (type, location, traceback.format_exc()))

        return data

    def registerStyle(self, api_path, file_path, position = 'head'):
        self.register(api_path, file_path, 'style', position)

    def registerScript(self, api_path, file_path, position = 'head'):
        self.register(api_path, file_path, 'script', position)

    def register(self, api_path, file_path, type, location):
        """Remember a style/script public url and its on-disk path for `location`."""

        # mtime querystring for cache busting.
        api_path = '%s?%s' % (api_path, tryInt(os.path.getmtime(file_path)))

        if not self.urls[type].get(location):
            self.urls[type][location] = []
        self.urls[type][location].append(api_path)

        if not self.paths[type].get(location):
            self.paths[type][location] = []
        self.paths[type][location].append(file_path)

    # CSS properties that still need vendor prefixes, and the vendors to emit.
    prefix_properties = ['border-radius', 'transform', 'transition', 'box-shadow']
    prefix_tags = ['ms', 'moz', 'webkit']

    def prefix(self, data):
        """Naively add -ms-/-moz-/-webkit- prefixes to known CSS properties."""

        # Raw string for the regex; behavior identical to the old '(\t|\n|\r)+'.
        trimmed_data = re.sub(r'[\t\n\r]+', '', data)

        new_data = ''
        colon_split = trimmed_data.split(';')
        for splt in colon_split:
            curl_split = splt.strip().split('{')
            for curly in curl_split:
                curly = curly.strip()
                for prop in self.prefix_properties:
                    if curly[:len(prop) + 1] == prop + ':':
                        for tag in self.prefix_tags:
                            new_data += ' -%s-%s; ' % (tag, curly)

                new_data += curly + (' { ' if len(curl_split) > 1 else ' ')

            new_data += '; '

        new_data = new_data.replace('{ ;', '; ').replace('} ;', '} ')

        return new_data
|
||||
@@ -1,6 +0,0 @@
|
||||
from .main import ClientScript


def start():
    """Plugin entry point: build and return the ClientScript handler."""
    return ClientScript()


config = []
|
||||
@@ -1,56 +0,0 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class ClientScript(Plugin):
    """Keeps track of the style/script urls registered for the webinterface."""

    # type -> position -> list of registered paths
    urls = {
        'style': {},
        'script': {},
    }

    html = {
        'style': '<link rel="stylesheet" href="%s" type="text/css">',
        'script': '<script type="text/javascript" src="%s"></script>',
    }

    def __init__(self):
        addEvent('register_style', self.registerStyle)
        addEvent('register_script', self.registerScript)

        addEvent('clientscript.get_styles', self.getStyles)
        addEvent('clientscript.get_scripts', self.getScripts)

    def getStyles(self, *args, **kwargs):
        return self.get('style', *args, **kwargs)

    def getScripts(self, *args, **kwargs):
        return self.get('script', *args, **kwargs)

    def get(self, type, as_html = False, location = 'head'):
        """Return everything registered for `type` at `location`."""
        fallback = '' if as_html else []

        try:
            return self.urls[type][location]
        except Exception as e:
            log.error(e)

        return fallback

    def registerStyle(self, path, position = 'head'):
        self.register(path, 'style', position)

    def registerScript(self, path, position = 'head'):
        self.register(path, 'script', position)

    def register(self, filepath, type, location):
        """Append `filepath` to the list for `type` at `location`."""
        bucket = self.urls[type]
        if not bucket.get(location):
            bucket[location] = []
        bucket[location].append(filepath)
|
||||
@@ -5,6 +5,9 @@ from couchpotato.environment import Env
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Desktop'
|
||||
|
||||
|
||||
if Env.get('desktop'):
|
||||
|
||||
class Desktop(Plugin):
|
||||
@@ -1,6 +0,0 @@
|
||||
from .main import Desktop


def start():
    """Plugin entry point: build and return the Desktop handler."""
    return Desktop()


config = []
|
||||
20
couchpotato/core/_base/downloader/__init__.py
Normal file
20
couchpotato/core/_base/downloader/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from .main import Downloader


def autoload():
    """Plugin entry point: build and return the Downloader loader plugin."""
    return Downloader()


# Settings UI definition: a single list-tab where every downloader
# provider registers itself as an option.
config = [{
    'name': 'download_providers',
    'groups': [
        {
            'label': 'Downloaders',
            'description': 'You can select different downloaders for each type (usenet / torrent)',
            'type': 'list',
            'name': 'download_providers',
            'tab': 'downloaders',
            'options': [],
        },
    ],
}]
|
||||
232
couchpotato/core/_base/downloader/main.py
Normal file
232
couchpotato/core/_base/downloader/main.py
Normal file
@@ -0,0 +1,232 @@
|
||||
from base64 import b32decode, b16encode
|
||||
import random
|
||||
import re
|
||||
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.variable import mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import Provider
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
## This is here to load the static files
class Downloader(Plugin):
    """Placeholder plugin; registering it is what loads the static files."""
|
||||
|
||||
|
||||
class DownloaderBase(Provider):
    """Base class for download clients (usenet/torrent).

    Subclasses override download/getAllDownloadStatus/removeFailed/
    processComplete/pause/test; the underscore-prefixed wrappers here are
    wired to events and enforce the enabled/protocol checks first.
    """

    # Protocols this downloader handles, e.g. ['torrent', 'torrent_magnet'].
    # NOTE(review): class-level mutable list — presumably always replaced (not
    # mutated) by subclasses; verify before changing.
    protocol = []
    http_time_between_calls = 0
    status_support = True

    # Magnet-to-torrent cache services, tried in random order (%s = info hash).
    torrent_sources = [
        'https://zoink.it/torrent/%s.torrent',
        'http://torrage.com/torrent/%s.torrent',
        'https://torcache.net/torrent/%s.torrent',
    ]

    # Public trackers; not used in this block, available to subclasses.
    torrent_trackers = [
        'udp://tracker.istole.it:80/announce',
        'http://tracker.istole.it/announce',
        'udp://fr33domtracker.h33t.com:3310/announce',
        'http://tracker.publicbt.com/announce',
        'udp://tracker.publicbt.com:80/announce',
        'http://tracker.ccc.de/announce',
        'udp://tracker.ccc.de:80/announce',
        'http://exodus.desync.com/announce',
        'http://exodus.desync.com:6969/announce',
        'http://tracker.publichd.eu/announce',
        'udp://tracker.publichd.eu:80/announce',
        'http://tracker.openbittorrent.com/announce',
        'udp://tracker.openbittorrent.com/announce',
        'udp://tracker.openbittorrent.com:80/announce',
        'udp://open.demonii.com:1337/announce',
    ]

    def __init__(self):
        # Wire the guarded wrappers into the event system, plus the per-
        # downloader connection-test API endpoint.
        addEvent('download', self._download)
        addEvent('download.enabled', self._isEnabled)
        addEvent('download.enabled_protocols', self.getEnabledProtocol)
        addEvent('download.status', self._getAllDownloadStatus)
        addEvent('download.remove_failed', self._removeFailed)
        addEvent('download.pause', self._pause)
        addEvent('download.process_complete', self._processComplete)
        addApiView('download.%s.test' % self.getName().lower(), self._test)

    def getEnabledProtocol(self):
        """Return all protocols if any one of them is enabled, else []."""
        for download_protocol in self.protocol:
            if self.isEnabled(manual = True, data = {'protocol': download_protocol}):
                return self.protocol

        return []

    def _download(self, data = None, media = None, manual = False, filedata = None):
        """Event wrapper: run download() only when enabled for this request."""
        if not media: media = {}
        if not data: data = {}

        # None (not False) signals "not applicable" to the event dispatcher.
        if self.isDisabled(manual, data):
            return
        return self.download(data = data, media = media, filedata = filedata)

    def download(self, *args, **kwargs):
        # Override in subclass; False means the download was not started.
        return False

    def _getAllDownloadStatus(self, download_ids):
        """Event wrapper: query status for the ids owned by this downloader."""
        if self.isDisabled(manual = True, data = {}):
            return

        # Only the ids that were handed out by this downloader.
        ids = [download_id['id'] for download_id in download_ids if download_id['downloader'] == self.getName()]

        if ids:
            return self.getAllDownloadStatus(ids)
        else:
            return

    def getAllDownloadStatus(self, ids):
        # Override in subclass; return a ReleaseDownloadList.
        return []

    def _removeFailed(self, release_download):
        """Event wrapper: delete a failed download if configured to do so."""
        if self.isDisabled(manual = True, data = {}):
            return

        if release_download and release_download.get('downloader') == self.getName():
            if self.conf('delete_failed'):
                return self.removeFailed(release_download)

            # Ours, but deletion disabled: explicit False (handled, refused).
            return False
        return

    def removeFailed(self, release_download):
        # Override in subclass.
        return

    def _processComplete(self, release_download):
        """Event wrapper: clean up a completed download if configured to."""
        if self.isDisabled(manual = True, data = {}):
            return

        if release_download and release_download.get('downloader') == self.getName():
            if self.conf('remove_complete', default = False):
                return self.processComplete(release_download = release_download, delete_files = self.conf('delete_files', default = False))

            return False
        return

    def processComplete(self, release_download, delete_files):
        # Override in subclass.
        return

    def isCorrectProtocol(self, protocol):
        """True when this downloader handles `protocol`."""
        is_correct = protocol in self.protocol

        if not is_correct:
            log.debug("Downloader doesn't support this protocol")

        return is_correct

    def magnetToTorrent(self, magnet_link):
        """Fetch a .torrent for a magnet link via public cache services.

        Returns the torrent file data, or False when no source had it.
        """
        torrent_hash = re.findall('urn:btih:([\w]{32,40})', magnet_link)[0].upper()

        # Convert base 32 to hex
        if len(torrent_hash) == 32:
            torrent_hash = b16encode(b32decode(torrent_hash))

        sources = self.torrent_sources
        random.shuffle(sources)

        for source in sources:
            try:
                filedata = self.urlopen(source % torrent_hash, headers = {'Referer': ''}, show_error = False)
                # torcache serves an HTML "file not found" page instead of 404.
                if 'torcache' in filedata and 'file not found' in filedata.lower():
                    continue

                return filedata
            except:
                log.debug('Torrent hash "%s" wasn\'t found on: %s', (torrent_hash, source))

        log.error('Failed converting magnet url to torrent: %s', torrent_hash)
        return False

    def downloadReturnId(self, download_id):
        """Standard dict subclasses return from download() to track the job."""
        return {
            'downloader': self.getName(),
            'status_support': self.status_support,
            'id': download_id
        }

    def isDisabled(self, manual = False, data = None):
        if not data: data = {}

        return not self.isEnabled(manual, data)

    def _isEnabled(self, manual, data = None):
        """Event wrapper: True when enabled, None (not False) otherwise."""
        if not data: data = {}

        if not self.isEnabled(manual, data):
            return
        return True

    def isEnabled(self, manual = False, data = None):
        """Enabled when the plugin is on, the manual-only setting allows this
        call, and the request's protocol (if any) is supported."""
        if not data: data = {}

        # d_manual True = only usable for manual searches.
        d_manual = self.conf('manual', default = False)
        return super(DownloaderBase, self).isEnabled() and \
            (d_manual and manual or d_manual is False) and \
            (not data or self.isCorrectProtocol(data.get('protocol')))

    def _test(self, **kwargs):
        """API view: run the connection test, normalize to a success dict."""
        t = self.test()
        # test() may return (success, message) or a bare bool.
        if isinstance(t, tuple):
            return {'success': t[0], 'msg': t[1]}
        return {'success': t}

    def test(self):
        # Override in subclass.
        return False

    def _pause(self, release_download, pause = True):
        """Event wrapper: pause/resume a download owned by this downloader."""
        if self.isDisabled(manual = True, data = {}):
            return

        if release_download and release_download.get('downloader') == self.getName():
            self.pause(release_download, pause)
            return True

        return False

    def pause(self, release_download, pause):
        # Override in subclass.
        return
|
||||
|
||||
|
||||
class ReleaseDownloadList(list):
    """List of release-download status dicts, normalized on insert."""

    provider = None

    def __init__(self, provider, **kwargs):
        self.provider = provider
        self.kwargs = kwargs

        super(ReleaseDownloadList, self).__init__()

    def extend(self, results):
        # Route every element through append() so defaults get filled in.
        for result in results:
            self.append(result)

    def append(self, result):
        super(ReleaseDownloadList, self).append(self.fillResult(result))

    def fillResult(self, result):
        """Merge `result` over the default status fields."""
        defaults = {
            'id': 0,
            'status': 'busy',
            'downloader': self.provider.getName(),
            'folder': '',
            'files': [],
        }

        return mergeDicts(defaults, result)
|
||||
|
||||
76
couchpotato/core/_base/downloader/static/downloaders.js
Normal file
76
couchpotato/core/_base/downloader/static/downloaders.js
Normal file
@@ -0,0 +1,76 @@
|
||||
// Adds a per-downloader "Test" button to the settings page that calls the
// download.<name>.test API and shows the result inline.
var DownloadersBase = new Class({

	Implements: [Events],

	initialize: function(){
		var self = this;

		// Add test buttons to settings page
		App.addEvent('loadSettings', self.addTestButtons.bind(self));

	},

	// Downloaders setting tests
	addTestButtons: function(){
		var self = this;

		// Wait for the settings page to be built, then decorate every
		// fieldset in the downloaders tab.
		var setting_page = App.getPage('Settings');
		setting_page.addEvent('create', function(){
			Object.each(setting_page.tabs.downloaders.groups, self.addTestButton.bind(self))
		})

	},

	// Inject one test button into a downloader's settings fieldset.
	addTestButton: function(fieldset, plugin_name){
		var self = this,
			button_name = self.testButtonName(fieldset);

		// Skip the generic "Downloaders" group header fieldset.
		if(button_name.contains('Downloaders')) return;

		new Element('.ctrlHolder.test_button').adopt(
			new Element('a.button', {
				'text': button_name,
				'events': {
					'click': function(){
						var button = fieldset.getElement('.test_button .button');
						button.set('text', 'Connecting...');

						Api.request('download.'+plugin_name+'.test', {
							'onComplete': function(json){

								button.set('text', button_name);

								// Show a success/failure label next to the button.
								var message;
								if(json.success){
									message = new Element('span.success', {
										'text': 'Connection successful'
									}).inject(button, 'after')
								}
								else {
									var msg_text = 'Connection failed. Check logs for details.';
									if(json.hasOwnProperty('msg')) msg_text = json.msg;
									message = new Element('span.failed', {
										'text': msg_text
									}).inject(button, 'after')
								}

								// Remove the label again after 3 seconds.
								(function(){
									message.destroy();
								}).delay(3000)
							}
						});
					}
				}
			})
		).inject(fieldset);

	},

	// Derive "Test <Downloader>" from the fieldset's <h2> (text before <span).
	testButtonName: function(fieldset){
		var name = String(fieldset.getElement('h2').innerHTML).substring(0,String(fieldset.getElement('h2').innerHTML).indexOf("<span"));
		return 'Test '+name;
	}

});

// Module-level singleton; instantiating wires up the settings-page hook.
var Downloaders = new DownloadersBase();
|
||||
@@ -5,6 +5,8 @@ from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Scheduler'
|
||||
|
||||
|
||||
class Scheduler(Plugin):
|
||||
|
||||
@@ -16,60 +18,29 @@ class Scheduler(Plugin):
|
||||
|
||||
addEvent('schedule.cron', self.cron)
|
||||
addEvent('schedule.interval', self.interval)
|
||||
addEvent('schedule.start', self.start)
|
||||
addEvent('schedule.restart', self.start)
|
||||
|
||||
addEvent('app.load', self.start)
|
||||
addEvent('schedule.remove', self.remove)
|
||||
addEvent('schedule.queue', self.queue)
|
||||
|
||||
self.sched = Sched(misfire_grace_time = 60)
|
||||
self.sched.start()
|
||||
self.started = True
|
||||
|
||||
def remove(self, identifier):
|
||||
for type in ['interval', 'cron']:
|
||||
for cron_type in ['intervals', 'crons']:
|
||||
try:
|
||||
self.sched.unschedule_job(getattr(self, type)[identifier]['job'])
|
||||
log.debug('%s unscheduled %s', (type.capitalize(), identifier))
|
||||
self.sched.unschedule_job(getattr(self, cron_type)[identifier]['job'])
|
||||
log.debug('%s unscheduled %s', (cron_type.capitalize(), identifier))
|
||||
except:
|
||||
pass
|
||||
|
||||
def start(self):
|
||||
|
||||
# Stop all running
|
||||
self.stop()
|
||||
|
||||
# Crons
|
||||
for identifier in self.crons:
|
||||
try:
|
||||
self.remove(identifier)
|
||||
cron = self.crons[identifier]
|
||||
job = self.sched.add_cron_job(cron['handle'], day = cron['day'], hour = cron['hour'], minute = cron['minute'])
|
||||
cron['job'] = job
|
||||
except ValueError, e:
|
||||
log.error('Failed adding cronjob: %s', e)
|
||||
|
||||
# Intervals
|
||||
for identifier in self.intervals:
|
||||
try:
|
||||
self.remove(identifier)
|
||||
interval = self.intervals[identifier]
|
||||
job = self.sched.add_interval_job(interval['handle'], hours = interval['hours'], minutes = interval['minutes'], seconds = interval['seconds'])
|
||||
interval['job'] = job
|
||||
except ValueError, e:
|
||||
log.error('Failed adding interval cronjob: %s', e)
|
||||
|
||||
# Start it
|
||||
log.debug('Starting scheduler')
|
||||
self.sched.start()
|
||||
self.started = True
|
||||
log.debug('Scheduler started')
|
||||
|
||||
def doShutdown(self):
|
||||
super(Scheduler, self).doShutdown()
|
||||
def doShutdown(self, *args, **kwargs):
|
||||
self.stop()
|
||||
return super(Scheduler, self).doShutdown(*args, **kwargs)
|
||||
|
||||
def stop(self):
|
||||
if self.started:
|
||||
log.debug('Stopping scheduler')
|
||||
self.sched.shutdown()
|
||||
self.sched.shutdown(wait = False)
|
||||
log.debug('Scheduler stopped')
|
||||
self.started = False
|
||||
|
||||
@@ -82,6 +53,7 @@ class Scheduler(Plugin):
|
||||
'day': day,
|
||||
'hour': hour,
|
||||
'minute': minute,
|
||||
'job': self.sched.add_cron_job(handle, day = day, hour = hour, minute = minute)
|
||||
}
|
||||
|
||||
def interval(self, identifier = '', handle = None, hours = 0, minutes = 0, seconds = 0):
|
||||
@@ -93,4 +65,18 @@ class Scheduler(Plugin):
|
||||
'hours': hours,
|
||||
'minutes': minutes,
|
||||
'seconds': seconds,
|
||||
'job': self.sched.add_interval_job(handle, hours = hours, minutes = minutes, seconds = seconds)
|
||||
}
|
||||
|
||||
return True
|
||||
|
||||
def queue(self, handlers = None):
    """Run `handlers` in order, stopping early when the app is shutting down."""
    if not handlers: handlers = []

    for handler in handlers:
        handler()

        # Abort the remaining queue on shutdown.
        if self.shuttingDown():
            break

    return True
|
||||
@@ -1,6 +0,0 @@
|
||||
from .main import Scheduler
|
||||
|
||||
def start():
|
||||
return Scheduler()
|
||||
|
||||
config = []
|
||||
@@ -1,8 +1,10 @@
|
||||
from .main import Updater
|
||||
from couchpotato.environment import Env
|
||||
import os
|
||||
|
||||
def start():
|
||||
from .main import Updater
|
||||
from couchpotato.environment import Env
|
||||
|
||||
|
||||
def autoload():
|
||||
return Updater()
|
||||
|
||||
config = [{
|
||||
|
||||
@@ -1,20 +1,25 @@
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.encoding import ss
|
||||
from couchpotato.core.helpers.request import jsonified
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from datetime import datetime
|
||||
from dateutil.parser import parse
|
||||
from git.repository import LocalRepository
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import time
|
||||
import traceback
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from threading import RLock
|
||||
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import removePyc
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from dateutil.parser import parse
|
||||
from git.repository import LocalRepository
|
||||
import version
|
||||
from six.moves import filter
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
@@ -22,6 +27,7 @@ log = CPLog(__name__)
|
||||
class Updater(Plugin):
|
||||
|
||||
available_notified = False
|
||||
_lock = RLock()
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -32,11 +38,11 @@ class Updater(Plugin):
|
||||
else:
|
||||
self.updater = SourceUpdater()
|
||||
|
||||
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
|
||||
addEvent('app.load', self.autoUpdate)
|
||||
addEvent('app.load', self.logVersion, priority = 10000)
|
||||
addEvent('app.load', self.setCrons)
|
||||
addEvent('updater.info', self.info)
|
||||
|
||||
addApiView('updater.info', self.getInfo, docs = {
|
||||
addApiView('updater.info', self.info, docs = {
|
||||
'desc': 'Get updater information',
|
||||
'return': {
|
||||
'type': 'object',
|
||||
@@ -52,8 +58,21 @@ class Updater(Plugin):
|
||||
'return': {'type': 'see updater.info'}
|
||||
})
|
||||
|
||||
addEvent('setting.save.updater.enabled.after', self.setCrons)
|
||||
|
||||
def logVersion(self):
    """Log the running version and which updater implementation is active."""
    info = self.info()
    version_repr = info.get('version', {}).get('repr', 'UNKNOWN')
    log.info('=== VERSION %s, using %s ===', (version_repr, self.updater.getName()))
|
||||
|
||||
def setCrons(self):
    """(Re)schedule the periodic update check based on the enabled setting."""
    # Drop any existing schedule first, then re-add it only when enabled.
    fireEvent('schedule.remove', 'updater.check', single = True)

    if self.isEnabled():
        fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
        self.autoUpdate() # Check after enabling
|
||||
|
||||
def autoUpdate(self):
|
||||
if self.check() and self.conf('automatic') and not self.updater.update_failed:
|
||||
if self.isEnabled() and self.check() and self.conf('automatic') and not self.updater.update_failed:
|
||||
if self.updater.doUpdate():
|
||||
|
||||
# Notify before restarting
|
||||
@@ -71,31 +90,40 @@ class Updater(Plugin):
|
||||
|
||||
return False
|
||||
|
||||
def check(self):
|
||||
if self.isDisabled():
|
||||
def check(self, force = False):
|
||||
if not force and self.isDisabled():
|
||||
return
|
||||
|
||||
if self.updater.check():
|
||||
if not self.available_notified and self.conf('notification') and not self.conf('automatic'):
|
||||
fireEvent('updater.available', message = 'A new update is available', data = self.updater.info())
|
||||
info = self.updater.info()
|
||||
version_date = datetime.fromtimestamp(info['update_version']['date'])
|
||||
fireEvent('updater.available', message = 'A new update with hash "%s" is available, this version is from %s' % (info['update_version']['hash'], version_date), data = info)
|
||||
self.available_notified = True
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def info(self):
|
||||
return self.updater.info()
|
||||
def info(self, **kwargs):
|
||||
self._lock.acquire()
|
||||
|
||||
def getInfo(self):
|
||||
return jsonified(self.updater.info())
|
||||
info = {}
|
||||
try:
|
||||
info = self.updater.info()
|
||||
except:
|
||||
log.error('Failed getting updater info: %s', traceback.format_exc())
|
||||
|
||||
def checkView(self):
|
||||
return jsonified({
|
||||
'update_available': self.check(),
|
||||
self._lock.release()
|
||||
|
||||
return info
|
||||
|
||||
def checkView(self, **kwargs):
|
||||
return {
|
||||
'update_available': self.check(force = True),
|
||||
'info': self.updater.info()
|
||||
})
|
||||
}
|
||||
|
||||
def doUpdateView(self):
|
||||
def doUpdateView(self, **kwargs):
|
||||
|
||||
self.check()
|
||||
if not self.updater.update_version:
|
||||
@@ -110,9 +138,15 @@ class Updater(Plugin):
|
||||
if not success:
|
||||
success = True
|
||||
|
||||
return jsonified({
|
||||
return {
|
||||
'success': success
|
||||
})
|
||||
}
|
||||
|
||||
def doShutdown(self, *args, **kwargs):
|
||||
if not Env.get('dev') and not Env.get('desktop'):
|
||||
removePyc(Env.get('app_dir'), show_logs = False)
|
||||
|
||||
return super(Updater, self).doShutdown(*args, **kwargs)
|
||||
|
||||
|
||||
class BaseUpdater(Plugin):
|
||||
@@ -125,50 +159,29 @@ class BaseUpdater(Plugin):
|
||||
update_failed = False
|
||||
update_version = None
|
||||
last_check = 0
|
||||
auto_register_static = False
|
||||
|
||||
def doUpdate(self):
|
||||
pass
|
||||
|
||||
def getInfo(self):
|
||||
return jsonified(self.info())
|
||||
|
||||
def info(self):
|
||||
|
||||
current_version = self.getVersion()
|
||||
|
||||
return {
|
||||
'last_check': self.last_check,
|
||||
'update_version': self.update_version,
|
||||
'version': self.getVersion(),
|
||||
'version': current_version,
|
||||
'repo_name': '%s/%s' % (self.repo_user, self.repo_name),
|
||||
'branch': self.branch,
|
||||
'branch': current_version.get('branch', self.branch),
|
||||
}
|
||||
|
||||
def getVersion(self):
|
||||
pass
|
||||
|
||||
def check(self):
|
||||
pass
|
||||
|
||||
def deletePyc(self, only_excess = True):
|
||||
|
||||
for root, dirs, files in os.walk(ss(Env.get('app_dir'))):
|
||||
|
||||
pyc_files = filter(lambda filename: filename.endswith('.pyc'), files)
|
||||
py_files = set(filter(lambda filename: filename.endswith('.py'), files))
|
||||
excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
|
||||
|
||||
for excess_pyc_file in excess_pyc_files:
|
||||
full_path = os.path.join(root, excess_pyc_file)
|
||||
log.debug('Removing old PYC file: %s', full_path)
|
||||
try:
|
||||
os.remove(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
for dir_name in dirs:
|
||||
full_path = os.path.join(root, dir_name)
|
||||
if len(os.listdir(full_path)) == 0:
|
||||
try:
|
||||
os.rmdir(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
|
||||
|
||||
class GitUpdater(BaseUpdater):
|
||||
|
||||
@@ -178,15 +191,9 @@ class GitUpdater(BaseUpdater):
|
||||
def doUpdate(self):
|
||||
|
||||
try:
|
||||
log.debug('Stashing local changes')
|
||||
self.repo.saveStash()
|
||||
|
||||
log.info('Updating to latest version')
|
||||
self.repo.pull()
|
||||
|
||||
# Delete leftover .pyc files
|
||||
self.deletePyc()
|
||||
|
||||
return True
|
||||
except:
|
||||
log.error('Failed updating via GIT: %s', traceback.format_exc())
|
||||
@@ -198,17 +205,28 @@ class GitUpdater(BaseUpdater):
|
||||
def getVersion(self):
|
||||
|
||||
if not self.version:
|
||||
|
||||
hash = None
|
||||
date = None
|
||||
branch = self.branch
|
||||
|
||||
try:
|
||||
output = self.repo.getHead() # Yes, please
|
||||
output = self.repo.getHead() # Yes, please
|
||||
log.debug('Git version output: %s', output.hash)
|
||||
self.version = {
|
||||
'hash': output.hash[:8],
|
||||
'date': output.getDate(),
|
||||
'type': 'git',
|
||||
}
|
||||
except Exception, e:
|
||||
|
||||
hash = output.hash[:8]
|
||||
date = output.getDate()
|
||||
branch = self.repo.getCurrentBranch().name
|
||||
except Exception as e:
|
||||
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
|
||||
return 'No GIT'
|
||||
|
||||
self.version = {
|
||||
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, branch, hash or 'unknown_hash', datetime.fromtimestamp(date) if date else 'unknown_date'),
|
||||
'hash': hash,
|
||||
'date': date,
|
||||
'type': 'git',
|
||||
'branch': branch
|
||||
}
|
||||
|
||||
return self.version
|
||||
|
||||
@@ -229,7 +247,7 @@ class GitUpdater(BaseUpdater):
|
||||
local = self.repo.getHead()
|
||||
remote = branch.getHead()
|
||||
|
||||
log.info('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
|
||||
log.debug('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
|
||||
|
||||
if local.getDate() < remote.getDate():
|
||||
self.update_version = {
|
||||
@@ -242,7 +260,6 @@ class GitUpdater(BaseUpdater):
|
||||
return False
|
||||
|
||||
|
||||
|
||||
class SourceUpdater(BaseUpdater):
|
||||
|
||||
def __init__(self):
|
||||
@@ -255,11 +272,11 @@ class SourceUpdater(BaseUpdater):
|
||||
def doUpdate(self):
|
||||
|
||||
try:
|
||||
url = 'https://github.com/%s/%s/tarball/%s' % (self.repo_user, self.repo_name, self.branch)
|
||||
destination = os.path.join(Env.get('cache_dir'), self.update_version.get('hash') + '.tar.gz')
|
||||
extracted_path = os.path.join(Env.get('cache_dir'), 'temp_updater')
|
||||
download_data = fireEvent('cp.source_url', repo = self.repo_user, repo_name = self.repo_name, branch = self.branch, single = True)
|
||||
destination = os.path.join(Env.get('cache_dir'), self.update_version.get('hash')) + '.' + download_data.get('type')
|
||||
|
||||
destination = fireEvent('file.download', url = url, dest = destination, single = True)
|
||||
extracted_path = os.path.join(Env.get('cache_dir'), 'temp_updater')
|
||||
destination = fireEvent('file.download', url = download_data.get('url'), dest = destination, single = True)
|
||||
|
||||
# Cleanup leftover from last time
|
||||
if os.path.isdir(extracted_path):
|
||||
@@ -267,9 +284,15 @@ class SourceUpdater(BaseUpdater):
|
||||
self.makeDir(extracted_path)
|
||||
|
||||
# Extract
|
||||
tar = tarfile.open(destination)
|
||||
tar.extractall(path = extracted_path)
|
||||
tar.close()
|
||||
if download_data.get('type') == 'zip':
|
||||
zip_file = zipfile.ZipFile(destination)
|
||||
zip_file.extractall(extracted_path)
|
||||
zip_file.close()
|
||||
else:
|
||||
tar = tarfile.open(destination)
|
||||
tar.extractall(path = extracted_path)
|
||||
tar.close()
|
||||
|
||||
os.remove(destination)
|
||||
|
||||
if self.replaceWith(os.path.join(extracted_path, os.listdir(extracted_path)[0])):
|
||||
@@ -286,10 +309,12 @@ class SourceUpdater(BaseUpdater):
|
||||
return False
|
||||
|
||||
def replaceWith(self, path):
|
||||
app_dir = ss(Env.get('app_dir'))
|
||||
path = sp(path)
|
||||
app_dir = Env.get('app_dir')
|
||||
data_dir = Env.get('data_dir')
|
||||
|
||||
# Get list of files we want to overwrite
|
||||
self.deletePyc()
|
||||
removePyc(app_dir)
|
||||
existing_files = []
|
||||
for root, subfiles, filenames in os.walk(app_dir):
|
||||
for filename in filenames:
|
||||
@@ -318,22 +343,24 @@ class SourceUpdater(BaseUpdater):
|
||||
log.error('Failed overwriting file "%s": %s', (tofile, traceback.format_exc()))
|
||||
return False
|
||||
|
||||
if Env.get('app_dir') not in Env.get('data_dir'):
|
||||
for still_exists in existing_files:
|
||||
try:
|
||||
os.remove(still_exists)
|
||||
except:
|
||||
log.error('Failed removing non-used file: %s', traceback.format_exc())
|
||||
for still_exists in existing_files:
|
||||
|
||||
if data_dir in still_exists:
|
||||
continue
|
||||
|
||||
try:
|
||||
os.remove(still_exists)
|
||||
except:
|
||||
log.error('Failed removing non-used file: %s', traceback.format_exc())
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def removeDir(self, path):
|
||||
try:
|
||||
if os.path.isdir(path):
|
||||
shutil.rmtree(path)
|
||||
except OSError, inst:
|
||||
os.chmod(inst.filename, 0777)
|
||||
except OSError as inst:
|
||||
os.chmod(inst.filename, 0o777)
|
||||
self.removeDir(path)
|
||||
|
||||
def getVersion(self):
|
||||
@@ -347,7 +374,8 @@ class SourceUpdater(BaseUpdater):
|
||||
log.debug('Source version output: %s', output)
|
||||
self.version = output
|
||||
self.version['type'] = 'source'
|
||||
except Exception, e:
|
||||
self.version['repr'] = 'source:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.branch, output.get('hash', '')[:8], datetime.fromtimestamp(output.get('date', 0)))
|
||||
except Exception as e:
|
||||
log.error('Failed using source updater. %s', e)
|
||||
return {}
|
||||
|
||||
@@ -377,7 +405,7 @@ class SourceUpdater(BaseUpdater):
|
||||
|
||||
return {
|
||||
'hash': commit['sha'],
|
||||
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
|
||||
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
|
||||
}
|
||||
except:
|
||||
log.error('Failed getting latest request from github: %s', traceback.format_exc())
|
||||
@@ -422,7 +450,7 @@ class DesktopUpdater(BaseUpdater):
|
||||
if latest and latest != current_version.get('hash'):
|
||||
self.update_version = {
|
||||
'hash': latest,
|
||||
'date': None,
|
||||
'date': None,
|
||||
'changelog': self.desktop._changelogURL,
|
||||
}
|
||||
|
||||
@@ -434,6 +462,7 @@ class DesktopUpdater(BaseUpdater):
|
||||
|
||||
def getVersion(self):
|
||||
return {
|
||||
'repr': 'desktop: %s' % self.desktop._esky.active_version,
|
||||
'hash': self.desktop._esky.active_version,
|
||||
'date': None,
|
||||
'type': 'desktop',
|
||||
|
||||
@@ -5,7 +5,7 @@ var UpdaterBase = new Class({
|
||||
initialize: function(){
|
||||
var self = this;
|
||||
|
||||
App.addEvent('load', self.info.bind(self, 1000))
|
||||
App.addEvent('load', self.info.bind(self, 2000));
|
||||
App.addEvent('unload', function(){
|
||||
if(self.timer)
|
||||
clearTimeout(self.timer);
|
||||
@@ -24,7 +24,7 @@ var UpdaterBase = new Class({
|
||||
self.doUpdate();
|
||||
else {
|
||||
App.unBlockPage();
|
||||
App.fireEvent('message', 'No updates available');
|
||||
App.trigger('message', ['No updates available']);
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -66,7 +66,7 @@ var UpdaterBase = new Class({
|
||||
|
||||
var changelog = 'https://github.com/'+data.repo_name+'/compare/'+data.version.hash+'...'+data.branch;
|
||||
if(data.update_version.changelog)
|
||||
changelog = data.update_version.changelog + '#' + data.version.hash+'...'+data.update_version.hash
|
||||
changelog = data.update_version.changelog + '#' + data.version.hash+'...'+data.update_version.hash;
|
||||
|
||||
self.message = new Element('div.message.update').adopt(
|
||||
new Element('span', {
|
||||
@@ -84,7 +84,7 @@ var UpdaterBase = new Class({
|
||||
'click': self.doUpdate.bind(self)
|
||||
}
|
||||
})
|
||||
).inject($(document.body).getElement('.header'))
|
||||
).inject(document.body)
|
||||
},
|
||||
|
||||
doUpdate: function(){
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
from couchpotato.core.helpers.variable import md5
|
||||
from couchpotato.environment import Env
|
||||
from flask import request, Response
|
||||
from functools import wraps
|
||||
|
||||
def check_auth(username, password):
|
||||
return username == Env.setting('username') and password == Env.setting('password')
|
||||
|
||||
def authenticate():
|
||||
return Response(
|
||||
'This is not the page you are looking for. *waves hand*', 401,
|
||||
{'WWW-Authenticate': 'Basic realm="CouchPotato Login"'}
|
||||
)
|
||||
|
||||
def requires_auth(f):
|
||||
|
||||
@wraps(f)
|
||||
def decorated(*args, **kwargs):
|
||||
auth = getattr(request, 'authorization')
|
||||
if Env.setting('username') and Env.setting('password'):
|
||||
if (not auth or not check_auth(auth.username.decode('latin1'), md5(auth.password.decode('latin1').encode(Env.get('encoding'))))):
|
||||
return authenticate()
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return decorated
|
||||
639
couchpotato/core/database.py
Normal file
639
couchpotato/core/database.py
Normal file
@@ -0,0 +1,639 @@
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
from sqlite3 import OperationalError
|
||||
|
||||
from CodernityDB.database import RecordNotFound
|
||||
from CodernityDB.index import IndexException, IndexNotFoundException, IndexConflict
|
||||
from couchpotato import CPLog
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.encoding import toUnicode, sp
|
||||
from couchpotato.core.helpers.variable import getImdb, tryInt, randomString
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Database(object):
|
||||
|
||||
indexes = None
|
||||
db = None
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.indexes = {}
|
||||
|
||||
addApiView('database.list_documents', self.listDocuments)
|
||||
addApiView('database.reindex', self.reindex)
|
||||
addApiView('database.compact', self.compact)
|
||||
addApiView('database.document.update', self.updateDocument)
|
||||
addApiView('database.document.delete', self.deleteDocument)
|
||||
|
||||
addEvent('database.setup.after', self.startup_compact)
|
||||
addEvent('database.setup_index', self.setupIndex)
|
||||
addEvent('database.delete_corrupted', self.deleteCorrupted)
|
||||
|
||||
addEvent('app.migrate', self.migrate)
|
||||
addEvent('app.after_shutdown', self.close)
|
||||
|
||||
def getDB(self):
|
||||
|
||||
if not self.db:
|
||||
from couchpotato import get_db
|
||||
self.db = get_db()
|
||||
|
||||
return self.db
|
||||
|
||||
def close(self, **kwargs):
|
||||
self.getDB().close()
|
||||
|
||||
def setupIndex(self, index_name, klass):
|
||||
|
||||
self.indexes[index_name] = klass
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
# Category index
|
||||
index_instance = klass(db.path, index_name)
|
||||
try:
|
||||
|
||||
# Make sure store and bucket don't exist
|
||||
exists = []
|
||||
for x in ['buck', 'stor']:
|
||||
full_path = os.path.join(db.path, '%s_%s' % (index_name, x))
|
||||
if os.path.exists(full_path):
|
||||
exists.append(full_path)
|
||||
|
||||
if index_name not in db.indexes_names:
|
||||
|
||||
# Remove existing buckets if index isn't there
|
||||
for x in exists:
|
||||
os.unlink(x)
|
||||
|
||||
# Add index (will restore buckets)
|
||||
db.add_index(index_instance)
|
||||
db.reindex_index(index_name)
|
||||
else:
|
||||
# Previous info
|
||||
previous = db.indexes_names[index_name]
|
||||
previous_version = previous._version
|
||||
current_version = klass._version
|
||||
|
||||
# Only edit index if versions are different
|
||||
if previous_version < current_version:
|
||||
log.debug('Index "%s" already exists, updating and reindexing', index_name)
|
||||
db.destroy_index(previous)
|
||||
db.add_index(index_instance)
|
||||
db.reindex_index(index_name)
|
||||
|
||||
except:
|
||||
log.error('Failed adding index %s: %s', (index_name, traceback.format_exc()))
|
||||
|
||||
def deleteDocument(self, **kwargs):
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
try:
|
||||
|
||||
document_id = kwargs.get('_request').get_argument('id')
|
||||
document = db.get('id', document_id)
|
||||
db.delete(document)
|
||||
|
||||
return {
|
||||
'success': True
|
||||
}
|
||||
except:
|
||||
return {
|
||||
'success': False,
|
||||
'error': traceback.format_exc()
|
||||
}
|
||||
|
||||
def updateDocument(self, **kwargs):
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
try:
|
||||
|
||||
document = json.loads(kwargs.get('_request').get_argument('document'))
|
||||
d = db.update(document)
|
||||
document.update(d)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'document': document
|
||||
}
|
||||
except:
|
||||
return {
|
||||
'success': False,
|
||||
'error': traceback.format_exc()
|
||||
}
|
||||
|
||||
def listDocuments(self, **kwargs):
|
||||
db = self.getDB()
|
||||
|
||||
results = {
|
||||
'unknown': []
|
||||
}
|
||||
|
||||
for document in db.all('id'):
|
||||
key = document.get('_t', 'unknown')
|
||||
|
||||
if kwargs.get('show') and key != kwargs.get('show'):
|
||||
continue
|
||||
|
||||
if not results.get(key):
|
||||
results[key] = []
|
||||
results[key].append(document)
|
||||
|
||||
return results
|
||||
|
||||
def deleteCorrupted(self, _id, traceback_error = ''):
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
try:
|
||||
log.debug('Deleted corrupted document "%s": %s', (_id, traceback_error))
|
||||
corrupted = db.get('id', _id, with_storage = False)
|
||||
db._delete_id_index(corrupted.get('_id'), corrupted.get('_rev'), None)
|
||||
except:
|
||||
log.debug('Failed deleting corrupted: %s', traceback.format_exc())
|
||||
|
||||
def reindex(self, **kwargs):
|
||||
|
||||
success = True
|
||||
try:
|
||||
db = self.getDB()
|
||||
db.reindex()
|
||||
except:
|
||||
log.error('Failed index: %s', traceback.format_exc())
|
||||
success = False
|
||||
|
||||
return {
|
||||
'success': success
|
||||
}
|
||||
|
||||
def compact(self, try_repair = True, **kwargs):
|
||||
|
||||
success = False
|
||||
db = self.getDB()
|
||||
|
||||
# Removing left over compact files
|
||||
db_path = sp(db.path)
|
||||
for f in os.listdir(sp(db.path)):
|
||||
for x in ['_compact_buck', '_compact_stor']:
|
||||
if f[-len(x):] == x:
|
||||
os.unlink(os.path.join(db_path, f))
|
||||
|
||||
try:
|
||||
start = time.time()
|
||||
size = float(db.get_db_details().get('size', 0))
|
||||
log.debug('Compacting database, current size: %sMB', round(size/1048576, 2))
|
||||
|
||||
db.compact()
|
||||
new_size = float(db.get_db_details().get('size', 0))
|
||||
log.debug('Done compacting database in %ss, new size: %sMB, saved: %sMB', (round(time.time()-start, 2), round(new_size/1048576, 2), round((size-new_size)/1048576, 2)))
|
||||
success = True
|
||||
except (IndexException, AttributeError):
|
||||
if try_repair:
|
||||
log.error('Something wrong with indexes, trying repair')
|
||||
|
||||
# Remove all indexes
|
||||
old_indexes = self.indexes.keys()
|
||||
for index_name in old_indexes:
|
||||
try:
|
||||
db.destroy_index(index_name)
|
||||
except IndexNotFoundException:
|
||||
pass
|
||||
except:
|
||||
log.error('Failed removing old index %s', index_name)
|
||||
|
||||
# Add them again
|
||||
for index_name in self.indexes:
|
||||
klass = self.indexes[index_name]
|
||||
|
||||
# Category index
|
||||
index_instance = klass(db.path, index_name)
|
||||
try:
|
||||
db.add_index(index_instance)
|
||||
db.reindex_index(index_name)
|
||||
except IndexConflict:
|
||||
pass
|
||||
except:
|
||||
log.error('Failed adding index %s', index_name)
|
||||
raise
|
||||
|
||||
self.compact(try_repair = False)
|
||||
else:
|
||||
log.error('Failed compact: %s', traceback.format_exc())
|
||||
|
||||
except:
|
||||
log.error('Failed compact: %s', traceback.format_exc())
|
||||
|
||||
return {
|
||||
'success': success
|
||||
}
|
||||
|
||||
# Compact on start
|
||||
def startup_compact(self):
|
||||
from couchpotato import Env
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
# Try fix for migration failures on desktop
|
||||
if Env.get('desktop'):
|
||||
try:
|
||||
list(db.all('profile', with_doc = True))
|
||||
except RecordNotFound:
|
||||
|
||||
failed_location = '%s_failed' % db.path
|
||||
old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db.old')
|
||||
|
||||
if not os.path.isdir(failed_location) and os.path.isfile(old_db):
|
||||
log.error('Corrupt database, trying migrate again')
|
||||
db.close()
|
||||
|
||||
# Rename database folder
|
||||
os.rename(db.path, '%s_failed' % db.path)
|
||||
|
||||
# Rename .old database to try another migrate
|
||||
os.rename(old_db, old_db[:-4])
|
||||
|
||||
fireEventAsync('app.restart')
|
||||
else:
|
||||
log.error('Migration failed and couldn\'t recover database. Please report on GitHub, with this message.')
|
||||
db.reindex()
|
||||
|
||||
return
|
||||
|
||||
# Check size and compact if needed
|
||||
size = db.get_db_details().get('size')
|
||||
prop_name = 'last_db_compact'
|
||||
last_check = int(Env.prop(prop_name, default = 0))
|
||||
|
||||
if size > 26214400 and last_check < time.time()-604800: # 25MB / 7 days
|
||||
self.compact()
|
||||
Env.prop(prop_name, value = int(time.time()))
|
||||
|
||||
def migrate(self):
|
||||
|
||||
from couchpotato import Env
|
||||
old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db')
|
||||
if not os.path.isfile(old_db): return
|
||||
|
||||
log.info('=' * 30)
|
||||
log.info('Migrating database, hold on..')
|
||||
time.sleep(1)
|
||||
|
||||
if os.path.isfile(old_db):
|
||||
|
||||
migrate_start = time.time()
|
||||
|
||||
import sqlite3
|
||||
conn = sqlite3.connect(old_db)
|
||||
|
||||
migrate_list = {
|
||||
'category': ['id', 'label', 'order', 'required', 'preferred', 'ignored', 'destination'],
|
||||
'profile': ['id', 'label', 'order', 'core', 'hide'],
|
||||
'profiletype': ['id', 'order', 'finish', 'wait_for', 'quality_id', 'profile_id'],
|
||||
'quality': ['id', 'identifier', 'order', 'size_min', 'size_max'],
|
||||
'movie': ['id', 'last_edit', 'library_id', 'status_id', 'profile_id', 'category_id'],
|
||||
'library': ['id', 'identifier', 'info'],
|
||||
'librarytitle': ['id', 'title', 'default', 'libraries_id'],
|
||||
'library_files__file_library': ['library_id', 'file_id'],
|
||||
'release': ['id', 'identifier', 'movie_id', 'status_id', 'quality_id', 'last_edit'],
|
||||
'releaseinfo': ['id', 'identifier', 'value', 'release_id'],
|
||||
'release_files__file_release': ['release_id', 'file_id'],
|
||||
'status': ['id', 'identifier'],
|
||||
'properties': ['id', 'identifier', 'value'],
|
||||
'file': ['id', 'path', 'type_id'],
|
||||
'filetype': ['identifier', 'id']
|
||||
}
|
||||
|
||||
migrate_data = {}
|
||||
rename_old = False
|
||||
|
||||
try:
|
||||
|
||||
c = conn.cursor()
|
||||
|
||||
for ml in migrate_list:
|
||||
migrate_data[ml] = {}
|
||||
rows = migrate_list[ml]
|
||||
|
||||
try:
|
||||
c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
|
||||
except:
|
||||
# ignore faulty destination_id database
|
||||
if ml == 'category':
|
||||
migrate_data[ml] = {}
|
||||
else:
|
||||
rename_old = True
|
||||
raise
|
||||
|
||||
for p in c.fetchall():
|
||||
columns = {}
|
||||
for row in migrate_list[ml]:
|
||||
columns[row] = p[rows.index(row)]
|
||||
|
||||
if not migrate_data[ml].get(p[0]):
|
||||
migrate_data[ml][p[0]] = columns
|
||||
else:
|
||||
if not isinstance(migrate_data[ml][p[0]], list):
|
||||
migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
|
||||
migrate_data[ml][p[0]].append(columns)
|
||||
|
||||
conn.close()
|
||||
|
||||
log.info('Getting data took %s', time.time() - migrate_start)
|
||||
|
||||
db = self.getDB()
|
||||
if not db.opened:
|
||||
return
|
||||
|
||||
# Use properties
|
||||
properties = migrate_data['properties']
|
||||
log.info('Importing %s properties', len(properties))
|
||||
for x in properties:
|
||||
property = properties[x]
|
||||
Env.prop(property.get('identifier'), property.get('value'))
|
||||
|
||||
# Categories
|
||||
categories = migrate_data.get('category', [])
|
||||
log.info('Importing %s categories', len(categories))
|
||||
category_link = {}
|
||||
for x in categories:
|
||||
c = categories[x]
|
||||
|
||||
new_c = db.insert({
|
||||
'_t': 'category',
|
||||
'order': c.get('order', 999),
|
||||
'label': toUnicode(c.get('label', '')),
|
||||
'ignored': toUnicode(c.get('ignored', '')),
|
||||
'preferred': toUnicode(c.get('preferred', '')),
|
||||
'required': toUnicode(c.get('required', '')),
|
||||
'destination': toUnicode(c.get('destination', '')),
|
||||
})
|
||||
|
||||
category_link[x] = new_c.get('_id')
|
||||
|
||||
# Profiles
|
||||
log.info('Importing profiles')
|
||||
new_profiles = db.all('profile', with_doc = True)
|
||||
new_profiles_by_label = {}
|
||||
for x in new_profiles:
|
||||
|
||||
# Remove default non core profiles
|
||||
if not x['doc'].get('core'):
|
||||
db.delete(x['doc'])
|
||||
else:
|
||||
new_profiles_by_label[x['doc']['label']] = x['_id']
|
||||
|
||||
profiles = migrate_data['profile']
|
||||
profile_link = {}
|
||||
for x in profiles:
|
||||
p = profiles[x]
|
||||
|
||||
exists = new_profiles_by_label.get(p.get('label'))
|
||||
|
||||
# Update existing with order only
|
||||
if exists and p.get('core'):
|
||||
profile = db.get('id', exists)
|
||||
profile['order'] = tryInt(p.get('order'))
|
||||
profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
|
||||
db.update(profile)
|
||||
|
||||
profile_link[x] = profile.get('_id')
|
||||
else:
|
||||
|
||||
new_profile = {
|
||||
'_t': 'profile',
|
||||
'label': p.get('label'),
|
||||
'order': int(p.get('order', 999)),
|
||||
'core': p.get('core', False),
|
||||
'qualities': [],
|
||||
'wait_for': [],
|
||||
'finish': []
|
||||
}
|
||||
|
||||
types = migrate_data['profiletype']
|
||||
for profile_type in types:
|
||||
p_type = types[profile_type]
|
||||
if types[profile_type]['profile_id'] == p['id']:
|
||||
if p_type['quality_id']:
|
||||
new_profile['finish'].append(p_type['finish'])
|
||||
new_profile['wait_for'].append(p_type['wait_for'])
|
||||
new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])
|
||||
|
||||
if len(new_profile['qualities']) > 0:
|
||||
new_profile.update(db.insert(new_profile))
|
||||
profile_link[x] = new_profile.get('_id')
|
||||
else:
|
||||
log.error('Corrupt profile list for "%s", using default.', p.get('label'))
|
||||
|
||||
# Qualities
|
||||
log.info('Importing quality sizes')
|
||||
new_qualities = db.all('quality', with_doc = True)
|
||||
new_qualities_by_identifier = {}
|
||||
for x in new_qualities:
|
||||
new_qualities_by_identifier[x['doc']['identifier']] = x['_id']
|
||||
|
||||
qualities = migrate_data['quality']
|
||||
quality_link = {}
|
||||
for x in qualities:
|
||||
q = qualities[x]
|
||||
q_id = new_qualities_by_identifier[q.get('identifier')]
|
||||
|
||||
quality = db.get('id', q_id)
|
||||
quality['order'] = q.get('order')
|
||||
quality['size_min'] = tryInt(q.get('size_min'))
|
||||
quality['size_max'] = tryInt(q.get('size_max'))
|
||||
db.update(quality)
|
||||
|
||||
quality_link[x] = quality
|
||||
|
||||
# Titles
|
||||
titles = migrate_data['librarytitle']
|
||||
titles_by_library = {}
|
||||
for x in titles:
|
||||
title = titles[x]
|
||||
if title.get('default'):
|
||||
titles_by_library[title.get('libraries_id')] = title.get('title')
|
||||
|
||||
# Releases
|
||||
releaseinfos = migrate_data['releaseinfo']
|
||||
for x in releaseinfos:
|
||||
info = releaseinfos[x]
|
||||
|
||||
# Skip if release doesn't exist for this info
|
||||
if not migrate_data['release'].get(info.get('release_id')):
|
||||
continue
|
||||
|
||||
if not migrate_data['release'][info.get('release_id')].get('info'):
|
||||
migrate_data['release'][info.get('release_id')]['info'] = {}
|
||||
|
||||
migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')
|
||||
|
||||
releases = migrate_data['release']
|
||||
releases_by_media = {}
|
||||
for x in releases:
|
||||
release = releases[x]
|
||||
if not releases_by_media.get(release.get('movie_id')):
|
||||
releases_by_media[release.get('movie_id')] = []
|
||||
|
||||
releases_by_media[release.get('movie_id')].append(release)
|
||||
|
||||
# Type ids
|
||||
types = migrate_data['filetype']
|
||||
type_by_id = {}
|
||||
for t in types:
|
||||
type = types[t]
|
||||
type_by_id[type.get('id')] = type
|
||||
|
||||
# Media
|
||||
log.info('Importing %s media items', len(migrate_data['movie']))
|
||||
statuses = migrate_data['status']
|
||||
libraries = migrate_data['library']
|
||||
library_files = migrate_data['library_files__file_library']
|
||||
releases_files = migrate_data['release_files__file_release']
|
||||
all_files = migrate_data['file']
|
||||
poster_type = migrate_data['filetype']['poster']
|
||||
medias = migrate_data['movie']
|
||||
for x in medias:
|
||||
m = medias[x]
|
||||
|
||||
status = statuses.get(m['status_id']).get('identifier')
|
||||
l = libraries.get(m['library_id'])
|
||||
|
||||
# Only migrate wanted movies, Skip if no identifier present
|
||||
if not l or not getImdb(l.get('identifier')): continue
|
||||
|
||||
profile_id = profile_link.get(m['profile_id'])
|
||||
category_id = category_link.get(m['category_id'])
|
||||
title = titles_by_library.get(m['library_id'])
|
||||
releases = releases_by_media.get(x, [])
|
||||
info = json.loads(l.get('info', ''))
|
||||
|
||||
files = library_files.get(m['library_id'], [])
|
||||
if not isinstance(files, list):
|
||||
files = [files]
|
||||
|
||||
added_media = fireEvent('movie.add', {
|
||||
'info': info,
|
||||
'identifier': l.get('identifier'),
|
||||
'profile_id': profile_id,
|
||||
'category_id': category_id,
|
||||
'title': title
|
||||
}, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)
|
||||
|
||||
if not added_media:
|
||||
log.error('Failed adding media %s: %s', (l.get('identifier'), info))
|
||||
continue
|
||||
|
||||
added_media['files'] = added_media.get('files', {})
|
||||
for f in files:
|
||||
ffile = all_files[f.get('file_id')]
|
||||
|
||||
# Only migrate posters
|
||||
if ffile.get('type_id') == poster_type.get('id'):
|
||||
if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
|
||||
added_media['files']['image_poster'] = [ffile.get('path')]
|
||||
break
|
||||
|
||||
if 'image_poster' in added_media['files']:
|
||||
db.update(added_media)
|
||||
|
||||
for rel in releases:
|
||||
|
||||
empty_info = False
|
||||
if not rel.get('info'):
|
||||
empty_info = True
|
||||
rel['info'] = {}
|
||||
|
||||
quality = quality_link.get(rel.get('quality_id'))
|
||||
if not quality:
|
||||
continue
|
||||
|
||||
release_status = statuses.get(rel.get('status_id')).get('identifier')
|
||||
|
||||
if rel['info'].get('download_id'):
|
||||
status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
|
||||
rel['info']['download_info'] = {
|
||||
'id': rel['info'].get('download_id'),
|
||||
'downloader': rel['info'].get('download_downloader'),
|
||||
'status_support': status_support,
|
||||
}
|
||||
|
||||
# Add status to keys
|
||||
rel['info']['status'] = release_status
|
||||
if not empty_info:
|
||||
fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
|
||||
else:
|
||||
release = {
|
||||
'_t': 'release',
|
||||
'identifier': rel.get('identifier'),
|
||||
'media_id': added_media.get('_id'),
|
||||
'quality': quality.get('identifier'),
|
||||
'status': release_status,
|
||||
'last_edit': int(time.time()),
|
||||
'files': {}
|
||||
}
|
||||
|
||||
# Add downloader info if provided
|
||||
try:
|
||||
release['download_info'] = rel['info']['download_info']
|
||||
del rel['download_info']
|
||||
except:
|
||||
pass
|
||||
|
||||
# Add files
|
||||
release_files = releases_files.get(rel.get('id'), [])
|
||||
if not isinstance(release_files, list):
|
||||
release_files = [release_files]
|
||||
|
||||
if len(release_files) == 0:
|
||||
continue
|
||||
|
||||
for f in release_files:
|
||||
rfile = all_files.get(f.get('file_id'))
|
||||
if not rfile:
|
||||
continue
|
||||
|
||||
file_type = type_by_id.get(rfile.get('type_id')).get('identifier')
|
||||
|
||||
if not release['files'].get(file_type):
|
||||
release['files'][file_type] = []
|
||||
|
||||
release['files'][file_type].append(rfile.get('path'))
|
||||
|
||||
try:
|
||||
rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
|
||||
rls.update(release)
|
||||
db.update(rls)
|
||||
except:
|
||||
db.insert(release)
|
||||
|
||||
log.info('Total migration took %s', time.time() - migrate_start)
|
||||
log.info('=' * 30)
|
||||
|
||||
rename_old = True
|
||||
|
||||
except OperationalError:
|
||||
log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
|
||||
|
||||
rename_old = True
|
||||
except:
|
||||
log.error('Migration failed: %s', traceback.format_exc())
|
||||
|
||||
|
||||
# rename old database
|
||||
if rename_old:
|
||||
random = randomString()
|
||||
log.info('Renaming old database to %s ', '%s.%s_old' % (old_db, random))
|
||||
os.rename(old_db, '%s.%s_old' % (old_db, random))
|
||||
|
||||
if os.path.isfile(old_db + '-wal'):
|
||||
os.rename(old_db + '-wal', '%s-wal.%s_old' % (old_db, random))
|
||||
if os.path.isfile(old_db + '-shm'):
|
||||
os.rename(old_db + '-shm', '%s-shm.%s_old' % (old_db, random))
|
||||
@@ -1,13 +0,0 @@
|
||||
config = {
|
||||
'name': 'download_providers',
|
||||
'groups': [
|
||||
{
|
||||
'label': 'Downloaders',
|
||||
'description': 'You can select different downloaders for each type (usenet / torrent)',
|
||||
'type': 'list',
|
||||
'name': 'download_providers',
|
||||
'tab': 'downloaders',
|
||||
'options': [],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
from base64 import b32decode, b16encode
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.providers.base import Provider
|
||||
import random
|
||||
import re
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Downloader(Provider):
|
||||
|
||||
type = []
|
||||
http_time_between_calls = 0
|
||||
|
||||
torrent_sources = [
|
||||
'http://torrage.com/torrent/%s.torrent',
|
||||
'http://torcache.net/torrent/%s.torrent',
|
||||
]
|
||||
|
||||
torrent_trackers = [
|
||||
'http://tracker.publicbt.com/announce',
|
||||
'udp://tracker.istole.it:80/announce',
|
||||
'udp://fr33domtracker.h33t.com:3310/announce',
|
||||
'http://tracker.istole.it/announce',
|
||||
'http://tracker.ccc.de/announce',
|
||||
'udp://tracker.publicbt.com:80/announce',
|
||||
'udp://tracker.ccc.de:80/announce',
|
||||
'http://exodus.desync.com/announce',
|
||||
'http://exodus.desync.com:6969/announce',
|
||||
'http://tracker.publichd.eu/announce',
|
||||
'http://tracker.openbittorrent.com/announce',
|
||||
]
|
||||
|
||||
def __init__(self):
|
||||
addEvent('download', self._download)
|
||||
addEvent('download.enabled', self._isEnabled)
|
||||
addEvent('download.enabled_types', self.getEnabledDownloadType)
|
||||
addEvent('download.status', self._getAllDownloadStatus)
|
||||
addEvent('download.remove_failed', self._removeFailed)
|
||||
|
||||
def getEnabledDownloadType(self):
|
||||
for download_type in self.type:
|
||||
if self.isEnabled(manual = True, data = {'type': download_type}):
|
||||
return self.type
|
||||
|
||||
return []
|
||||
|
||||
def _download(self, data = {}, movie = {}, manual = False, filedata = None):
|
||||
if self.isDisabled(manual, data):
|
||||
return
|
||||
return self.download(data = data, movie = movie, filedata = filedata)
|
||||
|
||||
def _getAllDownloadStatus(self):
|
||||
if self.isDisabled(manual = True, data = {}):
|
||||
return
|
||||
|
||||
return self.getAllDownloadStatus()
|
||||
|
||||
def getAllDownloadStatus(self):
|
||||
return
|
||||
|
||||
def _removeFailed(self, item):
|
||||
if self.isDisabled(manual = True, data = {}):
|
||||
return
|
||||
|
||||
if self.conf('delete_failed', default = True):
|
||||
return self.removeFailed(item)
|
||||
|
||||
return False
|
||||
|
||||
def removeFailed(self, item):
|
||||
return
|
||||
|
||||
def isCorrectType(self, item_type):
|
||||
is_correct = item_type in self.type
|
||||
|
||||
if not is_correct:
|
||||
log.debug("Downloader doesn't support this type")
|
||||
|
||||
return is_correct
|
||||
|
||||
def magnetToTorrent(self, magnet_link):
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', magnet_link)[0].upper()
|
||||
|
||||
# Convert base 32 to hex
|
||||
if len(torrent_hash) == 32:
|
||||
torrent_hash = b16encode(b32decode(torrent_hash))
|
||||
|
||||
sources = self.torrent_sources
|
||||
random.shuffle(sources)
|
||||
|
||||
for source in sources:
|
||||
try:
|
||||
filedata = self.urlopen(source % torrent_hash, headers = {'Referer': ''}, show_error = False)
|
||||
if 'torcache' in filedata and 'file not found' in filedata.lower():
|
||||
continue
|
||||
|
||||
return filedata
|
||||
except:
|
||||
log.debug('Torrent hash "%s" wasn\'t found on: %s', (torrent_hash, source))
|
||||
|
||||
log.error('Failed converting magnet url to torrent: %s', (torrent_hash))
|
||||
return False
|
||||
|
||||
def isDisabled(self, manual, data):
|
||||
return not self.isEnabled(manual, data)
|
||||
|
||||
def _isEnabled(self, manual, data = {}):
|
||||
if not self.isEnabled(manual, data):
|
||||
return
|
||||
return True
|
||||
|
||||
def isEnabled(self, manual, data = None):
    """Return True when this downloader may handle the request.

    Enabled when the plugin itself is enabled, the 'manual' setting matches
    the kind of search (a manual-only downloader only handles manual
    sends), and the release type is one this downloader supports.

    Fix: replaced the mutable default argument ``data = {}`` with a None
    sentinel; behavior is unchanged.
    """
    if data is None:
        data = {}
    d_manual = self.conf('manual', default = False)
    # NOTE(review): `d_manual is False` requires conf() to return a real
    # bool — a falsy 0 would fail this identity check; confirm with conf().
    return super(Downloader, self).isEnabled() and \
            ((d_manual and manual) or (d_manual is False)) and \
            (not data or self.isCorrectType(data.get('type')))
|
||||
158
couchpotato/core/downloaders/blackhole.py
Normal file
158
couchpotato/core/downloaders/blackhole.py
Normal file
@@ -0,0 +1,158 @@
|
||||
from __future__ import with_statement
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import getDownloadDir
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Blackhole'
|
||||
|
||||
|
||||
class Blackhole(DownloaderBase):
    """Write the .nzb/.torrent file into a watched ("black hole") directory.

    An external download client is expected to pick the file up, so there
    is no status reporting (status_support = False).
    """

    # Release protocols this downloader accepts.
    protocol = ['nzb', 'torrent', 'torrent_magnet']
    # Dropped-off files can't be tracked, so no download status.
    status_support = False

    def download(self, data = None, media = None, filedata = None):
        """Save the release's nzb/torrent file into the configured directory.

        Returns a downloadReturnId('') result on success (including when the
        file already exists), False on any failure.
        """
        if not media: media = {}
        if not data: data = {}

        directory = self.conf('directory')
        if not directory or not os.path.isdir(directory):
            log.error('No directory set for blackhole %s download.', data.get('protocol'))
        else:
            try:
                # Anything under ~50 bytes can't be a real nzb/torrent;
                # resolve magnet links to actual torrent data first.
                if not filedata or len(filedata) < 50:
                    try:
                        if data.get('protocol') == 'torrent_magnet':
                            filedata = self.magnetToTorrent(data.get('url'))
                            data['protocol'] = 'torrent'
                    except:
                        log.error('Failed download torrent via magnet url: %s', traceback.format_exc())

                if not filedata or len(filedata) < 50:
                    log.error('No nzb/torrent available: %s', data.get('url'))
                    return False

                file_name = self.createFileName(data, filedata, media)
                full_path = os.path.join(directory, file_name)

                if self.conf('create_subdir'):
                    try:
                        # Use the extension-less file name as the sub dir.
                        new_path = os.path.splitext(full_path)[0]
                        if not os.path.exists(new_path):
                            os.makedirs(new_path)
                        full_path = os.path.join(new_path, file_name)
                    except:
                        # Fall back to dropping the file directly in `directory`.
                        log.error('Couldnt create sub dir, reverting to old one: %s', full_path)

                try:
                    if not os.path.isfile(full_path):
                        log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
                        with open(full_path, 'wb') as f:
                            f.write(filedata)
                        os.chmod(full_path, Env.getPermission('file'))
                        return self.downloadReturnId('')
                    else:
                        # Already dropped off earlier; treat as success.
                        log.info('File %s already exists.', full_path)
                        return self.downloadReturnId('')

                except:
                    log.error('Failed to download to blackhole %s', traceback.format_exc())
                    pass

            except:
                log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
                return False

        return False

    def test(self):
        """Check that the blackhole directory exists and is writable."""
        directory = self.conf('directory')
        if directory and os.path.isdir(directory):

            test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))

            # Check if folder is writable
            self.createFile(test_file, 'This is a test file')
            if os.path.isfile(test_file):
                os.remove(test_file)
                return True

        return False

    def getEnabledProtocol(self):
        """Restrict the enabled protocols to the 'use_for' setting."""
        if self.conf('use_for') == 'both':
            return super(Blackhole, self).getEnabledProtocol()
        elif self.conf('use_for') == 'torrent':
            return ['torrent', 'torrent_magnet']
        else:
            return ['nzb']

    def isEnabled(self, manual = False, data = None):
        """Enabled only when the base class agrees and 'use_for' covers the
        release protocol ('both' always matches)."""
        if not data: data = {}
        for_protocol = ['both']
        # torrent and torrent_magnet both count as 'torrent' here.
        if data and 'torrent' in data.get('protocol'):
            for_protocol.append('torrent')
        elif data:
            for_protocol.append(data.get('protocol'))

        return super(Blackhole, self).isEnabled(manual, data) and \
                ((self.conf('use_for') in for_protocol))
|
||||
|
||||
|
||||
# Settings shown in the "downloaders" tab for the Black hole downloader.
config = [{
    'name': 'blackhole',
    'order': 30,
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'blackhole',
            'label': 'Black hole',
            'description': 'Download the NZB/Torrent to a specific folder. <em>Note: Seeding and copying/linking features do <strong>not</strong> work with Black hole</em>.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': True,
                    'type': 'enabler',
                    'radio_group': 'nzb,torrent',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Directory where the .nzb (or .torrent) file is saved to.',
                    'default': getDownloadDir()
                },
                {
                    'name': 'use_for',
                    'label': 'Use for',
                    'default': 'both',
                    'type': 'dropdown',
                    'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
                },
                {
                    'name': 'create_subdir',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Create a sub directory when saving the .nzb (or .torrent).',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,48 +0,0 @@
|
||||
from .main import Blackhole
|
||||
from couchpotato.core.helpers.variable import getDownloadDir
|
||||
|
||||
def start():
    """Plugin entry point: instantiate the Blackhole downloader."""
    downloader = Blackhole()
    return downloader
|
||||
|
||||
# Legacy (pre-refactor) settings for the Black hole downloader.
config = [{
    'name': 'blackhole',
    'order': 30,
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'blackhole',
            'label': 'Black hole',
            'description': 'Download the NZB/Torrent to a specific folder.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': True,
                    'type': 'enabler',
                    'radio_group': 'nzb,torrent',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Directory where the .nzb (or .torrent) file is saved to.',
                    'default': getDownloadDir()
                },
                {
                    'name': 'use_for',
                    'label': 'Use for',
                    'default': 'both',
                    'type': 'dropdown',
                    'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,70 +0,0 @@
|
||||
from __future__ import with_statement
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.logger import CPLog
|
||||
import os
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
class Blackhole(Downloader):
    """Legacy black hole downloader: writes the nzb/torrent file into a
    watched directory for an external client to pick up."""

    # Release types this downloader accepts.
    type = ['nzb', 'torrent', 'torrent_magnet']

    def download(self, data = {}, movie = {}, filedata = None):
        """Save the release file into the configured directory.

        Returns True on success (or when the file already exists), False
        otherwise.
        """
        # NOTE(review): mutable default arguments ({}) are shared between
        # calls; they appear to only be read when defaulted — confirm.

        directory = self.conf('directory')
        if not directory or not os.path.isdir(directory):
            log.error('No directory set for blackhole %s download.', data.get('type'))
        else:
            try:
                # Under ~50 bytes can't be a valid nzb/torrent; resolve
                # magnet links to actual torrent data first.
                if not filedata or len(filedata) < 50:
                    try:
                        if data.get('type') == 'torrent_magnet':
                            filedata = self.magnetToTorrent(data.get('url'))
                            data['type'] = 'torrent'
                    except:
                        log.error('Failed download torrent via magnet url: %s', traceback.format_exc())

                if not filedata or len(filedata) < 50:
                    log.error('No nzb/torrent available: %s', data.get('url'))
                    return False

                fullPath = os.path.join(directory, self.createFileName(data, filedata, movie))

                try:
                    if not os.path.isfile(fullPath):
                        log.info('Downloading %s to %s.', (data.get('type'), fullPath))
                        with open(fullPath, 'wb') as f:
                            f.write(filedata)
                        return True
                    else:
                        # Dropped off before; count as success.
                        log.info('File %s already exists.', fullPath)
                        return True

                except:
                    log.error('Failed to download to blackhole %s', traceback.format_exc())
                    pass

            except:
                log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
                return False

        return False

    def getEnabledDownloadType(self):
        """Restrict enabled download types to the 'use_for' setting."""
        if self.conf('use_for') == 'both':
            return super(Blackhole, self).getEnabledDownloadType()
        elif self.conf('use_for') == 'torrent':
            return ['torrent', 'torrent_magnet']
        else:
            return ['nzb']

    def isEnabled(self, manual, data = {}):
        """Enabled when the base class agrees and 'use_for' covers the
        release type ('both' always matches)."""
        for_type = ['both']
        # torrent and torrent_magnet both count as 'torrent'.
        if data and 'torrent' in data.get('type'):
            for_type.append('torrent')
        elif data:
            for_type.append(data.get('type'))

        return super(Blackhole, self).isEnabled(manual, data) and \
                ((self.conf('use_for') in for_type))
|
||||
389
couchpotato/core/downloaders/deluge.py
Normal file
389
couchpotato/core/downloaders/deluge.py
Normal file
@@ -0,0 +1,389 @@
|
||||
from base64 import b64encode, b16encode, b32decode
|
||||
from datetime import timedelta
|
||||
from hashlib import sha1
|
||||
import os.path
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bencode import bencode as benc, bdecode
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, sp
|
||||
from couchpotato.core.helpers.variable import tryFloat, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from synchronousdeluge import DelugeClient
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Deluge'
|
||||
|
||||
|
||||
class Deluge(DownloaderBase):
    """Send torrents to a Deluge daemon over its RPC interface."""

    # Release protocols this downloader accepts.
    protocol = ['torrent', 'torrent_magnet']
    log = CPLog(__name__)
    # Lazily-created DelugeRPC helper, cached on the instance.
    drpc = None

    def connect(self, reconnect = False):
        """Create (or reuse) the DelugeRPC helper from the host config.

        Returns the DelugeRPC instance, or False when the configured host
        has no valid port.
        """
        # Load host from config and split out port.
        host = cleanHost(self.conf('host'), protocol = False).split(':')

        # Force host assignment
        if len(host) == 1:
            host.append(80)

        if not isInt(host[1]):
            log.error('Config properties are not filled in correctly, port is missing.')
            return False

        if not self.drpc or reconnect:
            self.drpc = DelugeRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))

        return self.drpc

    def download(self, data = None, media = None, filedata = None):
        """Add a torrent (file data or magnet link) to Deluge.

        Returns a downloadReturnId result with the remote torrent id, or
        False on failure.
        """
        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" (%s) to Deluge.', (data.get('name'), data.get('protocol')))

        if not self.connect():
            return False

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Set parameters for Deluge
        options = {
            'add_paused': self.conf('paused', default = 0),
            'label': self.conf('label')
        }

        if self.conf('directory'):
            if os.path.isdir(self.conf('directory')):
                options['download_location'] = self.conf('directory')
            else:
                log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))

        if self.conf('completed_directory'):
            if os.path.isdir(self.conf('completed_directory')):
                options['move_completed'] = 1
                options['move_completed_path'] = self.conf('completed_directory')
            else:
                log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))

        if data.get('seed_ratio'):
            options['stop_at_ratio'] = 1
            options['stop_ratio'] = tryFloat(data.get('seed_ratio'))

        # Deluge only has seed time as a global option. Might be added in
        # in a future API release.
        # if data.get('seed_time'):

        # Send request to Deluge
        if data.get('protocol') == 'torrent_magnet':
            remote_torrent = self.drpc.add_torrent_magnet(data.get('url'), options)
        else:
            filename = self.createFileName(data, filedata, media)
            remote_torrent = self.drpc.add_torrent_file(filename, filedata, options)

        if not remote_torrent:
            log.error('Failed sending torrent to Deluge')
            return False

        log.info('Torrent sent to Deluge successfully.')
        return self.downloadReturnId(remote_torrent)

    def test(self):
        """Force a fresh connection and check the daemon responds."""
        if self.connect(True) and self.drpc.test():
            return True
        return False

    def getAllDownloadStatus(self, ids):
        """Return a ReleaseDownloadList for the torrents matching `ids`."""

        log.debug('Checking Deluge download status.')

        if not self.connect():
            return []

        release_downloads = ReleaseDownloadList(self)

        queue = self.drpc.get_alltorrents(ids)

        if not queue:
            log.debug('Nothing in queue or error')
            return []

        for torrent_id in queue:
            torrent = queue[torrent_id]

            if not 'hash' in torrent:
                # When given a list of ids, deluge will return an empty item for a non-existant torrent.
                continue

            log.debug('name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_on_completed'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))

            # Deluge has no easy way to work out if a torrent is stalled or failing.
            #status = 'failed'
            status = 'busy'
            if torrent['is_seed'] and tryFloat(torrent['ratio']) < tryFloat(torrent['stop_ratio']):
                # We have torrent['seeding_time'] to work out what the seeding time is, but we do not
                # have access to the downloader seed_time, as with deluge we have no way to pass it
                # when the torrent is added. So Deluge will only look at the ratio.
                # See above comment in download().
                status = 'seeding'
            elif torrent['is_seed'] and torrent['is_finished'] and torrent['paused'] and torrent['state'] == 'Paused':
                status = 'completed'

            download_dir = sp(torrent['save_path'])
            if torrent['move_on_completed']:
                download_dir = torrent['move_completed_path']

            torrent_files = []
            for file_item in torrent['files']:
                torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))

            release_downloads.append({
                'id': torrent['hash'],
                'name': torrent['name'],
                'status': status,
                'original_status': torrent['state'],
                'seed_ratio': torrent['ratio'],
                'timeleft': str(timedelta(seconds = torrent['eta'])),
                'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
                'files': torrent_files,
            })

        return release_downloads

    def pause(self, release_download, pause = True):
        """Pause (or resume, when pause=False) the torrent for this release."""
        if pause:
            return self.drpc.pause_torrent([release_download['id']])
        else:
            return self.drpc.resume_torrent([release_download['id']])

    def removeFailed(self, release_download):
        """Remove a failed torrent from Deluge, including its data."""
        log.info('%s failed downloading, deleting...', release_download['name'])
        return self.drpc.remove_torrent(release_download['id'], True)

    def processComplete(self, release_download, delete_files = False):
        """Remove a finished torrent, optionally deleting its files."""
        log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
        return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
|
||||
|
||||
|
||||
class DelugeRPC(object):
    """Thin wrapper around synchronousdeluge's DelugeClient.

    Each call opens a fresh connection and disconnects in a ``finally``
    block, so no connection is kept alive between CouchPotato events.
    """

    # Connection defaults; overwritten in __init__.
    host = 'localhost'
    port = 58846
    username = None
    password = None
    client = None

    def __init__(self, host = 'localhost', port = 58846, username = None, password = None):
        """Store connection settings; no connection is made yet."""
        super(DelugeRPC, self).__init__()

        self.host = host
        self.port = port
        self.username = username
        self.password = password

    def connect(self):
        """Open a new client connection (port may arrive as a string)."""
        self.client = DelugeClient()
        self.client.connect(self.host, int(self.port), self.username, self.password)

    def test(self):
        """Return True when a connection can be established."""
        try:
            self.connect()
        except:
            return False
        return True

    def add_torrent_magnet(self, torrent, options):
        """Add a magnet link; returns the torrent id or False."""
        torrent_id = False
        try:
            self.connect()
            torrent_id = self.client.core.add_torrent_magnet(torrent, options).get()
            if not torrent_id:
                # No id back usually means the torrent already existed.
                torrent_id = self._check_torrent(True, torrent)

            if torrent_id and options['label']:
                self.client.label.set_torrent(torrent_id, options['label']).get()
        except Exception as err:
            log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()

        return torrent_id

    def add_torrent_file(self, filename, torrent, options):
        """Add raw torrent file data; returns the torrent id or False."""
        torrent_id = False
        try:
            self.connect()
            # Deluge expects the torrent body base64-encoded.
            torrent_id = self.client.core.add_torrent_file(filename, b64encode(torrent), options).get()
            if not torrent_id:
                torrent_id = self._check_torrent(False, torrent)

            if torrent_id and options['label']:
                self.client.label.set_torrent(torrent_id, options['label']).get()
        except Exception as err:
            log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()

        return torrent_id

    def get_alltorrents(self, ids):
        """Fetch status dicts for the given torrent ids (False on error)."""
        ret = False
        try:
            self.connect()
            ret = self.client.core.get_torrents_status({'id': ids}, ('name', 'hash', 'save_path', 'move_completed_path', 'progress', 'state', 'eta', 'ratio', 'stop_ratio', 'is_seed', 'is_finished', 'paused', 'move_on_completed', 'files')).get()
        except Exception as err:
            log.error('Failed to get all torrents: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
        return ret

    def pause_torrent(self, torrent_ids):
        """Pause the given torrents; errors are logged, not raised."""
        try:
            self.connect()
            self.client.core.pause_torrent(torrent_ids).get()
        except Exception as err:
            log.error('Failed to pause torrent: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()

    def resume_torrent(self, torrent_ids):
        """Resume the given torrents; errors are logged, not raised."""
        try:
            self.connect()
            self.client.core.resume_torrent(torrent_ids).get()
        except Exception as err:
            log.error('Failed to resume torrent: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()

    def remove_torrent(self, torrent_id, remove_local_data):
        """Remove a torrent, optionally with its data; returns Deluge's result."""
        ret = False
        try:
            self.connect()
            ret = self.client.core.remove_torrent(torrent_id, remove_local_data).get()
        except Exception as err:
            log.error('Failed to remove torrent: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
        return ret

    def disconnect(self):
        """Close the current client connection."""
        self.client.disconnect()

    def _check_torrent(self, magnet, torrent):
        """Look up an existing torrent's hash after a failed add.

        Returns the lower-case info-hash when Deluge already knows the
        torrent, False otherwise.
        """
        # Torrent not added, check if it already existed.
        if magnet:
            torrent_hash = re.findall('urn:btih:([\w]{32,40})', torrent)[0]
        else:
            # Recompute the info-hash from the torrent's bencoded info dict.
            info = bdecode(torrent)["info"]
            torrent_hash = sha1(benc(info)).hexdigest()

        # Convert base 32 to hex
        if len(torrent_hash) == 32:
            torrent_hash = b16encode(b32decode(torrent_hash))

        torrent_hash = torrent_hash.lower()
        torrent_check = self.client.core.get_torrent_status(torrent_hash, {}).get()
        if torrent_check['hash']:
            return torrent_hash

        return False
|
||||
|
||||
|
||||
# Settings shown in the "downloaders" tab for the Deluge downloader.
config = [{
    'name': 'deluge',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'deluge',
            'label': 'Deluge',
            'description': 'Use <a href="http://www.deluge-torrent.org/" target="_blank">Deluge</a> to download torrents.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'localhost:58846',
                    'description': 'Hostname with port. Usually <strong>localhost:58846</strong>',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Download to this directory. Keep empty for default Deluge download directory.',
                },
                {
                    'name': 'completed_directory',
                    'type': 'directory',
                    'description': 'Move completed torrent to this directory. Keep empty for default Deluge options.',
                    'advanced': True,
                },
                {
                    'name': 'label',
                    'description': 'Label to add to torrents in the Deluge UI.',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'type': 'bool',
                    'default': True,
                    'advanced': True,
                    'description': 'Remove the torrent from Deluge after it has finished seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
293
couchpotato/core/downloaders/nzbget.py
Normal file
293
couchpotato/core/downloaders/nzbget.py
Normal file
@@ -0,0 +1,293 @@
|
||||
from base64 import standard_b64encode
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import traceback
|
||||
import xmlrpclib
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import ss, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, md5, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NZBGet'
|
||||
|
||||
|
||||
class NZBGet(DownloaderBase):
|
||||
|
||||
protocol = ['nzb']
|
||||
rpc = 'xmlrpc'
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
if not data: data = {}
|
||||
|
||||
if not filedata:
|
||||
log.error('Unable to get NZB file: %s', traceback.format_exc())
|
||||
return False
|
||||
|
||||
log.info('Sending "%s" to NZBGet.', data.get('name'))
|
||||
|
||||
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
|
||||
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
if re.search(r"^0", rpc.version()):
|
||||
xml_response = rpc.append(nzb_name, self.conf('category'), False, standard_b64encode(filedata.strip()))
|
||||
else:
|
||||
xml_response = rpc.append(nzb_name, self.conf('category'), tryInt(self.conf('priority')), False, standard_b64encode(filedata.strip()))
|
||||
|
||||
if xml_response:
|
||||
log.info('NZB sent successfully to NZBGet')
|
||||
nzb_id = md5(data['url']) # about as unique as they come ;)
|
||||
couchpotato_id = "couchpotato=" + nzb_id
|
||||
groups = rpc.listgroups()
|
||||
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
|
||||
confirmed = rpc.editqueue("GroupSetParameter", 0, couchpotato_id, file_id)
|
||||
if confirmed:
|
||||
log.debug('couchpotato parameter set in nzbget download')
|
||||
return self.downloadReturnId(nzb_id)
|
||||
else:
|
||||
log.error('NZBGet could not add %s to the queue.', nzb_name)
|
||||
return False
|
||||
|
||||
def test(self):
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to test connection'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
|
||||
log.debug('Checking NZBGet download status.')
|
||||
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return []
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return []
|
||||
|
||||
# Get NZBGet data
|
||||
try:
|
||||
status = rpc.status()
|
||||
groups = rpc.listgroups()
|
||||
queue = rpc.postqueue(0)
|
||||
history = rpc.history()
|
||||
except:
|
||||
log.error('Failed getting data: %s', traceback.format_exc(1))
|
||||
return []
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
for nzb in groups:
|
||||
try:
|
||||
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
|
||||
except:
|
||||
nzb_id = nzb['NZBID']
|
||||
|
||||
if nzb_id in ids:
|
||||
log.debug('Found %s in NZBGet download queue', nzb['NZBFilename'])
|
||||
timeleft = -1
|
||||
try:
|
||||
if nzb['ActiveDownloads'] > 0 and nzb['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
|
||||
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
|
||||
except:
|
||||
pass
|
||||
|
||||
release_downloads.append({
|
||||
'id': nzb_id,
|
||||
'name': nzb['NZBFilename'],
|
||||
'original_status': 'DOWNLOADING' if nzb['ActiveDownloads'] > 0 else 'QUEUED',
|
||||
# Seems to have no native API function for time left. This will return the time left after NZBGet started downloading this item
|
||||
'timeleft': timeleft,
|
||||
})
|
||||
|
||||
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
|
||||
if nzb['NZBID'] in ids:
|
||||
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
|
||||
release_downloads.append({
|
||||
'id': nzb['NZBID'],
|
||||
'name': nzb['NZBFilename'],
|
||||
'original_status': nzb['Stage'],
|
||||
'timeleft': str(timedelta(seconds = 0)) if not status['PostPaused'] else -1,
|
||||
})
|
||||
|
||||
for nzb in history:
|
||||
try:
|
||||
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
|
||||
except:
|
||||
nzb_id = nzb['NZBID']
|
||||
|
||||
if nzb_id in ids:
|
||||
log.debug('Found %s in NZBGet history. ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
|
||||
release_downloads.append({
|
||||
'id': nzb_id,
|
||||
'name': nzb['NZBFilename'],
|
||||
'status': 'completed' if nzb['ParStatus'] in ['SUCCESS', 'NONE'] and nzb['ScriptStatus'] in ['SUCCESS', 'NONE'] else 'failed',
|
||||
'original_status': nzb['ParStatus'] + ', ' + nzb['ScriptStatus'],
|
||||
'timeleft': str(timedelta(seconds = 0)),
|
||||
'folder': sp(nzb['DestDir'])
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
try:
|
||||
history = rpc.history()
|
||||
nzb_id = None
|
||||
path = None
|
||||
|
||||
for hist in history:
|
||||
for param in hist['Parameters']:
|
||||
if param['Name'] == 'couchpotato' and param['Value'] == release_download['id']:
|
||||
nzb_id = hist['ID']
|
||||
path = hist['DestDir']
|
||||
|
||||
if nzb_id and path and rpc.editqueue('HistoryDelete', 0, "", [tryInt(nzb_id)]):
|
||||
shutil.rmtree(path, True)
|
||||
except:
|
||||
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def getRPC(self):
    """Build an XML-RPC proxy for the configured NZBGet server.

    Host, ssl flag and credentials come from this downloader's settings;
    ``self.rpc`` (defined on the class, outside this view) supplies the
    endpoint path appended to the cleaned host URL.
    """
    url = cleanHost(host = self.conf('host'), ssl = self.conf('ssl'), username = self.conf('username'), password = self.conf('password')) + self.rpc
    return xmlrpclib.ServerProxy(url)
|
||||
|
||||
|
||||
# Settings definition consumed by the CouchPotato settings UI
# (downloaders tab, NZB provider group).
config = [{
    'name': 'nzbget',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'nzbget',
            'label': 'NZBGet',
            'description': 'Use <a href="http://nzbget.sourceforge.net/Main_Page" target="_blank">NZBGet</a> to download NZBs.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'localhost:6789',
                    'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
                },
                {
                    'name': 'ssl',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
                },
                {
                    'name': 'username',
                    'default': 'nzbget',
                    'advanced': True,
                    'description': 'Set a different username to connect. Default: nzbget',
                },
                {
                    'name': 'password',
                    'type': 'password',
                    'description': 'Default NZBGet password is <i>tegbzn6789</i>',
                },
                {
                    'name': 'category',
                    'default': 'Movies',
                    'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
                },
                {
                    'name': 'priority',
                    'advanced': True,
                    'default': '0',
                    'type': 'dropdown',
                    'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100)],
                    'description': 'Only change this if you are using NZBget 9.0 or higher',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,54 +0,0 @@
|
||||
from .main import NZBGet


def start():
    """Plugin entry point: return the NZBGet downloader instance."""
    return NZBGet()

# Legacy settings definition (older, smaller variant of the nzbget config:
# no wizard/ssl/username/delete_failed options).
config = [{
    'name': 'nzbget',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'nzbget',
            'label': 'NZBGet',
            'description': 'Use <a href="http://nzbget.sourceforge.net/Main_Page" target="_blank">NZBGet</a> to download NZBs.',
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'localhost:6789',
                    'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
                },
                {
                    'name': 'password',
                    'type': 'password',
                    'description': 'Default NZBGet password is <i>tegbzn6789</i>',
                },
                {
                    'name': 'category',
                    'default': 'Movies',
                    'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
                },
                {
                    'name': 'priority',
                    'default': '0',
                    'type': 'dropdown',
                    'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100)],
                    'description': 'Only change this if you are using NZBget 9.0 or higher',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,56 +0,0 @@
|
||||
from base64 import standard_b64encode
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
import re
import socket
import traceback
import xmlrpclib

log = CPLog(__name__)


class NZBGet(Downloader):
    """Downloader that submits NZB files to an NZBGet server over XML-RPC."""

    type = ['nzb']

    # XML-RPC endpoint; NZBGet's fixed RPC username is 'nzbget'.
    url = 'http://nzbget:%(password)s@%(host)s/xmlrpc'

    def download(self, data = None, movie = None, filedata = None):
        """Send *filedata* (raw NZB contents) to NZBGet.

        :param data: release info dict (only 'name' is read here).
        :param movie: movie info dict, forwarded to createNzbName().
        :param filedata: raw NZB file contents; required.
        :return: True when NZBGet accepted the NZB, False otherwise.
        """
        # Fresh dicts per call instead of shared mutable defaults
        # (matches the pattern used by the newer downloader code).
        if not data: data = {}
        if not movie: movie = {}

        if not filedata:
            log.error('Unable to get NZB file: %s', traceback.format_exc())
            return False

        log.info('Sending "%s" to NZBGet.', data.get('name'))

        url = self.url % {'host': self.conf('host'), 'password': self.conf('password')}
        nzb_name = ss('%s.nzb' % self.createNzbName(data, movie))

        rpc = xmlrpclib.ServerProxy(url)
        try:
            # writelog doubles as a connectivity/authentication probe.
            if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
                log.info('Successfully connected to NZBGet')
            else:
                log.info('Successfully connected to NZBGet, but unable to send a message')
        except socket.error:
            log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
            return False
        except xmlrpclib.ProtocolError as e:  # 'as' form: Python 2.6+/3 compatible
            if e.errcode == 401:
                log.error('Password is incorrect.')
            else:
                log.error('Protocol Error: %s', e)
            return False

        # Old 0.x releases take no priority argument on append();
        # newer releases (9.0+ per the config description) do.
        if re.search(r"^0", rpc.version()):
            xml_response = rpc.append(nzb_name, self.conf('category'), False, standard_b64encode(filedata.strip()))
        else:
            xml_response = rpc.append(nzb_name, self.conf('category'), tryInt(self.conf('priority')), False, standard_b64encode(filedata.strip()))

        if xml_response:
            log.info('NZB sent successfully to NZBGet')
            return True
        else:
            log.error('NZBGet could not add %s to the queue.', nzb_name)
            return False
|
||||
220
couchpotato/core/downloaders/nzbvortex.py
Normal file
220
couchpotato/core/downloaders/nzbvortex.py
Normal file
@@ -0,0 +1,220 @@
|
||||
from base64 import b64encode
|
||||
import os
|
||||
from uuid import uuid4
|
||||
import hashlib
|
||||
import traceback
|
||||
|
||||
from requests import HTTPError
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, sp
|
||||
from couchpotato.core.helpers.variable import cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NZBVortex'
|
||||
|
||||
|
||||
class NZBVortex(DownloaderBase):
    """Downloader plugin that talks to the NZBVortex JSON API."""

    protocol = ['nzb']
    api_level = None   # cached API level (float), lazily set by getApiLevel()
    session_id = None  # auth session token, set by login()

    def download(self, data = None, media = None, filedata = None):
        """Upload the NZB contents; return a download id or False."""
        if not media: media = {}
        if not data: data = {}

        # Send the nzb
        try:
            # unique_tag makes the filename unique; it then doubles as the
            # download id matched against os.path.basename in
            # getAllDownloadStatus below.
            nzb_filename = self.createFileName(data, filedata, media, unique_tag = True)
            response = self.call('nzb/add', files = {'file': (nzb_filename, filedata, 'application/octet-stream')}, parameters = {
                'name': nzb_filename,
                'groupname': self.conf('group')
            })

            if response and response.get('result', '').lower() == 'ok':
                return self.downloadReturnId(nzb_filename)

            log.error('Something went wrong sending the NZB file. Response: %s', response)
            return False
        except:
            log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
            return False

    def test(self):
        """Connection test used by the settings UI: success == login works."""
        try:
            login_result = self.login()
        except:
            return False

        return login_result

    def getAllDownloadStatus(self, ids):
        """Report status for every queued NZB whose id is in *ids*."""

        raw_statuses = self.call('nzb')

        release_downloads = ReleaseDownloadList(self)
        for nzb in raw_statuses.get('nzbs', []):
            # The NZB file's basename is the CouchPotato download id
            # (see download() above).
            nzb_id = os.path.basename(nzb['nzbFileName'])
            if nzb_id in ids:

                # Check status; state 20 -> done, 21/22/24 -> failed,
                # anything else is treated as still busy.
                status = 'busy'
                if nzb['state'] == 20:
                    status = 'completed'
                elif nzb['state'] in [21, 22, 24]:
                    status = 'failed'

                release_downloads.append({
                    'temp_id': nzb['id'],
                    'id': nzb_id,
                    'name': nzb['uiTitle'],
                    'status': status,
                    'original_status': nzb['state'],
                    'timeleft': -1,
                    'folder': sp(nzb['destinationPath']),
                })

        return release_downloads

    def removeFailed(self, release_download):
        """Cancel a failed download in NZBVortex via its internal id."""

        log.info('%s failed downloading, deleting...', release_download['name'])

        try:
            self.call('nzb/%s/cancel' % release_download['temp_id'])
        except:
            log.error('Failed deleting: %s', traceback.format_exc(0))
            return False

        return True

    def login(self):
        """Authenticate using the nonce/cnonce handshake; True on success."""

        nonce = self.call('auth/nonce', auth = False).get('authNonce')
        cnonce = uuid4().hex
        hashed = b64encode(hashlib.sha256('%s:%s:%s' % (nonce, cnonce, self.conf('api_key'))).digest())

        params = {
            'nonce': nonce,
            'cnonce': cnonce,
            'hash': hashed
        }

        login_data = self.call('auth/login', parameters = params, auth = False)

        # Save for later
        if login_data.get('loginResult') == 'successful':
            self.session_id = login_data.get('sessionID')
            return True

        log.error('Login failed, please check you api-key')
        return False

    def call(self, call, parameters = None, is_repeat = False, auth = True, *args, **kwargs):
        """Perform an API request; on HTTP 403, re-login once and retry.

        Returns the decoded JSON dict, or {} on failure.
        """

        # Login first
        if not parameters: parameters = {}
        if not self.session_id and auth:
            self.login()

        # Always add session id to request
        # NOTE(review): a caller-supplied dict gets 'sessionid' added
        # in place — confirm callers don't reuse their dict elsewhere.
        if self.session_id:
            parameters['sessionid'] = self.session_id

        params = tryUrlencode(parameters)

        url = cleanHost(self.conf('host')) + 'api/' + call

        try:
            data = self.getJsonData('%s%s' % (url, '?' + params if params else ''), *args, cache_timeout = 0, show_error = False, **kwargs)

            if data:
                return data
        except HTTPError as e:
            sc = e.response.status_code
            if sc == 403:
                # Try login and do again
                # NOTE(review): the retry forwards **kwargs but drops *args.
                if not is_repeat:
                    self.login()
                    return self.call(call, parameters = parameters, is_repeat = True, **kwargs)

            log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
        except:
            log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))

        return {}

    def getApiLevel(self):
        """Return the remote API level, cached after the first fetch."""

        if not self.api_level:

            try:
                data = self.call('app/apilevel', auth = False)
                self.api_level = float(data.get('apilevel'))
            except HTTPError as e:
                sc = e.response.status_code
                if sc == 403:
                    log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
                else:
                    log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))

        return self.api_level

    def isEnabled(self, manual = False, data = None):
        # Only usable when the remote API level could be fetched.
        if not data: data = {}
        return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()
|
||||
|
||||
|
||||
# Settings definition consumed by the CouchPotato settings UI
# (downloaders tab, NZB provider group).
config = [{
    'name': 'nzbvortex',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'nzbvortex',
            'label': 'NZBVortex',
            'description': 'Use <a href="http://www.nzbvortex.com/landing/" target="_blank">NZBVortex</a> to download NZBs.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'https://localhost:4321',
                    'description': 'Hostname with port. Usually <strong>https://localhost:4321</strong>',
                },
                {
                    'name': 'api_key',
                    'label': 'Api Key',
                },
                {
                    'name': 'group',
                    'label': 'Group',
                    'description': 'The group CP places the nzb in. Make sure to create it in NZBVortex.',
                },
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,47 +0,0 @@
|
||||
from .main import NZBVortex


def start():
    """Plugin entry point: return the NZBVortex downloader instance."""
    return NZBVortex()

# Legacy settings definition (older, smaller variant of the nzbvortex
# config: no host description, no group option).
config = [{
    'name': 'nzbvortex',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'nzbvortex',
            'label': 'NZBVortex',
            'description': 'Use <a href="http://www.nzbvortex.com/landing/" target="_blank">NZBVortex</a> to download NZBs.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'https://localhost:4321',
                },
                {
                    'name': 'api_key',
                    'label': 'Api Key',
                },
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,170 +0,0 @@
|
||||
from base64 import b64encode
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import tryUrlencode, ss
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
from urllib2 import URLError
from uuid import uuid4
import hashlib
import httplib
import json
import socket
import ssl
import sys
import traceback
import urllib2

log = CPLog(__name__)


class NZBVortex(Downloader):
    """Downloader that talks to the NZBVortex JSON API over HTTPS (TLSv1)."""

    type = ['nzb']
    api_level = None   # cached API level (float), lazily set by getApiLevel()
    session_id = None  # auth session token, set by login()

    def download(self, data = None, movie = None, filedata = None):
        """Upload the NZB file contents; True on success, False on error."""
        # Fresh dicts per call instead of shared mutable defaults.
        if not data: data = {}
        if not movie: movie = {}

        # Send the nzb
        try:
            nzb_filename = self.createFileName(data, filedata, movie)
            self.call('nzb/add', params = {'file': (ss(nzb_filename), filedata)}, multipart = True)

            return True
        except:
            log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
            return False

    def getAllDownloadStatus(self):
        """Report the status of every NZB currently known to NZBVortex."""

        raw_statuses = self.call('nzb')

        statuses = []
        for item in raw_statuses.get('nzbs', []):

            # Check status; state 20 -> done, 21/22/24 -> failed,
            # anything else is treated as still busy.
            status = 'busy'
            if item['state'] == 20:
                status = 'completed'
            elif item['state'] in [21, 22, 24]:
                status = 'failed'

            statuses.append({
                'id': item['id'],
                'name': item['uiTitle'],
                'status': status,
                'original_status': item['state'],
                'timeleft': -1,
            })

        return statuses

    def removeFailed(self, item):
        """Cancel a failed download in NZBVortex."""

        log.info('%s failed downloading, deleting...', item['name'])

        try:
            self.call('nzb/%s/cancel' % item['id'])
        except:
            log.error('Failed deleting: %s', traceback.format_exc(0))
            return False

        return True

    def login(self):
        """Authenticate using the nonce/cnonce handshake; True on success."""

        nonce = self.call('auth/nonce', auth = False).get('authNonce')
        cnonce = uuid4().hex
        hashed = b64encode(hashlib.sha256('%s:%s:%s' % (nonce, cnonce, self.conf('api_key'))).digest())

        params = {
            'nonce': nonce,
            'cnonce': cnonce,
            'hash': hashed
        }

        login_data = self.call('auth/login', parameters = params, auth = False)

        # Save for later
        if login_data.get('loginResult') == 'successful':
            self.session_id = login_data.get('sessionID')
            return True

        log.error('Login failed, please check you api-key')
        return False

    def call(self, call, parameters = None, repeat = False, auth = True, *args, **kwargs):
        """Perform an API request; on HTTP 403, re-login once and retry.

        Returns the decoded JSON dict, or {} on failure.
        """
        # Fresh dict per call: the original 'parameters = {}' default was
        # mutated below, leaking 'sessionid' into the shared default dict.
        if parameters is None:
            parameters = {}

        # Login first
        if not self.session_id and auth:
            self.login()

        # Always add session id to request
        if self.session_id:
            parameters['sessionid'] = self.session_id

        params = tryUrlencode(parameters)

        url = cleanHost(self.conf('host')) + 'api/' + call
        url_opener = urllib2.build_opener(HTTPSHandler())

        try:
            data = self.urlopen('%s?%s' % (url, params), opener = url_opener, *args, **kwargs)

            if data:
                return json.loads(data)
        except URLError as e:  # 'as' form: Python 2.6+/3 compatible
            if hasattr(e, 'code') and e.code == 403:
                # Try login and do again
                if not repeat:
                    self.login()
                    return self.call(call, parameters = parameters, repeat = True, *args, **kwargs)

            log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
        except:
            log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))

        return {}

    def getApiLevel(self):
        """Return the remote API level, cached after the first fetch."""

        if not self.api_level:

            url = cleanHost(self.conf('host')) + 'api/app/apilevel'
            url_opener = urllib2.build_opener(HTTPSHandler())

            try:
                data = self.urlopen(url, opener = url_opener, show_error = False)
                self.api_level = float(json.loads(data).get('apilevel'))
            except URLError as e:
                if hasattr(e, 'code') and e.code == 403:
                    log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
                else:
                    log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))

        return self.api_level

    def isEnabled(self, manual, data):
        # Only usable when the remote API level could be fetched.
        return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()


class HTTPSConnection(httplib.HTTPSConnection):
    """HTTPSConnection that forces the TLSv1 protocol for the SSL wrap."""

    def __init__(self, *args, **kwargs):
        httplib.HTTPSConnection.__init__(self, *args, **kwargs)

    def connect(self):
        sock = socket.create_connection((self.host, self.port), self.timeout)
        # _tunnel_host only became a guaranteed attribute in 2.6.7, so older
        # interpreters need the hasattr() guard.
        if sys.version_info < (2, 6, 7):
            if hasattr(self, '_tunnel_host'):
                self.sock = sock
                self._tunnel()
        else:
            if self._tunnel_host:
                self.sock = sock
                self._tunnel()

        self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version = ssl.PROTOCOL_TLSv1)


class HTTPSHandler(urllib2.HTTPSHandler):
    """urllib2 handler that opens requests with the TLSv1 HTTPSConnection."""

    def https_open(self, req):
        return self.do_open(HTTPSConnection, req)
|
||||
111
couchpotato/core/downloaders/pneumatic.py
Normal file
111
couchpotato/core/downloaders/pneumatic.py
Normal file
@@ -0,0 +1,111 @@
|
||||
from __future__ import with_statement
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Pneumatic'
|
||||
|
||||
|
||||
class Pneumatic(DownloaderBase):
    """Write .nzb + .strm file pairs for the XBMC Pneumatic add-on.

    Instead of talking to a download client, this 'downloader' drops the
    NZB and a matching .strm file into a watched directory.
    """

    protocol = ['nzb']
    strm_syntax = 'plugin://plugin.program.pneumatic/?mode=strm&type=add_file&nzb=%s&nzbname=%s'
    status_support = False  # no queue to poll, so no status reporting

    def download(self, data = None, media = None, filedata = None):
        """Save *filedata* as an .nzb and create the matching .strm file.

        :return: downloadReturnId('') on success, False on failure.
        """
        if not media: media = {}
        if not data: data = {}

        directory = self.conf('directory')
        if not directory or not os.path.isdir(directory):
            log.error('No directory set for .strm downloads.')
        else:
            try:
                # Anything under ~50 bytes cannot be a real NZB document.
                if not filedata or len(filedata) < 50:
                    log.error('No nzb available!')
                    return False

                full_path = os.path.join(directory, self.createFileName(data, filedata, media))

                try:
                    if not os.path.isfile(full_path):
                        log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
                        with open(full_path, 'wb') as f:
                            f.write(filedata)

                        nzb_name = self.createNzbName(data, media)
                        strm_path = os.path.join(directory, nzb_name)

                        # 'with' guarantees the handle is closed even if the
                        # write fails (the bare open() here leaked it on error).
                        strm_content = self.strm_syntax % (full_path, nzb_name)
                        with open(strm_path + '.strm', 'wb') as strm_file:
                            strm_file.write(strm_content)

                        return self.downloadReturnId('')

                    else:
                        log.info('File %s already exists.', full_path)
                        return self.downloadReturnId('')

                except:
                    log.error('Failed to download .strm: %s', traceback.format_exc())

            except:
                log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
                return False
        return False

    def test(self):
        """Check that the configured directory exists and is writable."""
        directory = self.conf('directory')
        if directory and os.path.isdir(directory):

            test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))

            # Check if folder is writable
            self.createFile(test_file, 'This is a test file')
            if os.path.isfile(test_file):
                os.remove(test_file)
                return True

        return False
|
||||
|
||||
|
||||
# Settings definition consumed by the CouchPotato settings UI
# (downloaders tab, ordered after the main providers).
config = [{
    'name': 'pneumatic',
    'order': 30,
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'pneumatic',
            'label': 'Pneumatic',
            'description': 'Use <a href="http://forum.xbmc.org/showthread.php?tid=97657" target="_blank">Pneumatic</a> to download .strm files.',
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Directory where the .strm file is saved to.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,37 +0,0 @@
|
||||
from .main import Pneumatic


def start():
    """Plugin entry point: return the Pneumatic downloader instance."""
    return Pneumatic()

# Legacy settings definition (same shape as the newer pneumatic config).
config = [{
    'name': 'pneumatic',
    'order': 30,
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'pneumatic',
            'label': 'Pneumatic',
            'description': 'Use <a href="http://forum.xbmc.org/showthread.php?tid=97657" target="_blank">Pneumatic</a> to download .strm files.',
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Directory where the .strm file is saved to.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,54 +0,0 @@
|
||||
from __future__ import with_statement
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.logger import CPLog
|
||||
import os
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
class Pneumatic(Downloader):
|
||||
|
||||
type = ['nzb']
|
||||
strm_syntax = 'plugin://plugin.program.pneumatic/?mode=strm&type=add_file&nzb=%s&nzbname=%s'
|
||||
|
||||
def download(self, data = {}, movie = {}, filedata = None):
|
||||
|
||||
directory = self.conf('directory')
|
||||
if not directory or not os.path.isdir(directory):
|
||||
log.error('No directory set for .strm downloads.')
|
||||
else:
|
||||
try:
|
||||
if not filedata or len(filedata) < 50:
|
||||
log.error('No nzb available!')
|
||||
return False
|
||||
|
||||
fullPath = os.path.join(directory, self.createFileName(data, filedata, movie))
|
||||
|
||||
try:
|
||||
if not os.path.isfile(fullPath):
|
||||
log.info('Downloading %s to %s.', (data.get('type'), fullPath))
|
||||
with open(fullPath, 'wb') as f:
|
||||
f.write(filedata)
|
||||
|
||||
nzb_name = self.createNzbName(data, movie)
|
||||
strm_path = os.path.join(directory, nzb_name)
|
||||
|
||||
strm_file = open(strm_path + '.strm', 'wb')
|
||||
strmContent = self.strm_syntax % (fullPath, nzb_name)
|
||||
strm_file.write(strmContent)
|
||||
strm_file.close()
|
||||
|
||||
return True
|
||||
|
||||
else:
|
||||
log.info('File %s already exists.', fullPath)
|
||||
return True
|
||||
|
||||
except:
|
||||
log.error('Failed to download .strm: %s', traceback.format_exc())
|
||||
pass
|
||||
|
||||
except:
|
||||
log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
|
||||
return False
|
||||
return False
|
||||
245
couchpotato/core/downloaders/qbittorrent_.py
Normal file
245
couchpotato/core/downloaders/qbittorrent_.py
Normal file
@@ -0,0 +1,245 @@
|
||||
from base64 import b16encode, b32decode
|
||||
from hashlib import sha1
|
||||
import os
|
||||
|
||||
from bencode import bencode, bdecode
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from qbittorrent.client import QBittorrentClient
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'qBittorrent'
|
||||
|
||||
|
||||
class qBittorrent(DownloaderBase):
|
||||
|
||||
protocol = ['torrent', 'torrent_magnet']
|
||||
qb = None
|
||||
|
||||
def __init__(self):
|
||||
super(qBittorrent, self).__init__()
|
||||
|
||||
def connect(self):
|
||||
if self.qb is not None:
|
||||
return self.qb
|
||||
|
||||
url = cleanHost(self.conf('host'), protocol = True, ssl = False)
|
||||
|
||||
if self.conf('username') and self.conf('password'):
|
||||
self.qb = QBittorrentClient(
|
||||
url,
|
||||
username = self.conf('username'),
|
||||
password = self.conf('password')
|
||||
)
|
||||
else:
|
||||
self.qb = QBittorrentClient(url)
|
||||
|
||||
return self.qb
|
||||
|
||||
def test(self):
|
||||
if self.connect():
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
if not data: data = {}
|
||||
|
||||
log.debug('Sending "%s" to qBittorrent.', (data.get('name')))
|
||||
|
||||
if not self.connect():
|
||||
return False
|
||||
|
||||
if not filedata and data.get('protocol') == 'torrent':
|
||||
log.error('Failed sending torrent, no data')
|
||||
return False
|
||||
|
||||
|
||||
if data.get('protocol') == 'torrent_magnet':
|
||||
filedata = self.magnetToTorrent(data.get('url'))
|
||||
|
||||
if filedata is False:
|
||||
return False
|
||||
|
||||
data['protocol'] = 'torrent'
|
||||
|
||||
info = bdecode(filedata)["info"]
|
||||
torrent_hash = sha1(bencode(info)).hexdigest()
|
||||
|
||||
# Convert base 32 to hex
|
||||
if len(torrent_hash) == 32:
|
||||
torrent_hash = b16encode(b32decode(torrent_hash))
|
||||
|
||||
# Send request to qBittorrent
|
||||
try:
|
||||
self.qb.add_file(filedata)
|
||||
|
||||
return self.downloadReturnId(torrent_hash)
|
||||
except Exception as e:
|
||||
log.error('Failed to send torrent to qBittorrent: %s', e)
|
||||
return False
|
||||
|
||||
def getTorrentStatus(self, torrent):
|
||||
|
||||
if torrent.state in ('uploading', 'queuedUP', 'stalledUP'):
|
||||
return 'seeding'
|
||||
|
||||
if torrent.progress == 1:
|
||||
return 'completed'
|
||||
|
||||
return 'busy'
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
log.debug('Checking qBittorrent download status.')
|
||||
|
||||
if not self.connect():
|
||||
return []
|
||||
|
||||
try:
|
||||
torrents = self.qb.get_torrents()
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
for torrent in torrents:
|
||||
if torrent.hash in ids:
|
||||
torrent.update_general() # get extra info
|
||||
torrent_filelist = torrent.get_files()
|
||||
|
||||
torrent_files = []
|
||||
torrent_dir = os.path.join(torrent.save_path, torrent.name)
|
||||
|
||||
if os.path.isdir(torrent_dir):
|
||||
torrent.save_path = torrent_dir
|
||||
|
||||
if len(torrent_filelist) > 1 and os.path.isdir(torrent_dir): # multi file torrent, path.isdir check makes sure we're not in the root download folder
|
||||
for root, _, files in os.walk(torrent.save_path):
|
||||
for f in files:
|
||||
torrent_files.append(sp(os.path.join(root, f)))
|
||||
|
||||
else: # multi or single file placed directly in torrent.save_path
|
||||
for f in torrent_filelist:
|
||||
file_path = os.path.join(torrent.save_path, f.name)
|
||||
if os.path.isfile(file_path):
|
||||
torrent_files.append(sp(file_path))
|
||||
|
||||
release_downloads.append({
|
||||
'id': torrent.hash,
|
||||
'name': torrent.name,
|
||||
'status': self.getTorrentStatus(torrent),
|
||||
'seed_ratio': torrent.ratio,
|
||||
'original_status': torrent.state,
|
||||
'timeleft': torrent.progress * 100 if torrent.progress else -1, # percentage
|
||||
'folder': sp(torrent.save_path),
|
||||
'files': torrent_files
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
except Exception as e:
|
||||
log.error('Failed to get status from qBittorrent: %s', e)
|
||||
return []
|
||||
|
||||
def pause(self, release_download, pause = True):
|
||||
if not self.connect():
|
||||
return False
|
||||
|
||||
torrent = self.qb.get_torrent(release_download['id'])
|
||||
if torrent is None:
|
||||
return False
|
||||
|
||||
if pause:
|
||||
return torrent.pause()
|
||||
return torrent.resume()
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
return self.processComplete(release_download, delete_files = True)
|
||||
|
||||
def processComplete(self, release_download, delete_files):
    """Remove a finished torrent from qBittorrent.

    :param release_download: release dict; 'id' is the torrent hash,
        'name' is used for logging.
    :param delete_files: when True the downloaded data is removed too.
    :return: True on success, False when not connected or torrent unknown.
    """
    log.debug('Requesting qBittorrent to remove the torrent %s%s.',
        (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

    if not self.connect():
        return False

    torrent = self.qb.find_torrent(release_download['id'])

    if torrent is None:
        return False

    if delete_files:
        torrent.delete()  # deletes torrent with data
    else:
        torrent.remove()  # just removes the torrent, doesn't delete data

    return True
|
||||
|
||||
|
||||
# Settings exposed in the CouchPotato "Downloaders" tab for qBittorrent.
config = [{
    'name': 'qbittorrent',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'qbittorrent',
            'label': 'qbittorrent',
            'description': '',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'http://localhost:8080/',
                    'description': 'RPC Communication URI. Usually <strong>http://localhost:8080/</strong>'
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'default': False,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Remove the torrent after it finishes seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
335
couchpotato/core/downloaders/rtorrent_.py
Normal file
335
couchpotato/core/downloaders/rtorrent_.py
Normal file
@@ -0,0 +1,335 @@
|
||||
from base64 import b16encode, b32decode
|
||||
from datetime import timedelta
|
||||
from hashlib import sha1
|
||||
from urlparse import urlparse
|
||||
import os
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import cleanHost, splitString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from bencode import bencode, bdecode
|
||||
from rtorrent import RTorrent
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'rTorrent'
|
||||
|
||||
|
||||
class rTorrent(DownloaderBase):
    """Downloader plugin that talks to an rTorrent instance over XML-RPC/SCGI/httprpc."""

    # Protocols this downloader accepts.
    protocol = ['torrent', 'torrent_magnet']
    # Active RTorrent connection, lazily created by connect().
    rt = None
    # Human-readable reason of the last failed connection attempt.
    error_msg = ''

    # Migration url to host options
    def __init__(self):
        super(rTorrent, self).__init__()

        # Migrate the legacy 'url' setting on startup, and drop the cached
        # connection whenever any rtorrent setting is saved.
        addEvent('app.load', self.migrate)
        addEvent('setting.save.rtorrent.*.after', self.settingsChanged)

    def migrate(self):
        """Split the deprecated 'url' setting into ssl/host/rpc_url settings."""

        url = self.conf('url')
        if url:
            host_split = splitString(url.split('://')[-1], split_on = '/')

            self.conf('ssl', value = url.startswith('https'))
            self.conf('host', value = host_split[0].strip())
            self.conf('rpc_url', value = '/'.join(host_split[1:]))

            self.deleteConf('url')

    def settingsChanged(self):
        """Drop the cached connection so changed settings apply on next use."""
        # Reset active connection if settings have changed
        if self.rt:
            log.debug('Settings have changed, closing active connection')

        self.rt = None
        return True

    def connect(self, reconnect = False):
        """Return a (cached) RTorrent connection, or None when it cannot be verified.

        :param reconnect: force a fresh connection even if one is cached.
        """
        # Already connected?
        if not reconnect and self.rt is not None:
            return self.rt

        url = cleanHost(self.conf('host'), protocol = True, ssl = self.conf('ssl'))

        # Automatically add '+https' to 'httprpc' protocol if SSL is enabled
        if self.conf('ssl') and url.startswith('httprpc://'):
            url = url.replace('httprpc://', 'httprpc+https://')

        parsed = urlparse(url)

        # rpc_url is only used on http/https scgi pass-through
        if parsed.scheme in ['http', 'https']:
            url += self.conf('rpc_url')

        self.rt = RTorrent(
            url,
            self.conf('username'),
            self.conf('password')
        )

        self.error_msg = ''
        try:
            self.rt._verify_conn()
        except AssertionError as e:
            # Keep the failure reason so test() can report it.
            self.error_msg = e.message
            self.rt = None

        return self.rt

    def test(self):
        """Check connectivity: True on success, (False, reason) or False otherwise."""
        if self.connect(True):
            return True

        if self.error_msg:
            return False, 'Connection failed: ' + self.error_msg

        return False

    def download(self, data = None, media = None, filedata = None):
        """Send a torrent (or magnet) to rTorrent.

        :param data: release info; 'protocol', 'url' and 'name' are used.
        :param media: media dict (unused here beyond the default contract).
        :param filedata: raw .torrent contents when available.
        :return: downloadReturnId(hash) on success, False on failure.
        """
        if not media: media = {}
        if not data: data = {}

        log.debug('Sending "%s" to rTorrent.', (data.get('name')))

        if not self.connect():
            return False

        # Fixed: removed a 'torrent_params' dict that was built here but never
        # used; the label is applied below via torrent.set_custom().

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Try download magnet torrents
        if data.get('protocol') == 'torrent_magnet':
            filedata = self.magnetToTorrent(data.get('url'))

            if filedata is False:
                return False

            data['protocol'] = 'torrent'

        info = bdecode(filedata)["info"]
        torrent_hash = sha1(bencode(info)).hexdigest().upper()

        # Convert base 32 to hex
        if len(torrent_hash) == 32:
            torrent_hash = b16encode(b32decode(torrent_hash))

        # Send request to rTorrent
        try:
            # Send torrent to rTorrent
            torrent = self.rt.load_torrent(filedata, verify_retries=10)

            if not torrent:
                log.error('Unable to find the torrent, did it fail to load?')
                return False

            # Set label
            if self.conf('label'):
                torrent.set_custom(1, self.conf('label'))

            if self.conf('directory'):
                torrent.set_directory(self.conf('directory'))

            # Start torrent
            if not self.conf('paused', default = 0):
                torrent.start()

            return self.downloadReturnId(torrent_hash)
        except Exception as err:
            log.error('Failed to send torrent to rTorrent: %s', err)
            return False

    def getTorrentStatus(self, torrent):
        """Map an rTorrent torrent to a CouchPotato status string."""
        if not torrent.complete:
            return 'busy'

        if torrent.open:
            return 'seeding'

        return 'completed'

    def getAllDownloadStatus(self, ids):
        """Build a ReleaseDownloadList for the torrents whose info hash is in ids."""
        log.debug('Checking rTorrent download status.')

        if not self.connect():
            return []

        try:
            torrents = self.rt.get_torrents()

            release_downloads = ReleaseDownloadList(self)

            for torrent in torrents:
                if torrent.info_hash in ids:
                    torrent_directory = os.path.normpath(torrent.directory)
                    torrent_files = []

                    # Fixed: loop variable renamed from 'file' (shadowed the builtin).
                    for file_item in torrent.get_files():
                        # Make file paths absolute when rTorrent reports them
                        # relative to the torrent directory.
                        if not os.path.normpath(file_item.path).startswith(torrent_directory):
                            file_path = os.path.join(torrent_directory, file_item.path.lstrip('/'))
                        else:
                            file_path = file_item.path

                        torrent_files.append(sp(file_path))

                    release_downloads.append({
                        'id': torrent.info_hash,
                        'name': torrent.name,
                        'status': self.getTorrentStatus(torrent),
                        'seed_ratio': torrent.ratio,
                        'original_status': torrent.state,
                        'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
                        'folder': sp(torrent.directory),
                        'files': torrent_files
                    })

            return release_downloads

        except Exception as err:
            log.error('Failed to get status from rTorrent: %s', err)
            return []

    def pause(self, release_download, pause = True):
        """Pause or resume the torrent identified by release_download['id']."""
        if not self.connect():
            return False

        torrent = self.rt.find_torrent(release_download['id'])
        if torrent is None:
            return False

        if pause:
            return torrent.pause()
        return torrent.resume()

    def removeFailed(self, release_download):
        """Delete a failed download, torrent and data included."""
        log.info('%s failed downloading, deleting...', release_download['name'])
        return self.processComplete(release_download, delete_files = True)

    def processComplete(self, release_download, delete_files):
        """Erase a torrent from rTorrent, optionally deleting its files on disk.

        rTorrent has no 'delete with data' call, so files are unlinked one by
        one and empty directories removed bottom-up before erasing the torrent.
        """
        log.debug('Requesting rTorrent to remove the torrent %s%s.',
            (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

        if not self.connect():
            return False

        torrent = self.rt.find_torrent(release_download['id'])

        if torrent is None:
            return False

        if delete_files:
            for file_item in torrent.get_files():  # will only delete files, not dir/sub-dir
                os.unlink(os.path.join(torrent.directory, file_item.path))

            if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
                # Remove empty directories bottom up
                try:
                    for path, _, _ in os.walk(sp(torrent.directory), topdown = False):
                        os.rmdir(path)
                except OSError:
                    log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)

        torrent.erase()  # just removes the torrent, doesn't delete data

        return True
|
||||
|
||||
|
||||
# Settings exposed in the CouchPotato "Downloaders" tab for rTorrent.
config = [{
    'name': 'rtorrent',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'rtorrent',
            'label': 'rTorrent',
            'description': '',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'localhost:80',
                    'description': 'RPC Communication URI. Usually <strong>scgi://localhost:5000</strong>, '
                                   '<strong>httprpc://localhost/rutorrent</strong> or <strong>localhost:80</strong>'
                },
                {
                    'name': 'ssl',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
                },
                {
                    'name': 'rpc_url',
                    'type': 'string',
                    'default': 'RPC2',
                    'advanced': True,
                    'description': 'Change if your RPC mount is at a different path.',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'label',
                    'description': 'Label to apply on added torrents.',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Download to this directory. Keep empty for default rTorrent download directory.',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'default': False,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Remove the torrent after it finishes seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
281
couchpotato/core/downloaders/sabnzbd.py
Normal file
281
couchpotato/core/downloaders/sabnzbd.py
Normal file
@@ -0,0 +1,281 @@
|
||||
from datetime import timedelta
|
||||
from urllib2 import URLError
|
||||
import json
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
|
||||
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Sabnzbd'
|
||||
|
||||
|
||||
class Sabnzbd(DownloaderBase):
    """Downloader that submits NZBs to a SABnzbd (0.7+) server via its HTTP API."""

    protocol = ['nzb']

    def download(self, data = None, media = None, filedata = None):
        """Send a release to SABnzbd, by url ('addurl') or uploaded file ('addfile').

        :param data: release info dict; 'name' and 'url' are used here.
        :param media: media dict, used to build the nzb name.
        :param filedata: raw nzb contents when available.
        :return: downloadReturnId(nzo_id) when an nzb file was uploaded,
                 True for a plain url add, False on any failure.
        """
        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" to SABnzbd.', data.get('name'))

        req_params = {
            'cat': self.conf('category'),
            'mode': 'addurl',
            'nzbname': self.createNzbName(data, media),
            'priority': self.conf('priority'),
        }

        nzb_filename = None
        if filedata:
            if len(filedata) < 50:
                log.error('No proper nzb available: %s', filedata)
                return False

            # If it's a .rar, it adds the .rar extension, otherwise it stays .nzb
            nzb_filename = self.createFileName(data, filedata, media)
            req_params['mode'] = 'addfile'
        else:
            req_params['name'] = data.get('url')

        try:
            # Fixed: string comparison with '==' instead of identity check 'is',
            # which only worked by accident through CPython string interning.
            if nzb_filename and req_params.get('mode') == 'addfile':
                sab_data = self.call(req_params, files = {'nzbfile': (ss(nzb_filename), filedata)})
            else:
                sab_data = self.call(req_params)
        except URLError:
            log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0))
            return False
        except:
            log.error('Failed sending release, use API key, NOT the NZB key: %s', traceback.format_exc(0))
            return False

        log.debug('Result from SAB: %s', sab_data)
        if sab_data.get('status') and not sab_data.get('error'):
            log.info('NZB sent to SAB successfully.')
            if filedata:
                return self.downloadReturnId(sab_data.get('nzo_ids')[0])
            else:
                return True
        else:
            log.error('Error getting data from SABNZBd: %s', sab_data)
            return False

    def test(self):
        """Verify the server version (>= 0.7) and that the api key is usable."""
        try:
            sab_data = self.call({
                'mode': 'version',
            })
            v = sab_data.split('.')
            if int(v[0]) == 0 and int(v[1]) < 7:
                return False, 'Your Sabnzbd client is too old, please update to newest version.'

            # the version check will work even with wrong api key, so we need the next check as well
            sab_data = self.call({
                'mode': 'qstatus',
            })
            if not sab_data:
                return False
        except:
            return False

        return True

    def getAllDownloadStatus(self, ids):
        """Collect status for the given nzo ids from both the queue and history."""

        log.debug('Checking SABnzbd download status.')

        # Go through Queue
        try:
            queue = self.call({
                'mode': 'queue',
            })
        except:
            log.error('Failed getting queue: %s', traceback.format_exc(1))
            return []

        # Go through history items
        try:
            history = self.call({
                'mode': 'history',
                'limit': 15,
            })
        except:
            log.error('Failed getting history json: %s', traceback.format_exc(1))
            return []

        release_downloads = ReleaseDownloadList(self)

        # Get busy releases
        for nzb in queue.get('slots', []):
            if nzb['nzo_id'] in ids:
                status = 'busy'
                # SAB marks password-protected posts in the filename itself.
                if 'ENCRYPTED / ' in nzb['filename']:
                    status = 'failed'

                release_downloads.append({
                    'id': nzb['nzo_id'],
                    'name': nzb['filename'],
                    'status': status,
                    'original_status': nzb['status'],
                    'timeleft': nzb['timeleft'] if not queue['paused'] else -1,
                })

        # Get old releases
        for nzb in history.get('slots', []):
            if nzb['nzo_id'] in ids:
                status = 'busy'
                if nzb['status'] == 'Failed' or (nzb['status'] == 'Completed' and nzb['fail_message'].strip()):
                    status = 'failed'
                elif nzb['status'] == 'Completed':
                    status = 'completed'

                release_downloads.append({
                    'id': nzb['nzo_id'],
                    'name': nzb['name'],
                    'status': status,
                    'original_status': nzb['status'],
                    'timeleft': str(timedelta(seconds = 0)),
                    'folder': sp(os.path.dirname(nzb['storage']) if os.path.isfile(nzb['storage']) else nzb['storage']),
                })

        return release_downloads

    def removeFailed(self, release_download):
        """Delete a failed download, files included, from both queue and history."""

        log.info('%s failed downloading, deleting...', release_download['name'])

        try:
            self.call({
                'mode': 'queue',
                'name': 'delete',
                'del_files': '1',
                'value': release_download['id']
            }, use_json = False)
            self.call({
                'mode': 'history',
                'name': 'delete',
                'del_files': '1',
                'value': release_download['id']
            }, use_json = False)
        except:
            log.error('Failed deleting: %s', traceback.format_exc(0))
            return False

        return True

    def processComplete(self, release_download, delete_files = False):
        """Remove a completed item from SABnzbd history; files stay on disk."""
        log.debug('Requesting SabNZBd to remove the NZB %s.', release_download['name'])

        try:
            self.call({
                'mode': 'history',
                'name': 'delete',
                'del_files': '0',
                'value': release_download['id']
            }, use_json = False)
        except:
            log.error('Failed removing: %s', traceback.format_exc(0))
            return False

        return True

    def call(self, request_params, use_json = True, **kwargs):
        """Perform an API request against the configured SABnzbd host.

        :param request_params: query parameters; 'mode' selects the API call.
        :param use_json: parse the response as json and return the sub-dict
            keyed by the mode (falling back to the whole dict); when False the
            raw response text is returned.
        """

        url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
            'apikey': self.conf('api_key'),
            'output': 'json'
        }))

        data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
        if use_json:
            d = json.loads(data)
            if d.get('error'):
                log.error('Error getting data from SABNZBd: %s', d.get('error'))
                return {}

            return d.get(request_params['mode']) or d
        else:
            return data
|
||||
|
||||
|
||||
# Settings exposed in the CouchPotato "Downloaders" tab for SABnzbd.
config = [{
    'name': 'sabnzbd',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'sabnzbd',
            'label': 'Sabnzbd',
            'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> (0.7+) to download NZBs.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'localhost:8080',
                },
                {
                    'name': 'ssl',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
                },
                {
                    'name': 'api_key',
                    'label': 'Api Key',
                    'description': 'Used for all calls to Sabnzbd.',
                },
                {
                    'name': 'category',
                    'label': 'Category',
                    'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
                },
                {
                    'name': 'priority',
                    'label': 'Priority',
                    'type': 'dropdown',
                    'default': '0',
                    'advanced': True,
                    'values': [('Paused', -2), ('Low', -1), ('Normal', 0), ('High', 1), ('Forced', 2)],
                    'description': 'Add to the queue with this priority.',
                },
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'remove_complete',
                    'advanced': True,
                    'label': 'Remove NZB',
                    'default': False,
                    'type': 'bool',
                    'description': 'Remove the NZB from history after it completed.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,53 +0,0 @@
|
||||
from .main import Sabnzbd
|
||||
|
||||
def start():
    """Plugin entry point: instantiate the Sabnzbd downloader."""
    return Sabnzbd()
|
||||
|
||||
# Legacy settings definition for the SABnzbd downloader (pre-0.7 plugin layout).
config = [{
    'name': 'sabnzbd',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'sabnzbd',
            'label': 'Sabnzbd',
            'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> to download NZBs.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'localhost:8080',
                },
                {
                    'name': 'api_key',
                    'label': 'Api Key',
                    'description': 'Used for all calls to Sabnzbd.',
                },
                {
                    'name': 'category',
                    'label': 'Category',
                    'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
                },
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,152 +0,0 @@
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, ss
|
||||
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from urllib2 import URLError
|
||||
import json
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
class Sabnzbd(Downloader):
    """Legacy SABnzbd downloader (old Downloader plugin API)."""

    type = ['nzb']

    def download(self, data = None, movie = None, filedata = None):
        """Send a release to SABnzbd; returns True on success, False otherwise.

        :param data: release info dict; 'name' and 'url' are used.
        :param movie: movie dict, used to build the nzb name.
        :param filedata: raw nzb contents when available.
        """
        # Fixed: mutable default arguments ({}) were shared across calls;
        # use None sentinels instead.
        if movie is None: movie = {}
        if data is None: data = {}

        log.info('Sending "%s" to SABnzbd.', data.get('name'))

        params = {
            'apikey': self.conf('api_key'),
            'cat': self.conf('category'),
            'mode': 'addurl',
            'nzbname': self.createNzbName(data, movie),
        }

        nzb_filename = None
        if filedata:
            if len(filedata) < 50:
                log.error('No proper nzb available: %s', (filedata))
                return False

            # If it's a .rar, it adds the .rar extension, otherwise it stays .nzb
            nzb_filename = self.createFileName(data, filedata, movie)
            params['mode'] = 'addfile'
        else:
            params['name'] = data.get('url')

        url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(params)

        try:
            # Fixed: string comparison with '==' instead of identity check 'is',
            # which only worked by accident through CPython string interning.
            if params.get('mode') == 'addfile':
                sab = self.urlopen(url, timeout = 60, params = {'nzbfile': (ss(nzb_filename), filedata)}, multipart = True, show_error = False)
            else:
                sab = self.urlopen(url, timeout = 60, show_error = False)
        except URLError:
            log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0))
            return False
        except:
            log.error('Failed sending release, use API key, NOT the NZB key: %s', traceback.format_exc(0))
            return False

        result = sab.strip()
        if not result:
            log.error('SABnzbd didn\'t return anything.')
            return False

        log.debug('Result text from SAB: %s', result[:40])
        if result[:2] == 'ok':
            log.info('NZB sent to SAB successfully.')
            return True
        else:
            log.error(result[:40])
            return False

    def getAllDownloadStatus(self):
        """Return a list of status dicts for everything in the queue and recent history."""

        log.debug('Checking SABnzbd download status.')

        # Go through Queue
        try:
            queue = self.call({
                'mode': 'queue',
            })
        except:
            log.error('Failed getting queue: %s', traceback.format_exc(1))
            return False

        # Go through history items
        try:
            history = self.call({
                'mode': 'history',
                'limit': 15,
            })
        except:
            log.error('Failed getting history json: %s', traceback.format_exc(1))
            return False

        statuses = []

        # Get busy releases
        for item in queue.get('slots', []):
            statuses.append({
                'id': item['nzo_id'],
                'name': item['filename'],
                'status': 'busy',
                'original_status': item['status'],
                'timeleft': item['timeleft'] if not queue['paused'] else -1,
            })

        # Get old releases
        for item in history.get('slots', []):

            status = 'busy'
            if item['status'] == 'Failed' or (item['status'] == 'Completed' and item['fail_message'].strip()):
                status = 'failed'
            elif item['status'] == 'Completed':
                status = 'completed'

            statuses.append({
                'id': item['nzo_id'],
                'name': item['name'],
                'status': status,
                'original_status': item['status'],
                'timeleft': 0,
            })

        return statuses

    def removeFailed(self, item):
        """Delete a failed download, files included, from SABnzbd history."""

        log.info('%s failed downloading, deleting...', item['name'])

        try:
            self.call({
                'mode': 'history',
                'name': 'delete',
                'del_files': '1',
                'value': item['id']
            }, use_json = False)
        except:
            log.error('Failed deleting: %s', traceback.format_exc(0))
            return False

        return True

    def call(self, params, use_json = True):
        """Perform an API request; 'mode' selects the call. Returns parsed json
        (the sub-dict keyed by mode) or raw response text when use_json is False."""

        url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(params, {
            'apikey': self.conf('api_key'),
            'output': 'json'
        }))

        data = self.urlopen(url, timeout = 60, show_error = False)
        if use_json:
            d = json.loads(data)
            if d.get('error'):
                log.error('Error getting data from SABNZBd: %s', d.get('error'))
                return {}

            return d[params['mode']]
        else:
            return data
|
||||
|
||||
226
couchpotato/core/downloaders/synology.py
Normal file
226
couchpotato/core/downloaders/synology.py
Normal file
@@ -0,0 +1,226 @@
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||
from couchpotato.core.helpers.encoding import isInt
|
||||
from couchpotato.core.helpers.variable import cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
import requests
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Synology'
|
||||
|
||||
|
||||
class Synology(DownloaderBase):
    """Send nzbs and torrents to Synology Download Station."""

    protocol = ['nzb', 'torrent', 'torrent_magnet']
    # Download Station can't report progress back to CouchPotato.
    status_support = False

    def download(self, data = None, media = None, filedata = None):
        """Create a Download Station task from a magnet url or uploaded file.

        :param data: release info dict; 'name', 'protocol' and 'url' are used.
        :param media: media dict (unused here beyond the default contract).
        :param filedata: raw nzb/torrent contents when available.
        :return: downloadReturnId('') when the task was created, False otherwise.
        """
        if not media: media = {}
        if not data: data = {}

        response = False
        # Fixed: this is an informational message, not an error — logged at
        # info level like the other downloaders.
        log.info('Sending "%s" (%s) to Synology.', (data['name'], data['protocol']))

        # Load host from config and split out port.
        host = cleanHost(self.conf('host'), protocol = False).split(':')
        # Fixed: also guard against a host with no port at all (len(host) < 2
        # previously raised IndexError instead of the config error below).
        if len(host) < 2 or not isInt(host[1]):
            log.error('Config properties are not filled in correctly, port is missing.')
            return False

        try:
            # Send request to Synology
            srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'), self.conf('destination'))
            if data['protocol'] == 'torrent_magnet':
                log.info('Adding torrent URL %s', data['url'])
                response = srpc.create_task(url = data['url'])
            elif data['protocol'] in ['nzb', 'torrent']:
                # Fixed: lazy log arguments instead of eager '%' formatting.
                log.info('Adding %s', data['protocol'])
                if not filedata:
                    log.error('No %s data found', data['protocol'])
                else:
                    filename = data['name'] + '.' + data['protocol']
                    response = srpc.create_task(filename = filename, filedata = filedata)
        except:
            log.error('Exception while adding torrent: %s', traceback.format_exc())
        finally:
            # NOTE: the return inside 'finally' deliberately swallows any
            # exception after it has been logged above.
            return self.downloadReturnId('') if response else False

    def test(self):
        """Try to log in to Download Station; True when credentials work."""
        host = cleanHost(self.conf('host'), protocol = False).split(':')
        try:
            srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
            test_result = srpc.test()
        except:
            return False

        return test_result

    def getEnabledProtocol(self):
        """Narrow the advertised protocols based on the 'use_for' setting."""
        if self.conf('use_for') == 'both':
            return super(Synology, self).getEnabledProtocol()
        elif self.conf('use_for') == 'torrent':
            return ['torrent', 'torrent_magnet']
        else:
            return ['nzb']

    def isEnabled(self, manual = False, data = None):
        """Enabled only when the base says so AND 'use_for' matches the release protocol."""
        if not data: data = {}

        for_protocol = ['both']
        if data and 'torrent' in data.get('protocol'):
            for_protocol.append('torrent')
        elif data:
            for_protocol.append(data.get('protocol'))

        return super(Synology, self).isEnabled(manual, data) and\
            ((self.conf('use_for') in for_protocol))
|
||||
|
||||
|
||||
class SynologyRPC(object):
|
||||
|
||||
"""SynologyRPC lite library"""
|
||||
|
||||
def __init__(self, host = 'localhost', port = 5000, username = None, password = None, destination = None):
|
||||
|
||||
super(SynologyRPC, self).__init__()
|
||||
|
||||
self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port)
|
||||
self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port)
|
||||
self.sid = None
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.destination = destination
|
||||
self.session_name = 'DownloadStation'
|
||||
|
||||
def _login(self):
|
||||
if self.username and self.password:
|
||||
args = {'api': 'SYNO.API.Auth', 'account': self.username, 'passwd': self.password, 'version': 2,
|
||||
'method': 'login', 'session': self.session_name, 'format': 'sid'}
|
||||
response = self._req(self.auth_url, args)
|
||||
if response['success']:
|
||||
self.sid = response['data']['sid']
|
||||
log.debug('sid=%s', self.sid)
|
||||
else:
|
||||
log.error('Couldn\'t login to Synology, %s', response)
|
||||
return response['success']
|
||||
else:
|
||||
log.error('User or password missing, not using authentication.')
|
||||
return False
|
||||
|
||||
def _logout(self):
|
||||
args = {'api':'SYNO.API.Auth', 'version':1, 'method':'logout', 'session':self.session_name, '_sid':self.sid}
|
||||
return self._req(self.auth_url, args)
|
||||
|
||||
def _req(self, url, args, files = None):
|
||||
response = {'success': False}
|
||||
try:
|
||||
req = requests.post(url, data = args, files = files)
|
||||
req.raise_for_status()
|
||||
response = json.loads(req.text)
|
||||
if response['success']:
|
||||
log.info('Synology action successfull')
|
||||
return response
|
||||
except requests.ConnectionError as err:
|
||||
log.error('Synology connection error, check your config %s', err)
|
||||
except requests.HTTPError as err:
|
||||
log.error('SynologyRPC HTTPError: %s', err)
|
||||
except Exception as err:
|
||||
log.error('Exception: %s', err)
|
||||
finally:
|
||||
return response
|
||||
|
||||
    def create_task(self, url = None, filename = None, filedata = None):
        """Creates a new download task in Synology Download Station.

        Either specify *url*, or the pair (*filename*, *filedata*) to upload
        a torrent/NZB file directly.

        Returns True if the task was created, False otherwise.
        """
        result = False
        # Each task creation uses its own login/logout cycle.
        if self._login():
            args = {'api': 'SYNO.DownloadStation.Task',
                    'version': '1',
                    'method': 'create',
                    '_sid': self.sid}

            # Only pass a destination when one is configured; otherwise the
            # NAS default download share is used.
            if self.destination and len(self.destination) > 0:
                args['destination'] = self.destination

            if url:
                log.info('Login success, adding torrent URI')
                args['uri'] = url
                response = self._req(self.download_url, args = args)
                log.info('Response: %s', response)
                result = response['success']
            elif filename and filedata:
                # File upload goes through a multipart POST (see _req's
                # ``files`` parameter).
                log.info('Login success, adding torrent')
                files = {'file': (filename, filedata)}
                response = self._req(self.download_url, args = args, files = files)
                log.info('Response: %s', response)
                result = response['success']
            else:
                log.error('Invalid use of SynologyRPC.create_task: either url or filename+filedata must be specified')
            self._logout()

        return result
|
||||
|
||||
def test(self):
|
||||
return bool(self._login())
|
||||
|
||||
|
||||
# Settings definition rendered in the CouchPotato "Downloaders" tab.
# 'enabled' participates in the 'nzb,torrent' radio groups, so Synology can
# be picked for usenet and/or torrent releases (further narrowed by 'use_for').
config = [{
    'name': 'synology',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'synology',
            'label': 'Synology',
            'description': 'Use <a href="http://www.synology.com/dsm/home_home_applications_download_station.php" target="_blank">Synology Download Station</a> to download.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb,torrent',
                },
                {
                    'name': 'host',
                    'default': 'localhost:5000',
                    'description': 'Hostname with port. Usually <strong>localhost:5000</strong>',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'destination',
                    'description': 'Specify <strong>existing</strong> destination share to where your files will be downloaded, usually <strong>Downloads</strong>',
                    'advanced': True,
                },
                {
                    'name': 'use_for',
                    'label': 'Use for',
                    'default': 'both',
                    'type': 'dropdown',
                    'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,45 +0,0 @@
|
||||
from .main import Synology
|
||||
|
||||
def start():
    """Legacy plugin entry point: instantiate the Synology downloader."""
    plugin = Synology()
    return plugin
|
||||
|
||||
# Legacy (pre-rewrite) settings definition for the Synology downloader.
# This older variant is torrent-only ('radio_group': 'torrent') and has no
# destination/use_for options.
config = [{
    'name': 'synology',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'synology',
            'label': 'Synology',
            'description': 'Use <a href="http://www.synology.com/dsm/home_home_applications_download_station.php" target="_blank">Synology Download Station</a> to download.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'localhost:5000',
                    'description': 'Hostname with port. Usually <strong>localhost:5000</strong>',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,105 +0,0 @@
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.helpers.encoding import isInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
import httplib
|
||||
import json
|
||||
import urllib
|
||||
import urllib2
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
class Synology(Downloader):
    """Legacy (pre-rewrite, Python 2) Synology downloader plugin.

    Only handles magnet links; binary torrent payloads are rejected.
    """

    type = ['torrent_magnet']
    log = CPLog(__name__)

    def download(self, data, movie, filedata = None):
        """Send the release URL in *data* to Download Station.

        Returns the API success flag, or False on any configuration or
        transport error.
        """
        # NOTE(review): logged at error level although this is informational.
        log.error('Sending "%s" (%s) to Synology.', (data.get('name'), data.get('type')))

        # Load host from config and split out port.
        host = self.conf('host').split(':')
        if not isInt(host[1]):
            log.error('Config properties are not filled in correctly, port is missing.')
            return False

        # This legacy client cannot upload .torrent file contents.
        if data.get('type') == 'torrent':
            log.error('Can\'t add binary torrent file')
            return False

        try:
            # Send request to Synology Download Station.
            srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
            remote_torrent = srpc.add_torrent_uri(data.get('url'))
            log.info('Response: %s', remote_torrent)
            return remote_torrent['success']
        except Exception, err:
            log.error('Exception while adding torrent: %s', err)
            return False
|
||||
|
||||
|
||||
class SynologyRPC(object):

    '''SynologyRPC lite library'''
    # Legacy (Python 2, urllib2-based) client; superseded by the
    # requests-based rewrite.

    def __init__(self, host = 'localhost', port = 5000, username = None, password = None):
        # Build the two web API endpoints used below.
        super(SynologyRPC, self).__init__()

        self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port)
        self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port)
        self.username = username
        self.password = password
        self.session_name = 'DownloadStation'

    def _login(self):
        # Authenticate and remember the session id on success.
        # NOTE(review): implicitly returns None when neither username nor
        # password is set (falls off the end of the function).
        if self.username and self.password:
            args = {'api': 'SYNO.API.Auth', 'account': self.username, 'passwd': self.password, 'version': 2,
                    'method': 'login', 'session': self.session_name, 'format': 'sid'}
            response = self._req(self.auth_url, args)
            if response['success'] == True:
                self.sid = response['data']['sid']
                log.debug('Sid=%s', self.sid)
            return response
        elif self.username or self.password:
            log.error('User or password missing, not using authentication.')
            return False

    def _logout(self):
        # End the session created by _login().
        args = {'api':'SYNO.API.Auth', 'version':1, 'method':'logout', 'session':self.session_name, '_sid':self.sid}
        return self._req(self.auth_url, args)

    def _req(self, url, args):
        # GET the endpoint with url-encoded args and decode the JSON reply.
        # Returns the decoded dict, or False on transport errors.
        req_url = url + '?' + urllib.urlencode(args)
        try:
            req_open = urllib2.urlopen(req_url)
            response = json.loads(req_open.read())
            if response['success'] == True:
                log.info('Synology action successfull')
            return response
        except httplib.InvalidURL, err:
            # NOTE(review): message says "Transmission" but this is Synology.
            log.error('Invalid Transmission host, check your config %s', err)
            return False
        except urllib2.HTTPError, err:
            log.error('SynologyRPC HTTPError: %s', err)
            return False
        except urllib2.URLError, err:
            log.error('Unable to connect to Synology %s', err)
            return False

    def add_torrent_uri(self, torrent):
        # Full login -> create-task -> logout cycle for one URI.
        log.info('Adding torrent URL %s', torrent)
        response = {}
        # login
        login = self._login()
        if len(login) > 0 and login['success'] == True:
            log.info('Login success, adding torrent')
            args = {'api':'SYNO.DownloadStation.Task', 'version':1, 'method':'create', 'uri':torrent, '_sid':self.sid}
            response = self._req(self.download_url, args)
            self._logout()
        else:
            log.error('Couldn\'t login to Synology, %s', login)
        return response
|
||||
|
||||
|
||||
348
couchpotato/core/downloaders/transmission.py
Normal file
348
couchpotato/core/downloaders/transmission.py
Normal file
@@ -0,0 +1,348 @@
|
||||
from base64 import b64encode
|
||||
from datetime import timedelta
|
||||
import httplib
|
||||
import json
|
||||
import os.path
|
||||
import re
|
||||
import urllib2
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Transmission'
|
||||
|
||||
|
||||
class Transmission(DownloaderBase):
    """Transmission downloader plugin: sends releases to a Transmission
    daemon over its JSON-RPC interface and reports download progress back
    to CouchPotato."""

    protocol = ['torrent', 'torrent_magnet']
    log = CPLog(__name__)
    # Lazily-created TransmissionRPC client, set by connect().
    trpc = None

    def connect(self):
        """Create the RPC client from the configured host/credentials.

        Returns the client (truthy) or False when the host setting lacks
        a port.
        """
        # Load host from config and split out port.
        host = cleanHost(self.conf('host')).rstrip('/').rsplit(':', 1)
        if not isInt(host[1]):
            log.error('Config properties are not filled in correctly, port is missing.')
            return False

        self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))
        return self.trpc

    def download(self, data = None, media = None, filedata = None):
        """Send a release (magnet URL or raw .torrent bytes) to Transmission.

        Returns a downloadReturnId wrapper on success, False on failure.
        """
        if not media: media = {}
        if not data: data = {}

        log.info('Sending "%s" (%s) to Transmission.', (data.get('name'), data.get('protocol')))

        if not self.connect():
            return False

        # A plain torrent (not magnet) needs the file contents.
        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Set parameters for adding torrent
        params = {
            'paused': self.conf('paused', default = False)
        }

        if self.conf('directory'):
            if os.path.isdir(self.conf('directory')):
                params['download-dir'] = self.conf('directory')
            else:
                log.error('Download directory from Transmission settings: %s doesn\'t exist', self.conf('directory'))

        # Change parameters of torrent (seed ratio/time limits, applied
        # after the torrent is added).
        torrent_params = {}
        if data.get('seed_ratio'):
            torrent_params['seedRatioLimit'] = tryFloat(data.get('seed_ratio'))
            torrent_params['seedRatioMode'] = 1

        if data.get('seed_time'):
            torrent_params['seedIdleLimit'] = tryInt(data.get('seed_time')) * 60
            torrent_params['seedIdleMode'] = 1

        # Send request to Transmission
        if data.get('protocol') == 'torrent_magnet':
            remote_torrent = self.trpc.add_torrent_uri(data.get('url'), arguments = params)
            torrent_params['trackerAdd'] = self.torrent_trackers
        else:
            remote_torrent = self.trpc.add_torrent_file(b64encode(filedata), arguments = params)

        if not remote_torrent:
            log.error('Failed sending torrent to Transmission')
            return False

        # Transmission reports an already-known torrent under a different key.
        data = remote_torrent.get('torrent-added') or remote_torrent.get('torrent-duplicate')

        # Change settings of added torrents
        if torrent_params:
            self.trpc.set_torrent(data['hashString'], torrent_params)

        log.info('Torrent sent to Transmission successfully.')
        return self.downloadReturnId(data['hashString'])

    def test(self):
        """Connectivity check: True when the RPC session can be fetched."""
        if self.connect() and self.trpc.get_session():
            return True
        return False

    def getAllDownloadStatus(self, ids):
        """Return a ReleaseDownloadList describing each tracked torrent in *ids*.

        Maps Transmission status codes onto CouchPotato's
        busy/failed/completed/seeding states.
        """
        log.debug('Checking Transmission download status.')

        if not self.connect():
            return []

        release_downloads = ReleaseDownloadList(self)

        return_params = {
            'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
        }

        session = self.trpc.get_session()
        queue = self.trpc.get_alltorrents(return_params)
        if not (queue and queue.get('torrents')):
            log.debug('Nothing in queue or error')
            return []

        for torrent in queue['torrents']:
            # Only report torrents CouchPotato itself started.
            if torrent['hashString'] in ids:
                log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s',
                          (torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished'], session['incomplete-dir-enabled'], session['incomplete-dir']))

                # Status mapping: stalled (optionally) -> failed; stopped at
                # 100% -> completed; status 5/6 -> seeding; otherwise busy.
                status = 'busy'
                if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'):
                    status = 'failed'
                elif torrent['status'] == 0 and torrent['percentDone'] == 1:
                    status = 'completed'
                elif torrent['status'] in [5, 6]:
                    status = 'seeding'

                # In-progress downloads may live in the daemon's incomplete dir.
                if session['incomplete-dir-enabled'] and status == 'busy':
                    torrent_folder = session['incomplete-dir']
                else:
                    torrent_folder = torrent['downloadDir']

                torrent_files = []
                for file_item in torrent['files']:
                    torrent_files.append(sp(os.path.join(torrent_folder, file_item['name'])))

                release_downloads.append({
                    'id': torrent['hashString'],
                    'name': torrent['name'],
                    'status': status,
                    'original_status': torrent['status'],
                    'seed_ratio': torrent['uploadRatio'],
                    'timeleft': str(timedelta(seconds = torrent['eta'])),
                    # Single-file torrents sit directly in torrent_folder.
                    'folder': sp(torrent_folder if len(torrent_files) == 1 else os.path.join(torrent_folder, torrent['name'])),
                    'files': torrent_files
                })

        return release_downloads

    def pause(self, release_download, pause = True):
        """Pause (or resume, when *pause* is False) a torrent by hash."""
        if pause:
            return self.trpc.stop_torrent(release_download['id'])
        else:
            return self.trpc.start_torrent(release_download['id'])

    def removeFailed(self, release_download):
        """Remove a failed torrent and delete its local data."""
        log.info('%s failed downloading, deleting...', release_download['name'])
        return self.trpc.remove_torrent(release_download['id'], True)

    def processComplete(self, release_download, delete_files = False):
        """Remove a finished torrent, optionally deleting its files."""
        log.debug('Requesting Transmission to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
        return self.trpc.remove_torrent(release_download['id'], delete_files)
|
||||
|
||||
|
||||
class TransmissionRPC(object):

    """TransmissionRPC lite library.

    Minimal JSON-RPC client for the Transmission daemon. Handles optional
    HTTP basic auth and the CSRF session-id handshake (HTTP 409).
    """

    def __init__(self, host = 'http://localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
        super(TransmissionRPC, self).__init__()

        self.url = host + ':' + str(port) + '/' + rpc_url + '/rpc'
        # 'tag' correlates requests with responses; bumped per request.
        self.tag = 0
        # Session id is refreshed via the 409 handshake in _request().
        self.session_id = 0
        self.session = {}
        if username and password:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(realm = 'Transmission', uri = self.url, user = username, passwd = password)
            opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager))
            opener.addheaders = [('User-agent', 'couchpotato-transmission-client/1.0')]
            urllib2.install_opener(opener)
        elif username or password:
            log.debug('User or password missing, not using authentication.')
        self.session = self.get_session()

    def _request(self, ojson):
        """POST one RPC envelope; return its 'arguments' dict, or False.

        Transparently retries once after a 409 response carrying a fresh
        X-Transmission-Session-Id header.
        """
        self.tag += 1
        headers = {'x-transmission-session-id': str(self.session_id)}
        request = urllib2.Request(self.url, json.dumps(ojson).encode('utf-8'), headers)
        try:
            open_request = urllib2.urlopen(request)
            response = json.loads(open_request.read())
            log.debug('request: %s', json.dumps(ojson))
            log.debug('response: %s', json.dumps(response))
            if response['result'] == 'success':
                log.debug('Transmission action successful')
                return response['arguments']
            else:
                log.debug('Unknown failure sending command to Transmission. Return text is: %s', response['result'])
                return False
        except httplib.InvalidURL as err:
            log.error('Invalid Transmission host, check your config %s', err)
            return False
        except urllib2.HTTPError as err:
            if err.code == 401:
                log.error('Invalid Transmission Username or Password, check your config')
                return False
            elif err.code == 409:
                # CSRF handshake: the daemon rejects the stale session id and
                # sends a fresh one in the response body/headers.
                msg = str(err.read())
                # Explicit match test instead of the old bare ``except:``,
                # which also hid unrelated errors in this branch.
                match = re.search(r'X-Transmission-Session-Id:\s*(\w+)', msg)
                if match:
                    self.session_id = match.group(1)
                    log.debug('X-Transmission-Session-Id: %s', self.session_id)

                    # Resend the request with the updated header.
                    return self._request(ojson)
                log.error('Unable to get Transmission Session-Id %s', err)
            else:
                log.error('TransmissionRPC HTTPError: %s', err)
        except urllib2.URLError as err:
            log.error('Unable to connect to Transmission %s', err)

    def get_session(self):
        """Fetch the daemon's session settings ('session-get')."""
        post_data = {'method': 'session-get', 'tag': self.tag}
        return self._request(post_data)

    def add_torrent_uri(self, torrent, arguments):
        """Add a torrent by magnet/URL ('torrent-add' with 'filename')."""
        arguments['filename'] = torrent
        post_data = {'arguments': arguments, 'method': 'torrent-add', 'tag': self.tag}
        return self._request(post_data)

    def add_torrent_file(self, torrent, arguments):
        """Add a torrent from base64-encoded file contents ('metainfo')."""
        arguments['metainfo'] = torrent
        post_data = {'arguments': arguments, 'method': 'torrent-add', 'tag': self.tag}
        return self._request(post_data)

    def set_torrent(self, torrent_id, arguments):
        """Change per-torrent settings ('torrent-set')."""
        arguments['ids'] = torrent_id
        post_data = {'arguments': arguments, 'method': 'torrent-set', 'tag': self.tag}
        return self._request(post_data)

    def get_alltorrents(self, arguments):
        """List torrents with the requested fields ('torrent-get')."""
        post_data = {'arguments': arguments, 'method': 'torrent-get', 'tag': self.tag}
        return self._request(post_data)

    def stop_torrent(self, torrent_id):
        """Pause a torrent ('torrent-stop')."""
        post_data = {'arguments': {'ids': torrent_id}, 'method': 'torrent-stop', 'tag': self.tag}
        return self._request(post_data)

    def start_torrent(self, torrent_id):
        """Resume a torrent ('torrent-start')."""
        post_data = {'arguments': {'ids': torrent_id}, 'method': 'torrent-start', 'tag': self.tag}
        return self._request(post_data)

    def remove_torrent(self, torrent_id, delete_local_data):
        """Remove a torrent, optionally deleting its data ('torrent-remove')."""
        post_data = {'arguments': {'ids': torrent_id, 'delete-local-data': delete_local_data}, 'method': 'torrent-remove', 'tag': self.tag}
        return self._request(post_data)
|
||||
|
||||
|
||||
# Settings definition for the Transmission downloader, rendered in the
# CouchPotato "Downloaders" tab ('torrent' radio group).
config = [{
    'name': 'transmission',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'transmission',
            'label': 'Transmission',
            'description': 'Use <a href="http://www.transmissionbt.com/" target="_blank">Transmission</a> to download torrents.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'http://localhost:9091',
                    'description': 'Hostname with port. Usually <strong>http://localhost:9091</strong>',
                },
                {
                    'name': 'rpc_url',
                    'type': 'string',
                    'default': 'transmission',
                    'advanced': True,
                    'description': 'Change if you don\'t run Transmission RPC at the default url.',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Download to this directory. Keep empty for default Transmission download directory.',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Remove the torrent from Transmission after it finished seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'stalled_as_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Consider a stalled torrent as failed',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,63 +0,0 @@
|
||||
from .main import Transmission
|
||||
|
||||
def start():
    """Legacy plugin entry point: instantiate the Transmission downloader."""
    plugin = Transmission()
    return plugin
|
||||
|
||||
# Legacy (pre-rewrite) settings definition for the Transmission downloader.
# Differs from the rewrite: host has no scheme, no rpc_url, and a flat
# 'ratio' option instead of seed_ratio/seed_time per release.
config = [{
    'name': 'transmission',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'transmission',
            'label': 'Transmission',
            'description': 'Use <a href="http://www.transmissionbt.com/" target="_blank">Transmission</a> to download torrents.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'localhost:9091',
                    'description': 'Hostname with port. Usually <strong>localhost:9091</strong>',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'directory',
                    'type': 'directory',
                    'description': 'Where should Transmission saved the downloaded files?',
                },
                {
                    'name': 'ratio',
                    'default': 10,
                    'type': 'int',
                    'advanced': True,
                    'description': 'Stop transfer when reaching ratio',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,148 +0,0 @@
|
||||
from base64 import b64encode
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.helpers.encoding import isInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
import httplib
|
||||
import json
|
||||
import os.path
|
||||
import re
|
||||
import urllib2
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Transmission(Downloader):
    """Legacy (pre-rewrite, Python 2) Transmission downloader plugin."""

    type = ['torrent', 'torrent_magnet']
    log = CPLog(__name__)

    def download(self, data, movie, filedata = None):
        """Send a release to Transmission; returns True/False."""
        log.debug('Sending "%s" (%s) to Transmission.', (data.get('name'), data.get('type')))

        # Load host from config and split out port.
        host = self.conf('host').split(':')
        if not isInt(host[1]):
            log.error('Config properties are not filled in correctly, port is missing.')
            return False

        # Set parameters for Transmission: derive a folder name from the
        # release name (strip the '.<type>' suffix).
        folder_name = self.createFileName(data, filedata, movie)[:-len(data.get('type')) - 1]
        folder_path = os.path.join(self.conf('directory', default = ''), folder_name).rstrip(os.path.sep)

        # Create the empty folder to download to.
        self.makeDir(folder_path)

        params = {
            'paused': self.conf('paused', default = 0),
            'download-dir': folder_path
        }

        torrent_params = {}
        if self.conf('ratio'):
            # NOTE(review): seedRatioMode is set to the ratio value here,
            # although the RPC spec treats it as a mode flag.
            torrent_params = {
                'seedRatioLimit': self.conf('ratio'),
                'seedRatioMode': self.conf('ratio')
            }

        if not filedata and data.get('type') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        # Send request to Transmission
        try:
            trpc = TransmissionRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
            if data.get('type') == 'torrent_magnet':
                remote_torrent = trpc.add_torrent_uri(data.get('url'), arguments = params)
                torrent_params['trackerAdd'] = self.torrent_trackers
            else:
                remote_torrent = trpc.add_torrent_file(b64encode(filedata), arguments = params)

            # Change settings of added torrents
            if torrent_params:
                trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)

            return True
        except Exception, err:
            log.error('Failed to change settings for transfer: %s', err)
            return False
|
||||
|
||||
|
||||
class TransmissionRPC(object):

    """TransmissionRPC lite library"""
    # Legacy (Python 2) variant: hard-coded '/transmission/rpc' path and
    # Basic+Digest auth; superseded by the rewrite above.

    def __init__(self, host = 'localhost', port = 9091, username = None, password = None):
        super(TransmissionRPC, self).__init__()

        self.url = 'http://' + host + ':' + str(port) + '/transmission/rpc'
        # 'tag' correlates requests with responses; bumped per request.
        self.tag = 0
        # Session id is refreshed via the 409 handshake in _request().
        self.session_id = 0
        self.session = {}
        if username and password:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
            opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager), urllib2.HTTPDigestAuthHandler(password_manager))
            opener.addheaders = [('User-agent', 'couchpotato-transmission-client/1.0')]
            urllib2.install_opener(opener)
        elif username or password:
            log.debug('User or password missing, not using authentication.')
        self.session = self.get_session()

    def _request(self, ojson):
        # POST one RPC envelope; returns its 'arguments' dict or False.
        # Retries once after a 409 carrying a fresh session id.
        self.tag += 1
        headers = {'x-transmission-session-id': str(self.session_id)}
        request = urllib2.Request(self.url, json.dumps(ojson).encode('utf-8'), headers)
        try:
            open_request = urllib2.urlopen(request)
            response = json.loads(open_request.read())
            log.debug('response: %s', json.dumps(response))
            if response['result'] == 'success':
                log.debug('Transmission action successfull')
                return response['arguments']
            else:
                log.debug('Unknown failure sending command to Transmission. Return text is: %s', response['result'])
                return False
        except httplib.InvalidURL, err:
            log.error('Invalid Transmission host, check your config %s', err)
            return False
        except urllib2.HTTPError, err:
            if err.code == 401:
                log.error('Invalid Transmission Username or Password, check your config')
                return False
            elif err.code == 409:
                # CSRF handshake: extract the fresh session id from the body.
                msg = str(err.read())
                try:
                    self.session_id = \
                        re.search('X-Transmission-Session-Id:\s*(\w+)', msg).group(1)
                    log.debug('X-Transmission-Session-Id: %s', self.session_id)

                    # Resend request with the updated header.

                    return self._request(ojson)
                # NOTE(review): bare except hides unrelated errors here.
                except:
                    log.error('Unable to get Transmission Session-Id %s', err)
            else:
                log.error('TransmissionRPC HTTPError: %s', err)
        except urllib2.URLError, err:
            log.error('Unable to connect to Transmission %s', err)

    def get_session(self):
        # Fetch the daemon's session settings ('session-get').
        post_data = {'method': 'session-get', 'tag': self.tag}
        return self._request(post_data)

    def add_torrent_uri(self, torrent, arguments):
        # Add a torrent by magnet/URL ('torrent-add' with 'filename').
        arguments['filename'] = torrent
        post_data = {'arguments': arguments, 'method': 'torrent-add', 'tag': self.tag}
        return self._request(post_data)

    def add_torrent_file(self, torrent, arguments):
        # Add a torrent from base64-encoded file contents ('metainfo').
        arguments['metainfo'] = torrent
        post_data = {'arguments': arguments, 'method': 'torrent-add', 'tag': self.tag}
        return self._request(post_data)

    def set_torrent(self, torrent_id, arguments):
        # Change per-torrent settings ('torrent-set').
        arguments['ids'] = torrent_id
        post_data = {'arguments': arguments, 'method': 'torrent-set', 'tag': self.tag}
        return self._request(post_data)
|
||||
421
couchpotato/core/downloaders/utorrent.py
Normal file
421
couchpotato/core/downloaders/utorrent.py
Normal file
@@ -0,0 +1,421 @@
|
||||
from base64 import b16encode, b32decode
|
||||
from datetime import timedelta
|
||||
from hashlib import sha1
|
||||
import cookielib
|
||||
import httplib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import time
|
||||
import urllib
|
||||
import urllib2
|
||||
|
||||
from bencode import bencode as benc, bdecode
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, ss, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from multipartpost import MultipartPostHandler
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'uTorrent'
|
||||
|
||||
|
||||
class uTorrent(DownloaderBase):
    """Downloader that sends torrents to a uTorrent client through its WebUI API.

    Handles both .torrent files and magnet links, applies seeding settings,
    and reports download status back to CouchPotato.
    """

    protocol = ['torrent', 'torrent_magnet']
    utorrent_api = None

    # Bit flags of the torrent status field (torrent[1]) returned by the WebUI.
    status_flags = {
        'STARTED': 1,
        'CHECKING': 2,
        'CHECK-START': 4,
        'CHECKED': 8,
        'ERROR': 16,
        'PAUSED': 32,
        'QUEUED': 64,
        'LOADED': 128
    }

    def connect(self):
        """Create and cache a uTorrentAPI connection from the configured host.

        Returns the uTorrentAPI instance, or False when the host config is invalid.
        """
        # Load host from config and split out port.
        host = cleanHost(self.conf('host'), protocol = False).split(':')
        # Guard against a missing port (host configured as just "localhost"),
        # which previously raised an IndexError instead of a clear config error.
        if len(host) < 2 or not isInt(host[1]):
            log.error('Config properties are not filled in correctly, port is missing.')
            return False

        self.utorrent_api = uTorrentAPI(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))

        return self.utorrent_api

    def download(self, data = None, media = None, filedata = None):
        """Send a release to uTorrent and return its download id (info-hash).

        data: release info dict, needs 'protocol' and 'url'/'name'.
        media: media info dict, used when generating the torrent file name.
        filedata: raw .torrent contents, required for the 'torrent' protocol.
        Returns the downloadReturnId result, or False on failure.
        """
        if not media: media = {}
        if not data: data = {}

        log.debug("Sending '%s' (%s) to uTorrent.", (data.get('name'), data.get('protocol')))

        if not self.connect():
            return False

        settings = self.utorrent_api.get_settings()
        if not settings:
            return False

        # Fix settings in case they are not set for CPS compatibility
        new_settings = {}
        if not (settings.get('seed_prio_limitul') == 0 and settings['seed_prio_limitul_flag']):
            new_settings['seed_prio_limitul'] = 0
            new_settings['seed_prio_limitul_flag'] = True
            log.info('Updated uTorrent settings to set a torrent to complete after the seeding requirements are met.')

        if settings.get('bt.read_only_on_complete'): # This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
            new_settings['bt.read_only_on_complete'] = False
            log.info('Updated uTorrent settings to not set the files to read only after completing.')

        if new_settings:
            self.utorrent_api.set_settings(new_settings)

        torrent_params = {}
        if self.conf('label'):
            torrent_params['label'] = self.conf('label')

        if not filedata and data.get('protocol') == 'torrent':
            log.error('Failed sending torrent, no data')
            return False

        if data.get('protocol') == 'torrent_magnet':
            # Extract the info-hash from the magnet uri; guard against malformed
            # links, which previously raised an IndexError on the empty findall.
            found_hashes = re.findall(r'urn:btih:([\w]{32,40})', data.get('url', ''))
            if not found_hashes:
                log.error('Failed sending magnet, no info-hash found in url')
                return False
            torrent_hash = found_hashes[0].upper()
            torrent_params['trackers'] = '%0D%0A%0D%0A'.join(self.torrent_trackers)
        else:
            info = bdecode(filedata)['info']
            torrent_hash = sha1(benc(info)).hexdigest().upper()

        torrent_filename = self.createFileName(data, filedata, media)

        # uTorrent expects the ratio in permille and the seed time in seconds.
        if data.get('seed_ratio'):
            torrent_params['seed_override'] = 1
            torrent_params['seed_ratio'] = tryInt(tryFloat(data['seed_ratio']) * 1000)

        if data.get('seed_time'):
            torrent_params['seed_override'] = 1
            torrent_params['seed_time'] = tryInt(data['seed_time']) * 3600

        # Convert base 32 to hex
        if len(torrent_hash) == 32:
            torrent_hash = b16encode(b32decode(torrent_hash))

        # Send request to uTorrent
        if data.get('protocol') == 'torrent_magnet':
            self.utorrent_api.add_torrent_uri(torrent_filename, data.get('url'))
        else:
            self.utorrent_api.add_torrent_file(torrent_filename, filedata)

        # Change settings of added torrent
        self.utorrent_api.set_torrent(torrent_hash, torrent_params)
        if self.conf('paused', default = 0):
            self.utorrent_api.pause_torrent(torrent_hash)

        return self.downloadReturnId(torrent_hash)

    def test(self):
        """Check connectivity and verify the client is recent enough (3.0+)."""
        if self.connect():
            build_version = self.utorrent_api.get_build()
            if not build_version:
                return False
            if build_version < 25406: # This build corresponds to version 3.0.0 stable
                return False, 'Your uTorrent client is too old, please update to newest version.'
            return True

        return False

    def getAllDownloadStatus(self, ids):
        """Return a ReleaseDownloadList describing the torrents whose ids are in `ids`."""

        log.debug('Checking uTorrent download status.')

        if not self.connect():
            return []

        release_downloads = ReleaseDownloadList(self)

        data = self.utorrent_api.get_status()
        if not data:
            log.error('Error getting data from uTorrent')
            return []

        queue = json.loads(data)
        if queue.get('error'):
            log.error('Error getting data from uTorrent: %s', queue.get('error'))
            return []

        if not queue.get('torrents'):
            log.debug('Nothing in queue')
            return []

        # Get torrents
        for torrent in queue['torrents']:
            if torrent[0] in ids:

                # Get files of the torrent
                torrent_files = []
                try:
                    torrent_files = json.loads(self.utorrent_api.get_files(torrent[0]))
                    torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
                except Exception:
                    # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
                    log.debug('Failed getting files from torrent: %s', torrent[2])

                # torrent[4] is the progress in permille; 1000 means fully downloaded.
                status = 'busy'
                if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
                    status = 'seeding'
                elif torrent[1] & self.status_flags['ERROR']:
                    status = 'failed'
                elif torrent[4] == 1000:
                    status = 'completed'

                if not status == 'busy':
                    self.removeReadOnly(torrent_files)

                release_downloads.append({
                    'id': torrent[0],
                    'name': torrent[2],
                    'status': status,
                    'seed_ratio': float(torrent[7]) / 1000,
                    'original_status': torrent[1],
                    'timeleft': str(timedelta(seconds = torrent[10])),
                    'folder': sp(torrent[26]),
                    'files': torrent_files
                })

        return release_downloads

    def pause(self, release_download, pause = True):
        """Pause (or resume, when pause is False) the given release in uTorrent."""
        if not self.connect():
            return False
        return self.utorrent_api.pause_torrent(release_download['id'], pause)

    def removeFailed(self, release_download):
        """Remove a failed release from uTorrent, including its downloaded data."""
        log.info('%s failed downloading, deleting...', release_download['name'])
        if not self.connect():
            return False
        return self.utorrent_api.remove_torrent(release_download['id'], remove_data = True)

    def processComplete(self, release_download, delete_files = False):
        """Remove a finished release from uTorrent, optionally deleting its files."""
        log.debug('Requesting uTorrent to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
        if not self.connect():
            return False
        return self.utorrent_api.remove_torrent(release_download['id'], remove_data = delete_files)

    def removeReadOnly(self, files):
        """Remove the read-only flag from all of the given files."""
        for filepath in files:
            if os.path.isfile(filepath):
                # Windows only needs S_IWRITE, but we bitwise-or with current perms
                # to preserve other permission bits on Linux.
                os.chmod(filepath, stat.S_IWRITE | os.stat(filepath).st_mode)
|
||||
|
||||
class uTorrentAPI(object):
    """Thin wrapper around the uTorrent WebUI HTTP API.

    Manages the session token, cookies and optional basic authentication,
    and exposes one method per WebUI action. Methods return the raw response
    body (or a parsed value) on success, and False on request failure.
    """

    def __init__(self, host = 'localhost', port = 8000, username = None, password = None):

        super(uTorrentAPI, self).__init__()

        self.url = 'http://' + str(host) + ':' + str(port) + '/gui/'
        self.token = ''
        self.last_time = time.time()
        cookies = cookielib.CookieJar()
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies), MultipartPostHandler)
        self.opener.addheaders = [('User-agent', 'couchpotato-utorrent-client/1.0')]
        if username and password:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
            self.opener.add_handler(urllib2.HTTPBasicAuthHandler(password_manager))
        elif username or password:
            log.debug('User or password missing, not using authentication.')
        self.token = self.get_token()

    def _request(self, action, data = None):
        """Perform a WebUI request for `action`; return the raw body or False."""
        # The WebUI token expires; refresh ours every 30 minutes.
        if time.time() > self.last_time + 1800:
            self.last_time = time.time()
            self.token = self.get_token()
        request = urllib2.Request(self.url + '?token=' + self.token + '&' + action, data)
        try:
            open_request = self.opener.open(request)
            response = open_request.read()
            if response:
                return response
            else:
                log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
        except httplib.InvalidURL as err:
            log.error('Invalid uTorrent host, check your config %s', err)
        except urllib2.HTTPError as err:
            if err.code == 401:
                log.error('Invalid uTorrent Username or Password, check your config')
            else:
                log.error('uTorrent HTTPError: %s', err)
        except urllib2.URLError as err:
            log.error('Unable to connect to uTorrent %s', err)
        return False

    def get_token(self):
        """Fetch a fresh WebUI session token from token.html."""
        request = self.opener.open(self.url + 'token.html')
        token = re.findall('<div.*?>(.*?)</', request.read())[0]
        return token

    def add_torrent_uri(self, filename, torrent, add_folder = False):
        """Add a torrent by magnet/url; optionally download into a folder named `filename`."""
        action = 'action=add-url&s=%s' % urllib.quote(torrent)
        if add_folder:
            action += '&path=%s' % urllib.quote(filename)
        return self._request(action)

    def add_torrent_file(self, filename, filedata, add_folder = False):
        """Upload raw .torrent data; optionally download into a folder named `filename`."""
        action = 'action=add-file'
        if add_folder:
            action += '&path=%s' % urllib.quote(filename)
        return self._request(action, {'torrent_file': (ss(filename), filedata)})

    def set_torrent(self, hash, params):
        """Set per-torrent properties (label, seed limits, ...) for the given info-hash."""
        action = 'action=setprops&hash=%s' % hash
        for k, v in params.items():
            action += '&s=%s&v=%s' % (k, v)
        return self._request(action)

    def pause_torrent(self, hash, pause = True):
        """Pause (or unpause, when pause is False) the torrent with the given info-hash."""
        if pause:
            action = 'action=pause&hash=%s' % hash
        else:
            action = 'action=unpause&hash=%s' % hash
        return self._request(action)

    def stop_torrent(self, hash):
        """Stop the torrent with the given info-hash."""
        action = 'action=stop&hash=%s' % hash
        return self._request(action)

    def remove_torrent(self, hash, remove_data = False):
        """Remove a torrent; with remove_data also delete its downloaded files."""
        if remove_data:
            action = 'action=removedata&hash=%s' % hash
        else:
            action = 'action=remove&hash=%s' % hash
        return self._request(action)

    def get_status(self):
        """Return the raw JSON torrent list response."""
        action = 'list=1'
        return self._request(action)

    def get_settings(self):
        """Return the client settings as a dict, converting the WebUI's typed triples.

        Each entry of the 'settings' array is [name, type, value] where type
        0 = int, 1 = bool, 2 = string. Returns an empty dict on failure.
        """
        action = 'action=getsettings'
        settings_dict = {}
        try:
            utorrent_settings = json.loads(self._request(action))

            # Create settings dict
            for setting in utorrent_settings['settings']:
                if setting[1] == 0: # int
                    settings_dict[setting[0]] = int(setting[2] if not setting[2].strip() == '' else '0')
                elif setting[1] == 1: # bool
                    settings_dict[setting[0]] = True if setting[2] == 'true' else False
                elif setting[1] == 2: # string
                    settings_dict[setting[0]] = setting[2]

        except Exception as err:
            log.error('Failed to get settings from uTorrent: %s', err)

        return settings_dict

    def set_settings(self, settings_dict = None):
        """Push client settings; booleans are sent as 1/0 as the WebUI expects."""
        if not settings_dict: settings_dict = {}

        # Build a converted copy instead of mutating the caller's dict in place.
        converted = {}
        for key, value in settings_dict.items():
            if isinstance(value, bool):
                value = 1 if value else 0
            converted[key] = value

        action = 'action=setsetting' + ''.join(['&s=%s&v=%s' % (key, value) for (key, value) in converted.items()])
        return self._request(action)

    def get_files(self, hash):
        """Return the raw JSON file listing for the given info-hash."""
        action = 'action=getfiles&hash=%s' % hash
        return self._request(action)

    def get_build(self):
        """Return the client build number as an int, or False when unavailable."""
        data = self._request('')
        if not data:
            return False
        response = json.loads(data)
        build = response.get('build')
        # Guard against a response without a 'build' key, which previously
        # crashed with TypeError on int(None).
        if build is None:
            return False
        return int(build)
|
||||
|
||||
|
||||
# Settings schema rendered by the CouchPotato web UI (Settings > Downloaders).
# 'advanced' options are hidden unless advanced settings are enabled;
# 'enabler' toggles the whole provider on/off within the 'torrent' radio group.
config = [{
    'name': 'utorrent',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'utorrent',
            'label': 'uTorrent',
            'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> (3.0+) to download torrents.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'torrent',
                },
                {
                    'name': 'host',
                    'default': 'localhost:8000',
                    'description': 'Port can be found in settings when enabling WebUI.',
                },
                {
                    'name': 'username',
                },
                {
                    'name': 'password',
                    'type': 'password',
                },
                {
                    'name': 'label',
                    'description': 'Label to add torrent as.',
                },
                {
                    'name': 'remove_complete',
                    'label': 'Remove torrent',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Remove the torrent from uTorrent after it finished seeding.',
                },
                {
                    'name': 'delete_files',
                    'label': 'Remove files',
                    'default': True,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also remove the leftover files.',
                },
                {
                    'name': 'paused',
                    'type': 'bool',
                    'advanced': True,
                    'default': False,
                    'description': 'Add the torrent paused.',
                },
                {
                    'name': 'manual',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
@@ -1,55 +0,0 @@
|
||||
from .main import uTorrent
|
||||
|
||||
def start():
|
||||
return uTorrent()
|
||||
|
||||
config = [{
|
||||
'name': 'utorrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'utorrent',
|
||||
'label': 'uTorrent',
|
||||
'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> to download torrents.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:8000',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:8000</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'label',
|
||||
'description': 'Label to add torrent as.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,135 +0,0 @@
|
||||
from bencode import bencode, bdecode
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.helpers.encoding import isInt, ss
|
||||
from couchpotato.core.logger import CPLog
|
||||
from hashlib import sha1
|
||||
from multipartpost import MultipartPostHandler
|
||||
import cookielib
|
||||
import httplib
|
||||
import re
|
||||
import time
|
||||
import urllib
|
||||
import urllib2
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class uTorrent(Downloader):
|
||||
|
||||
type = ['torrent', 'torrent_magnet']
|
||||
utorrent_api = None
|
||||
|
||||
def download(self, data, movie, filedata = None):
|
||||
|
||||
log.debug('Sending "%s" (%s) to uTorrent.', (data.get('name'), data.get('type')))
|
||||
|
||||
# Load host from config and split out port.
|
||||
host = self.conf('host').split(':')
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
|
||||
torrent_params = {}
|
||||
if self.conf('label'):
|
||||
torrent_params['label'] = self.conf('label')
|
||||
|
||||
if not filedata and data.get('type') == 'torrent':
|
||||
log.error('Failed sending torrent, no data')
|
||||
return False
|
||||
if data.get('type') == 'torrent_magnet':
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', data.get('url'))[0].upper()
|
||||
torrent_params['trackers'] = '%0D%0A%0D%0A'.join(self.torrent_trackers)
|
||||
else:
|
||||
info = bdecode(filedata)["info"]
|
||||
torrent_hash = sha1(bencode(info)).hexdigest().upper()
|
||||
torrent_filename = self.createFileName(data, filedata, movie)
|
||||
|
||||
# Send request to uTorrent
|
||||
try:
|
||||
if not self.utorrent_api:
|
||||
self.utorrent_api = uTorrentAPI(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
|
||||
|
||||
if data.get('type') == 'torrent_magnet':
|
||||
self.utorrent_api.add_torrent_uri(data.get('url'))
|
||||
else:
|
||||
self.utorrent_api.add_torrent_file(torrent_filename, filedata)
|
||||
|
||||
# Change settings of added torrents
|
||||
self.utorrent_api.set_torrent(torrent_hash, torrent_params)
|
||||
if self.conf('paused', default = 0):
|
||||
self.utorrent_api.pause_torrent(torrent_hash)
|
||||
return True
|
||||
except Exception, err:
|
||||
log.error('Failed to send torrent to uTorrent: %s', err)
|
||||
return False
|
||||
|
||||
|
||||
class uTorrentAPI(object):
|
||||
|
||||
def __init__(self, host = 'localhost', port = 8000, username = None, password = None):
|
||||
|
||||
super(uTorrentAPI, self).__init__()
|
||||
|
||||
self.url = 'http://' + str(host) + ':' + str(port) + '/gui/'
|
||||
self.token = ''
|
||||
self.last_time = time.time()
|
||||
cookies = cookielib.CookieJar()
|
||||
self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies), MultipartPostHandler)
|
||||
self.opener.addheaders = [('User-agent', 'couchpotato-utorrent-client/1.0')]
|
||||
if username and password:
|
||||
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
|
||||
self.opener.add_handler(urllib2.HTTPBasicAuthHandler(password_manager))
|
||||
self.opener.add_handler(urllib2.HTTPDigestAuthHandler(password_manager))
|
||||
elif username or password:
|
||||
log.debug('User or password missing, not using authentication.')
|
||||
self.token = self.get_token()
|
||||
|
||||
def _request(self, action, data = None):
|
||||
if time.time() > self.last_time + 1800:
|
||||
self.last_time = time.time()
|
||||
self.token = self.get_token()
|
||||
request = urllib2.Request(self.url + "?token=" + self.token + "&" + action, data)
|
||||
try:
|
||||
open_request = self.opener.open(request)
|
||||
response = open_request.read()
|
||||
log.debug('response: %s', response)
|
||||
if response:
|
||||
log.debug('uTorrent action successfull')
|
||||
return response
|
||||
else:
|
||||
log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
|
||||
except httplib.InvalidURL, err:
|
||||
log.error('Invalid uTorrent host, check your config %s', err)
|
||||
except urllib2.HTTPError, err:
|
||||
if err.code == 401:
|
||||
log.error('Invalid uTorrent Username or Password, check your config')
|
||||
else:
|
||||
log.error('uTorrent HTTPError: %s', err)
|
||||
except urllib2.URLError, err:
|
||||
log.error('Unable to connect to uTorrent %s', err)
|
||||
return False
|
||||
|
||||
def get_token(self):
|
||||
request = self.opener.open(self.url + "token.html")
|
||||
token = re.findall("<div.*?>(.*?)</", request.read())[0]
|
||||
return token
|
||||
|
||||
def add_torrent_uri(self, torrent):
|
||||
action = "action=add-url&s=%s" % urllib.quote(torrent)
|
||||
return self._request(action)
|
||||
|
||||
def add_torrent_file(self, filename, filedata):
|
||||
action = "action=add-file"
|
||||
return self._request(action, {"torrent_file": (ss(filename), filedata)})
|
||||
|
||||
def set_torrent(self, hash, params):
|
||||
action = "action=setprops&hash=%s" % hash
|
||||
for k, v in params.iteritems():
|
||||
action += "&s=%s&v=%s" % (k, v)
|
||||
return self._request(action)
|
||||
|
||||
def pause_torrent(self, hash):
|
||||
action = "action=pause&hash=%s" % hash
|
||||
return self._request(action)
|
||||
@@ -1,12 +1,15 @@
|
||||
from axl.axel import Event
|
||||
from couchpotato.core.helpers.variable import mergeDicts, natcmp
|
||||
from couchpotato.core.logger import CPLog
|
||||
import threading
|
||||
import traceback
|
||||
|
||||
from axl.axel import Event
|
||||
from couchpotato.core.helpers.variable import mergeDicts, natsortKey
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
events = {}
|
||||
|
||||
|
||||
def runHandler(name, handler, *args, **kwargs):
|
||||
try:
|
||||
return handler(*args, **kwargs)
|
||||
@@ -14,44 +17,54 @@ def runHandler(name, handler, *args, **kwargs):
|
||||
from couchpotato.environment import Env
|
||||
log.error('Error in event "%s", that wasn\'t caught: %s%s', (name, traceback.format_exc(), Env.all() if not Env.get('dev') else ''))
|
||||
|
||||
|
||||
def addEvent(name, handler, priority = 100):
|
||||
|
||||
if events.get(name):
|
||||
e = events[name]
|
||||
else:
|
||||
e = events[name] = Event(name = name, threads = 10, exc_info = True, traceback = True, lock = threading.RLock())
|
||||
if not events.get(name):
|
||||
events[name] = []
|
||||
|
||||
def createHandle(*args, **kwargs):
|
||||
|
||||
h = None
|
||||
try:
|
||||
parent = handler.im_self
|
||||
bc = hasattr(parent, 'beforeCall')
|
||||
if bc: parent.beforeCall(handler)
|
||||
# Open handler
|
||||
has_parent = hasattr(handler, 'im_self')
|
||||
parent = None
|
||||
if has_parent:
|
||||
parent = handler.__self__
|
||||
bc = hasattr(parent, 'beforeCall')
|
||||
if bc: parent.beforeCall(handler)
|
||||
|
||||
# Main event
|
||||
h = runHandler(name, handler, *args, **kwargs)
|
||||
ac = hasattr(parent, 'afterCall')
|
||||
if ac: parent.afterCall(handler)
|
||||
|
||||
# Close handler
|
||||
if parent and has_parent:
|
||||
ac = hasattr(parent, 'afterCall')
|
||||
if ac: parent.afterCall(handler)
|
||||
except:
|
||||
h = runHandler(name, handler, *args, **kwargs)
|
||||
log.error('Failed creating handler %s %s: %s', (name, handler, traceback.format_exc()))
|
||||
|
||||
return h
|
||||
|
||||
e.handle(createHandle, priority = priority)
|
||||
events[name].append({
|
||||
'handler': createHandle,
|
||||
'priority': priority,
|
||||
})
|
||||
|
||||
def removeEvent(name, handler):
|
||||
e = events[name]
|
||||
e -= handler
|
||||
|
||||
def fireEvent(name, *args, **kwargs):
|
||||
if not events.get(name): return
|
||||
if name not in events: return
|
||||
|
||||
#log.debug('Firing event %s', name)
|
||||
try:
|
||||
|
||||
options = {
|
||||
'is_after_event': False, # Fire after event
|
||||
'on_complete': False, # onComplete event
|
||||
'single': False, # Return single handler
|
||||
'merge': False, # Merge items
|
||||
'in_order': False, # Fire them in specific order, waits for the other to finish
|
||||
'is_after_event': False, # Fire after event
|
||||
'on_complete': False, # onComplete event
|
||||
'single': False, # Return single handler
|
||||
'merge': False, # Merge items
|
||||
'in_order': False, # Fire them in specific order, waits for the other to finish
|
||||
}
|
||||
|
||||
# Do options
|
||||
@@ -62,28 +75,41 @@ def fireEvent(name, *args, **kwargs):
|
||||
options[x] = val
|
||||
except: pass
|
||||
|
||||
e = events[name]
|
||||
if len(events[name]) == 1:
|
||||
|
||||
# Lock this event
|
||||
e.lock.acquire()
|
||||
single = None
|
||||
try:
|
||||
single = events[name][0]['handler'](*args, **kwargs)
|
||||
except:
|
||||
log.error('Failed running single event: %s', traceback.format_exc())
|
||||
|
||||
e.asynchronous = False
|
||||
# Don't load thread for single event
|
||||
result = {
|
||||
'single': (single is not None, single),
|
||||
}
|
||||
|
||||
# Make sure only 1 event is fired at a time when order is wanted
|
||||
kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None
|
||||
kwargs['event_return_on_result'] = options['single']
|
||||
else:
|
||||
|
||||
# Fire
|
||||
result = e(*args, **kwargs)
|
||||
e = Event(name = name, threads = 10, exc_info = True, traceback = True)
|
||||
|
||||
# Release lock for this event
|
||||
e.lock.release()
|
||||
for event in events[name]:
|
||||
e.handle(event['handler'], priority = event['priority'])
|
||||
|
||||
# Make sure only 1 event is fired at a time when order is wanted
|
||||
kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None
|
||||
kwargs['event_return_on_result'] = options['single']
|
||||
|
||||
# Fire
|
||||
result = e(*args, **kwargs)
|
||||
|
||||
result_keys = result.keys()
|
||||
result_keys.sort(key = natsortKey)
|
||||
|
||||
if options['single'] and not options['merge']:
|
||||
results = None
|
||||
|
||||
# Loop over results, stop when first not None result is found.
|
||||
for r_key in sorted(result.iterkeys(), cmp = natcmp):
|
||||
for r_key in result_keys:
|
||||
r = result[r_key]
|
||||
if r[0] is True and r[1] is not None:
|
||||
results = r[1]
|
||||
@@ -95,7 +121,7 @@ def fireEvent(name, *args, **kwargs):
|
||||
|
||||
else:
|
||||
results = []
|
||||
for r_key in sorted(result.iterkeys(), cmp = natcmp):
|
||||
for r_key in result_keys:
|
||||
r = result[r_key]
|
||||
if r[0] == True and r[1]:
|
||||
results.append(r[1])
|
||||
@@ -104,11 +130,14 @@ def fireEvent(name, *args, **kwargs):
|
||||
|
||||
# Merge
|
||||
if options['merge'] and len(results) > 0:
|
||||
|
||||
# Dict
|
||||
if isinstance(results[0], dict):
|
||||
results.reverse()
|
||||
|
||||
merged = {}
|
||||
for result in results:
|
||||
merged = mergeDicts(merged, result)
|
||||
merged = mergeDicts(merged, result, prepend_list = True)
|
||||
|
||||
results = merged
|
||||
# Lists
|
||||
@@ -132,23 +161,24 @@ def fireEvent(name, *args, **kwargs):
|
||||
options['on_complete']()
|
||||
|
||||
return results
|
||||
except KeyError, e:
|
||||
pass
|
||||
except Exception:
|
||||
log.error('%s: %s', (name, traceback.format_exc()))
|
||||
|
||||
|
||||
def fireEventAsync(*args, **kwargs):
|
||||
try:
|
||||
my_thread = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
|
||||
my_thread.setDaemon(True)
|
||||
my_thread.start()
|
||||
t = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
|
||||
t.setDaemon(True)
|
||||
t.start()
|
||||
return True
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
log.error('%s: %s', (args[0], e))
|
||||
|
||||
|
||||
def errorHandler(error):
|
||||
etype, value, tb = error
|
||||
log.error(''.join(traceback.format_exception(etype, value, tb)))
|
||||
|
||||
|
||||
def getEvent(name):
|
||||
return events[name]
|
||||
|
||||
@@ -1,17 +1,24 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from string import ascii_letters, digits
|
||||
from urllib import quote_plus
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
import unicodedata
|
||||
|
||||
from chardet import detect
|
||||
from couchpotato.core.logger import CPLog
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
def toSafeString(original):
|
||||
valid_chars = "-_.() %s%s" % (ascii_letters, digits)
|
||||
cleanedFilename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
|
||||
return ''.join(c for c in cleanedFilename if c in valid_chars)
|
||||
cleaned_filename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
|
||||
valid_string = ''.join(c for c in cleaned_filename if c in valid_chars)
|
||||
return ' '.join(valid_string.split())
|
||||
|
||||
|
||||
def simplifyString(original):
|
||||
string = stripAccents(original.lower())
|
||||
@@ -19,15 +26,19 @@ def simplifyString(original):
|
||||
split = re.split('\W+|_', string.lower())
|
||||
return toUnicode(' '.join(split))
|
||||
|
||||
|
||||
def toUnicode(original, *args):
|
||||
try:
|
||||
if isinstance(original, unicode):
|
||||
return original
|
||||
else:
|
||||
try:
|
||||
return unicode(original, *args)
|
||||
return six.text_type(original, *args)
|
||||
except:
|
||||
try:
|
||||
detected = detect(original)
|
||||
if detected.get('encoding') == 'utf-8':
|
||||
return original.decode('utf-8')
|
||||
return ek(original, *args)
|
||||
except:
|
||||
raise
|
||||
@@ -36,9 +47,46 @@ def toUnicode(original, *args):
|
||||
ascii_text = str(original).encode('string_escape')
|
||||
return toUnicode(ascii_text)
|
||||
|
||||
|
||||
def ss(original, *args):
|
||||
from couchpotato.environment import Env
|
||||
return toUnicode(original, *args).encode(Env.get('encoding'))
|
||||
|
||||
u_original = toUnicode(original, *args)
|
||||
try:
|
||||
from couchpotato.environment import Env
|
||||
return u_original.encode(Env.get('encoding'))
|
||||
except Exception as e:
|
||||
log.debug('Failed ss encoding char, force UTF8: %s', e)
|
||||
try:
|
||||
return u_original.encode(Env.get('encoding'), 'replace')
|
||||
except:
|
||||
return u_original.encode('utf-8', 'replace')
|
||||
|
||||
|
||||
def sp(path, *args):
|
||||
|
||||
# Standardise encoding, normalise case, path and strip trailing '/' or '\'
|
||||
if not path or len(path) == 0:
|
||||
return path
|
||||
|
||||
# convert windows path (from remote box) to *nix path
|
||||
if os.path.sep == '/' and '\\' in path:
|
||||
path = '/' + path.replace(':', '').replace('\\', '/')
|
||||
|
||||
path = os.path.normpath(ss(path, *args))
|
||||
|
||||
# Remove any trailing path separators
|
||||
if path != os.path.sep:
|
||||
path = path.rstrip(os.path.sep)
|
||||
|
||||
# Add a trailing separator in case it is a root folder on windows (crashes guessit)
|
||||
if len(path) == 2 and path[1] == ':':
|
||||
path = path + os.path.sep
|
||||
|
||||
# Replace *NIX ambiguous '//' at the beginning of a path with '/' (crashes guessit)
|
||||
path = re.sub('^//', '/', path)
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def ek(original, *args):
|
||||
if isinstance(original, (str, unicode)):
|
||||
@@ -50,6 +98,7 @@ def ek(original, *args):
|
||||
|
||||
return original
|
||||
|
||||
|
||||
def isInt(value):
|
||||
try:
|
||||
int(value)
|
||||
@@ -57,14 +106,16 @@ def isInt(value):
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
def stripAccents(s):
|
||||
return ''.join((c for c in unicodedata.normalize('NFD', toUnicode(s)) if unicodedata.category(c) != 'Mn'))
|
||||
|
||||
|
||||
def tryUrlencode(s):
|
||||
new = u''
|
||||
if isinstance(s, (dict)):
|
||||
for key, value in s.iteritems():
|
||||
new += u'&%s=%s' % (key, tryUrlencode(value))
|
||||
new = six.u('')
|
||||
if isinstance(s, dict):
|
||||
for key, value in s.items():
|
||||
new += six.u('&%s=%s') % (key, tryUrlencode(value))
|
||||
|
||||
return new[1:]
|
||||
else:
|
||||
|
||||
@@ -1,19 +1,21 @@
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import natcmp
|
||||
from flask.globals import current_app
|
||||
from flask.helpers import json, make_response
|
||||
from urllib import unquote
|
||||
from werkzeug.urls import url_decode
|
||||
import flask
|
||||
import re
|
||||
|
||||
def getParams():
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import natsortKey
|
||||
|
||||
|
||||
def getParams(params):
|
||||
|
||||
params = url_decode(getattr(flask.request, 'environ').get('QUERY_STRING', ''))
|
||||
reg = re.compile('^[a-z0-9_\.]+$')
|
||||
|
||||
current = temp = {}
|
||||
for param, value in sorted(params.iteritems()):
|
||||
# Sort keys
|
||||
param_keys = params.keys()
|
||||
param_keys.sort(key = natsortKey)
|
||||
|
||||
temp = {}
|
||||
for param in param_keys:
|
||||
value = params[param]
|
||||
|
||||
nest = re.split("([\[\]]+)", param)
|
||||
if len(nest) > 1:
|
||||
@@ -36,16 +38,31 @@ def getParams():
|
||||
current = current[item]
|
||||
else:
|
||||
temp[param] = toUnicode(unquote(value))
|
||||
if temp[param].lower() in ['true', 'false']:
|
||||
temp[param] = temp[param].lower() != 'false'
|
||||
|
||||
return dictToList(temp)
|
||||
|
||||
non_decimal = re.compile(r'[^\d.]+')
|
||||
|
||||
def dictToList(params):
|
||||
|
||||
if type(params) is dict:
|
||||
new = {}
|
||||
for x, value in params.iteritems():
|
||||
for x, value in params.items():
|
||||
try:
|
||||
new_value = [dictToList(value[k]) for k in sorted(value.iterkeys(), cmp = natcmp)]
|
||||
convert = lambda text: int(text) if text.isdigit() else text.lower()
|
||||
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
|
||||
sorted_keys = sorted(value.keys(), key = alphanum_key)
|
||||
|
||||
all_ints = 0
|
||||
for pnr in sorted_keys:
|
||||
all_ints += 1 if non_decimal.sub('', pnr) == pnr else 0
|
||||
|
||||
if all_ints == len(sorted_keys):
|
||||
new_value = [dictToList(value[k]) for k in sorted_keys]
|
||||
else:
|
||||
new_value = value
|
||||
except:
|
||||
new_value = value
|
||||
|
||||
@@ -54,29 +71,3 @@ def dictToList(params):
|
||||
new = params
|
||||
|
||||
return new
|
||||
|
||||
def getParam(attr, default = None):
|
||||
try:
|
||||
return getParams().get(attr, default)
|
||||
except:
|
||||
return default
|
||||
|
||||
def padded_jsonify(callback, *args, **kwargs):
|
||||
content = str(callback) + '(' + json.dumps(dict(*args, **kwargs)) + ')'
|
||||
return getattr(current_app, 'response_class')(content, mimetype = 'text/javascript')
|
||||
|
||||
def jsonify(mimetype, *args, **kwargs):
|
||||
content = json.dumps(dict(*args, **kwargs))
|
||||
return getattr(current_app, 'response_class')(content, mimetype = mimetype)
|
||||
|
||||
def jsonified(*args, **kwargs):
|
||||
callback = getParam('callback_func', None)
|
||||
if callback:
|
||||
content = padded_jsonify(callback, *args, **kwargs)
|
||||
else:
|
||||
content = jsonify('application/json', *args, **kwargs)
|
||||
|
||||
response = make_response(content)
|
||||
response.cache_control.no_cache = True
|
||||
|
||||
return response
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
import xml.etree.ElementTree as XMLTree
|
||||
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class RSS(object):
|
||||
|
||||
def getTextElements(self, xml, path):
|
||||
''' Find elements and return tree'''
|
||||
""" Find elements and return tree"""
|
||||
|
||||
textelements = []
|
||||
try:
|
||||
@@ -28,7 +31,7 @@ class RSS(object):
|
||||
return elements
|
||||
|
||||
def getElement(self, xml, path):
|
||||
''' Find element and return text'''
|
||||
""" Find element and return text"""
|
||||
|
||||
try:
|
||||
return xml.find(path)
|
||||
@@ -36,7 +39,7 @@ class RSS(object):
|
||||
return
|
||||
|
||||
def getTextElement(self, xml, path):
|
||||
''' Find element and return text'''
|
||||
""" Find element and return text"""
|
||||
|
||||
try:
|
||||
return xml.find(path).text
|
||||
@@ -46,6 +49,6 @@ class RSS(object):
|
||||
def getItems(self, data, path = 'channel/item'):
|
||||
try:
|
||||
return XMLTree.parse(data).findall(path)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
log.error('Error parsing RSS. %s', e)
|
||||
return []
|
||||
|
||||
327
couchpotato/core/helpers/variable.py
Normal file → Executable file
327
couchpotato/core/helpers/variable.py
Normal file → Executable file
@@ -1,23 +1,52 @@
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toSafeString
|
||||
from couchpotato.core.logger import CPLog
|
||||
import collections
|
||||
import ctypes
|
||||
import hashlib
|
||||
import os.path
|
||||
import os
|
||||
import platform
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss, sp
|
||||
from couchpotato.core.logger import CPLog
|
||||
import six
|
||||
from six.moves import map, zip, filter
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
def fnEscape(pattern):
|
||||
return pattern.replace('[', '[[').replace(']', '[]]').replace('[[', '[[]')
|
||||
|
||||
|
||||
def link(src, dst):
|
||||
if os.name == 'nt':
|
||||
import ctypes
|
||||
if ctypes.windll.kernel32.CreateHardLinkW(six.text_type(dst), six.text_type(src), 0) == 0: raise ctypes.WinError()
|
||||
else:
|
||||
os.link(src, dst)
|
||||
|
||||
|
||||
def symlink(src, dst):
|
||||
if os.name == 'nt':
|
||||
import ctypes
|
||||
if ctypes.windll.kernel32.CreateSymbolicLinkW(six.text_type(dst), six.text_type(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
|
||||
else:
|
||||
os.symlink(src, dst)
|
||||
|
||||
|
||||
def getUserDir():
|
||||
try:
|
||||
import pwd
|
||||
os.environ['HOME'] = pwd.getpwuid(os.geteuid()).pw_dir
|
||||
os.environ['HOME'] = sp(pwd.getpwuid(os.geteuid()).pw_dir)
|
||||
except:
|
||||
pass
|
||||
|
||||
return os.path.expanduser('~')
|
||||
return sp(os.path.expanduser('~'))
|
||||
|
||||
|
||||
def getDownloadDir():
|
||||
user_dir = getUserDir()
|
||||
@@ -31,6 +60,7 @@ def getDownloadDir():
|
||||
|
||||
return user_dir
|
||||
|
||||
|
||||
def getDataDir():
|
||||
|
||||
# Windows
|
||||
@@ -50,10 +80,12 @@ def getDataDir():
|
||||
# Linux
|
||||
return os.path.join(user_dir, '.couchpotato')
|
||||
|
||||
def isDict(object):
|
||||
return isinstance(object, dict)
|
||||
|
||||
def mergeDicts(a, b):
|
||||
def isDict(obj):
|
||||
return isinstance(obj, dict)
|
||||
|
||||
|
||||
def mergeDicts(a, b, prepend_list = False):
|
||||
assert isDict(a), isDict(b)
|
||||
dst = a.copy()
|
||||
|
||||
@@ -67,12 +99,13 @@ def mergeDicts(a, b):
|
||||
if isDict(current_src[key]) and isDict(current_dst[key]):
|
||||
stack.append((current_dst[key], current_src[key]))
|
||||
elif isinstance(current_src[key], list) and isinstance(current_dst[key], list):
|
||||
current_dst[key].extend(current_src[key])
|
||||
current_dst[key] = current_src[key] + current_dst[key] if prepend_list else current_dst[key] + current_src[key]
|
||||
current_dst[key] = removeListDuplicates(current_dst[key])
|
||||
else:
|
||||
current_dst[key] = current_src[key]
|
||||
return dst
|
||||
|
||||
|
||||
def removeListDuplicates(seq):
|
||||
checked = []
|
||||
for e in seq:
|
||||
@@ -80,31 +113,79 @@ def removeListDuplicates(seq):
|
||||
checked.append(e)
|
||||
return checked
|
||||
|
||||
|
||||
def flattenList(l):
|
||||
if isinstance(l, list):
|
||||
return sum(map(flattenList, l))
|
||||
else:
|
||||
return l
|
||||
|
||||
|
||||
def md5(text):
|
||||
return hashlib.md5(text).hexdigest()
|
||||
return hashlib.md5(ss(text)).hexdigest()
|
||||
|
||||
|
||||
def sha1(text):
|
||||
return hashlib.sha1(text).hexdigest()
|
||||
|
||||
|
||||
def isLocalIP(ip):
|
||||
ip = ip.lstrip('htps:/')
|
||||
regex = '/(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1)$/'
|
||||
return re.search(regex, ip) is not None or 'localhost' in ip or ip[:4] == '127.'
|
||||
|
||||
|
||||
def getExt(filename):
|
||||
return os.path.splitext(filename)[1][1:]
|
||||
|
||||
def cleanHost(host):
|
||||
if not host.startswith(('http://', 'https://')):
|
||||
host = 'http://' + host
|
||||
|
||||
if not host.endswith('/'):
|
||||
def cleanHost(host, protocol = True, ssl = False, username = None, password = None):
|
||||
"""Return a cleaned up host with given url options set
|
||||
|
||||
Changes protocol to https if ssl is set to True and http if ssl is set to false.
|
||||
>>> cleanHost("localhost:80", ssl=True)
|
||||
'https://localhost:80/'
|
||||
>>> cleanHost("localhost:80", ssl=False)
|
||||
'http://localhost:80/'
|
||||
|
||||
Username and password is managed with the username and password variables
|
||||
>>> cleanHost("localhost:80", username="user", password="passwd")
|
||||
'http://user:passwd@localhost:80/'
|
||||
|
||||
Output without scheme (protocol) can be forced with protocol=False
|
||||
>>> cleanHost("localhost:80", protocol=False)
|
||||
'localhost:80'
|
||||
"""
|
||||
|
||||
if not '://' in host and protocol:
|
||||
host = ('https://' if ssl else 'http://') + host
|
||||
|
||||
if not protocol:
|
||||
host = host.split('://', 1)[-1]
|
||||
|
||||
if protocol and username and password:
|
||||
try:
|
||||
auth = re.findall('^(?:.+?//)(.+?):(.+?)@(?:.+)$', host)
|
||||
if auth:
|
||||
log.error('Cleanhost error: auth already defined in url: %s, please remove BasicAuth from url.', host)
|
||||
else:
|
||||
host = host.replace('://', '://%s:%s@' % (username, password), 1)
|
||||
except:
|
||||
pass
|
||||
|
||||
host = host.rstrip('/ ')
|
||||
if protocol:
|
||||
host += '/'
|
||||
|
||||
return host
|
||||
|
||||
def getImdb(txt, check_inside = True, multiple = False):
|
||||
|
||||
def getImdb(txt, check_inside = False, multiple = False):
|
||||
|
||||
if not check_inside:
|
||||
txt = simplifyString(txt)
|
||||
else:
|
||||
txt = ss(txt)
|
||||
|
||||
if check_inside and os.path.isfile(txt):
|
||||
output = open(txt, 'r')
|
||||
@@ -112,60 +193,220 @@ def getImdb(txt, check_inside = True, multiple = False):
|
||||
output.close()
|
||||
|
||||
try:
|
||||
ids = re.findall('(tt\d{7})', txt)
|
||||
ids = re.findall('(tt\d{4,7})', txt)
|
||||
|
||||
if multiple:
|
||||
return ids if len(ids) > 0 else []
|
||||
return ids[0]
|
||||
return removeDuplicate(['tt%07d' % tryInt(x[2:]) for x in ids]) if len(ids) > 0 else []
|
||||
|
||||
return 'tt%07d' % tryInt(ids[0][2:])
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
def tryInt(s):
|
||||
|
||||
def tryInt(s, default = 0):
|
||||
try: return int(s)
|
||||
except: return 0
|
||||
except: return default
|
||||
|
||||
|
||||
def tryFloat(s):
|
||||
try: return float(s) if '.' in s else tryInt(s)
|
||||
try:
|
||||
if isinstance(s, str):
|
||||
return float(s) if '.' in s else tryInt(s)
|
||||
else:
|
||||
return float(s)
|
||||
except: return 0
|
||||
|
||||
def natsortKey(s):
|
||||
return map(tryInt, re.findall(r'(\d+|\D+)', s))
|
||||
|
||||
def natcmp(a, b):
|
||||
return cmp(natsortKey(a), natsortKey(b))
|
||||
def natsortKey(string_):
|
||||
"""See http://www.codinghorror.com/blog/archives/001018.html"""
|
||||
return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_)]
|
||||
|
||||
def getTitle(library_dict):
|
||||
|
||||
def toIterable(value):
|
||||
if isinstance(value, collections.Iterable):
|
||||
return value
|
||||
return [value]
|
||||
|
||||
|
||||
def getIdentifier(media):
|
||||
return media.get('identifier') or media.get('identifiers', {}).get('imdb')
|
||||
|
||||
|
||||
def getTitle(media_dict):
|
||||
try:
|
||||
try:
|
||||
return library_dict['titles'][0]['title']
|
||||
return media_dict['title']
|
||||
except:
|
||||
try:
|
||||
for title in library_dict.titles:
|
||||
if title.default:
|
||||
return title.title
|
||||
return media_dict['titles'][0]
|
||||
except:
|
||||
log.error('Could not get title for %s', library_dict.identifier)
|
||||
return None
|
||||
|
||||
log.error('Could not get title for %s', library_dict['identifier'])
|
||||
return None
|
||||
try:
|
||||
return media_dict['info']['titles'][0]
|
||||
except:
|
||||
try:
|
||||
return media_dict['media']['info']['titles'][0]
|
||||
except:
|
||||
log.error('Could not get title for %s', getIdentifier(media_dict))
|
||||
return None
|
||||
except:
|
||||
log.error('Could not get title for library item: %s', library_dict)
|
||||
log.error('Could not get title for library item: %s', media_dict)
|
||||
return None
|
||||
|
||||
|
||||
def possibleTitles(raw_title):
|
||||
|
||||
titles = []
|
||||
titles = [
|
||||
toSafeString(raw_title).lower(),
|
||||
raw_title.lower(),
|
||||
simplifyString(raw_title)
|
||||
]
|
||||
|
||||
titles.append(toSafeString(raw_title).lower())
|
||||
titles.append(raw_title.lower())
|
||||
titles.append(simplifyString(raw_title))
|
||||
# replace some chars
|
||||
new_title = raw_title.replace('&', 'and')
|
||||
titles.append(simplifyString(new_title))
|
||||
|
||||
return removeDuplicate(titles)
|
||||
|
||||
return list(set(titles))
|
||||
|
||||
def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
|
||||
return ''.join(random.choice(chars) for x in range(size))
|
||||
|
||||
def splitString(str, split_on = ','):
|
||||
return [x.strip() for x in str.split(split_on)]
|
||||
|
||||
def splitString(str, split_on = ',', clean = True):
|
||||
l = [x.strip() for x in str.split(split_on)] if str else []
|
||||
return removeEmpty(l) if clean else l
|
||||
|
||||
|
||||
def removeEmpty(l):
|
||||
return list(filter(None, l))
|
||||
|
||||
|
||||
def removeDuplicate(l):
|
||||
seen = set()
|
||||
return [x for x in l if x not in seen and not seen.add(x)]
|
||||
|
||||
|
||||
def dictIsSubset(a, b):
|
||||
return all([k in b and b[k] == v for k, v in a.items()])
|
||||
|
||||
|
||||
# Returns True if sub_folder is the same as or inside base_folder
|
||||
def isSubFolder(sub_folder, base_folder):
|
||||
if base_folder and sub_folder:
|
||||
base = sp(os.path.realpath(base_folder)) + os.path.sep
|
||||
subfolder = sp(os.path.realpath(sub_folder)) + os.path.sep
|
||||
return os.path.commonprefix([subfolder, base]) == base
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# From SABNZBD
|
||||
re_password = [re.compile(r'(.+){{([^{}]+)}}$'), re.compile(r'(.+)\s+password\s*=\s*(.+)$', re.I)]
|
||||
|
||||
|
||||
def scanForPassword(name):
|
||||
m = None
|
||||
for reg in re_password:
|
||||
m = reg.search(name)
|
||||
if m: break
|
||||
|
||||
if m:
|
||||
return m.group(1).strip('. '), m.group(2).strip()
|
||||
|
||||
|
||||
under_pat = re.compile(r'_([a-z])')
|
||||
|
||||
def underscoreToCamel(name):
|
||||
return under_pat.sub(lambda x: x.group(1).upper(), name)
|
||||
|
||||
|
||||
def removePyc(folder, only_excess = True, show_logs = True):
|
||||
|
||||
folder = sp(folder)
|
||||
|
||||
for root, dirs, files in os.walk(folder):
|
||||
|
||||
pyc_files = filter(lambda filename: filename.endswith('.pyc'), files)
|
||||
py_files = set(filter(lambda filename: filename.endswith('.py'), files))
|
||||
excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
|
||||
|
||||
for excess_pyc_file in excess_pyc_files:
|
||||
full_path = os.path.join(root, excess_pyc_file)
|
||||
if show_logs: log.debug('Removing old PYC file: %s', full_path)
|
||||
try:
|
||||
os.remove(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
for dir_name in dirs:
|
||||
full_path = os.path.join(root, dir_name)
|
||||
if len(os.listdir(full_path)) == 0:
|
||||
try:
|
||||
os.rmdir(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
|
||||
def getFreeSpace(directories):
|
||||
|
||||
single = not isinstance(directories, (tuple, list))
|
||||
if single:
|
||||
directories = [directories]
|
||||
|
||||
free_space = {}
|
||||
for folder in directories:
|
||||
|
||||
size = None
|
||||
if os.path.isdir(folder):
|
||||
if os.name == 'nt':
|
||||
_, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
|
||||
ctypes.c_ulonglong()
|
||||
if sys.version_info >= (3,) or isinstance(folder, unicode):
|
||||
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW #@UndefinedVariable
|
||||
else:
|
||||
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA #@UndefinedVariable
|
||||
ret = fun(folder, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
|
||||
if ret == 0:
|
||||
raise ctypes.WinError()
|
||||
return [total.value, free.value]
|
||||
else:
|
||||
s = os.statvfs(folder)
|
||||
size = [s.f_blocks * s.f_frsize / (1024 * 1024), (s.f_bavail * s.f_frsize) / (1024 * 1024)]
|
||||
|
||||
if single: return size
|
||||
|
||||
free_space[folder] = size
|
||||
|
||||
return free_space
|
||||
|
||||
|
||||
def getSize(paths):
|
||||
|
||||
single = not isinstance(paths, (tuple, list))
|
||||
if single:
|
||||
paths = [paths]
|
||||
|
||||
total_size = 0
|
||||
for path in paths:
|
||||
path = sp(path)
|
||||
|
||||
if os.path.isdir(path):
|
||||
total_size = 0
|
||||
for dirpath, _, filenames in os.walk(path):
|
||||
for f in filenames:
|
||||
total_size += os.path.getsize(sp(os.path.join(dirpath, f)))
|
||||
|
||||
elif os.path.isfile(path):
|
||||
total_size += os.path.getsize(path)
|
||||
|
||||
return total_size / 1048576 # MB
|
||||
|
||||
|
||||
def find(func, iterable):
|
||||
for item in iterable:
|
||||
if func(item):
|
||||
return item
|
||||
|
||||
return None
|
||||
|
||||
@@ -1,59 +1,71 @@
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
import glob
|
||||
import os
|
||||
import traceback
|
||||
from importhelper import import_module
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Loader(object):
|
||||
|
||||
plugins = {}
|
||||
providers = {}
|
||||
|
||||
modules = {}
|
||||
def __init__(self):
|
||||
self.plugins = {}
|
||||
self.providers = {}
|
||||
self.modules = {}
|
||||
self.paths = {}
|
||||
|
||||
def preload(self, root = ''):
|
||||
|
||||
core = os.path.join(root, 'couchpotato', 'core')
|
||||
|
||||
self.paths = {
|
||||
self.paths.update({
|
||||
'core': (0, 'couchpotato.core._base', os.path.join(core, '_base')),
|
||||
'plugin': (1, 'couchpotato.core.plugins', os.path.join(core, 'plugins')),
|
||||
'notifications': (20, 'couchpotato.core.notifications', os.path.join(core, 'notifications')),
|
||||
'downloaders': (20, 'couchpotato.core.downloaders', os.path.join(core, 'downloaders')),
|
||||
}
|
||||
})
|
||||
|
||||
# Add providers to loader
|
||||
provider_dir = os.path.join(root, 'couchpotato', 'core', 'providers')
|
||||
for provider in os.listdir(provider_dir):
|
||||
path = os.path.join(provider_dir, provider)
|
||||
if os.path.isdir(path):
|
||||
self.paths[provider + '_provider'] = (25, 'couchpotato.core.providers.' + provider, path)
|
||||
# Add media to loader
|
||||
self.addPath(root, ['couchpotato', 'core', 'media'], 25, recursive = True)
|
||||
|
||||
# Add custom plugin folder
|
||||
from couchpotato.environment import Env
|
||||
custom_plugin_dir = os.path.join(Env.get('data_dir'), 'custom_plugins')
|
||||
if os.path.isdir(custom_plugin_dir):
|
||||
sys.path.insert(0, custom_plugin_dir)
|
||||
self.paths['custom_plugins'] = (30, '', custom_plugin_dir)
|
||||
|
||||
for plugin_type, plugin_tuple in self.paths.iteritems():
|
||||
# Loop over all paths and add to module list
|
||||
for plugin_type, plugin_tuple in self.paths.items():
|
||||
priority, module, dir_name = plugin_tuple
|
||||
self.addFromDir(plugin_type, priority, module, dir_name)
|
||||
|
||||
def run(self):
|
||||
did_save = 0
|
||||
|
||||
for priority in self.modules:
|
||||
for module_name, plugin in sorted(self.modules[priority].iteritems()):
|
||||
for priority in sorted(self.modules):
|
||||
for module_name, plugin in sorted(self.modules[priority].items()):
|
||||
|
||||
# Load module
|
||||
try:
|
||||
m = getattr(self.loadModule(module_name), plugin.get('name'))
|
||||
if plugin.get('name')[:2] == '__':
|
||||
continue
|
||||
|
||||
log.info('Loading %s: %s', (plugin['type'], plugin['name']))
|
||||
m = self.loadModule(module_name)
|
||||
if m is None:
|
||||
continue
|
||||
|
||||
# Save default settings for plugin/provider
|
||||
did_save += self.loadSettings(m, module_name, save = False)
|
||||
|
||||
self.loadPlugins(m, plugin.get('name'))
|
||||
self.loadPlugins(m, plugin.get('type'), plugin.get('name'))
|
||||
except ImportError as e:
|
||||
# todo:: subclass ImportError for missing requirements.
|
||||
if (e.message.lower().startswith("missing")):
|
||||
if e.message.lower().startswith("missing"):
|
||||
log.error(e.message)
|
||||
pass
|
||||
# todo:: this needs to be more descriptive.
|
||||
@@ -65,27 +77,40 @@ class Loader(object):
|
||||
if did_save:
|
||||
fireEvent('settings.save')
|
||||
|
||||
def addPath(self, root, base_path, priority, recursive = False):
|
||||
root_path = os.path.join(root, *base_path)
|
||||
for filename in os.listdir(root_path):
|
||||
path = os.path.join(root_path, filename)
|
||||
if os.path.isdir(path) and filename[:2] != '__':
|
||||
if six.u('__init__.py') in os.listdir(path):
|
||||
new_base_path = ''.join(s + '.' for s in base_path) + filename
|
||||
self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)
|
||||
|
||||
if recursive:
|
||||
self.addPath(root, base_path + [filename], priority, recursive = True)
|
||||
|
||||
def addFromDir(self, plugin_type, priority, module, dir_name):
|
||||
|
||||
# Load dir module
|
||||
try:
|
||||
m = __import__(module)
|
||||
splitted = module.split('.')
|
||||
for sub in splitted[1:]:
|
||||
m = getattr(m, sub)
|
||||
if module and len(module) > 0:
|
||||
self.addModule(priority, plugin_type, module, os.path.basename(dir_name))
|
||||
|
||||
if hasattr(m, 'config'):
|
||||
fireEvent('settings.options', splitted[-1] + '_config', getattr(m, 'config'))
|
||||
except:
|
||||
raise
|
||||
|
||||
for cur_file in glob.glob(os.path.join(dir_name, '*')):
|
||||
name = os.path.basename(cur_file)
|
||||
if os.path.isdir(os.path.join(dir_name, name)):
|
||||
for name in os.listdir(dir_name):
|
||||
path = os.path.join(dir_name, name)
|
||||
ext = os.path.splitext(path)[1]
|
||||
ext_length = len(ext)
|
||||
if name != 'static' and ((os.path.isdir(path) and os.path.isfile(os.path.join(path, '__init__.py')))
|
||||
or (os.path.isfile(path) and ext == '.py')):
|
||||
name = name[:-ext_length] if ext_length > 0 else name
|
||||
module_name = '%s.%s' % (module, name)
|
||||
self.addModule(priority, plugin_type, module_name, name)
|
||||
|
||||
def loadSettings(self, module, name, save = True):
|
||||
|
||||
if not hasattr(module, 'config'):
|
||||
#log.debug('Skip loading settings for plugin %s as it has no config section' % module.__file__)
|
||||
return False
|
||||
|
||||
try:
|
||||
for section in module.config:
|
||||
fireEvent('settings.options', section['name'], section)
|
||||
@@ -99,16 +124,22 @@ class Loader(object):
|
||||
log.debug('Failed loading settings for "%s": %s', (name, traceback.format_exc()))
|
||||
return False
|
||||
|
||||
def loadPlugins(self, module, name):
|
||||
def loadPlugins(self, module, type, name):
|
||||
|
||||
if not hasattr(module, 'autoload'):
|
||||
#log.debug('Skip startup for plugin %s as it has no start section' % module.__file__)
|
||||
return False
|
||||
try:
|
||||
klass = module.start()
|
||||
klass.registerPlugin()
|
||||
|
||||
if klass and getattr(klass, 'auto_register_static'):
|
||||
klass.registerStatic(module.__file__)
|
||||
# Load single file plugin
|
||||
if isinstance(module.autoload, (str, unicode)):
|
||||
getattr(module, module.autoload)()
|
||||
# Load folder plugin
|
||||
else:
|
||||
module.autoload()
|
||||
|
||||
log.info('Loaded %s: %s', (type, name))
|
||||
return True
|
||||
except Exception, e:
|
||||
except:
|
||||
log.error('Failed loading plugin "%s": %s', (module.__file__, traceback.format_exc()))
|
||||
return False
|
||||
|
||||
@@ -117,6 +148,10 @@ class Loader(object):
|
||||
if not self.modules.get(priority):
|
||||
self.modules[priority] = {}
|
||||
|
||||
module = module.lstrip('.')
|
||||
if plugin_type.startswith('couchpotato_core'):
|
||||
plugin_type = plugin_type[17:]
|
||||
|
||||
self.modules[priority][module] = {
|
||||
'priority': priority,
|
||||
'module': module,
|
||||
@@ -126,10 +161,9 @@ class Loader(object):
|
||||
|
||||
def loadModule(self, name):
|
||||
try:
|
||||
m = __import__(name)
|
||||
splitted = name.split('.')
|
||||
for sub in splitted[1:-1]:
|
||||
m = getattr(m, sub)
|
||||
return m
|
||||
return import_module(name)
|
||||
except ImportError:
|
||||
log.debug('Skip loading module plugin %s: %s', (name, traceback.format_exc()))
|
||||
return None
|
||||
except:
|
||||
raise
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
import logging
|
||||
import re
|
||||
import traceback
|
||||
|
||||
|
||||
class CPLog(object):
|
||||
|
||||
context = ''
|
||||
replace_private = ['api', 'apikey', 'api_key', 'password', 'username', 'h', 'uid', 'key']
|
||||
replace_private = ['api', 'apikey', 'api_key', 'password', 'username', 'h', 'uid', 'key', 'passkey']
|
||||
|
||||
Env = None
|
||||
is_develop = False
|
||||
|
||||
def __init__(self, context = ''):
|
||||
if context.endswith('.main'):
|
||||
@@ -14,6 +17,20 @@ class CPLog(object):
|
||||
self.context = context
|
||||
self.logger = logging.getLogger()
|
||||
|
||||
def setup(self):
|
||||
|
||||
if not self.Env:
|
||||
from couchpotato.environment import Env
|
||||
|
||||
self.Env = Env
|
||||
self.is_develop = Env.get('dev')
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
addEvent('app.after_shutdown', self.close)
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
logging.shutdown()
|
||||
|
||||
def info(self, msg, replace_tuple = ()):
|
||||
self.logger.info(self.addContext(msg, replace_tuple))
|
||||
|
||||
@@ -37,23 +54,22 @@ class CPLog(object):
|
||||
|
||||
def safeMessage(self, msg, replace_tuple = ()):
|
||||
|
||||
from couchpotato.environment import Env
|
||||
from couchpotato.core.helpers.encoding import ss
|
||||
from couchpotato.core.helpers.encoding import ss, toUnicode
|
||||
|
||||
msg = ss(msg)
|
||||
|
||||
try:
|
||||
msg = msg % replace_tuple
|
||||
except:
|
||||
try:
|
||||
if isinstance(replace_tuple, tuple):
|
||||
msg = msg % tuple([ss(x) for x in list(replace_tuple)])
|
||||
else:
|
||||
msg = msg % ss(replace_tuple)
|
||||
except:
|
||||
self.logger.error(u'Failed encoding stuff to log: %s' % traceback.format_exc())
|
||||
if isinstance(replace_tuple, tuple):
|
||||
msg = msg % tuple([ss(x) if not isinstance(x, (int, float)) else x for x in list(replace_tuple)])
|
||||
elif isinstance(replace_tuple, dict):
|
||||
msg = msg % dict((k, ss(v) if not isinstance(v, (int, float)) else v) for k, v in replace_tuple.iteritems())
|
||||
else:
|
||||
msg = msg % ss(replace_tuple)
|
||||
except Exception as e:
|
||||
self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, e))
|
||||
|
||||
if not Env.get('dev'):
|
||||
self.setup()
|
||||
if not self.is_develop:
|
||||
|
||||
for replace in self.replace_private:
|
||||
msg = re.sub('(\?%s=)[^\&]+' % replace, '?%s=xxx' % replace, msg)
|
||||
@@ -61,10 +77,10 @@ class CPLog(object):
|
||||
|
||||
# Replace api key
|
||||
try:
|
||||
api_key = Env.setting('api_key')
|
||||
api_key = self.Env.setting('api_key')
|
||||
if api_key:
|
||||
msg = msg.replace(api_key, 'API_KEY')
|
||||
except:
|
||||
pass
|
||||
|
||||
return msg
|
||||
return toUnicode(msg)
|
||||
|
||||
101
couchpotato/core/media/__init__.py
Executable file
101
couchpotato/core/media/__init__.py
Executable file
@@ -0,0 +1,101 @@
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato import CPLog
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MediaBase(Plugin):
|
||||
|
||||
_type = None
|
||||
|
||||
def initType(self):
|
||||
addEvent('media.types', self.getType)
|
||||
|
||||
def getType(self):
|
||||
return self._type
|
||||
|
||||
def createOnComplete(self, media_id):
|
||||
|
||||
def onComplete():
|
||||
try:
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if media:
|
||||
event_name = '%s.searcher.single' % media.get('type')
|
||||
fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True)
|
||||
except:
|
||||
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||
|
||||
return onComplete
|
||||
|
||||
def createNotifyFront(self, media_id):
|
||||
|
||||
def notifyFront():
|
||||
try:
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if media:
|
||||
event_name = '%s.update' % media.get('type')
|
||||
fireEvent('notify.frontend', type = event_name, data = media)
|
||||
except:
|
||||
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||
|
||||
return notifyFront
|
||||
|
||||
def getDefaultTitle(self, info, ):
|
||||
|
||||
# Set default title
|
||||
default_title = toUnicode(info.get('title'))
|
||||
titles = info.get('titles', [])
|
||||
counter = 0
|
||||
def_title = None
|
||||
for title in titles:
|
||||
if (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == six.u('') and toUnicode(titles[0]) == title):
|
||||
def_title = toUnicode(title)
|
||||
break
|
||||
counter += 1
|
||||
|
||||
if not def_title:
|
||||
def_title = toUnicode(titles[0])
|
||||
|
||||
return def_title or 'UNKNOWN'
|
||||
|
||||
def getPoster(self, media, image_urls):
|
||||
if 'files' not in media:
|
||||
media['files'] = {}
|
||||
|
||||
existing_files = media['files']
|
||||
|
||||
image_type = 'poster'
|
||||
file_type = 'image_%s' % image_type
|
||||
|
||||
# Make existing unique
|
||||
unique_files = list(set(existing_files.get(file_type, [])))
|
||||
|
||||
# Remove files that can't be found
|
||||
for ef in unique_files:
|
||||
if not os.path.isfile(ef):
|
||||
unique_files.remove(ef)
|
||||
|
||||
# Replace new files list
|
||||
existing_files[file_type] = unique_files
|
||||
if len(existing_files) == 0:
|
||||
del existing_files[file_type]
|
||||
|
||||
# Loop over type
|
||||
for image in image_urls.get(image_type, []):
|
||||
if not isinstance(image, (str, unicode)):
|
||||
continue
|
||||
|
||||
if file_type not in existing_files or len(existing_files.get(file_type, [])) == 0:
|
||||
file_path = fireEvent('file.download', url = image, single = True)
|
||||
if file_path:
|
||||
existing_files[file_type] = [toUnicode(file_path)]
|
||||
break
|
||||
else:
|
||||
break
|
||||
0
couchpotato/core/migration/__init__.py → couchpotato/core/media/_base/__init__.py
Executable file → Normal file
0
couchpotato/core/migration/__init__.py → couchpotato/core/media/_base/__init__.py
Executable file → Normal file
7
couchpotato/core/media/_base/library/__init__.py
Normal file
7
couchpotato/core/media/_base/library/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from .main import Library
|
||||
|
||||
|
||||
def autoload():
|
||||
return Library()
|
||||
|
||||
config = []
|
||||
13
couchpotato/core/media/_base/library/base.py
Normal file
13
couchpotato/core/media/_base/library/base.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
|
||||
class LibraryBase(Plugin):
|
||||
|
||||
_type = None
|
||||
|
||||
def initType(self):
|
||||
addEvent('library.types', self.getType)
|
||||
|
||||
def getType(self):
|
||||
return self._type
|
||||
128
couchpotato/core/media/_base/library/main.py
Executable file
128
couchpotato/core/media/_base/library/main.py
Executable file
@@ -0,0 +1,128 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Library(LibraryBase):
|
||||
def __init__(self):
|
||||
addEvent('library.title', self.title)
|
||||
addEvent('library.related', self.related)
|
||||
addEvent('library.tree', self.tree)
|
||||
|
||||
addEvent('library.root', self.root)
|
||||
|
||||
addApiView('library.query', self.queryView)
|
||||
addApiView('library.related', self.relatedView)
|
||||
addApiView('library.tree', self.treeView)
|
||||
|
||||
def queryView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.query', media, single = True)
|
||||
}
|
||||
|
||||
def relatedView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.related', media, single = True)
|
||||
}
|
||||
|
||||
def treeView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.tree', media, single = True)
|
||||
}
|
||||
|
||||
def title(self, library):
|
||||
return fireEvent(
|
||||
'library.query',
|
||||
library,
|
||||
|
||||
condense = False,
|
||||
include_year = False,
|
||||
include_identifier = False,
|
||||
single = True
|
||||
)
|
||||
|
||||
def related(self, media):
|
||||
result = {self.key(media['type']): media}
|
||||
|
||||
db = get_db()
|
||||
cur = media
|
||||
|
||||
while cur and cur.get('parent_id'):
|
||||
cur = db.get('id', cur['parent_id'])
|
||||
|
||||
result[self.key(cur['type'])] = cur
|
||||
|
||||
children = db.get_many('media_children', media['_id'], with_doc = True)
|
||||
|
||||
for item in children:
|
||||
key = self.key(item['doc']['type']) + 's'
|
||||
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
|
||||
result[key].append(item['doc'])
|
||||
|
||||
return result
|
||||
|
||||
def root(self, media):
|
||||
db = get_db()
|
||||
cur = media
|
||||
|
||||
while cur and cur.get('parent_id'):
|
||||
cur = db.get('id', cur['parent_id'])
|
||||
|
||||
return cur
|
||||
|
||||
def tree(self, media = None, media_id = None):
|
||||
db = get_db()
|
||||
|
||||
if media:
|
||||
result = media
|
||||
elif media_id:
|
||||
result = db.get('id', media_id, with_doc = True)
|
||||
else:
|
||||
return None
|
||||
|
||||
# Find children
|
||||
items = db.get_many('media_children', result['_id'], with_doc = True)
|
||||
keys = []
|
||||
|
||||
# Build children arrays
|
||||
for item in items:
|
||||
key = self.key(item['doc']['type']) + 's'
|
||||
|
||||
if key not in result:
|
||||
result[key] = {}
|
||||
elif type(result[key]) is not dict:
|
||||
result[key] = {}
|
||||
|
||||
if key not in keys:
|
||||
keys.append(key)
|
||||
|
||||
result[key][item['_id']] = fireEvent('library.tree', item['doc'], single = True)
|
||||
|
||||
# Unique children
|
||||
for key in keys:
|
||||
result[key] = result[key].values()
|
||||
|
||||
# Include releases
|
||||
result['releases'] = fireEvent('release.for_media', result['_id'], single = True)
|
||||
|
||||
return result
|
||||
|
||||
def key(self, media_type):
|
||||
parts = media_type.split('.')
|
||||
return parts[-1]
|
||||
7
couchpotato/core/media/_base/matcher/__init__.py
Normal file
7
couchpotato/core/media/_base/matcher/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from .main import Matcher
|
||||
|
||||
|
||||
def autoload():
|
||||
return Matcher()
|
||||
|
||||
config = []
|
||||
84
couchpotato/core/media/_base/matcher/base.py
Normal file
84
couchpotato/core/media/_base/matcher/base.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MatcherBase(Plugin):
|
||||
type = None
|
||||
|
||||
def __init__(self):
|
||||
if self.type:
|
||||
addEvent('%s.matcher.correct' % self.type, self.correct)
|
||||
|
||||
def correct(self, chain, release, media, quality):
|
||||
raise NotImplementedError()
|
||||
|
||||
def flattenInfo(self, info):
|
||||
# Flatten dictionary of matches (chain info)
|
||||
if isinstance(info, dict):
|
||||
return dict([(key, self.flattenInfo(value)) for key, value in info.items()])
|
||||
|
||||
# Flatten matches
|
||||
result = None
|
||||
|
||||
for match in info:
|
||||
if isinstance(match, dict):
|
||||
if result is None:
|
||||
result = {}
|
||||
|
||||
for key, value in match.items():
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
|
||||
result[key].append(value)
|
||||
else:
|
||||
if result is None:
|
||||
result = []
|
||||
|
||||
result.append(match)
|
||||
|
||||
return result
|
||||
|
||||
def constructFromRaw(self, match):
|
||||
if not match:
|
||||
return None
|
||||
|
||||
parts = [
|
||||
''.join([
|
||||
y for y in x[1:] if y
|
||||
]) for x in match
|
||||
]
|
||||
|
||||
return ''.join(parts)[:-1].strip()
|
||||
|
||||
def simplifyValue(self, value):
|
||||
if not value:
|
||||
return value
|
||||
|
||||
if isinstance(value, basestring):
|
||||
return simplifyString(value)
|
||||
|
||||
if isinstance(value, list):
|
||||
return [self.simplifyValue(x) for x in value]
|
||||
|
||||
raise ValueError("Unsupported value type")
|
||||
|
||||
def chainMatch(self, chain, group, tags):
|
||||
info = self.flattenInfo(chain.info[group])
|
||||
|
||||
found_tags = []
|
||||
for tag, accepted in tags.items():
|
||||
values = [self.simplifyValue(x) for x in info.get(tag, [None])]
|
||||
|
||||
if any([val in accepted for val in values]):
|
||||
found_tags.append(tag)
|
||||
|
||||
log.debug('tags found: %s, required: %s' % (found_tags, tags.keys()))
|
||||
|
||||
if set(tags.keys()) == set(found_tags):
|
||||
return True
|
||||
|
||||
return all([key in found_tags for key, value in tags.items()])
|
||||
89
couchpotato/core/media/_base/matcher/main.py
Normal file
89
couchpotato/core/media/_base/matcher/main.py
Normal file
@@ -0,0 +1,89 @@
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.variable import possibleTitles
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.matcher.base import MatcherBase
|
||||
from caper import Caper
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Matcher(MatcherBase):
|
||||
|
||||
def __init__(self):
|
||||
super(Matcher, self).__init__()
|
||||
|
||||
self.caper = Caper()
|
||||
|
||||
addEvent('matcher.parse', self.parse)
|
||||
addEvent('matcher.match', self.match)
|
||||
|
||||
addEvent('matcher.flatten_info', self.flattenInfo)
|
||||
addEvent('matcher.construct_from_raw', self.constructFromRaw)
|
||||
|
||||
addEvent('matcher.correct_title', self.correctTitle)
|
||||
addEvent('matcher.correct_quality', self.correctQuality)
|
||||
|
||||
def parse(self, name, parser='scene'):
|
||||
return self.caper.parse(name, parser)
|
||||
|
||||
def match(self, release, media, quality):
|
||||
match = fireEvent('matcher.parse', release['name'], single = True)
|
||||
|
||||
if len(match.chains) < 1:
|
||||
log.info2('Wrong: %s, unable to parse release name (no chains)', release['name'])
|
||||
return False
|
||||
|
||||
for chain in match.chains:
|
||||
if fireEvent('%s.matcher.correct' % media['type'], chain, release, media, quality, single = True):
|
||||
return chain
|
||||
|
||||
return False
|
||||
|
||||
def correctTitle(self, chain, media):
|
||||
root = fireEvent('library.root', media, single = True)
|
||||
|
||||
if 'show_name' not in chain.info or not len(chain.info['show_name']):
|
||||
log.info('Wrong: missing show name in parsed result')
|
||||
return False
|
||||
|
||||
# Get the lower-case parsed show name from the chain
|
||||
chain_words = [x.lower() for x in chain.info['show_name']]
|
||||
|
||||
# Build a list of possible titles of the media we are searching for
|
||||
titles = root['info']['titles']
|
||||
|
||||
# Add year suffix titles (will result in ['<name_one>', '<name_one> <suffix_one>', '<name_two>', ...])
|
||||
suffixes = [None, root['info']['year']]
|
||||
|
||||
titles = [
|
||||
title + ((' %s' % suffix) if suffix else '')
|
||||
for title in titles
|
||||
for suffix in suffixes
|
||||
]
|
||||
|
||||
# Check show titles match
|
||||
# TODO check xem names
|
||||
for title in titles:
|
||||
for valid_words in [x.split(' ') for x in possibleTitles(title)]:
|
||||
|
||||
if valid_words == chain_words:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def correctQuality(self, chain, quality, quality_map):
|
||||
if quality['identifier'] not in quality_map:
|
||||
log.info2('Wrong: unknown preferred quality %s', quality['identifier'])
|
||||
return False
|
||||
|
||||
if 'video' not in chain.info:
|
||||
log.info2('Wrong: no video tags found')
|
||||
return False
|
||||
|
||||
video_tags = quality_map[quality['identifier']]
|
||||
|
||||
if not self.chainMatch(chain, 'video', video_tags):
|
||||
log.info2('Wrong: %s tags not in chain', video_tags)
|
||||
return False
|
||||
|
||||
return True
|
||||
5
couchpotato/core/media/_base/media/__init__.py
Normal file
5
couchpotato/core/media/_base/media/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from .main import MediaPlugin
|
||||
|
||||
|
||||
def autoload():
|
||||
return MediaPlugin()
|
||||
199
couchpotato/core/media/_base/media/index.py
Normal file
199
couchpotato/core/media/_base/media/index.py
Normal file
@@ -0,0 +1,199 @@
|
||||
from string import ascii_letters
|
||||
from hashlib import md5
|
||||
|
||||
from CodernityDB.tree_index import MultiTreeBasedIndex, TreeBasedIndex
|
||||
from couchpotato.core.helpers.encoding import toUnicode, simplifyString
|
||||
|
||||
|
||||
class MediaIndex(MultiTreeBasedIndex):
|
||||
_version = 3
|
||||
|
||||
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and (data.get('identifier') or data.get('identifiers')):
|
||||
|
||||
identifiers = data.get('identifiers', {})
|
||||
if data.get('identifier') and 'imdb' not in identifiers:
|
||||
identifiers['imdb'] = data.get('identifier')
|
||||
|
||||
ids = []
|
||||
for x in identifiers:
|
||||
ids.append(md5('%s-%s' % (x, identifiers[x])).hexdigest())
|
||||
|
||||
return ids, None
|
||||
|
||||
|
||||
class MediaStatusIndex(TreeBasedIndex):
|
||||
_version = 1
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaStatusIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('status'):
|
||||
return md5(data.get('status')).hexdigest(), None
|
||||
|
||||
|
||||
class MediaTypeIndex(TreeBasedIndex):
|
||||
_version = 1
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaTypeIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('type'):
|
||||
return md5(data.get('type')).hexdigest(), None
|
||||
|
||||
|
||||
class TitleSearchIndex(MultiTreeBasedIndex):
|
||||
_version = 1
|
||||
|
||||
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex
|
||||
from itertools import izip
|
||||
from couchpotato.core.helpers.encoding import simplifyString"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(TitleSearchIndex, self).__init__(*args, **kwargs)
|
||||
self.__l = kwargs.get('w_len', 2)
|
||||
|
||||
def make_key_value(self, data):
|
||||
|
||||
if data.get('_t') == 'media' and len(data.get('title', '')) > 0:
|
||||
|
||||
out = set()
|
||||
title = str(simplifyString(data.get('title').lower()))
|
||||
l = self.__l
|
||||
title_split = title.split()
|
||||
|
||||
for x in range(len(title_split)):
|
||||
combo = ' '.join(title_split[x:])[:32].strip()
|
||||
out.add(combo.rjust(32, '_'))
|
||||
combo_range = max(l, min(len(combo), 32))
|
||||
|
||||
for cx in range(1, combo_range):
|
||||
ccombo = combo[:-cx].strip()
|
||||
if len(ccombo) > l:
|
||||
out.add(ccombo.rjust(32, '_'))
|
||||
|
||||
return out, None
|
||||
|
||||
def make_key(self, key):
|
||||
return key.rjust(32, '_').lower()
|
||||
|
||||
|
||||
class TitleIndex(TreeBasedIndex):
|
||||
_version = 4
|
||||
|
||||
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
|
||||
from string import ascii_letters
|
||||
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(TitleIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return self.simplify(key)
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('title') is not None and len(data.get('title')) > 0:
|
||||
return self.simplify(data['title']), None
|
||||
|
||||
def simplify(self, title):
|
||||
|
||||
title = toUnicode(title)
|
||||
|
||||
nr_prefix = '' if title and len(title) > 0 and title[0] in ascii_letters else '#'
|
||||
title = simplifyString(title)
|
||||
|
||||
for prefix in ['the ', 'an ', 'a ']:
|
||||
if prefix == title[:len(prefix)]:
|
||||
title = title[len(prefix):]
|
||||
break
|
||||
|
||||
return str(nr_prefix + title).ljust(32, ' ')[:32]
|
||||
|
||||
|
||||
class StartsWithIndex(TreeBasedIndex):
|
||||
_version = 3
|
||||
|
||||
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
|
||||
from string import ascii_letters
|
||||
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '1s'
|
||||
super(StartsWithIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return self.first(key)
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('title') is not None:
|
||||
return self.first(data['title']), None
|
||||
|
||||
def first(self, title):
|
||||
title = toUnicode(title)
|
||||
title = simplifyString(title)
|
||||
|
||||
for prefix in ['the ', 'an ', 'a ']:
|
||||
if prefix == title[:len(prefix)]:
|
||||
title = title[len(prefix):]
|
||||
break
|
||||
|
||||
return str(title[0] if title and len(title) > 0 and title[0] in ascii_letters else '#').lower()
|
||||
|
||||
|
||||
|
||||
class MediaChildrenIndex(TreeBasedIndex):
|
||||
_version = 1
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaChildrenIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return key
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('parent_id'):
|
||||
return data.get('parent_id'), None
|
||||
|
||||
|
||||
class MediaTagIndex(MultiTreeBasedIndex):
|
||||
_version = 2
|
||||
|
||||
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaTagIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('tags') and len(data.get('tags', [])) > 0:
|
||||
|
||||
tags = set()
|
||||
for tag in data.get('tags', []):
|
||||
tags.add(self.make_key(tag))
|
||||
|
||||
return list(tags), None
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
599
couchpotato/core/media/_base/media/main.py
Executable file
599
couchpotato/core/media/_base/media/main.py
Executable file
@@ -0,0 +1,599 @@
|
||||
from datetime import timedelta
|
||||
import time
|
||||
import traceback
|
||||
from string import ascii_lowercase
|
||||
|
||||
from CodernityDB.database import RecordNotFound, RecordDeleted
|
||||
from couchpotato import tryInt, get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import splitString, getImdb, getTitle
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media import MediaBase
|
||||
from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex, MediaTagIndex
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MediaPlugin(MediaBase):
|
||||
|
||||
_database = {
|
||||
'media': MediaIndex,
|
||||
'media_search_title': TitleSearchIndex,
|
||||
'media_status': MediaStatusIndex,
|
||||
'media_tag': MediaTagIndex,
|
||||
'media_by_type': MediaTypeIndex,
|
||||
'media_title': TitleIndex,
|
||||
'media_startswith': StartsWithIndex,
|
||||
'media_children': MediaChildrenIndex,
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
||||
addApiView('media.refresh', self.refresh, docs = {
|
||||
'desc': 'Refresh a any media type by ID',
|
||||
'params': {
|
||||
'id': {'desc': 'Movie, Show, Season or Episode ID(s) you want to refresh.', 'type': 'int (comma separated)'},
|
||||
}
|
||||
})
|
||||
|
||||
addApiView('media.list', self.listView, docs = {
|
||||
'desc': 'List media',
|
||||
'params': {
|
||||
'type': {'type': 'string', 'desc': 'Media type to filter on.'},
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter media by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter media by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the media list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all media starting with the letter "a"'},
|
||||
'search': {'desc': 'Search media title'},
|
||||
},
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'empty': bool, any media returned or not,
|
||||
'media': array, media found,
|
||||
}"""}
|
||||
})
|
||||
|
||||
addApiView('media.get', self.getView, docs = {
|
||||
'desc': 'Get media by id',
|
||||
'params': {
|
||||
'id': {'desc': 'The id of the media'},
|
||||
}
|
||||
})
|
||||
|
||||
addApiView('media.delete', self.deleteView, docs = {
|
||||
'desc': 'Delete a media from the wanted list',
|
||||
'params': {
|
||||
'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
|
||||
'delete_from': {'desc': 'Delete media from this page', 'type': 'string: all (default), wanted, manage'},
|
||||
}
|
||||
})
|
||||
|
||||
addApiView('media.available_chars', self.charView)
|
||||
|
||||
addEvent('app.load', self.addSingleRefreshView, priority = 100)
|
||||
addEvent('app.load', self.addSingleListView, priority = 100)
|
||||
addEvent('app.load', self.addSingleCharView, priority = 100)
|
||||
addEvent('app.load', self.addSingleDeleteView, priority = 100)
|
||||
addEvent('app.load', self.cleanupFaults)
|
||||
|
||||
addEvent('media.get', self.get)
|
||||
addEvent('media.with_status', self.withStatus)
|
||||
addEvent('media.with_identifiers', self.withIdentifiers)
|
||||
addEvent('media.list', self.list)
|
||||
addEvent('media.delete', self.delete)
|
||||
addEvent('media.restatus', self.restatus)
|
||||
addEvent('media.tag', self.tag)
|
||||
addEvent('media.untag', self.unTag)
|
||||
|
||||
# Wrongly tagged media files
|
||||
def cleanupFaults(self):
|
||||
medias = fireEvent('media.with_status', 'ignored', single = True) or []
|
||||
|
||||
db = get_db()
|
||||
for media in medias:
|
||||
try:
|
||||
media['status'] = 'done'
|
||||
db.update(media)
|
||||
except:
|
||||
pass
|
||||
|
||||
def refresh(self, id = '', **kwargs):
|
||||
handlers = []
|
||||
ids = splitString(id)
|
||||
|
||||
for x in ids:
|
||||
|
||||
refresh_handler = self.createRefreshHandler(x)
|
||||
if refresh_handler:
|
||||
handlers.append(refresh_handler)
|
||||
|
||||
fireEvent('notify.frontend', type = 'media.busy', data = {'_id': ids})
|
||||
fireEventAsync('schedule.queue', handlers = handlers)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
}
|
||||
|
||||
def createRefreshHandler(self, media_id):
|
||||
|
||||
try:
|
||||
media = get_db().get('id', media_id)
|
||||
event = '%s.update' % media.get('type')
|
||||
|
||||
def handler():
|
||||
fireEvent(event, media_id = media_id, on_complete = self.createOnComplete(media_id))
|
||||
|
||||
return handler
|
||||
|
||||
except:
|
||||
log.error('Refresh handler for non existing media: %s', traceback.format_exc())
|
||||
|
||||
def addSingleRefreshView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
addApiView('%s.refresh' % media_type, self.refresh)
|
||||
|
||||
def get(self, media_id):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
|
||||
imdb_id = getImdb(str(media_id))
|
||||
|
||||
if imdb_id:
|
||||
media = db.get('media', 'imdb-%s' % imdb_id, with_doc = True)['doc']
|
||||
else:
|
||||
media = db.get('id', media_id)
|
||||
|
||||
if media:
|
||||
|
||||
# Attach category
|
||||
try: media['category'] = db.get('id', media.get('category_id'))
|
||||
except: pass
|
||||
|
||||
media['releases'] = fireEvent('release.for_media', media['_id'], single = True)
|
||||
|
||||
return media
|
||||
|
||||
except (RecordNotFound, RecordDeleted):
|
||||
log.error('Media with id "%s" not found', media_id)
|
||||
except:
|
||||
raise
|
||||
|
||||
def getView(self, id = None, **kwargs):
|
||||
|
||||
media = self.get(id) if id else None
|
||||
|
||||
return {
|
||||
'success': media is not None,
|
||||
'media': media,
|
||||
}
|
||||
|
||||
def withStatus(self, status, types = None, with_doc = True):
|
||||
|
||||
db = get_db()
|
||||
|
||||
if types and not isinstance(types, (list, tuple)):
|
||||
types = [types]
|
||||
|
||||
status = list(status if isinstance(status, (list, tuple)) else [status])
|
||||
|
||||
for s in status:
|
||||
for ms in db.get_many('media_status', s):
|
||||
if with_doc:
|
||||
try:
|
||||
doc = db.get('id', ms['_id'])
|
||||
|
||||
if types and doc.get('type') not in types:
|
||||
continue
|
||||
|
||||
yield doc
|
||||
except (RecordDeleted, RecordNotFound):
|
||||
log.debug('Record not found, skipping: %s', ms['_id'])
|
||||
except (ValueError, EOFError):
|
||||
fireEvent('database.delete_corrupted', ms.get('_id'), traceback_error = traceback.format_exc(0))
|
||||
else:
|
||||
yield ms
|
||||
|
||||
def withIdentifiers(self, identifiers, with_doc = False):
|
||||
db = get_db()
|
||||
|
||||
for x in identifiers:
|
||||
try:
|
||||
return db.get('media', '%s-%s' % (x, identifiers[x]), with_doc = with_doc)
|
||||
except:
|
||||
pass
|
||||
|
||||
log.debug('No media found with identifiers: %s', identifiers)
|
||||
return False
|
||||
|
||||
def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, with_tags = None, starts_with = None, search = None):
|
||||
|
||||
db = get_db()
|
||||
|
||||
# Make a list from string
|
||||
if status and not isinstance(status, (list, tuple)):
|
||||
status = [status]
|
||||
if release_status and not isinstance(release_status, (list, tuple)):
|
||||
release_status = [release_status]
|
||||
if types and not isinstance(types, (list, tuple)):
|
||||
types = [types]
|
||||
if with_tags and not isinstance(with_tags, (list, tuple)):
|
||||
with_tags = [with_tags]
|
||||
|
||||
# query media ids
|
||||
if types:
|
||||
all_media_ids = set()
|
||||
for media_type in types:
|
||||
all_media_ids = all_media_ids.union(set([x['_id'] for x in db.get_many('media_by_type', media_type)]))
|
||||
else:
|
||||
all_media_ids = set([x['_id'] for x in db.all('media')])
|
||||
|
||||
media_ids = list(all_media_ids)
|
||||
filter_by = {}
|
||||
|
||||
# Filter on movie status
|
||||
if status and len(status) > 0:
|
||||
filter_by['media_status'] = set()
|
||||
for media_status in fireEvent('media.with_status', status, with_doc = False, single = True):
|
||||
filter_by['media_status'].add(media_status.get('_id'))
|
||||
|
||||
# Filter on release status
|
||||
if release_status and len(release_status) > 0:
|
||||
filter_by['release_status'] = set()
|
||||
for release_status in fireEvent('release.with_status', release_status, with_doc = False, single = True):
|
||||
filter_by['release_status'].add(release_status.get('media_id'))
|
||||
|
||||
# Add search filters
|
||||
if starts_with:
|
||||
starts_with = toUnicode(starts_with.lower())[0]
|
||||
starts_with = starts_with if starts_with in ascii_lowercase else '#'
|
||||
filter_by['starts_with'] = [x['_id'] for x in db.get_many('media_startswith', starts_with)]
|
||||
|
||||
# Add tag filter
|
||||
if with_tags:
|
||||
filter_by['with_tags'] = set()
|
||||
for tag in with_tags:
|
||||
for x in db.get_many('media_tag', tag):
|
||||
filter_by['with_tags'].add(x['_id'])
|
||||
|
||||
# Filter with search query
|
||||
if search:
|
||||
filter_by['search'] = [x['_id'] for x in db.get_many('media_search_title', search)]
|
||||
|
||||
if status_or and 'media_status' in filter_by and 'release_status' in filter_by:
|
||||
filter_by['status'] = list(filter_by['media_status']) + list(filter_by['release_status'])
|
||||
del filter_by['media_status']
|
||||
del filter_by['release_status']
|
||||
|
||||
# Filter by combining ids
|
||||
for x in filter_by:
|
||||
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||
|
||||
offset = 0
|
||||
limit = -1
|
||||
if limit_offset:
|
||||
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
|
||||
limit = tryInt(splt[0])
|
||||
offset = tryInt(0 if len(splt) is 1 else splt[1])
|
||||
|
||||
# List movies based on title order
|
||||
medias = []
|
||||
for m in db.all('media_title'):
|
||||
media_id = m['_id']
|
||||
if media_id not in media_ids: continue
|
||||
if offset > 0:
|
||||
offset -= 1
|
||||
continue
|
||||
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
|
||||
# Skip if no media has been found
|
||||
if not media:
|
||||
continue
|
||||
|
||||
# Merge releases with movie dict
|
||||
medias.append(media)
|
||||
|
||||
# remove from media ids
|
||||
media_ids.remove(media_id)
|
||||
if len(media_ids) == 0 or len(medias) == limit: break
|
||||
|
||||
# Sort media by type and return result
|
||||
result = {}
|
||||
|
||||
# Create keys for media types we are listing
|
||||
if types:
|
||||
for media_type in types:
|
||||
result['%ss' % media_type] = []
|
||||
else:
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
result['%ss' % media_type] = []
|
||||
|
||||
total_count = len(medias)
|
||||
|
||||
if total_count == 0:
|
||||
return 0, result
|
||||
|
||||
for kind in medias:
|
||||
result['%ss' % kind['type']].append(kind)
|
||||
|
||||
return total_count, result
|
||||
|
||||
def listView(self, **kwargs):
|
||||
|
||||
total_count, result = self.list(
|
||||
types = splitString(kwargs.get('type')),
|
||||
status = splitString(kwargs.get('status')),
|
||||
release_status = splitString(kwargs.get('release_status')),
|
||||
status_or = kwargs.get('status_or') is not None,
|
||||
limit_offset = kwargs.get('limit_offset'),
|
||||
with_tags = splitString(kwargs.get('with_tags')),
|
||||
starts_with = kwargs.get('starts_with'),
|
||||
search = kwargs.get('search')
|
||||
)
|
||||
|
||||
results = result
|
||||
results['success'] = True
|
||||
results['empty'] = len(result) == 0
|
||||
results['total'] = total_count
|
||||
|
||||
return results
|
||||
|
||||
def addSingleListView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempList = lambda *args, **kwargs : self.listView(type = media_type, **kwargs)
|
||||
addApiView('%s.list' % media_type, tempList, docs = {
|
||||
'desc': 'List media',
|
||||
'params': {
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the ' + media_type + ' list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all ' + media_type + 's starting with the letter "a"'},
|
||||
'search': {'desc': 'Search ' + media_type + ' title'},
|
||||
},
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'empty': bool, any """ + media_type + """s returned or not,
|
||||
'media': array, media found,
|
||||
}"""}
|
||||
})
|
||||
|
||||
def availableChars(self, types = None, status = None, release_status = None):
|
||||
|
||||
db = get_db()
|
||||
|
||||
# Make a list from string
|
||||
if status and not isinstance(status, (list, tuple)):
|
||||
status = [status]
|
||||
if release_status and not isinstance(release_status, (list, tuple)):
|
||||
release_status = [release_status]
|
||||
if types and not isinstance(types, (list, tuple)):
|
||||
types = [types]
|
||||
|
||||
# query media ids
|
||||
if types:
|
||||
all_media_ids = set()
|
||||
for media_type in types:
|
||||
all_media_ids = all_media_ids.union(set([x['_id'] for x in db.get_many('media_by_type', media_type)]))
|
||||
else:
|
||||
all_media_ids = set([x['_id'] for x in db.all('media')])
|
||||
|
||||
media_ids = all_media_ids
|
||||
filter_by = {}
|
||||
|
||||
# Filter on movie status
|
||||
if status and len(status) > 0:
|
||||
filter_by['media_status'] = set()
|
||||
for media_status in fireEvent('media.with_status', status, with_doc = False, single = True):
|
||||
filter_by['media_status'].add(media_status.get('_id'))
|
||||
|
||||
# Filter on release status
|
||||
if release_status and len(release_status) > 0:
|
||||
filter_by['release_status'] = set()
|
||||
for release_status in fireEvent('release.with_status', release_status, with_doc = False, single = True):
|
||||
filter_by['release_status'].add(release_status.get('media_id'))
|
||||
|
||||
# Filter by combining ids
|
||||
for x in filter_by:
|
||||
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||
|
||||
chars = set()
|
||||
for x in db.all('media_startswith'):
|
||||
if x['_id'] in media_ids:
|
||||
chars.add(x['key'])
|
||||
|
||||
if len(chars) == 27:
|
||||
break
|
||||
|
||||
return list(chars)
|
||||
|
||||
def charView(self, **kwargs):
|
||||
|
||||
type = splitString(kwargs.get('type', 'movie'))
|
||||
status = splitString(kwargs.get('status', None))
|
||||
release_status = splitString(kwargs.get('release_status', None))
|
||||
chars = self.availableChars(type, status, release_status)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'empty': len(chars) == 0,
|
||||
'chars': chars,
|
||||
}
|
||||
|
||||
def addSingleCharView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempChar = lambda *args, **kwargs : self.charView(type = media_type, **kwargs)
|
||||
addApiView('%s.available_chars' % media_type, tempChar)
|
||||
|
||||
def delete(self, media_id, delete_from = None):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
|
||||
media = db.get('id', media_id)
|
||||
if media:
|
||||
deleted = False
|
||||
|
||||
media_releases = fireEvent('release.for_media', media['_id'], single = True)
|
||||
|
||||
if delete_from == 'all':
|
||||
# Delete connected releases
|
||||
for release in media_releases:
|
||||
db.delete(release)
|
||||
|
||||
db.delete(media)
|
||||
deleted = True
|
||||
else:
|
||||
|
||||
total_releases = len(media_releases)
|
||||
total_deleted = 0
|
||||
new_media_status = None
|
||||
|
||||
for release in media_releases:
|
||||
if delete_from in ['wanted', 'snatched', 'late']:
|
||||
if release.get('status') != 'done':
|
||||
db.delete(release)
|
||||
total_deleted += 1
|
||||
new_media_status = 'done'
|
||||
elif delete_from == 'manage':
|
||||
if release.get('status') == 'done' or media.get('status') == 'done':
|
||||
db.delete(release)
|
||||
total_deleted += 1
|
||||
|
||||
if (total_releases == total_deleted) or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'):
|
||||
db.delete(media)
|
||||
deleted = True
|
||||
elif new_media_status:
|
||||
media['status'] = new_media_status
|
||||
|
||||
# Remove profile (no use for in manage)
|
||||
if new_media_status == 'done':
|
||||
media['profile_id'] = None
|
||||
|
||||
db.update(media)
|
||||
|
||||
fireEvent('media.untag', media['_id'], 'recent', single = True)
|
||||
else:
|
||||
fireEvent('media.restatus', media.get('_id'), single = True)
|
||||
|
||||
if deleted:
|
||||
fireEvent('notify.frontend', type = 'media.deleted', data = media)
|
||||
except:
|
||||
log.error('Failed deleting media: %s', traceback.format_exc())
|
||||
|
||||
return True
|
||||
|
||||
def deleteView(self, id = '', **kwargs):
    """API endpoint: delete one or more media items.

    id: comma separated media ids; delete_from (kwarg) selects the
    deletion mode, defaulting to 'all'.
    """

    target = kwargs.get('delete_from', 'all')
    for single_id in splitString(id):
        self.delete(single_id, delete_from = target)

    return {'success': True}
|
||||
|
||||
def addSingleDeleteView(self):
    """Register a '<type>.delete' API endpoint for every known media type.

    Bug fix: the original built the handler with a bare lambda that
    closed over the loop variable ``media_type``. Python closures bind
    late, so every registered endpoint deleted with the *last* media
    type of the loop. A factory function binds the value per iteration.
    """

    def makeDelete(media_type):
        # Each call gets its own scope, freezing media_type for the handler.
        return lambda *args, **kwargs: self.deleteView(type = media_type, **kwargs)

    for media_type in fireEvent('media.types', merge = True):
        addApiView('%s.delete' % media_type, makeDelete(media_type), docs = {
            'desc': 'Delete a ' + media_type + ' from the wanted list',
            'params': {
                'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
                'delete_from': {'desc': 'Delete ' + media_type + ' from this page', 'type': 'string: all (default), wanted, manage'},
            }
        })
|
||||
|
||||
def restatus(self, media_id, tag_recent = True, allowed_restatus = None):
    """Recompute a media document's status from its profile and releases.

    tag_recent: when the status changed, also tag the media 'recent'.
    allowed_restatus: optional whitelist of statuses the update may
        transition to; other computed statuses are not persisted.
    Returns the (possibly unchanged) status string, or None on error.
    """

    try:
        db = get_db()

        m = db.get('id', media_id)
        previous_status = m['status']

        log.debug('Changing status for %s', getTitle(m))
        if not m['profile_id']:
            # No quality profile: nothing left to search for.
            m['status'] = 'done'
        else:
            m['status'] = 'active'

            try:
                profile = db.get('id', m['profile_id'])
                media_releases = fireEvent('release.for_media', m['_id'], single = True)
                done_releases = [release for release in media_releases if release.get('status') == 'done']

                if done_releases:

                    # Check if we are finished with the media
                    for release in done_releases:
                        if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True):
                            m['status'] = 'done'
                            break

                elif previous_status == 'done':
                    # No done releases but it was done before: keep it done.
                    m['status'] = 'done'

            except RecordNotFound:
                # Profile no longer exists; don't change anything.
                log.debug('Failed restatus, keeping previous: %s', traceback.format_exc())
                m['status'] = previous_status

        # Only update when status has changed
        if previous_status != m['status'] and (not allowed_restatus or m['status'] in allowed_restatus):
            db.update(m)

            # Tag media as recent
            if tag_recent:
                self.tag(media_id, 'recent', update_edited = True)

        return m['status']
    except:
        log.error('Failed restatus: %s', traceback.format_exc())
|
||||
|
||||
def tag(self, media_id, tag, update_edited = False):
    """Attach *tag* to the media document's tag list.

    update_edited: also refresh the document's last_edit timestamp
    (only persisted when the tag was actually added).
    Returns True on success, False when anything goes wrong.
    """

    try:
        database = get_db()
        document = database.get('id', media_id)

        if update_edited:
            document['last_edit'] = int(time.time())

        existing = document.get('tags') or []
        if tag in existing:
            # Already tagged; nothing to persist.
            return True

        existing.append(tag)
        document['tags'] = existing
        database.update(document)
        return True
    except:
        log.error('Failed tagging: %s', traceback.format_exc())

    return False
|
||||
|
||||
def unTag(self, media_id, tag):
    """Remove *tag* from the media document's tag list.

    Returns True on success (including when the tag was absent),
    False when the database operation fails.
    """

    try:
        database = get_db()
        document = database.get('id', media_id)

        current = document.get('tags') or []
        if tag in current:
            # Deduplicate first so a single remove() drops every copy.
            remaining = list(set(current))
            remaining.remove(tag)

            document['tags'] = remaining
            database.update(document)

        return True
    except:
        log.error('Failed untagging: %s', traceback.format_exc())

    return False
|
||||
@@ -0,0 +1,8 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import Provider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class AutomationBase(Provider):
    """Marker base class for automation providers; adds no behavior yet."""
    pass
|
||||
345
couchpotato/core/media/_base/providers/base.py
Normal file
345
couchpotato/core/media/_base/providers/base.py
Normal file
@@ -0,0 +1,345 @@
|
||||
from urlparse import urlparse
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import traceback
|
||||
import xml.etree.ElementTree as XMLTree
|
||||
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.encoding import ss
|
||||
from couchpotato.core.helpers.variable import tryFloat, mergeDicts, md5, \
|
||||
possibleTitles
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MultiProvider(Plugin):
    """Container plugin that instantiates and holds several provider classes."""

    def __init__(self):
        instances = []

        for provider_cls in self.getTypes():
            instance = provider_cls()

            # Prefix with the container's name so log lines identify both halves.
            instance.setName('%s:%s' % (self.getName(), instance.getName()))

            instances.append(instance)

        self._classes = instances

    def getTypes(self):
        """Override to return the provider classes this container wraps."""
        return []

    def getClasses(self):
        """Return the instantiated provider objects."""
        return self._classes
|
||||
|
||||
|
||||
class Provider(Plugin):
    """Common base for external providers: host availability caching and
    cached JSON/RSS/HTML fetching helpers."""

    type = None  # movie, show, subtitle, trailer, ...
    http_time_between_calls = 10  # Default timeout for url requests

    # Class-level caches shared by ALL provider instances, keyed by hostname:
    # when a host was last probed, and whether it answered.
    last_available_check = {}
    is_available = {}

    def isAvailable(self, test_url):
        """Return whether test_url's host is reachable; probes at most every 15 min."""

        # In dev mode always report available.
        if Env.get('dev'): return True

        now = time.time()
        host = urlparse(test_url).hostname

        # NOTE(review): .get(host) is None on the first call; this relies on
        # Python 2 ordering None < number (file uses the py2 urlparse module).
        if self.last_available_check.get(host) < now - 900:
            self.last_available_check[host] = now

            try:
                self.urlopen(test_url, 30)
                self.is_available[host] = True
            except:
                log.error('"%s" unavailable, trying again in an 15 minutes.', host)
                self.is_available[host] = False

        return self.is_available.get(host, False)

    def getJsonData(self, url, decode_from = None, **kwargs):
        """Fetch url through the cache and parse it as JSON; [] on failure.

        decode_from: optional charset to decode the raw body with first.
        """

        cache_key = md5(url)
        data = self.getCache(cache_key, url, **kwargs)

        if data:
            try:
                data = data.strip()
                if decode_from:
                    data = data.decode(decode_from)

                return json.loads(data)
            except:
                log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))

        return []

    def getRSSData(self, url, item_path = 'channel/item', **kwargs):
        """Fetch url through the cache and return the XML elements found at
        item_path; [] on failure."""

        cache_key = md5(url)
        data = self.getCache(cache_key, url, **kwargs)

        if data and len(data) > 0:
            try:
                data = XMLTree.fromstring(data)
                return self.getElements(data, item_path)
            except:
                # Retry after passing the payload through ss() to repair encoding.
                try:
                    data = XMLTree.fromstring(ss(data))
                    return self.getElements(data, item_path)
                except:
                    log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))

        return []

    def getHTMLData(self, url, **kwargs):
        """Fetch url through the cache and return the raw response body."""

        cache_key = md5(url)
        return self.getCache(cache_key, url, **kwargs)
|
||||
|
||||
|
||||
class YarrProvider(Provider):
    """Base for release (nzb/torrent) search providers: login session
    handling, search dispatch, size parsing and category mapping."""

    protocol = None  # nzb, torrent, torrent_magnet

    # Category mapping: iterable of (category ids, quality identifiers);
    # subclasses override (the {} default simply yields nothing).
    cat_ids = {}
    cat_backup_id = None

    # Unit labels recognised by parseSize().
    size_gb = ['gb', 'gib']
    size_mb = ['mb', 'mib']
    size_kb = ['kb', 'kib']

    # Timestamp of the last successful login/session check.
    last_login_check = None

    def __init__(self):
        addEvent('provider.enabled_protocols', self.getEnabledProtocol)
        addEvent('provider.belongs_to', self.belongsTo)
        addEvent('provider.search.%s.%s' % (self.protocol, self.type), self.search)

    def getEnabledProtocol(self):
        # Protocol string when enabled, empty list otherwise (results get merged).
        if self.isEnabled():
            return self.protocol
        else:
            return []

    def buildUrl(self, *args, **kwargs):
        # Subclasses build their provider-specific search query here.
        pass

    def login(self):
        """Log in to the provider site; session is trusted for an hour.

        Returns True when a valid session exists or a fresh login succeeds.
        """

        # Check if we are still logged in every hour
        now = time.time()
        if self.last_login_check and self.last_login_check < (now - 3600):
            try:
                output = self.urlopen(self.urls['login_check'])
                if self.loginCheckSuccess(output):
                    self.last_login_check = now
                    return True
            except: pass
            self.last_login_check = None

        if self.last_login_check:
            return True

        try:
            output = self.urlopen(self.urls['login'], data = self.getLoginParams())

            if self.loginSuccess(output):
                self.last_login_check = now
                return True

            error = 'unknown'
        except:
            error = traceback.format_exc()

        self.last_login_check = None
        log.error('Failed to login %s: %s', (self.getName(), error))
        return False

    def loginSuccess(self, output):
        # Subclasses inspect the login response; default assumes success.
        return True

    def loginCheckSuccess(self, output):
        # Subclasses inspect the session-check response; default assumes valid.
        return True

    def loginDownload(self, url = '', nzb_id = ''):
        """Download a release from a provider that requires a login session.

        NOTE(review): a failed login is only logged here — the download is
        still attempted below; confirm this best-effort is intended.
        """
        try:
            if not self.login():
                log.error('Failed downloading from %s', self.getName())
            return self.urlopen(url)
        except:
            log.error('Failed downloading from %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        # POST payload for the login form; subclasses override.
        return {}

    def download(self, url = '', nzb_id = ''):
        """Download a release; 'try_next' tells the searcher to pick another."""
        try:
            return self.urlopen(url, headers = {'User-Agent': Env.getIdentifier()}, show_error = False)
        except:
            log.error('Failed getting release from %s: %s', (self.getName(), traceback.format_exc()))

        return 'try_next'

    def search(self, media, quality):
        """Search this provider for media at quality; returns a ResultList
        ([] when disabled or login fails)."""

        if self.isDisabled():
            return []

        # Login if needed
        if self.urls.get('login') and not self.login():
            log.error('Failed to login to: %s', self.getName())
            return []

        # Create result container; providers with a _search method can
        # search by imdb id directly.
        imdb_results = hasattr(self, '_search')
        results = ResultList(self, media, quality, imdb_results = imdb_results)

        # Do search based on imdb id
        if imdb_results:
            self._search(media, quality, results)
        # Search possible titles
        else:
            media_title = fireEvent('library.query', media, include_year = False, single = True)

            for title in possibleTitles(media_title):
                self._searchOnTitle(title, media, quality, results)

        return results

    def belongsTo(self, url, provider = None, host = None):
        """Return self when the url/provider/host matches this provider,
        otherwise None."""
        try:
            if provider and provider == self.getName():
                return self

            hostname = urlparse(url).hostname
            if host and hostname in host:
                return self
            else:
                # Fall back to matching against any of this provider's urls.
                for url_type in self.urls:
                    download_url = self.urls[url_type]
                    if hostname in download_url:
                        return self
        except:
            log.debug('Url %s doesn\'t belong to %s', (url, self.getName()))

        return

    def parseSize(self, size):
        """Parse a human-readable size string (e.g. '1.4 GB') into megabytes;
        0 when no known unit is present."""

        size_raw = size.lower()
        size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())

        for s in self.size_gb:
            if s in size_raw:
                return size * 1024

        for s in self.size_mb:
            if s in size_raw:
                return size

        for s in self.size_kb:
            if s in size_raw:
                return size / 1024

        return 0

    def getCatId(self, quality = None):
        """Map a quality dict to this provider's category id list, falling
        back to cat_backup_id, then []."""
        if not quality: quality = {}
        identifier = quality.get('identifier')

        want_3d = False
        if quality.get('custom'):
            want_3d = quality['custom'].get('3d')

        for ids, qualities in self.cat_ids:
            if identifier in qualities or (want_3d and '3d' in qualities):
                return ids

        if self.cat_backup_id:
            return [self.cat_backup_id]

        return []
|
||||
|
||||
|
||||
class ResultList(list):
    """List subclass that validates, de-duplicates, scores and fills
    defaults for provider search results before storing them."""

    result_ids = None  # ids already accepted, to skip duplicates
    provider = None
    media = None
    quality = None

    def __init__(self, provider, media, quality, **kwargs):

        self.result_ids = []
        self.provider = provider
        self.media = media
        self.quality = quality
        self.kwargs = kwargs

        super(ResultList, self).__init__()

    def extend(self, results):
        # Route every result through append() so each one is validated.
        for r in results:
            self.append(r)

    def append(self, result):
        """Store result only when the searcher accepts it; scale its score
        by the searcher's correctness weight."""

        new_result = self.fillResult(result)

        is_correct = fireEvent('searcher.correct_release', new_result, self.media, self.quality,
                               imdb_results = self.kwargs.get('imdb_results', False), single = True)

        if is_correct and new_result['id'] not in self.result_ids:
            is_correct_weight = float(is_correct)

            new_result['score'] += fireEvent('score.calculate', new_result, self.media, single = True)

            old_score = new_result['score']
            new_result['score'] = int(old_score * is_correct_weight)

            log.info2('Found correct release with weight %.02f, old_score(%d) now scaled to score(%d)', (
                is_correct_weight,
                old_score,
                new_result['score']
            ))

            self.found(new_result)
            self.result_ids.append(result['id'])

            super(ResultList, self).append(new_result)

    def fillResult(self, result):
        """Merge result over a complete set of default fields."""

        defaults = {
            'id': 0,
            'protocol': self.provider.protocol,
            'type': self.provider.type,
            'provider': self.provider.getName(),
            # Providers with a login url download via the login-aware path.
            'download': self.provider.loginDownload if self.provider.urls.get('login') else self.provider.download,
            'seed_ratio': Env.setting('seed_ratio', section = self.provider.getName().lower(), default = ''),
            'seed_time': Env.setting('seed_time', section = self.provider.getName().lower(), default = ''),
            'url': '',
            'name': '',
            'age': 0,
            'size': 0,
            'description': '',
            'score': 0
        }

        return mergeDicts(defaults, result)

    def found(self, new_result):
        # Log the accepted release, formatting the optional provider suffix.
        if not new_result.get('provider_extra'):
            new_result['provider_extra'] = ''
        else:
            new_result['provider_extra'] = ', %s' % new_result['provider_extra']

        log.info('Found: score(%(score)s) on %(provider)s%(provider_extra)s: %(name)s', new_result)
|
||||
5
couchpotato/core/media/_base/providers/info/base.py
Normal file
5
couchpotato/core/media/_base/providers/info/base.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from couchpotato.core.media._base.providers.base import Provider
|
||||
|
||||
|
||||
class BaseInfoProvider(Provider):
    """Base class for info providers (providers/info/*)."""
    type = 'unknown'
|
||||
8
couchpotato/core/media/_base/providers/metadata/base.py
Normal file
8
couchpotato/core/media/_base/providers/metadata/base.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MetaDataBase(Plugin):
    """Marker base class for metadata plugins; adds no behavior yet."""
    pass
|
||||
@@ -1,15 +1,14 @@
|
||||
config = {
|
||||
config = [{
|
||||
'name': 'nzb_providers',
|
||||
'groups': [
|
||||
{
|
||||
'label': 'Usenet',
|
||||
'label': 'Usenet Providers',
|
||||
'description': 'Providers searching usenet for new releases',
|
||||
'wizard': True,
|
||||
'type': 'list',
|
||||
'name': 'nzb_providers',
|
||||
'tab': 'searcher',
|
||||
'subtab': 'providers',
|
||||
'options': [],
|
||||
},
|
||||
],
|
||||
}
|
||||
}]
|
||||
@@ -1,9 +1,11 @@
|
||||
from couchpotato.core.providers.base import YarrProvider
|
||||
import time
|
||||
|
||||
from couchpotato.core.media._base.providers.base import YarrProvider
|
||||
|
||||
|
||||
class NZBProvider(YarrProvider):
|
||||
type = 'nzb'
|
||||
|
||||
protocol = 'nzb'
|
||||
|
||||
def calculateAge(self, unix):
|
||||
return int(time.time() - unix) / 24 / 60 / 60
|
||||
120
couchpotato/core/media/_base/providers/nzb/binsearch.py
Normal file
120
couchpotato/core/media/_base/providers/nzb/binsearch.py
Normal file
@@ -0,0 +1,120 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt, simplifyString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(NZBProvider):
    """BinSearch NZB provider: scrapes the HTML search results table."""

    urls = {
        'download': 'https://www.binsearch.info/fcgi/nzb.fcgi?q=%s',
        'detail': 'https://www.binsearch.info%s',
        'search': 'https://www.binsearch.info/index.php?%s',
    }

    http_time_between_calls = 4  # Seconds

    def _search(self, media, quality, results):
        """Scrape the BinSearch result table and append parsed releases."""

        data = self.getHTMLData(self.urls['search'] % self.buildUrl(media, quality))

        if data:
            try:

                html = BeautifulSoup(data)
                main_table = html.find('table', attrs = {'id': 'r2'})

                if not main_table:
                    return

                items = main_table.find_all('tr')

                for row in items:
                    title = row.find('span', attrs = {'class': 's'})

                    # Rows without a title span are headers/separators.
                    if not title: continue

                    nzb_id = row.find('input', attrs = {'type': 'checkbox'})['name']
                    info = row.find('span', attrs = {'class':'d'})
                    size_match = re.search('size:.(?P<size>[0-9\.]+.[GMB]+)', info.text)

                    # Age in days, parsed from the trailing '<n>d' cell.
                    age = 0
                    try: age = re.search('(?P<size>\d+d)', row.find_all('td')[-1:][0].text).group('size')[:-1]
                    except: pass

                    def extra_check(item):
                        # Reject incomplete posts (missing parts without par2
                        # repair) and passworded posts.
                        # NOTE(review): closes over the loop variable `info`;
                        # correct only while called within this iteration —
                        # confirm callers invoke it synchronously.
                        parts = re.search('available:.(?P<parts>\d+)./.(?P<total>\d+)', info.text)
                        total = float(tryInt(parts.group('total')))
                        parts = float(tryInt(parts.group('parts')))

                        if (total / parts) < 1 and ((total / parts) < 0.95 or ((total / parts) >= 0.95 and not ('par2' in info.text.lower() or 'pa3' in info.text.lower()))):
                            log.info2('Wrong: \'%s\', not complete: %s out of %s', (item['name'], parts, total))
                            return False

                        if 'requires password' in info.text.lower():
                            log.info2('Wrong: \'%s\', passworded', (item['name']))
                            return False

                        return True

                    results.append({
                        'id': nzb_id,
                        'name': simplifyString(title.text),
                        'age': tryInt(age),
                        'size': self.parseSize(size_match.group('size')),
                        'url': self.urls['download'] % nzb_id,
                        'detail_url': self.urls['detail'] % info.find('a')['href'],
                        'extra_check': extra_check
                    })

            except:
                log.error('Failed to parse HTML response from BinSearch: %s', traceback.format_exc())

    def download(self, url = '', nzb_id = ''):
        """POST the result-row checkbox form to fetch the NZB file;
        'try_next' on failure."""

        # BinSearch expects the nzb id itself as a checked form field.
        data = {
            'action': 'nzb',
            nzb_id: 'on'
        }

        try:
            return self.urlopen(url, data = data, show_error = False)
        except:
            log.error('Failed getting nzb from %s: %s', (self.getName(), traceback.format_exc()))

        return 'try_next'
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'binsearch',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'nzb_providers',
|
||||
'name': 'binsearch',
|
||||
'description': 'Free provider, less accurate. See <a href="https://www.binsearch.info/">BinSearch</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAATklEQVQY02NwQAMMWAXOnz+PKvD//3/CAvM//z+fgiwAAs+RBab4PP//vwbFjPlAffgEChzOo2r5fBuIfRAC5w8D+QUofkkp8MHjOWQAAM3Sbogztg2wAAAAAElFTkSuQmCC',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 0,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
267
couchpotato/core/media/_base/providers/nzb/newznab.py
Normal file
267
couchpotato/core/media/_base/providers/nzb/newznab.py
Normal file
@@ -0,0 +1,267 @@
|
||||
from urlparse import urlparse
|
||||
import time
|
||||
import traceback
|
||||
import re
|
||||
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
from couchpotato.core.helpers.variable import cleanHost, splitString, tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import ResultList
|
||||
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||
from couchpotato.environment import Env
|
||||
from dateutil.parser import parse
|
||||
from requests import HTTPError
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(NZBProvider, RSS):
    """Newznab-API provider: searches any number of configured newznab
    hosts and handles per-host API/download limits."""

    urls = {
        'detail': 'details/%s',
        'download': 't=get&id=%s'
    }

    # Words that prefix a password in spot descriptions (en/nl).
    passwords_regex = 'password|wachtwoord'
    # Per-hostname timestamp of when an API/download limit was hit.
    limits_reached = {}

    http_time_between_calls = 1  # Seconds

    def search(self, media, quality):
        """Search every enabled configured host; returns a ResultList."""
        hosts = self.getHosts()

        results = ResultList(self, media, quality, imdb_results = True)

        for host in hosts:
            if self.isDisabled(host):
                continue

            self._searchOnHost(host, media, quality, results)

        return results

    def _searchOnHost(self, host, media, quality, results):
        """Query one newznab host's RSS API and append parsed releases."""

        query = self.buildUrl(media, host)
        url = '%s%s' % (self.getUrl(host['host']), query)
        nzbs = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})

        for nzb in nzbs:

            # Pull usenet date and (for spotnet) the poster from attributes.
            date = None
            spotter = None
            for item in nzb:
                if date and spotter:
                    break
                if item.attrib.get('name') == 'usenetdate':
                    date = item.attrib.get('value')
                    break

                # Get the name of the person who posts the spot
                if item.attrib.get('name') == 'poster':
                    if "@spot.net" in item.attrib.get('value'):
                        spotter = item.attrib.get('value').split("@")[0]
                        continue

            if not date:
                date = self.getTextElement(nzb, 'pubDate')

            nzb_id = self.getTextElement(nzb, 'guid').split('/')[-1:].pop()
            name = self.getTextElement(nzb, 'title')

            if not name:
                continue

            name_extra = ''
            if spotter:
                name_extra = spotter

            description = ''
            if "@spot.net" in nzb_id:
                try:
                    # Get details for extended description to retrieve passwords
                    query = self.buildDetailsUrl(nzb_id, host['api_key'])
                    url = '%s%s' % (self.getUrl(host['host']), query)
                    nzb_details = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})[0]

                    description = self.getTextElement(nzb_details, 'description')

                    # Extract a password from the description
                    password = re.search('(?:' + self.passwords_regex + ')(?: *)(?:\:|\=)(?: *)(.*?)\<br\>|\n|$', description, flags = re.I).group(1)
                    if password:
                        name += ' {{%s}}' % password.strip()
                except:
                    log.debug('Error getting details of "%s": %s', (name, traceback.format_exc()))

            results.append({
                'id': nzb_id,
                'provider_extra': urlparse(host['host']).hostname or host['host'],
                'name': toUnicode(name),
                'name_extra': name_extra,
                'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
                'size': int(self.getElement(nzb, 'enclosure').attrib['length']) / 1024 / 1024,
                'url': ((self.getUrl(host['host']) + self.urls['download']) % tryUrlencode(nzb_id)) + self.getApiExt(host),
                'detail_url': (cleanHost(host['host']) + self.urls['detail']) % tryUrlencode(nzb_id),
                'content': self.getTextElement(nzb, 'description'),
                'description': description,
                'score': host['extra_score'],
            })

    def getHosts(self):
        """Combine the comma-separated config fields into one dict per host."""

        uses = splitString(str(self.conf('use')), clean = False)
        hosts = splitString(self.conf('host'), clean = False)
        api_keys = splitString(self.conf('api_key'), clean = False)
        extra_score = splitString(self.conf('extra_score'), clean = False)
        custom_tags = splitString(self.conf('custom_tag'), clean = False)

        # Missing columns fall back to defaults so ragged config still works.
        list = []
        for nr in range(len(hosts)):

            try: key = api_keys[nr]
            except: key = ''

            try: host = hosts[nr]
            except: host = ''

            try: score = tryInt(extra_score[nr])
            except: score = 0

            try: custom_tag = custom_tags[nr]
            except: custom_tag = ''

            list.append({
                'use': uses[nr],
                'host': host,
                'api_key': key,
                'extra_score': score,
                'custom_tag': custom_tag
            })

        return list

    def belongsTo(self, url, provider = None, host = None):
        """Match url against every configured host (the host arg is ignored)."""

        hosts = self.getHosts()

        for host in hosts:
            result = super(Base, self).belongsTo(url, host = host['host'], provider = provider)
            if result:
                return result

    def getUrl(self, host):
        # Spotweb-style hosts embed the api in the page query string.
        if '?page=newznabapi' in host:
            return cleanHost(host)[:-1] + '&'

        return cleanHost(host) + 'api?'

    def isDisabled(self, host = None):
        return not self.isEnabled(host)

    def isEnabled(self, host = None):

        # Return true if at least one is enabled and no host is given
        if host is None:
            for host in self.getHosts():
                if self.isEnabled(host):
                    return True
            return False

        return NZBProvider.isEnabled(self) and host['host'] and host['api_key'] and int(host['use'])

    def getApiExt(self, host):
        # Query-string suffix carrying the host's api key.
        return '&apikey=%s' % host['api_key']

    def download(self, url = '', nzb_id = ''):
        """Download an nzb; backs off a host for 3 hours after hitting its
        API/download limit. 'try_next' signals the searcher to move on."""
        host = urlparse(url).hostname

        if self.limits_reached.get(host):
            # Try again in 3 hours
            if self.limits_reached[host] > time.time() - 10800:
                return 'try_next'

        try:
            data = self.urlopen(url, show_error = False)
            self.limits_reached[host] = False
            return data
        except HTTPError as e:
            sc = e.response.status_code
            if sc in [503, 429]:
                response = e.read().lower()
                if sc == 429 or 'maximum api' in response or 'download limit' in response:
                    if not self.limits_reached.get(host):
                        log.error('Limit reached / to many requests for newznab provider: %s', host)
                    self.limits_reached[host] = time.time()
                    return 'try_next'

            log.error('Failed download from %s: %s', (host, traceback.format_exc()))

        return 'try_next'

    def buildDetailsUrl(self, nzb_id, api_key):
        """Build the query string for a newznab 'details' API call."""
        query = tryUrlencode({
            't': 'details',
            'id': nzb_id,
            'apikey': api_key,
        })
        return query
|
||||
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'newznab',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'nzb_providers',
|
||||
'name': 'newznab',
|
||||
'order': 10,
|
||||
'description': 'Enable <a href="http://newznab.com/" target="_blank">NewzNab</a> such as <a href="https://nzb.su" target="_blank">NZB.su</a>, \
|
||||
<a href="https://nzbs.org" target="_blank">NZBs.org</a>, <a href="http://dognzb.cr/" target="_blank">DOGnzb.cr</a>, \
|
||||
<a href="https://github.com/spotweb/spotweb" target="_blank">Spotweb</a>, <a href="https://nzbgeek.info/" target="_blank">NZBGeek</a>, \
|
||||
<a href="https://www.nzbfinder.ws" target="_blank">NZBFinder</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAgMAAABinRfyAAAACVBMVEVjhwD///86aRovd/sBAAAAMklEQVQI12NgAIPQUCCRmQkjssDEShiRuRIqwZqZGcDAGBrqANUhGgIkWAOABKMDxCAA24UK50b26SAAAAAASUVORK5CYII=',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': True,
|
||||
},
|
||||
{
|
||||
'name': 'use',
|
||||
'default': '0,0,0,0,0'
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://api.nzbgeek.info,https://www.nzbfinder.ws',
|
||||
'description': 'The hostname of your newznab provider',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'default': '0,0,0,0,0',
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
},
|
||||
{
|
||||
'name': 'custom_tag',
|
||||
'advanced': True,
|
||||
'label': 'Custom tag',
|
||||
'default': ',,,,',
|
||||
'description': 'Add custom tags, for example add rls=1 to get only scene releases from nzbs.org',
|
||||
},
|
||||
{
|
||||
'name': 'api_key',
|
||||
'default': ',,,,',
|
||||
'label': 'Api Key',
|
||||
'description': 'Can be found on your profile page',
|
||||
'type': 'combined',
|
||||
'combine': ['use', 'host', 'api_key', 'extra_score', 'custom_tag'],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,37 +1,28 @@
|
||||
import time
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.providers.nzb.base import NZBProvider
|
||||
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||
from dateutil.parser import parse
|
||||
import time
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class NZBClub(NZBProvider, RSS):
|
||||
class Base(NZBProvider, RSS):
|
||||
|
||||
urls = {
|
||||
'search': 'http://www.nzbclub.com/nzbfeed.aspx?%s',
|
||||
'search': 'https://www.nzbclub.com/nzbfeeds.aspx?%s',
|
||||
}
|
||||
|
||||
http_time_between_calls = 4 #seconds
|
||||
http_time_between_calls = 4 # seconds
|
||||
|
||||
def _searchOnTitle(self, title, movie, quality, results):
|
||||
def _search(self, media, quality, results):
|
||||
|
||||
q = '"%s %s"' % (title, movie['library']['year'])
|
||||
|
||||
params = tryUrlencode({
|
||||
'q': q,
|
||||
'ig': 1,
|
||||
'rpp': 200,
|
||||
'st': 5,
|
||||
'sp': 1,
|
||||
'ns': 1,
|
||||
})
|
||||
|
||||
nzbs = self.getRSSData(self.urls['search'] % params)
|
||||
nzbs = self.getRSSData(self.urls['search'] % self.buildUrl(media))
|
||||
|
||||
for nzb in nzbs:
|
||||
|
||||
@@ -64,7 +55,7 @@ class NZBClub(NZBProvider, RSS):
|
||||
def getMoreInfo(self, item):
|
||||
full_description = self.getCache('nzbclub.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
|
||||
html = BeautifulSoup(full_description)
|
||||
nfo_pre = html.find('pre', attrs = {'class':'nfo'})
|
||||
nfo_pre = html.find('pre', attrs = {'class': 'nfo'})
|
||||
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||
|
||||
item['description'] = description
|
||||
@@ -78,3 +69,32 @@ class NZBClub(NZBProvider, RSS):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'nzbclub',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'nzb_providers',
|
||||
'name': 'NZBClub',
|
||||
'description': 'Free provider, less accurate. See <a href="https://www.nzbclub.com/">NZBClub</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACEUlEQVQ4y3VSMWgUQRR9/8/s7OzeJSdnTsVGghLEYBNQjBpQiRBFhIB2EcHG1kbs0murhZAmVocExEZQ0c7CxkLINYcJJpoYj9wZcnu72fF21uJSXMzuhyne58/j/fcf4b+KokgBIOSU53lxP5b9oNVqDT36dH+5UjoiKvIwPFEEgWBshGZ3E7/NOupL9fMjx0e+ZhKsrq+c/FPZKJi0w4FsQXMBDEJsd7BNW9h2tuyP9vfTALIJkMIu1hYRtINM+dpzcWc0sbkreK4fUEogyraAmKGF3+7vcT/wtR9QwkCabSAzQQuvk0uglAo5YaQ5DASGYjfMXcHVOqKu6NmR7iehlKAdHWUqWPv1c3i+9uwVdRlEBGaGEAJCCrDo9ShhvF6qPq8tL57bp+DbRn2sHtUuCY9YphLMu5921VhrwYJ5tbt0tt6sjQP4vEfB2Ikz7/ytwbeR6ljHkXCUA6UcOLtPOg4MYhtH8ZcLw5er+xQMDAwEURRNl96X596Y6oxFwsw9fmtTOAr2Ik19nL365FZpsLSdnQPPM8aYewc+lDcX4rkHqbQMAGTJXulOLzycmr1bKBTi3DOGYagajcahiaOT89fbM0/dxEsUu3aidfPljWO3HzebzYNBELi5Z5RSJlrrHd/3w8lT114MrVTWOn875fHRiYVisRhorWMpZXdvNnLKGCOstb0AMlulVJI19w/+nceU4D0aCwAAAABJRU5ErkJggg==',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 0,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
103
couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py
Normal file
103
couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py
Normal file
@@ -0,0 +1,103 @@
|
||||
from urlparse import urlparse, parse_qs
|
||||
import time
|
||||
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||
from dateutil.parser import parse
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(NZBProvider, RSS):
    """OMGWTFNZBs NZB provider, driven by the site's RSS search endpoint."""

    urls = {
        'search': 'https://rss.omgwtfnzbs.org/rss-search.php?%s',
        'detail_url': 'https://omgwtfnzbs.org/details.php?id=%s',
    }

    http_time_between_calls = 1  # Seconds

    # (category ids on the site, CouchPotato quality identifiers they map to)
    cat_ids = [
        ([15], ['dvdrip']),
        ([15, 16], ['brrip']),
        ([16], ['720p', '1080p', 'bd50']),
        ([17], ['dvdr']),
    ]
    cat_backup_id = 'movie'

    def search(self, movie, quality):
        """Skip pre-release qualities, otherwise delegate to the base search."""
        if quality['identifier'] in fireEvent('quality.pre_releases', single = True):
            return []

        return super(Base, self).search(movie, quality)

    def _searchOnTitle(self, title, movie, quality, results):
        """Query the RSS search API for "title year" and append result dicts to `results`."""
        q = '%s %s' % (title, movie['info']['year'])
        params = tryUrlencode({
            'search': q,
            'catid': ','.join([str(x) for x in self.getCatId(quality)]),
            'user': self.conf('username', default = ''),
            'api': self.conf('api_key', default = ''),
        })

        nzbs = self.getRSSData(self.urls['search'] % params)

        for nzb in nzbs:

            enclosure = self.getElement(nzb, 'enclosure').attrib

            # Bug fix: previously `.get('id')[0]` raised TypeError (None[0])
            # when the item's link carried no "id" query parameter, aborting
            # the whole search. Skip such malformed items instead.
            id_values = parse_qs(urlparse(self.getTextElement(nzb, 'link')).query).get('id')
            if not id_values:
                continue
            nzb_id = id_values[0]

            results.append({
                'id': nzb_id,
                'name': toUnicode(self.getTextElement(nzb, 'title')),
                'age': self.calculateAge(int(time.mktime(parse(self.getTextElement(nzb, 'pubDate')).timetuple()))),
                'size': tryInt(enclosure['length']) / 1024 / 1024,
                'url': enclosure['url'],
                'detail_url': self.urls['detail_url'] % nzb_id,
                'description': self.getTextElement(nzb, 'description')
            })
|
||||
|
||||
|
||||
# UI settings for the OMGWTFNZBs provider ("searcher" tab, NZB providers list).
config = [{
    'name': 'omgwtfnzbs',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'nzb_providers',
            'name': 'OMGWTFNZBs',
            'wizard': True,
            'description': 'See <a href="http://omgwtfnzbs.org/">OMGWTFNZBs</a>',
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQEAIAAADAAbR1AAADbElEQVR4AZ2UW0ybZRiAy/OvdHaLYvB0YTRIFi7GkM44zRLmIfNixkWdiRMyYoxRE8/TC7MYvXCGEBmr3mxLwVMwY0wYA7e6Wso4lB6h/U9taSlMGIfBXLYlJMyo0S///2dJI5lxN8/F2/f9nu9737e/jYmXr6KTbN9BGG9HE/NotQ76UWziNzrXFiETk/5ARUNH+7+0kW7fSgTl0VKGOLZzidOkmuuIo7q2oTArNLPIzhdIkqXkerFOm2CaD/5bcKrjIL2c3fkhPxOq93Kcb91v46fV9TQKF4TgV/TbUsQtzfCaK6jMOd5DJrguSIIhexmqqVxN0FXbRR8/ND/LYTTj6J7nl2gnL47OkDW4KJhnQHCa6JpKVNJGA3OC58nwBJoZ//ebbIyKpBxjrr0o1q1FMRkrKXZnHWF85VvxMrJxibwhGyd0f5bLnKzqJs1k0Sfo+EU8hdAUvkbcwKEgs2D0OiV4jmmD1zb+Tp6er0JMMvDxPo5xev9zTBF683NS+N56n1YiB95B5crr93KRuKhKI0tb0Kw2mgLLqTjLEWO8424i9IvURaYeOckwf3+/yCC9e3bQQ/MuD+Monk0k+XFXMUfx7z5EEP+XlXi5tLlMxH8zLppw7idJrugcus30kC86gc7UrQqjLIukM8zWHOACeU+TiMxXN6ExVOkgz4lvPEzice1GIVhxhG4CrZvpl6TH55giKWqXGLy9hZh5aUtgDSew/msSyCKpl+DDNfxJc8NBIsxUxUnz14O/oONu+IIIvso9TLBQ1SY5rUhuSzUhAqJ2mRXBLDOCeUtgUZXsaObT8BffhUJPqWgiV+3zKKzYH0ClvTRLhD77HIqVkyh5jThnivehoG+qJctIRSPn6bxvO4FCgTl9c1DmbpjLajbQFE8aW5SU3rg+zOPGUjTUF9NFpLEbH2c/KmGYlY69/GQJVtGMSUcEp9eCbB1nctbxHTLRdTUkGDf+B02uGWRG3OvpJ/zSMwzif+oxVBID3cQKBavLCiPmB2PM2UuSCUPgrX4VDb97AwEG67bh4+KTOlncvu3M31BwA5rLHbCfEjwkNDky9e/SSbSxnD46Pg0RJtpXRvhmBSZHpRjWtKwFybjuQeXaKxto4WjLZZZvVmC17pZLJFkwxm5++PS2Mrwc7nyIMYZe/IzoP5d6QgEybqTXAAAAAElFTkSuQmCC',
            'options': [
                {'name': 'enabled', 'type': 'enabler'},
                {'name': 'username', 'default': ''},
                {'name': 'api_key', 'label': 'Api Key', 'default': ''},
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': 20,
                    'type': 'int',
                    'description': 'Starting score for each release found via this provider.',
                },
            ],
        },
    ],
}]
|
||||
@@ -1,15 +1,14 @@
|
||||
# Fix: this span was a merged unified-diff overlay containing both the old
# and the new lines (`config = {` AND `config = [{`, two 'label' keys, both
# `}` and `}]`), which is not valid Python. Reconstructed the post-diff
# version indicated by the diff header (@@ -1,15 +1,14 @@): a one-element
# list holding the empty "Torrent Providers" settings group.
config = [{
    'name': 'torrent_providers',
    'groups': [
        {
            'label': 'Torrent Providers',
            'description': 'Providers searching torrent sites for new releases',
            'wizard': True,
            'type': 'list',
            'name': 'torrent_providers',
            'tab': 'searcher',
            'subtab': 'providers',
            'options': [],
        },
    ],
}]
|
||||
141
couchpotato/core/media/_base/providers/torrent/awesomehd.py
Normal file
141
couchpotato/core/media/_base/providers/torrent/awesomehd.py
Normal file
@@ -0,0 +1,141 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'https://awesome-hd.net/',
|
||||
'detail': 'https://awesome-hd.net/torrents.php?torrentid=%s',
|
||||
'search': 'https://awesome-hd.net/searchapi.php?action=imdbsearch&passkey=%s&imdb=%s&internal=%s',
|
||||
'download': 'https://awesome-hd.net/torrents.php?action=download&id=%s&authkey=%s&torrent_pass=%s',
|
||||
}
|
||||
http_time_between_calls = 1
|
||||
|
||||
def _search(self, movie, quality, results):
|
||||
|
||||
data = self.getHTMLData(self.urls['search'] % (self.conf('passkey'), getIdentifier(movie), self.conf('only_internal')))
|
||||
|
||||
if data:
|
||||
try:
|
||||
soup = BeautifulSoup(data)
|
||||
|
||||
if soup.find('error'):
|
||||
log.error(soup.find('error').get_text())
|
||||
return
|
||||
|
||||
authkey = soup.find('authkey').get_text()
|
||||
entries = soup.find_all('torrent')
|
||||
|
||||
for entry in entries:
|
||||
|
||||
torrentscore = 0
|
||||
torrent_id = entry.find('id').get_text()
|
||||
name = entry.find('name').get_text()
|
||||
year = entry.find('year').get_text()
|
||||
releasegroup = entry.find('releasegroup').get_text()
|
||||
resolution = entry.find('resolution').get_text()
|
||||
encoding = entry.find('encoding').get_text()
|
||||
freeleech = entry.find('freeleech').get_text()
|
||||
torrent_desc = '/ %s / %s / %s ' % (releasegroup, resolution, encoding)
|
||||
|
||||
if freeleech == '0.25' and self.conf('prefer_internal'):
|
||||
torrent_desc += '/ Internal'
|
||||
torrentscore += 200
|
||||
|
||||
if encoding == 'x264' and self.conf('favor') in ['encode', 'both']:
|
||||
torrentscore += 300
|
||||
if re.search('Remux', encoding) and self.conf('favor') in ['remux', 'both']:
|
||||
torrentscore += 200
|
||||
|
||||
results.append({
|
||||
'id': torrent_id,
|
||||
'name': re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) %s' % (name, year, torrent_desc)),
|
||||
'url': self.urls['download'] % (torrent_id, authkey, self.conf('passkey')),
|
||||
'detail_url': self.urls['detail'] % torrent_id,
|
||||
'size': tryInt(entry.find('size').get_text()) / 1048576,
|
||||
'seeders': tryInt(entry.find('seeders').get_text()),
|
||||
'leechers': tryInt(entry.find('leechers').get_text()),
|
||||
'score': torrentscore
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
|
||||
# UI settings for the Awesome-HD provider ("searcher" tab, torrent providers list).
config = [{
    'name': 'awesomehd',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'Awesome-HD',
            'wizard': True,
            'description': '<a href="https://awesome-hd.net">AHD</a>',
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC+UlEQVR4AV1SO0y6dxQ9H4g8CoIoohZ5NA0aR2UgkYpNB5uocTSaLlrDblMH09Gt8d90r3YpJkanxjA4GGkbO7RNxSABq8jDGnkpD+UD5NV7Bxvbk9wvv+/3uPece66A/yEWi42FQqHVfD7/cbPZtIEglUpjOp3uZHR0dBvAn3gDIRqNgjE4OKj0+Xzf3NzcfD4wMCCjf5TLZbTbbajVatzf3+Pu7q5uNpt35ufnvwBQAScQRREEldfr9RWLxan+/n5YrVa+jFarhVfQQyQSCU4EhULhX15engEgSrjC0dHRVqlUmjQYDBgaGgKtuTqz4mTgIoVCASaTCX19fajVapOHh4dbFJBks9mxcDi8qtFoJEajkfVyJWi1WkxMTMDhcIAT8x6D7/Dd6+vr1fHx8TGp2+3+iqo5+YCzBwIBToK5ubl/mQwPDyMSibAs2Gw2UHNRrValz8/PDUk8Hv9EqVRCr9fj4uICTNflcqFer+Pg4AB7e3uoVCq8x9Rxfn6O7u5uqFQq8FspZXxHTekggByA3W4Hr9PpNDeRL3I1cMhkMrBrnZ2dyGQyvNYIs7OzVbJNPjIyAraLwYdcjR8wXl5eIJfLwRIFQQDLYkm3t7c1CdGPPT4+cpOImp4PODMeaK+n10As2jBbrHifHOjS6qAguVFimkqlwAMmIQnHV1dX4NDQhVwuhyZTV6pgIktzDzkkk0lEwhEEzs7ASQr5Ai4vL1nuccfCwsLO/v6+p9FoyJhF6ekJro/cPCzIZLNQa7rQoK77/SdgWWpKkCaJ5EB9aWnpe6nH40nRMBnJV4f5gw+FX3/5GX/8/htXRZdOzzqhJWn6nl6YbTZqqhrhULD16fT0d8FgcFtYW1vD5uamfGVl5cd4IjldKhZACdkJvKfWUANrxEaJV4hiGVaL1b+7653hXzwRZQr2X76xsfG1xWIRaZzbNPv/CdrjEL9cX/+WXFBSgEPgzxuwG3Yans9OT0+naBZMIJDNfzudzp8WFxd/APAX3uAf9WOTxOPLdosAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {'name': 'passkey', 'default': ''},
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'only_internal',
                    'advanced': True,
                    'type': 'bool',
                    'default': 1,
                    'description': 'Only search for internal releases.'
                },
                {
                    'name': 'prefer_internal',
                    'advanced': True,
                    'type': 'bool',
                    'default': 1,
                    'description': 'Favors internal releases over non-internal releases.'
                },
                {
                    'name': 'favor',
                    'advanced': True,
                    'type': 'dropdown',
                    'default': 'both',
                    'values': [('Encodes & Remuxes', 'both'), ('Encodes', 'encode'), ('Remuxes', 'remux'), ('None', 'none')],
                    'description': 'Give extra scoring to encodes or remuxes.'
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                },
            ],
        },
    ],
}]
|
||||
|
||||
78
couchpotato/core/media/_base/providers/torrent/base.py
Normal file
78
couchpotato/core/media/_base/providers/torrent/base.py
Normal file
@@ -0,0 +1,78 @@
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.variable import getImdb, md5, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import YarrProvider
|
||||
from couchpotato.environment import Env
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class TorrentProvider(YarrProvider):
    """Base class for torrent providers: IMDB matching + proxy domain picking."""

    protocol = 'torrent'

    # Chosen working proxy (cached per instance); candidates in proxy_list.
    proxy_domain = None
    proxy_list = []

    def imdbMatch(self, url, imdbId):
        """Return True when `url` itself, or the page it points to, matches `imdbId`."""
        if getImdb(url) == imdbId:
            return True

        if url[:4] == 'http':
            try:
                page = self.getCache(md5(url), url)
            except IOError:
                log.error('Failed to open %s.', url)
                return False
            return getImdb(page) == imdbId

        return False

    def getDomain(self, url = ''):
        """Return a usable domain (forced config value or a live proxy) + `url`."""
        forced = self.conf('domain')
        if forced:
            return cleanHost(forced).rstrip('/') + url

        if not self.proxy_domain:
            for proxy in self.proxy_list:

                prop_name = 'proxy.%s' % proxy
                # Skip proxies already checked within the last 24 hours.
                if float(Env.prop(prop_name, default = 0)) > time.time() - 86400:
                    continue

                data = ''
                try:
                    data = self.urlopen(proxy, timeout = 3, show_error = False)
                except:
                    log.debug('Failed %s proxy %s: %s', (self.getName(), proxy, traceback.format_exc()))

                if self.correctProxy(data):
                    log.debug('Using proxy for %s: %s', (self.getName(), proxy))
                    self.proxy_domain = proxy
                    break

                # Record the failed check so this proxy is skipped for a day.
                Env.prop(prop_name, time.time())

        if not self.proxy_domain:
            log.error('No %s proxies left, please add one in settings, or let us know which one to add on the forum.', self.getName())
            return None

        return cleanHost(self.proxy_domain).rstrip('/') + url

    def correctProxy(self, data):
        """Hook for subclasses to validate a proxy response; accepts anything by default."""
        return True
|
||||
|
||||
|
||||
class TorrentMagnetProvider(TorrentProvider):
    """Torrent provider variant whose results are magnet links."""

    protocol = 'torrent_magnet'

    # No download handler is set for magnet results
    # (NOTE(review): presumably handled upstream — confirm against YarrProvider).
    download = None
|
||||
139
couchpotato/core/media/_base/providers/torrent/bithdtv.py
Normal file
139
couchpotato/core/media/_base/providers/torrent/bithdtv.py
Normal file
@@ -0,0 +1,139 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """BiT-HDTV torrent provider (movie search only)."""

    urls = {
        'test': 'http://www.bit-hdtv.com/',
        'login': 'http://www.bit-hdtv.com/takelogin.php',
        'login_check': 'http://www.bit-hdtv.com/messages.php',
        'detail': 'http://www.bit-hdtv.com/details.php?id=%s',
        'search': 'http://www.bit-hdtv.com/torrents.php?',
    }

    # Searches for movies only - BiT-HDTV's subcategory and resolution search filters appear to be broken
    http_time_between_calls = 1  # Seconds

    def _search(self, media, quality, results):
        """Scrape the search results table and append release dicts to `results`."""
        search_url = "%s&%s" % (self.urls['search'], self.buildUrl(media, quality))

        data = self.getHTMLData(search_url)
        if not data:
            return

        # Remove BiT-HDTV's output garbage so outdated BS4 versions successfully parse the HTML
        head, _, tail = data.partition('-->')
        if '## SELECT COUNT(' in head:
            data = tail

        html = BeautifulSoup(data)

        try:
            result_table = html.find('table', attrs = {'width': '750', 'class': ''})
            if result_table is None:
                return

            for row in result_table.find_all('tr')[1:]:

                cells = row.find_all('td')
                link = cells[2].find('a')
                torrent_id = link['href'].replace('/details.php?id=', '')

                results.append({
                    'id': torrent_id,
                    'name': link.contents[0].get_text(),
                    'url': cells[0].find('a')['href'],
                    'detail_url': self.urls['detail'] % torrent_id,
                    'size': self.parseSize(cells[6].get_text()),
                    'seeders': tryInt(cells[8].string),
                    'leechers': tryInt(cells[9].string),
                    'get_more_info': self.getMoreInfo,
                })

        except:
            log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """Form fields POSTed to takelogin.php."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
        }

    def getMoreInfo(self, item):
        """Fetch the (long-cached) detail page and attach its description to `item`."""
        detail_page = self.getCache('bithdtv.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
        detail_html = BeautifulSoup(detail_page)
        detail_table = detail_html.find('table', attrs = {'class': 'detail'})
        item['description'] = toUnicode(detail_table.text) if detail_table else ''
        return item

    def loginSuccess(self, output):
        """A page containing the logout link means we are logged in."""
        return 'logout.php' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# UI settings for the BiT-HDTV provider ("searcher" tab, torrent providers list).
config = [{
    'name': 'bithdtv',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'BiT-HDTV',
            'wizard': True,
            'description': '<a href="http://bit-hdtv.com">BiT-HDTV</a>',
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABMklEQVR4AZ3Qu0ojcQCF8W9MJcQbJNgEEQUbQVIqWgnaWfkIvoCgggixEAmIhRtY2GV3w7KwU61B0EYIxmiw0YCik84ipaCuc0nmP5dcjIUgOjqDvxf4OAdf9mnMLcUJyPyGSCP+YRdC+Kp8iagJKhuS+InYRhTGgDbeV2uEMand4ZRxizjXHQEimxhraAnUr73BNqQxMiNeV2SwcjTLEVtb4Zl10mXutvOWm2otw5Sxz6TGTbdd6ncuYvVLXAXrvM+ruyBpy1S3JLGDfUQ1O6jn5vTsrJXvqSt4UNfj6vxTRPxBHER5QeSirhLGk/5rWN+ffB1XZuxjnDy1q87m7TS+xOGA+Iv4gfkbaw+nOMXHDHnITGEk0VfRFnn4Po4vNYm6RGukmggR0L08+l+e4HMeASo/i6AJUjLgAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {'name': 'username', 'default': ''},
                {'name': 'password', 'default': '', 'type': 'password'},
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                },
            ],
        },
    ],
}]
|
||||
137
couchpotato/core/media/_base/providers/torrent/bitsoup.py
Normal file
137
couchpotato/core/media/_base/providers/torrent/bitsoup.py
Normal file
@@ -0,0 +1,137 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup, SoupStrainer
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """Bitsoup torrent provider."""

    urls = {
        'test': 'https://www.bitsoup.me/',
        'login': 'https://www.bitsoup.me/takelogin.php',
        'login_check': 'https://www.bitsoup.me/my.php',
        'search': 'https://www.bitsoup.me/browse.php?%s',
        'baseurl': 'https://www.bitsoup.me/%s',
    }

    http_time_between_calls = 1  # Seconds
    # Parse only <table> elements to keep BeautifulSoup's work small.
    only_tables_tags = SoupStrainer('table')

    # Column indices in the results table.
    torrent_name_cell = 1
    torrent_download_cell = 2

    def _searchOnTitle(self, title, movie, quality, results):
        """Scrape the browse page for `title` and append release dicts to `results`."""
        data = self.getHTMLData(self.urls['search'] % self.buildUrl(title, movie, quality))
        if not data:
            return

        html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)

        try:
            result_table = html.find('table', attrs = {'class': 'koptekst'})
            if not result_table or 'nothing found!' in data.lower():
                return

            for row in result_table.find_all('tr')[1:]:

                cells = row.find_all('td')

                torrent = cells[self.torrent_name_cell].find('a')
                download = cells[self.torrent_download_cell].find('a')

                torrent_id = torrent['href'].replace('details.php?id=', '').replace('&hit=1', '')

                results.append({
                    'id': torrent_id,
                    'name': torrent.getText(),
                    'size': self.parseSize(cells[8].getText()),
                    'seeders': tryInt(cells[10].getText()),
                    'leechers': tryInt(cells[11].getText()),
                    'url': self.urls['baseurl'] % download['href'],
                    'detail_url': self.urls['baseurl'] % torrent['href'],
                })

        except:
            log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """Form fields POSTed to takelogin.php."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'ssl': 'yes',
        }

    def loginSuccess(self, output):
        """A page containing the logout link means we are logged in."""
        return 'logout.php' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# UI settings for the Bitsoup provider ("searcher" tab, torrent providers list).
config = [{
    'name': 'bitsoup',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'Bitsoup',
            'wizard': True,
            'description': '<a href="https://bitsoup.me">Bitsoup</a>',
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAB8ElEQVR4AbWSS2sTURiGz3euk0mswaE37HhNhIrajQheFgF3rgR/lAt/gOBCXNZlo6AbqfUWRVCxi04wqUnTRibpJLaJzdzOOZ6WUumyC5/VHOb9eN/FA91uFx0FjI4IPfgiGLTWH73tn348GKmN7ijD0d2b41fO5qJEaX24AWNIUrVQCTTJ3Llx6vbV6Vtzk7Gi9+ebi996guFDDYAQAVj4FExP5qdOZB49W62t/zH3hECcwsPnbWeMXz6Xi2K1f0ApeK3hMCHHbP5gvvoriBgFAAQJEAxhjJ4u+YWTNsVI6b1JgtPWZkoIefKy4fcii2OTw2BABs7wj3bYDlLL4rvjGWOdTierpl5Xf7c3Q/MbHQYApxItvnm31mhQQ71eX2vUB76/vsWB2hg0QuogrMwLIG8P3InM2/eVGXeDViqVwWB79vRU2lgJYmdHcgXCTAXQFJTN5HguvDCR2Hxsxe8EvT54nlcul5vNpqDIEgwRQanAhAAABgRIyiQcjpIkkTOuWyqVoN/vSylX67XXH74uV1vHRUyxxFqbLBCSmBpiXSq6xcL5QrGYzWZ3XQIAwdlOJB+/aL764ucdmncYs0WsCI7kvTnn+qyDMEnTVCn1Tz5KsBFg6fvWcmsUAcnYNC/g2hnromvvqbHvxv+39S+MX+bWkFXwAgAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {'name': 'username', 'default': ''},
                {'name': 'password', 'default': '', 'type': 'password'},
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                },
            ],
        },
    ],
}]
|
||||
116
couchpotato/core/media/_base/providers/torrent/hdbits.py
Normal file
116
couchpotato/core/media/_base/providers/torrent/hdbits.py
Normal file
@@ -0,0 +1,116 @@
|
||||
import re
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """HDBits torrent provider, using the site's JSON torrents API."""

    urls = {
        'test': 'https://hdbits.org/',
        'detail': 'https://hdbits.org/details.php?id=%s',
        'download': 'https://hdbits.org/download.php?id=%s&passkey=%s',
        'api': 'https://hdbits.org/api/torrents'
    }

    http_time_between_calls = 1  # Seconds

    def _post_query(self, **params):
        """POST a JSON query (credentials merged with `params`) to the API.

        Returns the API's 'data' list, or None on API error or exception.
        """
        post_data = {
            'username': self.conf('username'),
            'passkey': self.conf('passkey')
        }
        post_data.update(params)

        try:
            result = self.getJsonData(self.urls['api'], data = json.dumps(post_data))

            if result:
                if result['status'] != 0:
                    log.error('Error searching hdbits: %s' % result['message'])
                else:
                    return result['data']
        except:
            # Best-effort: any failure is treated as "no results".
            pass

        return None

    def _search(self, movie, quality, results):
        """Search by IMDB id and append release dicts to `results`."""
        # Bug fix: previously `match.group(1)` raised AttributeError when the
        # identifier was not an IMDB id; bail out cleanly instead. Also accept
        # the newer 8-digit IMDB ids in addition to 7-digit ones.
        match = re.match(r'tt(\d{7,8})', getIdentifier(movie))
        if not match:
            return

        data = self._post_query(imdb = {'id': match.group(1)})

        if data:
            try:
                for result in data:
                    results.append({
                        'id': result['id'],
                        'name': result['name'],
                        'url': self.urls['download'] % (result['id'], self.conf('passkey')),
                        'detail_url': self.urls['detail'] % result['id'],
                        'size': tryInt(result['size']) / 1024 / 1024,
                        'seeders': tryInt(result['seeders']),
                        'leechers': tryInt(result['leechers'])
                    })
            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
|
||||
# UI settings for the HDBits provider ("searcher" tab, torrent providers list).
config = [{
    'name': 'hdbits',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'HDBits',
            'wizard': True,
            'description': '<a href="http://hdbits.org">HDBits</a>',
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABi0lEQVR4AZWSzUsbQRjGdyabTcvSNPTSHlpQQeMHJApC8CJRvHgQQU969+LJP8G7f4N3DwpeFRQvRr0EKaUl0ARSpkigUNFsMl/r9NmZLCEHA/nNO5PfvMPDm0DI6fV3ZxiolEICe1oZCBVCCmBPKwOh2ErKBHGE4KYEXBpSLkUlqO4LcM7f+6nVhRnOhSkOz/hexk+tL+YL0yPF2YmN4tynD++4gTLGkNNac9YFLoREBR1+cnF3dFY6v/m6PD+FaXiNJtgA4xYbABxiGrz6+6HWaI5/+Qh37YS0/3Znc8UxwNGBIIBX22z+/ZdJ+4wzyjpR4PEpODg8tgUXBv2iWUzSpa12B0IR6n6lvt8Aek2lZHb084+fdRNgrwY8z81PjhVy2d2ttUrtV/lbBa+JXGEpDMPnoF2tN1QYRqVUtf6nFbThb7wk7le395elcqhASLb39okDiHY00VCtCTEHwSiH4AI0lkOiT1dwMeSfT3SRxiQWNO7Zwj1egkoVIQFMKvSiC3bcjXq9Jf8DcDIRT3hh10kAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {'name': 'username', 'default': ''},
                {'name': 'passkey', 'default': ''},
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                },
            ],
        },
    ],
}]
|
||||
195
couchpotato/core/media/_base/providers/torrent/ilovetorrents.py
Normal file
195
couchpotato/core/media/_base/providers/torrent/ilovetorrents.py
Normal file
@@ -0,0 +1,195 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||
from couchpotato.core.helpers.variable import tryInt, splitString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'download': 'https://www.ilovetorrents.me/%s',
|
||||
'detail': 'https://www.ilovetorrents.me/%s',
|
||||
'search': 'https://www.ilovetorrents.me/browse.php?search=%s&page=%s&cat=%s',
|
||||
'test': 'https://www.ilovetorrents.me/',
|
||||
'login': 'https://www.ilovetorrents.me/takelogin.php',
|
||||
'login_check': 'https://www.ilovetorrents.me'
|
||||
}
|
||||
|
||||
cat_ids = [
|
||||
(['41'], ['720p', '1080p', 'brrip']),
|
||||
(['19'], ['cam', 'ts', 'dvdrip', 'tc', 'r5', 'scr']),
|
||||
(['20'], ['dvdr'])
|
||||
]
|
||||
|
||||
cat_backup_id = 200
|
||||
disable_provider = False
|
||||
http_time_between_calls = 1
|
||||
|
||||
def _searchOnTitle(self, title, movie, quality, results):
|
||||
|
||||
page = 0
|
||||
total_pages = 1
|
||||
cats = self.getCatId(quality)
|
||||
|
||||
while page < total_pages:
|
||||
|
||||
movieTitle = tryUrlencode('"%s" %s' % (title, movie['info']['year']))
|
||||
search_url = self.urls['search'] % (movieTitle, page, cats[0])
|
||||
page += 1
|
||||
|
||||
data = self.getHTMLData(search_url)
|
||||
if data:
|
||||
try:
|
||||
|
||||
results_table = None
|
||||
|
||||
data_split = splitString(data, '<table')
|
||||
soup = None
|
||||
for x in data_split:
|
||||
soup = BeautifulSoup(x)
|
||||
results_table = soup.find('table', attrs = {'class': 'koptekst'})
|
||||
if results_table:
|
||||
break
|
||||
|
||||
if not results_table:
|
||||
return
|
||||
|
||||
try:
|
||||
pagelinks = soup.findAll(href = re.compile('page'))
|
||||
page_numbers = [int(re.search('page=(?P<page_number>.+'')', i['href']).group('page_number')) for i in pagelinks]
|
||||
total_pages = max(page_numbers)
|
||||
except:
|
||||
pass
|
||||
|
||||
entries = results_table.find_all('tr')
|
||||
|
||||
for result in entries[1:]:
|
||||
prelink = result.find(href = re.compile('details.php'))
|
||||
link = prelink['href']
|
||||
download = result.find('a', href = re.compile('download.php'))['href']
|
||||
|
||||
if link and download:
|
||||
|
||||
def extra_score(item):
|
||||
trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
|
||||
vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
|
||||
confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
|
||||
moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]
|
||||
|
||||
return confirmed + trusted + vip + moderated
|
||||
|
||||
id = re.search('id=(?P<id>\d+)&', link).group('id')
|
||||
url = self.urls['download'] % download
|
||||
|
||||
fileSize = self.parseSize(result.select('td.rowhead')[5].text)
|
||||
results.append({
|
||||
'id': id,
|
||||
'name': toUnicode(prelink.find('b').text),
|
||||
'url': url,
|
||||
'detail_url': self.urls['detail'] % link,
|
||||
'size': fileSize,
|
||||
'seeders': tryInt(result.find_all('td')[2].string),
|
||||
'leechers': tryInt(result.find_all('td')[3].string),
|
||||
'extra_score': extra_score,
|
||||
'get_more_info': self.getMoreInfo
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
def getLoginParams(self):
|
||||
return {
|
||||
'username': self.conf('username'),
|
||||
'password': self.conf('password'),
|
||||
'submit': 'Welcome to ILT',
|
||||
}
|
||||
|
||||
def getMoreInfo(self, item):
|
||||
cache_key = 'ilt.%s' % item['id']
|
||||
description = self.getCache(cache_key)
|
||||
|
||||
if not description:
|
||||
|
||||
try:
|
||||
full_description = self.getHTMLData(item['detail_url'])
|
||||
html = BeautifulSoup(full_description)
|
||||
nfo_pre = html.find('td', attrs = {'class': 'main'}).findAll('table')[1]
|
||||
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||
except:
|
||||
log.error('Failed getting more info for %s', item['name'])
|
||||
description = ''
|
||||
|
||||
self.setCache(cache_key, description, timeout = 25920000)
|
||||
|
||||
item['description'] = description
|
||||
return item
|
||||
|
||||
def loginSuccess(self, output):
    """Return True when the returned page contains a logout link (i.e. we are logged in)."""
    lowered = output.lower()
    return lowered.find('logout.php') != -1


# Session re-check uses the same heuristic as the initial login.
loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# Provider registration consumed by CouchPotato's settings UI:
# one "ilovetorrents" group under Searcher > torrent providers.
config = [{
    'name': 'ilovetorrents',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'ILoveTorrents',
            'description': 'Where the Love of Torrents is Born',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACPUlEQVR4AYWM0U9SbxjH3+v266I/oNvWZTfd2J1d0ZqbZEFwWrUImOKs4YwtumFKZvvlJJADR2TCQQlMPKg5NmpREgaekAPnBATKgmK1LqQlx6awHnZWF1Tr2Xfvvs+7z+dB0mlO7StpAh+M4S/2jbo3w8+xvJvlnSneEt+10zwer5ujNUOoChjALWFw5XOwdCAk/P57cGvPl+Oht0W7VJHN5NC1uW1BON4hGjXbwpVWMZhsy9v7sEIXAsDNYBXgdkEoIKyWD2CF8ut/aOXTZc/fBSgLWw1BgA4BDHOV0GkT90cBQpXahU5TFomsb38XhJC5/Tbh1P8c6rJlBeGfAeyMhUFwNVcs9lxV9Ot0dwmyd+mrNvRtbJ2fSPC6Z3Vsvub2z3sDFACAAYzk0+kUyxEkyfN7PopqNBro55A+P6yPKIrL5zF1HwjdeBJJCObIsZO79bo3sHhWhglo5WMV3mazuVPb4fLvSL8/FAkB1hK6rXQPwYhMyROK8VK5LAiH/jsMt0HQjxiN4/ePdoilllcqDyt3Mkg8mRBNbIhMb8RERkowQA/p76g0/UDDdCoNmDminM0qSK5vlpE5kugCHhNPxntwWmJPYTMZtYcFR6ABHQsVRlYLukVORaaULvqKI46keFSCv77kSPS6kxrPptLNDHgz16fWBtyxe6v5h08LUy+KI8ushqTPWWIX8Sg6b45IrGtyW6zXFb/hpQf9m3oqfWuB0fpSw0uZ4WB69En69uOk2rmO2V52PXj+A/mI4ESKpb2HAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False
                },
                {
                    'name': 'username',
                    'label': 'Username',
                    'type': 'string',
                    'default': '',
                    'description': 'The user name for your ILT account',
                },
                {
                    'name': 'password',
                    'label': 'Password',
                    'type': 'password',
                    'default': '',
                    'description': 'The password for your ILT account.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        }
    ]
}]
|
||||
172
couchpotato/core/media/_base/providers/torrent/iptorrents.py
Normal file
172
couchpotato/core/media/_base/providers/torrent/iptorrents.py
Normal file
@@ -0,0 +1,172 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """IPTorrents search provider.

    Scrapes the paginated torrent listing, following the page navigation
    until all result pages (or a shutdown request) are exhausted.
    """

    urls = {
        'test': 'https://www.iptorrents.com/',
        'base_url': 'https://www.iptorrents.com',
        'login': 'https://www.iptorrents.com/torrents/',
        'login_check': 'https://www.iptorrents.com/inbox.php',
        'search': 'https://www.iptorrents.com/torrents/?%s%%s&q=%s&qf=ti&p=%%d',
    }

    http_time_between_calls = 1  # Seconds
    cat_backup_id = None

    def buildUrl(self, title, media, quality):
        """Return the search URL template for *title*, or None if no category matches."""
        # ':' breaks the site's title search, so strip it.
        return self._buildUrl(title.replace(':', ''), quality)

    def _buildUrl(self, query, quality):
        """Fill the search URL with category filters and the urlencoded query."""
        cat_ids = self.getCatId(quality)

        if not cat_ids:
            log.warning('Unable to find category ids for identifier "%s"', quality.get('identifier'))
            return None

        # '%' must be doubled because the result is used as a template again
        # ('%s' freeleech flag and '%d' page number are filled later).
        return self.urls['search'] % ("&".join(("l%d=" % x) for x in cat_ids), tryUrlencode(query).replace('%', '%%'))

    def _searchOnTitle(self, title, media, quality, results):
        """Search IPTorrents for *title*, appending release dicts to *results*."""
        freeleech = '' if not self.conf('freeleech') else '&free=on'

        base_url = self.buildUrl(title, media, quality)
        if not base_url: return

        pages = 1
        current_page = 1
        while current_page <= pages and not self.shuttingDown():
            data = self.getHTMLData(base_url % (freeleech, current_page))

            if data:
                html = BeautifulSoup(data)

                try:
                    # Discover the real page count from the "Next" navigation link.
                    page_nav = html.find('span', attrs = {'class': 'page_nav'})
                    if page_nav:
                        next_link = page_nav.find("a", text = "Next")
                        if next_link:
                            final_page_link = next_link.previous_sibling.previous_sibling
                            pages = int(final_page_link.string)

                    result_table = html.find('table', attrs = {'class': 'torrents'})

                    if not result_table or 'nothing found!' in data.lower():
                        return

                    entries = result_table.find_all('tr')

                    for result in entries[1:]:  # first row is the header

                        torrent = result.find_all('td')
                        if len(torrent) <= 1:
                            break

                        torrent = torrent[1].find('a')

                        torrent_id = torrent['href'].replace('/details.php?id=', '')
                        torrent_name = six.text_type(torrent.string)
                        torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href'].replace(' ', '.')
                        torrent_details_url = self.urls['base_url'] + torrent['href']
                        torrent_size = self.parseSize(result.find_all('td')[5].string)
                        torrent_seeders = tryInt(result.find('td', attrs = {'class': 'ac t_seeders'}).string)
                        torrent_leechers = tryInt(result.find('td', attrs = {'class': 'ac t_leechers'}).string)

                        results.append({
                            'id': torrent_id,
                            'name': torrent_name,
                            'url': torrent_download_url,
                            'detail_url': torrent_details_url,
                            'size': torrent_size,
                            'seeders': torrent_seeders,
                            'leechers': torrent_leechers,
                        })

                except:
                    # Fixed log message grammar ('Failed to parsing' -> 'Failed parsing').
                    log.error('Failed parsing %s: %s', (self.getName(), traceback.format_exc()))
                    break

            current_page += 1

    def getLoginParams(self):
        """Form payload for the IPTorrents login POST."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'login': 'submit',
        }

    def loginSuccess(self, output):
        """Logged-in pages never show the signup prompt."""
        return 'don\'t have an account' not in output.lower()

    def loginCheckSuccess(self, output):
        """A logout link is only present for authenticated sessions."""
        return '/logout.php' in output.lower()
|
||||
|
||||
|
||||
# Provider registration consumed by CouchPotato's settings UI:
# one "iptorrents" group under Searcher > torrent providers.
config = [{
    'name': 'iptorrents',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'IPTorrents',
            'description': '<a href="http://www.iptorrents.com">IPTorrents</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABRklEQVR42qWQO0vDUBiG8zeKY3EqQUtNO7g0J6ZJ1+ifKIIFQXAqDYKCyaaYxM3udrZLHdRFhXrZ6liCW6mubfk874EESgqaeOCF7/Y8hEh41aq6yZi2nyZgBGya9XKtZs4No05pAkZV2YbEmyMMsoSxLQeC46wCTdPPY4HruPQyGIhF97qLWsS78Miydn4XdK46NJ9OsQAYBzMIMf8MQ9wtCnTdWCaIDx/u7uljOIQEe0hiIWPamSTLay3+RxOCSPI9+RJAo7Er9r2bnqjBFAqyK+VyK4f5/Cr5ni8OFKVCz49PFI5GdNvvU7ttE1M1zMU+8AMqFksEhrMnQsBDzqmDAwzx2ehRLwT7yyCI+vSC99c3mozH1NxrJgWWtR1BOECfEJSVCm6WCzJGCA7+IWhBsM4zywDPwEp4vCjx2DzBH2ODAfsDb33Ps6dQwJgAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'freeleech',
                    'default': 0,
                    'type': 'bool',
                    'description': 'Only search for [FreeLeech] torrents.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -0,0 +1,181 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentMagnetProvider):
    """KickAssTorrents magnet provider.

    Searches by IMDB id and parses the tabbed result page, one tab per
    site category; only tabs matching the wanted quality's categories are read.
    """

    urls = {
        'detail': '%s/%s',
        'search': '%s/%s-i%s/',
    }

    cat_ids = [
        (['cam'], ['cam']),
        (['telesync'], ['ts', 'tc']),
        (['screener', 'tvrip'], ['screener']),
        (['x264', '720p', '1080p', 'blu-ray', 'hdrip'], ['bd50', '1080p', '720p', 'brrip']),
        (['dvdrip'], ['dvdrip']),
        (['dvd'], ['dvdr']),
    ]

    http_time_between_calls = 1  # Seconds
    cat_backup_id = None

    proxy_list = [
        'https://kickass.to',
        'http://kickass.pw',
        'http://kickassto.come.in',
        'http://katproxy.ws',
        'http://kickass.bitproxy.eu',
        'http://katph.eu',
        'http://kickassto.come.in',
    ]

    def _search(self, media, quality, results):
        """Search KAT for *media*'s IMDB id, appending release dicts to *results*."""
        data = self.getHTMLData(self.urls['search'] % (self.getDomain(), 'm', getIdentifier(media).replace('tt', '')))

        if data:

            cat_ids = self.getCatId(quality)
            table_order = ['name', 'size', None, 'age', 'seeds', 'leechers']

            try:
                html = BeautifulSoup(data)
                resultdiv = html.find('div', attrs = {'class': 'tabs'})
                for result in resultdiv.find_all('div', recursive = False):
                    # NOTE(review): str.strip('tab-') strips those *characters*
                    # from both ends, not the 'tab-' prefix — kept as-is since the
                    # category names don't start/end with t/a/b/-.
                    if result.get('id').lower().strip('tab-') not in cat_ids:
                        continue

                    try:
                        for temp in result.find_all('tr'):
                            # Bug fix: original used 'is' for string comparison,
                            # which compares identity, not equality.
                            if temp['class'] == 'firstr' or not temp.get('id'):
                                continue

                            new = {}

                            nr = 0
                            for td in temp.find_all('td'):
                                column_name = table_order[nr]
                                if column_name:

                                    if column_name == 'name':
                                        link = td.find('div', {'class': 'torrentname'}).find_all('a')[2]
                                        new['id'] = temp.get('id')[-7:]
                                        new['name'] = link.text
                                        new['url'] = td.find('a', 'imagnet')['href']
                                        new['detail_url'] = self.urls['detail'] % (self.getDomain(), link['href'][1:])
                                        new['verified'] = True if td.find('a', 'iverify') else False
                                        new['score'] = 100 if new['verified'] else 0
                                    # Bug fix: 'is' -> '==' for all string comparisons below.
                                    elif column_name == 'size':
                                        new['size'] = self.parseSize(td.text)
                                    elif column_name == 'age':
                                        new['age'] = self.ageToDays(td.text)
                                    elif column_name == 'seeds':
                                        new['seeders'] = tryInt(td.text)
                                    elif column_name == 'leechers':
                                        new['leechers'] = tryInt(td.text)

                                nr += 1

                            # Only store verified torrents
                            if self.conf('only_verified') and not new['verified']:
                                continue

                            results.append(new)
                    except:
                        log.error('Failed parsing KickAssTorrents: %s', traceback.format_exc())

            except AttributeError:
                log.debug('No search results found.')

    def ageToDays(self, age_str):
        """Convert an age string like '2 weeks' or '1 month' to whole days."""
        age = 0
        age_str = age_str.replace('&nbsp;', ' ')

        regex = '(\d*.?\d+).(sec|hour|day|week|month|year)+'
        matches = re.findall(regex, age_str)
        for match in matches:
            nr, size = match
            mult = 1
            if size == 'week':
                mult = 7
            elif size == 'month':
                mult = 30.5
            elif size == 'year':
                mult = 365

            age += tryInt(nr) * mult

        return tryInt(age)

    def isEnabled(self):
        """Enabled only when a working (proxy) domain is available."""
        return super(Base, self).isEnabled() and self.getDomain()

    def correctProxy(self, data):
        """A real KAT page contains the search form placeholder text."""
        return 'search query' in data.lower()
|
||||
|
||||
|
||||
# Provider registration consumed by CouchPotato's settings UI:
# one "kickasstorrents" group under Searcher > torrent providers.
config = [{
    'name': 'kickasstorrents',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'KickAssTorrents',
            'description': '<a href="https://kat.ph/">KickAssTorrents</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACD0lEQVR42pXK20uTcRjA8d/fsJsuap0orBuFlm3hir3JJvQOVmuwllN20Lb2isI2nVHKjBqrCWYaNnNuBrkSWxglhDVJOkBdSWUOq5FgoiOrMdRJ2xPPxW+8OUf1ha/FcyCUSVe2qedK5U/OxNTTXRNXEQ52Glb4O6dNEfK1auJkvRY7+/zxnQbA/D596laXcY3OWOiaIX2393SGznUmxkUo/YkDgqHemuzobQ7+NV+reo5Q1mqp68GABdY3+/EloO+JeN4tEqiFU8f3CwhyWo9E7wfMgI0ELTDx0AvjIxcgvZoC9P7NMN7yMmrFeoKa68rfDfmrARsNN0Ihr55cx59ctZWSiwS5bLKpwW4dYJH+M/B6/CYszE0BFZ+egG+Ln+HRoBN/cpl1pV6COIMkOnBVA/w+fXgGKJVM4LxhumMleoL06hJ3wKcCfl+/TAKKx17gnFePRwkqxR4BQSpFkbCrrQJueI7mWpyfATQ9OQY43+uv/+PutBycJ3y2qn2x7jY50GJvnwLKZjOwspyE5I8F4N+1yr1uwqcs3ym63Hwo29EiAyzUWQVr6WVAS4lZCPutQG/2GtES2YiW3d3XflYKtL72kzAcdEDHeSa3czeIMyyz/TApRKvcFfE0isHbJMnrHCf6xTLb1ORvWNlWo91cvHrJUQo0o6ZoRi7dIiT/g2WEDi27Iyov21xMCvgNfXvtwIACfHwAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': True,
                },
                {
                    'name': 'domain',
                    'advanced': True,
                    'label': 'Proxy server',
                    'description': 'Domain for requests, keep empty to let CouchPotato pick.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'only_verified',
                    'advanced': True,
                    'type': 'bool',
                    'default': False,
                    'description': 'Only search for verified releases.'
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
279
couchpotato/core/media/_base/providers/torrent/passthepopcorn.py
Normal file
279
couchpotato/core/media/_base/providers/torrent/passthepopcorn.py
Normal file
@@ -0,0 +1,279 @@
|
||||
import htmlentitydefs
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.helpers.variable import getTitle, tryInt, mergeDicts, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
from dateutil.parser import parse
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """PassThePopcorn provider.

    Queries the site's JSON torrent API and scores/filters results by
    Golden Popcorn / Freeleech / Scene flags and per-quality post-search
    filters. NOTE(review): relies on `self.quality_search_params` and
    `self.post_search_filters`, which are defined elsewhere (presumably a
    subclass) — confirm before reuse.
    """

    urls = {
        'domain': 'https://tls.passthepopcorn.me',
        'detail': 'https://tls.passthepopcorn.me/torrents.php?torrentid=%s',
        'torrent': 'https://tls.passthepopcorn.me/torrents.php',
        'login': 'https://tls.passthepopcorn.me/ajax.php?action=login',
        'login_check': 'https://tls.passthepopcorn.me/ajax.php?action=login',
        'search': 'https://tls.passthepopcorn.me/search/%s/0/7/%d'
    }

    # Seconds between HTTP requests (site rate limit courtesy).
    http_time_between_calls = 2

    def _search(self, media, quality, results):
        """Query the JSON API for *media* and append scored release dicts to *results*."""
        movie_title = getTitle(media)
        quality_id = quality['identifier']

        params = mergeDicts(self.quality_search_params[quality_id].copy(), {
            'order_by': 'relevance',
            'order_way': 'descending',
            'searchstr': getIdentifier(media)
        })

        url = '%s?json=noredirect&%s' % (self.urls['torrent'], tryUrlencode(params))
        res = self.getJsonData(url)

        try:
            if not 'Movies' in res:
                return

            # Auth tokens needed to build authenticated download URLs.
            authkey = res['AuthKey']
            passkey = res['PassKey']

            for ptpmovie in res['Movies']:
                if not 'Torrents' in ptpmovie:
                    log.debug('Movie %s (%s) has NO torrents', (ptpmovie['Title'], ptpmovie['Year']))
                    continue

                log.debug('Movie %s (%s) has %d torrents', (ptpmovie['Title'], ptpmovie['Year'], len(ptpmovie['Torrents'])))
                for torrent in ptpmovie['Torrents']:
                    torrent_id = tryInt(torrent['Id'])
                    torrentdesc = '%s %s %s' % (torrent['Resolution'], torrent['Source'], torrent['Codec'])
                    torrentscore = 0

                    # Score bumps are configurable preferences, not hard filters.
                    if 'GoldenPopcorn' in torrent and torrent['GoldenPopcorn']:
                        torrentdesc += ' HQ'
                        if self.conf('prefer_golden'):
                            torrentscore += 5000
                    if 'FreeleechType' in torrent:
                        torrentdesc += ' Freeleech'
                        if self.conf('prefer_freeleech'):
                            torrentscore += 7000
                    if 'Scene' in torrent and torrent['Scene']:
                        torrentdesc += ' Scene'
                        if self.conf('prefer_scene'):
                            torrentscore += 2000
                    if 'RemasterTitle' in torrent and torrent['RemasterTitle']:
                        torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle'])

                    torrentdesc += ' (%s)' % quality_id
                    # Strip anything that isn't a safe filename character.
                    torrent_name = re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) - %s' % (movie_title, ptpmovie['Year'], torrentdesc))

                    def extra_check(item):
                        # Deferred per-item quality check, run by the searcher later.
                        return self.torrentMeetsQualitySpec(item, quality_id)

                    results.append({
                        'id': torrent_id,
                        'name': torrent_name,
                        'Source': torrent['Source'],
                        'Checked': 'true' if torrent['Checked'] else 'false',
                        'Resolution': torrent['Resolution'],
                        'url': '%s?action=download&id=%d&authkey=%s&torrent_pass=%s' % (self.urls['torrent'], torrent_id, authkey, passkey),
                        'detail_url': self.urls['detail'] % torrent_id,
                        'date': tryInt(time.mktime(parse(torrent['UploadTime']).timetuple())),
                        'size': tryInt(torrent['Size']) / (1024 * 1024),  # bytes -> MB
                        'seeders': tryInt(torrent['Seeders']),
                        'leechers': tryInt(torrent['Leechers']),
                        'score': torrentscore,
                        'extra_check': extra_check,
                    })

        except:
            log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def torrentMeetsQualitySpec(self, torrent, quality):
        """Apply post-search field filters for *quality*; True when acceptable.

        Spec strings starting with '!' are negative rules (immediate reject on
        match); all other specs are positive rules — at least one must match
        per field that has positive rules.
        """
        if not quality in self.post_search_filters:
            return True

        reqs = self.post_search_filters[quality].copy()

        if self.conf('require_approval'):
            log.debug('Config: Require staff-approval activated')
            reqs['Checked'] = ['true']

        for field, specs in reqs.items():
            matches_one = False
            seen_one = False

            if not field in torrent:
                log.debug('Torrent with ID %s has no field "%s"; cannot apply post-search-filter for quality "%s"', (torrent['id'], field, quality))
                continue

            for spec in specs:
                if len(spec) > 0 and spec[0] == '!':
                    # a negative rule; if the field matches, return False
                    if torrent[field] == spec[1:]:
                        return False
                else:
                    # a positive rule; if any of the possible positive values match the field, return True
                    log.debug('Checking if torrents field %s equals %s' % (field, spec))
                    seen_one = True
                    if torrent[field] == spec:
                        log.debug('Torrent satisfied %s == %s' % (field, spec))
                        matches_one = True

            if seen_one and not matches_one:
                log.debug('Torrent did not satisfy requirements, ignoring')
                return False

        return True

    def htmlToUnicode(self, text):
        """Decode HTML entities (&amp;#…; numeric and named) into unicode.

        NOTE(review): uses Python 2-only `unichr`/`htmlentitydefs`.
        """
        def fixup(m):
            txt = m.group(0)
            if txt[:2] == "&#":
                # character reference
                try:
                    if txt[:3] == "&#x":
                        return unichr(int(txt[3:-1], 16))
                    else:
                        return unichr(int(txt[2:-1]))
                except ValueError:
                    pass
            else:
                # named entity
                try:
                    txt = unichr(htmlentitydefs.name2codepoint[txt[1:-1]])
                except KeyError:
                    pass
            return txt  # leave as is
        return re.sub("&#?\w+;", fixup, six.u('%s') % text)

    def unicodeToASCII(self, text):
        """Strip combining marks after NFKD normalization (best-effort ASCII fold)."""
        import unicodedata
        return ''.join(c for c in unicodedata.normalize('NFKD', text) if unicodedata.category(c) != 'Mn')

    def htmlToASCII(self, text):
        """Entity-decode then ASCII-fold *text*."""
        return self.unicodeToASCII(self.htmlToUnicode(text))

    def getLoginParams(self):
        """Form payload for the ajax login endpoint."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'passkey': self.conf('passkey'),
            'keeplogged': '1',
            'login': 'Login'
        }

    def loginSuccess(self, output):
        """The ajax login answers JSON; success is {'Result': 'Ok'}."""
        try:
            return json.loads(output).get('Result', '').lower() == 'ok'
        except:
            return False

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# Provider registration consumed by CouchPotato's settings UI:
# one "passthepopcorn" group under Searcher > torrent providers.
config = [{
    'name': 'passthepopcorn',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'PassThePopcorn',
            'description': '<a href="https://passthepopcorn.me">PassThePopcorn.me</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAARklEQVQoz2NgIAP8BwMiGWRpIN1JNWn/t6T9f532+W8GkNt7vzz9UkfarZVpb68BuWlbnqW1nU7L2DMx7eCoBlpqGOppCQB83zIgIg+wWQAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False
                },
                {
                    'name': 'domain',
                    'advanced': True,
                    'label': 'Proxy server',
                    'description': 'Domain for requests (HTTPS only!), keep empty to use default (tls.passthepopcorn.me).',
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'passkey',
                    'default': '',
                },
                {
                    'name': 'prefer_golden',
                    'advanced': True,
                    'type': 'bool',
                    'label': 'Prefer golden',
                    'default': 1,
                    'description': 'Favors Golden Popcorn-releases over all other releases.'
                },
                {
                    'name': 'prefer_freeleech',
                    'advanced': True,
                    'type': 'bool',
                    'label': 'Prefer Freeleech',
                    'default': 1,
                    'description': 'Favors torrents marked as freeleech over all other releases.'
                },
                {
                    'name': 'prefer_scene',
                    'advanced': True,
                    'type': 'bool',
                    'label': 'Prefer scene',
                    'default': 0,
                    'description': 'Favors scene-releases over non-scene releases.'
                },
                {
                    'name': 'require_approval',
                    'advanced': True,
                    'type': 'bool',
                    'label': 'Require approval',
                    'default': 0,
                    'description': 'Require staff-approval for releases to be accepted.'
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        }
    ]
}]
|
||||
135
couchpotato/core/media/_base/providers/torrent/sceneaccess.py
Normal file
135
couchpotato/core/media/_base/providers/torrent/sceneaccess.py
Normal file
@@ -0,0 +1,135 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """SceneAccess search provider.

    Scrapes the site's browse/archive result table (#torrents-table) and
    lazily fetches release descriptions from detail pages.
    """

    urls = {
        'test': 'https://www.sceneaccess.eu/',
        'login': 'https://www.sceneaccess.eu/login',
        'login_check': 'https://www.sceneaccess.eu/inbox',
        'detail': 'https://www.sceneaccess.eu/details?id=%s',
        'search': 'https://www.sceneaccess.eu/browse?c%d=%d',
        'archive': 'https://www.sceneaccess.eu/archive?&c%d=%d',
        'download': 'https://www.sceneaccess.eu/%s',
    }

    http_time_between_calls = 1  # Seconds

    def _searchOnTitle(self, title, media, quality, results):
        """Search SceneAccess for *title* and append release dicts to *results*."""
        url = self.buildUrl(title, media, quality)
        data = self.getHTMLData(url)

        if data:
            html = BeautifulSoup(data)

            try:
                resultsTable = html.find('table', attrs = {'id': 'torrents-table'})
                if resultsTable is None:
                    return

                entries = resultsTable.find_all('tr', attrs = {'class': 'tt_row'})
                for result in entries:

                    link = result.find('td', attrs = {'class': 'ttr_name'}).find('a')
                    url = result.find('td', attrs = {'class': 'td_dl'}).find('a')
                    # Leecher cell has no <a> when the count is zero.
                    leechers = result.find('td', attrs = {'class': 'ttr_leechers'}).find('a')
                    torrent_id = link['href'].replace('details?id=', '')

                    results.append({
                        'id': torrent_id,
                        'name': link['title'],
                        'url': self.urls['download'] % url['href'],
                        'detail_url': self.urls['detail'] % torrent_id,
                        'size': self.parseSize(result.find('td', attrs = {'class': 'ttr_size'}).contents[0]),
                        'seeders': tryInt(result.find('td', attrs = {'class': 'ttr_seeders'}).find('a').string),
                        'leechers': tryInt(leechers.string) if leechers else 0,
                        'get_more_info': self.getMoreInfo,
                    })

            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def getMoreInfo(self, item):
        """Attach the release description scraped (and cached ~300 days) from the detail page."""
        full_description = self.getCache('sceneaccess.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
        html = BeautifulSoup(full_description)
        nfo_pre = html.find('div', attrs = {'id': 'details_table'})
        description = toUnicode(nfo_pre.text) if nfo_pre else ''

        item['description'] = description
        return item

    # Login
    def getLoginParams(self):
        """Form payload for the SceneAccess login POST."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'submit': 'come on in',
        }

    def loginSuccess(self, output):
        """Logged-in pages link to /inbox."""
        return '/inbox' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# Provider registration consumed by CouchPotato's settings UI:
# one "sceneaccess" group under Searcher > torrent providers.
config = [{
    'name': 'sceneaccess',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'SceneAccess',
            'description': '<a href="https://sceneaccess.eu/">SceneAccess</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAACT0lEQVR4AYVQS0sbURidO3OTmajJ5FElTTOkPmZ01GhHrIq0aoWAj1Vc+A/cuRMXbl24V9SlCGqrLhVFCrooEhCp2BAx0mobTY2kaR7qmOm87EXL1EWxh29xL+c7nPMdgGHYO5bF/gdbefnr6WlbWRnxluMwAB4Z0uEgXa7nwaDL7+/RNPzxbYvb/XJ0FBYVfd/ayh0fQ4qCGEHcm0KLRZUk7Pb2YRJPRwcsKMidnKD3t9VVT3s7BDh+z5FOZ3Vfn3h+Hltfx00mRRSRWFcUmmVNhYVqPn8dj3va2oh+txvcQRVF9ebm1fi4k+dRFbosY5rm4Hk7xxULQnJnx93S4g0EIEEQRoDLo6PrWEw8Pc0eHLwYGopMTDirqlJ7eyhYYGHhfgfHCcKYksZGVB/NcXI2mw6HhZERqrjYTNPHi4tFPh8aJIYIhgPlcCRDoZLW1s75+Z/7+59nZ/OJhLWigqAoKZX6Mjf3dXkZ3pydGYLc4aEoCCkInzQ1fRobS2xuvllaonkedfArnY5OTdGVldBkOADgqq2Nr6z8CIWaJietDHOhKB+HhwFKC6Gnq4ukKJvP9zcSbjYDXbeVlkKzuZBhnnV3e3t6UOmaJO0ODibW1hB1GYkg8R/gup7Z3TVZLJ5AILW9LcZiVpYtYBhw16O3t7cauckyeF9Tgz0ATpL2+nopmWycmbnY2LiKRjFk6/d7+/vRJfl4HGzV1T0UIM43MGBvaIBWK/YvwM5w+IMgGH8tkyEgvIpE7M3Nt6qqZrNyOq1kMmouh455Ggz+BhKY4GEc2CfwAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
172
couchpotato/core/media/_base/providers/torrent/thepiratebay.py
Normal file
172
couchpotato/core/media/_base/providers/torrent/thepiratebay.py
Normal file
@@ -0,0 +1,172 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentMagnetProvider):
    """ThePirateBay magnet provider.

    Walks the paginated search results through one of several proxy domains
    and returns magnet links with an uploader-status-based extra score.
    """

    urls = {
        'detail': '%s/torrent/%s',
        'search': '%s/search/%%s/%%s/7/%%s'
    }

    cat_backup_id = 200
    disable_provider = False
    http_time_between_calls = 0

    # Candidate mirror/proxy domains, tried in order (see correctProxy).
    proxy_list = [
        'https://dieroschtibay.org',
        'https://thebay.al',
        'https://thepiratebay.se',
        'http://thepiratebay.se.net',
        'http://thebootlegbay.com',
        'http://tpb.ninja.so',
        'http://proxybay.fr',
        'http://pirateproxy.in',
        'http://piratebay.skey.sk',
        'http://pirateproxy.be',
        'http://bayproxy.li',
        'http://proxybay.pw',
    ]

    def _search(self, media, quality, results):
        """Search TPB for *media* and append magnet release dicts to *results*."""
        page = 0
        total_pages = 1
        cats = self.getCatId(quality)

        base_search_url = self.urls['search'] % self.getDomain()

        while page < total_pages:

            search_url = base_search_url % self.buildUrl(media, page, cats)

            page += 1

            data = self.getHTMLData(search_url)

            if data:
                try:
                    soup = BeautifulSoup(data)
                    results_table = soup.find('table', attrs = {'id': 'searchResult'})

                    if not results_table:
                        return

                    try:
                        # Page count = number of pagination links; best-effort only.
                        total_pages = len(soup.find('div', attrs = {'align': 'center'}).find_all('a'))
                    except:
                        pass

                    entries = results_table.find_all('tr')
                    for result in entries[1:]:  # first row is the header
                        link = result.find(href = re.compile('torrent\/\d+\/'))
                        download = result.find(href = re.compile('magnet:'))

                        try:
                            size = re.search('Size (?P<size>.+),', six.text_type(result.select('font.detDesc')[0])).group('size')
                        except:
                            continue

                        if link and download:

                            def extra_score(item):
                                # Bonus points for uploader status badges on the row.
                                # NOTE(review): closes over the loop variable `result`;
                                # evaluated lazily by the searcher — presumably intended,
                                # but all closures share the last row if called after the loop.
                                trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
                                vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
                                confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
                                moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]

                                return confirmed + trusted + vip + moderated

                            results.append({
                                'id': re.search('/(?P<id>\d+)/', link['href']).group('id'),
                                'name': six.text_type(link.string),
                                'url': download['href'],
                                'detail_url': self.getDomain(link['href']),
                                'size': self.parseSize(size),
                                'seeders': tryInt(result.find_all('td')[2].string),
                                'leechers': tryInt(result.find_all('td')[3].string),
                                'extra_score': extra_score,
                                'get_more_info': self.getMoreInfo
                            })

                except:
                    log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def isEnabled(self):
        """Enabled only when a working (proxy) domain is available."""
        return super(Base, self).isEnabled() and self.getDomain()

    def correctProxy(self, data):
        """A real TPB page contains the search button title."""
        return 'title="Pirate Search"' in data

    def getMoreInfo(self, item):
        """Attach the NFO description scraped (and cached ~300 days) from the detail page."""
        full_description = self.getCache('tpb.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
        html = BeautifulSoup(full_description)
        nfo_pre = html.find('div', attrs = {'class': 'nfo'})
        description = ''
        try:
            description = toUnicode(nfo_pre.text)
        except:
            pass

        item['description'] = description
        return item
|
||||
|
||||
|
||||
# Settings-UI definition for the ThePirateBay provider.
config = [{
    'name': 'thepiratebay',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'ThePirateBay',
            'description': 'The world\'s largest bittorrent tracker. <a href="http://fucktimkuik.org/">ThePirateBay</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAA3UlEQVQY02P4DwT/YADIZvj//7qnozMYODmtAAusZoCDELDAegYGViZhAWZmRoYoqIDupfhNN1M3dTBEggXWMZg9jZRXV77YxhAOFpjDwMAPMoCXmcHsF1SAQZ6bQY2VgUEbKHClcAYzg3mINEO8jSCD478/DPsZmvqWblu1bOmStes3Pp0ezVDF4Gif0Hfx9///74/ObRZ2YNiZ47C8XIRBxFJR0jbSSUud4f9zAQWn8NTuziAt2zy5xIMM/z8LFX0E+fD/x0MRDCeA1v7Z++Y/FDzyvAtyBxIA+h8A8ZKLeT+lJroAAAAASUVORK5CYII=',
            'options': [
                # Master on/off switch for the provider.
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False
                },
                # Optional fixed mirror; empty lets the proxy_list pick one.
                {
                    'name': 'domain',
                    'advanced': True,
                    'label': 'Proxy server',
                    'description': 'Domain for requests, keep empty to let CouchPotato pick.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        }
    ]
}]
|
||||
136
couchpotato/core/media/_base/providers/torrent/torrentbytes.py
Normal file
136
couchpotato/core/media/_base/providers/torrent/torrentbytes.py
Normal file
@@ -0,0 +1,136 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """TorrentBytes (private tracker) searcher: logs in, then scrapes browse.php."""

    urls = {
        'test': 'https://www.torrentbytes.net/',
        'login': 'https://www.torrentbytes.net/takelogin.php',
        'login_check': 'https://www.torrentbytes.net/inbox.php',
        'detail': 'https://www.torrentbytes.net/details.php?id=%s',
        'search': 'https://www.torrentbytes.net/browse.php?search=%s&cat=%d',
        'download': 'https://www.torrentbytes.net/download.php?id=%s&name=%s',
    }

    # Site category ids mapped to CouchPotato quality identifiers.
    cat_ids = [
        ([5], ['720p', '1080p', 'bd50']),
        ([19], ['cam']),
        ([19], ['ts', 'tc']),
        ([19], ['r5', 'scr']),
        ([19], ['dvdrip']),
        ([19], ['brrip']),
        ([20], ['dvdr']),
    ]

    http_time_between_calls = 1 # Seconds
    cat_backup_id = None

    def _searchOnTitle(self, title, movie, quality, results):
        """Search the tracker for `title (year)` and append parsed rows to `results`."""

        # Colons break the site search, so they are stripped from the title.
        url = self.urls['search'] % (tryUrlencode('%s %s' % (title.replace(':', ''), movie['info']['year'])), self.getCatId(quality)[0])
        data = self.getHTMLData(url)

        if data:
            html = BeautifulSoup(data)

            try:
                result_table = html.find('table', attrs = {'border': '1'})
                if not result_table:
                    return

                entries = result_table.find_all('tr')

                for result in entries[1:]:
                    cells = result.find_all('td')

                    link = cells[1].find('a', attrs = {'class': 'index'})

                    full_id = link['href'].replace('details.php?id=', '')
                    # The id may carry a '#fname' style suffix; keep the numeric prefix.
                    torrent_id = full_id[:6]

                    results.append({
                        'id': torrent_id,
                        'name': link.contents[0],
                        'url': self.urls['download'] % (torrent_id, link.contents[0]),
                        'detail_url': self.urls['detail'] % torrent_id,
                        # Size value and unit live in separate child nodes of the cell.
                        'size': self.parseSize(cells[6].contents[0] + cells[6].contents[2]),
                        'seeders': tryInt(cells[8].find('span').contents[0]),
                        'leechers': tryInt(cells[9].find('span').contents[0]),
                    })

            except Exception:
                # FIX: message read "Failed to parsing"; also no longer a bare except.
                log.error('Failed parsing %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """POST body for takelogin.php."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'login': 'submit',
        }

    def loginSuccess(self, output):
        # FIX: the original tested 'Welcome' against output.lower(); a
        # capitalized needle can never match a lowercased haystack, so that
        # branch was dead code. Compare lowercase to lowercase.
        return 'logout.php' in output.lower() or 'welcome' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# Settings-UI definition for the TorrentBytes provider.
config = [{
    'name': 'torrentbytes',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentBytes',
            'description': '<a href="http://torrentbytes.net">TorrentBytes</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAeFBMVEUAAAAAAEQAA1QAEmEAKnQALHYAMoEAOokAQpIASYsASZgAS5UATZwATosATpgAVJ0AWZwAYZ4AZKAAaZ8Ab7IAcbMAfccAgcQAgcsAhM4AiscAjMkAmt0AoOIApecAp/EAqvQAs+kAt+wA3P8A4f8A//8VAAAfDbiaAl08AAAAjUlEQVQYGQXBO04DQRAFwHqz7Z8sECIl5f73ISRD5GBs7UxTlWfg9vYXnvJRQJqOL88D6BAwJtMMumHUVCl60aa6H93IrIv0b+157f1lpk+fm87lMWrZH0vncKbXdRUQrRmrh9C6Iwkq6rg4PXZcyXmbizzeV/g+rDra0rGve8jPKLSOJNi2AQAwAGjwD7ApPkEHdtPQAAAAAElFTkSuQmCC',
            'options': [
                # Master on/off switch for the provider.
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                # Tracker account credentials.
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
114
couchpotato/core/media/_base/providers/torrent/torrentday.py
Normal file
114
couchpotato/core/media/_base/providers/torrent/torrentday.py
Normal file
@@ -0,0 +1,114 @@
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """TorrentDay searcher using the site's JSON search API."""

    urls = {
        'test': 'http://www.td.af/',
        'login': 'http://www.td.af/torrents/',
        # NOTE(review): login_check points at torrentday.com while every other
        # URL uses td.af — looks intentional (mirror), but verify.
        'login_check': 'http://www.torrentday.com/userdetails.php',
        'detail': 'http://www.td.af/details.php?id=%s',
        'search': 'http://www.td.af/V3/API/API.php',
        'download': 'http://www.td.af/download.php/%s/%s',
    }

    http_time_between_calls = 1 # Seconds

    def _searchOnTitle(self, title, media, quality, results):
        """POST a quoted title+year query to the JSON API and collect torrents."""

        query = '"%s" %s' % (title, media['info']['year'])

        # Payload format required by the site's V3 API endpoint.
        data = {
            '/browse.php?': None,
            'cata': 'yes',
            'jxt': 8,
            'jxw': 'b',
            'search': query,
        }

        data = self.getJsonData(self.urls['search'], data = data)
        try:
            torrents = data.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
        except (AttributeError, IndexError, TypeError):
            # Request failed (data is None) or the response shape is
            # unexpected: nothing to add. Was a bare except that also
            # swallowed SystemExit/KeyboardInterrupt.
            return

        for torrent in torrents:
            results.append({
                'id': torrent['id'],
                'name': torrent['name'],
                'url': self.urls['download'] % (torrent['id'], torrent['fname']),
                'detail_url': self.urls['detail'] % torrent['id'],
                'size': self.parseSize(torrent.get('size')),
                'seeders': tryInt(torrent.get('seed')),
                'leechers': tryInt(torrent.get('leech')),
            })

    def getLoginParams(self):
        """POST body for the login form (submit.x/y mimic an image-button click)."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'submit.x': 18,
            'submit.y': 11,
            'submit': 'submit',
        }

    def loginSuccess(self, output):
        # The site echoes this exact phrase on a failed login.
        return 'Password not correct' not in output

    def loginCheckSuccess(self, output):
        # A logout link only appears for authenticated sessions.
        return 'logout.php' in output.lower()
|
||||
|
||||
|
||||
# Settings-UI definition for the TorrentDay provider.
config = [{
    'name': 'torrentday',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentDay',
            'description': '<a href="http://www.td.af/">TorrentDay</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC5ElEQVQ4y12TXUgUURTH//fO7Di7foeQJH6gEEEIZZllVohfSG/6UA+RSFAQQj74VA8+Bj30lmAlRVSEvZRfhNhaka5ZUG1paKaW39tq5O6Ou+PM3M4o6m6X+XPPzD3zm/+dcy574r515WfIW8CZBM4YAA5Gc/aQC3yd7oXYEONcsISE5dTDh91HS0t7FEWhBUAeN9ynV/d9qJAgE4AECURAcVsGlCCnly26LMA0IQwTa52dje3d3e3hcPi8qqrrMjcVYI3EHCQZlkFOHBwR2QHh2ASAAIJxWGAQEDxjePhs3527XjJwnb37OHBq0T+Tyyjh+9KnEzNJ7nouc1Q/3A3HGsOvnJy+PSUlj81w2Lny9WuJ6+3AmTjD4HOcrdR2dWXLRQePvyaSLfQOPMPC8mC9iHCsOxSyzJCelzdSXlNzD5ujpb25Wbfc/XXJemTXF4+nnCNq+AMLe50uFfEJTiw4GXSFtiHL0SnIq66+p0kSArqO+eH3RdsAv9+f5vW7L7GICq6rmM8XBCAXlBw90rOyxibn5yzfkg/L09M52/jxqdESaIrBXHYZZbB1GX8cEpySxKIB8S5XcOnvqpli1zuwmrTtoLjw5LOK/eeuWsE4JH5IRPaPZKiKigmPp+5pa+u1aEjIMhEgrRkmi9mgxGUhM7LNJSzOzsE3+cOeExovXOjdytE0LV4zqNZUtV0uZzAGoGkhDH/2YHZiErmv4uyWQnZZWc+hoqL3WzlTExN5hhA8IEwkZWZOxwB++30YG/9GkYCPvqAaHAW5uWPROW86OmqCprUR7z1yZDAGQNuCvkoB/baIKUBWMTYymv+gra3eJNvjXu+B562tFyXqTJ6YuHK8rKwvBmC3vR7cOCPQLWFz8LnfXWUrJo9U19BwMyUlJRjTSMJ2ENxUiGxq9KXQfwqYlnWstvbR5aamG9g0uzM8Q4OFt++3NNixQ2NgYmeN03FOTUv7XVpV9aKisvLl1vN/WVhNc/Fi1NEAAAAASUVORK5CYII=',
            'options': [
                # Master on/off switch for the provider.
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                # Tracker account credentials.
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
188
couchpotato/core/media/_base/providers/torrent/torrentpotato.py
Normal file
188
couchpotato/core/media/_base/providers/torrent/torrentpotato.py
Normal file
@@ -0,0 +1,188 @@
|
||||
from urlparse import urlparse
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import splitString, tryInt, tryFloat
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import ResultList
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """TorrentPotato provider.

    Queries user-configured hosts that implement the CouchPotato
    torrent-provider JSON API; all host settings come from parallel
    comma-separated config lists.
    """

    urls = {}
    limits_reached = {}

    http_time_between_calls = 1 # Seconds

    def search(self, media, quality):
        """Query every enabled configured host and merge their results."""
        hosts = self.getHosts()

        results = ResultList(self, media, quality, imdb_results = True)

        for host in hosts:
            if self.isDisabled(host):
                continue

            self._searchOnHost(host, media, quality, results)

        return results

    def _searchOnHost(self, host, media, quality, results):
        """Fetch one host's JSON result list and append its torrents to `results`."""

        torrents = self.getJsonData(self.buildUrl(media, host), cache_timeout = 1800)

        if torrents:
            try:
                if torrents.get('error'):
                    log.error('%s: %s', (torrents.get('error'), host['host']))
                elif torrents.get('results'):
                    for torrent in torrents.get('results', []):
                        results.append({
                            'id': torrent.get('torrent_id'),
                            # http(s)/ftp download URLs are plain torrent files;
                            # anything else is treated as a magnet link.
                            'protocol': 'torrent' if re.match('^(http|https|ftp)://.*$', torrent.get('download_url')) else 'torrent_magnet',
                            'provider_extra': urlparse(host['host']).hostname or host['host'],
                            'name': toUnicode(torrent.get('release_name')),
                            'url': torrent.get('download_url'),
                            'detail_url': torrent.get('details_url'),
                            'size': torrent.get('size'),
                            'score': host['extra_score'],
                            'seeders': torrent.get('seeders'),
                            'leechers': torrent.get('leechers'),
                            'seed_ratio': host['seed_ratio'],
                            'seed_time': host['seed_time'],
                        })

            except Exception:
                log.error('Failed getting results from %s: %s', (host['host'], traceback.format_exc()))

    def getHosts(self):
        """Build one host dict per configured host entry.

        All settings are parallel comma-separated lists; entries missing
        from a shorter list fall back to a safe default instead of raising.
        """

        uses = splitString(str(self.conf('use')), clean = False)
        hosts = splitString(self.conf('host'), clean = False)
        names = splitString(self.conf('name'), clean = False)
        seed_times = splitString(self.conf('seed_time'), clean = False)
        seed_ratios = splitString(self.conf('seed_ratio'), clean = False)
        pass_keys = splitString(self.conf('pass_key'), clean = False)
        extra_score = splitString(self.conf('extra_score'), clean = False)

        def pick(values, index, default = ''):
            # Safe positional lookup; replaces five copies of bare try/except.
            try:
                return values[index]
            except IndexError:
                return default

        host_list = []
        for nr in range(len(hosts)):
            host_list.append({
                # Missing "use" flag now disables the host instead of raising
                # IndexError (the original indexed uses[nr] unguarded).
                'use': pick(uses, nr, '0'),
                'host': pick(hosts, nr),
                'name': pick(names, nr),
                'seed_ratio': tryFloat(pick(seed_ratios, nr)),
                'seed_time': tryInt(pick(seed_times, nr)),
                'pass_key': pick(pass_keys, nr),
                'extra_score': tryInt(extra_score[nr]) if len(extra_score) > nr else 0
            })

        return host_list

    def belongsTo(self, url, provider = None, host = None):
        """Return the match result if `url` belongs to any configured host.

        NOTE(review): the `host` parameter exists in the base-class signature
        but was immediately shadowed by the loop variable in the original, so
        it was (and still is) ignored here; the loop variable is renamed to
        make that explicit.
        """

        for configured in self.getHosts():
            result = super(Base, self).belongsTo(url, host = configured['host'], provider = provider)
            if result:
                return result

    def isDisabled(self, host = None):
        return not self.isEnabled(host)

    def isEnabled(self, host = None):

        # Return true if at least one is enabled and no host is given
        if host is None:
            for host in self.getHosts():
                if self.isEnabled(host):
                    return True
            return False

        return TorrentProvider.isEnabled(self) and host['host'] and host['pass_key'] and int(host['use'])
|
||||
|
||||
|
||||
# Settings-UI definition for the TorrentPotato provider. The 'pass_key'
# option is a 'combined' field gathering all per-host columns.
config = [{
    'name': 'torrentpotato',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentPotato',
            'order': 10,
            'description': 'CouchPotato torrent provider. Checkout <a href="https://github.com/RuudBurger/CouchPotatoServer/wiki/CouchPotato-Torrent-Provider">the wiki page about this provider</a> for more info.',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABSElEQVR4AZ2Nz0oCURTGv8t1YMpqUxt9ARFxoQ/gQtppgvUKcu/sxB5iBJkogspaBC6iVUplEC6kv+oiiKDNhAtt16roP0HQgdsMLgaxfvy4nHP4Pi48qE2g4v91JOqT1CH/UnA7w7icUlLawyEdj+ZI/7h6YluWbRiddHonHh9M70aj7VTKzuXuikUMci/EO/ACnAI15599oAk8AR/AgxBQNCzreD7bmpl+FOIVuAHqQDUcJo+AK+CZFKLt95/MpSmMt0TiW9POxse6UvYZ6zB2wFgjFiNpOGesR0rZ0PVPXf8KhUCl22CwClz4eN8weoZBb9c0bdPsOWvHx/cYu9Y0CoNoZTJrwAbn5DrnZc6XOV+igVbnsgo0IxEomlJuA1vUIYGyq3PZBChwmExCUSmVZgMBDIUCK4UCFIv5vHIhm/XUDeAf/ADbcpd5+aXSWQAAAABJRU5ErkJggg==',
            'options': [
                # Master on/off switch for the provider.
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'use',
                    'default': ''
                },
                {
                    'name': 'host',
                    'default': '',
                    'description': 'The url path of your TorrentPotato provider.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': '0',
                    'description': 'Starting score for each release found via this provider.',
                },
                {
                    'name': 'name',
                    'label': 'Username',
                    'default': '',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'default': '1',
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'default': '40',
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'pass_key',
                    'default': ',',
                    'label': 'Pass Key',
                    'description': 'Can be found on your profile page',
                    'type': 'combined',
                    'combine': ['use', 'host', 'pass_key', 'name', 'seed_ratio', 'seed_time', 'extra_score'],
                },
            ],
        },
    ],
}]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user