Compare commits

...

435 Commits

Author SHA1 Message Date
Ruud
d3efda74b2 One up 2014-03-16 09:44:44 +01:00
Ruud
66b849cb29 Merge branch 'refs/heads/master' into desktop
Conflicts:
	version.py
2014-03-16 09:43:32 +01:00
Ruud
b19f98ef5b Merge branch 'refs/heads/develop' 2014-03-15 12:35:28 +01:00
Ruud
ffe6b7dd70 Add boxcar 2 support. closes #2886 2014-03-10 15:42:40 +01:00
Ruud
75f22f44a1 Reference before assigned 2014-03-08 18:55:38 +01:00
Ruud
d60a8a71b7 Check if file has moved, ignore copystat errors. close #2936 2014-03-08 18:18:06 +01:00
Ruud
9e471ac389 Add "I Just Watched" Reddit to userscripts. fix #2621 2014-03-08 14:05:49 +01:00
Ruud
3172a4d030 Check for year in coming soon 2014-03-08 12:27:06 +01:00
Ruud
c58315e2ee Use natural sorting 2014-03-08 11:59:12 +01:00
Ruud
dc0ea5b3f6 Use proper sorting 2014-03-08 11:52:59 +01:00
Ruud
b50cf1cf4c Only allow next year for couldbereleased check 2014-03-08 10:50:11 +01:00
Ruud
b69898d624 Remove double self in filetime check. fixes #2952 2014-03-08 09:37:39 +01:00
Ruud
2066625bf0 Don't use ctime on unix system. Cleanup check a bit. close #2904 2014-03-07 18:58:27 +01:00
Ruud
7af1d00ea2 Allow passwords inside nzb name 2014-03-07 18:10:17 +01:00
Ruud
5b279a48cb Make sure q is first for nzbclub 2014-03-07 17:38:40 +01:00
Ruud
a5fa0681ed Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-03-03 23:35:06 +01:00
Ruud
22e922e860 Split socket error to nr 2014-03-03 23:34:49 +01:00
Ruud Burger
0126f5ae84 Merge pull request #2921 from MLWALK3R/patch-1
replaced a duplicate URL on TPB
2014-03-03 23:24:45 +01:00
Ruud
c389790cf2 Merge branch 'refs/heads/develop' 2014-03-03 22:19:29 +01:00
Ruud
cfb246fa84 Make sure imdb rating exists before using it 2014-03-03 22:11:59 +01:00
Michael Walker
651119b7dd replaced a duplicate URL
replaced a duplicate URL.
2014-02-28 17:35:08 +00:00
Ruud
d7445dfa80 Merge branch 'refs/heads/develop' 2014-02-26 14:00:56 +01:00
Ruud Burger
f944a70a9c Merge pull request #2911 from fuzeman/feature/dev_rtorrent
[rtorrent] Fixed naming issue
2014-02-26 13:46:52 +01:00
Dean Gardiner
9056f5ae59 Fixed naming issue in rtorrent downloader 2014-02-26 14:52:10 +13:00
Ruud
ed62c981cc Add quality tests 2014-02-25 22:04:43 +01:00
Ruud
36782768a4 Merge branch 'refs/heads/develop' 2014-02-25 21:37:29 +01:00
Ruud Burger
2a7ba28903 Merge pull request #2902 from MLWALK3R/develop
SSL'd and Updated
2014-02-25 21:35:45 +01:00
Ruud Burger
e8ec2ef8d1 Merge pull request #2906 from tehspede/develop
Search url has method defined twice (2 and 3) we only want 3.
2014-02-25 21:31:29 +01:00
Ruud
2c9d487614 Update build url 2014-02-25 21:20:59 +01:00
tehspede
864e8654c3 Search url has method defined twice (2 and 3) we only want 3. 2014-02-25 18:29:51 +02:00
Michael
e11453aafb SSL'd and Updated
Add SSL to some URL's and update the Apple RSS link.
2014-02-24 21:59:45 +00:00
Ruud
e57620f67c Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-02-24 22:49:09 +01:00
Ruud
e481763967 Merge branch 'refs/heads/mikke89-downloaders_test' into develop 2014-02-24 22:48:19 +01:00
Ruud
2b3d755c64 Cleanup downloader testbuttons PR 2014-02-24 22:48:12 +01:00
Ruud
fc2db36820 Merge branch 'downloaders_test' of git://github.com/mikke89/CouchPotatoServer into mikke89-downloaders_test
Conflicts:
	couchpotato/core/downloaders/rtorrent/main.py
2014-02-24 22:07:49 +01:00
Ruud Burger
4fdea782f3 Merge pull request #2901 from MLWALK3R/patch-5
Torrentshack description https
2014-02-24 21:57:03 +01:00
Michael Walker
188a1a3b03 HTTP to HTTPS
Updated URL to SSL, better account security.
2014-02-24 20:56:01 +00:00
Ruud Burger
8e2014f2d4 Merge pull request #2899 from MLWALK3R/patch-4
ILoveTorrents use SSL
2014-02-24 21:48:36 +01:00
Michael Walker
fb95d7923f HTTP to HTTPS
Updated URL's to SSL, better account security.
2014-02-24 20:46:47 +00:00
Ruud Burger
fe5ca69f36 Merge pull request #2897 from MLWALK3R/patch-2
Replaced proxies for TPB
2014-02-24 21:41:31 +01:00
Ruud Burger
1086b808dc Merge pull request #2898 from MLWALK3R/patch-3
Changed http to https
2014-02-24 21:32:32 +01:00
Michael Walker
0050e5cdfc Changed http to https
adjusted http to SSL, better security when dealing with logins.
2014-02-24 18:23:18 +00:00
Michael Walker
6b357674d0 Replaced proxies
Remove dead/blocked proxies, Added in new unblocked/working links
2014-02-24 18:18:39 +00:00
Ruud Burger
82c0592e49 Merge pull request #2875 from fuzeman/feature/dev_rtorrent
[rtorrent] Fixed how torrent status is determined
2014-02-24 18:48:01 +01:00
Ruud
28ab4576d5 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-02-24 18:47:08 +01:00
Ruud Burger
2debd5598f Merge pull request #2888 from xombiemp/ptp-golden
PTP Golden release fix
2014-02-24 18:46:53 +01:00
Ruud
d86d44e2d4 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-02-24 18:46:24 +01:00
Ruud Burger
3d85460dc8 Merge pull request #2887 from koppelbakje/develop
[SceneAccess] Change search method to 3 (description)
2014-02-24 18:46:15 +01:00
Ruud
52ce85fbf2 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-02-24 18:45:27 +01:00
Ruud
6d70533e0b Yifi proxy changes 2014-02-24 18:45:19 +01:00
Ruud Burger
4d8338e829 Merge pull request #2878 from fuzeman/feature/rtorrent/httprpc
[rtorrent] HTTP-RPC support
2014-02-24 18:44:12 +01:00
Andrew Parker
a4e48e1f6b I've found that the score applied for Golden torrents is not enough to snatch them reliably. When I set the Prefer Golden setting, I expect it to always choose the Golden release over a Scene release. Here's an excerpt from my log that illustrates this setting failing to grab the Golden release over a Scene release:
02-22 13:56:17 INFO [core.media.movie.searcher] Search for Thor: The Dark World in 720P
02-22 13:56:21 INFO [otato.core.providers.base] Found correct release with weight 1.00, old_score(4581) now scaled to score(4581)
02-22 13:56:21 INFO [otato.core.providers.base] Found: score(4581) on PassThePopcorn: Thor The Dark World (2013) - 720p Blu-ray x264 Scene (720p)
02-22 13:56:21 INFO [otato.core.providers.base] Found correct release with weight 1.00, old_score(1257) now scaled to score(1257)
02-22 13:56:21 INFO [otato.core.providers.base] Found: score(1257) on PassThePopcorn: Thor The Dark World (2013) - 720p Blu-ray x264 HQ With Commentary (720p)
02-22 13:56:21 INFO [core.media._base.searcher] Wrong: Required word missing: thor the dark world 2013 720p web h 264 extras 720p
02-22 13:56:24 INFO [tato.core.plugins.release] Snatched "Thor The Dark World (2013) - 720p Blu-ray x264 Scene (720p)": Thor: The Dark World (2013) in 720P

With this modification it will fix this specific example and hopefully all others.
2014-02-22 15:35:08 -07:00
Leon Koppel
790a74f9e4 Change search method to 3 (description) 2014-02-22 18:43:19 +01:00
mikke89
893dde9958 rTorrent connection test: Error message on version check fail 2014-02-21 20:28:49 +01:00
Dean Gardiner
d448b8cd99 Adjusted rtorrent connect method to work with httprpc URIs, adjusted option descriptions 2014-02-21 15:48:17 +13:00
Dean Gardiner
ca2c4a0b3e Updated rtorrent-python library (HTTP-RPC support)
- Added URI transforming to cleanly support HTTP-RPC
2014-02-21 15:27:35 +13:00
mikke89
499b8193ab Added return message text to frontend 2014-02-21 02:26:04 +01:00
mikke89
1f18d2b09c Test downloader connection: Check version of uTorrent and Sabnzbd 2014-02-21 02:09:16 +01:00
Dean Gardiner
a92d6fd35c Fixed how the status is determined in the rtorrent downloader 2014-02-21 01:13:12 +13:00
Ruud
12adde8f80 Use new id for pushbullet. fix #2864 2014-02-17 20:41:03 +01:00
Ruud
8b747dff9b Use correct var name in nzbvortex 2014-02-16 15:48:55 +01:00
Ruud
027ff43dfd Path encode files in rename. fix #2846 2014-02-16 14:55:35 +01:00
Ruud
f50c8504cf Encode before copy metadata. fix #2832 2014-02-16 14:19:15 +01:00
Ruud
30f5a3944c Use test url for trakt notification test. fix #2798 2014-02-16 14:11:21 +01:00
Ruud
a1c0b000a4 Update TMDB api 2014-02-16 10:48:48 +01:00
Ruud
f22778aacb Use proper check 2014-02-16 10:40:54 +01:00
Ruud
888ee07f65 Check responsecodes 2014-02-16 10:27:31 +01:00
Ruud Burger
aa5937c278 Merge pull request #2824 from fuzeman/feature/dev_rtorrent
[rtorrent] Fixed bug where setting changes would not take effect
2014-02-16 10:13:02 +01:00
Ruud
4831c80598 Update nzbclub url 2014-02-16 09:59:37 +01:00
Ruud
b9a724c8bb Merge branch 'refs/heads/develop' 2014-02-16 09:43:03 +01:00
Ruud
886a271d19 Use correct ordering for request arrays. fix #2810 2014-02-16 09:42:47 +01:00
Ruud
68d826ca1c Merge branch 'refs/heads/develop' 2014-02-15 19:48:07 +01:00
Ruud
8dfb0d1d5c Fire events after tab add 2014-02-15 19:47:55 +01:00
Ruud
d6921882e1 Merge branch 'refs/heads/develop' 2014-02-14 19:39:47 +01:00
Dean Gardiner
3380e20e3a Cleaned up naming of functions in rtorrent downloader 2014-02-08 03:25:11 +13:00
Dean Gardiner
a2c87e1b7d Fixed bug where changes to rtorrent settings wouldn't take effect until a restart 2014-02-08 03:22:59 +13:00
Ruud Burger
9098e44513 Merge pull request #2823 from ramon86/develop
Category changes for Torrent provider TorrentBytes
2014-02-07 12:26:26 +01:00
Ramon van Dam
62524e01e1 * Added category 'bd50' (BR-Disk) to Torrent provider TorrentBytes
* Changed category identifier for category 'brrip' for Torrent provider TorrentBytes (see issue #2795)
2014-02-07 12:08:17 +01:00
Ruud Burger
78bf1d274e Merge pull request #2817 from fuzeman/feature/dev_rtorrent
[rtorrent] Fixed bug which caused large torrents to fail
2014-02-06 14:04:46 +01:00
Dean Gardiner
461e469f28 Updated rtorrent-python library
- Fixed bencode encoding bug with long types
2014-02-07 01:40:11 +13:00
Ruud Burger
e4e7ae3621 Merge pull request #2775 from ressu/fix_rtorrent_connection
Fix rTorrent connectivity
2014-01-31 13:31:32 -08:00
Ruud
9d55ecffe9 Add log var 2014-01-27 21:58:48 +01:00
mikke89
660e20dada Merge branch 'downloaders_test' into downloaders_test_dev
Conflicts:
	couchpotato/core/downloaders/transmission/main.py
2014-01-26 18:37:18 +01:00
mikke89
18c8e803a4 Fixed 'connection test' for Transmission and Sabnzbd 2014-01-26 18:34:42 +01:00
Sami Haahtinen
15a19949b8 Fix rTorrent connectivity
The combination of cleanHost and rTorrent.connect issues caused rTorrent
connections to fail. This update fixes cleanHost() so that it can
actually cope with SSL based hosts and finishes the migration to
cleanHost() in connect()

Conflicts:
	couchpotato/core/helpers/variable.py
2014-01-26 19:26:15 +02:00
mikke89
ebc5a66375 Fixed 'connection test' for Transmission and Sabnzbd 2014-01-26 18:17:23 +01:00
Ruud
f91081e39c uTorrent hostname hint 2014-01-26 10:52:50 +01:00
Ruud
9e991e1595 Fix Yify proxy check 2014-01-26 10:40:15 +01:00
Ruud
afac06081c Defer settings dom injection 2014-01-26 10:05:05 +01:00
Ruud
b773228719 Merge branch 'refs/heads/commit_rollback' into develop 2014-01-26 09:18:52 +01:00
Ruud
7001ed476d Wrap all commits with try/except 2014-01-26 00:33:21 +01:00
Ruud
31c39650a9 Force default title when none match 2014-01-25 15:26:35 +01:00
Ruud
fbae706b0f Use correct var to shuffle 2014-01-25 15:26:00 +01:00
Ruud
88c328af8e Improved manage scanning
Expire after db get
2014-01-24 22:33:22 +01:00
Ruud
cbd8981ee2 Use helper 2014-01-24 16:33:10 +01:00
Ruud
3101926e9b removeDuplicate helper 2014-01-24 15:42:29 +01:00
Ruud
c9e0910c55 Can't use len() on filter iterator. fix #2762 2014-01-24 15:29:24 +01:00
Ruud
d65667ce16 Don't force add basic auth to url 2014-01-24 14:50:54 +01:00
Ruud
7d7251862c Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-23 22:00:40 +01:00
Ruud
4d02a969c2 Merge branch 'refs/heads/georgewhewell-hdbits-api' into develop 2014-01-23 22:00:17 +01:00
Ruud
e20c776364 Use urlopen for HD Bits requests 2014-01-23 22:00:12 +01:00
Ruud
c55404699e Merge branch 'hdbits-api' of git://github.com/georgewhewell/CouchPotatoServer into georgewhewell-hdbits-api 2014-01-23 21:50:31 +01:00
Ruud Burger
6240e4eba0 Merge pull request #2756 from fuzeman/feature/dev_rtorrent
Increased rTorrent load_torrent max waiting time
2014-01-23 12:48:41 -08:00
Ruud
cf86719607 Encode before logging 2014-01-23 00:08:38 +01:00
Ruud
76943b6529 Make sure imdb list_id regex matches whole string.
Thanks @basrieter
2014-01-23 00:00:59 +01:00
Ruud
ca8bbdc293 Allow longer imdb user_id parse 2014-01-22 23:59:25 +01:00
Ruud
8e6f12a897 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-22 23:39:13 +01:00
Ruud
52c64c1a6a Get full imdb (watch)list without login. fix #2715 2014-01-22 23:38:18 +01:00
Ruud
ca94d48f8b No need to try and cach htmldata 2014-01-22 23:31:45 +01:00
Dean Gardiner
d860680823 Increased rTorrent load_torrent max waiting time to 10 retries/seconds 2014-01-22 22:27:19 +13:00
Dean Gardiner
d1dbf3745a Updated rtorrent-python library 2014-01-22 22:24:22 +13:00
Ruud Burger
4b1151bda1 Put future import after shebang 2014-01-22 08:48:22 +01:00
Ruud
18c64e493b Don't cache post requests 2014-01-21 23:06:02 +01:00
Ruud
fc6839b441 Force remove duplicate in suggested movies 2014-01-21 22:38:36 +01:00
Ruud
405b63acdd Remove unused CP automation provider 2014-01-21 21:46:03 +01:00
Ruud
f3dee50448 Properly handle and trigger events 2014-01-21 21:29:54 +01:00
Ruud
04e550ebe7 Merge branch 'refs/heads/ressu-fix_log_lines' into develop 2014-01-21 20:29:11 +01:00
Ruud
05b58819d6 Merge branch 'fix_log_lines' of git://github.com/ressu/CouchPotatoServer into ressu-fix_log_lines 2014-01-21 20:27:32 +01:00
georgewhewell
63c72853f4 Change HDBits provider to use API instead of scraping site 2014-01-21 12:07:38 +00:00
mikke89
f20cce0176 Small fix 2014-01-21 01:38:37 +01:00
mikke89
723cbcd8bd Added 'test connection' button for downloaders 2014-01-21 01:30:13 +01:00
mikke89
dfbb84caae Small fix deluge 2014-01-21 01:12:34 +01:00
mikke89
009d6cafaf Added connection test to the rest of downloaders 2014-01-21 00:24:36 +01:00
Ruud
bd9a4289d1 Rename importlib 2014-01-21 00:19:26 +01:00
Ruud
29a34fef8c py3k port helpers 2014-01-20 23:58:54 +01:00
Ruud
08e2a3a883 Import print function 2014-01-20 23:28:48 +01:00
Ruud
2d37022525 Relative import 2014-01-20 23:28:35 +01:00
Ruud
bb3faaf2cd Exception cleanup 2014-01-20 23:27:58 +01:00
Ruud
2c43b9a926 Update six 2014-01-20 23:23:40 +01:00
mikke89
964ed5f497 Added test connection button for uTorrent 2014-01-20 22:09:03 +01:00
Ruud
b47a94852a Update library: tornado 2014-01-20 16:55:58 +01:00
Ruud
f318524070 Update library: html5lib 2014-01-20 16:50:21 +01:00
Ruud
04539edb45 Update library: APScheduler 2014-01-20 16:47:49 +01:00
Ruud
5cf21452c1 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-19 19:18:39 +01:00
Ruud
799299c7cc Code cleanup 2014-01-19 19:15:58 +01:00
Ruud Burger
458330d325 Merge pull request #2743 from fuzeman/feature/dev_rtorrent
Fixed bug in rTorrent downloader when file paths start with '/'
2014-01-19 03:03:31 -08:00
Ruud Burger
973bec9e6a Merge pull request #2741 from fuzeman/develop_iptorrents
Fixed IPTorrents provider searching (again)
2014-01-19 03:01:54 -08:00
Dean Gardiner
1f941a5105 Ensure files returned from rTorrent are absolute and inside the torrent directory. 2014-01-19 23:05:05 +13:00
Sami Haahtinen
8217fecb33 Make Log messages pasteable 2014-01-19 12:01:35 +02:00
Dean Gardiner
1dda7edf1c Fixed bug when parsing torrents page in IPT provider 2014-01-19 22:45:37 +13:00
Ruud
2cfff73486 Merge branch 'refs/heads/develop' 2014-01-18 19:54:32 +01:00
Ruud Burger
3c03e400f0 Merge pull request #2732 from mano3m/develop_fixhost
Store username and pass in cleanhost
2014-01-18 10:52:02 -08:00
mano3m
6388d97c5c Store username and pass in cleanhost
Fixes #2727
2014-01-18 12:39:59 +01:00
Ruud
0c7dda8d44 Merge branch 'refs/heads/develop' 2014-01-17 23:17:41 +01:00
Ruud
161e3086fa Force year as int on tmdb info. fix #2725 2014-01-17 23:00:15 +01:00
Ruud
b3f1f938be Speedup automation getinfo 2014-01-17 22:38:38 +01:00
Ruud
082da6e3a6 Don't return .text in urlopen 2014-01-17 22:38:02 +01:00
Ruud
d9b9447242 Change cachekey if info not extended 2014-01-17 22:37:01 +01:00
Ruud
dbaa377770 version.master 2014-01-17 16:29:29 +01:00
Ruud
47d2b81d1c Merge branch 'refs/heads/develop' 2014-01-17 16:28:59 +01:00
Ruud
d743282578 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-16 11:48:59 +01:00
Ruud
7eee6f0b96 Set proper branch in version file 2014-01-16 11:48:49 +01:00
Ruud Burger
dda3fca4b4 Merge pull request #2718 from techmunk/deluge_improvements
Deluge: Ignore empty torrent results, select only what is needed
2014-01-15 23:54:39 -08:00
Techmunk
8648b2f948 Only request needed properties from deluge, and fix error when CP asks for torrent hash that is not in deluge. i.e. missing. 2014-01-16 17:13:30 +10:00
Ruud
f52cbd24f8 Remove debug variable 2014-01-15 22:30:00 +01:00
Ruud
5ea13eeffd Catch xbmc turned off error 2014-01-15 21:51:27 +01:00
Ruud
6cc802952f Catch maxretry error
Don't fill logs with duplicate logs
2014-01-15 21:38:00 +01:00
Ruud
190b9db645 Merge branch 'refs/heads/mano3m-develop_cleanhost' into develop 2014-01-15 21:10:14 +01:00
Ruud
81949b9cad Remove prints and actually save deletion 2014-01-15 21:10:06 +01:00
Ruud
894e419f40 Allow config delete 2014-01-15 21:08:19 +01:00
Ruud
cdc6c036aa Merge branch 'develop_cleanhost' of git://github.com/mano3m/CouchPotatoServer into mano3m-develop_cleanhost 2014-01-15 19:31:45 +01:00
Ruud Burger
1e9168f682 Merge pull request #2712 from fuzeman/develop_fix_blackhole
Fixed encoding bug with blackhole downloader
2014-01-15 10:29:53 -08:00
Ruud
790415dd4f Log version at start. fix #2708 2014-01-15 14:25:12 +01:00
Ruud
679e0ea2c3 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-15 12:16:38 +01:00
Ruud Burger
bd167403c3 Merge pull request #2711 from fuzeman/develop_iptorrents
Fixed IPTorrents provider searching
2014-01-15 01:14:03 -08:00
Dean Gardiner
13abe62bed Fixed encoding bug that caused the blackhole downloader to fail 2014-01-15 22:01:35 +13:00
Dean Gardiner
4147c5b870 Fixed issue retrieving seeders and leechers which caused searching to fail on IPT 2014-01-15 20:52:27 +13:00
Ruud
37d4755aae Log when there is an actual problem with the filedata download. fix #2705 2014-01-14 15:59:51 +01:00
Ruud
a9f416c4c5 Variable cleanup 2014-01-14 12:06:47 +01:00
Ruud
8a11f246b1 Add group to untag release 2014-01-14 09:31:29 +01:00
Ruud
8d44577dca Update movie info getter with better exception handling 2014-01-13 23:43:10 +01:00
mano3m
72457d8d10 Log with system encoding 2014-01-13 23:15:10 +01:00
mano3m
3bb44f8d9f Migrate rTorrent options 2014-01-13 23:14:18 +01:00
Ruud
279297b8fa Log as debug for file overwrite 2014-01-13 22:30:41 +01:00
Ruud
c71e661daf Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-13 21:54:08 +01:00
Ruud
f8820c06fe Normcase in folder compare 2014-01-13 21:53:38 +01:00
Ruud
907b40e3c6 Higher z-index for userscript popup. fix #2703 2014-01-13 16:27:08 +01:00
Ruud
d318e163bb Custom tag was never defined 2014-01-12 21:28:49 +01:00
Ruud
6e9c36a503 Lowercase compare 2014-01-12 20:31:59 +01:00
Ruud
c9e9fe86aa Don't normcase in sp function 2014-01-12 20:25:45 +01:00
Ruud
c4f4e2b524 Split identifier by know tag if possible 2014-01-12 17:42:10 +01:00
Ruud
95246b90f6 Merge branch 'refs/heads/mano3m-develop_newznab' into develop 2014-01-12 17:10:52 +01:00
Ruud
2fad29df51 Style custom tag input
Add description to abr
2014-01-12 17:10:30 +01:00
Ruud
a95320e162 Merge branch 'develop_newznab' of git://github.com/mano3m/CouchPotatoServer into mano3m-develop_newznab 2014-01-12 15:18:41 +01:00
Ruud
31b8805b5e Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-12 15:18:34 +01:00
Ruud
9e69d4e153 Queue multiple media refresh 2014-01-12 15:18:13 +01:00
Ruud Burger
aa5ecd7b42 Merge pull request #2687 from mano3m/develop_log_metadata
Log overwriting of metadata files
2014-01-12 00:21:47 -08:00
Ruud
15f90aa503 Merge branch 'develop' of github.com:RuudBurger/CouchPotatoServer into develop 2014-01-12 09:17:40 +01:00
Ruud Burger
ec86bc4a38 Merge pull request #2688 from mano3m/develop_inctran
Add incomplete folder support to Transmission
2014-01-12 00:16:16 -08:00
Ruud Burger
a3efc64901 Merge pull request #2690 from mano3m/develop_renamer
Abort rename when something fails
2014-01-12 00:13:57 -08:00
Ruud Burger
c929ecbac0 Merge pull request #2691 from mano3m/develop_bluray
Fix Bluray.com encoding issue
2014-01-12 00:12:22 -08:00
mano3m
cc32e49060 Fix Bluray.com encoding issue 2014-01-12 01:07:35 +01:00
mano3m
05c41460c2 Downloader cleanHost
Extend the use of clean host (add more checks and features) and make the settings more dummy proof.
2014-01-12 00:52:32 +01:00
mano3m
794efaa209 Abort rename when something fails
And tag the folder with failed_rename so that the release with not be
deleted later on.
2014-01-12 00:41:54 +01:00
mano3m
b0e93ee18c Add custom_tag field to newznab 2014-01-12 00:34:49 +01:00
mano3m
0393b51db6 Add logging 2014-01-11 23:59:16 +01:00
mano3m
464c8ad71c Log overwriting of metadata files
Gives more info for cases like #2641
2014-01-11 23:36:23 +01:00
mano3m
9df0e01874 Add incomplete folder support to Transmission 2014-01-11 23:33:23 +01:00
Ruud
bf2beb2530 Don't fire async event inside an already async event 2014-01-11 20:46:01 +01:00
Ruud
f0b096d41a Don't show empty title on re-add 2014-01-11 20:45:37 +01:00
Ruud
c948f38469 Only add trailer to known quality list. fix #2684 2014-01-11 14:23:35 +01:00
Joel Kåberg
190e1d2c4f Revert "Merge pull request #2596 from WoLpH/linked_file_delete"
This reverts commit a24d4a9e3b, reversing
changes made to b468048d95.
2013-12-19 22:09:35 +01:00
Joel Kåberg
8a822e35e2 Revert "Merge pull request #2560 from coolius/master"
This reverts commit 64a196f21d, reversing
changes made to a24d4a9e3b.
2013-12-19 22:08:54 +01:00
Joel Kåberg
64a196f21d Merge pull request #2560 from coolius/master
Updated YIFY provider to use proxies and magnet links
2013-12-19 12:58:29 -08:00
Joel Kåberg
a24d4a9e3b Merge pull request #2596 from WoLpH/linked_file_delete
Added delete files button
2013-12-19 12:56:45 -08:00
coolius
138a3b1f3c Replaced default YIFY URL with official alternate domain "yify-torrents.im" 2013-12-16 09:21:59 +00:00
WoLpH
9a55961786 Added delete files button 2013-12-12 02:27:09 +01:00
coolius
4eaddadf8c Removed unusable proxy 2013-12-04 13:50:06 +00:00
coolius
9dd98b29be Added proxy options to YIFY provider 2013-12-03 10:32:10 +00:00
coolius
732946d38a Updated YIFY provider to use proxy list 2013-12-03 10:08:14 +00:00
coolius
ca070e67e7 Updated YIFY provider to use proxy and magnet links 2013-12-02 10:53:47 +00:00
Joel Kåberg
b468048d95 directory properly removed 2013-12-01 21:37:07 +01:00
Ruud
029ae20573 Use Object.each for object looping 2013-11-30 11:55:22 +01:00
Ruud
fdcddaaffc Merge branch 'refs/heads/develop' 2013-11-30 11:27:41 +01:00
Ruud
b4275639f5 Merge branch 'refs/heads/develop' 2013-11-19 09:17:24 +01:00
Ruud
f79fcda27f Small one up 2013-11-17 21:22:24 +01:00
Ruud
cdbcad2238 Merge branch 'refs/heads/develop' into desktop 2013-11-17 21:20:30 +01:00
Ruud
d6709469f6 Merge branch 'refs/heads/develop' 2013-11-17 21:20:13 +01:00
Ruud
5d913e87c3 One up! 2013-11-17 20:20:18 +01:00
Ruud
16f02bda27 Merge branch 'refs/heads/develop' into desktop 2013-11-17 20:03:22 +01:00
Ruud
3e43e3fc4c Merge branch 'refs/heads/develop' 2013-11-17 20:02:20 +01:00
Ruud
e622e68701 Merge branch 'refs/heads/develop' 2013-11-17 00:07:01 +01:00
Ruud
a90a4d1bc2 Merge branch 'refs/heads/develop' 2013-11-16 17:24:09 +01:00
Ruud
165676407a Merge branch 'refs/heads/develop' 2013-11-16 14:39:58 +01:00
Ruud
5131cb0ae1 Merge branch 'refs/heads/develop' 2013-11-16 13:32:37 +01:00
Ruud
d023eb8f1f Wrong variable logged in email notification 2013-10-30 23:10:02 +01:00
Ruud Burger
d6fa5c97db Merge pull request #2387 from restanrm/master
Add support for StartTLS and allow modification of SMTP server port
2013-10-30 15:05:34 -07:00
Ruud
3717443e85 Merge branch 'refs/heads/develop' 2013-10-30 21:39:50 +01:00
Adrien RAFFIN
a1ba39b3d3 Add support for starttls and allow modification of SMTP server port 2013-10-23 10:35:32 +02:00
Ruud
b4ad7b459f Merge branch 'refs/heads/develop' 2013-10-22 14:17:45 +02:00
Ruud
5af8fd0b21 Merge branch 'refs/heads/develop' 2013-10-18 17:29:10 +02:00
Ruud
83e7a8d765 Merge branch 'refs/heads/develop' 2013-10-14 21:57:19 +02:00
Ruud
4bdd4eab64 Merge branch 'refs/heads/develop' 2013-10-14 00:02:30 +02:00
Ruud
5e683b5a48 Revert "TorrentBytes login url change. fix #2317"
This reverts commit 95d0dacd28.
2013-10-07 23:43:08 +02:00
Ruud
f178825d21 Merge branch 'refs/heads/develop' 2013-10-07 09:20:57 +02:00
Ruud
fe2290fccb Merge branch 'refs/heads/develop' 2013-09-29 14:00:20 +02:00
Ruud
e4d67645b7 Merge branch 'refs/heads/develop' 2013-09-28 23:43:49 +02:00
Ruud
8d108b92bf One Up 2013-09-23 21:48:12 +02:00
Ruud
46783028b1 Merge branch 'refs/heads/develop' into desktop 2013-09-23 21:36:45 +02:00
Ruud
324415be15 Merge branch 'refs/heads/develop' 2013-09-23 21:35:51 +02:00
Ruud
7c44f9ab13 Merge branch 'refs/heads/develop' 2013-09-23 21:30:19 +02:00
Ruud
628fda2097 Merge branch 'refs/heads/develop' 2013-09-20 18:15:28 +02:00
Ruud
d08c7c57a8 One up! 2013-09-20 17:46:54 +02:00
Ruud
eeeb845ef3 Simplify string before checking on imdb 2013-09-20 17:30:11 +02:00
Ruud
651a063f94 Fix about submenu 2013-09-20 16:33:01 +02:00
Ruud
f20aaa2d9d Hide IE clear button on search 2013-09-20 16:23:42 +02:00
Ruud
ba925ec191 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	couchpotato/core/plugins/suggestion/main.py
2013-09-20 16:12:40 +02:00
Ruud
da6d749072 Merge branch 'refs/heads/develop' 2013-09-19 22:11:21 +02:00
Ruud
bef2b28acc Merge branch 'refs/heads/develop' 2013-09-18 23:06:29 +02:00
Ruud
302f571837 Merge branch 'refs/heads/develop' 2013-09-18 21:45:03 +02:00
Ruud
41dde209d5 Merge branch 'refs/heads/develop' 2013-09-14 11:41:45 +02:00
Ruud
5d350ef5ac Merge branch 'refs/heads/develop' 2013-09-11 09:29:05 +02:00
Ruud
4fd1d986dd Merge branch 'refs/heads/develop'
Conflicts:
	couchpotato/static/style/main.css
2013-09-11 09:11:04 +02:00
Ruud
55d57bc07b Give minified own FileHandler 2013-09-10 23:25:05 +02:00
Ruud
a81a262fb6 Change static path 2013-09-10 23:25:05 +02:00
Ruud
c37360f848 Login styling 2013-09-10 23:25:05 +02:00
Ruud
d7700900db Login base 2013-09-10 23:25:05 +02:00
Ruud
faa136a365 Merge branch 'refs/heads/develop' 2013-09-10 09:39:18 +02:00
Ruud
bd73b94ea4 Merge branch 'refs/heads/develop' 2013-09-09 22:29:00 +02:00
Ruud
d764d0f096 Merge branch 'refs/heads/develop' 2013-09-08 22:17:03 +02:00
Ruud Burger
fc8db130e0 Merge pull request #1947 from iguyking/patch-1
Update contributing.md
2013-07-15 04:17:17 -07:00
iguyking
682d678f91 Update contributing.md
Fixed to say what was intended
2013-07-14 11:49:48 -05:00
Ruud
3b7376fd18 One up 2013-07-06 01:01:26 +02:00
Ruud
06a211a24a Ignore current suggested results 2013-07-06 00:49:26 +02:00
Ruud
c31b10c798 Ignore current suggested results 2013-07-06 00:49:11 +02:00
Ruud
acda664686 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2013-07-05 22:43:54 +02:00
Ruud
55af696b7c Merge branch 'refs/heads/develop'
Conflicts:
	version.py
2013-07-05 22:18:27 +02:00
Ruud
636e9514e8 Merge branch 'refs/heads/develop' 2013-07-05 22:11:10 +02:00
Ruud
47e649643f Merge branch 'refs/heads/develop'
Conflicts:
	couchpotato/core/helpers/request.py
2013-07-01 23:34:40 +02:00
Ruud
bf59d2f357 Allow unknown keywords for all api calls. fix #1881 2013-06-24 21:22:12 +02:00
Ruud
fb90f6591b Get array arguments as list. fix #1875 2013-06-24 00:26:31 +02:00
Ruud
d66722e737 Allow non trailing slash API calls 2013-06-23 23:30:47 +02:00
Ruud
47d37c2ec9 Merge branch 'refs/heads/develop' 2013-06-23 12:24:01 +02:00
Ruud
e2852407ea One up 2013-06-03 22:22:44 +02:00
Ruud
88e738c6cd Don't show double updater name 2013-06-03 22:22:35 +02:00
Ruud
eaae8bdb0b Merge branch 'refs/heads/develop' into desktop 2013-06-03 22:00:21 +02:00
Ruud
40324ee89f Merge branch 'refs/heads/develop' 2013-06-03 21:59:26 +02:00
Ruud
9db1f3430e Append instead of add for subtitle file list 2013-05-29 19:30:40 +02:00
Ruud
ec19932eef Merge branch 'refs/heads/develop' 2013-05-29 19:10:50 +02:00
Ruud
daf31870f3 Merge branch 'refs/heads/develop' 2013-05-29 14:51:48 +02:00
Ruud
35d49f6a5e Merge branch 'refs/heads/develop' 2013-05-28 21:15:04 +02:00
Ruud
5c0d8a7fef Merge branch 'refs/heads/develop' 2013-05-19 01:19:53 +02:00
Ruud
b2ab114b6d Merge branch 'refs/heads/develop' 2013-05-18 17:29:02 +02:00
Ruud
a8523e6d01 Merge branch 'refs/heads/develop' 2013-05-17 15:48:12 +02:00
Ruud
f946389d60 Merge branch 'refs/heads/develop' 2013-05-11 00:08:51 +02:00
Ruud
821f68909d One up 2013-05-05 21:19:10 +02:00
Ruud
2b8dfed475 Merge branch 'refs/heads/master' into desktop
Conflicts:
	version.py
2013-05-05 20:31:28 +02:00
Ruud
0a749ce913 Merge branch 'refs/heads/develop' 2013-05-05 20:24:40 +02:00
Ruud
dfd2c33657 Extend files, not append 2013-05-05 10:15:19 +02:00
Ruud
7aad27c3d2 Last message check 0 after first message 2013-05-03 23:05:17 +02:00
Ruud
7a5588d5de Merge branch 'refs/heads/develop' 2013-05-03 22:51:35 +02:00
Ruud
f1dde5c925 Merge branch 'refs/heads/develop' 2013-04-14 11:09:32 +02:00
Ruud
0eff4f0096 Merge branch 'master' of github.com:RuudBurger/CouchPotatoServer 2013-04-05 23:59:56 +02:00
Ruud
4d7fa08805 Merge branch 'refs/heads/develop' 2013-04-05 23:57:54 +02:00
Ruud
f0af184262 Merge branch 'refs/heads/develop' 2013-04-02 11:32:20 +02:00
Ruud
5a23be2224 Merge branch 'refs/heads/develop' 2013-03-26 21:42:25 +01:00
Ruud
7f87b255f9 Merge branch 'refs/heads/develop' 2013-03-26 21:10:27 +01:00
Ruud
5ac1118db3 Merge branch 'refs/heads/develop' 2013-03-20 20:32:57 +01:00
Ruud
2c46279617 Merge branch 'refs/heads/develop' 2013-03-20 19:37:15 +01:00
Ruud
5d6a9ad2d0 Merge branch 'refs/heads/develop' 2013-03-19 22:55:39 +01:00
Ruud
607b5ea766 Run exe after install 2013-03-19 21:22:07 +01:00
Ruud
88579cd71a One up 2013-03-19 20:52:07 +01:00
Ruud
6c57316ce6 Use https for changelog 2013-03-19 20:46:00 +01:00
Ruud
6702683da3 Merge branch 'refs/heads/develop' into desktop 2013-03-19 20:34:38 +01:00
Ruud
b9c2b42725 Merge branch 'refs/heads/develop' 2013-03-19 20:28:46 +01:00
Ruud
1ed58586a1 Force install install in AppData
Add images to installer
2013-03-18 23:56:54 +01:00
Ruud
a8369b4e93 Merge branch 'refs/heads/develop'
Conflicts:
	version.py
2013-03-18 21:57:58 +01:00
Ruud
f08ccd4fd8 One up installer 2013-03-17 22:34:04 +01:00
Ruud
312562a9f5 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2013-03-17 16:42:53 +01:00
Ruud
fab8e66fe1 One up
Conflicts:
	version.py
2013-03-17 16:40:22 +01:00
Ruud
4db1b57c70 Merge branch 'refs/heads/develop' 2013-03-17 16:31:31 +01:00
Ruud
b06dbd3069 Merge branch 'refs/heads/develop' 2013-03-12 21:12:18 +01:00
Ruud
f84aa8c638 Merge branch 'refs/heads/develop' 2013-03-09 18:15:26 +01:00
Ruud
8e07dfc730 Merge branch 'refs/heads/develop' 2013-03-08 14:46:01 +01:00
Ruud
a49a00a25f Host to 0.0.0.0 2013-02-14 23:02:44 +01:00
Ruud
673843fb66 Merge branch 'refs/heads/develop' 2013-02-12 23:25:11 +01:00
Ruud
811f35b028 Merge branch 'refs/heads/develop' 2013-02-04 23:11:39 +01:00
Ruud
ec6e2c240f Merge branch 'refs/heads/develop' 2013-01-28 23:21:52 +01:00
Ruud
9e260a89af One up 2013-01-26 14:51:39 +01:00
Ruud
d233e4d22e Merge branch 'refs/heads/develop' into desktop 2013-01-26 13:54:56 +01:00
Ruud
23893dbcb9 Merge branch 'refs/heads/develop' into desktop 2013-01-25 20:13:58 +01:00
Ruud
3187a0f820 Merge branch 'refs/heads/develop' 2013-01-25 15:52:54 +01:00
Ruud
f86b9299c4 Merge branch 'refs/heads/develop' 2013-01-25 14:21:11 +01:00
Ruud
d27d0abeb0 Merge branch 'refs/heads/develop'
Conflicts:
	version.py
2013-01-24 23:35:37 +01:00
Ruud
506871b506 One up 2013-01-23 23:10:55 +01:00
Ruud
6115917660 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2013-01-23 22:57:07 +01:00
Ruud
21df8819d3 Merge branch 'refs/heads/develop' into desktop 2013-01-23 22:55:09 +01:00
Ruud
7c59348138 Merge branch 'refs/heads/develop' 2013-01-23 22:54:29 +01:00
Ruud
ab53f44157 Remove non-int backup folders. closes #1298 2013-01-23 22:23:52 +01:00
Ruud
b35f325d94 Merge branch 'refs/heads/develop' 2013-01-23 22:16:26 +01:00
Ruud
fb3f3e11f6 Merge branch 'refs/heads/develop' into desktop 2013-01-22 21:40:40 +01:00
Ruud
178c8942c3 Merge branch 'refs/heads/develop' into desktop 2013-01-14 19:54:22 +01:00
Ruud
393c14de54 Urlencode spotweb id. fix #1213 2013-01-07 23:12:08 +01:00
Ruud
51e747049d One up 2013-01-07 23:10:42 +01:00
Ruud
0582f7d694 Urlencode spotweb id. fix #1213 2013-01-07 23:10:06 +01:00
Ruud
fa7cac7538 Merge branch 'refs/heads/develop' into desktop 2013-01-07 22:41:55 +01:00
Ruud
bff17c0b95 Merge branch 'refs/heads/develop' 2013-01-07 22:40:37 +01:00
Ruud
d172828ac5 Merge branch 'refs/heads/develop' 2013-01-02 14:12:07 +01:00
Ruud
9500ac73fc Link to downloaders 2013-01-02 13:52:44 +01:00
Ruud
e2cf7e4421 Merge branch 'refs/heads/develop' 2013-01-02 13:44:34 +01:00
Ruud
9a314cfbc4 One up 2012-12-29 00:03:45 +01:00
Ruud
5941d0bf77 Add version to update url 2012-12-29 00:03:36 +01:00
Ruud
d326c1c25c Merge branch 'refs/heads/master' into desktop
Conflicts:
	version.py
2012-12-28 23:31:08 +01:00
Ruud
7e6234298d Merge branch 'refs/heads/develop' 2012-12-28 23:25:40 +01:00
Ruud
d4da206f93 Merge branch 'refs/heads/develop' 2012-12-22 16:33:47 +01:00
Ruud
985a168724 Merge branch 'refs/heads/develop' 2012-12-21 23:18:00 +01:00
Ruud
173c6194ed Merge branch 'refs/heads/develop' 2012-12-19 11:12:26 +01:00
Ruud
bcd23ad10c Merge branch 'refs/heads/develop' 2012-12-17 15:13:00 +01:00
Ruud
898e6f487d Merge branch 'refs/heads/develop' 2012-12-16 23:52:06 +01:00
Ruud
96472a9a8f One up 2012-12-16 23:51:58 +01:00
Ruud
27252561e2 Merge branch 'refs/heads/develop' into desktop 2012-12-16 23:51:24 +01:00
Ruud
6618c3927c Merge branch 'refs/heads/develop' 2012-12-11 23:15:06 +01:00
Ruud
c9e732651f One up 2012-12-01 12:16:58 +01:00
Ruud
7849e7170d Uninstall only create files, no wildcard *.* 2012-12-01 12:16:51 +01:00
Ruud
087894eb4e Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-12-01 11:50:08 +01:00
Ruud
4b58b40226 Merge branch 'refs/heads/develop' 2012-12-01 11:48:54 +01:00
Ruud
3ecc826629 Merge branch 'refs/heads/develop'
Conflicts:
	version.py
2012-11-11 22:06:48 +01:00
Ruud
25f1b8c7a7 Fedora init fix #1009 2012-11-02 18:32:15 +01:00
Ruud
e71da1f14d Use proper description for binary build. fix #1005 2012-11-02 18:24:13 +01:00
Ruud
938b14ba18 One up installer 2012-10-29 20:45:17 +01:00
Ruud
d6522d8f38 One up installer 2012-10-27 18:49:44 +02:00
Ruud
78eab890e7 Merge branch 'refs/heads/develop' into desktop 2012-10-27 18:25:36 +02:00
Ruud
1a56191f83 Don't unzip 2012-10-27 18:22:50 +02:00
Ruud
41c0f34d95 Properly restart 2012-10-27 18:22:40 +02:00
Ruud
37bf205d7a Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-10-27 11:56:57 +02:00
Ruud
32fe3796e4 Merge branch 'refs/heads/develop' 2012-10-26 22:22:47 +02:00
Ruud
359d1aaafa Merge branch 'refs/heads/develop' 2012-10-26 14:54:12 +02:00
Ruud
fb5d336351 Merge branch 'refs/heads/develop' 2012-10-26 14:36:04 +02:00
Ruud
eb30dff986 Merge branch 'refs/heads/develop' 2012-10-13 00:00:44 +02:00
Ruud
9312336962 Merge branch 'refs/heads/develop' 2012-09-24 09:36:59 +02:00
Ruud
aa1fa3eb9a Add description 2012-09-19 15:42:33 +02:00
Ruud
0e2f8a612c Extract zip after build, for testing 2012-09-19 15:29:07 +02:00
Ruud
ade4338ea6 Merge branch 'refs/heads/develop' 2012-09-16 21:32:16 +02:00
Ruud
55b20324c0 Merge branch 'refs/heads/develop' 2012-09-16 12:36:48 +02:00
Ruud
465e7b2abc Merge branch 'refs/heads/develop' into desktop 2012-09-16 12:36:17 +02:00
Ruud
578fb45785 Installer 1 up 2012-09-16 11:35:56 +02:00
Ruud
c0fb28301d Merge branch 'refs/heads/develop'
Conflicts:
	version.py
2012-09-16 10:46:39 +02:00
Ruud
96995bbbe5 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-09-16 10:45:19 +02:00
Ruud
4cfdafebbc Merge branch 'refs/heads/develop' into desktop 2012-09-14 13:15:47 +02:00
Ruud
f9c2503f81 Merge branch 'refs/heads/develop' 2012-09-14 13:15:35 +02:00
Ruud
b97acb8ef5 Merge branch 'refs/heads/develop' into desktop 2012-09-14 13:08:19 +02:00
Ruud
5b4cdf05b1 Merge branch 'refs/heads/develop' 2012-09-14 13:06:56 +02:00
Ruud
d68d2dfdb6 Updated installer 2012-09-09 21:48:38 +02:00
Ruud
39b269a454 Merge branch 'refs/heads/develop' into desktop 2012-09-09 17:32:47 +02:00
Ruud
ac081d3e10 Getting ready for build 2012-09-09 17:28:23 +02:00
Ruud
5d4efb60cf Merge branch 'refs/heads/develop' into desktop 2012-09-08 16:01:49 +02:00
Ruud
6f25a6bdfd Merge branch 'refs/heads/develop' 2012-09-03 10:32:09 +02:00
Ruud
23427e95f7 Merge branch 'refs/heads/develop' 2012-08-26 23:09:51 +02:00
Ruud
cc408b980c Merge branch 'refs/heads/develop' into desktop
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-08-05 16:18:35 +02:00
Ruud
90a09e573b Merge branch 'refs/heads/develop'
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-08-05 16:15:53 +02:00
Ruud
e1d7440b9d Wrong branch in master 2012-07-15 00:23:44 +02:00
Ruud
59590b3ac9 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-07-14 00:35:00 +02:00
Ruud
ff759dacf3 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-07-11 22:43:45 +02:00
Ruud
a328e44130 Merge branch 'desktop' of github.com:RuudBurger/CouchPotatoServer into desktop 2012-05-15 23:23:56 +02:00
Ruud
7924cac5f9 Update installer version 2012-05-15 23:21:24 +02:00
Ruud
1cef3b0c93 remove --nogit tag 2012-05-15 23:21:24 +02:00
Ruud
3cd59edc8b Import errors
File icon
2012-05-15 23:21:24 +02:00
Ruud
0d624af01d Working PNG 2012-05-15 23:21:24 +02:00
Ruud
a09132570c Change branch to desktop 2012-05-15 23:21:14 +02:00
Ruud
ee3fc38432 Better setup 2012-05-15 23:21:14 +02:00
Ruud
dbf0192c8e Inno setup, start 2012-05-15 23:21:14 +02:00
Ruud
6962cfc3f5 new Desktop runner 2012-05-15 23:21:14 +02:00
Ruud
e096ec3b5b Desktop files 2012-05-15 23:20:05 +02:00
Ruud
b30a74ae0c Merge branch 'refs/heads/develop' into desktop 2012-05-15 23:15:17 +02:00
Ruud
978eeb16c9 Update installer version 2012-05-15 23:14:20 +02:00
Ruud
e5c9d91657 Merge branch 'refs/heads/develop' into desktop 2012-05-15 22:27:22 +02:00
Ruud
fa81c3a07a Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-05-14 22:00:02 +02:00
Ruud
9cdd520d41 Merge branch 'refs/heads/develop' into desktop 2012-05-14 20:22:55 +02:00
Ruud
55d7898771 Merge branch 'refs/heads/develop' into desktop 2012-05-13 12:56:45 +02:00
Ruud
b8256bef97 Merge branch 'refs/heads/develop' into desktop 2012-05-12 00:35:52 +02:00
Ruud
5be9dc0b4a Merge branch 'refs/heads/develop' into desktop 2012-05-09 22:20:53 +02:00
Ruud
7d0be0cefb remove --nogit tag 2012-05-07 22:55:54 +02:00
Ruud
f7ce1edb13 Merge branch 'refs/heads/develop' into desktop 2012-05-07 22:44:01 +02:00
Ruud
5ad9280b60 Merge branch 'refs/heads/develop' into desktop 2012-05-07 22:27:55 +02:00
Ruud
2b353f1b20 Merge branch 'refs/heads/develop' into desktop 2012-05-04 17:29:15 +02:00
Ruud
75ab90b87b Merge branch 'refs/heads/develop' into desktop 2012-05-02 21:40:19 +02:00
Ruud
0219296120 Import errors
File icon
2012-05-02 21:34:45 +02:00
Ruud
20032b3a31 Working PNG 2012-05-01 07:35:44 +02:00
Ruud
ea9e9a8c90 Updater base 2012-05-01 07:35:27 +02:00
Ruud
f7b0ee145b Change branch to desktop 2012-04-30 21:37:04 +02:00
Ruud
cc866738ee Merge branch 'refs/heads/develop' into desktop 2012-04-30 21:32:56 +02:00
Ruud
eadccf6e33 Merge branch 'refs/heads/develop' into desktop 2012-04-29 00:00:25 +02:00
Ruud
b70b66e567 Merge branch 'refs/heads/develop' into desktop 2012-04-28 23:14:59 +02:00
Ruud
5b6792dc20 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	CouchPotato.py
	couchpotato/core/plugins/renamer/main.py
	couchpotato/core/plugins/trailer/__init__.py
2012-04-07 21:35:36 +02:00
Ruud
f498e7343a Better setup 2012-02-25 01:48:58 +01:00
Ruud
6962f441e6 Inno setup, start 2012-02-21 18:50:34 +01:00
Ruud
1def62b1b1 new Desktop runner 2012-02-19 17:13:37 +01:00
Ruud
a4a4a6a185 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	CouchPotato.py
2012-02-19 13:14:56 +01:00
Ruud
d4c9469c1a Remove nfo when not renaming as .orig.nfo 2012-02-19 12:53:55 +01:00
Ruud
3e2d4c5d7b Initial trailer support 2012-02-19 12:48:54 +01:00
Ruud
d03f711d69 kwargs in file.download for urlopen 2012-02-19 12:45:22 +01:00
Ruud
44dd8d9b96 Merge lists, not overwrite 2012-02-19 12:37:25 +01:00
Ruud
549a3be0d8 Merge branch 'refs/heads/develop' into desktop 2012-02-12 00:10:56 +01:00
Ruud
1bb2edf8ec Merge branch 'refs/heads/develop' into desktop 2012-02-11 23:33:14 +01:00
Ruud
84c6f36315 Desktop files 2012-02-11 23:06:14 +01:00
292 changed files with 4383 additions and 2065 deletions

View File

@@ -1,4 +1,5 @@
#!/usr/bin/env python
from __future__ import print_function
from logging import handlers
from os.path import dirname
import logging
@@ -132,14 +133,15 @@ if __name__ == '__main__':
pass
except SystemExit:
raise
except socket.error as (nr, msg):
except socket.error as e:
# log when socket receives SIGINT, but continue.
# previous code would have skipped over other types of IO errors too.
nr, msg = e
if nr != 4:
try:
l.log.critical(traceback.format_exc())
except:
print traceback.format_exc()
print(traceback.format_exc())
raise
except:
try:
@@ -148,7 +150,7 @@ if __name__ == '__main__':
if l:
l.log.critical(traceback.format_exc())
else:
print traceback.format_exc()
print(traceback.format_exc())
except:
print traceback.format_exc()
print(traceback.format_exc())
raise

231
Desktop.py Normal file
View File

@@ -0,0 +1,231 @@
from esky.util import appdir_from_executable #@UnresolvedImport
from threading import Thread
from version import VERSION
from wx.lib.softwareupdate import SoftwareUpdate
import os
import sys
import time
import webbrowser
import wx
# Include proper dirs
if hasattr(sys, 'frozen'):
import libs
base_path = os.path.dirname(os.path.dirname(os.path.abspath(libs.__file__)))
else:
base_path = os.path.dirname(os.path.abspath(__file__))
lib_dir = os.path.join(base_path, 'libs')
sys.path.insert(0, base_path)
sys.path.insert(0, lib_dir)
from couchpotato.environment import Env
class TaskBarIcon(wx.TaskBarIcon):
TBMENU_OPEN = wx.NewId()
TBMENU_SETTINGS = wx.NewId()
TBMENU_EXIT = wx.ID_EXIT
closed = False
menu = False
enabled = False
def __init__(self, frame):
wx.TaskBarIcon.__init__(self)
self.frame = frame
icon = wx.Icon('icon.png', wx.BITMAP_TYPE_PNG)
self.SetIcon(icon)
self.Bind(wx.EVT_TASKBAR_LEFT_UP, self.OnTaskBarClick)
self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.OnTaskBarClick)
self.Bind(wx.EVT_MENU, self.onOpen, id = self.TBMENU_OPEN)
self.Bind(wx.EVT_MENU, self.onSettings, id = self.TBMENU_SETTINGS)
self.Bind(wx.EVT_MENU, self.onTaskBarClose, id = self.TBMENU_EXIT)
def OnTaskBarClick(self, evt):
menu = self.CreatePopupMenu()
self.PopupMenu(menu)
menu.Destroy()
def enable(self):
self.enabled = True
if self.menu:
self.open_menu.Enable(True)
self.setting_menu.Enable(True)
self.open_menu.SetText('Open')
def CreatePopupMenu(self):
if not self.menu:
self.menu = wx.Menu()
self.open_menu = self.menu.Append(self.TBMENU_OPEN, 'Open')
self.setting_menu = self.menu.Append(self.TBMENU_SETTINGS, 'About')
self.exit_menu = self.menu.Append(self.TBMENU_EXIT, 'Quit')
if not self.enabled:
self.open_menu.Enable(False)
self.setting_menu.Enable(False)
self.open_menu.SetText('Loading...')
return self.menu
def onOpen(self, event):
url = self.frame.parent.getSetting('base_url')
webbrowser.open(url)
def onSettings(self, event):
url = self.frame.parent.getSetting('base_url') + 'settings/about/'
webbrowser.open(url)
def onTaskBarClose(self, evt):
if self.closed:
return
self.closed = True
self.RemoveIcon()
wx.CallAfter(self.frame.Close)
def makeIcon(self, img):
if "wxMSW" in wx.PlatformInfo:
img = img.Scale(16, 16)
elif "wxGTK" in wx.PlatformInfo:
img = img.Scale(22, 22)
icon = wx.IconFromBitmap(img.CopyFromBitmap())
return icon
class MainFrame(wx.Frame):
def __init__(self, parent):
wx.Frame.__init__(self, None, style = wx.FRAME_NO_TASKBAR)
self.parent = parent
self.tbicon = TaskBarIcon(self)
class WorkerThread(Thread):
def __init__(self, desktop):
Thread.__init__(self)
self.daemon = True
self._desktop = desktop
self.start()
def run(self):
# Get options via arg
from couchpotato.runner import getOptions
args = ['--quiet']
self.options = getOptions(base_path, args)
# Load settings
settings = Env.get('settings')
settings.setFile(self.options.config_file)
# Create data dir if needed
self.data_dir = os.path.expanduser(Env.setting('data_dir'))
if self.data_dir == '':
from couchpotato.core.helpers.variable import getDataDir
self.data_dir = getDataDir()
if not os.path.isdir(self.data_dir):
os.makedirs(self.data_dir)
# Create logging dir
self.log_dir = os.path.join(self.data_dir, 'logs');
if not os.path.isdir(self.log_dir):
os.mkdir(self.log_dir)
try:
from couchpotato.runner import runCouchPotato
runCouchPotato(self.options, base_path, args, data_dir = self.data_dir, log_dir = self.log_dir, Env = Env, desktop = self._desktop)
except:
pass
self._desktop.frame.Close()
class CouchPotatoApp(wx.App, SoftwareUpdate):
settings = {}
events = {}
restart = False
closing = False
def OnInit(self):
# Updater
base_url = 'https://api.couchpota.to/updates/%s'
self.InitUpdates(base_url % VERSION + '/', 'https://couchpota.to/updates/%s' % 'changelog.html',
icon = wx.Icon('icon.png'))
self.frame = MainFrame(self)
self.frame.Bind(wx.EVT_CLOSE, self.onClose)
# CouchPotato thread
self.worker = WorkerThread(self)
return True
def onAppLoad(self):
self.frame.tbicon.enable()
def setSettings(self, settings = {}):
self.settings = settings
def getSetting(self, name):
return self.settings.get(name)
def addEvents(self, events = {}):
for name in events.iterkeys():
self.events[name] = events[name]
def onClose(self, event):
if not self.closing:
self.closing = True
self.frame.tbicon.onTaskBarClose(event)
onClose = self.events.get('onClose')
onClose(event)
def afterShutdown(self, restart = False):
self.frame.Destroy()
self.restart = restart
self.ExitMainLoop()
if __name__ == '__main__':
app = CouchPotatoApp(redirect = False)
app.MainLoop()
time.sleep(1)
if app.restart:
def appexe_from_executable(exepath):
appdir = appdir_from_executable(exepath)
exename = os.path.basename(exepath)
if sys.platform == "darwin":
if os.path.isdir(os.path.join(appdir, "Contents", "MacOS")):
return os.path.join(appdir, "Contents", "MacOS", exename)
return os.path.join(appdir, exename)
exe = appexe_from_executable(sys.executable)
os.chdir(os.path.dirname(exe))
os.execv(exe, [exe] + sys.argv[1:])

View File

@@ -9,13 +9,12 @@ import os
import time
import traceback
log = CPLog(__name__)
log = CPLog(__name__)
views = {}
template_loader = template.Loader(os.path.join(os.path.dirname(__file__), 'templates'))
class BaseHandler(RequestHandler):
def get_current_user(self):
@@ -24,9 +23,10 @@ class BaseHandler(RequestHandler):
if username and password:
return self.get_secure_cookie('user')
else: # Login when no username or password are set
else: # Login when no username or password are set
return True
# Main web handler
class WebHandler(BaseHandler):
@@ -43,11 +43,13 @@ class WebHandler(BaseHandler):
log.error("Failed doing web request '%s': %s", (route, traceback.format_exc()))
self.write({'success': False, 'error': 'Failed returning results'})
def addView(route, func, static = False):
views[route] = func
def get_session(engine = None):
return Env.getSession(engine)
def get_session():
return Env.getSession()
# Web view
@@ -55,12 +57,10 @@ def index():
return template_loader.load('index.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env)
addView('', index)
# API docs
def apiDocs():
routes = []
for route in api.iterkeys():
routes.append(route)
routes = list(api.keys())
if api_docs.get(''):
del api_docs['']
@@ -70,21 +70,22 @@ def apiDocs():
addView('docs', apiDocs)
# Make non basic auth option to get api key
class KeyHandler(RequestHandler):
def get(self, *args, **kwargs):
api = None
api_key = None
try:
username = Env.setting('username')
password = Env.setting('password')
if (self.get_argument('u') == md5(username) or not username) and (self.get_argument('p') == password or not password):
api = Env.setting('api_key')
api_key = Env.setting('api_key')
self.write({
'success': api is not None,
'api_key': api
'success': api_key is not None,
'api_key': api_key
})
except:
log.error('Failed doing key request: %s', (traceback.format_exc()))
@@ -102,20 +103,21 @@ class LoginHandler(BaseHandler):
def post(self, *args, **kwargs):
api = None
api_key = None
username = Env.setting('username')
password = Env.setting('password')
if (self.get_argument('username') == username or not username) and (md5(self.get_argument('password')) == password or not password):
api = Env.setting('api_key')
api_key = Env.setting('api_key')
if api:
if api_key:
remember_me = tryInt(self.get_argument('remember_me', default = 0))
self.set_secure_cookie('user', api, expires_days = 30 if remember_me > 0 else None)
self.set_secure_cookie('user', api_key, expires_days = 30 if remember_me > 0 else None)
self.redirect(Env.get('web_base'))
class LogoutHandler(BaseHandler):
def get(self, *args, **kwargs):
@@ -136,4 +138,3 @@ def page_not_found(rh):
rh.set_status(404)
rh.write('Wrong API key used')

View File

@@ -20,6 +20,7 @@ api_nonblock = {}
api_docs = {}
api_docs_missing = []
def run_async(func):
@wraps(func)
def async_func(*args, **kwargs):
@@ -29,6 +30,7 @@ def run_async(func):
return async_func
# NonBlock API handler
class NonBlockHandler(RequestHandler):
@@ -61,6 +63,7 @@ class NonBlockHandler(RequestHandler):
self.stopper = None
def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
api_nonblock[route] = func_tuple
@@ -69,6 +72,7 @@ def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
else:
api_docs_missing.append(route)
# Blocking API handler
class ApiHandler(RequestHandler):
@@ -98,11 +102,12 @@ class ApiHandler(RequestHandler):
@run_async
def run_handler(callback):
try:
result = api[route](**kwargs)
callback(result)
res = api[route](**kwargs)
callback(res)
except:
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
callback({'success': False, 'error': 'Failed returning results'})
result = yield tornado.gen.Task(run_handler)
# Check JSONP callback
@@ -122,6 +127,7 @@ class ApiHandler(RequestHandler):
api_locks[route].release()
def addApiView(route, func, static = False, docs = None, **kwargs):
if static: func(route)

View File

@@ -1,6 +1,7 @@
from .main import Core
from uuid import uuid4
def start():
return Core()

View File

@@ -117,7 +117,7 @@ class Core(Plugin):
if len(still_running) == 0:
break
elif starttime < time.time() - 30: # Always force break after 30s wait
elif starttime < time.time() - 30: # Always force break after 30s wait
break
running = list(set(still_running) - set(self.ignore_restart))

View File

@@ -1,5 +1,6 @@
from .main import ClientScript
def start():
return ClientScript()

View File

@@ -49,13 +49,14 @@ class ClientScript(Plugin):
'scripts/page/settings.js',
'scripts/page/about.js',
'scripts/page/manage.js',
'scripts/misc/downloaders.js',
],
}
urls = {'style': {}, 'script': {}, }
minified = {'style': {}, 'script': {}, }
paths = {'style': {}, 'script': {}, }
urls = {'style': {}, 'script': {}}
minified = {'style': {}, 'script': {}}
paths = {'style': {}, 'script': {}}
comment = {
'style': '/*** %s:%d ***/\n',
'script': '// %s:%d\n'

View File

@@ -1,5 +1,6 @@
from .main import Desktop
def start():
return Desktop()

View File

@@ -1,5 +1,6 @@
from .main import Scheduler
def start():
return Scheduler()

View File

@@ -17,6 +17,7 @@ class Scheduler(Plugin):
addEvent('schedule.cron', self.cron)
addEvent('schedule.interval', self.interval)
addEvent('schedule.remove', self.remove)
addEvent('schedule.queue', self.queue)
self.sched = Sched(misfire_grace_time = 60)
self.sched.start()
@@ -64,3 +65,14 @@ class Scheduler(Plugin):
'seconds': seconds,
'job': self.sched.add_interval_job(handle, hours = hours, minutes = minutes, seconds = seconds)
}
def queue(self, handlers = None):
if not handlers: handlers = []
for h in handlers:
h()
if self.shuttingDown():
break
return True

View File

@@ -2,6 +2,7 @@ from .main import Updater
from couchpotato.environment import Env
import os
def start():
return Updater()

View File

@@ -15,6 +15,7 @@ import time
import traceback
import version
import zipfile
from six.moves import filter
log = CPLog(__name__)
@@ -32,6 +33,7 @@ class Updater(Plugin):
else:
self.updater = SourceUpdater()
addEvent('app.load', self.logVersion, priority = 10000)
addEvent('app.load', self.setCrons)
addEvent('updater.info', self.info)
@@ -53,12 +55,16 @@ class Updater(Plugin):
addEvent('setting.save.updater.enabled.after', self.setCrons)
def logVersion(self):
info = self.info()
log.info('=== VERSION %s, using %s ===', (info.get('version', {}).get('repr', 'UNKNOWN'), self.updater.getName()))
def setCrons(self):
fireEvent('schedule.remove', 'updater.check', single = True)
if self.isEnabled():
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
self.autoUpdate() # Check after enabling
self.autoUpdate() # Check after enabling
def autoUpdate(self):
if self.isEnabled() and self.check() and self.conf('automatic') and not self.updater.update_failed:
@@ -146,6 +152,9 @@ class BaseUpdater(Plugin):
'branch': self.branch,
}
def getVersion(self):
pass
def check(self):
pass
@@ -174,7 +183,6 @@ class BaseUpdater(Plugin):
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
class GitUpdater(BaseUpdater):
def __init__(self, git_command):
@@ -201,14 +209,15 @@ class GitUpdater(BaseUpdater):
if not self.version:
try:
output = self.repo.getHead() # Yes, please
output = self.repo.getHead() # Yes, please
log.debug('Git version output: %s', output.hash)
self.version = {
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.branch, output.hash[:8], datetime.fromtimestamp(output.getDate())),
'hash': output.hash[:8],
'date': output.getDate(),
'type': 'git',
}
except Exception, e:
except Exception as e:
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
return 'No GIT'
@@ -231,7 +240,7 @@ class GitUpdater(BaseUpdater):
local = self.repo.getHead()
remote = branch.getHead()
log.info('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
log.debug('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
if local.getDate() < remote.getDate():
self.update_version = {
@@ -244,7 +253,6 @@ class GitUpdater(BaseUpdater):
return False
class SourceUpdater(BaseUpdater):
def __init__(self):
@@ -270,9 +278,9 @@ class SourceUpdater(BaseUpdater):
# Extract
if download_data.get('type') == 'zip':
zip = zipfile.ZipFile(destination)
zip.extractall(extracted_path)
zip.close()
zip_file = zipfile.ZipFile(destination)
zip_file.extractall(extracted_path)
zip_file.close()
else:
tar = tarfile.open(destination)
tar.extractall(path = extracted_path)
@@ -339,13 +347,12 @@ class SourceUpdater(BaseUpdater):
return True
def removeDir(self, path):
try:
if os.path.isdir(path):
shutil.rmtree(path)
except OSError, inst:
os.chmod(inst.filename, 0777)
except OSError as inst:
os.chmod(inst.filename, 0o777)
self.removeDir(path)
def getVersion(self):
@@ -359,7 +366,8 @@ class SourceUpdater(BaseUpdater):
log.debug('Source version output: %s', output)
self.version = output
self.version['type'] = 'source'
except Exception, e:
self.version['repr'] = 'source:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.branch, output.get('hash', '')[:8], datetime.fromtimestamp(output.get('date', 0)))
except Exception as e:
log.error('Failed using source updater. %s', e)
return {}
@@ -389,7 +397,7 @@ class SourceUpdater(BaseUpdater):
return {
'hash': commit['sha'],
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
}
except:
log.error('Failed getting latest request from github: %s', traceback.format_exc())
@@ -434,7 +442,7 @@ class DesktopUpdater(BaseUpdater):
if latest and latest != current_version.get('hash'):
self.update_version = {
'hash': latest,
'date': None,
'date': None,
'changelog': self.desktop._changelogURL,
}
@@ -446,6 +454,7 @@ class DesktopUpdater(BaseUpdater):
def getVersion(self):
return {
'repr': 'desktop: %s' % self.desktop._esky.active_version,
'hash': self.desktop._esky.active_version,
'date': None,
'type': 'desktop',

View File

@@ -24,7 +24,7 @@ var UpdaterBase = new Class({
self.doUpdate();
else {
App.unBlockPage();
App.on('message', 'No updates available');
App.trigger('message', ['No updates available']);
}
}
})

View File

@@ -1,4 +1,5 @@
from base64 import b32decode, b16encode
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import mergeDicts
from couchpotato.core.logger import CPLog
@@ -42,6 +43,7 @@ class Downloader(Provider):
addEvent('download.remove_failed', self._removeFailed)
addEvent('download.pause', self._pause)
addEvent('download.process_complete', self._processComplete)
addApiView('download.%s.test' % self.getName().lower(), self._test)
def getEnabledProtocol(self):
for download_protocol in self.protocol:
@@ -158,6 +160,15 @@ class Downloader(Provider):
(d_manual and manual or d_manual is False) and \
(not data or self.isCorrectProtocol(data.get('protocol')))
def _test(self):
t = self.test()
if isinstance(t, tuple):
return {'success': t[0], 'msg': t[1]}
return {'success': t}
def test(self):
return False
def _pause(self, release_download, pause = True):
if self.isDisabled(manual = True, data = {}):
return

View File

@@ -1,6 +1,7 @@
from .main import Blackhole
from couchpotato.core.helpers.variable import getDownloadDir
def start():
return Blackhole()

View File

@@ -1,5 +1,6 @@
from __future__ import with_statement
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
import os
@@ -67,6 +68,20 @@ class Blackhole(Downloader):
return False
def test(self):
directory = self.conf('directory')
if directory and os.path.isdir(directory):
test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))
# Check if folder is writable
self.createFile(test_file, 'This is a test file')
if os.path.isfile(test_file):
os.remove(test_file)
return True
return False
def getEnabledProtocol(self):
if self.conf('use_for') == 'both':
return super(Blackhole, self).getEnabledProtocol()

View File

@@ -1,5 +1,6 @@
from .main import Deluge
def start():
return Deluge()

View File

@@ -2,7 +2,7 @@ from base64 import b64encode, b16encode, b32decode
from bencode import bencode as benc, bdecode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import isInt, sp
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.helpers.variable import tryFloat, cleanHost
from couchpotato.core.logger import CPLog
from datetime import timedelta
from hashlib import sha1
@@ -20,14 +20,14 @@ class Deluge(Downloader):
log = CPLog(__name__)
drpc = None
def connect(self):
def connect(self, reconnect = False):
# Load host from config and split out port.
host = self.conf('host').split(':')
host = cleanHost(self.conf('host'), protocol = False).split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
if not self.drpc:
if not self.drpc or reconnect:
self.drpc = DelugeRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
return self.drpc
@@ -86,6 +86,11 @@ class Deluge(Downloader):
log.info('Torrent sent to Deluge successfully.')
return self.downloadReturnId(remote_torrent)
def test(self):
if self.connect(True) and self.drpc.test():
return True
return False
def getAllDownloadStatus(self, ids):
log.debug('Checking Deluge download status.')
@@ -103,8 +108,13 @@ class Deluge(Downloader):
for torrent_id in queue:
torrent = queue[torrent_id]
log.debug('name=%s / id=%s / save_path=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
if not 'hash' in torrent:
# When given a list of ids, deluge will return an empty item for a non-existant torrent.
continue
log.debug('name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_on_completed'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
# Deluge has no easy way to work out if a torrent is stalled or failing.
#status = 'failed'
status = 'busy'
@@ -120,11 +130,11 @@ class Deluge(Downloader):
download_dir = sp(torrent['save_path'])
if torrent['move_on_completed']:
download_dir = torrent['move_completed_path']
torrent_files = []
for file_item in torrent['files']:
torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))
release_downloads.append({
'id': torrent['hash'],
'name': torrent['name'],
@@ -152,6 +162,7 @@ class Deluge(Downloader):
log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
class DelugeRPC(object):
host = 'localhost'
@@ -172,6 +183,13 @@ class DelugeRPC(object):
self.client = DelugeClient()
self.client.connect(self.host, int(self.port), self.username, self.password)
def test(self):
try:
self.connect()
except:
return False
return True
def add_torrent_magnet(self, torrent, options):
torrent_id = False
try:
@@ -182,7 +200,7 @@ class DelugeRPC(object):
if torrent_id and options['label']:
self.client.label.set_torrent(torrent_id, options['label']).get()
except Exception, err:
except Exception as err:
log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
finally:
if self.client:
@@ -200,7 +218,7 @@ class DelugeRPC(object):
if torrent_id and options['label']:
self.client.label.set_torrent(torrent_id, options['label']).get()
except Exception, err:
except Exception as err:
log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
finally:
if self.client:
@@ -212,8 +230,8 @@ class DelugeRPC(object):
ret = False
try:
self.connect()
ret = self.client.core.get_torrents_status({'id': ids}, {}).get()
except Exception, err:
ret = self.client.core.get_torrents_status({'id': ids}, ('name', 'hash', 'save_path', 'move_completed_path', 'progress', 'state', 'eta', 'ratio', 'stop_ratio', 'is_seed', 'is_finished', 'paused', 'move_on_completed', 'files')).get()
except Exception as err:
log.error('Failed to get all torrents: %s %s', (err, traceback.format_exc()))
finally:
if self.client:
@@ -224,7 +242,7 @@ class DelugeRPC(object):
try:
self.connect()
self.client.core.pause_torrent(torrent_ids).get()
except Exception, err:
except Exception as err:
log.error('Failed to pause torrent: %s %s', (err, traceback.format_exc()))
finally:
if self.client:
@@ -234,7 +252,7 @@ class DelugeRPC(object):
try:
self.connect()
self.client.core.resume_torrent(torrent_ids).get()
except Exception, err:
except Exception as err:
log.error('Failed to resume torrent: %s %s', (err, traceback.format_exc()))
finally:
if self.client:
@@ -245,7 +263,7 @@ class DelugeRPC(object):
try:
self.connect()
ret = self.client.core.remove_torrent(torrent_id, remove_local_data).get()
except Exception, err:
except Exception as err:
log.error('Failed to remove torrent: %s %s', (err, traceback.format_exc()))
finally:
if self.client:

View File

@@ -1,5 +1,6 @@
from .main import NZBGet
def start():
return NZBGet()

View File

@@ -1,7 +1,7 @@
from base64 import standard_b64encode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import ss, sp
from couchpotato.core.helpers.variable import tryInt, md5
from couchpotato.core.helpers.variable import tryInt, md5, cleanHost
from couchpotato.core.logger import CPLog
from datetime import timedelta
import re
@@ -16,8 +16,7 @@ log = CPLog(__name__)
class NZBGet(Downloader):
protocol = ['nzb']
url = '%(protocol)s://%(username)s:%(password)s@%(host)s/xmlrpc'
rpc = 'xmlrpc'
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
@@ -29,10 +28,10 @@ class NZBGet(Downloader):
log.info('Sending "%s" to NZBGet.', data.get('name'))
url = self.url % {'protocol': 'https' if self.conf('ssl') else 'http', 'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
rpc = xmlrpclib.ServerProxy(url)
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
log.debug('Successfully connected to NZBGet')
@@ -41,7 +40,7 @@ class NZBGet(Downloader):
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError, e:
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
@@ -55,7 +54,7 @@ class NZBGet(Downloader):
if xml_response:
log.info('NZB sent successfully to NZBGet')
nzb_id = md5(data['url']) # about as unique as they come ;)
nzb_id = md5(data['url']) # about as unique as they come ;)
couchpotato_id = "couchpotato=" + nzb_id
groups = rpc.listgroups()
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
@@ -67,13 +66,32 @@ class NZBGet(Downloader):
log.error('NZBGet could not add %s to the queue.', nzb_name)
return False
def test(self):
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to test connection'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return False
return True
def getAllDownloadStatus(self, ids):
log.debug('Checking NZBGet download status.')
url = self.url % {'protocol': 'https' if self.conf('ssl') else 'http', 'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
rpc = self.getRPC()
rpc = xmlrpclib.ServerProxy(url)
try:
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
log.debug('Successfully connected to NZBGet')
@@ -82,7 +100,7 @@ class NZBGet(Downloader):
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return []
except xmlrpclib.ProtocolError, e:
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
@@ -115,7 +133,7 @@ class NZBGet(Downloader):
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
except:
pass
release_downloads.append({
'id': nzb_id,
'name': nzb['NZBFilename'],
@@ -157,9 +175,8 @@ class NZBGet(Downloader):
log.info('%s failed downloading, deleting...', release_download['name'])
url = self.url % {'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
rpc = self.getRPC()
rpc = xmlrpclib.ServerProxy(url)
try:
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
log.debug('Successfully connected to NZBGet')
@@ -168,7 +185,7 @@ class NZBGet(Downloader):
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError, e:
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
@@ -193,3 +210,7 @@ class NZBGet(Downloader):
return False
return True
def getRPC(self):
url = cleanHost(host = self.conf('host'), ssl = self.conf('ssl'), username = self.conf('username'), password = self.conf('password')) + self.rpc
return xmlrpclib.ServerProxy(url)

View File

@@ -1,5 +1,6 @@
from .main import NZBVortex
def start():
return NZBVortex()
@@ -22,7 +23,15 @@ config = [{
},
{
'name': 'host',
'default': 'https://localhost:4321',
'default': 'localhost:4321',
'description': 'Hostname with port. Usually <strong>localhost:4321</strong>',
},
{
'name': 'ssl',
'default': 1,
'type': 'bool',
'advanced': True,
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
},
{
'name': 'api_key',

View File

@@ -36,12 +36,20 @@ class NZBVortex(Downloader):
time.sleep(10)
raw_statuses = self.call('nzb')
nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if os.path.basename(item['nzbFileName']) == nzb_filename][0]
nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if os.path.basename(nzb['nzbFileName']) == nzb_filename][0]
return self.downloadReturnId(nzb_id)
except:
log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
return False
def test(self):
try:
login_result = self.login()
except:
return False
return login_result
def getAllDownloadStatus(self, ids):
raw_statuses = self.call('nzb')
@@ -56,13 +64,13 @@ class NZBVortex(Downloader):
status = 'completed'
elif nzb['state'] in [21, 22, 24]:
status = 'failed'
release_downloads.append({
'id': nzb['id'],
'name': nzb['uiTitle'],
'status': status,
'original_status': nzb['state'],
'timeleft':-1,
'timeleft': -1,
'folder': sp(nzb['destinationPath']),
})
@@ -102,7 +110,6 @@ class NZBVortex(Downloader):
log.error('Login failed, please check you api-key')
return False
def call(self, call, parameters = None, repeat = False, auth = True, *args, **kwargs):
# Login first
@@ -116,14 +123,14 @@ class NZBVortex(Downloader):
params = tryUrlencode(parameters)
url = cleanHost(self.conf('host')) + 'api/' + call
url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api/' + call
try:
data = self.urlopen('%s?%s' % (url, params), *args, **kwargs)
if data:
return json.loads(data)
except URLError, e:
except URLError as e:
if hasattr(e, 'code') and e.code == 403:
# Try login and do again
if not repeat:
@@ -145,7 +152,7 @@ class NZBVortex(Downloader):
try:
data = self.urlopen(url, show_error = False)
self.api_level = float(json.loads(data).get('apilevel'))
except URLError, e:
except URLError as e:
if hasattr(e, 'code') and e.code == 403:
log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
else:
@@ -175,6 +182,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version = ssl.PROTOCOL_TLSv1)
class HTTPSHandler(urllib2.HTTPSHandler):
def https_open(self, req):
return self.do_open(HTTPSConnection, req)

View File

@@ -1,5 +1,6 @@
from .main import Pneumatic
def start():
return Pneumatic()

View File

@@ -1,5 +1,6 @@
from __future__ import with_statement
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.logger import CPLog
import os
import traceback
@@ -26,26 +27,26 @@ class Pneumatic(Downloader):
log.error('No nzb available!')
return False
fullPath = os.path.join(directory, self.createFileName(data, filedata, media))
full_path = os.path.join(directory, self.createFileName(data, filedata, media))
try:
if not os.path.isfile(fullPath):
log.info('Downloading %s to %s.', (data.get('protocol'), fullPath))
with open(fullPath, 'wb') as f:
if not os.path.isfile(full_path):
log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
with open(full_path, 'wb') as f:
f.write(filedata)
nzb_name = self.createNzbName(data, media)
strm_path = os.path.join(directory, nzb_name)
strm_file = open(strm_path + '.strm', 'wb')
strmContent = self.strm_syntax % (fullPath, nzb_name)
strmContent = self.strm_syntax % (full_path, nzb_name)
strm_file.write(strmContent)
strm_file.close()
return self.downloadReturnId('')
else:
log.info('File %s already exists.', fullPath)
log.info('File %s already exists.', full_path)
return self.downloadReturnId('')
except:
@@ -56,3 +57,17 @@ class Pneumatic(Downloader):
log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
return False
return False
def test(self):
directory = self.conf('directory')
if directory and os.path.isdir(directory):
test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))
# Check if folder is writable
self.createFile(test_file, 'This is a test file')
if os.path.isfile(test_file):
os.remove(test_file)
return True
return False

View File

@@ -1,5 +1,6 @@
from .main import rTorrent
def start():
return rTorrent()
@@ -20,11 +21,32 @@ config = [{
'type': 'enabler',
'radio_group': 'torrent',
},
# @RuudBurger: How do I migrate this?
# {
# 'name': 'url',
# 'default': 'http://localhost:80/RPC2',
# 'description': 'XML-RPC Endpoint URI. Usually <strong>scgi://localhost:5000</strong> '
# 'or <strong>http://localhost:80/RPC2</strong>'
# },
{
'name': 'url',
'default': 'http://localhost:80/RPC2',
'description': 'XML-RPC Endpoint URI. Usually <strong>scgi://localhost:5000</strong> '
'or <strong>http://localhost:80/RPC2</strong>'
'name': 'host',
'default': 'localhost:80',
'description': 'RPC Communication URI. Usually <strong>scgi://localhost:5000</strong>, '
'<strong>httprpc://localhost/rutorrent</strong> or <strong>localhost:80</strong>'
},
{
'name': 'ssl',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
},
{
'name': 'rpc_url',
'type': 'string',
'default': 'RPC2',
'advanced': True,
'description': 'Change if your RPC mount is at a different path.',
},
{
'name': 'username',

View File

@@ -1,12 +1,15 @@
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.helpers.variable import cleanHost, splitString
from couchpotato.core.logger import CPLog
from base64 import b16encode, b32decode
from bencode import bencode, bdecode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.logger import CPLog
from datetime import timedelta
from hashlib import sha1
from rtorrent import RTorrent
from rtorrent.err import MethodError
from urlparse import urlparse
import os
log = CPLog(__name__)
@@ -16,29 +19,75 @@ class rTorrent(Downloader):
protocol = ['torrent', 'torrent_magnet']
rt = None
error_msg = ''
def connect(self):
# Migration url to host options
def __init__(self):
super(rTorrent, self).__init__()
addEvent('app.load', self.migrate)
addEvent('setting.save.rtorrent.*.after', self.settingsChanged)
def migrate(self):
url = self.conf('url')
if url:
host_split = splitString(url.split('://')[-1], split_on = '/')
self.conf('ssl', value = url.startswith('https'))
self.conf('host', value = host_split[0].strip())
self.conf('rpc_url', value = '/'.join(host_split[1:]))
self.deleteConf('url')
def settingsChanged(self):
# Reset active connection if settings have changed
if self.rt:
log.debug('Settings have changed, closing active connection')
self.rt = None
return True
def connect(self, reconnect = False):
# Already connected?
if self.rt is not None:
if not reconnect and self.rt is not None:
return self.rt
# Ensure url is set
if not self.conf('url'):
log.error('Config properties are not filled in correctly, url is missing.')
return False
url = cleanHost(self.conf('host'), protocol = True, ssl = self.conf('ssl'))
parsed = urlparse(url)
# rpc_url is only used on http/https scgi pass-through
if parsed.scheme in ['http', 'https']:
url += self.conf('rpc_url')
if self.conf('username') and self.conf('password'):
self.rt = RTorrent(
self.conf('url'),
url,
self.conf('username'),
self.conf('password')
)
else:
self.rt = RTorrent(self.conf('url'))
self.rt = RTorrent(url)
self.error_msg = ''
try:
self.rt._verify_conn()
except AssertionError as e:
self.error_msg = e.message
self.rt = None
return self.rt
def _update_provider_group(self, name, data):
def test(self):
if self.connect(True):
return True
if self.error_msg:
return False, 'Connection failed: ' + self.error_msg
return False
def updateProviderGroup(self, name, data):
if data.get('seed_time'):
log.info('seeding time ignored, not supported')
@@ -70,7 +119,7 @@ class rTorrent(Downloader):
# Reset group action and disable it
group.set_command()
group.disable()
except MethodError, err:
except MethodError as err:
log.error('Unable to set group options: %s', err.msg)
return False
@@ -87,14 +136,13 @@ class rTorrent(Downloader):
return False
group_name = 'cp_' + data.get('provider').lower()
if not self._update_provider_group(group_name, data):
if not self.updateProviderGroup(group_name, data):
return False
torrent_params = {}
if self.conf('label'):
torrent_params['label'] = self.conf('label')
if not filedata and data.get('protocol') == 'torrent':
log.error('Failed sending torrent, no data')
return False
@@ -118,7 +166,7 @@ class rTorrent(Downloader):
# Send request to rTorrent
try:
# Send torrent to rTorrent
torrent = self.rt.load_torrent(filedata)
torrent = self.rt.load_torrent(filedata, verify_retries=10)
if not torrent:
log.error('Unable to find the torrent, did it fail to load?')
@@ -139,10 +187,25 @@ class rTorrent(Downloader):
torrent.start()
return self.downloadReturnId(torrent_hash)
except Exception, err:
except Exception as err:
log.error('Failed to send torrent to rTorrent: %s', err)
return False
def getTorrentStatus(self, torrent):
if torrent.hashing or torrent.hash_checking or torrent.message:
return 'busy'
if not torrent.complete:
return 'busy'
if not torrent.open:
return 'completed'
if torrent.state and torrent.active:
return 'seeding'
return 'busy'
def getAllDownloadStatus(self, ids):
log.debug('Checking rTorrent download status.')
@@ -156,21 +219,21 @@ class rTorrent(Downloader):
for torrent in torrents:
if torrent.info_hash in ids:
torrent_directory = os.path.normpath(torrent.directory)
torrent_files = []
for file_item in torrent.get_files():
torrent_files.append(sp(os.path.join(torrent.directory, file_item.path)))
status = 'busy'
if torrent.complete:
if torrent.active:
status = 'seeding'
for file in torrent.get_files():
if not os.path.normpath(file.path).startswith(torrent_directory):
file_path = os.path.join(torrent_directory, file.path.lstrip('/'))
else:
status = 'completed'
file_path = file.path
torrent_files.append(sp(file_path))
release_downloads.append({
'id': torrent.info_hash,
'name': torrent.name,
'status': status,
'status': self.getTorrentStatus(torrent),
'seed_ratio': torrent.ratio,
'original_status': torrent.state,
'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
@@ -180,7 +243,7 @@ class rTorrent(Downloader):
return release_downloads
except Exception, err:
except Exception as err:
log.error('Failed to get status from rTorrent: %s', err)
return []

View File

@@ -1,5 +1,6 @@
from .main import Sabnzbd
def start():
return Sabnzbd()
@@ -24,6 +25,13 @@ config = [{
'name': 'host',
'default': 'localhost:8080',
},
{
'name': 'ssl',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
},
{
'name': 'api_key',
'label': 'Api Key',

View File

@@ -64,6 +64,26 @@ class Sabnzbd(Downloader):
log.error('Error getting data from SABNZBd: %s', sab_data)
return False
def test(self):
try:
sab_data = self.call({
'mode': 'version',
})
v = sab_data.split('.')
if int(v[0]) == 0 and int(v[1]) < 7:
return False, 'Your Sabnzbd client is too old, please update to newest version.'
# the version check will work even with wrong api key, so we need the next check as well
sab_data = self.call({
'mode': 'qstatus',
})
if not sab_data:
return False
except:
return False
return True
def getAllDownloadStatus(self, ids):
log.debug('Checking SABnzbd download status.')
@@ -95,7 +115,7 @@ class Sabnzbd(Downloader):
status = 'busy'
if 'ENCRYPTED / ' in nzb['filename']:
status = 'failed'
release_downloads.append({
'id': nzb['nzo_id'],
'name': nzb['filename'],
@@ -112,7 +132,7 @@ class Sabnzbd(Downloader):
status = 'failed'
elif nzb['status'] == 'Completed':
status = 'completed'
release_downloads.append({
'id': nzb['nzo_id'],
'name': nzb['name'],
@@ -165,9 +185,9 @@ class Sabnzbd(Downloader):
def call(self, request_params, use_json = True, **kwargs):
url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
'apikey': self.conf('api_key'),
'output': 'json'
url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
'apikey': self.conf('api_key'),
'output': 'json'
}))
data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)

View File

@@ -1,5 +1,6 @@
from .main import Synology
def start():
return Synology()

View File

@@ -1,5 +1,6 @@
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import isInt
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
import json
import requests
@@ -21,7 +22,7 @@ class Synology(Downloader):
log.error('Sending "%s" (%s) to Synology.', (data['name'], data['protocol']))
# Load host from config and split out port.
host = self.conf('host').split(':')
host = cleanHost(self.conf('host'), protocol = False).split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
@@ -44,6 +45,16 @@ class Synology(Downloader):
finally:
return self.downloadReturnId('') if response else False
def test(self):
host = cleanHost(self.conf('host'), protocol = False).split(':')
try:
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
test_result = srpc.test()
except:
return False
return test_result
def getEnabledProtocol(self):
if self.conf('use_for') == 'both':
return super(Synology, self).getEnabledProtocol()
@@ -64,6 +75,7 @@ class Synology(Downloader):
return super(Synology, self).isEnabled(manual, data) and\
((self.conf('use_for') in for_protocol))
class SynologyRPC(object):
"""SynologyRPC lite library"""
@@ -106,11 +118,11 @@ class SynologyRPC(object):
if response['success']:
log.info('Synology action successfull')
return response
except requests.ConnectionError, err:
except requests.ConnectionError as err:
log.error('Synology connection error, check your config %s', err)
except requests.HTTPError, err:
except requests.HTTPError as err:
log.error('SynologyRPC HTTPError: %s', err)
except Exception, err:
except Exception as err:
log.error('Exception: %s', err)
finally:
return response
@@ -145,3 +157,6 @@ class SynologyRPC(object):
self._logout()
return result
def test(self):
return bool(self._login())

View File

@@ -1,5 +1,6 @@
from .main import Transmission
def start():
return Transmission()

View File

@@ -1,7 +1,7 @@
from base64 import b64encode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import isInt, sp
from couchpotato.core.helpers.variable import tryInt, tryFloat
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
from couchpotato.core.logger import CPLog
from datetime import timedelta
import httplib
@@ -19,15 +19,15 @@ class Transmission(Downloader):
log = CPLog(__name__)
trpc = None
def connect(self):
def connect(self, reconnect = False):
# Load host from config and split out port.
host = self.conf('host').split(':')
host = cleanHost(self.conf('host'), protocol = False).split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
if not self.trpc:
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url'), username = self.conf('username'), password = self.conf('password'))
if not self.trpc or reconnect:
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))
return self.trpc
@@ -83,6 +83,11 @@ class Transmission(Downloader):
log.info('Torrent sent to Transmission successfully.')
return self.downloadReturnId(remote_torrent['torrent-added']['hashString'])
def test(self):
if self.connect(True) and self.trpc.get_session():
return True
return False
def getAllDownloadStatus(self, ids):
log.debug('Checking Transmission download status.')
@@ -96,6 +101,7 @@ class Transmission(Downloader):
'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
}
session = self.trpc.get_session()
queue = self.trpc.get_alltorrents(return_params)
if not (queue and queue.get('torrents')):
log.debug('Nothing in queue or error')
@@ -103,13 +109,9 @@ class Transmission(Downloader):
for torrent in queue['torrents']:
if torrent['hashString'] in ids:
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s',
(torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished']))
torrent_files = []
for file_item in torrent['files']:
torrent_files.append(sp(os.path.join(torrent['downloadDir'], file_item['name'])))
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s',
(torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished'], session['incomplete-dir-enabled'], session['incomplete-dir']))
status = 'busy'
if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'):
status = 'failed'
@@ -117,7 +119,16 @@ class Transmission(Downloader):
status = 'completed'
elif torrent['status'] in [5, 6]:
status = 'seeding'
if session['incomplete-dir-enabled'] and status == 'busy':
torrent_folder = session['incomplete-dir']
else:
torrent_folder = torrent['downloadDir']
torrent_files = []
for file_item in torrent['files']:
torrent_files.append(sp(os.path.join(torrent_folder, file_item['name'])))
release_downloads.append({
'id': torrent['hashString'],
'name': torrent['name'],
@@ -125,7 +136,7 @@ class Transmission(Downloader):
'original_status': torrent['status'],
'seed_ratio': torrent['uploadRatio'],
'timeleft': str(timedelta(seconds = torrent['eta'])),
'folder': sp(torrent['downloadDir'] if len(torrent_files) == 1 else os.path.join(torrent['downloadDir'], torrent['name'])),
'folder': sp(torrent_folder if len(torrent_files) == 1 else os.path.join(torrent_folder, torrent['name'])),
'files': '|'.join(torrent_files)
})
@@ -181,10 +192,10 @@ class TransmissionRPC(object):
else:
log.debug('Unknown failure sending command to Transmission. Return text is: %s', response['result'])
return False
except httplib.InvalidURL, err:
except httplib.InvalidURL as err:
log.error('Invalid Transmission host, check your config %s', err)
return False
except urllib2.HTTPError, err:
except urllib2.HTTPError as err:
if err.code == 401:
log.error('Invalid Transmission Username or Password, check your config')
return False
@@ -202,7 +213,7 @@ class TransmissionRPC(object):
log.error('Unable to get Transmission Session-Id %s', err)
else:
log.error('TransmissionRPC HTTPError: %s', err)
except urllib2.URLError, err:
except urllib2.URLError as err:
log.error('Unable to connect to Transmission %s', err)
def get_session(self):

View File

@@ -1,5 +1,6 @@
from .main import uTorrent
def start():
return uTorrent()
@@ -23,7 +24,7 @@ config = [{
{
'name': 'host',
'default': 'localhost:8000',
'description': 'Hostname with port. Usually <strong>localhost:8000</strong>',
'description': 'Port can be found in settings when enabling WebUI.',
},
{
'name': 'username',

View File

@@ -2,7 +2,7 @@ from base64 import b16encode, b32decode
from bencode import bencode as benc, bdecode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import isInt, ss, sp
from couchpotato.core.helpers.variable import tryInt, tryFloat
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
from couchpotato.core.logger import CPLog
from datetime import timedelta
from hashlib import sha1
@@ -37,7 +37,7 @@ class uTorrent(Downloader):
def connect(self):
# Load host from config and split out port.
host = self.conf('host').split(':')
host = cleanHost(self.conf('host'), protocol = False).split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
@@ -66,7 +66,7 @@ class uTorrent(Downloader):
new_settings['seed_prio_limitul_flag'] = True
log.info('Updated uTorrent settings to set a torrent to complete after it the seeding requirements are met.')
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
new_settings['bt.read_only_on_complete'] = False
log.info('Updated uTorrent settings to not set the files to read only after completing.')
@@ -115,6 +115,17 @@ class uTorrent(Downloader):
return self.downloadReturnId(torrent_hash)
def test(self):
if self.connect():
build_version = self.utorrent_api.get_build()
if not build_version:
return False
if build_version < 25406: # This build corresponds to version 3.0.0 stable
return False, 'Your uTorrent client is too old, please update to newest version.'
return True
return False
def getAllDownloadStatus(self, ids):
log.debug('Checking uTorrent download status.')
@@ -149,7 +160,7 @@ class uTorrent(Downloader):
torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
except:
log.debug('Failed getting files from torrent: %s', torrent[2])
status = 'busy'
if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
status = 'seeding'
@@ -157,10 +168,10 @@ class uTorrent(Downloader):
status = 'failed'
elif torrent[4] == 1000:
status = 'completed'
if not status == 'busy':
self.removeReadOnly(torrent_files)
release_downloads.append({
'id': torrent[0],
'name': torrent[2],
@@ -231,14 +242,14 @@ class uTorrentAPI(object):
return response
else:
log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
except httplib.InvalidURL, err:
except httplib.InvalidURL as err:
log.error('Invalid uTorrent host, check your config %s', err)
except urllib2.HTTPError, err:
except urllib2.HTTPError as err:
if err.code == 401:
log.error('Invalid uTorrent Username or Password, check your config')
else:
log.error('uTorrent HTTPError: %s', err)
except urllib2.URLError, err:
except urllib2.URLError as err:
log.error('Unable to connect to uTorrent %s', err)
return False
@@ -261,7 +272,7 @@ class uTorrentAPI(object):
def set_torrent(self, hash, params):
action = 'action=setprops&hash=%s' % hash
for k, v in params.iteritems():
for k, v in params.items():
action += '&s=%s&v=%s' % (k, v)
return self._request(action)
@@ -304,7 +315,7 @@ class uTorrentAPI(object):
#log.debug('uTorrent settings: %s', settings_dict)
except Exception, err:
except Exception as err:
log.error('Failed to get settings from uTorrent: %s', err)
return settings_dict
@@ -322,3 +333,10 @@ class uTorrentAPI(object):
def get_files(self, hash):
action = 'action=getfiles&hash=%s' % hash
return self._request(action)
def get_build(self):
data = self._request('')
if not data:
return False
response = json.loads(data)
return int(response.get('build'))

View File

@@ -1,5 +1,5 @@
from axl.axel import Event
from couchpotato.core.helpers.variable import mergeDicts, natcmp
from couchpotato.core.helpers.variable import mergeDicts, natsortKey
from couchpotato.core.logger import CPLog
import threading
import traceback
@@ -7,6 +7,7 @@ import traceback
log = CPLog(__name__)
events = {}
def runHandler(name, handler, *args, **kwargs):
try:
return handler(*args, **kwargs)
@@ -14,6 +15,7 @@ def runHandler(name, handler, *args, **kwargs):
from couchpotato.environment import Env
log.error('Error in event "%s", that wasn\'t caught: %s%s', (name, traceback.format_exc(), Env.all() if not Env.get('dev') else ''))
def addEvent(name, handler, priority = 100):
if not events.get(name):
@@ -27,7 +29,7 @@ def addEvent(name, handler, priority = 100):
has_parent = hasattr(handler, 'im_self')
parent = None
if has_parent:
parent = handler.im_self
parent = handler.__self__
bc = hasattr(parent, 'beforeCall')
if bc: parent.beforeCall(handler)
@@ -48,22 +50,19 @@ def addEvent(name, handler, priority = 100):
'priority': priority,
})
def removeEvent(name, handler):
e = events[name]
e -= handler
def fireEvent(name, *args, **kwargs):
if not events.has_key(name): return
if name not in events: return
#log.debug('Firing event %s', name)
try:
options = {
'is_after_event': False, # Fire after event
'on_complete': False, # onComplete event
'single': False, # Return single handler
'merge': False, # Merge items
'in_order': False, # Fire them in specific order, waits for the other to finish
'is_after_event': False, # Fire after event
'on_complete': False, # onComplete event
'single': False, # Return single handler
'merge': False, # Merge items
'in_order': False, # Fire them in specific order, waits for the other to finish
}
# Do options
@@ -101,11 +100,14 @@ def fireEvent(name, *args, **kwargs):
# Fire
result = e(*args, **kwargs)
result_keys = result.keys()
result_keys.sort(key = natsortKey)
if options['single'] and not options['merge']:
results = None
# Loop over results, stop when first not None result is found.
for r_key in sorted(result.iterkeys(), cmp = natcmp):
for r_key in result_keys:
r = result[r_key]
if r[0] is True and r[1] is not None:
results = r[1]
@@ -117,7 +119,7 @@ def fireEvent(name, *args, **kwargs):
else:
results = []
for r_key in sorted(result.iterkeys(), cmp = natcmp):
for r_key in result_keys:
r = result[r_key]
if r[0] == True and r[1]:
results.append(r[1])
@@ -160,18 +162,21 @@ def fireEvent(name, *args, **kwargs):
except Exception:
log.error('%s: %s', (name, traceback.format_exc()))
def fireEventAsync(*args, **kwargs):
try:
t = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
t.setDaemon(True)
t.start()
return True
except Exception, e:
except Exception as e:
log.error('%s: %s', (args[0], e))
def errorHandler(error):
etype, value, tb = error
log.error(''.join(traceback.format_exception(etype, value, tb)))
def getEvent(name):
return events[name]

View File

@@ -5,29 +5,32 @@ import os
import re
import traceback
import unicodedata
import six
log = CPLog(__name__)
def toSafeString(original):
valid_chars = "-_.() %s%s" % (ascii_letters, digits)
cleanedFilename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
valid_string = ''.join(c for c in cleanedFilename if c in valid_chars)
cleaned_filename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
valid_string = ''.join(c for c in cleaned_filename if c in valid_chars)
return ' '.join(valid_string.split())
def simplifyString(original):
string = stripAccents(original.lower())
string = toSafeString(' '.join(re.split('\W+', string)))
split = re.split('\W+|_', string.lower())
return toUnicode(' '.join(split))
def toUnicode(original, *args):
try:
if isinstance(original, unicode):
return original
else:
try:
return unicode(original, *args)
return six.text_type(original, *args)
except:
try:
return ek(original, *args)
@@ -38,16 +41,18 @@ def toUnicode(original, *args):
ascii_text = str(original).encode('string_escape')
return toUnicode(ascii_text)
def ss(original, *args):
u_original = toUnicode(original, *args)
try:
from couchpotato.environment import Env
return u_original.encode(Env.get('encoding'))
except Exception, e:
except Exception as e:
log.debug('Failed ss encoding char, force UTF8: %s', e)
return u_original.encode('UTF-8')
def sp(path, *args):
# Standardise encoding, normalise case, path and strip trailing '/' or '\'
@@ -58,7 +63,7 @@ def sp(path, *args):
if os.path.sep == '/' and '\\' in path:
path = '/' + path.replace(':', '').replace('\\', '/')
path = os.path.normcase(os.path.normpath(ss(path, *args)))
path = os.path.normpath(ss(path, *args))
# Remove any trailing path separators
if path != os.path.sep:
@@ -73,6 +78,7 @@ def sp(path, *args):
return path
def ek(original, *args):
if isinstance(original, (str, unicode)):
try:
@@ -83,6 +89,7 @@ def ek(original, *args):
return original
def isInt(value):
try:
int(value)
@@ -90,14 +97,16 @@ def isInt(value):
except ValueError:
return False
def stripAccents(s):
    """Return `s` with accents removed: NFD-decompose the unicode form, then
    drop all combining marks (Unicode category 'Mn')."""
    return ''.join((c for c in unicodedata.normalize('NFD', toUnicode(s)) if unicodedata.category(c) != 'Mn'))
def tryUrlencode(s):
new = u''
new = six.u('')
if isinstance(s, dict):
for key, value in s.iteritems():
new += u'&%s=%s' % (key, tryUrlencode(value))
for key, value in s.items():
new += six.u('&%s=%s') % (key, tryUrlencode(value))
return new[1:]
else:

View File

@@ -1,5 +1,5 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import natcmp
from couchpotato.core.helpers.variable import natsortKey
from urllib import unquote
import re
@@ -8,8 +8,13 @@ def getParams(params):
reg = re.compile('^[a-z0-9_\.]+$')
# Sort keys
param_keys = params.keys()
param_keys.sort(key = natsortKey)
temp = {}
for param, value in sorted(params.iteritems()):
for param in param_keys:
value = params[param]
nest = re.split("([\[\]]+)", param)
if len(nest) > 1:
@@ -37,13 +42,17 @@ def getParams(params):
return dictToList(temp)
def dictToList(params):
if type(params) is dict:
new = {}
for x, value in params.iteritems():
for x, value in params.items():
try:
new_value = [dictToList(value[k]) for k in sorted(value.iterkeys(), cmp = natcmp)]
convert = lambda text: int(text) if text.isdigit() else text.lower()
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
sorted_keys = sorted(value.keys(), key = alphanum_key)
new_value = [dictToList(value[k]) for k in sorted_keys]
except:
new_value = value

View File

@@ -3,6 +3,7 @@ import xml.etree.ElementTree as XMLTree
log = CPLog(__name__)
class RSS(object):
def getTextElements(self, xml, path):
@@ -46,6 +47,6 @@ class RSS(object):
def getItems(self, data, path = 'channel/item'):
try:
return XMLTree.parse(data).findall(path)
except Exception, e:
except Exception as e:
log.error('Error parsing RSS. %s', e)
return []

View File

@@ -8,26 +8,32 @@ import random
import re
import string
import sys
import six
from six.moves import map, zip, filter
log = CPLog(__name__)
def fnEscape(pattern):
return pattern.replace('[','[[').replace(']','[]]').replace('[[','[[]')
return pattern.replace('[', '[[').replace(']', '[]]').replace('[[', '[[]')
def link(src, dst):
if os.name == 'nt':
import ctypes
if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
if ctypes.windll.kernel32.CreateHardLinkW(six.text_type(dst), six.text_type(src), 0) == 0: raise ctypes.WinError()
else:
os.link(src, dst)
def symlink(src, dst):
if os.name == 'nt':
import ctypes
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
if ctypes.windll.kernel32.CreateSymbolicLinkW(six.text_type(dst), six.text_type(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
else:
os.symlink(src, dst)
def getUserDir():
try:
import pwd
@@ -37,6 +43,7 @@ def getUserDir():
return os.path.expanduser('~')
def getDownloadDir():
user_dir = getUserDir()
@@ -49,6 +56,7 @@ def getDownloadDir():
return user_dir
def getDataDir():
# Windows
@@ -68,8 +76,10 @@ def getDataDir():
# Linux
return os.path.join(user_dir, '.couchpotato')
def isDict(object):
return isinstance(object, dict)
def isDict(obj):
return isinstance(obj, dict)
def mergeDicts(a, b, prepend_list = False):
assert isDict(a), isDict(b)
@@ -91,6 +101,7 @@ def mergeDicts(a, b, prepend_list = False):
current_dst[key] = current_src[key]
return dst
def removeListDuplicates(seq):
checked = []
for e in seq:
@@ -98,35 +109,73 @@ def removeListDuplicates(seq):
checked.append(e)
return checked
def flattenList(l):
    """Recursively flatten arbitrarily nested lists into one flat list.

    Non-list input is returned unchanged.

    Bug fix: the original ``sum(map(flattenList, l))`` starts from 0, so it
    raised TypeError for any list containing non-numeric items, and for a
    flat list of numbers it returned their numeric sum instead of the list.
    """
    if isinstance(l, list):
        flat = []
        for item in l:
            flattened = flattenList(item)
            # Sub-lists are spliced in; scalar leaves are appended as-is.
            if isinstance(flattened, list):
                flat.extend(flattened)
            else:
                flat.append(flattened)
        return flat
    return l
def md5(text):
    """Return the hex MD5 digest of `text`, byte-encoded via `ss` first."""
    return hashlib.md5(ss(text)).hexdigest()
def sha1(text):
    """Return the hexadecimal SHA-1 digest of `text`."""
    digest = hashlib.sha1(text)
    return digest.hexdigest()
def isLocalIP(ip):
    """True when `ip` looks like a private/loopback address or localhost."""
    # Crude scheme strip: removes any leading run of the chars 'h','t','p','s',':','/'.
    ip = ip.lstrip('htps:/')
    pattern = '/(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1)$/'
    if re.search(pattern, ip) is not None:
        return True
    return 'localhost' in ip or ip[:4] == '127.'
def getExt(filename):
    """Return `filename`'s extension without the leading dot ('' if none)."""
    extension = os.path.splitext(filename)[1]
    return extension[1:]
def cleanHost(host):
if not host.startswith(('http://', 'https://')):
host = 'http://' + host
host = host.rstrip('/')
host += '/'
def cleanHost(host, protocol = True, ssl = False, username = None, password = None):
    """Return a cleaned up host with given url options set

    Changes protocol to https if ssl is set to True and http if ssl is set to false.
    >>> cleanHost("localhost:80", ssl=True)
    'https://localhost:80/'
    >>> cleanHost("localhost:80", ssl=False)
    'http://localhost:80/'

    Username and password is managed with the username and password variables
    >>> cleanHost("localhost:80", username="user", password="passwd")
    'http://user:passwd@localhost:80/'

    Output without scheme (protocol) can be forced with protocol=False
    >>> cleanHost("localhost:80", protocol=False)
    'localhost:80'
    """
    # Prepend a scheme when none is present and one is wanted.
    if protocol and '://' not in host:
        host = ('https://' if ssl else 'http://') + host

    # Or strip whatever scheme is there when the caller wants a bare host.
    if not protocol:
        host = host.split('://', 1)[-1]

    # Inject basic-auth credentials, unless the url already carries some.
    if protocol and username and password:
        try:
            existing_auth = re.findall('^(?:.+?//)(.+?):(.+?)@(?:.+)$', host)
            if existing_auth:
                log.error('Cleanhost error: auth already defined in url: %s, please remove BasicAuth from url.', host)
            else:
                host = host.replace('://', '://%s:%s@' % (username, password), 1)
        except:
            pass

    host = host.rstrip('/ ')
    if protocol:
        host += '/'

    return host
def getImdb(txt, check_inside = False, multiple = False):
if not check_inside:
@@ -143,7 +192,7 @@ def getImdb(txt, check_inside = False, multiple = False):
ids = re.findall('(tt\d{4,7})', txt)
if multiple:
return list(set(['tt%07d' % tryInt(x[2:]) for x in ids])) if len(ids) > 0 else []
return removeDuplicate(['tt%07d' % tryInt(x[2:]) for x in ids]) if len(ids) > 0 else []
return 'tt%07d' % tryInt(ids[0][2:])
except IndexError:
@@ -151,10 +200,12 @@ def getImdb(txt, check_inside = False, multiple = False):
return False
def tryInt(s, default = 0):
    """Best-effort int(); give back `default` when conversion fails."""
    try:
        result = int(s)
    except:
        result = default
    return result
def tryFloat(s):
try:
if isinstance(s, str):
@@ -163,17 +214,17 @@ def tryFloat(s):
return float(s)
except: return 0
def natsortKey(s):
return map(tryInt, re.findall(r'(\d+|\D+)', s))
def natsortKey(string_):
"""See http://www.codinghorror.com/blog/archives/001018.html"""
return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_)]
def natcmp(a, b):
    """Natural-order comparison of two strings (Python 2 ``cmp`` semantics:
    -1, 0 or 1), delegating key extraction to `natsortKey`."""
    return cmp(natsortKey(a), natsortKey(b))
def toIterable(value):
    """Return `value` unchanged if it is iterable, else wrap it in a list.

    NOTE(review): ``collections.Iterable`` was removed in Python 3.10; this
    must become ``collections.abc.Iterable`` if the codebase moves to Python 3.
    """
    if isinstance(value, collections.Iterable):
        return value
    return [value]
def getTitle(library_dict):
try:
try:
@@ -196,6 +247,7 @@ def getTitle(library_dict):
log.error('Could not get title for library item: %s', library_dict)
return None
def possibleTitles(raw_title):
titles = [
@@ -208,18 +260,42 @@ def possibleTitles(raw_title):
new_title = raw_title.replace('&', 'and')
titles.append(simplifyString(new_title))
return list(set(titles))
return removeDuplicate(titles)
def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
    """Return a random string of `size` characters drawn from `chars`."""
    picked = [random.choice(chars) for _ in range(size)]
    return ''.join(picked)
def splitString(str, split_on = ',', clean = True):
list = [x.strip() for x in str.split(split_on)] if str else []
return filter(None, list) if clean else list
l = [x.strip() for x in str.split(split_on)] if str else []
return removeEmpty(l) if clean else l
def removeEmpty(l):
    """Return a list with all falsy items of `l` dropped."""
    return [item for item in l if item]
def removeDuplicate(l):
    """Return `l` with duplicates removed, keeping first-seen order."""
    result = []
    seen = set()
    for item in l:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
def dictIsSubset(a, b):
    """True when every key/value pair of `a` also appears in `b`."""
    for key, value in a.items():
        if key not in b or b[key] != value:
            return False
    return True
def isSubFolder(sub_folder, base_folder):
# Returns True is sub_folder is the same as or in base_folder
return base_folder.rstrip(os.path.sep) + os.path.sep in sub_folder.rstrip(os.path.sep) + os.path.sep
# Returns True if sub_folder is the same as or inside base_folder
return base_folder and sub_folder and ss(os.path.normpath(base_folder).rstrip(os.path.sep) + os.path.sep) in ss(os.path.normpath(sub_folder).rstrip(os.path.sep) + os.path.sep)
# From SABNZBD
re_password = [
    re.compile(r'([^/\\]+)[/\\](.+)'),
    re.compile(r'(.+){{([^{}]+)}}$'),
    re.compile(r'(.+)\s+password\s*=\s*(.+)$', re.I),
]


def scanForPassword(name):
    """Split `name` into ``(release_name, password)`` using the SABnzbd
    password notations (``name/pass``, ``name{{pass}}``, ``name password=pass``);
    return None when no password is embedded."""
    for pattern in re_password:
        match = pattern.search(name)
        if match:
            # Trim stray dots/spaces around the release name, whitespace around the password.
            return match.group(1).strip('. '), match.group(2).strip()

View File

@@ -1,9 +1,10 @@
from couchpotato.core.event import fireEvent
from couchpotato.core.logger import CPLog
from importlib import import_module
from importhelper import import_module
import os
import sys
import traceback
import six
log = CPLog(__name__)
@@ -37,7 +38,7 @@ class Loader(object):
self.paths['custom_plugins'] = (30, '', custom_plugin_dir)
# Loop over all paths and add to module list
for plugin_type, plugin_tuple in self.paths.iteritems():
for plugin_type, plugin_tuple in self.paths.items():
priority, module, dir_name = plugin_tuple
self.addFromDir(plugin_type, priority, module, dir_name)
@@ -45,7 +46,7 @@ class Loader(object):
did_save = 0
for priority in sorted(self.modules):
for module_name, plugin in sorted(self.modules[priority].iteritems()):
for module_name, plugin in sorted(self.modules[priority].items()):
# Load module
try:
@@ -81,7 +82,7 @@ class Loader(object):
for filename in os.listdir(root_path):
path = os.path.join(root_path, filename)
if os.path.isdir(path) and filename[:2] != '__':
if u'__init__.py' in os.listdir(path):
if six.u('__init__.py') in os.listdir(path):
new_base_path = ''.join(s + '.' for s in base_path) + filename
self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)

View File

@@ -1,6 +1,7 @@
import logging
import re
class CPLog(object):
context = ''
@@ -37,7 +38,7 @@ class CPLog(object):
def safeMessage(self, msg, replace_tuple = ()):
from couchpotato.environment import Env
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.encoding import ss, toUnicode
msg = ss(msg)
@@ -49,8 +50,8 @@ class CPLog(object):
msg = msg % tuple([ss(x) for x in list(replace_tuple)])
else:
msg = msg % ss(replace_tuple)
except Exception, e:
self.logger.error(u'Failed encoding stuff to log "%s": %s' % (msg, e))
except Exception as e:
self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, e))
if not Env.get('dev'):
@@ -66,4 +67,4 @@ class CPLog(object):
except:
pass
return msg
return toUnicode(msg)

View File

@@ -1,8 +1,11 @@
from couchpotato import get_session
import traceback
from couchpotato import get_session, CPLog
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Media
log = CPLog(__name__)
class MediaBase(Plugin):
@@ -10,8 +13,8 @@ class MediaBase(Plugin):
default_dict = {
'profile': {'types': {'quality': {}}},
'releases': {'status': {}, 'quality': {}, 'files':{}, 'info': {}},
'library': {'titles': {}, 'files':{}},
'releases': {'status': {}, 'quality': {}, 'files': {}, 'info': {}},
'library': {'titles': {}, 'files': {}},
'files': {},
'status': {},
'category': {},
@@ -26,19 +29,33 @@ class MediaBase(Plugin):
def createOnComplete(self, id):
def onComplete():
db = get_session()
media = db.query(Media).filter_by(id = id).first()
fireEventAsync('%s.searcher.single' % media.type, media.to_dict(self.default_dict), on_complete = self.createNotifyFront(id))
db.expire_all()
try:
db = get_session()
media = db.query(Media).filter_by(id = id).first()
media_dict = media.to_dict(self.default_dict)
event_name = '%s.searcher.single' % media.type
fireEvent(event_name, media_dict, on_complete = self.createNotifyFront(id))
except:
log.error('Failed creating onComplete: %s', traceback.format_exc())
finally:
db.close()
return onComplete
def createNotifyFront(self, media_id):
def notifyFront():
db = get_session()
media = db.query(Media).filter_by(id = media_id).first()
fireEvent('notify.frontend', type = '%s.update' % media.type, data = media.to_dict(self.default_dict))
db.expire_all()
try:
db = get_session()
media = db.query(Media).filter_by(id = media_id).first()
media_dict = media.to_dict(self.default_dict)
event_name = '%s.update' % media.type
fireEvent('notify.frontend', type = event_name, data = media_dict)
except:
log.error('Failed creating onComplete: %s', traceback.format_exc())
finally:
db.close()
return notifyFront

View File

@@ -1,5 +1,6 @@
from .main import MediaPlugin
def start():
return MediaPlugin()

View File

@@ -1,8 +1,9 @@
from couchpotato import get_session
import traceback
from couchpotato import get_session, tryInt
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import mergeDicts, splitString, getImdb
from couchpotato.core.helpers.variable import mergeDicts, splitString, getImdb, getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.media import MediaBase
from couchpotato.core.settings.model import Library, LibraryTitle, Release, \
@@ -70,26 +71,39 @@ class MediaPlugin(MediaBase):
addEvent('media.restatus', self.restatus)
def refresh(self, id = '', **kwargs):
db = get_session()
handlers = []
ids = splitString(id)
for x in splitString(id):
media = db.query(Media).filter_by(id = x).first()
for x in ids:
if media:
# Get current selected title
default_title = ''
for title in media.library.titles:
if title.default: default_title = title.title
refresh_handler = self.createRefreshHandler(x)
if refresh_handler:
handlers.append(refresh_handler)
fireEvent('notify.frontend', type = '%s.busy' % media.type, data = {'id': x})
fireEventAsync('library.update.%s' % media.type, identifier = media.library.identifier, default_title = default_title, force = True, on_complete = self.createOnComplete(x))
db.expire_all()
fireEvent('notify.frontend', type = 'media.busy', data = {'id': [tryInt(x) for x in ids]})
fireEventAsync('schedule.queue', handlers = handlers)
return {
'success': True,
}
def createRefreshHandler(self, id):
db = get_session()
media = db.query(Media).filter_by(id = id).first()
if media:
default_title = getTitle(media.library)
identifier = media.library.identifier
event = 'library.update.%s' % media.type
def handler():
fireEvent(event, identifier = identifier, default_title = default_title, on_complete = self.createOnComplete(id))
if handler:
return handler
def addSingleRefreshView(self):
for media_type in fireEvent('media.types', merge = True):
@@ -110,7 +124,6 @@ class MediaPlugin(MediaBase):
if m:
results = m.to_dict(self.default_dict)
db.expire_all()
return results
def getView(self, id = None, **kwargs):
@@ -239,14 +252,13 @@ class MediaPlugin(MediaBase):
# Merge releases with movie dict
movies.append(mergeDicts(movie_dict[media_id].to_dict({
'library': {'titles': {}, 'files':{}},
'library': {'titles': {}, 'files': {}},
'files': {},
}), {
'releases': releases,
'releases_count': releases_count.get(media_id),
}))
db.expire_all()
return total_count, movies
def listView(self, **kwargs):
@@ -340,7 +352,6 @@ class MediaPlugin(MediaBase):
if len(chars) == 25:
break
db.expire_all()
return ''.join(sorted(chars))
def charView(self, **kwargs):
@@ -365,50 +376,55 @@ class MediaPlugin(MediaBase):
def delete(self, media_id, delete_from = None):
db = get_session()
try:
db = get_session()
media = db.query(Media).filter_by(id = media_id).first()
if media:
deleted = False
if delete_from == 'all':
db.delete(media)
db.commit()
deleted = True
else:
done_status = fireEvent('status.get', 'done', single = True)
total_releases = len(media.releases)
total_deleted = 0
new_movie_status = None
for release in media.releases:
if delete_from in ['wanted', 'snatched', 'late']:
if release.status_id != done_status.get('id'):
db.delete(release)
total_deleted += 1
new_movie_status = 'done'
elif delete_from == 'manage':
if release.status_id == done_status.get('id'):
db.delete(release)
total_deleted += 1
new_movie_status = 'active'
db.commit()
if total_releases == total_deleted:
media = db.query(Media).filter_by(id = media_id).first()
if media:
deleted = False
if delete_from == 'all':
db.delete(media)
db.commit()
deleted = True
elif new_movie_status:
new_status = fireEvent('status.get', new_movie_status, single = True)
media.profile_id = None
media.status_id = new_status.get('id')
db.commit()
else:
fireEvent('media.restatus', media.id, single = True)
done_status = fireEvent('status.get', 'done', single = True)
if deleted:
fireEvent('notify.frontend', type = 'movie.deleted', data = media.to_dict())
total_releases = len(media.releases)
total_deleted = 0
new_movie_status = None
for release in media.releases:
if delete_from in ['wanted', 'snatched', 'late']:
if release.status_id != done_status.get('id'):
db.delete(release)
total_deleted += 1
new_movie_status = 'done'
elif delete_from == 'manage':
if release.status_id == done_status.get('id'):
db.delete(release)
total_deleted += 1
new_movie_status = 'active'
db.commit()
if total_releases == total_deleted:
db.delete(media)
db.commit()
deleted = True
elif new_movie_status:
new_status = fireEvent('status.get', new_movie_status, single = True)
media.profile_id = None
media.status_id = new_status.get('id')
db.commit()
else:
fireEvent('media.restatus', media.id, single = True)
if deleted:
fireEvent('notify.frontend', type = 'movie.deleted', data = media.to_dict())
except:
log.error('Failed deleting media: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
db.expire_all()
return True
def deleteView(self, id = '', **kwargs):
@@ -432,27 +448,33 @@ class MediaPlugin(MediaBase):
active_status, done_status = fireEvent('status.get', ['active', 'done'], single = True)
db = get_session()
try:
db = get_session()
m = db.query(Media).filter_by(id = media_id).first()
if not m or len(m.library.titles) == 0:
log.debug('Can\'t restatus movie, doesn\'t seem to exist.')
return False
m = db.query(Media).filter_by(id = media_id).first()
if not m or len(m.library.titles) == 0:
log.debug('Can\'t restatus movie, doesn\'t seem to exist.')
return False
log.debug('Changing status for %s', m.library.titles[0].title)
if not m.profile:
m.status_id = done_status.get('id')
else:
move_to_wanted = True
log.debug('Changing status for %s', m.library.titles[0].title)
if not m.profile:
m.status_id = done_status.get('id')
else:
move_to_wanted = True
for t in m.profile.types:
for release in m.releases:
if t.quality.identifier is release.quality.identifier and (release.status_id is done_status.get('id') and t.finish):
move_to_wanted = False
for t in m.profile.types:
for release in m.releases:
if t.quality.identifier is release.quality.identifier and (release.status_id is done_status.get('id') and t.finish):
move_to_wanted = False
m.status_id = active_status.get('id') if move_to_wanted else done_status.get('id')
m.status_id = active_status.get('id') if move_to_wanted else done_status.get('id')
db.commit()
db.commit()
return True
return True
except:
log.error('Failed restatus: %s', traceback.format_exc())
db.rollback()
finally:
db.close()

View File

@@ -1,5 +1,6 @@
from .main import Search
def start():
return Search()

View File

@@ -1,5 +1,6 @@
from .main import Searcher
def start():
return Searcher()

View File

@@ -12,7 +12,6 @@ class SearcherBase(Plugin):
def __init__(self):
super(SearcherBase, self).__init__()
addEvent('searcher.progress', self.getProgress)
addEvent('%s.searcher.progress' % self.getType(), self.getProgress)
@@ -26,9 +25,8 @@ class SearcherBase(Plugin):
_type = self.getType()
def setCrons():
fireEvent('schedule.cron', '%s.searcher.all' % _type, self.searchAll,
day = self.conf('cron_day'), hour = self.conf('cron_hour'), minute = self.conf('cron_minute'))
day = self.conf('cron_day'), hour = self.conf('cron_hour'), minute = self.conf('cron_minute'))
addEvent('app.load', setCrons)
addEvent('setting.save.%s_searcher.cron_day.after' % _type, setCrons)

View File

@@ -1,7 +1,7 @@
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import simplifyString
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.helpers.variable import splitString, removeEmpty, removeDuplicate
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.searcher.base import SearcherBase
import datetime
@@ -107,10 +107,10 @@ class Searcher(SearcherBase):
# Hack for older movies that don't contain quality tag
year_name = fireEvent('scanner.name_year', name, single = True)
if len(found) == 0 and movie_year < datetime.datetime.now().year - 3 and not year_name.get('year', None):
if size > 3000: # Assume dvdr
if size > 3000: # Assume dvdr
log.info('Quality was missing in name, assuming it\'s a DVD-R based on the size: %s', size)
found['dvdr'] = True
else: # Assume dvdrip
else: # Assume dvdrip
log.info('Quality was missing in name, assuming it\'s a DVD-Rip based on the size: %s', size)
found['dvdrip'] = True
@@ -150,12 +150,12 @@ class Searcher(SearcherBase):
try: check_names.append(max(re.findall(r'[^[]*\[([^]]*)\]', check_name), key = len).strip())
except: pass
for check_name in list(set(check_names)):
for check_name in removeDuplicate(check_names):
check_movie = fireEvent('scanner.name_year', check_name, single = True)
try:
check_words = filter(None, re.split('\W+', check_movie.get('name', '')))
movie_words = filter(None, re.split('\W+', simplifyString(movie_name)))
check_words = removeEmpty(re.split('\W+', check_movie.get('name', '')))
movie_words = removeEmpty(re.split('\W+', simplifyString(movie_name)))
if len(check_words) > 0 and len(movie_words) > 0 and len(list(set(check_words) - set(movie_words))) == 0:
return True
@@ -173,7 +173,7 @@ class Searcher(SearcherBase):
# Make sure it has required words
required_words = splitString(self.conf('required_words', section = 'searcher').lower())
try: required_words = list(set(required_words + splitString(media['category']['required'].lower())))
try: required_words = removeDuplicate(required_words + splitString(media['category']['required'].lower()))
except: pass
req_match = 0
@@ -187,7 +187,7 @@ class Searcher(SearcherBase):
# Ignore releases
ignored_words = splitString(self.conf('ignored_words', section = 'searcher').lower())
try: ignored_words = list(set(ignored_words + splitString(media['category']['ignored'].lower())))
try: ignored_words = removeDuplicate(ignored_words + splitString(media['category']['ignored'].lower()))
except: pass
ignored_match = 0

View File

@@ -1,5 +1,6 @@
from .main import MovieBase
def start():
return MovieBase()

View File

@@ -1,8 +1,9 @@
import traceback
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString, tryInt
from couchpotato.core.helpers.variable import splitString, tryInt, getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie import MovieTypeBase
from couchpotato.core.settings.model import Media
@@ -61,7 +62,6 @@ class MovieBase(MovieTypeBase):
except:
pass
library = fireEvent('library.add.movie', single = True, attrs = params, update_after = update_library)
# Status
@@ -71,68 +71,81 @@ class MovieBase(MovieTypeBase):
default_profile = fireEvent('profile.default', single = True)
cat_id = params.get('category_id')
db = get_session()
m = db.query(Media).filter_by(library_id = library.get('id')).first()
added = True
do_search = False
search_after = search_after and self.conf('search_on_add', section = 'moviesearcher')
if not m:
m = Media(
library_id = library.get('id'),
profile_id = params.get('profile_id', default_profile.get('id')),
status_id = status_id if status_id else status_active.get('id'),
category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else None,
)
db.add(m)
db.commit()
onComplete = None
if search_after:
onComplete = self.createOnComplete(m.id)
fireEventAsync('library.update.movie', params.get('identifier'), default_title = params.get('title', ''), on_complete = onComplete)
search_after = False
elif force_readd:
# Clean snatched history
for release in m.releases:
if release.status_id in [downloaded_status.get('id'), snatched_status.get('id'), done_status.get('id')]:
if params.get('ignore_previous', False):
release.status_id = ignored_status.get('id')
else:
fireEvent('release.delete', release.id, single = True)
m.profile_id = params.get('profile_id', default_profile.get('id'))
m.category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else (m.category_id or None)
else:
log.debug('Movie already exists, not updating: %s', params)
added = False
if force_readd:
m.status_id = status_id if status_id else status_active.get('id')
m.last_edit = int(time.time())
do_search = True
db.commit()
# Remove releases
available_status = fireEvent('status.get', 'available', single = True)
for rel in m.releases:
if rel.status_id is available_status.get('id'):
db.delete(rel)
try:
db = get_session()
m = db.query(Media).filter_by(library_id = library.get('id')).first()
added = True
do_search = False
search_after = search_after and self.conf('search_on_add', section = 'moviesearcher')
if not m:
m = Media(
library_id = library.get('id'),
profile_id = params.get('profile_id', default_profile.get('id')),
status_id = status_id if status_id else status_active.get('id'),
category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else None,
)
db.add(m)
db.commit()
movie_dict = m.to_dict(self.default_dict)
onComplete = None
if search_after:
onComplete = self.createOnComplete(m.id)
if do_search and search_after:
onComplete = self.createOnComplete(m.id)
onComplete()
fireEventAsync('library.update.movie', params.get('identifier'), default_title = params.get('title', ''), on_complete = onComplete)
search_after = False
elif force_readd:
if added:
fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = 'Successfully added "%s" to your wanted list.' % params.get('title', ''))
# Clean snatched history
for release in m.releases:
if release.status_id in [downloaded_status.get('id'), snatched_status.get('id'), done_status.get('id')]:
if params.get('ignore_previous', False):
release.status_id = ignored_status.get('id')
else:
fireEvent('release.delete', release.id, single = True)
db.expire_all()
return movie_dict
m.profile_id = params.get('profile_id', default_profile.get('id'))
m.category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else (m.category_id or None)
else:
log.debug('Movie already exists, not updating: %s', params)
added = False
if force_readd:
m.status_id = status_id if status_id else status_active.get('id')
m.last_edit = int(time.time())
do_search = True
db.commit()
# Remove releases
available_status = fireEvent('status.get', 'available', single = True)
for rel in m.releases:
if rel.status_id is available_status.get('id'):
db.delete(rel)
db.commit()
movie_dict = m.to_dict(self.default_dict)
if do_search and search_after:
onComplete = self.createOnComplete(m.id)
onComplete()
if added:
if params.get('title'):
message = 'Successfully added "%s" to your wanted list.' % params.get('title', '')
else:
title = getTitle(m.library)
if title:
message = 'Successfully added "%s" to your wanted list.' % title
else:
message = 'Succesfully added to your wanted list.'
fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = message)
return movie_dict
except:
log.error('Failed deleting media: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
def addView(self, **kwargs):
add_dict = self.add(params = kwargs)
@@ -144,42 +157,51 @@ class MovieBase(MovieTypeBase):
def edit(self, id = '', **kwargs):
db = get_session()
try:
db = get_session()
available_status = fireEvent('status.get', 'available', single = True)
available_status = fireEvent('status.get', 'available', single = True)
ids = splitString(id)
for media_id in ids:
ids = splitString(id)
for media_id in ids:
m = db.query(Media).filter_by(id = media_id).first()
if not m:
continue
m = db.query(Media).filter_by(id = media_id).first()
if not m:
continue
m.profile_id = kwargs.get('profile_id')
m.profile_id = kwargs.get('profile_id')
cat_id = kwargs.get('category_id')
if cat_id is not None:
m.category_id = tryInt(cat_id) if tryInt(cat_id) > 0 else None
cat_id = kwargs.get('category_id')
if cat_id is not None:
m.category_id = tryInt(cat_id) if tryInt(cat_id) > 0 else None
# Remove releases
for rel in m.releases:
if rel.status_id is available_status.get('id'):
db.delete(rel)
db.commit()
# Remove releases
for rel in m.releases:
if rel.status_id is available_status.get('id'):
db.delete(rel)
db.commit()
# Default title
if kwargs.get('default_title'):
for title in m.library.titles:
title.default = toUnicode(kwargs.get('default_title', '')).lower() == toUnicode(title.title).lower()
# Default title
if kwargs.get('default_title'):
for title in m.library.titles:
title.default = toUnicode(kwargs.get('default_title', '')).lower() == toUnicode(title.title).lower()
db.commit()
db.commit()
fireEvent('media.restatus', m.id)
fireEvent('media.restatus', m.id)
movie_dict = m.to_dict(self.default_dict)
fireEventAsync('movie.searcher.single', movie_dict, on_complete = self.createNotifyFront(media_id))
movie_dict = m.to_dict(self.default_dict)
fireEventAsync('movie.searcher.single', movie_dict, on_complete = self.createNotifyFront(media_id))
return {
'success': True,
}
except:
log.error('Failed deleting media: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
db.expire_all()
return {
'success': True,
'success': False,
}

View File

@@ -36,10 +36,10 @@ var Movie = new Class({
App.on('movie.update', self.global_events['movie.update']);
// Add spinner on load / search
['movie.busy', 'movie.searcher.started'].each(function(listener){
['media.busy', 'movie.searcher.started'].each(function(listener){
self.global_events[listener] = function(notification){
if(notification.data && self.data.id == notification.data.id)
self.busy(true)
if(notification.data && (self.data.id == notification.data.id || (typeOf(notification.data.id) == 'array' && notification.data.id.indexOf(self.data.id) > -1)))
self.busy(true);
}
App.on(listener, self.global_events[listener]);
})
@@ -329,4 +329,4 @@ var Movie = new Class({
return this.el;
}
});
});

View File

@@ -1,5 +1,6 @@
from .main import MovieLibraryPlugin
def start():
return MovieLibraryPlugin()

View File

@@ -7,13 +7,14 @@ from couchpotato.core.settings.model import Library, LibraryTitle, File
from string import ascii_letters
import time
import traceback
import six
log = CPLog(__name__)
class MovieLibraryPlugin(LibraryBase):
default_dict = {'titles': {}, 'files':{}}
default_dict = {'titles': {}, 'files': {}}
def __init__(self):
addEvent('library.add.movie', self.add)
@@ -25,69 +26,70 @@ class MovieLibraryPlugin(LibraryBase):
primary_provider = attrs.get('primary_provider', 'imdb')
db = get_session()
try:
db = get_session()
l = db.query(Library).filter_by(identifier = attrs.get('identifier')).first()
if not l:
status = fireEvent('status.get', 'needs_update', single = True)
l = Library(
year = attrs.get('year'),
identifier = attrs.get('identifier'),
plot = toUnicode(attrs.get('plot')),
tagline = toUnicode(attrs.get('tagline')),
status_id = status.get('id'),
info = {}
)
l = db.query(Library).filter_by(identifier = attrs.get('identifier')).first()
if not l:
status = fireEvent('status.get', 'needs_update', single = True)
l = Library(
year = attrs.get('year'),
identifier = attrs.get('identifier'),
plot = toUnicode(attrs.get('plot')),
tagline = toUnicode(attrs.get('tagline')),
status_id = status.get('id'),
info = {}
)
title = LibraryTitle(
title = toUnicode(attrs.get('title')),
simple_title = self.simplifyTitle(attrs.get('title')),
)
title = LibraryTitle(
title = toUnicode(attrs.get('title')),
simple_title = self.simplifyTitle(attrs.get('title')),
)
l.titles.append(title)
l.titles.append(title)
db.add(l)
db.commit()
db.add(l)
db.commit()
# Update library info
if update_after is not False:
handle = fireEventAsync if update_after is 'async' else fireEvent
handle('library.update.movie', identifier = l.identifier, default_title = toUnicode(attrs.get('title', '')))
# Update library info
if update_after is not False:
handle = fireEventAsync if update_after is 'async' else fireEvent
handle('library.update.movie', identifier = l.identifier, default_title = toUnicode(attrs.get('title', '')))
library_dict = l.to_dict(self.default_dict)
library_dict = l.to_dict(self.default_dict)
return library_dict
except:
log.error('Failed adding media: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
db.expire_all()
return library_dict
return {}
def update(self, identifier, default_title = '', force = False):
def update(self, identifier, default_title = '', extended = False):
if self.shuttingDown():
return
db = get_session()
library = db.query(Library).filter_by(identifier = identifier).first()
done_status = fireEvent('status.get', 'done', single = True)
try:
db = get_session()
library_dict = None
if library:
library_dict = library.to_dict(self.default_dict)
library = db.query(Library).filter_by(identifier = identifier).first()
done_status = fireEvent('status.get', 'done', single = True)
do_update = True
info = fireEvent('movie.info', merge = True, extended = extended, identifier = identifier)
info = fireEvent('movie.info', merge = True, identifier = identifier)
# Don't need those here
try: del info['in_wanted']
except: pass
try: del info['in_library']
except: pass
# Don't need those here
try: del info['in_wanted']
except: pass
try: del info['in_library']
except: pass
if not info or len(info) == 0:
log.error('Could not update, no movie info to work with: %s', identifier)
return False
if not info or len(info) == 0:
log.error('Could not update, no movie info to work with: %s', identifier)
return False
# Main info
if do_update:
# Main info
library.plot = toUnicode(info.get('plot', ''))
library.tagline = toUnicode(info.get('tagline', ''))
library.year = info.get('year', 0)
@@ -102,6 +104,17 @@ class MovieLibraryPlugin(LibraryBase):
titles = info.get('titles', [])
log.debug('Adding titles: %s', titles)
counter = 0
def_title = None
for title in titles:
if (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == six.u('') and toUnicode(titles[0]) == title):
def_title = toUnicode(title)
break
counter += 1
if not def_title:
def_title = toUnicode(titles[0])
for title in titles:
if not title:
continue
@@ -109,10 +122,9 @@ class MovieLibraryPlugin(LibraryBase):
t = LibraryTitle(
title = title,
simple_title = self.simplifyTitle(title),
default = (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == u'' and toUnicode(titles[0]) == title)
default = title == def_title
)
library.titles.append(t)
counter += 1
db.commit()
@@ -134,30 +146,43 @@ class MovieLibraryPlugin(LibraryBase):
break
except:
log.debug('Failed to attach to library: %s', traceback.format_exc())
db.rollback()
library_dict = library.to_dict(self.default_dict)
return library_dict
except:
log.error('Failed update media: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
db.expire_all()
return library_dict
return {}
def updateReleaseDate(self, identifier):
db = get_session()
library = db.query(Library).filter_by(identifier = identifier).first()
try:
db = get_session()
library = db.query(Library).filter_by(identifier = identifier).first()
if not library.info:
library_dict = self.update(identifier, force = True)
dates = library_dict.get('info', {}).get('release_date')
else:
dates = library.info.get('release_date')
if not library.info:
library_dict = self.update(identifier)
dates = library_dict.get('info', {}).get('release_date')
else:
dates = library.info.get('release_date')
if dates and (dates.get('expires', 0) < time.time() or dates.get('expires', 0) > time.time() + (604800 * 4)) or not dates:
dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
library.info.update({'release_date': dates })
db.commit()
if dates and (dates.get('expires', 0) < time.time() or dates.get('expires', 0) > time.time() + (604800 * 4)) or not dates:
dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
library.info.update({'release_date': dates})
db.commit()
db.expire_all()
return dates
return dates
except:
log.error('Failed updating release dates: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
return {}
def simplifyTitle(self, title):

View File

@@ -1,6 +1,7 @@
from .main import MovieSearcher
import random
def start():
return MovieSearcher()

View File

@@ -73,10 +73,21 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
db = get_session()
movies = db.query(Media).filter(
movies_raw = db.query(Media).filter(
Media.status.has(identifier = 'active')
).all()
random.shuffle(movies)
random.shuffle(movies_raw)
movies = []
for m in movies_raw:
movies.append(m.to_dict({
'category': {},
'profile': {'types': {'quality': {}}},
'releases': {'status': {}, 'quality': {}},
'library': {'titles': {}, 'files': {}},
'files': {},
}))
self.in_progress = {
'total': len(movies),
@@ -87,21 +98,14 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
search_protocols = fireEvent('searcher.protocols', single = True)
for movie in movies:
movie_dict = movie.to_dict({
'category': {},
'profile': {'types': {'quality': {}}},
'releases': {'status': {}, 'quality': {}},
'library': {'titles': {}, 'files':{}},
'files': {},
})
try:
self.single(movie_dict, search_protocols)
self.single(movie, search_protocols)
except IndexError:
log.error('Forcing library update for %s, if you see this often, please report: %s', (movie_dict['library']['identifier'], traceback.format_exc()))
fireEvent('library.update.movie', movie_dict['library']['identifier'], force = True)
log.error('Forcing library update for %s, if you see this often, please report: %s', (movie['library']['identifier'], traceback.format_exc()))
fireEvent('library.update.movie', movie['library']['identifier'])
except:
log.error('Search failed for %s: %s', (movie_dict['library']['identifier'], traceback.format_exc()))
log.error('Search failed for %s: %s', (movie['library']['identifier'], traceback.format_exc()))
self.in_progress['to_go'] -= 1
@@ -117,7 +121,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
def single(self, movie, search_protocols = None, manual = False):
# movies don't contain 'type' yet, so just set to default here
if not movie.has_key('type'):
if 'type' not in movie:
movie['type'] = 'movie'
# Find out search type
@@ -133,8 +137,6 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
log.debug('Movie doesn\'t have a profile or already done, assuming in manage tab.')
return
db = get_session()
pre_releases = fireEvent('quality.pre_releases', single = True)
release_dates = fireEvent('library.update.movie.release_date', identifier = movie['library']['identifier'], merge = True)
available_status, ignored_status, failed_status = fireEvent('status.get', ['available', 'ignored', 'failed'], single = True)
@@ -150,6 +152,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
fireEvent('notify.frontend', type = 'movie.searcher.started', data = {'id': movie['id']}, message = 'Searching for "%s"' % default_title)
db = get_session()
ret = False
for quality_type in movie['profile']['types']:
@@ -279,13 +282,15 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
now = int(time.time())
now_year = date.today().year
now_month = date.today().month
if (year is None or year < now_year - 1) and (not dates or (dates.get('theater', 0) == 0 and dates.get('dvd', 0) == 0)):
return True
else:
# Don't allow movies with years to far in the future
if year is not None and year > now_year + 1:
add_year = 1 if now_month > 10 else 0 # Only allow +1 year if end of the year
if year is not None and year > (now_year + add_year):
return False
# For movies before 1972
@@ -345,7 +350,10 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
except:
log.error('Failed searching for next release: %s', traceback.format_exc())
db.rollback()
return False
finally:
db.close()
def getSearchTitle(self, media):
if media['type'] == 'movie':

View File

@@ -1,5 +1,6 @@
from .main import Suggestion
def start():
return Suggestion()

View File

@@ -1,7 +1,7 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.helpers.variable import splitString, removeDuplicate
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Media, Library
from couchpotato.environment import Env
@@ -40,7 +40,7 @@ class Suggestion(Plugin):
movies.extend(splitString(Env.prop('suggest_seen', default = '')))
suggestions = fireEvent('movie.suggest', movies = movies, ignore = ignored, single = True)
self.setCache('suggestion_cached', suggestions, timeout = 6048000) # Cache for 10 weeks
self.setCache('suggestion_cached', suggestions, timeout = 6048000) # Cache for 10 weeks
return {
'success': True,
@@ -79,8 +79,10 @@ class Suggestion(Plugin):
seen = [] if not seen else seen
if ignore_imdb:
suggested_imdbs = []
for cs in cached_suggestion:
if cs.get('imdb') != ignore_imdb:
if cs.get('imdb') != ignore_imdb and cs.get('imdb') not in suggested_imdbs:
suggested_imdbs.append(cs.get('imdb'))
new_suggestions.append(cs)
# Get new results and add them
@@ -97,7 +99,7 @@ class Suggestion(Plugin):
movies.extend(seen)
ignored.extend([x.get('imdb') for x in cached_suggestion])
suggestions = fireEvent('movie.suggest', movies = movies, ignore = list(set(ignored)), single = True)
suggestions = fireEvent('movie.suggest', movies = movies, ignore = removeDuplicate(ignored), single = True)
if suggestions:
new_suggestions.extend(suggestions)

View File

@@ -101,7 +101,7 @@ var SuggestList = new Class({
// Add rating
m.info_container.adopt(
m.rating = m.info.rating && m.info.rating.imdb.length == 2 && parseFloat(m.info.rating.imdb[0]) > 0 ? new Element('span.rating', {
m.rating = m.info.rating && m.info.rating.imdb && m.info.rating.imdb.length == 2 && parseFloat(m.info.rating.imdb[0]) > 0 ? new Element('span.rating', {
'text': parseFloat(m.info.rating.imdb[0]),
'title': parseInt(m.info.rating.imdb[1]) + ' votes'
}) : null,

View File

@@ -13,5 +13,6 @@ def upgrade(migrate_engine):
create_column(category_column, movie)
Index('ix_movie_category_id', movie.c.category_id).create()
def downgrade(migrate_engine):
pass

View File

@@ -1,5 +1,6 @@
from .main import Boxcar
def start():
return Boxcar()

View File

@@ -0,0 +1,34 @@
from .main import Boxcar2
def start():
return Boxcar2()
config = [{
'name': 'boxcar2',
'groups': [
{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'boxcar2',
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
},
{
'name': 'token',
'description': ('Your Boxcar access token.', 'Can be found in the app under settings')
},
{
'name': 'on_snatch',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Also send message when movie is snatched.',
},
],
}
],
}]

View File

@@ -0,0 +1,39 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
log = CPLog(__name__)
class Boxcar2(Notification):
url = 'https://new.boxcar.io/api/notifications'
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
try:
message = message.strip()
long_message = ''
if listener == 'test':
long_message = 'This is a test message'
elif data.get('identifier'):
long_message = 'More movie info <a href="http://www.imdb.com/title/%s/">on IMDB</a>' % data['identifier']
data = {
'user_credentials': self.conf('token'),
'notification[title]': toUnicode(message),
'notification[long_message]': toUnicode(long_message),
}
self.urlopen(self.url, data = data)
except:
log.error('Make sure the token provided is for the correct device')
return False
log.info('Boxcar notification successful.')
return True
def isEnabled(self):
return super(Boxcar2, self).isEnabled() and self.conf('token')

View File

@@ -1,5 +1,6 @@
from .main import CoreNotifier
def start():
return CoreNotifier()

View File

@@ -67,28 +67,42 @@ class CoreNotifier(Notification):
def clean(self):
db = get_session()
db.query(Notif).filter(Notif.added <= (int(time.time()) - 2419200)).delete()
db.commit()
try:
db = get_session()
db.query(Notif).filter(Notif.added <= (int(time.time()) - 2419200)).delete()
db.commit()
except:
log.error('Failed cleaning notification: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
def markAsRead(self, ids = None, **kwargs):
ids = splitString(ids) if ids else None
db = get_session()
try:
db = get_session()
if ids:
q = db.query(Notif).filter(or_(*[Notif.id == tryInt(s) for s in ids]))
else:
q = db.query(Notif).filter_by(read = False)
if ids:
q = db.query(Notif).filter(or_(*[Notif.id == tryInt(s) for s in ids]))
else:
q = db.query(Notif).filter_by(read = False)
q.update({Notif.read: True})
q.update({Notif.read: True})
db.commit()
db.commit()
return {
'success': True
}
except:
log.error('Failed mark as read: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
return {
'success': True
'success': False
}
def listView(self, limit_offset = None, **kwargs):
@@ -140,24 +154,30 @@ class CoreNotifier(Notification):
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
db = get_session()
try:
db = get_session()
data['notification_type'] = listener if listener else 'unknown'
data['notification_type'] = listener if listener else 'unknown'
n = Notif(
message = toUnicode(message),
data = data
)
db.add(n)
db.commit()
n = Notif(
message = toUnicode(message),
data = data
)
db.add(n)
db.commit()
ndict = n.to_dict()
ndict['type'] = 'notification'
ndict['time'] = time.time()
ndict = n.to_dict()
ndict['type'] = 'notification'
ndict['time'] = time.time()
self.frontend(type = listener, data = data)
self.frontend(type = listener, data = data)
return True
return True
except:
log.error('Failed notify: %s', traceback.format_exc())
db.rollback()
finally:
db.close()
def frontend(self, type = 'notification', data = None, message = None):
if not data: data = {}

View File

@@ -147,7 +147,7 @@ var NotificationBase = new Class({
// Process data
if(json){
Array.each(json.result, function(result){
App.trigger(result.type, result);
App.trigger(result.type, [result]);
if(result.message && result.read === undefined)
self.showMessage(result.message);
})

View File

@@ -1,5 +1,6 @@
from .main import Email
def start():
return Email()
@@ -30,7 +31,7 @@ config = [{
},
{ 'name': 'smtp_port',
'label': 'SMTP server port',
'default': '25',
'default': '25',
'type': 'int',
},
{

View File

@@ -40,7 +40,7 @@ class Email(Notification):
log.debug("SMTP over SSL %s", ("enabled" if ssl == 1 else "disabled"))
mailserver = smtplib.SMTP_SSL(smtp_server) if ssl == 1 else smtplib.SMTP(smtp_server)
if (starttls):
if starttls:
log.debug("Using StartTLS to initiate the connection with the SMTP server")
mailserver.starttls()

View File

@@ -1,5 +1,6 @@
from .main import Growl
def start():
return Growl()

View File

@@ -37,7 +37,7 @@ class Growl(Notification):
)
self.growl.register()
self.registered = True
except Exception, e:
except Exception as e:
if 'timed out' in str(e):
self.registered = True
else:

View File

@@ -1,5 +1,6 @@
from .main import NMJ
def start():
return NMJ()

View File

@@ -86,18 +86,17 @@ class NMJ(Notification):
'arg3': '',
}
params = tryUrlencode(params)
UPDATE_URL = 'http://%(host)s:8008/metadata_database?%(params)s'
updateUrl = UPDATE_URL % {'host': host, 'params': params}
update_url = 'http://%(host)s:8008/metadata_database?%(params)s' % {'host': host, 'params': params}
try:
response = self.urlopen(updateUrl)
response = self.urlopen(update_url)
except:
return False
try:
et = etree.fromstring(response)
result = et.findtext('returnValue')
except SyntaxError, e:
except SyntaxError as e:
log.error('Unable to parse XML returned from the Popcorn Hour: %s', e)
return False

View File

@@ -1,5 +1,6 @@
from .main import NotifyMyAndroid
def start():
return NotifyMyAndroid()

View File

@@ -2,6 +2,7 @@ from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
import pynma
import six
log = CPLog(__name__)
@@ -26,7 +27,7 @@ class NotifyMyAndroid(Notification):
successful = 0
for key in keys:
if not response[str(key)]['code'] == u'200':
if not response[str(key)]['code'] == six.u('200'):
log.error('Could not send notification to NotifyMyAndroid (%s). %s', (key, response[key]['message']))
else:
successful += 1

View File

@@ -1,5 +1,6 @@
from .main import NotifyMyWP
def start():
return NotifyMyWP()

View File

@@ -2,13 +2,15 @@ from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from pynmwp import PyNMWP
import six
log = CPLog(__name__)
class NotifyMyWP(Notification):
def notify(self, message = '', data = {}, listener = None):
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
keys = splitString(self.conf('api_key'))
p = PyNMWP(keys, self.conf('dev_key'))
@@ -16,7 +18,7 @@ class NotifyMyWP(Notification):
response = p.push(application = self.default_title, event = message, description = message, priority = self.conf('priority'), batch_mode = len(keys) > 1)
for key in keys:
if not response[key]['Code'] == u'200':
if not response[key]['Code'] == six.u('200'):
log.error('Could not send notification to NotifyMyWindowsPhone (%s). %s', (key, response[key]['message']))
return False

View File

@@ -1,5 +1,6 @@
from .main import Plex
def start():
return Plex()

View File

@@ -29,7 +29,7 @@ class PlexClientHTTP(PlexClientProtocol):
try:
self.plex.urlopen(url, headers = headers, timeout = 3, show_error = False)
except Exception, err:
except Exception as err:
log.error("Couldn't sent command to Plex: %s", err)
return False
@@ -68,7 +68,7 @@ class PlexClientJSON(PlexClientProtocol):
try:
requests.post(url, headers = headers, timeout = 3, data = json.dumps(request))
except Exception, err:
except Exception as err:
log.error("Couldn't sent command to Plex: %s", err)
return False

View File

@@ -23,9 +23,9 @@ class Plex(Notification):
addEvent('renamer.after', self.addToLibrary)
def addToLibrary(self, message = None, group = {}):
def addToLibrary(self, message = None, group = None):
if self.isDisabled(): return
if not group: group = {}
return self.server.refresh()
@@ -57,7 +57,8 @@ class Plex(Notification):
return success
def notify(self, message = '', data = {}, listener = None):
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
return self.notifyClients(message, self.getClientNames())
def test(self, **kwargs):

View File

@@ -1,5 +1,6 @@
from .main import Prowl
def start():
return Prowl()

View File

@@ -22,7 +22,7 @@ class Prowl(Notification):
'priority': self.conf('priority'),
}
headers = {
'Content-type': 'application/x-www-form-urlencoded'
'Content-type': 'application/x-www-form-urlencoded'
}
try:

View File

@@ -1,5 +1,6 @@
from .main import Pushalot
def start():
return Pushalot()

View File

@@ -5,6 +5,7 @@ import traceback
log = CPLog(__name__)
class Pushalot(Notification):
urls = {

View File

@@ -1,5 +1,6 @@
from .main import Pushbullet
def start():
return Pushbullet()

View File

@@ -1,5 +1,5 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
import base64
@@ -32,7 +32,7 @@ class Pushbullet(Notification):
response = self.request(
'pushes',
cache = False,
device_id = device,
device_iden = device,
type = 'note',
title = self.default_title,
body = toUnicode(message)
@@ -46,24 +46,7 @@ class Pushbullet(Notification):
return successful == len(devices)
def getDevices(self):
devices = [d.strip() for d in self.conf('devices').split(',')]
# Remove empty items
devices = [d for d in devices if len(d)]
# Break on any ids that aren't integers
valid_devices = []
for device_id in devices:
d = tryInt(device_id, None)
if not d:
log.error('Device ID "%s" is not valid', device_id)
return None
valid_devices.append(d)
return valid_devices
return splitString(self.conf('devices'))
def request(self, method, cache = True, **kwargs):
try:
@@ -79,7 +62,7 @@ class Pushbullet(Notification):
data = self.urlopen(self.url % method, headers = headers, data = kwargs)
return json.loads(data)
except Exception, ex:
except Exception as ex:
log.error('Pushbullet request failed')
log.debug(ex)

View File

@@ -1,5 +1,6 @@
from .main import Pushover
def start():
return Pushover()

View File

@@ -30,9 +30,9 @@ class Pushover(Notification):
})
http_handler.request('POST',
"/1/messages.json",
headers = {'Content-type': 'application/x-www-form-urlencoded'},
body = tryUrlencode(api_data)
"/1/messages.json",
headers = {'Content-type': 'application/x-www-form-urlencoded'},
body = tryUrlencode(api_data)
)
response = http_handler.getresponse()

View File

@@ -1,5 +1,6 @@
from .main import Synoindex
def start():
return Synoindex()

View File

@@ -26,7 +26,7 @@ class Synoindex(Notification):
out = p.communicate()
log.info('Result from synoindex: %s', str(out))
return True
except OSError, e:
except OSError as e:
log.error('Unable to run synoindex: %s', e)
return False

View File

@@ -1,5 +1,6 @@
from .main import Toasty
def start():
return Toasty()

View File

@@ -5,6 +5,7 @@ import traceback
log = CPLog(__name__)
class Toasty(Notification):
urls = {

View File

@@ -1,5 +1,6 @@
from .main import Trakt
def start():
return Trakt()

View File

@@ -3,12 +3,14 @@ from couchpotato.core.notifications.base import Notification
log = CPLog(__name__)
class Trakt(Notification):
urls = {
'base': 'http://api.trakt.tv/%s',
'library': 'movie/library/%s',
'unwatchlist': 'movie/unwatchlist/%s',
'test': 'account/test/%s',
}
listen_to = ['movie.downloaded']
@@ -16,25 +18,39 @@ class Trakt(Notification):
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
post_data = {
'username': self.conf('automation_username'),
'password' : self.conf('automation_password'),
'movies': [{
'imdb_id': data['library']['identifier'],
'title': data['library']['titles'][0]['title'],
'year': data['library']['year']
}] if data else []
}
if listener == 'test':
result = self.call((self.urls['library'] % self.conf('automation_api_key')), post_data)
if self.conf('remove_watchlist_enabled'):
result = result and self.call((self.urls['unwatchlist'] % self.conf('automation_api_key')), post_data)
post_data = {
'username': self.conf('automation_username'),
'password': self.conf('automation_password'),
}
return result
result = self.call((self.urls['test'] % self.conf('automation_api_key')), post_data)
return result
else:
post_data = {
'username': self.conf('automation_username'),
'password': self.conf('automation_password'),
'movies': [{
'imdb_id': data['library']['identifier'],
'title': data['library']['titles'][0]['title'],
'year': data['library']['year']
}] if data else []
}
result = self.call((self.urls['library'] % self.conf('automation_api_key')), post_data)
if self.conf('remove_watchlist_enabled'):
result = result and self.call((self.urls['unwatchlist'] % self.conf('automation_api_key')), post_data)
return result
def call(self, method_url, post_data):
try:
response = self.getJsonData(self.urls['base'] % method_url, data = post_data, cache_timeout = 1)
if response:
if response.get('status') == "success":

View File

@@ -1,5 +1,6 @@
from .main import Twitter
def start():
return Twitter()

View File

@@ -64,7 +64,7 @@ class Twitter(Notification):
api.PostUpdate(update_message[135:] + ' 2/2')
else:
api.PostUpdate(update_message)
except Exception, e:
except Exception as e:
log.error('Error sending tweet: %s', e)
return False

View File

@@ -1,5 +1,6 @@
from .main import XBMC
def start():
return XBMC()

View File

@@ -1,13 +1,13 @@
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from urllib2 import URLError
import base64
import json
import socket
import traceback
import urllib
import requests
from requests.packages.urllib3.exceptions import MaxRetryError
log = CPLog(__name__)
@@ -45,7 +45,7 @@ class XBMC(Notification):
max_successful += len(calls)
response = self.request(host, calls)
else:
response = self.notifyXBMCnoJSON(host, {'title':self.default_title, 'message':message})
response = self.notifyXBMCnoJSON(host, {'title': self.default_title, 'message': message})
if data and data.get('destination_dir') and (not self.conf('only_first') or hosts.index(host) == 0):
response += self.request(host, [('VideoLibrary.Scan', {})])
@@ -168,7 +168,7 @@ class XBMC(Notification):
# manually fake expected response array
return [{'result': 'Error'}]
except requests.exceptions.Timeout:
except (MaxRetryError, requests.exceptions.Timeout):
log.info2('Couldn\'t send request to XBMC, assuming it\'s turned off')
return [{'result': 'Error'}]
except:
@@ -203,7 +203,7 @@ class XBMC(Notification):
log.debug('Returned from request %s: %s', (host, response))
return response
except requests.exceptions.Timeout:
except (MaxRetryError, requests.exceptions.Timeout):
log.info2('Couldn\'t send request to XBMC, assuming it\'s turned off')
return []
except:

View File

@@ -1,5 +1,6 @@
from .main import Xmpp
def start():
return Xmpp()

View File

@@ -1,5 +1,6 @@
from .main import Automation
def start():
return Automation()

View File

@@ -1,10 +1,12 @@
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import ss, toSafeString, \
toUnicode, sp
from couchpotato.core.helpers.variable import getExt, md5, isLocalIP
from couchpotato.core.helpers.variable import getExt, md5, isLocalIP, scanForPassword, tryInt
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
import requests
from requests.packages.urllib3 import Timeout
from requests.packages.urllib3.exceptions import MaxRetryError
from tornado import template
from tornado.web import StaticFileHandler
from urlparse import urlparse
@@ -52,8 +54,11 @@ class Plugin(object):
self.registerStatic(inspect.getfile(self.__class__))
def conf(self, attr, value = None, default = None, section = None):
class_name = self.getName().lower().split(':')
return Env.setting(attr, section = section if section else class_name[0].lower(), value = value, default = default)
class_name = self.getName().lower().split(':')[0].lower()
return Env.setting(attr, section = section if section else class_name, value = value, default = default)
def deleteConf(self, attr):
return Env._settings.delete(attr, section = self.getName().lower().split(':')[0].lower())
def getName(self):
return self._class_name or self.__class__.__name__
@@ -80,7 +85,7 @@ class Plugin(object):
class_name = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
# View path
path = 'static/plugin/%s/' % (class_name)
path = 'static/plugin/%s/' % class_name
# Add handler to Tornado
Env.get('app').add_handlers(".*$", [(Env.get('web_base') + path + '(.*)', StaticFileHandler, {'path': static_folder})])
@@ -97,12 +102,15 @@ class Plugin(object):
self.makeDir(os.path.dirname(path))
if os.path.exists(path):
log.debug('%s already exists, overwriting file with new version', path)
try:
f = open(path, 'w+' if not binary else 'w+b')
f.write(content)
f.close()
os.chmod(path, Env.getPermission('file'))
except Exception, e:
except:
log.error('Unable writing to file "%s": %s', (path, traceback.format_exc()))
if os.path.isfile(path):
os.remove(path)
@@ -113,13 +121,13 @@ class Plugin(object):
if not os.path.isdir(path):
os.makedirs(path, Env.getPermission('folder'))
return True
except Exception, e:
except Exception as e:
log.error('Unable to create folder "%s": %s', (path, e))
return False
# http request
def urlopen(self, url, timeout = 30, data = None, headers = None, files = None, show_error = True, return_raw = False):
def urlopen(self, url, timeout = 30, data = None, headers = None, files = None, show_error = True):
url = urllib2.quote(ss(url), safe = "%/:=&?~#+!$,;'@()*[]")
if not headers: headers = {}
@@ -161,15 +169,18 @@ class Plugin(object):
}
method = 'post' if len(data) > 0 or files else 'get'
log.info('Opening url: %s %s, data: %s', (method, url, [x for x in data.iterkeys()] if isinstance(data, dict) else 'with data'))
log.info('Opening url: %s %s, data: %s', (method, url, [x for x in data.keys()] if isinstance(data, dict) else 'with data'))
response = r.request(method, url, verify = False, **kwargs)
data = response.content if return_raw else response.text
if response.status_code == requests.codes.ok:
data = response.content
else:
response.raise_for_status()
self.http_failed_request[host] = 0
except IOError:
except (IOError, MaxRetryError, Timeout):
if show_error:
log.error('Failed opening url in %s: %s %s', (self.getName(), url, traceback.format_exc(1)))
log.error('Failed opening url in %s: %s %s', (self.getName(), url, traceback.format_exc(0)))
# Save failed requests by hosts
try:
@@ -235,31 +246,34 @@ class Plugin(object):
except:
log.error("Something went wrong when finishing the plugin function. Could not find the 'is_running' key")
def getCache(self, cache_key, url = None, **kwargs):
cache_key_md5 = md5(cache_key)
cache = Env.get('cache').get(cache_key_md5)
if cache:
if not Env.get('dev'): log.debug('Getting cache %s', cache_key)
return cache
use_cache = not len(kwargs.get('data', {})) > 0 and not kwargs.get('files')
if use_cache:
cache_key_md5 = md5(cache_key)
cache = Env.get('cache').get(cache_key_md5)
if cache:
if not Env.get('dev'): log.debug('Getting cache %s', cache_key)
return cache
if url:
try:
cache_timeout = 300
if kwargs.has_key('cache_timeout'):
if 'cache_timeout' in kwargs:
cache_timeout = kwargs.get('cache_timeout')
del kwargs['cache_timeout']
data = self.urlopen(url, **kwargs)
if data and cache_timeout > 0:
if data and cache_timeout > 0 and use_cache:
self.setCache(cache_key, data, timeout = cache_timeout)
return data
except:
if not kwargs.get('show_error', True):
raise
log.error('Failed getting cache: %s', (traceback.format_exc()))
log.debug('Failed getting cache: %s', (traceback.format_exc(0)))
return ''
def setCache(self, cache_key, value, timeout = 300):
@@ -269,8 +283,17 @@ class Plugin(object):
return value
def createNzbName(self, data, media):
release_name = data.get('name')
tag = self.cpTag(media)
return '%s%s' % (toSafeString(toUnicode(data.get('name'))[:127 - len(tag)]), tag)
# Check if password is filename
name_password = scanForPassword(data.get('name'))
if name_password:
release_name, password = name_password
tag += '{{%s}}' % password
max_length = 127 - len(tag) # Some filesystems don't support 128+ long filenames
return '%s%s' % (toSafeString(toUnicode(release_name)[:max_length]), tag)
def createFileName(self, data, filedata, media):
name = sp(os.path.join(self.createNzbName(data, media)))
@@ -284,6 +307,43 @@ class Plugin(object):
return ''
def checkFilesChanged(self, files, unchanged_for = 60):
now = time.time()
file_too_new = False
for cur_file in files:
# File got removed while checking
if not os.path.isfile(cur_file):
file_too_new = now
break
# File has changed in last 60 seconds
file_time = self.getFileTimes(cur_file)
for t in file_time:
if t > now - unchanged_for:
file_too_new = tryInt(time.time() - t)
break
if file_too_new:
break
if file_too_new:
try:
time_string = time.ctime(file_time[0])
except:
try:
time_string = time.ctime(file_time[1])
except:
time_string = 'unknown'
return file_too_new, time_string
return False, None
def getFileTimes(self, file_path):
return [os.path.getmtime(file_path), os.path.getctime(file_path) if os.name != 'posix' else 0]
def isDisabled(self):
return not self.isEnabled()

Some files were not shown because too many files have changed in this diff Show More