Compare commits

..

105 Commits

Author SHA1 Message Date
Ruud
d233e4d22e Merge branch 'refs/heads/develop' into desktop 2013-01-26 13:54:56 +01:00
Ruud
23893dbcb9 Merge branch 'refs/heads/develop' into desktop 2013-01-25 20:13:58 +01:00
Ruud
506871b506 One up 2013-01-23 23:10:55 +01:00
Ruud
6115917660 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2013-01-23 22:57:07 +01:00
Ruud
21df8819d3 Merge branch 'refs/heads/develop' into desktop 2013-01-23 22:55:09 +01:00
Ruud
fb3f3e11f6 Merge branch 'refs/heads/develop' into desktop 2013-01-22 21:40:40 +01:00
Ruud
178c8942c3 Merge branch 'refs/heads/develop' into desktop 2013-01-14 19:54:22 +01:00
Ruud
51e747049d One up 2013-01-07 23:10:42 +01:00
Ruud
0582f7d694 Urlencode spotweb id. fix #1213 2013-01-07 23:10:06 +01:00
Ruud
fa7cac7538 Merge branch 'refs/heads/develop' into desktop 2013-01-07 22:41:55 +01:00
Ruud
9a314cfbc4 One up 2012-12-29 00:03:45 +01:00
Ruud
5941d0bf77 Add version to update url 2012-12-29 00:03:36 +01:00
Ruud
d326c1c25c Merge branch 'refs/heads/master' into desktop
Conflicts:
	version.py
2012-12-28 23:31:08 +01:00
Ruud
7e6234298d Merge branch 'refs/heads/develop' 2012-12-28 23:25:40 +01:00
Ruud
d4da206f93 Merge branch 'refs/heads/develop' 2012-12-22 16:33:47 +01:00
Ruud
985a168724 Merge branch 'refs/heads/develop' 2012-12-21 23:18:00 +01:00
Ruud
173c6194ed Merge branch 'refs/heads/develop' 2012-12-19 11:12:26 +01:00
Ruud
bcd23ad10c Merge branch 'refs/heads/develop' 2012-12-17 15:13:00 +01:00
Ruud
898e6f487d Merge branch 'refs/heads/develop' 2012-12-16 23:52:06 +01:00
Ruud
96472a9a8f One up 2012-12-16 23:51:58 +01:00
Ruud
27252561e2 Merge branch 'refs/heads/develop' into desktop 2012-12-16 23:51:24 +01:00
Ruud
6618c3927c Merge branch 'refs/heads/develop' 2012-12-11 23:15:06 +01:00
Ruud
c9e732651f One up 2012-12-01 12:16:58 +01:00
Ruud
7849e7170d Uninstall only create files, no wildcard *.* 2012-12-01 12:16:51 +01:00
Ruud
087894eb4e Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-12-01 11:50:08 +01:00
Ruud
4b58b40226 Merge branch 'refs/heads/develop' 2012-12-01 11:48:54 +01:00
Ruud
3ecc826629 Merge branch 'refs/heads/develop'
Conflicts:
	version.py
2012-11-11 22:06:48 +01:00
Ruud
25f1b8c7a7 Fedora init fix #1009 2012-11-02 18:32:15 +01:00
Ruud
e71da1f14d Use proper description for binary build. fix #1005 2012-11-02 18:24:13 +01:00
Ruud
938b14ba18 One up installer 2012-10-29 20:45:17 +01:00
Ruud
d6522d8f38 One up installer 2012-10-27 18:49:44 +02:00
Ruud
78eab890e7 Merge branch 'refs/heads/develop' into desktop 2012-10-27 18:25:36 +02:00
Ruud
1a56191f83 Don't unzip 2012-10-27 18:22:50 +02:00
Ruud
41c0f34d95 Properly restart 2012-10-27 18:22:40 +02:00
Ruud
37bf205d7a Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-10-27 11:56:57 +02:00
Ruud
32fe3796e4 Merge branch 'refs/heads/develop' 2012-10-26 22:22:47 +02:00
Ruud
359d1aaafa Merge branch 'refs/heads/develop' 2012-10-26 14:54:12 +02:00
Ruud
fb5d336351 Merge branch 'refs/heads/develop' 2012-10-26 14:36:04 +02:00
Ruud
eb30dff986 Merge branch 'refs/heads/develop' 2012-10-13 00:00:44 +02:00
Ruud
9312336962 Merge branch 'refs/heads/develop' 2012-09-24 09:36:59 +02:00
Ruud
aa1fa3eb9a Add description 2012-09-19 15:42:33 +02:00
Ruud
0e2f8a612c Extract zip after build, for testing 2012-09-19 15:29:07 +02:00
Ruud
ade4338ea6 Merge branch 'refs/heads/develop' 2012-09-16 21:32:16 +02:00
Ruud
55b20324c0 Merge branch 'refs/heads/develop' 2012-09-16 12:36:48 +02:00
Ruud
465e7b2abc Merge branch 'refs/heads/develop' into desktop 2012-09-16 12:36:17 +02:00
Ruud
578fb45785 Installer 1 up 2012-09-16 11:35:56 +02:00
Ruud
c0fb28301d Merge branch 'refs/heads/develop'
Conflicts:
	version.py
2012-09-16 10:46:39 +02:00
Ruud
96995bbbe5 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-09-16 10:45:19 +02:00
Ruud
4cfdafebbc Merge branch 'refs/heads/develop' into desktop 2012-09-14 13:15:47 +02:00
Ruud
f9c2503f81 Merge branch 'refs/heads/develop' 2012-09-14 13:15:35 +02:00
Ruud
b97acb8ef5 Merge branch 'refs/heads/develop' into desktop 2012-09-14 13:08:19 +02:00
Ruud
5b4cdf05b1 Merge branch 'refs/heads/develop' 2012-09-14 13:06:56 +02:00
Ruud
d68d2dfdb6 Updated installer 2012-09-09 21:48:38 +02:00
Ruud
39b269a454 Merge branch 'refs/heads/develop' into desktop 2012-09-09 17:32:47 +02:00
Ruud
ac081d3e10 Getting ready for build 2012-09-09 17:28:23 +02:00
Ruud
5d4efb60cf Merge branch 'refs/heads/develop' into desktop 2012-09-08 16:01:49 +02:00
Ruud
6f25a6bdfd Merge branch 'refs/heads/develop' 2012-09-03 10:32:09 +02:00
Ruud
23427e95f7 Merge branch 'refs/heads/develop' 2012-08-26 23:09:51 +02:00
Ruud
cc408b980c Merge branch 'refs/heads/develop' into desktop
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-08-05 16:18:35 +02:00
Ruud
90a09e573b Merge branch 'refs/heads/develop'
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-08-05 16:15:53 +02:00
Ruud
e1d7440b9d Wrong branch in master 2012-07-15 00:23:44 +02:00
Ruud
59590b3ac9 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-07-14 00:35:00 +02:00
Ruud
ff759dacf3 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	couchpotato/core/_base/updater/main.py
2012-07-11 22:43:45 +02:00
Ruud
a328e44130 Merge branch 'desktop' of github.com:RuudBurger/CouchPotatoServer into desktop 2012-05-15 23:23:56 +02:00
Ruud
7924cac5f9 Update installer version 2012-05-15 23:21:24 +02:00
Ruud
1cef3b0c93 remove --nogit tag 2012-05-15 23:21:24 +02:00
Ruud
3cd59edc8b Import errors
File icon
2012-05-15 23:21:24 +02:00
Ruud
0d624af01d Working PNG 2012-05-15 23:21:24 +02:00
Ruud
a09132570c Change branch to desktop 2012-05-15 23:21:14 +02:00
Ruud
ee3fc38432 Better setup 2012-05-15 23:21:14 +02:00
Ruud
dbf0192c8e Inno setup, start 2012-05-15 23:21:14 +02:00
Ruud
6962cfc3f5 new Desktop runner 2012-05-15 23:21:14 +02:00
Ruud
e096ec3b5b Desktop files 2012-05-15 23:20:05 +02:00
Ruud
b30a74ae0c Merge branch 'refs/heads/develop' into desktop 2012-05-15 23:15:17 +02:00
Ruud
978eeb16c9 Update installer version 2012-05-15 23:14:20 +02:00
Ruud
e5c9d91657 Merge branch 'refs/heads/develop' into desktop 2012-05-15 22:27:22 +02:00
Ruud
fa81c3a07a Merge branch 'refs/heads/develop' into desktop
Conflicts:
	version.py
2012-05-14 22:00:02 +02:00
Ruud
9cdd520d41 Merge branch 'refs/heads/develop' into desktop 2012-05-14 20:22:55 +02:00
Ruud
55d7898771 Merge branch 'refs/heads/develop' into desktop 2012-05-13 12:56:45 +02:00
Ruud
b8256bef97 Merge branch 'refs/heads/develop' into desktop 2012-05-12 00:35:52 +02:00
Ruud
5be9dc0b4a Merge branch 'refs/heads/develop' into desktop 2012-05-09 22:20:53 +02:00
Ruud
7d0be0cefb remove --nogit tag 2012-05-07 22:55:54 +02:00
Ruud
f7ce1edb13 Merge branch 'refs/heads/develop' into desktop 2012-05-07 22:44:01 +02:00
Ruud
5ad9280b60 Merge branch 'refs/heads/develop' into desktop 2012-05-07 22:27:55 +02:00
Ruud
2b353f1b20 Merge branch 'refs/heads/develop' into desktop 2012-05-04 17:29:15 +02:00
Ruud
75ab90b87b Merge branch 'refs/heads/develop' into desktop 2012-05-02 21:40:19 +02:00
Ruud
0219296120 Import errors
File icon
2012-05-02 21:34:45 +02:00
Ruud
20032b3a31 Working PNG 2012-05-01 07:35:44 +02:00
Ruud
ea9e9a8c90 Updater base 2012-05-01 07:35:27 +02:00
Ruud
f7b0ee145b Change branch to desktop 2012-04-30 21:37:04 +02:00
Ruud
cc866738ee Merge branch 'refs/heads/develop' into desktop 2012-04-30 21:32:56 +02:00
Ruud
eadccf6e33 Merge branch 'refs/heads/develop' into desktop 2012-04-29 00:00:25 +02:00
Ruud
b70b66e567 Merge branch 'refs/heads/develop' into desktop 2012-04-28 23:14:59 +02:00
Ruud
5b6792dc20 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	CouchPotato.py
	couchpotato/core/plugins/renamer/main.py
	couchpotato/core/plugins/trailer/__init__.py
2012-04-07 21:35:36 +02:00
Ruud
f498e7343a Better setup 2012-02-25 01:48:58 +01:00
Ruud
6962f441e6 Inno setup, start 2012-02-21 18:50:34 +01:00
Ruud
1def62b1b1 new Desktop runner 2012-02-19 17:13:37 +01:00
Ruud
a4a4a6a185 Merge branch 'refs/heads/develop' into desktop
Conflicts:
	CouchPotato.py
2012-02-19 13:14:56 +01:00
Ruud
d4c9469c1a Remove nfo when not renaming as .orig.nfo 2012-02-19 12:53:55 +01:00
Ruud
3e2d4c5d7b Initial trailer support 2012-02-19 12:48:54 +01:00
Ruud
d03f711d69 kwargs in file.download for urlopen 2012-02-19 12:45:22 +01:00
Ruud
44dd8d9b96 Merge lists, not overwrite 2012-02-19 12:37:25 +01:00
Ruud
549a3be0d8 Merge branch 'refs/heads/develop' into desktop 2012-02-12 00:10:56 +01:00
Ruud
1bb2edf8ec Merge branch 'refs/heads/develop' into desktop 2012-02-11 23:33:14 +01:00
Ruud
84c6f36315 Desktop files 2012-02-11 23:06:14 +01:00
1014 changed files with 86527 additions and 90217 deletions

231
Desktop.py Normal file
View File

@@ -0,0 +1,231 @@
from esky.util import appdir_from_executable #@UnresolvedImport
from threading import Thread
from version import VERSION
from wx.lib.softwareupdate import SoftwareUpdate
import os
import sys
import time
import webbrowser
import wx
# Include proper dirs
# Resolve the application base path: frozen (esky) builds locate it relative
# to the bundled 'libs' package; source checkouts use this file's directory.
if hasattr(sys, 'frozen'):
    import libs
    base_path = os.path.dirname(os.path.dirname(os.path.abspath(libs.__file__)))
else:
    base_path = os.path.dirname(os.path.abspath(__file__))

# Make the app package and its bundled libraries importable first.
lib_dir = os.path.join(base_path, 'libs')
sys.path.insert(0, base_path)
sys.path.insert(0, lib_dir)
from couchpotato.environment import Env
class TaskBarIcon(wx.TaskBarIcon):
    """System tray icon with an Open / About / Quit popup menu."""

    # Menu item ids; Quit reuses the stock wx exit id.
    TBMENU_OPEN = wx.NewId()
    TBMENU_SETTINGS = wx.NewId()
    TBMENU_EXIT = wx.ID_EXIT

    closed = False    # guards against running the close handler twice
    menu = False      # lazily-created popup menu (False until first built)
    enabled = False   # True once the server has finished loading

    def __init__(self, frame):
        wx.TaskBarIcon.__init__(self)
        self.frame = frame

        icon = wx.Icon('icon.png', wx.BITMAP_TYPE_PNG)
        self.SetIcon(icon)

        # Same popup menu on both left and right click.
        self.Bind(wx.EVT_TASKBAR_LEFT_UP, self.OnTaskBarClick)
        self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.OnTaskBarClick)
        self.Bind(wx.EVT_MENU, self.onOpen, id = self.TBMENU_OPEN)
        self.Bind(wx.EVT_MENU, self.onSettings, id = self.TBMENU_SETTINGS)
        self.Bind(wx.EVT_MENU, self.onTaskBarClose, id = self.TBMENU_EXIT)

    def OnTaskBarClick(self, evt):
        # Show the popup and destroy the wx menu handle afterwards.
        menu = self.CreatePopupMenu()
        self.PopupMenu(menu)
        menu.Destroy()

    def enable(self):
        # Called once the server is up: unlock the menu entries.
        self.enabled = True
        if self.menu:
            self.open_menu.Enable(True)
            self.setting_menu.Enable(True)
            self.open_menu.SetText('Open')

    def CreatePopupMenu(self):
        if not self.menu:
            self.menu = wx.Menu()
            self.open_menu = self.menu.Append(self.TBMENU_OPEN, 'Open')
            self.setting_menu = self.menu.Append(self.TBMENU_SETTINGS, 'About')
            self.exit_menu = self.menu.Append(self.TBMENU_EXIT, 'Quit')

            # Until the app has loaded, grey out everything except Quit.
            if not self.enabled:
                self.open_menu.Enable(False)
                self.setting_menu.Enable(False)
                self.open_menu.SetText('Loading...')

        return self.menu

    def onOpen(self, event):
        # Open the web UI in the default browser.
        url = self.frame.parent.getSetting('base_url')
        webbrowser.open(url)

    def onSettings(self, event):
        url = self.frame.parent.getSetting('base_url') + '/settings/'
        webbrowser.open(url)

    def onTaskBarClose(self, evt):
        if self.closed:
            return
        self.closed = True

        self.RemoveIcon()
        # Close the owning frame from the GUI thread.
        wx.CallAfter(self.frame.Close)

    def makeIcon(self, img):
        # Scale to the platform's expected tray-icon size.
        if "wxMSW" in wx.PlatformInfo:
            img = img.Scale(16, 16)
        elif "wxGTK" in wx.PlatformInfo:
            img = img.Scale(22, 22)
        # NOTE(review): img.CopyFromBitmap() with no argument looks wrong for a
        # wx image object — confirm against the wx version this targets.
        icon = wx.IconFromBitmap(img.CopyFromBitmap())
        return icon
class MainFrame(wx.Frame):
    """Hidden top-level frame; owns the tray icon and receives close events."""

    def __init__(self, parent):
        # FRAME_NO_TASKBAR keeps this (never-shown) frame out of the taskbar.
        wx.Frame.__init__(self, None, style = wx.FRAME_NO_TASKBAR)

        self.parent = parent          # the CouchPotatoApp instance
        self.tbicon = TaskBarIcon(self)
class WorkerThread(Thread):
    """Daemon thread that boots and runs the CouchPotato server."""

    def __init__(self, desktop):
        Thread.__init__(self)
        self.daemon = True        # don't block interpreter exit on this thread
        self._desktop = desktop   # owning CouchPotatoApp instance

        self.start()

    def run(self):

        # Get options via arg
        from couchpotato.runner import getOptions
        args = ['--quiet']
        self.options = getOptions(base_path, args)

        # Load settings
        settings = Env.get('settings')
        settings.setFile(self.options.config_file)

        # Create data dir if needed; fall back to the platform default
        # when no data_dir is configured.
        self.data_dir = os.path.expanduser(Env.setting('data_dir'))
        if self.data_dir == '':
            from couchpotato.core.helpers.variable import getDataDir
            self.data_dir = getDataDir()

        if not os.path.isdir(self.data_dir):
            os.makedirs(self.data_dir)

        # Create logging dir
        self.log_dir = os.path.join(self.data_dir, 'logs');
        if not os.path.isdir(self.log_dir):
            os.mkdir(self.log_dir)

        try:
            from couchpotato.runner import runCouchPotato
            # Blocks here for the server's whole lifetime.
            runCouchPotato(self.options, base_path, args, data_dir = self.data_dir, log_dir = self.log_dir, Env = Env, desktop = self._desktop)
        except:
            pass

        # Server loop ended (shutdown or crash): close the GUI frame too.
        self._desktop.frame.Close()
class CouchPotatoApp(wx.App, SoftwareUpdate):
    """wx application shell: tray GUI, esky auto-updates, server lifecycle.

    The server thread pushes its settings in via setSettings() and registers
    lifecycle callbacks via addEvents(); the __main__ block reads ``restart``
    after the main loop exits to decide whether to re-exec the process.
    """

    settings = {}     # settings pushed in from the running server
    events = {}       # callbacks registered by the server ('onClose', ...)
    restart = False   # set via afterShutdown(); read by the __main__ block
    closing = False   # guards against handling close twice

    def OnInit(self):

        # Updater: point esky at the versioned update feed.
        base_url = 'http://couchpota.to/updates/%s/' % VERSION
        self.InitUpdates(base_url, base_url + 'changelog.html',
                         icon = wx.Icon('icon.png'))

        self.frame = MainFrame(self)
        self.frame.Bind(wx.EVT_CLOSE, self.onClose)

        # CouchPotato thread
        self.worker = WorkerThread(self)

        return True

    def onAppLoad(self):
        # Server is up: enable the tray menu.
        self.frame.tbicon.enable()

    def setSettings(self, settings = None):
        # `settings = None` instead of `settings = {}`: a mutable default
        # would be shared across every call of this method.
        self.settings = settings if settings is not None else {}

    def getSetting(self, name):
        # Returns None for unknown setting names.
        return self.settings.get(name)

    def addEvents(self, events = None):
        # Merge (don't replace), so repeated registrations accumulate.
        # `events = None` avoids the shared mutable-default pitfall.
        for name, handler in (events or {}).iteritems():
            self.events[name] = handler

    def onClose(self, event):
        if not self.closing:
            self.closing = True
            self.frame.tbicon.onTaskBarClose(event)

            # Only invoke the shutdown callback if the server registered one;
            # calling an absent handler would raise TypeError on None.
            onClose = self.events.get('onClose')
            if onClose:
                onClose(event)

    def afterShutdown(self, restart = False):
        # Tear down the GUI and leave MainLoop(); __main__ re-execs if asked.
        self.frame.Destroy()
        self.restart = restart
        self.ExitMainLoop()
if __name__ == '__main__':
    app = CouchPotatoApp(redirect = False)
    app.MainLoop()

    # Give background threads a moment to wind down before a restart.
    time.sleep(1)
    if app.restart:

        def appexe_from_executable(exepath):
            # Map the running executable to the app's (possibly updated) exe.
            appdir = appdir_from_executable(exepath)
            exename = os.path.basename(exepath)
            if sys.platform == "darwin":
                # On OS X the real binary lives inside the .app bundle.
                if os.path.isdir(os.path.join(appdir, "Contents", "MacOS")):
                    return os.path.join(appdir, "Contents", "MacOS", exename)
            return os.path.join(appdir, exename)

        # Replace this process with a fresh instance of the app.
        exe = appexe_from_executable(sys.executable)
        os.chdir(os.path.dirname(exe))
        os.execv(exe, [exe] + sys.argv[1:])

View File

@@ -40,23 +40,3 @@ Linux (ubuntu / debian):
* Make it executable. `sudo chmod +x /etc/init.d/couchpotato`
* Add it to defaults. `sudo update-rc.d couchpotato defaults`
* Open your browser and go to: `http://localhost:5050/`
FreeBSD :
* Update your ports tree `sudo portsnap fetch update`
* Install Python 2.6+ [lang/python](http://www.freshports.org/lang/python) with `cd /usr/ports/lang/python; sudo make install clean`
* Install port [databases/py-sqlite3](http://www.freshports.org/databases/py-sqlite3) with `cd /usr/ports/databases/py-sqlite3; sudo make install clean`
* Add a symlink to 'python2' `sudo ln -s /usr/local/bin/python /usr/local/bin/python2`
* Install port [ftp/libcurl](http://www.freshports.org/ftp/libcurl) with `cd /usr/ports/ftp/libcurl; sudo make install clean`
* Install port [ftp/curl](http://www.freshports.org/ftp/curl), deselect 'Asynchronous DNS resolution via c-ares' when prompted as part of config `cd /usr/ports/ftp/curl; sudo make install clean`
* Install port [textproc/docbook-xml-450](http://www.freshports.org/textproc/docbook-xml-450) with `cd /usr/ports/textproc/docbook-xml-450; sudo make install clean`
* Install port [GIT](http://git-scm.com/) with `cd /usr/ports/devel/git; sudo make install clean`
* 'cd' to the folder of your choosing.
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
* Then run `sudo python CouchPotatoServer/CouchPotato.py` to start for the first time
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/freebsd /etc/rc.d/couchpotato`
* Change the paths inside the init script. `sudo vim /etc/init.d/couchpotato`
* Make init script executable. `sudo chmod +x /etc/rc.d/couchpotato`
* Add init to startup. `sudo echo 'couchpotato_enable="YES"' >> /etc/rc.conf`
* Open your browser and go to: `http://server:5050/`

View File

@@ -1,25 +1,15 @@
## Got a issue/feature request or submitting a pull request?
#So you feel like posting a bug, sending me a pull request or just telling me how awesome I am. No problem!
Make sure you think of the following things:
##Just make sure you think of the following things:
## Issue
* Search through the existing (and closed) issues first, see if you can get your answer there.
* Search through the existing (and closed) issues first. See if you can get your answer there.
* Double check the result manually, because it could be an external issue.
* Post logs! Without seeing what is going on, I can't reproduce the error.
* Also check the logs before submitting, obvious errors like permission or http errors are often not related to CP.
* What is the movie + quality you are searching for?
* What are your settings for the specific problem?
* What providers are you using? (While you're logs include these, scanning through hundred of lines of log isn't our hobby)
* Post the logs from config directory, please do not copy paste the UI. Use pastebin to store these logs!
* Give a short step by step of how to reproduce the error.
* What is the movie + quality you are searching for.
* What are you settings for the specific problem.
* What providers are you using. (While your logs include these, scanning through hundred of lines of log isn't my hobby).
* Give me a short step by step of how to reproduce.
* What hardware / OS are you using and what are the limits? NAS can be slow and maybe have a different python installed then when you use CP on OSX or Windows for example.
* I will mark issues with the "can't reproduce" tag. Don't go asking "why closed" if it clearly says the issue in the tag ;)
* If you're running on a NAS (QNAP, Austor etc..) with pre-made packages, make sure these are setup to use our source repo (RuudBurger/CouchPotatoServer) and nothing else!!
* I will mark issues with the "can't reproduce" tag. Don't go asking me "why closed" if it clearly says the issue in the tag ;)
## Pull Request
* Make sure your pull request is made for the develop branch (or relevant feature branch)
* Have you tested your PR? If not, why?
* Are there any limitations of your PR we should know of?
* Make sure to keep your PR up-to-date with the branch you're trying to push into.
**If we don't get enough info, the chance of the issue getting closed is a lot bigger ;)**
**If I don't get enough info, the chance of the issue getting closed is a lot bigger ;)**

View File

@@ -1,139 +1,83 @@
from couchpotato.api import api_docs, api_docs_missing, api
from couchpotato.api import api_docs, api_docs_missing
from couchpotato.core.auth import requires_auth
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.variable import md5, tryInt
from couchpotato.core.helpers.request import getParams, jsonified
from couchpotato.core.helpers.variable import md5
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
from tornado import template
from tornado.web import RequestHandler, authenticated
from flask.app import Flask
from flask.blueprints import Blueprint
from flask.globals import request
from flask.helpers import url_for
from flask.templating import render_template
from sqlalchemy.engine import create_engine
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm.session import sessionmaker
from werkzeug.utils import redirect
import os
import time
import traceback
log = CPLog(__name__)
app = Flask(__name__, static_folder = 'nope')
web = Blueprint('web', __name__)
views = {}
template_loader = template.Loader(os.path.join(os.path.dirname(__file__), 'templates'))
class BaseHandler(RequestHandler):
def get_current_user(self):
username = Env.setting('username')
password = Env.setting('password')
if username and password:
return self.get_secure_cookie('user')
else: # Login when no username or password are set
return True
# Main web handler
class WebHandler(BaseHandler):
@authenticated
def get(self, route, *args, **kwargs):
route = route.strip('/')
if not views.get(route):
page_not_found(self)
return
try:
self.write(views[route]())
except:
log.error("Failed doing web request '%s': %s", (route, traceback.format_exc()))
self.write({'success': False, 'error': 'Failed returning results'})
def addView(route, func, static = False):
views[route] = func
def get_session(engine = None):
return Env.getSession(engine)
def addView(route, func, static = False):
web.add_url_rule(route + ('' if static else '/'), endpoint = route if route else 'index', view_func = func)
# Web view
""" Web view """
@web.route('/')
@requires_auth
def index():
return template_loader.load('index.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env)
addView('', index)
return render_template('index.html', sep = os.sep, fireEvent = fireEvent, env = Env)
# API docs
""" Api view """
@web.route('docs/')
@requires_auth
def apiDocs():
from couchpotato import app
routes = []
for route in api.iterkeys():
routes.append(route)
for route, x in sorted(app.view_functions.iteritems()):
if route[0:4] == 'api.':
routes += [route[4:].replace('::', '.')]
if api_docs.get(''):
del api_docs['']
del api_docs_missing['']
return render_template('api.html', fireEvent = fireEvent, routes = sorted(routes), api_docs = api_docs, api_docs_missing = sorted(api_docs_missing))
return template_loader.load('api.html').generate(fireEvent = fireEvent, routes = sorted(routes), api_docs = api_docs, api_docs_missing = sorted(api_docs_missing), Env = Env)
@web.route('getkey/')
def getApiKey():
addView('docs', apiDocs)
api = None
params = getParams()
username = Env.setting('username')
password = Env.setting('password')
# Make non basic auth option to get api key
class KeyHandler(RequestHandler):
def get(self, *args, **kwargs):
api = None
if (params.get('u') == md5(username) or not username) and (params.get('p') == password or not password):
api = Env.setting('api_key')
try:
username = Env.setting('username')
password = Env.setting('password')
return jsonified({
'success': api is not None,
'api_key': api
})
if (self.get_argument('u') == md5(username) or not username) and (self.get_argument('p') == password or not password):
api = Env.setting('api_key')
self.write({
'success': api is not None,
'api_key': api
})
except:
log.error('Failed doing key request: %s', (traceback.format_exc()))
self.write({'success': False, 'error': 'Failed returning results'})
class LoginHandler(BaseHandler):
def get(self, *args, **kwargs):
if self.get_current_user():
self.redirect(Env.get('web_base'))
else:
self.write(template_loader.load('login.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env))
def post(self, *args, **kwargs):
api = None
username = Env.setting('username')
password = Env.setting('password')
if (self.get_argument('username') == username or not username) and (md5(self.get_argument('password')) == password or not password):
api = Env.setting('api_key')
if api:
remember_me = tryInt(self.get_argument('remember_me', default = 0))
self.set_secure_cookie('user', api, expires_days = 30 if remember_me > 0 else None)
self.redirect(Env.get('web_base'))
class LogoutHandler(BaseHandler):
def get(self, *args, **kwargs):
self.clear_cookie('user')
self.redirect('%slogin/' % Env.get('web_base'))
def page_not_found(rh):
index_url = Env.get('web_base')
url = rh.request.uri[len(index_url):]
@app.errorhandler(404)
def page_not_found(error):
index_url = url_for('web.index')
url = request.path[len(index_url):]
if url[:3] != 'api':
r = index_url + '#' + url.lstrip('/')
rh.redirect(r)
if request.path != '/':
r = request.url.replace(request.path, index_url + '#' + url)
else:
r = '%s%s' % (request.url.rstrip('/'), index_url + '#' + url)
return redirect(r)
else:
if not Env.get('dev'):
time.sleep(0.1)
rh.set_status(404)
rh.write('Wrong API key used')
time.sleep(0.1)
return 'Wrong API key used', 404

View File

@@ -1,65 +1,49 @@
from couchpotato.core.helpers.request import getParams
from couchpotato.core.logger import CPLog
from functools import wraps
from threading import Thread
from tornado.gen import coroutine
from flask.blueprints import Blueprint
from flask.helpers import url_for
from tornado.web import RequestHandler, asynchronous
import json
import threading
import tornado
import traceback
import urllib
log = CPLog(__name__)
api = {}
api_locks = {}
api_nonblock = {}
from werkzeug.utils import redirect
api = Blueprint('api', __name__)
api_docs = {}
api_docs_missing = []
api_nonblock = {}
def run_async(func):
@wraps(func)
def async_func(*args, **kwargs):
func_hl = Thread(target = func, args = args, kwargs = kwargs)
func_hl.start()
return func_hl
return async_func
# NonBlock API handler
class NonBlockHandler(RequestHandler):
stopper = None
def __init__(self, application, request, **kwargs):
cls = NonBlockHandler
cls.stoppers = []
super(NonBlockHandler, self).__init__(application, request, **kwargs)
@asynchronous
def get(self, route, *args, **kwargs):
route = route.strip('/')
def get(self, route):
cls = NonBlockHandler
start, stop = api_nonblock[route]
self.stopper = stop
cls.stoppers.append(stop)
start(self.onNewMessage, last_id = self.get_argument('last_id', None))
start(self.onNewMessage, last_id = self.get_argument("last_id", None))
def onNewMessage(self, response):
if self.request.connection.stream.closed():
self.on_connection_close()
return
try:
self.finish(response)
except:
log.debug('Failed doing nonblock request, probably already closed: %s', (traceback.format_exc()))
try: self.finish({'success': False, 'error': 'Failed returning results'})
except: pass
self.finish(response)
def on_connection_close(self):
cls = NonBlockHandler
if self.stopper:
self.stopper(self.onNewMessage)
for stop in cls.stoppers:
stop(self.onNewMessage)
self.stopper = None
cls.stoppers = []
def addApiView(route, func, static = False, docs = None, **kwargs):
api.add_url_rule(route + ('' if static else '/'), endpoint = route.replace('.', '::') if route else 'index', view_func = func, **kwargs)
if docs:
api_docs[route[4:] if route[0:4] == 'api.' else route] = docs
else:
api_docs_missing.append(route)
def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
api_nonblock[route] = func_tuple
@@ -69,67 +53,9 @@ def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
else:
api_docs_missing.append(route)
# Blocking API handler
class ApiHandler(RequestHandler):
""" Api view """
def index():
index_url = url_for('web.index')
return redirect(index_url + 'docs/')
@coroutine
def get(self, route, *args, **kwargs):
route = route.strip('/')
if not api.get(route):
self.write('API call doesn\'t seem to exist')
return
api_locks[route].acquire()
try:
kwargs = {}
for x in self.request.arguments:
kwargs[x] = urllib.unquote(self.get_argument(x))
# Split array arguments
kwargs = getParams(kwargs)
# Remove t random string
try: del kwargs['t']
except: pass
# Add async callback handler
@run_async
def run_handler(callback):
try:
result = api[route](**kwargs)
callback(result)
except:
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
callback({'success': False, 'error': 'Failed returning results'})
result = yield tornado.gen.Task(run_handler)
# Check JSONP callback
jsonp_callback = self.get_argument('callback_func', default = None)
if jsonp_callback:
self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')')
self.set_header("Content-Type", "text/javascript")
elif isinstance(result, tuple) and result[0] == 'redirect':
self.redirect(result[1])
else:
self.write(result)
except:
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
self.write({'success': False, 'error': 'Failed returning results'})
api_locks[route].release()
def addApiView(route, func, static = False, docs = None, **kwargs):
if static: func(route)
else:
api[route] = func
api_locks[route] = threading.Lock()
if docs:
api_docs[route[4:] if route[0:4] == 'api.' else route] = docs
else:
api_docs_missing.append(route)
addApiView('', index)

View File

@@ -70,7 +70,7 @@ config = [{
'name': 'development',
'default': 0,
'type': 'bool',
'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
'description': 'Disables some checks/downloads for faster reloading.',
},
{
'name': 'data_dir',

View File

@@ -1,5 +1,6 @@
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.request import jsonified
from couchpotato.core.helpers.variable import cleanHost, md5
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
@@ -55,12 +56,8 @@ class Core(Plugin):
if not Env.get('desktop'):
self.signalHandler()
# Set default urlopen timeout
import socket
socket.setdefaulttimeout(30)
def md5Password(self, value):
return md5(value) if value else ''
return md5(value.encode(Env.get('encoding'))) if value else ''
def checkApikey(self, value):
return value if value and len(value) > 3 else uuid4().hex
@@ -71,28 +68,28 @@ class Core(Plugin):
return True
def available(self, **kwargs):
return {
def available(self):
return jsonified({
'success': True
}
})
def shutdown(self, **kwargs):
def shutdown(self):
if self.shutdown_started:
return False
def shutdown():
self.initShutdown()
IOLoop.current().add_callback(shutdown)
IOLoop.instance().add_callback(shutdown)
return 'shutdown'
def restart(self, **kwargs):
def restart(self):
if self.shutdown_started:
return False
def restart():
self.initShutdown(restart = True)
IOLoop.current().add_callback(restart)
IOLoop.instance().add_callback(restart)
return 'restarting'
@@ -128,10 +125,10 @@ class Core(Plugin):
time.sleep(1)
log.debug('Safe to shutdown/restart')
log.debug('Save to shutdown/restart')
try:
IOLoop.current().stop()
IOLoop.instance().stop()
except RuntimeError:
pass
except:
@@ -159,10 +156,10 @@ class Core(Plugin):
host = 'localhost'
port = Env.setting('port')
return '%s:%d%s' % (cleanHost(host).rstrip('/'), int(port), Env.get('web_base'))
return '%s:%d%s' % (cleanHost(host).rstrip('/'), int(port), '/' + Env.setting('url_base').lstrip('/') if Env.setting('url_base') else '')
def createApiUrl(self):
return '%sapi/%s' % (self.createBaseUrl(), Env.setting('api_key'))
return '%s/api/%s' % (self.createBaseUrl(), Env.setting('api_key'))
def version(self):
ver = fireEvent('updater.info', single = True)
@@ -173,16 +170,16 @@ class Core(Plugin):
return '%s - %s-%s - v2' % (platf, ver.get('version')['type'], ver.get('version')['hash'])
def versionView(self, **kwargs):
return {
def versionView(self):
return jsonified({
'version': self.version()
}
})
def signalHandler(self):
if Env.get('daemonized'): return
def signal_handler(signal, frame):
fireEvent('app.shutdown', single = True)
fireEvent('app.shutdown')
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

View File

@@ -1,64 +1,15 @@
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from minify.cssmin import cssmin
from minify.jsmin import jsmin
from tornado.web import StaticFileHandler
import os
import re
import traceback
log = CPLog(__name__)
class ClientScript(Plugin):
core_static = {
'style': [
'style/main.css',
'style/uniform.generic.css',
'style/uniform.css',
'style/settings.css',
],
'script': [
'scripts/library/mootools.js',
'scripts/library/mootools_more.js',
'scripts/library/uniform.js',
'scripts/library/form_replacement/form_check.js',
'scripts/library/form_replacement/form_radio.js',
'scripts/library/form_replacement/form_dropdown.js',
'scripts/library/form_replacement/form_selectoption.js',
'scripts/library/question.js',
'scripts/library/scrollspy.js',
'scripts/library/spin.js',
'scripts/library/Array.stableSort.js',
'scripts/library/async.js',
'scripts/couchpotato.js',
'scripts/api.js',
'scripts/library/history.js',
'scripts/page.js',
'scripts/block.js',
'scripts/block/navigation.js',
'scripts/block/footer.js',
'scripts/block/menu.js',
'scripts/page/home.js',
'scripts/page/wanted.js',
'scripts/page/settings.js',
'scripts/page/about.js',
'scripts/page/manage.js',
],
}
urls = {'style': {}, 'script': {}, }
minified = {'style': {}, 'script': {}, }
paths = {'style': {}, 'script': {}, }
comment = {
'style': '/*** %s:%d ***/\n',
'script': '// %s:%d\n'
urls = {
'style': {},
'script': {},
}
html = {
@@ -73,77 +24,6 @@ class ClientScript(Plugin):
addEvent('clientscript.get_styles', self.getStyles)
addEvent('clientscript.get_scripts', self.getScripts)
if not Env.get('dev'):
addEvent('app.load', self.minify)
self.addCore()
def addCore(self):
for static_type in self.core_static:
for rel_path in self.core_static.get(static_type):
file_path = os.path.join(Env.get('app_dir'), 'couchpotato', 'static', rel_path)
core_url = 'static/%s' % rel_path
if static_type == 'script':
self.registerScript(core_url, file_path, position = 'front')
else:
self.registerStyle(core_url, file_path, position = 'front')
def minify(self):
# Create cache dir
cache = Env.get('cache_dir')
parent_dir = os.path.join(cache, 'minified')
self.makeDir(parent_dir)
Env.get('app').add_handlers(".*$", [(Env.get('web_base') + 'minified/(.*)', StaticFileHandler, {'path': parent_dir})])
for file_type in ['style', 'script']:
ext = 'js' if file_type is 'script' else 'css'
positions = self.paths.get(file_type, {})
for position in positions:
files = positions.get(position)
self._minify(file_type, files, position, position + '.' + ext)
def _minify(self, file_type, files, position, out):
cache = Env.get('cache_dir')
out_name = out
out = os.path.join(cache, 'minified', out_name)
raw = []
for file_path in files:
f = open(file_path, 'r').read()
if file_type == 'script':
data = jsmin(f)
else:
data = self.prefix(f)
data = cssmin(data)
data = data.replace('../images/', '../static/images/')
data = data.replace('../fonts/', '../static/fonts/')
data = data.replace('../../static/', '../static/') # Replace inside plugins
raw.append({'file': file_path, 'date': int(os.path.getmtime(file_path)), 'data': data})
# Combine all files together with some comments
data = ''
for r in raw:
data += self.comment.get(file_type) % (ss(r.get('file')), r.get('date'))
data += r.get('data') + '\n\n'
self.createFile(out, data.strip())
if not self.minified.get(file_type):
self.minified[file_type] = {}
if not self.minified[file_type].get(position):
self.minified[file_type][position] = []
minified_url = 'minified/%s?%s' % (out_name, tryInt(os.path.getmtime(out)))
self.minified[file_type][position].append(minified_url)
def getStyles(self, *args, **kwargs):
return self.get('style', *args, **kwargs)
@@ -155,57 +35,22 @@ class ClientScript(Plugin):
data = '' if as_html else []
try:
try:
if not Env.get('dev'):
return self.minified[type][location]
except:
pass
return self.urls[type][location]
except:
log.error('Error getting minified %s, %s: %s', (type, location, traceback.format_exc()))
except Exception, e:
log.error(e)
return data
def registerStyle(self, api_path, file_path, position = 'head'):
self.register(api_path, file_path, 'style', position)
def registerStyle(self, path, position = 'head'):
self.register(path, 'style', position)
def registerScript(self, api_path, file_path, position = 'head'):
self.register(api_path, file_path, 'script', position)
def registerScript(self, path, position = 'head'):
self.register(path, 'script', position)
def register(self, api_path, file_path, type, location):
api_path = '%s?%s' % (api_path, tryInt(os.path.getmtime(file_path)))
def register(self, filepath, type, location):
if not self.urls[type].get(location):
self.urls[type][location] = []
self.urls[type][location].append(api_path)
if not self.paths[type].get(location):
self.paths[type][location] = []
self.paths[type][location].append(file_path)
prefix_properties = ['border-radius', 'transform', 'transition', 'box-shadow']
prefix_tags = ['ms', 'moz', 'webkit']
def prefix(self, data):
trimmed_data = re.sub('(\t|\n|\r)+', '', data)
new_data = ''
colon_split = trimmed_data.split(';')
for splt in colon_split:
curl_split = splt.strip().split('{')
for curly in curl_split:
curly = curly.strip()
for prop in self.prefix_properties:
if curly[:len(prop) + 1] == prop + ':':
for tag in self.prefix_tags:
new_data += ' -%s-%s; ' % (tag, curly)
new_data += curly + (' { ' if len(curl_split) > 1 else ' ')
new_data += '; '
new_data = new_data.replace('{ ;', '; ').replace('} ;', '} ')
return new_data
filePath = filepath
self.urls[type][location].append(filePath)

View File

@@ -16,28 +16,60 @@ class Scheduler(Plugin):
addEvent('schedule.cron', self.cron)
addEvent('schedule.interval', self.interval)
addEvent('schedule.remove', self.remove)
addEvent('schedule.start', self.start)
addEvent('schedule.restart', self.start)
addEvent('app.load', self.start)
self.sched = Sched(misfire_grace_time = 60)
self.sched.start()
self.started = True
def remove(self, identifier):
for cron_type in ['intervals', 'crons']:
for type in ['interval', 'cron']:
try:
self.sched.unschedule_job(getattr(self, cron_type)[identifier]['job'])
log.debug('%s unscheduled %s', (cron_type.capitalize(), identifier))
self.sched.unschedule_job(getattr(self, type)[identifier]['job'])
log.debug('%s unscheduled %s', (type.capitalize(), identifier))
except:
pass
def doShutdown(self):
def start(self):
# Stop all running
self.stop()
# Crons
for identifier in self.crons:
try:
self.remove(identifier)
cron = self.crons[identifier]
job = self.sched.add_cron_job(cron['handle'], day = cron['day'], hour = cron['hour'], minute = cron['minute'])
cron['job'] = job
except ValueError, e:
log.error('Failed adding cronjob: %s', e)
# Intervals
for identifier in self.intervals:
try:
self.remove(identifier)
interval = self.intervals[identifier]
job = self.sched.add_interval_job(interval['handle'], hours = interval['hours'], minutes = interval['minutes'], seconds = interval['seconds'])
interval['job'] = job
except ValueError, e:
log.error('Failed adding interval cronjob: %s', e)
# Start it
log.debug('Starting scheduler')
self.sched.start()
self.started = True
log.debug('Scheduler started')
def doShutdown(self):
super(Scheduler, self).doShutdown()
self.stop()
return super(Scheduler, self).doShutdown()
def stop(self):
if self.started:
log.debug('Stopping scheduler')
self.sched.shutdown(wait = False)
self.sched.shutdown()
log.debug('Scheduler stopped')
self.started = False
@@ -50,7 +82,6 @@ class Scheduler(Plugin):
'day': day,
'hour': hour,
'minute': minute,
'job': self.sched.add_cron_job(handle, day = day, hour = hour, minute = minute)
}
def interval(self, identifier = '', handle = None, hours = 0, minutes = 0, seconds = 0):
@@ -62,5 +93,4 @@ class Scheduler(Plugin):
'hours': hours,
'minutes': minutes,
'seconds': seconds,
'job': self.sched.add_interval_job(handle, hours = hours, minutes = minutes, seconds = seconds)
}

View File

@@ -1,6 +1,7 @@
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.request import jsonified
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
@@ -14,7 +15,6 @@ import tarfile
import time
import traceback
import version
import zipfile
log = CPLog(__name__)
@@ -32,10 +32,11 @@ class Updater(Plugin):
else:
self.updater = SourceUpdater()
addEvent('app.load', self.setCrons)
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
addEvent('app.load', self.autoUpdate)
addEvent('updater.info', self.info)
addApiView('updater.info', self.info, docs = {
addApiView('updater.info', self.getInfo, docs = {
'desc': 'Get updater information',
'return': {
'type': 'object',
@@ -51,17 +52,8 @@ class Updater(Plugin):
'return': {'type': 'see updater.info'}
})
addEvent('setting.save.updater.enabled.after', self.setCrons)
def setCrons(self):
fireEvent('schedule.remove', 'updater.check', single = True)
if self.isEnabled():
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
self.autoUpdate() # Check after enabling
def autoUpdate(self):
if self.isEnabled() and self.check() and self.conf('automatic') and not self.updater.update_failed:
if self.check() and self.conf('automatic') and not self.updater.update_failed:
if self.updater.doUpdate():
# Notify before restarting
@@ -79,30 +71,31 @@ class Updater(Plugin):
return False
def check(self, force = False):
if not force and self.isDisabled():
def check(self):
if self.isDisabled():
return
if self.updater.check():
if not self.available_notified and self.conf('notification') and not self.conf('automatic'):
info = self.updater.info()
version_date = datetime.fromtimestamp(info['update_version']['date'])
fireEvent('updater.available', message = 'A new update with hash "%s" is available, this version is from %s' % (info['update_version']['hash'], version_date), data = info)
fireEvent('updater.available', message = 'A new update is available', data = self.updater.info())
self.available_notified = True
return True
return False
def info(self, **kwargs):
def info(self):
return self.updater.info()
def checkView(self, **kwargs):
return {
'update_available': self.check(force = True),
'info': self.updater.info()
}
def getInfo(self):
return jsonified(self.updater.info())
def doUpdateView(self, **kwargs):
def checkView(self):
return jsonified({
'update_available': self.check(),
'info': self.updater.info()
})
def doUpdateView(self):
self.check()
if not self.updater.update_version:
@@ -117,9 +110,9 @@ class Updater(Plugin):
if not success:
success = True
return {
return jsonified({
'success': success
}
})
class BaseUpdater(Plugin):
@@ -132,11 +125,13 @@ class BaseUpdater(Plugin):
update_failed = False
update_version = None
last_check = 0
auto_register_static = False
def doUpdate(self):
pass
def getInfo(self):
return jsonified(self.info())
def info(self):
return {
'last_check': self.last_check,
@@ -183,6 +178,9 @@ class GitUpdater(BaseUpdater):
def doUpdate(self):
try:
log.debug('Stashing local changes')
self.repo.saveStash()
log.info('Updating to latest version')
self.repo.pull()
@@ -257,11 +255,11 @@ class SourceUpdater(BaseUpdater):
def doUpdate(self):
try:
download_data = fireEvent('cp.source_url', repo = self.repo_user, repo_name = self.repo_name, branch = self.branch, single = True)
destination = os.path.join(Env.get('cache_dir'), self.update_version.get('hash')) + '.' + download_data.get('type')
url = 'https://github.com/%s/%s/tarball/%s' % (self.repo_user, self.repo_name, self.branch)
destination = os.path.join(Env.get('cache_dir'), self.update_version.get('hash') + '.tar.gz')
extracted_path = os.path.join(Env.get('cache_dir'), 'temp_updater')
destination = fireEvent('file.download', url = download_data.get('url'), dest = destination, single = True)
destination = fireEvent('file.download', url = url, dest = destination, single = True)
# Cleanup leftover from last time
if os.path.isdir(extracted_path):
@@ -269,15 +267,9 @@ class SourceUpdater(BaseUpdater):
self.makeDir(extracted_path)
# Extract
if download_data.get('type') == 'zip':
zip = zipfile.ZipFile(destination)
zip.extractall(extracted_path)
zip.close()
else:
tar = tarfile.open(destination)
tar.extractall(path = extracted_path)
tar.close()
tar = tarfile.open(destination)
tar.extractall(path = extracted_path)
tar.close()
os.remove(destination)
if self.replaceWith(os.path.join(extracted_path, os.listdir(extracted_path)[0])):
@@ -295,7 +287,6 @@ class SourceUpdater(BaseUpdater):
def replaceWith(self, path):
app_dir = ss(Env.get('app_dir'))
data_dir = ss(Env.get('data_dir'))
# Get list of files we want to overwrite
self.deletePyc()
@@ -327,15 +318,12 @@ class SourceUpdater(BaseUpdater):
log.error('Failed overwriting file "%s": %s', (tofile, traceback.format_exc()))
return False
for still_exists in existing_files:
if data_dir in still_exists:
continue
try:
os.remove(still_exists)
except:
log.error('Failed removing non-used file: %s', traceback.format_exc())
if Env.get('app_dir') not in Env.get('data_dir'):
for still_exists in existing_files:
try:
os.remove(still_exists)
except:
log.error('Failed removing non-used file: %s', traceback.format_exc())
return True

View File

@@ -5,7 +5,7 @@ var UpdaterBase = new Class({
initialize: function(){
var self = this;
App.addEvent('load', self.info.bind(self, 2000))
App.addEvent('load', self.info.bind(self, 1000))
App.addEvent('unload', function(){
if(self.timer)
clearTimeout(self.timer);
@@ -24,7 +24,7 @@ var UpdaterBase = new Class({
self.doUpdate();
else {
App.unBlockPage();
App.on('message', 'No updates available');
App.fireEvent('message', 'No updates available');
}
}
})
@@ -84,7 +84,7 @@ var UpdaterBase = new Class({
'click': self.doUpdate.bind(self)
}
})
).inject(document.body)
).inject($(document.body).getElement('.header'))
},
doUpdate: function(){

26
couchpotato/core/auth.py Normal file
View File

@@ -0,0 +1,26 @@
from couchpotato.core.helpers.variable import md5
from couchpotato.environment import Env
from flask import request, Response
from functools import wraps
def check_auth(username, password):
return username == Env.setting('username') and password == Env.setting('password')
def authenticate():
return Response(
'This is not the page you are looking for. *waves hand*', 401,
{'WWW-Authenticate': 'Basic realm="CouchPotato Login"'}
)
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
auth = getattr(request, 'authorization')
if Env.setting('username') and Env.setting('password'):
if (not auth or not check_auth(auth.username.decode('latin1'), md5(auth.password.decode('latin1').encode(Env.get('encoding'))))):
return authenticate()
return f(*args, **kwargs)
return decorated

View File

@@ -1,4 +1,4 @@
config = [{
config = {
'name': 'download_providers',
'groups': [
{
@@ -10,4 +10,4 @@ config = [{
'options': [],
},
],
}]
}

View File

@@ -1,6 +1,5 @@
from base64 import b32decode, b16encode
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.base import Provider
import random
@@ -11,13 +10,12 @@ log = CPLog(__name__)
class Downloader(Provider):
protocol = []
type = []
http_time_between_calls = 0
status_support = True
torrent_sources = [
'http://torrage.com/torrent/%s.torrent',
'https://torcache.net/torrent/%s.torrent',
'http://torcache.net/torrent/%s.torrent',
]
torrent_trackers = [
@@ -37,74 +35,48 @@ class Downloader(Provider):
def __init__(self):
addEvent('download', self._download)
addEvent('download.enabled', self._isEnabled)
addEvent('download.enabled_protocols', self.getEnabledProtocol)
addEvent('download.enabled_types', self.getEnabledDownloadType)
addEvent('download.status', self._getAllDownloadStatus)
addEvent('download.remove_failed', self._removeFailed)
addEvent('download.pause', self._pause)
addEvent('download.process_complete', self._processComplete)
def getEnabledProtocol(self):
for download_protocol in self.protocol:
if self.isEnabled(manual = True, data = {'protocol': download_protocol}):
return self.protocol
def getEnabledDownloadType(self):
for download_type in self.type:
if self.isEnabled(manual = True, data = {'type': download_type}):
return self.type
return []
def _download(self, data = None, media = None, manual = False, filedata = None):
if not media: media = {}
if not data: data = {}
def _download(self, data = {}, movie = {}, manual = False, filedata = None):
if self.isDisabled(manual, data):
return
return self.download(data = data, media = media, filedata = filedata)
return self.download(data = data, movie = movie, filedata = filedata)
def _getAllDownloadStatus(self, download_ids):
def _getAllDownloadStatus(self):
if self.isDisabled(manual = True, data = {}):
return
ids = [download_id['id'] for download_id in download_ids if download_id['downloader'] == self.getName()]
return self.getAllDownloadStatus()
if ids:
return self.getAllDownloadStatus(ids)
else:
return
def getAllDownloadStatus(self):
return
def getAllDownloadStatus(self, ids):
return []
def _removeFailed(self, release_download):
def _removeFailed(self, item):
if self.isDisabled(manual = True, data = {}):
return
if release_download and release_download.get('downloader') == self.getName():
if self.conf('delete_failed'):
return self.removeFailed(release_download)
if self.conf('delete_failed', default = True):
return self.removeFailed(item)
return False
return False
def removeFailed(self, item):
return
def removeFailed(self, release_download):
return
def _processComplete(self, release_download):
if self.isDisabled(manual = True, data = {}):
return
if release_download and release_download.get('downloader') == self.getName():
if self.conf('remove_complete', default = False):
return self.processComplete(release_download = release_download, delete_files = self.conf('delete_files', default = False))
return False
return
def processComplete(self, release_download, delete_files):
return
def isCorrectProtocol(self, protocol):
is_correct = protocol in self.protocol
def isCorrectType(self, item_type):
is_correct = item_type in self.type
if not is_correct:
log.debug("Downloader doesn't support this protocol")
log.debug("Downloader doesn't support this type")
return is_correct
@@ -128,77 +100,19 @@ class Downloader(Provider):
except:
log.debug('Torrent hash "%s" wasn\'t found on: %s', (torrent_hash, source))
log.error('Failed converting magnet url to torrent: %s', torrent_hash)
log.error('Failed converting magnet url to torrent: %s', (torrent_hash))
return False
def downloadReturnId(self, download_id):
return {
'downloader': self.getName(),
'status_support': self.status_support,
'id': download_id
}
def isDisabled(self, manual = False, data = None):
if not data: data = {}
def isDisabled(self, manual, data):
return not self.isEnabled(manual, data)
def _isEnabled(self, manual, data = None):
if not data: data = {}
def _isEnabled(self, manual, data = {}):
if not self.isEnabled(manual, data):
return
return True
def isEnabled(self, manual = False, data = None):
if not data: data = {}
def isEnabled(self, manual, data = {}):
d_manual = self.conf('manual', default = False)
return super(Downloader, self).isEnabled() and \
(d_manual and manual or d_manual is False) and \
(not data or self.isCorrectProtocol(data.get('protocol')))
def _pause(self, release_download, pause = True):
if self.isDisabled(manual = True, data = {}):
return
if release_download and release_download.get('downloader') == self.getName():
self.pause(release_download, pause)
return True
return False
def pause(self, release_download, pause):
return
class ReleaseDownloadList(list):
provider = None
def __init__(self, provider, **kwargs):
self.provider = provider
self.kwargs = kwargs
super(ReleaseDownloadList, self).__init__()
def extend(self, results):
for r in results:
self.append(r)
def append(self, result):
new_result = self.fillResult(result)
super(ReleaseDownloadList, self).append(new_result)
def fillResult(self, result):
defaults = {
'id': 0,
'status': 'busy',
'downloader': self.provider.getName(),
'folder': '',
'files': '',
}
return mergeDicts(defaults, result)
((d_manual and manual) or (d_manual is False)) and \
(not data or self.isCorrectType(data.get('type')))

View File

@@ -13,7 +13,7 @@ config = [{
'list': 'download_providers',
'name': 'blackhole',
'label': 'Black hole',
'description': 'Download the NZB/Torrent to a specific folder. <em>Note: Seeding and copying/linking features do <strong>not</strong> work with Black hole</em>.',
'description': 'Download the NZB/Torrent to a specific folder.',
'wizard': True,
'options': [
{
@@ -35,13 +35,6 @@ config = [{
'type': 'dropdown',
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
},
{
'name': 'create_subdir',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Create a sub directory when saving the .nzb (or .torrent).',
},
{
'name': 'manual',
'default': 0,

View File

@@ -1,32 +1,27 @@
from __future__ import with_statement
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
import os
import traceback
log = CPLog(__name__)
class Blackhole(Downloader):
protocol = ['nzb', 'torrent', 'torrent_magnet']
status_support = False
type = ['nzb', 'torrent', 'torrent_magnet']
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
def download(self, data = {}, movie = {}, filedata = None):
directory = self.conf('directory')
if not directory or not os.path.isdir(directory):
log.error('No directory set for blackhole %s download.', data.get('protocol'))
log.error('No directory set for blackhole %s download.', data.get('type'))
else:
try:
if not filedata or len(filedata) < 50:
try:
if data.get('protocol') == 'torrent_magnet':
if data.get('type') == 'torrent_magnet':
filedata = self.magnetToTorrent(data.get('url'))
data['protocol'] = 'torrent'
data['type'] = 'torrent'
except:
log.error('Failed download torrent via magnet url: %s', traceback.format_exc())
@@ -34,28 +29,17 @@ class Blackhole(Downloader):
log.error('No nzb/torrent available: %s', data.get('url'))
return False
file_name = self.createFileName(data, filedata, media)
full_path = os.path.join(directory, file_name)
if self.conf('create_subdir'):
try:
new_path = os.path.splitext(full_path)[0]
if not os.path.exists(new_path):
os.makedirs(new_path)
full_path = os.path.join(new_path, file_name)
except:
log.error('Couldnt create sub dir, reverting to old one: %s', full_path)
fullPath = os.path.join(directory, self.createFileName(data, filedata, movie))
try:
if not os.path.isfile(full_path):
log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
with open(full_path, 'wb') as f:
if not os.path.isfile(fullPath):
log.info('Downloading %s to %s.', (data.get('type'), fullPath))
with open(fullPath, 'wb') as f:
f.write(filedata)
os.chmod(full_path, Env.getPermission('file'))
return self.downloadReturnId('')
return True
else:
log.info('File %s already exists.', full_path)
return self.downloadReturnId('')
log.info('File %s already exists.', fullPath)
return True
except:
log.error('Failed to download to blackhole %s', traceback.format_exc())
@@ -67,21 +51,20 @@ class Blackhole(Downloader):
return False
def getEnabledProtocol(self):
def getEnabledDownloadType(self):
if self.conf('use_for') == 'both':
return super(Blackhole, self).getEnabledProtocol()
return super(Blackhole, self).getEnabledDownloadType()
elif self.conf('use_for') == 'torrent':
return ['torrent', 'torrent_magnet']
else:
return ['nzb']
def isEnabled(self, manual = False, data = None):
if not data: data = {}
for_protocol = ['both']
if data and 'torrent' in data.get('protocol'):
for_protocol.append('torrent')
def isEnabled(self, manual, data = {}):
for_type = ['both']
if data and 'torrent' in data.get('type'):
for_type.append('torrent')
elif data:
for_protocol.append(data.get('protocol'))
for_type.append(data.get('type'))
return super(Blackhole, self).isEnabled(manual, data) and \
((self.conf('use_for') in for_protocol))
((self.conf('use_for') in for_type))

View File

@@ -1,90 +0,0 @@
from .main import Deluge
def start():
return Deluge()
config = [{
'name': 'deluge',
'groups': [
{
'tab': 'downloaders',
'list': 'download_providers',
'name': 'deluge',
'label': 'Deluge',
'description': 'Use <a href="http://www.deluge-torrent.org/" target="_blank">Deluge</a> to download torrents.',
'wizard': True,
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
'radio_group': 'torrent',
},
{
'name': 'host',
'default': 'localhost:58846',
'description': 'Hostname with port. Usually <strong>localhost:58846</strong>',
},
{
'name': 'username',
},
{
'name': 'password',
'type': 'password',
},
{
'name': 'directory',
'type': 'directory',
'description': 'Download to this directory. Keep empty for default Deluge download directory.',
},
{
'name': 'completed_directory',
'type': 'directory',
'description': 'Move completed torrent to this directory. Keep empty for default Deluge options.',
'advanced': True,
},
{
'name': 'label',
'description': 'Label to add to torrents in the Deluge UI.',
},
{
'name': 'remove_complete',
'label': 'Remove torrent',
'type': 'bool',
'default': True,
'advanced': True,
'description': 'Remove the torrent from Deluge after it has finished seeding.',
},
{
'name': 'delete_files',
'label': 'Remove files',
'default': True,
'type': 'bool',
'advanced': True,
'description': 'Also remove the leftover files.',
},
{
'name': 'paused',
'type': 'bool',
'advanced': True,
'default': False,
'description': 'Add the torrent paused.',
},
{
'name': 'manual',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},
],
}
],
}]

View File

@@ -1,275 +0,0 @@
from base64 import b64encode, b16encode, b32decode
from bencode import bencode as benc, bdecode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import isInt, sp
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.logger import CPLog
from datetime import timedelta
from hashlib import sha1
from synchronousdeluge import DelugeClient
import os.path
import re
import traceback
log = CPLog(__name__)
class Deluge(Downloader):
protocol = ['torrent', 'torrent_magnet']
log = CPLog(__name__)
drpc = None
def connect(self):
# Load host from config and split out port.
host = self.conf('host').split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
if not self.drpc:
self.drpc = DelugeRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
return self.drpc
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
log.info('Sending "%s" (%s) to Deluge.', (data.get('name'), data.get('protocol')))
if not self.connect():
return False
if not filedata and data.get('protocol') == 'torrent':
log.error('Failed sending torrent, no data')
return False
# Set parameters for Deluge
options = {
'add_paused': self.conf('paused', default = 0),
'label': self.conf('label')
}
if self.conf('directory'):
if os.path.isdir(self.conf('directory')):
options['download_location'] = self.conf('directory')
else:
log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))
if self.conf('completed_directory'):
if os.path.isdir(self.conf('completed_directory')):
options['move_completed'] = 1
options['move_completed_path'] = self.conf('completed_directory')
else:
log.error('Download directory from Deluge settings: %s doesn\'t exist', self.conf('directory'))
if data.get('seed_ratio'):
options['stop_at_ratio'] = 1
options['stop_ratio'] = tryFloat(data.get('seed_ratio'))
# Deluge only has seed time as a global option. Might be added in
# in a future API release.
# if data.get('seed_time'):
# Send request to Deluge
if data.get('protocol') == 'torrent_magnet':
remote_torrent = self.drpc.add_torrent_magnet(data.get('url'), options)
else:
filename = self.createFileName(data, filedata, media)
remote_torrent = self.drpc.add_torrent_file(filename, filedata, options)
if not remote_torrent:
log.error('Failed sending torrent to Deluge')
return False
log.info('Torrent sent to Deluge successfully.')
return self.downloadReturnId(remote_torrent)
def getAllDownloadStatus(self, ids):
log.debug('Checking Deluge download status.')
if not self.connect():
return []
release_downloads = ReleaseDownloadList(self)
queue = self.drpc.get_alltorrents(ids)
if not queue:
log.debug('Nothing in queue or error')
return []
for torrent_id in queue:
torrent = queue[torrent_id]
log.debug('name=%s / id=%s / save_path=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
# Deluge has no easy way to work out if a torrent is stalled or failing.
#status = 'failed'
status = 'busy'
if torrent['is_seed'] and tryFloat(torrent['ratio']) < tryFloat(torrent['stop_ratio']):
# We have torrent['seeding_time'] to work out what the seeding time is, but we do not
# have access to the downloader seed_time, as with deluge we have no way to pass it
# when the torrent is added. So Deluge will only look at the ratio.
# See above comment in download().
status = 'seeding'
elif torrent['is_seed'] and torrent['is_finished'] and torrent['paused'] and torrent['state'] == 'Paused':
status = 'completed'
download_dir = sp(torrent['save_path'])
if torrent['move_on_completed']:
download_dir = torrent['move_completed_path']
torrent_files = []
for file_item in torrent['files']:
torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))
release_downloads.append({
'id': torrent['hash'],
'name': torrent['name'],
'status': status,
'original_status': torrent['state'],
'seed_ratio': torrent['ratio'],
'timeleft': str(timedelta(seconds = torrent['eta'])),
'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
'files': '|'.join(torrent_files),
})
return release_downloads
def pause(self, release_download, pause = True):
if pause:
return self.drpc.pause_torrent([release_download['id']])
else:
return self.drpc.resume_torrent([release_download['id']])
def removeFailed(self, release_download):
log.info('%s failed downloading, deleting...', release_download['name'])
return self.drpc.remove_torrent(release_download['id'], True)
def processComplete(self, release_download, delete_files = False):
log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
class DelugeRPC(object):
    """Minimal synchronous wrapper around a Deluge daemon RPC session.

    Every public method opens its own connection via connect(), performs
    its RPC call(s) inside a try block, and always disconnects in the
    ``finally`` clause — so no long-lived session is held with the daemon.
    On any failure the error is logged and a falsy value is returned;
    exceptions are never propagated to the caller.
    """
    # Connection defaults; real values are assigned per-instance in __init__.
    host = 'localhost'
    port = 58846
    username = None
    password = None
    client = None  # active DelugeClient instance, set by connect()
    def __init__(self, host = 'localhost', port = 58846, username = None, password = None):
        """Store connection settings for later use by connect()."""
        super(DelugeRPC, self).__init__()
        self.host = host
        self.port = port
        self.username = username
        self.password = password
    def connect(self):
        """Create a fresh DelugeClient and connect to the daemon.

        ``port`` may arrive as a string from config, hence the int() cast.
        """
        self.client = DelugeClient()
        self.client.connect(self.host, int(self.port), self.username, self.password)
    def add_torrent_magnet(self, torrent, options):
        """Add a magnet URI to Deluge.

        torrent -- magnet URI string
        options -- Deluge add-torrent options dict; assumes a 'label' key is
                   always present (possibly empty) — a missing key would only
                   be caught by the broad except below. TODO confirm callers
                   always supply it.
        Returns the torrent id (info hash) on success, False otherwise.
        """
        torrent_id = False
        try:
            self.connect()
            torrent_id = self.client.core.add_torrent_magnet(torrent, options).get()
            if not torrent_id:
                # No id returned — the torrent may already exist in Deluge;
                # try to resolve its id from the magnet's info hash.
                torrent_id = self._check_torrent(True, torrent)
            if torrent_id and options['label']:
                # Apply the configured label via Deluge's Label plugin.
                self.client.label.set_torrent(torrent_id, options['label']).get()
        except Exception, err:
            log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
        return torrent_id
    def add_torrent_file(self, filename, torrent, options):
        """Add a .torrent file to Deluge.

        filename -- name to register the torrent under
        torrent  -- raw .torrent file bytes (base64-encoded for the RPC call)
        options  -- same contract as in add_torrent_magnet (including 'label')
        Returns the torrent id (info hash) on success, False otherwise.
        """
        torrent_id = False
        try:
            self.connect()
            torrent_id = self.client.core.add_torrent_file(filename, b64encode(torrent), options).get()
            if not torrent_id:
                # No id returned — possibly a duplicate; derive the hash from
                # the torrent data itself and verify it exists in Deluge.
                torrent_id = self._check_torrent(False, torrent)
            if torrent_id and options['label']:
                self.client.label.set_torrent(torrent_id, options['label']).get()
        except Exception, err:
            log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
        return torrent_id
    def get_alltorrents(self, ids):
        """Return the status dict for the torrents whose ids are in *ids*.

        Returns the raw get_torrents_status() result (all status fields, no
        key filter), or False on failure.
        """
        ret = False
        try:
            self.connect()
            ret = self.client.core.get_torrents_status({'id': ids}, {}).get()
        except Exception, err:
            log.error('Failed to get all torrents: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
        return ret
    def pause_torrent(self, torrent_ids):
        """Pause the given torrents; errors are logged, nothing is returned."""
        try:
            self.connect()
            self.client.core.pause_torrent(torrent_ids).get()
        except Exception, err:
            log.error('Failed to pause torrent: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
    def resume_torrent(self, torrent_ids):
        """Resume the given torrents; errors are logged, nothing is returned."""
        try:
            self.connect()
            self.client.core.resume_torrent(torrent_ids).get()
        except Exception, err:
            log.error('Failed to resume torrent: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
    def remove_torrent(self, torrent_id, remove_local_data):
        """Remove a torrent, optionally deleting its downloaded data.

        Returns the daemon's result on success, False on failure.
        """
        ret = False
        try:
            self.connect()
            ret = self.client.core.remove_torrent(torrent_id, remove_local_data).get()
        except Exception, err:
            log.error('Failed to remove torrent: %s %s', (err, traceback.format_exc()))
        finally:
            if self.client:
                self.disconnect()
        return ret
    def disconnect(self):
        """Close the current DelugeClient connection (client must be set)."""
        self.client.disconnect()
    def _check_torrent(self, magnet, torrent):
        """Resolve the info hash of a torrent that Deluge reported no id for.

        magnet  -- True: *torrent* is a magnet URI, extract the hash from the
                   urn:btih field (raises IndexError if absent — caught by the
                   caller's broad except).
                   False: *torrent* is raw .torrent bytes; the hash is the
                   SHA-1 of the bencoded info dict. `benc`/`bdecode` are
                   presumably this file's bencode imports — not visible here.
        Returns the lowercase hex hash if Deluge knows the torrent, else False.
        Must be called while connected (used from the add_* try blocks).
        """
        # Torrent not added, check if it already existed.
        if magnet:
            torrent_hash = re.findall('urn:btih:([\w]{32,40})', torrent)[0]
        else:
            info = bdecode(torrent)["info"]
            torrent_hash = sha1(benc(info)).hexdigest()
        # Convert base 32 to hex
        if len(torrent_hash) == 32:
            torrent_hash = b16encode(b32decode(torrent_hash))
        # Deluge stores hashes lowercase.
        torrent_hash = torrent_hash.lower()
        torrent_check = self.client.core.get_torrent_status(torrent_hash, {}).get()
        if torrent_check['hash']:
            return torrent_hash
        return False

View File

@@ -12,7 +12,6 @@ config = [{
'name': 'nzbget',
'label': 'NZBGet',
'description': 'Use <a href="http://nzbget.sourceforge.net/Main_Page" target="_blank">NZBGet</a> to download NZBs.',
'wizard': True,
'options': [
{
'name': 'enabled',
@@ -25,19 +24,6 @@ config = [{
'default': 'localhost:6789',
'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
},
{
'name': 'ssl',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
},
{
'name': 'username',
'default': 'nzbget',
'advanced': True,
'description': 'Set a different username to connect. Default: nzbget',
},
{
'name': 'password',
'type': 'password',
@@ -50,7 +36,6 @@ config = [{
},
{
'name': 'priority',
'advanced': True,
'default': '0',
'type': 'dropdown',
'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100)],
@@ -63,13 +48,6 @@ config = [{
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},
],
}
],

View File

@@ -1,27 +1,22 @@
from base64 import standard_b64encode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import ss, sp
from couchpotato.core.helpers.variable import tryInt, md5
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from datetime import timedelta
import re
import shutil
import socket
import traceback
import xmlrpclib
log = CPLog(__name__)
class NZBGet(Downloader):
protocol = ['nzb']
type = ['nzb']
url = '%(protocol)s://%(username)s:%(password)s@%(host)s/xmlrpc'
url = 'http://nzbget:%(password)s@%(host)s/xmlrpc'
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
def download(self, data = {}, movie = {}, filedata = None):
if not filedata:
log.error('Unable to get NZB file: %s', traceback.format_exc())
@@ -29,13 +24,13 @@ class NZBGet(Downloader):
log.info('Sending "%s" to NZBGet.', data.get('name'))
url = self.url % {'protocol': 'https' if self.conf('ssl') else 'http', 'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
url = self.url % {'host': self.conf('host'), 'password': self.conf('password')}
nzb_name = ss('%s.nzb' % self.createNzbName(data, movie))
rpc = xmlrpclib.ServerProxy(url)
try:
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
log.debug('Successfully connected to NZBGet')
log.info('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
@@ -55,141 +50,7 @@ class NZBGet(Downloader):
if xml_response:
log.info('NZB sent successfully to NZBGet')
nzb_id = md5(data['url']) # about as unique as they come ;)
couchpotato_id = "couchpotato=" + nzb_id
groups = rpc.listgroups()
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
confirmed = rpc.editqueue("GroupSetParameter", 0, couchpotato_id, file_id)
if confirmed:
log.debug('couchpotato parameter set in nzbget download')
return self.downloadReturnId(nzb_id)
return True
else:
log.error('NZBGet could not add %s to the queue.', nzb_name)
return False
def getAllDownloadStatus(self, ids):
log.debug('Checking NZBGet download status.')
url = self.url % {'protocol': 'https' if self.conf('ssl') else 'http', 'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
rpc = xmlrpclib.ServerProxy(url)
try:
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return []
except xmlrpclib.ProtocolError, e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return []
# Get NZBGet data
try:
status = rpc.status()
groups = rpc.listgroups()
queue = rpc.postqueue(0)
history = rpc.history()
except:
log.error('Failed getting data: %s', traceback.format_exc(1))
return []
release_downloads = ReleaseDownloadList(self)
for nzb in groups:
try:
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
nzb_id = nzb['NZBID']
if nzb_id in ids:
log.debug('Found %s in NZBGet download queue', nzb['NZBFilename'])
timeleft = -1
try:
if nzb['ActiveDownloads'] > 0 and nzb['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
except:
pass
release_downloads.append({
'id': nzb_id,
'name': nzb['NZBFilename'],
'original_status': 'DOWNLOADING' if nzb['ActiveDownloads'] > 0 else 'QUEUED',
# Seems to have no native API function for time left. This will return the time left after NZBGet started downloading this item
'timeleft': timeleft,
})
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
if nzb['NZBID'] in ids:
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
release_downloads.append({
'id': nzb['NZBID'],
'name': nzb['NZBFilename'],
'original_status': nzb['Stage'],
'timeleft': str(timedelta(seconds = 0)) if not status['PostPaused'] else -1,
})
for nzb in history:
try:
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
nzb_id = nzb['NZBID']
if nzb_id in ids:
log.debug('Found %s in NZBGet history. ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
release_downloads.append({
'id': nzb_id,
'name': nzb['NZBFilename'],
'status': 'completed' if nzb['ParStatus'] in ['SUCCESS', 'NONE'] and nzb['ScriptStatus'] in ['SUCCESS', 'NONE'] else 'failed',
'original_status': nzb['ParStatus'] + ', ' + nzb['ScriptStatus'],
'timeleft': str(timedelta(seconds = 0)),
'folder': sp(nzb['DestDir'])
})
return release_downloads
def removeFailed(self, release_download):
log.info('%s failed downloading, deleting...', release_download['name'])
url = self.url % {'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
rpc = xmlrpclib.ServerProxy(url)
try:
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError, e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return False
try:
history = rpc.history()
nzb_id = None
path = None
for hist in history:
for param in hist['Parameters']:
if param['Name'] == 'couchpotato' and param['Value'] == release_download['id']:
nzb_id = hist['ID']
path = hist['DestDir']
if nzb_id and path and rpc.editqueue('HistoryDelete', 0, "", [tryInt(nzb_id)]):
shutil.rmtree(path, True)
except:
log.error('Failed deleting: %s', traceback.format_exc(0))
return False
return True

View File

@@ -38,7 +38,6 @@ config = [{
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},

View File

@@ -1,6 +1,6 @@
from base64 import b64encode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import tryUrlencode, sp
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import tryUrlencode, ss
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
from urllib2 import URLError
@@ -8,72 +8,62 @@ from uuid import uuid4
import hashlib
import httplib
import json
import os
import socket
import ssl
import sys
import time
import traceback
import urllib2
log = CPLog(__name__)
class NZBVortex(Downloader):
protocol = ['nzb']
type = ['nzb']
api_level = None
session_id = None
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
def download(self, data = {}, movie = {}, filedata = None):
# Send the nzb
try:
nzb_filename = self.createFileName(data, filedata, media)
self.call('nzb/add', files = {'file': (nzb_filename, filedata)})
nzb_filename = self.createFileName(data, filedata, movie)
self.call('nzb/add', params = {'file': (ss(nzb_filename), filedata)}, multipart = True)
time.sleep(10)
raw_statuses = self.call('nzb')
nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if os.path.basename(item['nzbFileName']) == nzb_filename][0]
return self.downloadReturnId(nzb_id)
return True
except:
log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
return False
def getAllDownloadStatus(self, ids):
def getAllDownloadStatus(self):
raw_statuses = self.call('nzb')
release_downloads = ReleaseDownloadList(self)
for nzb in raw_statuses.get('nzbs', []):
if nzb['id'] in ids:
statuses = []
for item in raw_statuses.get('nzbs', []):
# Check status
status = 'busy'
if nzb['state'] == 20:
status = 'completed'
elif nzb['state'] in [21, 22, 24]:
status = 'failed'
release_downloads.append({
'id': nzb['id'],
'name': nzb['uiTitle'],
'status': status,
'original_status': nzb['state'],
'timeleft':-1,
'folder': sp(nzb['destinationPath']),
})
# Check status
status = 'busy'
if item['state'] == 20:
status = 'completed'
elif item['state'] in [21, 22, 24]:
status = 'failed'
return release_downloads
statuses.append({
'id': item['id'],
'name': item['uiTitle'],
'status': status,
'original_status': item['state'],
'timeleft':-1,
})
def removeFailed(self, release_download):
return statuses
log.info('%s failed downloading, deleting...', release_download['name'])
def removeFailed(self, item):
log.info('%s failed downloading, deleting...', item['name'])
try:
self.call('nzb/%s/cancel' % release_download['id'])
self.call('nzb/%s/cancel' % item['id'])
except:
log.error('Failed deleting: %s', traceback.format_exc(0))
return False
@@ -103,10 +93,9 @@ class NZBVortex(Downloader):
return False
def call(self, call, parameters = None, repeat = False, auth = True, *args, **kwargs):
def call(self, call, parameters = {}, repeat = False, auth = True, *args, **kwargs):
# Login first
if not parameters: parameters = {}
if not self.session_id and auth:
self.login()
@@ -117,9 +106,10 @@ class NZBVortex(Downloader):
params = tryUrlencode(parameters)
url = cleanHost(self.conf('host')) + 'api/' + call
url_opener = urllib2.build_opener(HTTPSHandler())
try:
data = self.urlopen('%s?%s' % (url, params), *args, **kwargs)
data = self.urlopen('%s?%s' % (url, params), opener = url_opener, *args, **kwargs)
if data:
return json.loads(data)
@@ -128,7 +118,7 @@ class NZBVortex(Downloader):
# Try login and do again
if not repeat:
self.login()
return self.call(call, parameters = parameters, repeat = True, **kwargs)
return self.call(call, parameters = parameters, repeat = True, *args, **kwargs)
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
except:
@@ -141,9 +131,10 @@ class NZBVortex(Downloader):
if not self.api_level:
url = cleanHost(self.conf('host')) + 'api/app/apilevel'
url_opener = urllib2.build_opener(HTTPSHandler())
try:
data = self.urlopen(url, show_error = False)
data = self.urlopen(url, opener = url_opener, show_error = False)
self.api_level = float(json.loads(data).get('apilevel'))
except URLError, e:
if hasattr(e, 'code') and e.code == 403:
@@ -153,8 +144,7 @@ class NZBVortex(Downloader):
return self.api_level
def isEnabled(self, manual = False, data = None):
if not data: data = {}
def isEnabled(self, manual, data):
return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()

View File

@@ -6,16 +6,12 @@ import traceback
log = CPLog(__name__)
class Pneumatic(Downloader):
protocol = ['nzb']
type = ['nzb']
strm_syntax = 'plugin://plugin.program.pneumatic/?mode=strm&type=add_file&nzb=%s&nzbname=%s'
status_support = False
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
def download(self, data = {}, movie = {}, filedata = None):
directory = self.conf('directory')
if not directory or not os.path.isdir(directory):
@@ -26,15 +22,15 @@ class Pneumatic(Downloader):
log.error('No nzb available!')
return False
fullPath = os.path.join(directory, self.createFileName(data, filedata, media))
fullPath = os.path.join(directory, self.createFileName(data, filedata, movie))
try:
if not os.path.isfile(fullPath):
log.info('Downloading %s to %s.', (data.get('protocol'), fullPath))
log.info('Downloading %s to %s.', (data.get('type'), fullPath))
with open(fullPath, 'wb') as f:
f.write(filedata)
nzb_name = self.createNzbName(data, media)
nzb_name = self.createNzbName(data, movie)
strm_path = os.path.join(directory, nzb_name)
strm_file = open(strm_path + '.strm', 'wb')
@@ -42,11 +38,11 @@ class Pneumatic(Downloader):
strm_file.write(strmContent)
strm_file.close()
return self.downloadReturnId('')
return True
else:
log.info('File %s already exists.', fullPath)
return self.downloadReturnId('')
return True
except:
log.error('Failed to download .strm: %s', traceback.format_exc())

View File

@@ -1,78 +0,0 @@
from .main import rTorrent
def start():
return rTorrent()
config = [{
'name': 'rtorrent',
'groups': [
{
'tab': 'downloaders',
'list': 'download_providers',
'name': 'rtorrent',
'label': 'rTorrent',
'description': '',
'wizard': True,
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
'radio_group': 'torrent',
},
{
'name': 'url',
'default': 'http://localhost:80/RPC2',
'description': 'XML-RPC Endpoint URI. Usually <strong>scgi://localhost:5000</strong> '
'or <strong>http://localhost:80/RPC2</strong>'
},
{
'name': 'username',
},
{
'name': 'password',
'type': 'password',
},
{
'name': 'label',
'description': 'Label to apply on added torrents.',
},
{
'name': 'directory',
'type': 'directory',
'description': 'Download to this directory. Keep empty for default rTorrent download directory.',
},
{
'name': 'remove_complete',
'label': 'Remove torrent',
'default': False,
'advanced': True,
'type': 'bool',
'description': 'Remove the torrent after it finishes seeding.',
},
{
'name': 'delete_files',
'label': 'Remove files',
'default': True,
'type': 'bool',
'advanced': True,
'description': 'Also remove the leftover files.',
},
{
'name': 'paused',
'type': 'bool',
'advanced': True,
'default': False,
'description': 'Add the torrent paused.',
},
{
'name': 'manual',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
],
}
],
}]

View File

@@ -1,229 +0,0 @@
from base64 import b16encode, b32decode
from bencode import bencode, bdecode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.logger import CPLog
from datetime import timedelta
from hashlib import sha1
from rtorrent import RTorrent
from rtorrent.err import MethodError
import os
log = CPLog(__name__)
class rTorrent(Downloader):
protocol = ['torrent', 'torrent_magnet']
rt = None
def connect(self):
# Already connected?
if self.rt is not None:
return self.rt
# Ensure url is set
if not self.conf('url'):
log.error('Config properties are not filled in correctly, url is missing.')
return False
if self.conf('username') and self.conf('password'):
self.rt = RTorrent(
self.conf('url'),
self.conf('username'),
self.conf('password')
)
else:
self.rt = RTorrent(self.conf('url'))
return self.rt
def _update_provider_group(self, name, data):
if data.get('seed_time'):
log.info('seeding time ignored, not supported')
if not name:
return False
if not self.connect():
return False
views = self.rt.get_views()
if name not in views:
self.rt.create_group(name)
group = self.rt.get_group(name)
try:
if data.get('seed_ratio'):
ratio = int(float(data.get('seed_ratio')) * 100)
log.debug('Updating provider ratio to %s, group name: %s', (ratio, name))
# Explicitly set all group options to ensure it is setup correctly
group.set_upload('1M')
group.set_min(ratio)
group.set_max(ratio)
group.set_command('d.stop')
group.enable()
else:
# Reset group action and disable it
group.set_command()
group.disable()
except MethodError, err:
log.error('Unable to set group options: %s', err.msg)
return False
return True
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
log.debug('Sending "%s" to rTorrent.', (data.get('name')))
if not self.connect():
return False
group_name = 'cp_' + data.get('provider').lower()
if not self._update_provider_group(group_name, data):
return False
torrent_params = {}
if self.conf('label'):
torrent_params['label'] = self.conf('label')
if not filedata and data.get('protocol') == 'torrent':
log.error('Failed sending torrent, no data')
return False
# Try download magnet torrents
if data.get('protocol') == 'torrent_magnet':
filedata = self.magnetToTorrent(data.get('url'))
if filedata is False:
return False
data['protocol'] = 'torrent'
info = bdecode(filedata)["info"]
torrent_hash = sha1(bencode(info)).hexdigest().upper()
# Convert base 32 to hex
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash))
# Send request to rTorrent
try:
# Send torrent to rTorrent
torrent = self.rt.load_torrent(filedata)
if not torrent:
log.error('Unable to find the torrent, did it fail to load?')
return False
# Set label
if self.conf('label'):
torrent.set_custom(1, self.conf('label'))
if self.conf('directory'):
torrent.set_directory(self.conf('directory'))
# Set Ratio Group
torrent.set_visible(group_name)
# Start torrent
if not self.conf('paused', default = 0):
torrent.start()
return self.downloadReturnId(torrent_hash)
except Exception, err:
log.error('Failed to send torrent to rTorrent: %s', err)
return False
def getAllDownloadStatus(self, ids):
log.debug('Checking rTorrent download status.')
if not self.connect():
return []
try:
torrents = self.rt.get_torrents()
release_downloads = ReleaseDownloadList(self)
for torrent in torrents:
if torrent.info_hash in ids:
torrent_files = []
for file_item in torrent.get_files():
torrent_files.append(sp(os.path.join(torrent.directory, file_item.path)))
status = 'busy'
if torrent.complete:
if torrent.active:
status = 'seeding'
else:
status = 'completed'
release_downloads.append({
'id': torrent.info_hash,
'name': torrent.name,
'status': status,
'seed_ratio': torrent.ratio,
'original_status': torrent.state,
'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
'folder': sp(torrent.directory),
'files': '|'.join(torrent_files)
})
return release_downloads
except Exception, err:
log.error('Failed to get status from rTorrent: %s', err)
return []
def pause(self, release_download, pause = True):
if not self.connect():
return False
torrent = self.rt.find_torrent(release_download['id'])
if torrent is None:
return False
if pause:
return torrent.pause()
return torrent.resume()
def removeFailed(self, release_download):
log.info('%s failed downloading, deleting...', release_download['name'])
return self.processComplete(release_download, delete_files = True)
def processComplete(self, release_download, delete_files):
log.debug('Requesting rTorrent to remove the torrent %s%s.',
(release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
if not self.connect():
return False
torrent = self.rt.find_torrent(release_download['id'])
if torrent is None:
return False
if delete_files:
for file_item in torrent.get_files(): # will only delete files, not dir/sub-dir
os.unlink(os.path.join(torrent.directory, file_item.path))
if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
# Remove empty directories bottom up
try:
for path, _, _ in os.walk(torrent.directory, topdown = False):
os.rmdir(path)
except OSError:
log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)
torrent.erase() # just removes the torrent, doesn't delete data
return True

View File

@@ -11,7 +11,7 @@ config = [{
'list': 'download_providers',
'name': 'sabnzbd',
'label': 'Sabnzbd',
'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> (0.7+) to download NZBs.',
'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> to download NZBs.',
'wizard': True,
'options': [
{
@@ -34,15 +34,6 @@ config = [{
'label': 'Category',
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
},
{
'name': 'priority',
'label': 'Priority',
'type': 'dropdown',
'default': '0',
'advanced': True,
'values': [('Paused', -2), ('Low', -1), ('Normal', 0), ('High', 1), ('Forced', 2)],
'description': 'Add to the queue with this priority.',
},
{
'name': 'manual',
'default': False,
@@ -50,18 +41,9 @@ config = [{
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'remove_complete',
'advanced': True,
'label': 'Remove NZB',
'default': False,
'type': 'bool',
'description': 'Remove the NZB from history after it completed.',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},

View File

@@ -1,51 +1,46 @@
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import tryUrlencode, ss
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
from datetime import timedelta
from urllib2 import URLError
import json
import os
import traceback
log = CPLog(__name__)
class Sabnzbd(Downloader):
protocol = ['nzb']
type = ['nzb']
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
def download(self, data = {}, movie = {}, filedata = None):
log.info('Sending "%s" to SABnzbd.', data.get('name'))
req_params = {
params = {
'apikey': self.conf('api_key'),
'cat': self.conf('category'),
'mode': 'addurl',
'nzbname': self.createNzbName(data, media),
'priority': self.conf('priority'),
'nzbname': self.createNzbName(data, movie),
}
nzb_filename = None
if filedata:
if len(filedata) < 50:
log.error('No proper nzb available: %s', filedata)
log.error('No proper nzb available: %s', (filedata))
return False
# If it's a .rar, it adds the .rar extension, otherwise it stays .nzb
nzb_filename = self.createFileName(data, filedata, media)
req_params['mode'] = 'addfile'
nzb_filename = self.createFileName(data, filedata, movie)
params['mode'] = 'addfile'
else:
req_params['name'] = data.get('url')
params['name'] = data.get('url')
url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(params)
try:
if nzb_filename and req_params.get('mode') is 'addfile':
sab_data = self.call(req_params, files = {'nzbfile': (ss(nzb_filename), filedata)})
if params.get('mode') is 'addfile':
sab = self.urlopen(url, timeout = 60, params = {'nzbfile': (ss(nzb_filename), filedata)}, multipart = True, show_error = False)
else:
sab_data = self.call(req_params)
sab = self.urlopen(url, timeout = 60, show_error = False)
except URLError:
log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0))
return False
@@ -53,18 +48,20 @@ class Sabnzbd(Downloader):
log.error('Failed sending release, use API key, NOT the NZB key: %s', traceback.format_exc(0))
return False
log.debug('Result from SAB: %s', sab_data)
if sab_data.get('status') and not sab_data.get('error'):
log.info('NZB sent to SAB successfully.')
if filedata:
return self.downloadReturnId(sab_data.get('nzo_ids')[0])
else:
return True
else:
log.error('Error getting data from SABNZBd: %s', sab_data)
result = sab.strip()
if not result:
log.error('SABnzbd didn\'t return anything.')
return False
def getAllDownloadStatus(self, ids):
log.debug('Result text from SAB: %s', result[:40])
if result[:2] == 'ok':
log.info('NZB sent to SAB successfully.')
return True
else:
log.error(result[:40])
return False
def getAllDownloadStatus(self):
log.debug('Checking SABnzbd download status.')
@@ -75,7 +72,7 @@ class Sabnzbd(Downloader):
})
except:
log.error('Failed getting queue: %s', traceback.format_exc(1))
return []
return False
# Go through history items
try:
@@ -85,61 +82,49 @@ class Sabnzbd(Downloader):
})
except:
log.error('Failed getting history json: %s', traceback.format_exc(1))
return []
return False
release_downloads = ReleaseDownloadList(self)
statuses = []
# Get busy releases
for nzb in queue.get('slots', []):
if nzb['nzo_id'] in ids:
status = 'busy'
if 'ENCRYPTED / ' in nzb['filename']:
status = 'failed'
release_downloads.append({
'id': nzb['nzo_id'],
'name': nzb['filename'],
'status': status,
'original_status': nzb['status'],
'timeleft': nzb['timeleft'] if not queue['paused'] else -1,
})
for item in queue.get('slots', []):
statuses.append({
'id': item['nzo_id'],
'name': item['filename'],
'status': 'busy',
'original_status': item['status'],
'timeleft': item['timeleft'] if not queue['paused'] else -1,
})
# Get old releases
for nzb in history.get('slots', []):
if nzb['nzo_id'] in ids:
status = 'busy'
if nzb['status'] == 'Failed' or (nzb['status'] == 'Completed' and nzb['fail_message'].strip()):
status = 'failed'
elif nzb['status'] == 'Completed':
status = 'completed'
release_downloads.append({
'id': nzb['nzo_id'],
'name': nzb['name'],
'status': status,
'original_status': nzb['status'],
'timeleft': str(timedelta(seconds = 0)),
'folder': sp(os.path.dirname(nzb['storage']) if os.path.isfile(nzb['storage']) else nzb['storage']),
})
for item in history.get('slots', []):
return release_downloads
status = 'busy'
if item['status'] == 'Failed' or (item['status'] == 'Completed' and item['fail_message'].strip()):
status = 'failed'
elif item['status'] == 'Completed':
status = 'completed'
def removeFailed(self, release_download):
statuses.append({
'id': item['nzo_id'],
'name': item['name'],
'status': status,
'original_status': item['status'],
'timeleft': 0,
})
log.info('%s failed downloading, deleting...', release_download['name'])
return statuses
def removeFailed(self, item):
log.info('%s failed downloading, deleting...', item['name'])
try:
self.call({
'mode': 'queue',
'name': 'delete',
'del_files': '1',
'value': release_download['id']
}, use_json = False)
self.call({
'mode': 'history',
'name': 'delete',
'del_files': '1',
'value': release_download['id']
'value': item['id']
}, use_json = False)
except:
log.error('Failed deleting: %s', traceback.format_exc(0))
@@ -147,37 +132,21 @@ class Sabnzbd(Downloader):
return True
def processComplete(self, release_download, delete_files = False):
log.debug('Requesting SabNZBd to remove the NZB %s.', release_download['name'])
def call(self, params, use_json = True):
try:
self.call({
'mode': 'history',
'name': 'delete',
'del_files': '0',
'value': release_download['id']
}, use_json = False)
except:
log.error('Failed removing: %s', traceback.format_exc(0))
return False
return True
def call(self, request_params, use_json = True, **kwargs):
url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(params, {
'apikey': self.conf('api_key'),
'output': 'json'
}))
data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
data = self.urlopen(url, timeout = 60, show_error = False)
if use_json:
d = json.loads(data)
if d.get('error'):
log.error('Error getting data from SABNZBd: %s', d.get('error'))
return {}
return d.get(request_params['mode']) or d
return d[params['mode']]
else:
return data

View File

@@ -18,7 +18,7 @@ config = [{
'name': 'enabled',
'default': 0,
'type': 'enabler',
'radio_group': 'nzb,torrent',
'radio_group': 'torrent',
},
{
'name': 'host',
@@ -32,13 +32,6 @@ config = [{
'name': 'password',
'type': 'password',
},
{
'name': 'use_for',
'label': 'Use for',
'default': 'both',
'type': 'dropdown',
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
},
{
'name': 'manual',
'default': 0,

View File

@@ -1,24 +1,22 @@
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import isInt
from couchpotato.core.logger import CPLog
import httplib
import json
import requests
import traceback
import urllib
import urllib2
log = CPLog(__name__)
class Synology(Downloader):
protocol = ['nzb', 'torrent', 'torrent_magnet']
status_support = False
type = ['torrent_magnet']
log = CPLog(__name__)
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
def download(self, data, movie, filedata = None):
response = False
log.error('Sending "%s" (%s) to Synology.', (data['name'], data['protocol']))
log.error('Sending "%s" (%s) to Synology.', (data.get('name'), data.get('type')))
# Load host from config and split out port.
host = self.conf('host').split(':')
@@ -26,47 +24,24 @@ class Synology(Downloader):
log.error('Config properties are not filled in correctly, port is missing.')
return False
if data.get('type') == 'torrent':
log.error('Can\'t add binary torrent file')
return False
try:
# Send request to Synology
# Send request to Transmission
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
if data['protocol'] == 'torrent_magnet':
log.info('Adding torrent URL %s', data['url'])
response = srpc.create_task(url = data['url'])
elif data['protocol'] in ['nzb', 'torrent']:
log.info('Adding %s' % data['protocol'])
if not filedata:
log.error('No %s data found', data['protocol'])
else:
filename = data['name'] + '.' + data['protocol']
response = srpc.create_task(filename = filename, filedata = filedata)
except:
log.error('Exception while adding torrent: %s', traceback.format_exc())
finally:
return self.downloadReturnId('') if response else False
remote_torrent = srpc.add_torrent_uri(data.get('url'))
log.info('Response: %s', remote_torrent)
return remote_torrent['success']
except Exception, err:
log.error('Exception while adding torrent: %s', err)
return False
def getEnabledProtocol(self):
if self.conf('use_for') == 'both':
return super(Synology, self).getEnabledProtocol()
elif self.conf('use_for') == 'torrent':
return ['torrent', 'torrent_magnet']
else:
return ['nzb']
def isEnabled(self, manual = False, data = None):
if not data: data = {}
for_protocol = ['both']
if data and 'torrent' in data.get('protocol'):
for_protocol.append('torrent')
elif data:
for_protocol.append(data.get('protocol'))
return super(Synology, self).isEnabled(manual, data) and\
((self.conf('use_for') in for_protocol))
class SynologyRPC(object):
"""SynologyRPC lite library"""
'''SynologyRPC lite library'''
def __init__(self, host = 'localhost', port = 5000, username = None, password = None):
@@ -83,13 +58,11 @@ class SynologyRPC(object):
args = {'api': 'SYNO.API.Auth', 'account': self.username, 'passwd': self.password, 'version': 2,
'method': 'login', 'session': self.session_name, 'format': 'sid'}
response = self._req(self.auth_url, args)
if response['success']:
if response['success'] == True:
self.sid = response['data']['sid']
log.debug('sid=%s', self.sid)
else:
log.error('Couldn\'t login to Synology, %s', response)
return response['success']
else:
log.debug('Sid=%s', self.sid)
return response
elif self.username or self.password:
log.error('User or password missing, not using authentication.')
return False
@@ -97,51 +70,36 @@ class SynologyRPC(object):
args = {'api':'SYNO.API.Auth', 'version':1, 'method':'logout', 'session':self.session_name, '_sid':self.sid}
return self._req(self.auth_url, args)
def _req(self, url, args, files = None):
response = {'success': False}
def _req(self, url, args):
req_url = url + '?' + urllib.urlencode(args)
try:
req = requests.post(url, data = args, files = files)
req.raise_for_status()
response = json.loads(req.text)
if response['success']:
req_open = urllib2.urlopen(req_url)
response = json.loads(req_open.read())
if response['success'] == True:
log.info('Synology action successfull')
return response
except requests.ConnectionError, err:
log.error('Synology connection error, check your config %s', err)
except requests.HTTPError, err:
except httplib.InvalidURL, err:
log.error('Invalid Transmission host, check your config %s', err)
return False
except urllib2.HTTPError, err:
log.error('SynologyRPC HTTPError: %s', err)
except Exception, err:
log.error('Exception: %s', err)
finally:
return response
return False
except urllib2.URLError, err:
log.error('Unable to connect to Synology %s', err)
return False
def create_task(self, url = None, filename = None, filedata = None):
""" Creates new download task in Synology DownloadStation. Either specify
url or pair (filename, filedata).
Returns True if task was created, False otherwise
"""
result = False
def add_torrent_uri(self, torrent):
log.info('Adding torrent URL %s', torrent)
response = {}
# login
if self._login():
args = {'api': 'SYNO.DownloadStation.Task',
'version': '1',
'method': 'create',
'_sid': self.sid}
if url:
log.info('Login success, adding torrent URI')
args['uri'] = url
response = self._req(self.download_url, args = args)
log.info('Response: %s', response)
result = response['success']
elif filename and filedata:
log.info('Login success, adding torrent')
files = {'file': (filename, filedata)}
response = self._req(self.download_url, args = args, files = files)
log.info('Response: %s', response)
result = response['success']
else:
log.error('Invalid use of SynologyRPC.create_task: either url or filename+filedata must be specified')
login = self._login()
if len(login) > 0 and login['success'] == True:
log.info('Login success, adding torrent')
args = {'api':'SYNO.DownloadStation.Task', 'version':1, 'method':'create', 'uri':torrent, '_sid':self.sid}
response = self._req(self.download_url, args)
self._logout()
else:
log.error('Couldn\'t login to Synology, %s', login)
return response
return result

View File

@@ -25,13 +25,6 @@ config = [{
'default': 'localhost:9091',
'description': 'Hostname with port. Usually <strong>localhost:9091</strong>',
},
{
'name': 'rpc_url',
'type': 'string',
'default': 'transmission',
'advanced': True,
'description': 'Change if you don\'t run Transmission RPC at the default url.',
},
{
'name': 'username',
},
@@ -39,34 +32,24 @@ config = [{
'name': 'password',
'type': 'password',
},
{
'name': 'directory',
'type': 'directory',
'description': 'Download to this directory. Keep empty for default Transmission download directory.',
},
{
'name': 'remove_complete',
'label': 'Remove torrent',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Remove the torrent from Transmission after it finished seeding.',
},
{
'name': 'delete_files',
'label': 'Remove files',
'default': True,
'type': 'bool',
'advanced': True,
'description': 'Also remove the leftover files.',
},
{
'name': 'paused',
'type': 'bool',
'advanced': True,
'default': False,
'description': 'Add the torrent paused.',
},
{
'name': 'directory',
'type': 'directory',
'description': 'Where should Transmission saved the downloaded files?',
},
{
'name': 'ratio',
'default': 10,
'type': 'int',
'advanced': True,
'description': 'Stop transfer when reaching ratio',
},
{
'name': 'manual',
'default': 0,
@@ -74,20 +57,6 @@ config = [{
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'stalled_as_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Consider a stalled torrent as failed',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},
],
}
],

View File

@@ -1,9 +1,7 @@
from base64 import b64encode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import isInt, sp
from couchpotato.core.helpers.variable import tryInt, tryFloat
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import isInt
from couchpotato.core.logger import CPLog
from datetime import timedelta
import httplib
import json
import os.path
@@ -15,144 +13,70 @@ log = CPLog(__name__)
class Transmission(Downloader):
protocol = ['torrent', 'torrent_magnet']
type = ['torrent', 'torrent_magnet']
log = CPLog(__name__)
trpc = None
def connect(self):
def download(self, data, movie, filedata = None):
log.debug('Sending "%s" (%s) to Transmission.', (data.get('name'), data.get('type')))
# Load host from config and split out port.
host = self.conf('host').split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
if not self.trpc:
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url'), username = self.conf('username'), password = self.conf('password'))
# Set parameters for Transmission
folder_name = self.createFileName(data, filedata, movie)[:-len(data.get('type')) - 1]
folder_path = os.path.join(self.conf('directory', default = ''), folder_name).rstrip(os.path.sep)
return self.trpc
# Create the empty folder to download too
self.makeDir(folder_path)
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
params = {
'paused': self.conf('paused', default = 0),
'download-dir': folder_path
}
log.info('Sending "%s" (%s) to Transmission.', (data.get('name'), data.get('protocol')))
torrent_params = {}
if self.conf('ratio'):
torrent_params = {
'seedRatioLimit': self.conf('ratio'),
'seedRatioMode': self.conf('ratio')
}
if not self.connect():
return False
if not filedata and data.get('protocol') == 'torrent':
if not filedata and data.get('type') == 'torrent':
log.error('Failed sending torrent, no data')
return False
# Set parameters for adding torrent
params = {
'paused': self.conf('paused', default = False)
}
if self.conf('directory'):
if os.path.isdir(self.conf('directory')):
params['download-dir'] = self.conf('directory')
else:
log.error('Download directory from Transmission settings: %s doesn\'t exist', self.conf('directory'))
# Change parameters of torrent
torrent_params = {}
if data.get('seed_ratio'):
torrent_params['seedRatioLimit'] = tryFloat(data.get('seed_ratio'))
torrent_params['seedRatioMode'] = 1
if data.get('seed_time'):
torrent_params['seedIdleLimit'] = tryInt(data.get('seed_time')) * 60
torrent_params['seedIdleMode'] = 1
# Send request to Transmission
if data.get('protocol') == 'torrent_magnet':
remote_torrent = self.trpc.add_torrent_uri(data.get('url'), arguments = params)
torrent_params['trackerAdd'] = self.torrent_trackers
else:
remote_torrent = self.trpc.add_torrent_file(b64encode(filedata), arguments = params)
try:
trpc = TransmissionRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
if data.get('type') == 'torrent_magnet':
remote_torrent = trpc.add_torrent_uri(data.get('url'), arguments = params)
torrent_params['trackerAdd'] = self.torrent_trackers
else:
remote_torrent = trpc.add_torrent_file(b64encode(filedata), arguments = params)
if not remote_torrent:
log.error('Failed sending torrent to Transmission')
# Change settings of added torrents
if torrent_params:
trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)
return True
except Exception, err:
log.error('Failed to change settings for transfer: %s', err)
return False
# Change settings of added torrents
if torrent_params:
self.trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)
log.info('Torrent sent to Transmission successfully.')
return self.downloadReturnId(remote_torrent['torrent-added']['hashString'])
def getAllDownloadStatus(self, ids):
log.debug('Checking Transmission download status.')
if not self.connect():
return []
release_downloads = ReleaseDownloadList(self)
return_params = {
'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
}
queue = self.trpc.get_alltorrents(return_params)
if not (queue and queue.get('torrents')):
log.debug('Nothing in queue or error')
return []
for torrent in queue['torrents']:
if torrent['hashString'] in ids:
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s',
(torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished']))
torrent_files = []
for file_item in torrent['files']:
torrent_files.append(sp(os.path.join(torrent['downloadDir'], file_item['name'])))
status = 'busy'
if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'):
status = 'failed'
elif torrent['status'] == 0 and torrent['percentDone'] == 1:
status = 'completed'
elif torrent['status'] in [5, 6]:
status = 'seeding'
release_downloads.append({
'id': torrent['hashString'],
'name': torrent['name'],
'status': status,
'original_status': torrent['status'],
'seed_ratio': torrent['uploadRatio'],
'timeleft': str(timedelta(seconds = torrent['eta'])),
'folder': sp(torrent['downloadDir'] if len(torrent_files) == 1 else os.path.join(torrent['downloadDir'], torrent['name'])),
'files': '|'.join(torrent_files)
})
return release_downloads
def pause(self, release_download, pause = True):
if pause:
return self.trpc.stop_torrent(release_download['id'])
else:
return self.trpc.start_torrent(release_download['id'])
def removeFailed(self, release_download):
log.info('%s failed downloading, deleting...', release_download['name'])
return self.trpc.remove_torrent(release_download['id'], True)
def processComplete(self, release_download, delete_files = False):
log.debug('Requesting Transmission to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
return self.trpc.remove_torrent(release_download['id'], delete_files)
class TransmissionRPC(object):
"""TransmissionRPC lite library"""
def __init__(self, host = 'localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
def __init__(self, host = 'localhost', port = 9091, username = None, password = None):
super(TransmissionRPC, self).__init__()
self.url = 'http://' + host + ':' + str(port) + '/' + rpc_url + '/rpc'
self.url = 'http://' + host + ':' + str(port) + '/transmission/rpc'
self.tag = 0
self.session_id = 0
self.session = {}
@@ -173,10 +97,9 @@ class TransmissionRPC(object):
try:
open_request = urllib2.urlopen(request)
response = json.loads(open_request.read())
log.debug('request: %s', json.dumps(ojson))
log.debug('response: %s', json.dumps(response))
if response['result'] == 'success':
log.debug('Transmission action successful')
log.debug('Transmission action successfull')
return response['arguments']
else:
log.debug('Unknown failure sending command to Transmission. Return text is: %s', response['result'])
@@ -223,20 +146,3 @@ class TransmissionRPC(object):
arguments['ids'] = torrent_id
post_data = {'arguments': arguments, 'method': 'torrent-set', 'tag': self.tag}
return self._request(post_data)
def get_alltorrents(self, arguments):
post_data = {'arguments': arguments, 'method': 'torrent-get', 'tag': self.tag}
return self._request(post_data)
def stop_torrent(self, torrent_id):
post_data = {'arguments': {'ids': torrent_id}, 'method': 'torrent-stop', 'tag': self.tag}
return self._request(post_data)
def start_torrent(self, torrent_id):
post_data = {'arguments': {'ids': torrent_id}, 'method': 'torrent-start', 'tag': self.tag}
return self._request(post_data)
def remove_torrent(self, torrent_id, delete_local_data):
post_data = {'arguments': {'ids': torrent_id, 'delete-local-data': delete_local_data}, 'method': 'torrent-remove', 'tag': self.tag}
return self._request(post_data)

View File

@@ -11,7 +11,7 @@ config = [{
'list': 'download_providers',
'name': 'utorrent',
'label': 'uTorrent',
'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> (3.0+) to download torrents.',
'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> to download torrents.',
'wizard': True,
'options': [
{
@@ -36,26 +36,9 @@ config = [{
'name': 'label',
'description': 'Label to add torrent as.',
},
{
'name': 'remove_complete',
'label': 'Remove torrent',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Remove the torrent from uTorrent after it finished seeding.',
},
{
'name': 'delete_files',
'label': 'Remove files',
'default': True,
'type': 'bool',
'advanced': True,
'description': 'Also remove the leftover files.',
},
{
'name': 'paused',
'type': 'bool',
'advanced': True,
'default': False,
'description': 'Add the torrent paused.',
},
@@ -66,13 +49,6 @@ config = [{
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},
],
}
],

View File

@@ -1,202 +1,69 @@
from base64 import b16encode, b32decode
from bencode import bencode as benc, bdecode
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
from couchpotato.core.helpers.encoding import isInt, ss, sp
from couchpotato.core.helpers.variable import tryInt, tryFloat
from bencode import bencode, bdecode
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import isInt, ss
from couchpotato.core.logger import CPLog
from datetime import timedelta
from hashlib import sha1
from multipartpost import MultipartPostHandler
import cookielib
import httplib
import json
import os
import re
import stat
import time
import urllib
import urllib2
log = CPLog(__name__)
class uTorrent(Downloader):
protocol = ['torrent', 'torrent_magnet']
type = ['torrent', 'torrent_magnet']
utorrent_api = None
status_flags = {
'STARTED' : 1,
'CHECKING' : 2,
'CHECK-START' : 4,
'CHECKED' : 8,
'ERROR' : 16,
'PAUSED' : 32,
'QUEUED' : 64,
'LOADED' : 128
}
def connect(self):
def download(self, data, movie, filedata = None):
log.debug('Sending "%s" (%s) to uTorrent.', (data.get('name'), data.get('type')))
# Load host from config and split out port.
host = self.conf('host').split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
self.utorrent_api = uTorrentAPI(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
return self.utorrent_api
def download(self, data = None, media = None, filedata = None):
if not media: media = {}
if not data: data = {}
log.debug("Sending '%s' (%s) to uTorrent.", (data.get('name'), data.get('protocol')))
if not self.connect():
return False
settings = self.utorrent_api.get_settings()
if not settings:
return False
#Fix settings in case they are not set for CPS compatibility
new_settings = {}
if not (settings.get('seed_prio_limitul') == 0 and settings['seed_prio_limitul_flag']):
new_settings['seed_prio_limitul'] = 0
new_settings['seed_prio_limitul_flag'] = True
log.info('Updated uTorrent settings to set a torrent to complete after it the seeding requirements are met.')
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
new_settings['bt.read_only_on_complete'] = False
log.info('Updated uTorrent settings to not set the files to read only after completing.')
if new_settings:
self.utorrent_api.set_settings(new_settings)
torrent_params = {}
if self.conf('label'):
torrent_params['label'] = self.conf('label')
if not filedata and data.get('protocol') == 'torrent':
if not filedata and data.get('type') == 'torrent':
log.error('Failed sending torrent, no data')
return False
if data.get('protocol') == 'torrent_magnet':
if data.get('type') == 'torrent_magnet':
torrent_hash = re.findall('urn:btih:([\w]{32,40})', data.get('url'))[0].upper()
torrent_params['trackers'] = '%0D%0A%0D%0A'.join(self.torrent_trackers)
else:
info = bdecode(filedata)['info']
torrent_hash = sha1(benc(info)).hexdigest().upper()
torrent_filename = self.createFileName(data, filedata, media)
if data.get('seed_ratio'):
torrent_params['seed_override'] = 1
torrent_params['seed_ratio'] = tryInt(tryFloat(data['seed_ratio']) * 1000)
if data.get('seed_time'):
torrent_params['seed_override'] = 1
torrent_params['seed_time'] = tryInt(data['seed_time']) * 3600
# Convert base 32 to hex
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash))
info = bdecode(filedata)["info"]
torrent_hash = sha1(bencode(info)).hexdigest().upper()
torrent_filename = self.createFileName(data, filedata, movie)
# Send request to uTorrent
if data.get('protocol') == 'torrent_magnet':
self.utorrent_api.add_torrent_uri(torrent_filename, data.get('url'))
else:
self.utorrent_api.add_torrent_file(torrent_filename, filedata)
try:
if not self.utorrent_api:
self.utorrent_api = uTorrentAPI(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
# Change settings of added torrent
self.utorrent_api.set_torrent(torrent_hash, torrent_params)
if self.conf('paused', default = 0):
self.utorrent_api.pause_torrent(torrent_hash)
if data.get('type') == 'torrent_magnet':
self.utorrent_api.add_torrent_uri(data.get('url'))
else:
self.utorrent_api.add_torrent_file(torrent_filename, filedata)
return self.downloadReturnId(torrent_hash)
def getAllDownloadStatus(self, ids):
log.debug('Checking uTorrent download status.')
if not self.connect():
return []
release_downloads = ReleaseDownloadList(self)
data = self.utorrent_api.get_status()
if not data:
log.error('Error getting data from uTorrent')
return []
queue = json.loads(data)
if queue.get('error'):
log.error('Error getting data from uTorrent: %s', queue.get('error'))
return []
if not queue.get('torrents'):
log.debug('Nothing in queue')
return []
# Get torrents
for torrent in queue['torrents']:
if torrent[0] in ids:
#Get files of the torrent
torrent_files = []
try:
torrent_files = json.loads(self.utorrent_api.get_files(torrent[0]))
torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
except:
log.debug('Failed getting files from torrent: %s', torrent[2])
status = 'busy'
if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
status = 'seeding'
elif (torrent[1] & self.status_flags['ERROR']):
status = 'failed'
elif torrent[4] == 1000:
status = 'completed'
if not status == 'busy':
self.removeReadOnly(torrent_files)
release_downloads.append({
'id': torrent[0],
'name': torrent[2],
'status': status,
'seed_ratio': float(torrent[7]) / 1000,
'original_status': torrent[1],
'timeleft': str(timedelta(seconds = torrent[10])),
'folder': sp(torrent[26]),
'files': '|'.join(torrent_files)
})
return release_downloads
def pause(self, release_download, pause = True):
if not self.connect():
# Change settings of added torrents
self.utorrent_api.set_torrent(torrent_hash, torrent_params)
if self.conf('paused', default = 0):
self.utorrent_api.pause_torrent(torrent_hash)
return True
except Exception, err:
log.error('Failed to send torrent to uTorrent: %s', err)
return False
return self.utorrent_api.pause_torrent(release_download['id'], pause)
def removeFailed(self, release_download):
log.info('%s failed downloading, deleting...', release_download['name'])
if not self.connect():
return False
return self.utorrent_api.remove_torrent(release_download['id'], remove_data = True)
def processComplete(self, release_download, delete_files = False):
log.debug('Requesting uTorrent to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
if not self.connect():
return False
return self.utorrent_api.remove_torrent(release_download['id'], remove_data = delete_files)
def removeReadOnly(self, files):
#Removes all read-on ly flags in a for all files
for filepath in files:
if os.path.isfile(filepath):
#Windows only needs S_IWRITE, but we bitwise-or with current perms to preserve other permission bits on Linux
os.chmod(filepath, stat.S_IWRITE | os.stat(filepath).st_mode)
class uTorrentAPI(object):
@@ -223,11 +90,13 @@ class uTorrentAPI(object):
if time.time() > self.last_time + 1800:
self.last_time = time.time()
self.token = self.get_token()
request = urllib2.Request(self.url + '?token=' + self.token + '&' + action, data)
request = urllib2.Request(self.url + "?token=" + self.token + "&" + action, data)
try:
open_request = self.opener.open(request)
response = open_request.read()
log.debug('response: %s', response)
if response:
log.debug('uTorrent action successfull')
return response
else:
log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
@@ -243,82 +112,24 @@ class uTorrentAPI(object):
return False
def get_token(self):
request = self.opener.open(self.url + 'token.html')
token = re.findall('<div.*?>(.*?)</', request.read())[0]
request = self.opener.open(self.url + "token.html")
token = re.findall("<div.*?>(.*?)</", request.read())[0]
return token
def add_torrent_uri(self, filename, torrent, add_folder = False):
action = 'action=add-url&s=%s' % urllib.quote(torrent)
if add_folder:
action += '&path=%s' % urllib.quote(filename)
def add_torrent_uri(self, torrent):
action = "action=add-url&s=%s" % urllib.quote(torrent)
return self._request(action)
def add_torrent_file(self, filename, filedata, add_folder = False):
action = 'action=add-file'
if add_folder:
action += '&path=%s' % urllib.quote(filename)
return self._request(action, {'torrent_file': (ss(filename), filedata)})
def add_torrent_file(self, filename, filedata):
action = "action=add-file"
return self._request(action, {"torrent_file": (ss(filename), filedata)})
def set_torrent(self, hash, params):
action = 'action=setprops&hash=%s' % hash
action = "action=setprops&hash=%s" % hash
for k, v in params.iteritems():
action += '&s=%s&v=%s' % (k, v)
action += "&s=%s&v=%s" % (k, v)
return self._request(action)
def pause_torrent(self, hash, pause = True):
if pause:
action = 'action=pause&hash=%s' % hash
else:
action = 'action=unpause&hash=%s' % hash
return self._request(action)
def stop_torrent(self, hash):
action = 'action=stop&hash=%s' % hash
return self._request(action)
def remove_torrent(self, hash, remove_data = False):
if remove_data:
action = 'action=removedata&hash=%s' % hash
else:
action = 'action=remove&hash=%s' % hash
return self._request(action)
def get_status(self):
action = 'list=1'
return self._request(action)
def get_settings(self):
action = 'action=getsettings'
settings_dict = {}
try:
utorrent_settings = json.loads(self._request(action))
# Create settings dict
for setting in utorrent_settings['settings']:
if setting[1] == 0: # int
settings_dict[setting[0]] = int(setting[2] if not setting[2].strip() == '' else '0')
elif setting[1] == 1: # bool
settings_dict[setting[0]] = True if setting[2] == 'true' else False
elif setting[1] == 2: # string
settings_dict[setting[0]] = setting[2]
#log.debug('uTorrent settings: %s', settings_dict)
except Exception, err:
log.error('Failed to get settings from uTorrent: %s', err)
return settings_dict
def set_settings(self, settings_dict = None):
if not settings_dict: settings_dict = {}
for key in settings_dict:
if isinstance(settings_dict[key], bool):
settings_dict[key] = 1 if settings_dict[key] else 0
action = 'action=setsetting' + ''.join(['&s=%s&v=%s' % (key, value) for (key, value) in settings_dict.items()])
return self._request(action)
def get_files(self, hash):
action = 'action=getfiles&hash=%s' % hash
def pause_torrent(self, hash):
action = "action=pause&hash=%s" % hash
return self._request(action)

View File

@@ -16,45 +16,33 @@ def runHandler(name, handler, *args, **kwargs):
def addEvent(name, handler, priority = 100):
if not events.get(name):
events[name] = []
if events.get(name):
e = events[name]
else:
e = events[name] = Event(name = name, threads = 10, exc_info = True, traceback = True, lock = threading.RLock())
def createHandle(*args, **kwargs):
h = None
try:
# Open handler
has_parent = hasattr(handler, 'im_self')
parent = None
if has_parent:
parent = handler.im_self
bc = hasattr(parent, 'beforeCall')
if bc: parent.beforeCall(handler)
# Main event
parent = handler.im_self
bc = hasattr(parent, 'beforeCall')
if bc: parent.beforeCall(handler)
h = runHandler(name, handler, *args, **kwargs)
# Close handler
if parent and has_parent:
ac = hasattr(parent, 'afterCall')
if ac: parent.afterCall(handler)
ac = hasattr(parent, 'afterCall')
if ac: parent.afterCall(handler)
except:
log.error('Failed creating handler %s %s: %s', (name, handler, traceback.format_exc()))
h = runHandler(name, handler, *args, **kwargs)
return h
events[name].append({
'handler': createHandle,
'priority': priority,
})
e.handle(createHandle, priority = priority)
def removeEvent(name, handler):
e = events[name]
e -= handler
def fireEvent(name, *args, **kwargs):
if not events.has_key(name): return
if not events.get(name): return
#log.debug('Firing event %s', name)
try:
@@ -74,32 +62,22 @@ def fireEvent(name, *args, **kwargs):
options[x] = val
except: pass
if len(events[name]) == 1:
e = events[name]
single = None
try:
single = events[name][0]['handler'](*args, **kwargs)
except:
log.error('Failed running single event: %s', traceback.format_exc())
# Lock this event
e.lock.acquire()
# Don't load thread for single event
result = {
'single': (single is not None, single),
}
e.asynchronous = False
else:
# Make sure only 1 event is fired at a time when order is wanted
kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None
kwargs['event_return_on_result'] = options['single']
e = Event(name = name, threads = 10, exc_info = True, traceback = True, lock = threading.RLock())
# Fire
result = e(*args, **kwargs)
for event in events[name]:
e.handle(event['handler'], priority = event['priority'])
# Make sure only 1 event is fired at a time when order is wanted
kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None
kwargs['event_return_on_result'] = options['single']
# Fire
result = e(*args, **kwargs)
# Release lock for this event
e.lock.release()
if options['single'] and not options['merge']:
results = None
@@ -126,14 +104,11 @@ def fireEvent(name, *args, **kwargs):
# Merge
if options['merge'] and len(results) > 0:
# Dict
if isinstance(results[0], dict):
results.reverse()
merged = {}
for result in results:
merged = mergeDicts(merged, result, prepend_list = True)
merged = mergeDicts(merged, result)
results = merged
# Lists
@@ -157,14 +132,16 @@ def fireEvent(name, *args, **kwargs):
options['on_complete']()
return results
except KeyError, e:
pass
except Exception:
log.error('%s: %s', (name, traceback.format_exc()))
def fireEventAsync(*args, **kwargs):
try:
t = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
t.setDaemon(True)
t.start()
my_thread = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
my_thread.setDaemon(True)
my_thread.start()
return True
except Exception, e:
log.error('%s: %s', (args[0], e))

View File

@@ -1,7 +1,6 @@
from couchpotato.core.logger import CPLog
from string import ascii_letters, digits
from urllib import quote_plus
import os
import re
import traceback
import unicodedata
@@ -12,8 +11,7 @@ log = CPLog(__name__)
def toSafeString(original):
valid_chars = "-_.() %s%s" % (ascii_letters, digits)
cleanedFilename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
valid_string = ''.join(c for c in cleanedFilename if c in valid_chars)
return ' '.join(valid_string.split())
return ''.join(c for c in cleanedFilename if c in valid_chars)
def simplifyString(original):
string = stripAccents(original.lower())
@@ -39,39 +37,8 @@ def toUnicode(original, *args):
return toUnicode(ascii_text)
def ss(original, *args):
u_original = toUnicode(original, *args)
try:
from couchpotato.environment import Env
return u_original.encode(Env.get('encoding'))
except Exception, e:
log.debug('Failed ss encoding char, force UTF8: %s', e)
return u_original.encode('UTF-8')
def sp(path, *args):
# Standardise encoding, normalise case, path and strip trailing '/' or '\'
if not path or len(path) == 0:
return path
# convert windows path (from remote box) to *nix path
if os.path.sep == '/' and '\\' in path:
path = '/' + path.replace(':', '').replace('\\', '/')
path = os.path.normcase(os.path.normpath(ss(path, *args)))
# Remove any trailing path separators
if path != os.path.sep:
path = path.rstrip(os.path.sep)
# Add a trailing separator in case it is a root folder on windows (crashes guessit)
if len(path) == 2 and path[1] == ':':
path = path + os.path.sep
# Replace *NIX ambiguous '//' at the beginning of a path with '/' (crashes guessit)
path = re.sub('^//', '/', path)
return path
from couchpotato.environment import Env
return toUnicode(original, *args).encode(Env.get('encoding'))
def ek(original, *args):
if isinstance(original, (str, unicode)):
@@ -95,7 +62,7 @@ def stripAccents(s):
def tryUrlencode(s):
new = u''
if isinstance(s, dict):
if isinstance(s, (dict)):
for key, value in s.iteritems():
new += u'&%s=%s' % (key, tryUrlencode(value))

View File

@@ -1,14 +1,18 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import natcmp
from flask.globals import current_app
from flask.helpers import json, make_response
from urllib import unquote
from werkzeug.urls import url_decode
import flask
import re
def getParams():
def getParams(params):
params = url_decode(getattr(flask.request, 'environ').get('QUERY_STRING', ''))
reg = re.compile('^[a-z0-9_\.]+$')
temp = {}
current = temp = {}
for param, value in sorted(params.iteritems()):
nest = re.split("([\[\]]+)", param)
@@ -32,8 +36,6 @@ def getParams(params):
current = current[item]
else:
temp[param] = toUnicode(unquote(value))
if temp[param].lower() in ['true', 'false']:
temp[param] = temp[param].lower() != 'false'
return dictToList(temp)
@@ -52,3 +54,29 @@ def dictToList(params):
new = params
return new
def getParam(attr, default = None):
try:
return getParams().get(attr, default)
except:
return default
def padded_jsonify(callback, *args, **kwargs):
content = str(callback) + '(' + json.dumps(dict(*args, **kwargs)) + ')'
return getattr(current_app, 'response_class')(content, mimetype = 'text/javascript')
def jsonify(mimetype, *args, **kwargs):
content = json.dumps(dict(*args, **kwargs))
return getattr(current_app, 'response_class')(content, mimetype = mimetype)
def jsonified(*args, **kwargs):
callback = getParam('callback_func', None)
if callback:
content = padded_jsonify(callback, *args, **kwargs)
else:
content = jsonify('application/json', *args, **kwargs)
response = make_response(content)
response.cache_control.no_cache = True
return response

View File

@@ -6,7 +6,7 @@ log = CPLog(__name__)
class RSS(object):
def getTextElements(self, xml, path):
""" Find elements and return tree"""
''' Find elements and return tree'''
textelements = []
try:
@@ -28,7 +28,7 @@ class RSS(object):
return elements
def getElement(self, xml, path):
""" Find element and return text"""
''' Find element and return text'''
try:
return xml.find(path)
@@ -36,7 +36,7 @@ class RSS(object):
return
def getTextElement(self, xml, path):
""" Find element and return text"""
''' Find element and return text'''
try:
return xml.find(path).text

View File

@@ -1,8 +1,7 @@
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss
from couchpotato.core.helpers.encoding import simplifyString, toSafeString
from couchpotato.core.logger import CPLog
import collections
import hashlib
import os
import os.path
import platform
import random
import re
@@ -11,23 +10,6 @@ import sys
log = CPLog(__name__)
def fnEscape(pattern):
return pattern.replace('[','[[').replace(']','[]]').replace('[[','[[]')
def link(src, dst):
if os.name == 'nt':
import ctypes
if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
else:
os.link(src, dst)
def symlink(src, dst):
if os.name == 'nt':
import ctypes
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
else:
os.symlink(src, dst)
def getUserDir():
try:
import pwd
@@ -71,7 +53,7 @@ def getDataDir():
def isDict(object):
return isinstance(object, dict)
def mergeDicts(a, b, prepend_list = False):
def mergeDicts(a, b):
assert isDict(a), isDict(b)
dst = a.copy()
@@ -85,7 +67,7 @@ def mergeDicts(a, b, prepend_list = False):
if isDict(current_src[key]) and isDict(current_dst[key]):
stack.append((current_dst[key], current_src[key]))
elif isinstance(current_src[key], list) and isinstance(current_dst[key], list):
current_dst[key] = current_src[key] + current_dst[key] if prepend_list else current_dst[key] + current_src[key]
current_dst[key].extend(current_src[key])
current_dst[key] = removeListDuplicates(current_dst[key])
else:
current_dst[key] = current_src[key]
@@ -105,16 +87,11 @@ def flattenList(l):
return l
def md5(text):
return hashlib.md5(ss(text)).hexdigest()
return hashlib.md5(text).hexdigest()
def sha1(text):
return hashlib.sha1(text).hexdigest()
def isLocalIP(ip):
ip = ip.lstrip('htps:/')
regex = '/(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1)$/'
return re.search(regex, ip) is not None or 'localhost' in ip or ip[:4] == '127.'
def getExt(filename):
return os.path.splitext(filename)[1][1:]
@@ -122,17 +99,12 @@ def cleanHost(host):
if not host.startswith(('http://', 'https://')):
host = 'http://' + host
host = host.rstrip('/')
host += '/'
if not host.endswith('/'):
host += '/'
return host
def getImdb(txt, check_inside = False, multiple = False):
if not check_inside:
txt = simplifyString(txt)
else:
txt = ss(txt)
def getImdb(txt, check_inside = True, multiple = False):
if check_inside and os.path.isfile(txt):
output = open(txt, 'r')
@@ -140,27 +112,21 @@ def getImdb(txt, check_inside = False, multiple = False):
output.close()
try:
ids = re.findall('(tt\d{4,7})', txt)
ids = re.findall('(tt\d{7})', txt)
if multiple:
return list(set(['tt%07d' % tryInt(x[2:]) for x in ids])) if len(ids) > 0 else []
return 'tt%07d' % tryInt(ids[0][2:])
return ids if len(ids) > 0 else []
return ids[0]
except IndexError:
pass
return False
def tryInt(s, default = 0):
def tryInt(s):
try: return int(s)
except: return default
except: return 0
def tryFloat(s):
try:
if isinstance(s, str):
return float(s) if '.' in s else tryInt(s)
else:
return float(s)
try: return float(s) if '.' in s else tryInt(s)
except: return 0
def natsortKey(s):
@@ -169,11 +135,6 @@ def natsortKey(s):
def natcmp(a, b):
return cmp(natsortKey(a), natsortKey(b))
def toIterable(value):
if type(value) in [list, tuple]:
return value
return [value]
def getTitle(library_dict):
try:
try:
@@ -184,11 +145,8 @@ def getTitle(library_dict):
if title.default:
return title.title
except:
try:
return library_dict['info']['titles'][0]
except:
log.error('Could not get title for %s', library_dict.identifier)
return None
log.error('Could not get title for %s', library_dict.identifier)
return None
log.error('Could not get title for %s', library_dict['identifier'])
return None
@@ -198,28 +156,16 @@ def getTitle(library_dict):
def possibleTitles(raw_title):
titles = [
toSafeString(raw_title).lower(),
raw_title.lower(),
simplifyString(raw_title)
]
titles = []
# replace some chars
new_title = raw_title.replace('&', 'and')
titles.append(simplifyString(new_title))
titles.append(toSafeString(raw_title).lower())
titles.append(raw_title.lower())
titles.append(simplifyString(raw_title))
return list(set(titles))
def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
def splitString(str, split_on = ',', clean = True):
list = [x.strip() for x in str.split(split_on)] if str else []
return filter(None, list) if clean else list
def dictIsSubset(a, b):
return all([k in b and b[k] == v for k, v in a.items()])
def isSubFolder(sub_folder, base_folder):
# Returns True is sub_folder is the same as or in base_folder
return base_folder.rstrip(os.path.sep) + os.path.sep in sub_folder.rstrip(os.path.sep) + os.path.sep
def splitString(str, split_on = ','):
return [x.strip() for x in str.split(split_on)]

View File

@@ -1,19 +1,20 @@
from couchpotato.core.event import fireEvent
from couchpotato.core.logger import CPLog
from importlib import import_module
import glob
import os
import sys
import traceback
log = CPLog(__name__)
class Loader(object):
plugins = {}
providers = {}
modules = {}
def preload(self, root = ''):
core = os.path.join(root, 'couchpotato', 'core')
self.paths = {
@@ -24,19 +25,13 @@ class Loader(object):
}
# Add providers to loader
self.addPath(root, ['couchpotato', 'core', 'providers'], 25, recursive = False)
provider_dir = os.path.join(root, 'couchpotato', 'core', 'providers')
for provider in os.listdir(provider_dir):
path = os.path.join(provider_dir, provider)
if os.path.isdir(path):
self.paths[provider + '_provider'] = (25, 'couchpotato.core.providers.' + provider, path)
# Add media to loader
self.addPath(root, ['couchpotato', 'core', 'media'], 25, recursive = True)
# Add custom plugin folder
from couchpotato.environment import Env
custom_plugin_dir = os.path.join(Env.get('data_dir'), 'custom_plugins')
if os.path.isdir(custom_plugin_dir):
sys.path.insert(0, custom_plugin_dir)
self.paths['custom_plugins'] = (30, '', custom_plugin_dir)
# Loop over all paths and add to module list
for plugin_type, plugin_tuple in self.paths.iteritems():
priority, module, dir_name = plugin_tuple
self.addFromDir(plugin_type, priority, module, dir_name)
@@ -44,17 +39,11 @@ class Loader(object):
def run(self):
did_save = 0
for priority in sorted(self.modules):
for priority in self.modules:
for module_name, plugin in sorted(self.modules[priority].iteritems()):
# Load module
try:
if plugin.get('name')[:2] == '__':
continue
m = self.loadModule(module_name)
if m is None:
continue
m = getattr(self.loadModule(module_name), plugin.get('name'))
log.info('Loading %s: %s', (plugin['type'], plugin['name']))
@@ -64,7 +53,7 @@ class Loader(object):
self.loadPlugins(m, plugin.get('name'))
except ImportError as e:
# todo:: subclass ImportError for missing requirements.
if e.message.lower().startswith("missing"):
if (e.message.lower().startswith("missing")):
log.error(e.message)
pass
# todo:: this needs to be more descriptive.
@@ -76,35 +65,27 @@ class Loader(object):
if did_save:
fireEvent('settings.save')
def addPath(self, root, base_path, priority, recursive = False):
root_path = os.path.join(root, *base_path)
for filename in os.listdir(root_path):
path = os.path.join(root_path, filename)
if os.path.isdir(path) and filename[:2] != '__':
if u'__init__.py' in os.listdir(path):
new_base_path = ''.join(s + '.' for s in base_path) + filename
self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)
if recursive:
self.addPath(root, base_path + [filename], priority, recursive = True)
def addFromDir(self, plugin_type, priority, module, dir_name):
# Load dir module
if module and len(module) > 0:
self.addModule(priority, plugin_type, module, os.path.basename(dir_name))
try:
m = __import__(module)
splitted = module.split('.')
for sub in splitted[1:]:
m = getattr(m, sub)
for name in os.listdir(dir_name):
if os.path.isdir(os.path.join(dir_name, name)) and name != 'static' and os.path.isfile(os.path.join(dir_name, name, '__init__.py')):
if hasattr(m, 'config'):
fireEvent('settings.options', splitted[-1] + '_config', getattr(m, 'config'))
except:
raise
for cur_file in glob.glob(os.path.join(dir_name, '*')):
name = os.path.basename(cur_file)
if os.path.isdir(os.path.join(dir_name, name)):
module_name = '%s.%s' % (module, name)
self.addModule(priority, plugin_type, module_name, name)
def loadSettings(self, module, name, save = True):
if not hasattr(module, 'config'):
log.debug('Skip loading settings for plugin %s as it has no config section' % module.__file__)
return False
try:
for section in module.config:
fireEvent('settings.options', section['name'], section)
@@ -119,14 +100,15 @@ class Loader(object):
return False
def loadPlugins(self, module, name):
if not hasattr(module, 'start'):
log.debug('Skip startup for plugin %s as it has no start section' % module.__file__)
return False
try:
module.start()
klass = module.start()
klass.registerPlugin()
if klass and getattr(klass, 'auto_register_static'):
klass.registerStatic(module.__file__)
return True
except:
except Exception, e:
log.error('Failed loading plugin "%s": %s', (module.__file__, traceback.format_exc()))
return False
@@ -135,7 +117,6 @@ class Loader(object):
if not self.modules.get(priority):
self.modules[priority] = {}
module = module.lstrip('.')
self.modules[priority][module] = {
'priority': priority,
'module': module,
@@ -145,9 +126,10 @@ class Loader(object):
def loadModule(self, name):
try:
return import_module(name)
except ImportError:
log.debug('Skip loading module plugin %s: %s', (name, traceback.format_exc()))
return None
m = __import__(name)
splitted = name.split('.')
for sub in splitted[1:-1]:
m = getattr(m, sub)
return m
except:
raise

View File

@@ -1,10 +1,11 @@
import logging
import re
import traceback
class CPLog(object):
context = ''
replace_private = ['api', 'apikey', 'api_key', 'password', 'username', 'h', 'uid', 'key', 'passkey']
replace_private = ['api', 'apikey', 'api_key', 'password', 'username', 'h', 'uid', 'key']
def __init__(self, context = ''):
if context.endswith('.main'):
@@ -49,8 +50,8 @@ class CPLog(object):
msg = msg % tuple([ss(x) for x in list(replace_tuple)])
else:
msg = msg % ss(replace_tuple)
except Exception, e:
self.logger.error(u'Failed encoding stuff to log "%s": %s' % (msg, e))
except:
self.logger.error(u'Failed encoding stuff to log: %s' % traceback.format_exc())
if not Env.get('dev'):

View File

@@ -1,52 +0,0 @@
from couchpotato import get_session
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
from couchpotato.core.helpers.variable import mergeDicts
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Media
class MediaBase(Plugin):
_type = None
default_dict = {
'profile': {'types': {'quality': {}}},
'releases': {'status': {}, 'quality': {}, 'files':{}, 'info': {}},
'library': {'titles': {}, 'files':{}},
'files': {},
'status': {},
'category': {},
}
search_dict = mergeDicts({
'library': {
'related_libraries': {},
'root_library': {}
},
}, default_dict)
def initType(self):
addEvent('media.types', self.getType)
def getType(self):
return self._type
def createOnComplete(self, id):
def onComplete():
db = get_session()
media = db.query(Media).filter_by(id = id).first()
fireEventAsync('%s.searcher.single' % media.type, media.to_dict(self.search_dict), on_complete = self.createNotifyFront(id))
db.expire_all()
return onComplete
def createNotifyFront(self, media_id):
def notifyFront():
db = get_session()
media = db.query(Media).filter_by(id = media_id).first()
fireEvent('notify.frontend', type = '%s.update' % media.type, data = media.to_dict(self.default_dict))
db.expire_all()
return notifyFront

View File

@@ -1,6 +0,0 @@
from .main import Library
def start():
return Library()
config = []

View File

@@ -1,13 +0,0 @@
from couchpotato.core.event import addEvent
from couchpotato.core.plugins.base import Plugin
class LibraryBase(Plugin):
_type = None
def initType(self):
addEvent('library.types', self.getType)
def getType(self):
return self._type

View File

@@ -1,18 +0,0 @@
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.media._base.library.base import LibraryBase
class Library(LibraryBase):
def __init__(self):
addEvent('library.title', self.title)
def title(self, library):
return fireEvent(
'library.query',
library,
condense = False,
include_year = False,
include_identifier = False,
single = True
)

View File

@@ -1,6 +0,0 @@
from .main import Matcher
def start():
return Matcher()
config = []

View File

@@ -1,84 +0,0 @@
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import simplifyString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
class MatcherBase(Plugin):
type = None
def __init__(self):
if self.type:
addEvent('%s.matcher.correct' % self.type, self.correct)
def correct(self, chain, release, media, quality):
raise NotImplementedError()
def flattenInfo(self, info):
# Flatten dictionary of matches (chain info)
if isinstance(info, dict):
return dict([(key, self.flattenInfo(value)) for key, value in info.items()])
# Flatten matches
result = None
for match in info:
if isinstance(match, dict):
if result is None:
result = {}
for key, value in match.items():
if key not in result:
result[key] = []
result[key].append(value)
else:
if result is None:
result = []
result.append(match)
return result
def constructFromRaw(self, match):
if not match:
return None
parts = [
''.join([
y for y in x[1:] if y
]) for x in match
]
return ''.join(parts)[:-1].strip()
def simplifyValue(self, value):
if not value:
return value
if isinstance(value, basestring):
return simplifyString(value)
if isinstance(value, list):
return [self.simplifyValue(x) for x in value]
raise ValueError("Unsupported value type")
def chainMatch(self, chain, group, tags):
info = self.flattenInfo(chain.info[group])
found_tags = []
for tag, accepted in tags.items():
values = [self.simplifyValue(x) for x in info.get(tag, [None])]
if any([val in accepted for val in values]):
found_tags.append(tag)
log.debug('tags found: %s, required: %s' % (found_tags, tags.keys()))
if set(tags.keys()) == set(found_tags):
return True
return all([key in found_tags for key, value in tags.items()])

View File

@@ -1,88 +0,0 @@
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.variable import possibleTitles
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.matcher.base import MatcherBase
from caper import Caper
log = CPLog(__name__)
class Matcher(MatcherBase):
def __init__(self):
super(Matcher, self).__init__()
self.caper = Caper()
addEvent('matcher.parse', self.parse)
addEvent('matcher.match', self.match)
addEvent('matcher.flatten_info', self.flattenInfo)
addEvent('matcher.construct_from_raw', self.constructFromRaw)
addEvent('matcher.correct_title', self.correctTitle)
addEvent('matcher.correct_quality', self.correctQuality)
def parse(self, name, parser='scene'):
return self.caper.parse(name, parser)
def match(self, release, media, quality):
match = fireEvent('matcher.parse', release['name'], single = True)
if len(match.chains) < 1:
log.info2('Wrong: %s, unable to parse release name (no chains)', release['name'])
return False
for chain in match.chains:
if fireEvent('%s.matcher.correct' % media['type'], chain, release, media, quality, single = True):
return chain
return False
def correctTitle(self, chain, media):
root_library = media['library']['root_library']
if 'show_name' not in chain.info or not len(chain.info['show_name']):
log.info('Wrong: missing show name in parsed result')
return False
# Get the lower-case parsed show name from the chain
chain_words = [x.lower() for x in chain.info['show_name']]
# Build a list of possible titles of the media we are searching for
titles = root_library['info']['titles']
# Add year suffix titles (will result in ['<name_one>', '<name_one> <suffix_one>', '<name_two>', ...])
suffixes = [None, root_library['info']['year']]
titles = [
title + ((' %s' % suffix) if suffix else '')
for title in titles
for suffix in suffixes
]
# Check show titles match
# TODO check xem names
for title in titles:
for valid_words in [x.split(' ') for x in possibleTitles(title)]:
if valid_words == chain_words:
return True
return False
def correctQuality(self, chain, quality, quality_map):
if quality['identifier'] not in quality_map:
log.info2('Wrong: unknown preferred quality %s', quality['identifier'])
return False
if 'video' not in chain.info:
log.info2('Wrong: no video tags found')
return False
video_tags = quality_map[quality['identifier']]
if not self.chainMatch(chain, 'video', video_tags):
log.info2('Wrong: %s tags not in chain', video_tags)
return False
return True

View File

@@ -1,6 +0,0 @@
from .main import MediaPlugin
def start():
return MediaPlugin()
config = []

View File

@@ -1,458 +0,0 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import mergeDicts, splitString, getImdb
from couchpotato.core.logger import CPLog
from couchpotato.core.media import MediaBase
from couchpotato.core.settings.model import Library, LibraryTitle, Release, \
Media
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql.expression import or_, asc, not_, desc
from string import ascii_lowercase
log = CPLog(__name__)
class MediaPlugin(MediaBase):
def __init__(self):
addApiView('media.refresh', self.refresh, docs = {
'desc': 'Refresh a any media type by ID',
'params': {
'id': {'desc': 'Movie, Show, Season or Episode ID(s) you want to refresh.', 'type': 'int (comma separated)'},
}
})
addApiView('media.list', self.listView, docs = {
'desc': 'List media',
'params': {
'type': {'type': 'string', 'desc': 'Media type to filter on.'},
'status': {'type': 'array or csv', 'desc': 'Filter movie by status. Example:"active,done"'},
'release_status': {'type': 'array or csv', 'desc': 'Filter movie by status of its releases. Example:"snatched,available"'},
'limit_offset': {'desc': 'Limit and offset the movie list. Examples: "50" or "50,30"'},
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all movies starting with the letter "a"'},
'search': {'desc': 'Search movie title'},
},
'return': {'type': 'object', 'example': """{
'success': True,
'empty': bool, any movies returned or not,
'media': array, media found,
}"""}
})
addApiView('media.get', self.getView, docs = {
'desc': 'Get media by id',
'params': {
'id': {'desc': 'The id of the media'},
}
})
addApiView('media.delete', self.deleteView, docs = {
'desc': 'Delete a media from the wanted list',
'params': {
'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
'delete_from': {'desc': 'Delete media from this page', 'type': 'string: all (default), wanted, manage'},
}
})
addApiView('media.available_chars', self.charView)
addEvent('app.load', self.addSingleRefreshView)
addEvent('app.load', self.addSingleListView)
addEvent('app.load', self.addSingleCharView)
addEvent('app.load', self.addSingleDeleteView)
addEvent('media.get', self.get)
addEvent('media.list', self.list)
addEvent('media.delete', self.delete)
addEvent('media.restatus', self.restatus)
def refresh(self, id = '', **kwargs):
db = get_session()
for x in splitString(id):
media = db.query(Media).filter_by(id = x).first()
if media:
# Get current selected title
default_title = ''
for title in media.library.titles:
if title.default: default_title = title.title
fireEvent('notify.frontend', type = '%s.busy' % media.type, data = {'id': x})
fireEventAsync('library.update.%s' % media.type, identifier = media.library.identifier, default_title = default_title, force = True, on_complete = self.createOnComplete(x))
db.expire_all()
return {
'success': True,
}
def addSingleRefreshView(self):
for media_type in fireEvent('media.types', merge = True):
addApiView('%s.refresh' % media_type, self.refresh)
def get(self, media_id):
db = get_session()
imdb_id = getImdb(str(media_id))
if imdb_id:
m = db.query(Media).filter(Media.library.has(identifier = imdb_id)).first()
else:
m = db.query(Media).filter_by(id = media_id).first()
results = None
if m:
results = m.to_dict(self.default_dict)
db.expire_all()
return results
def getView(self, id = None, **kwargs):
media = self.get(id) if id else None
return {
'success': media is not None,
'media': media,
}
def list(self, types = None, status = None, release_status = None, limit_offset = None, starts_with = None, search = None, order = None):
db = get_session()
# Make a list from string
if status and not isinstance(status, (list, tuple)):
status = [status]
if release_status and not isinstance(release_status, (list, tuple)):
release_status = [release_status]
if types and not isinstance(types, (list, tuple)):
types = [types]
# query movie ids
q = db.query(Media) \
.with_entities(Media.id) \
.group_by(Media.id)
# Filter on movie status
if status and len(status) > 0:
statuses = fireEvent('status.get', status, single = len(status) > 1)
statuses = [s.get('id') for s in statuses]
q = q.filter(Media.status_id.in_(statuses))
# Filter on release status
if release_status and len(release_status) > 0:
q = q.join(Media.releases)
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
statuses = [s.get('id') for s in statuses]
q = q.filter(Release.status_id.in_(statuses))
# Filter on type
if types and len(types) > 0:
try: q = q.filter(Media.type.in_(types))
except: pass
# Only join when searching / ordering
if starts_with or search or order != 'release_order':
q = q.join(Media.library, Library.titles) \
.filter(LibraryTitle.default == True)
# Add search filters
filter_or = []
if starts_with:
starts_with = toUnicode(starts_with.lower())
if starts_with in ascii_lowercase:
filter_or.append(LibraryTitle.simple_title.startswith(starts_with))
else:
ignore = []
for letter in ascii_lowercase:
ignore.append(LibraryTitle.simple_title.startswith(toUnicode(letter)))
filter_or.append(not_(or_(*ignore)))
if search:
filter_or.append(LibraryTitle.simple_title.like('%%' + search + '%%'))
if len(filter_or) > 0:
q = q.filter(or_(*filter_or))
total_count = q.count()
if total_count == 0:
return 0, []
if order == 'release_order':
q = q.order_by(desc(Release.last_edit))
else:
q = q.order_by(asc(LibraryTitle.simple_title))
if limit_offset:
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
limit = splt[0]
offset = 0 if len(splt) is 1 else splt[1]
q = q.limit(limit).offset(offset)
# Get all media_ids in sorted order
media_ids = [m.id for m in q.all()]
# List release statuses
releases = db.query(Release) \
.filter(Release.media_id.in_(media_ids)) \
.all()
release_statuses = dict((m, set()) for m in media_ids)
releases_count = dict((m, 0) for m in media_ids)
for release in releases:
release_statuses[release.media_id].add('%d,%d' % (release.status_id, release.quality_id))
releases_count[release.media_id] += 1
# Get main movie data
q2 = db.query(Media) \
.options(joinedload_all('library.titles')) \
.options(joinedload_all('library.files')) \
.options(joinedload_all('status')) \
.options(joinedload_all('files'))
q2 = q2.filter(Media.id.in_(media_ids))
results = q2.all()
# Create dict by movie id
movie_dict = {}
for movie in results:
movie_dict[movie.id] = movie
# List movies based on media_ids order
movies = []
for media_id in media_ids:
releases = []
for r in release_statuses.get(media_id):
x = splitString(r)
releases.append({'status_id': x[0], 'quality_id': x[1]})
# Merge releases with movie dict
movies.append(mergeDicts(movie_dict[media_id].to_dict({
'library': {'titles': {}, 'files':{}},
'files': {},
}), {
'releases': releases,
'releases_count': releases_count.get(media_id),
}))
db.expire_all()
return total_count, movies
def listView(self, **kwargs):
types = splitString(kwargs.get('types'))
status = splitString(kwargs.get('status'))
release_status = splitString(kwargs.get('release_status'))
limit_offset = kwargs.get('limit_offset')
starts_with = kwargs.get('starts_with')
search = kwargs.get('search')
order = kwargs.get('order')
total_movies, movies = self.list(
types = types,
status = status,
release_status = release_status,
limit_offset = limit_offset,
starts_with = starts_with,
search = search,
order = order
)
return {
'success': True,
'empty': len(movies) == 0,
'total': total_movies,
'movies': movies,
}
def addSingleListView(self):
for media_type in fireEvent('media.types', merge = True):
def tempList(*args, **kwargs):
return self.listView(types = media_type, *args, **kwargs)
addApiView('%s.list' % media_type, tempList)
def availableChars(self, types = None, status = None, release_status = None):
types = types or []
status = status or []
release_status = release_status or []
db = get_session()
# Make a list from string
if not isinstance(status, (list, tuple)):
status = [status]
if release_status and not isinstance(release_status, (list, tuple)):
release_status = [release_status]
if types and not isinstance(types, (list, tuple)):
types = [types]
q = db.query(Media)
# Filter on movie status
if status and len(status) > 0:
statuses = fireEvent('status.get', status, single = len(release_status) > 1)
statuses = [s.get('id') for s in statuses]
q = q.filter(Media.status_id.in_(statuses))
# Filter on release status
if release_status and len(release_status) > 0:
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
statuses = [s.get('id') for s in statuses]
q = q.join(Media.releases) \
.filter(Release.status_id.in_(statuses))
# Filter on type
if types and len(types) > 0:
try: q = q.filter(Media.type.in_(types))
except: pass
q = q.join(Library, LibraryTitle) \
.with_entities(LibraryTitle.simple_title) \
.filter(LibraryTitle.default == True)
titles = q.all()
chars = set()
for title in titles:
try:
char = title[0][0]
char = char if char in ascii_lowercase else '#'
chars.add(str(char))
except:
log.error('Failed getting title for %s', title.libraries_id)
if len(chars) == 25:
break
db.expire_all()
return ''.join(sorted(chars))
def charView(self, **kwargs):
type = splitString(kwargs.get('type', 'movie'))
status = splitString(kwargs.get('status', None))
release_status = splitString(kwargs.get('release_status', None))
chars = self.availableChars(type, status, release_status)
return {
'success': True,
'empty': len(chars) == 0,
'chars': chars,
}
def addSingleCharView(self):
for media_type in fireEvent('media.types', merge = True):
def tempChar(*args, **kwargs):
return self.charView(types = media_type, *args, **kwargs)
addApiView('%s.available_chars' % media_type, tempChar)
def delete(self, media_id, delete_from = None):
db = get_session()
media = db.query(Media).filter_by(id = media_id).first()
if media:
deleted = False
if delete_from == 'all':
db.delete(media)
db.commit()
deleted = True
else:
done_status = fireEvent('status.get', 'done', single = True)
total_releases = len(media.releases)
total_deleted = 0
new_movie_status = None
for release in media.releases:
if delete_from in ['wanted', 'snatched', 'late']:
if release.status_id != done_status.get('id'):
db.delete(release)
total_deleted += 1
new_movie_status = 'done'
elif delete_from == 'manage':
if release.status_id == done_status.get('id'):
db.delete(release)
total_deleted += 1
new_movie_status = 'active'
db.commit()
if total_releases == total_deleted:
db.delete(media)
db.commit()
deleted = True
elif new_movie_status:
new_status = fireEvent('status.get', new_movie_status, single = True)
media.profile_id = None
media.status_id = new_status.get('id')
db.commit()
else:
fireEvent('media.restatus', media.id, single = True)
if deleted:
fireEvent('notify.frontend', type = 'movie.deleted', data = media.to_dict())
db.expire_all()
return True
def deleteView(self, id = '', **kwargs):
ids = splitString(id)
for media_id in ids:
self.delete(media_id, delete_from = kwargs.get('delete_from', 'all'))
return {
'success': True,
}
def addSingleDeleteView(self):
for media_type in fireEvent('media.types', merge = True):
def tempDelete(*args, **kwargs):
return self.deleteView(types = media_type, *args, **kwargs)
addApiView('%s.delete' % media_type, tempDelete)
def restatus(self, media_id):
active_status, done_status = fireEvent('status.get', ['active', 'done'], single = True)
db = get_session()
m = db.query(Media).filter_by(id = media_id).first()
if not m or len(m.library.titles) == 0:
log.debug('Can\'t restatus movie, doesn\'t seem to exist.')
return False
log.debug('Changing status for %s', m.library.titles[0].title)
if not m.profile:
m.status_id = done_status.get('id')
else:
move_to_wanted = True
for t in m.profile.types:
for release in m.releases:
if t.quality.identifier is release.quality.identifier and (release.status_id is done_status.get('id') and t.finish):
move_to_wanted = False
m.status_id = active_status.get('id') if move_to_wanted else done_status.get('id')
db.commit()
return True

View File

@@ -1,6 +0,0 @@
from .main import Search
def start():
return Search()
config = []

View File

@@ -1,59 +0,0 @@
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.variable import mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
class Search(Plugin):
    """Exposes the 'search' API that queries the info providers for new media.

    Registers a generic 'search' endpoint plus one '<type>.search' endpoint
    per known media type (added once all media plugins are loaded).
    """

    def __init__(self):
        addApiView('search', self.search, docs = {
            'desc': 'Search the info in providers for a movie',
            'params': {
                'q': {'desc': 'The (partial) movie name you want to search for'},
                'type': {'desc': 'Search for a specific media type. Leave empty to search all.'},
            },
            'return': {'type': 'object', 'example': """{
'success': True,
'movies': array,
'show': array,
etc
}"""}
        })

        addEvent('app.load', self.addSingleSearches)

    def search(self, q = '', types = None, **kwargs):
        """Search all providers (or only the given media types) for `q`.

        :param q: (partial) title to search for
        :param types: str or iterable of media type identifiers; falsy = all
        :return: dict with 'success' plus per-provider/per-type results
        """

        # Make sure types is the correct instance
        if isinstance(types, (str, unicode)):
            types = [types]
        elif isinstance(types, (list, tuple, set)):
            types = list(types)

        if not types:
            result = fireEvent('info.search', q = q, merge = True)
        else:
            result = {}
            for media_type in types:
                # Bug fix: the query was never forwarded to the type-specific
                # search event, so typed searches ignored the search string.
                result[media_type] = fireEvent('%s.search' % media_type, q = q)

        return mergeDicts({
            'success': True,
        }, result)

    def createSingleSearch(self, media_type):
        """Build a search callback bound to a single media type."""

        def singleSearch(q, **kwargs):
            # Bug fix: the keyword must be 'types' to match search();
            # 'type' was silently swallowed by **kwargs, searching all types.
            return self.search(q, types = media_type, **kwargs)

        return singleSearch

    def addSingleSearches(self):
        # Register a '<type>.search' API view for every known media type
        for media_type in fireEvent('media.types', merge = True):
            addApiView('%s.search' % media_type, self.createSingleSearch(media_type))

View File

@@ -1,280 +0,0 @@
/* Search form in the top navigation bar: collapsed to an icon by default,
   it expands when focused/shown and reveals a results dropdown below it. */
.search_form {
	display: inline-block;
	vertical-align: middle;
	position: absolute;
	right: 105px;
	top: 0;
	text-align: right;
	height: 100%;
	border-bottom: 4px solid transparent;
	transition: all .4s cubic-bezier(0.9,0,0.1,1);
	position: absolute; /* NOTE(review): duplicate declaration, kept as-is */
	z-index: 20;
	border: 1px solid transparent;
	border-width: 0 0 4px;
}

	.search_form:hover {
		border-color: #047792;
	}

	@media all and (max-width: 480px) {
		.search_form {
			right: 44px;
		}
	}

	/* '.focused' = input has keyboard focus, '.shown' = results visible */
	.search_form.focused,
	.search_form.shown {
		border-color: #04bce6;
	}

	.search_form .input {
		height: 100%;
		overflow: hidden;
		width: 45px;
		transition: all .4s cubic-bezier(0.9,0,0.1,1);
	}

		/* Expand the input when the form is active */
		.search_form.focused .input,
		.search_form.shown .input {
			width: 380px;
			background: #4e5969;
		}

		.search_form .input input {
			border-radius: 0;
			display: block;
			border: 0;
			background: none;
			color: #FFF;
			font-size: 25px;
			height: 100%;
			padding: 10px;
			width: 100%;
			opacity: 0;
			padding: 0 40px 0 10px; /* overrides the padding above; kept as-is */
			transition: all .4s ease-in-out .2s;
		}

		.search_form.focused .input input,
		.search_form.shown .input input {
			opacity: 1;
		}

		/* Hide IE's built-in clear ("x") button; the form has its own */
		.search_form input::-ms-clear {
			width : 0;
			height: 0;
		}

		@media all and (max-width: 480px) {
			.search_form .input input {
				font-size: 15px;
			}

			.search_form.focused .input,
			.search_form.shown .input {
				width: 277px;
			}
		}

		/* Icon-font button: search glyph by default, clear glyph when filled */
		.search_form .input a {
			position: absolute;
			top: 0;
			right: 0;
			width: 44px;
			height: 100%;
			cursor: pointer;
			vertical-align: middle;
			text-align: center;
			line-height: 66px;
			font-size: 15px;
			color: #FFF;
		}
			.search_form .input a:after {
				content: "\e03e";
			}

			.search_form.shown.filled .input a:after {
				content: "\e04e";
			}

		@media all and (max-width: 480px) {
			.search_form .input a {
				line-height: 44px;
			}
		}

	/* Dropdown holding the result list; hidden until the form is filled */
	.search_form .results_container {
		text-align: left;
		position: absolute;
		background: #5c697b;
		margin: 4px 0 0;
		width: 470px;
		min-height: 50px;
		box-shadow: 0 20px 20px -10px rgba(0,0,0,0.55);
		display: none;
	}

		@media all and (max-width: 480px) {
			.search_form .results_container {
				width: 320px;
			}
		}

		.search_form.focused.filled .results_container,
		.search_form.shown.filled .results_container {
			display: block;
		}

		.search_form .results {
			max-height: 570px;
			overflow-x: hidden;
		}
/* A single row in the search results: a sliding 'data' panel on top of an
   'options' panel (title/profile/category selects + add button). */
.media_result {
	overflow: hidden;
	height: 50px;
	position: relative;
}

	.media_result .options {
		position: absolute;
		height: 100%;
		top: 0;
		left: 30px;
		right: 0;
		padding: 13px;
		border: 1px solid transparent;
		border-width: 1px 0;
		border-radius: 0;
		box-shadow: inset 0 1px 8px rgba(0,0,0,0.25);
	}

		.media_result .options > .in_library_wanted {
			margin-top: -7px;
		}

		.media_result .options > div {
			border: 0;
		}

		.media_result .options .thumbnail {
			vertical-align: middle;
		}

		.media_result .options select {
			vertical-align: middle;
			display: inline-block;
			margin-right: 10px;
		}

		/* Fixed widths per select so the row lines up */
		.media_result .options select[name=title] { width: 170px; }
		.media_result .options select[name=profile] { width: 90px; }
		.media_result .options select[name=category] { width: 80px; }

		@media all and (max-width: 480px) {
			.media_result .options select[name=title] { width: 90px; }
			.media_result .options select[name=profile] { width: 50px; }
			.media_result .options select[name=category] { width: 50px; }
		}

		.media_result .options .button {
			vertical-align: middle;
			display: inline-block;
		}

		.media_result .options .message {
			height: 100%;
			font-size: 20px;
			color: #fff;
			line-height: 20px;
		}

	/* Top layer showing title/year; slides away (.open) to reveal options */
	.media_result .data {
		position: absolute;
		height: 100%;
		top: 0;
		left: 30px;
		right: 0;
		background: #5c697b;
		cursor: pointer;
		border-top: 1px solid rgba(255,255,255, 0.08);
		transition: all .4s cubic-bezier(0.9,0,0.1,1);
	}

		.media_result .data.open {
			left: 100% !important;
		}

	.media_result:last-child .data { border-bottom: 0; }

	/* Small badges shown when the item is already wanted / in the library */
	.media_result .in_wanted, .media_result .in_library {
		position: absolute;
		bottom: 2px;
		left: 14px;
		font-size: 11px;
	}

	.media_result .thumbnail {
		width: 34px;
		min-height: 100%;
		display: block;
		margin: 0;
		vertical-align: top;
	}

	.media_result .info {
		position: absolute;
		top: 20%;
		left: 15px;
		right: 7px;
		vertical-align: middle;
	}

		.media_result .info h2 {
			margin: 0;
			font-weight: normal;
			font-size: 20px;
			padding: 0;
		}

		.search_form .info h2 {
			position: absolute;
			width: 100%;
		}

			/* Ellipsize long titles; the year sits in the right 12% */
			.media_result .info h2 .title {
				display: block;
				margin: 0;
				text-overflow: ellipsis;
				overflow: hidden;
				white-space: nowrap;
			}

			.search_form .info h2 .title {
				position: absolute;
				width: 88%;
			}

			.media_result .info h2 .year {
				padding: 0 5px;
				text-align: center;
				position: absolute;
				width: 12%;
				right: 0;
			}

			@media all and (max-width: 480px) {
				.search_form .info h2 .year {
					font-size: 12px;
					margin-top: 7px;
				}
			}

	/* Full-size overlay used as a click shield / spinner host */
	.search_form .mask,
	.media_result .mask {
		position: absolute;
		height: 100%;
		width: 100%;
		left: 0;
		top: 0;
	}

View File

@@ -1,188 +0,0 @@
// Global search box in the page header. Debounces keystrokes, queries the
// 'search' API, caches responses per query string and renders each hit as a
// Block.Search.<Type>Item element in a dropdown.
Block.Search = new Class({

	Extends: BlockBase,

	// Cached API responses, keyed by query string
	cache: {},

	create: function(){
		var self = this;

		var focus_timer = 0;
		self.el = new Element('div.search_form').adopt(
			new Element('div.input').adopt(
				self.input = new Element('input', {
					'placeholder': 'Search & add a new media',
					'events': {
						'keyup': self.keyup.bind(self),
						'focus': function(){
							if(focus_timer) clearTimeout(focus_timer);
							self.el.addClass('focused')
							if(this.get('value'))
								self.hideResults(false)
						},
						'blur': function(){
							// Delay removal so a click inside the results
							// doesn't immediately collapse the box
							focus_timer = (function(){
								self.el.removeClass('focused')
							}).delay(100);
						}
					}
				}),
				new Element('a.icon2', {
					'events': {
						'click': self.clear.bind(self),
						'touchend': self.clear.bind(self)
					}
				})
			),
			self.result_container = new Element('div.results_container', {
				'tween': {
					'duration': 200
				},
				'events': {
					// Keep scroll inside the dropdown from scrolling the page
					'mousewheel': function(e){
						(e).stopPropagation();
					}
				}
			}).adopt(
				self.results = new Element('div.results')
			)
		);

		// Overlay used to host the spinner while a request is running
		self.mask = new Element('div.mask').inject(self.result_container).fade('hide');
	},

	// Clear button: first click empties the input, second click (already
	// empty) blurs the field.
	clear: function(e){
		var self = this;
		(e).preventDefault();

		if(self.last_q === ''){
			self.input.blur()
			self.last_q = null;
		}
		else {
			self.last_q = '';
			self.input.set('value', '');
			self.input.focus()

			self.media = {}
			self.results.empty()
			self.el.removeClass('filled')
		}
	},

	// Show (bool=false) or hide (bool=true) the results dropdown, keeping
	// the History/outerClick listeners in sync with the visible state.
	hideResults: function(bool){
		var self = this;

		if(self.hidden == bool) return;

		self.el[bool ? 'removeClass' : 'addClass']('shown');

		if(bool){
			History.removeEvent('change', self.hideResults.bind(self, !bool));
			self.el.removeEvent('outerClick', self.hideResults.bind(self, !bool));
		}
		else {
			History.addEvent('change', self.hideResults.bind(self, !bool));
			self.el.addEvent('outerClick', self.hideResults.bind(self, !bool));
		}

		self.hidden = bool;
	},

	keyup: function(e){
		var self = this;

		self.el[self.q() ? 'addClass' : 'removeClass']('filled')

		// Debounce: cancel the in-flight request and only autocomplete
		// after 300ms without typing
		if(self.q() != self.last_q){
			if(self.api_request && self.api_request.isRunning())
				self.api_request.cancel();

			if(self.autocomplete_timer) clearTimeout(self.autocomplete_timer)
			self.autocomplete_timer = self.autocomplete.delay(300, self)
		}
	},

	autocomplete: function(){
		var self = this;

		if(!self.q()){
			self.hideResults(true)
			return
		}

		self.list()
	},

	// Fire the API search (or reuse the cached response) for the current query
	list: function(){
		var self = this,
			q = self.q(),
			cache = self.cache[q];

		self.hideResults(false);

		if(!cache){
			self.mask.fade('in');

			if(!self.spinner)
				self.spinner = createSpinner(self.mask);

			self.api_request = Api.request('search', {
				'data': {
					'q': q
				},
				'onComplete': self.fill.bind(self, q)
			})
		}
		else
			self.fill(q, cache)

		self.last_q = q;
	},

	// Render the response: one item element per hit, per media type
	fill: function(q, json){
		var self = this;

		self.cache[q] = json

		self.media = {}
		self.results.empty()

		Object.each(json, function(media, type){
			if(typeOf(media) == 'array'){
				Object.each(media, function(m){

					// NOTE(review): inner 'var m' deliberately shadows the
					// iterated raw object with the created item instance
					var m = new Block.Search[m.type.capitalize() + 'Item'](m);
					$(m).inject(self.results)
					self.media[m.imdb || 'r-'+Math.floor(Math.random()*10000)] = m

					// Searching by exact imdb id opens the item right away
					if(q == m.imdb)
						m.showOptions()

				});
			}
		})

		// Calculate result heights
		var w = window.getSize(),
			rc = self.result_container.getCoordinates();

		self.results.setStyle('max-height', (w.y - rc.top - 50) + 'px')
		self.mask.fade('out')
	},

	loading: function(bool){
		this.el[bool ? 'addClass' : 'removeClass']('loading')
	},

	// Current trimmed query string
	q: function(){
		return this.input.get('value').trim();
	}

});

View File

@@ -1,75 +0,0 @@
from .main import Searcher

def start():
    # Plugin entry point: the plugin loader calls this once on startup.
    return Searcher()

# Settings shown on the 'Searcher' tab of the web interface.
config = [{
    'name': 'searcher',
    'order': 20,
    'groups': [
        {
            'tab': 'searcher',
            'name': 'searcher',
            'label': 'Basics',
            'description': 'General search options',
            'options': [
                {
                    'name': 'preferred_method',
                    'label': 'First search',
                    'description': 'Which of the methods do you prefer',
                    'default': 'both',
                    'type': 'dropdown',
                    'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrents', 'torrent')],
                },
            ],
        }, {
            # Global word filters applied to every release name
            'tab': 'searcher',
            'subtab': 'category',
            'subtab_label': 'Categories',
            'name': 'filter',
            'label': 'Global filters',
            'description': 'Prefer, ignore & required words in release names',
            'options': [
                {
                    'name': 'preferred_words',
                    'label': 'Preferred',
                    'default': '',
                    'placeholder': 'Example: CtrlHD, Amiable, Wiki',
                    'description': 'Words that give the releases a higher score.'
                },
                {
                    'name': 'required_words',
                    'label': 'Required',
                    'default': '',
                    'placeholder': 'Example: DTS, AC3 & English',
                    'description': 'Release should contain at least one set of words. Sets are separated by "," and each word within a set must be separated with "&"'
                },
                {
                    'name': 'ignored_words',
                    'label': 'Ignored',
                    'default': 'german, dutch, french, truefrench, danish, swedish, spanish, italian, korean, dubbed, swesub, korsub, dksubs, vain',
                    'description': 'Ignores releases that match any of these sets. (Works like explained above)'
                },
            ],
        },
    ],
}, {
    # NZB-specific searcher settings
    'name': 'nzb',
    'groups': [
        {
            'tab': 'searcher',
            'name': 'searcher',
            'label': 'NZB',
            'wizard': True,
            'options': [
                {
                    'name': 'retention',
                    'label': 'Usenet Retention',
                    'default': 1500,
                    'type': 'int',
                    'unit': 'days'
                },
            ],
        },
    ],
}]

View File

@@ -1,45 +0,0 @@
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
class SearcherBase(Plugin):
    """Common behaviour shared by the per-media-type searchers: progress
    reporting and the scheduled full-search cronjob."""

    # Toggled by subclasses while a search run is active
    in_progress = False

    def __init__(self):
        super(SearcherBase, self).__init__()

        # Report progress on both the generic and the type-specific channel
        addEvent('searcher.progress', self.getProgress)
        addEvent('%s.searcher.progress' % self.getType(), self.getProgress)

        self.initCron()

    def initCron(self):
        """ Set the searcher cronjob
        Make sure to reset cronjob after setting has changed
        """
        media_type = self.getType()

        def registerCron():
            fireEvent('schedule.cron', '%s.searcher.all' % media_type, self.searchAll,
                day = self.conf('cron_day'), hour = self.conf('cron_hour'), minute = self.conf('cron_minute'))

        addEvent('app.load', registerCron)
        for cron_setting in ['cron_day', 'cron_hour', 'cron_minute']:
            addEvent('setting.save.%s_searcher.%s.after' % (media_type, cron_setting), registerCron)

    def getProgress(self, **kwargs):
        """ Return progress of current searcher"""
        return {self.getType(): self.in_progress}

View File

@@ -1,218 +0,0 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import simplifyString, toUnicode
from couchpotato.core.helpers.variable import md5, getTitle, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.searcher.base import SearcherBase
from couchpotato.core.settings.model import Media, Release, ReleaseInfo
from couchpotato.environment import Env
from inspect import ismethod, isfunction
import datetime
import re
import time
import traceback
log = CPLog(__name__)
class Searcher(SearcherBase):
    """Media-type independent searcher.

    Wires up the generic search events and provides the release-filtering
    helpers (quality, year, name and word checks) shared by all media types.
    """

    def __init__(self):
        addEvent('searcher.protocols', self.getSearchProtocols)
        addEvent('searcher.contains_other_quality', self.containsOtherQuality)
        addEvent('searcher.correct_year', self.correctYear)
        addEvent('searcher.correct_name', self.correctName)
        addEvent('searcher.correct_words', self.correctWords)
        addEvent('searcher.search', self.search)

        addApiView('searcher.full_search', self.searchAllView, docs = {
            'desc': 'Starts a full search for all media',
        })

        addApiView('searcher.progress', self.getProgressForAll, docs = {
            'desc': 'Get the progress of all media searches',
            'return': {'type': 'object', 'example': """{
'movie': False || object, total & to_go,
'show': False || object, total & to_go,
}"""},
        })

    def searchAllView(self):
        """Kick off a full search for every registered media type."""
        results = {}
        for _type in fireEvent('media.types'):
            results[_type] = fireEvent('%s.searcher.all_view' % _type)

        return results

    def getProgressForAll(self):
        """Merge the progress reports of all running searchers."""
        progress = fireEvent('searcher.progress', merge = True)
        return progress

    def search(self, protocols, media, quality):
        """Search all providers for `media` in `quality`.

        :param protocols: enabled protocols ('nzb', 'torrent', ...)
        :param media: media dict to search for
        :param quality: quality dict to search in
        :return: provider results sorted by score (and, unless the user
                 prefers both, grouped by the preferred protocol first)
        """
        results = []

        for search_protocol in protocols:
            protocol_results = fireEvent('provider.search.%s.%s' % (search_protocol, media['type']), media, quality, merge = True)
            if protocol_results:
                results += protocol_results

        sorted_results = sorted(results, key = lambda k: k['score'], reverse = True)

        download_preference = self.conf('preferred_method', section = 'searcher')
        if download_preference != 'both':
            # 'nzb' < 'tor', so a reverse sort on the first 3 chars puts
            # torrents first when that's the preferred method
            sorted_results = sorted(sorted_results, key = lambda k: k['protocol'][:3], reverse = (download_preference == 'torrent'))

        return sorted_results

    def getSearchProtocols(self):
        """Return the protocols that have both a downloader and a provider
        enabled; empty list (with an error logged) when the setup is unusable."""

        download_protocols = fireEvent('download.enabled_protocols', merge = True)
        provider_protocols = fireEvent('provider.enabled_protocols', merge = True)

        if download_protocols and len(list(set(provider_protocols) & set(download_protocols))) == 0:
            log.error('There aren\'t any providers enabled for your downloader (%s). Check your settings.', ','.join(download_protocols))
            return []

        for useless_provider in list(set(provider_protocols) - set(download_protocols)):
            log.debug('Provider for "%s" enabled, but no downloader.', useless_provider)

        search_protocols = download_protocols

        if len(search_protocols) == 0:
            log.error('There aren\'t any downloaders enabled. Please pick one in settings.')
            return []

        return search_protocols

    def containsOtherQuality(self, nzb, movie_year = None, preferred_quality = None):
        """Check if a release name contains a quality other than the wanted one.

        :return: True when the release seems to be of another quality than
                 `preferred_quality`, False when it matches exactly.
        """
        if not preferred_quality: preferred_quality = {}

        name = nzb['name']
        size = nzb.get('size', 0)
        nzb_words = re.split('\W+', simplifyString(name))

        qualities = fireEvent('quality.all', single = True)

        found = {}
        for quality in qualities:
            # Main in words
            if quality['identifier'] in nzb_words:
                found[quality['identifier']] = True

            # Alt in words
            if list(set(nzb_words) & set(quality['alternative'])):
                found[quality['identifier']] = True

        # Try guessing via quality tags
        guess = fireEvent('quality.guess', [nzb.get('name')], single = True)
        if guess:
            found[guess['identifier']] = True

        # Hack for older movies that don't contain quality tag
        year_name = fireEvent('scanner.name_year', name, single = True)
        if len(found) == 0 and movie_year < datetime.datetime.now().year - 3 and not year_name.get('year', None):
            if size > 3000: # Assume dvdr
                log.info('Quality was missing in name, assuming it\'s a DVD-R based on the size: %s', size)
                found['dvdr'] = True
            else: # Assume dvdrip
                log.info('Quality was missing in name, assuming it\'s a DVD-Rip based on the size: %s', size)
                found['dvdrip'] = True

        # Allow other qualities
        # Bug fix: .get('allow') returns None when the profile has no
        # 'allow' key, which made the for-loop raise a TypeError.
        for allowed in preferred_quality.get('allow') or []:
            if found.get(allowed):
                del found[allowed]

        return not (found.get(preferred_quality['identifier']) and len(found) == 1)

    def correctYear(self, haystack, year, year_range):
        """Check if any string in `haystack` contains a year within
        `year_range` of `year`."""

        if not isinstance(haystack, (list, tuple, set)):
            haystack = [haystack]

        year_name = {}
        for string in haystack:

            year_name = fireEvent('scanner.name_year', string, single = True)

            if year_name and ((year - year_range) <= year_name.get('year') <= (year + year_range)):
                log.debug('Movie year matches range: %s looking for %s', (year_name.get('year'), year))
                return True

        # Typo fix: 'matche' -> 'match'
        log.debug('Movie year doesn\'t match range: %s looking for %s', (year_name.get('year'), year))

        return False

    def correctName(self, check_name, movie_name):
        """Check if `check_name` refers to `movie_name` by comparing the
        simplified word sets of both (also trying quoted/bracketed parts)."""

        check_names = [check_name]

        # Match names between "
        try: check_names.append(re.search(r'([\'"])[^\1]*\1', check_name).group(0))
        except: pass

        # Match longest name between []
        try: check_names.append(max(re.findall(r'[^[]*\[([^]]*)\]', check_name), key = len).strip())
        except: pass

        for check_name in list(set(check_names)):
            check_movie = fireEvent('scanner.name_year', check_name, single = True)

            try:
                check_words = filter(None, re.split('\W+', check_movie.get('name', '')))
                movie_words = filter(None, re.split('\W+', simplifyString(movie_name)))

                # Every word of the release name must appear in the movie title
                if len(check_words) > 0 and len(movie_words) > 0 and len(list(set(check_words) - set(movie_words))) == 0:
                    return True
            except:
                pass

        return False

    def correctWords(self, rel_name, media):
        """Apply the global (and category) required/ignored word filters to a
        release name. Returns False when the release must be skipped."""

        media_title = fireEvent('library.title', media['library'], single = True)
        media_words = re.split('\W+', simplifyString(media_title))

        rel_name = simplifyString(rel_name)
        rel_words = re.split('\W+', rel_name)

        # Make sure it has required words
        required_words = splitString(self.conf('required_words', section = 'searcher').lower())
        try: required_words = list(set(required_words + splitString(media['category']['required'].lower())))
        except: pass

        req_match = 0
        for req_set in required_words:
            # Within a set, every '&'-separated word must be present
            req = splitString(req_set, '&')
            req_match += len(list(set(rel_words) & set(req))) == len(req)

        if len(required_words) > 0 and req_match == 0:
            log.info2('Wrong: Required word missing: %s', rel_name)
            return False

        # Ignore releases
        ignored_words = splitString(self.conf('ignored_words', section = 'searcher').lower())
        try: ignored_words = list(set(ignored_words + splitString(media['category']['ignored'].lower())))
        except: pass

        ignored_match = 0
        for ignored_set in ignored_words:
            ignored = splitString(ignored_set, '&')
            ignored_match += len(list(set(rel_words) & set(ignored))) == len(ignored)

        if len(ignored_words) > 0 and ignored_match:
            log.info2("Wrong: '%s' contains 'ignored words'", rel_name)
            return False

        # Ignore porn stuff, unless the words occur in the title itself
        pron_tags = ['xxx', 'sex', 'anal', 'tits', 'fuck', 'porn', 'orgy', 'milf', 'boobs', 'erotica', 'erotic', 'cock', 'dick']
        pron_words = list(set(rel_words) & set(pron_tags) - set(media_words))
        if pron_words:
            log.info('Wrong: %s, probably pr0n', rel_name)
            return False

        return True
class SearchSetupError(Exception):
    """Raised when the search environment can't be set up properly."""

View File

@@ -1,6 +0,0 @@
from couchpotato.core.media import MediaBase
class MovieTypeBase(MediaBase):
    """Base class for movie media-type plugins."""

    # Media type identifier, used to build event/API names like 'movie.search'
    _type = 'movie'

View File

@@ -1,6 +0,0 @@
from .main import MovieBase

def start():
    # Plugin entry point: the plugin loader calls this once on startup.
    return MovieBase()

# No user-configurable settings for this plugin.
config = []

View File

@@ -1,185 +0,0 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString, tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie import MovieTypeBase
from couchpotato.core.settings.model import Media
import time
log = CPLog(__name__)
class MovieBase(MovieTypeBase):
    """Movie media type.

    Registers the 'movie.add'/'movie.edit' API views and the 'movie.add'
    event used to put new movies on the wanted list.
    """

    _type = 'movie'

    def __init__(self):

        # Initialize this type
        super(MovieBase, self).__init__()
        self.initType()

        addApiView('movie.add', self.addView, docs = {
            'desc': 'Add new movie to the wanted list',
            'params': {
                'identifier': {'desc': 'IMDB id of the movie your want to add.'},
                'profile_id': {'desc': 'ID of quality profile you want the add the movie in. If empty will use the default profile.'},
                'category_id': {'desc': 'ID of category you want the add the movie in. If empty will use no category.'},
                'title': {'desc': 'Movie title to use for searches. Has to be one of the titles returned by movie.search.'},
            }
        })

        addApiView('movie.edit', self.edit, docs = {
            'desc': 'Add new movie to the wanted list',
            'params': {
                'id': {'desc': 'Movie ID(s) you want to edit.', 'type': 'int (comma separated)'},
                'profile_id': {'desc': 'ID of quality profile you want the edit the movie to.'},
                'category_id': {'desc': 'ID of category you want the add the movie in. If empty will use no category.'},
                'default_title': {'desc': 'Movie title to use for searches. Has to be one of the titles returned by movie.search.'},
            }
        })

        addEvent('movie.add', self.add)

    def add(self, params = None, force_readd = True, search_after = True, update_library = False, status_id = None):
        """Add a movie to the wanted list (or re-add / update an existing one).

        :param params: dict with at least 'identifier' (imdb id); optional
                       'profile_id', 'category_id', 'title', 'ignore_previous'
        :param force_readd: reset an existing movie back to wanted
        :param search_after: start a release search once the movie was added
        :param update_library: refresh library info after adding
        :param status_id: explicit status to set instead of 'active'
        :return: movie dict on success, False when the identifier is missing
                 or the id belongs to a TV show.
        """
        if not params: params = {}

        if not params.get('identifier'):
            msg = 'Can\'t add movie without imdb identifier.'
            log.error(msg)
            fireEvent('notify.frontend', type = 'movie.is_tvshow', message = msg)
            return False
        else:
            try:
                is_movie = fireEvent('movie.is_movie', identifier = params.get('identifier'), single = True)
                if not is_movie:
                    msg = 'Can\'t add movie, seems to be a TV show.'
                    log.error(msg)
                    fireEvent('notify.frontend', type = 'movie.is_tvshow', message = msg)
                    return False
            except:
                # Best effort check only; adding proceeds when it fails
                pass

        library = fireEvent('library.add.movie', single = True, attrs = params, update_after = update_library)

        # Status
        status_active, snatched_status, ignored_status, done_status, downloaded_status = \
            fireEvent('status.get', ['active', 'snatched', 'ignored', 'done', 'downloaded'], single = True)

        default_profile = fireEvent('profile.default', single = True)
        cat_id = params.get('category_id')

        db = get_session()

        m = db.query(Media).filter_by(library_id = library.get('id')).first()
        added = True
        do_search = False
        search_after = search_after and self.conf('search_on_add', section = 'moviesearcher')

        if not m:
            # New movie: create the Media row and update its library info async
            m = Media(
                library_id = library.get('id'),
                profile_id = params.get('profile_id', default_profile.get('id')),
                status_id = status_id if status_id else status_active.get('id'),
                category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else None,
            )
            db.add(m)
            db.commit()

            onComplete = None
            if search_after:
                onComplete = self.createOnComplete(m.id)

            fireEventAsync('library.update.movie', params.get('identifier'), default_title = params.get('title', ''), on_complete = onComplete)
            # The async update triggers the search; don't search again below
            search_after = False
        elif force_readd:

            # Clean snatched history
            for release in m.releases:
                if release.status_id in [downloaded_status.get('id'), snatched_status.get('id'), done_status.get('id')]:
                    if params.get('ignore_previous', False):
                        release.status_id = ignored_status.get('id')
                    else:
                        fireEvent('release.delete', release.id, single = True)

            m.profile_id = params.get('profile_id', default_profile.get('id'))
            m.category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else (m.category_id or None)
        else:
            log.debug('Movie already exists, not updating: %s', params)
            added = False

        if force_readd:
            m.status_id = status_id if status_id else status_active.get('id')
            m.last_edit = int(time.time())
            do_search = True

        db.commit()

        # Remove releases
        available_status = fireEvent('status.get', 'available', single = True)
        for rel in m.releases:
            # Bug fix: '==' instead of 'is' — identity comparison of ints
            # only worked by accident via CPython's small-int cache.
            if rel.status_id == available_status.get('id'):
                db.delete(rel)
                db.commit()

        movie_dict = m.to_dict(self.default_dict)

        if do_search and search_after:
            onComplete = self.createOnComplete(m.id)
            onComplete()

        if added:
            fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = 'Successfully added "%s" to your wanted list.' % params.get('title', ''))

        db.expire_all()
        return movie_dict

    def addView(self, **kwargs):
        """API wrapper around add(): returns {'success', 'movie'}."""
        add_dict = self.add(params = kwargs)

        return {
            'success': True if add_dict else False,
            'movie': add_dict,
        }

    def edit(self, id = '', **kwargs):
        """Edit one or more movies: profile, category and default title.

        :param id: comma separated Media ids
        :return: {'success': True}
        """
        db = get_session()

        available_status = fireEvent('status.get', 'available', single = True)

        ids = splitString(id)
        for media_id in ids:

            m = db.query(Media).filter_by(id = media_id).first()
            if not m:
                continue

            m.profile_id = kwargs.get('profile_id')

            cat_id = kwargs.get('category_id')
            if cat_id is not None:
                m.category_id = tryInt(cat_id) if tryInt(cat_id) > 0 else None

            # Remove releases
            for rel in m.releases:
                # Bug fix: '==' instead of 'is' (see add() above)
                if rel.status_id == available_status.get('id'):
                    db.delete(rel)
                    db.commit()

            # Default title
            if kwargs.get('default_title'):
                for title in m.library.titles:
                    title.default = toUnicode(kwargs.get('default_title', '')).lower() == toUnicode(title.title).lower()

            db.commit()

            fireEvent('media.restatus', m.id)

            movie_dict = m.to_dict(self.search_dict)
            fireEventAsync('movie.searcher.single', movie_dict, on_complete = self.createNotifyFront(media_id))

        db.expire_all()
        return {
            'success': True,
        }

View File

@@ -1,927 +0,0 @@
// Base class for the action icons attached to a movie entry (IMDB link,
// releases, trailer, ...). Subclasses implement create() and set self.el.
var MovieAction = new Class({

	Implements: [Options],

	class_name: 'action icon2',

	initialize: function(movie, options){
		var self = this;
		self.setOptions(options);

		self.movie = movie;

		self.create();
		if(self.el)
			self.el.addClass(self.class_name)
	},

	// Overridden by subclasses to build self.el
	create: function(){},

	disable: function(){
		if(this.el)
			this.el.addClass('disable')
	},

	enable: function(){
		if(this.el)
			this.el.removeClass('disable')
	},

	// Best-effort title lookup: getTitle() -> raw title fields -> 'Unknown'
	getTitle: function(){
		var self = this;

		try {
			return self.movie.getTitle();
		}
		catch(e){
			try {
				return self.movie.original_title ? self.movie.original_title : self.movie.titles[0];
			}
			catch(e){
				return 'Unknown';
			}
		}
	},

	// Read a property either via the movie model or as a plain attribute
	get: function(key){
		var self = this;
		try {
			return self.movie.get(key)
		}
		catch(e){
			return self.movie[key]
		}
	},

	createMask: function(){
		var self = this;
		self.mask = new Element('div.mask', {
			'styles': {
				'z-index': '1'
			}
		}).inject(self.movie, 'top').fade('hide');
		//self.positionMask();
	},

	positionMask: function(){
		var self = this,
			movie = $(self.movie),
			s = movie.getSize()

		// NOTE(review): the bare 'return' below disables mask positioning;
		// everything after it is unreachable and intentionally kept as-is.
		return;

		return self.mask.setStyles({
			'width': s.x,
			'height': s.y
		}).position({
			'relativeTo': movie
		})
	},

	toElement: function(){
		return this.el || null
	}

});
// Namespace for the concrete movie actions
var MA = {};

// Link to the movie's IMDB page; disabled when no imdb id is known.
MA.IMDB = new Class({

	Extends: MovieAction,
	id: null,

	create: function(){
		var self = this;

		self.id = self.movie.get('imdb') || self.movie.get('identifier');

		self.el = new Element('a.imdb', {
			'title': 'Go to the IMDB page of ' + self.getTitle(),
			'href': 'http://www.imdb.com/title/'+self.id+'/',
			'target': '_blank'
		});

		if(!self.id) self.disable();
	}

});
MA.Release = new Class({
Extends: MovieAction,
create: function(){
var self = this;
self.el = new Element('a.releases.download', {
'title': 'Show the releases that are available for ' + self.getTitle(),
'events': {
'click': self.show.bind(self)
}
});
if(self.movie.data.releases.length == 0)
self.el.hide()
else
self.showHelper();
App.on('movie.searcher.ended', function(notification){
if(self.movie.data.id != notification.data.id) return;
self.releases = null;
if(self.options_container){
self.options_container.destroy();
self.options_container = null;
}
});
},
show: function(e){
var self = this;
if(e)
(e).preventDefault();
if(self.releases)
self.createReleases();
else {
self.movie.busy(true);
Api.request('release.for_movie', {
'data': {
'id': self.movie.data.id
},
'onComplete': function(json){
self.movie.busy(false, 1);
if(json && json.releases){
self.releases = json.releases;
self.createReleases();
}
else
alert('Something went wrong, check the logs.');
}
});
}
},
createReleases: function(){
var self = this;
if(!self.options_container){
self.options_container = new Element('div.options').grab(
self.release_container = new Element('div.releases.table')
);
// Header
new Element('div.item.head').adopt(
new Element('span.name', {'text': 'Release name'}),
new Element('span.status', {'text': 'Status'}),
new Element('span.quality', {'text': 'Quality'}),
new Element('span.size', {'text': 'Size'}),
new Element('span.age', {'text': 'Age'}),
new Element('span.score', {'text': 'Score'}),
new Element('span.provider', {'text': 'Provider'})
).inject(self.release_container)
self.releases.each(function(release){
var status = Status.get(release.status_id),
quality = Quality.getProfile(release.quality_id) || {},
info = release.info,
provider = self.get(release, 'provider') + (release.info['provider_extra'] ? self.get(release, 'provider_extra') : '');
release.status = status;
var release_name = self.get(release, 'name');
if(release.files && release.files.length > 0){
try {
var movie_file = release.files.filter(function(file){
var type = File.Type.get(file.type_id);
return type && type.identifier == 'movie'
}).pick();
release_name = movie_file.path.split(Api.getOption('path_sep')).getLast();
}
catch(e){}
}
// Create release
var item = new Element('div', {
'class': 'item '+status.identifier,
'id': 'release_'+release.id
}).adopt(
new Element('span.name', {'text': release_name, 'title': release_name}),
new Element('span.status', {'text': status.identifier, 'class': 'release_status '+status.identifier}),
new Element('span.quality', {'text': quality.get('label') || 'n/a'}),
new Element('span.size', {'text': release.info['size'] ? Math.floor(self.get(release, 'size')) : 'n/a'}),
new Element('span.age', {'text': self.get(release, 'age')}),
new Element('span.score', {'text': self.get(release, 'score')}),
new Element('span.provider', { 'text': provider, 'title': provider }),
release.info['detail_url'] ? new Element('a.info.icon2', {
'href': release.info['detail_url'],
'target': '_blank'
}) : new Element('a'),
new Element('a.download.icon2', {
'events': {
'click': function(e){
(e).preventDefault();
if(!this.hasClass('completed'))
self.download(release);
}
}
}),
new Element('a.delete.icon2', {
'events': {
'click': function(e){
(e).preventDefault();
self.ignore(release);
}
}
})
).inject(self.release_container);
release['el'] = item;
if(status.identifier == 'ignored' || status.identifier == 'failed' || status.identifier == 'snatched'){
if(!self.last_release || (self.last_release && self.last_release.status.identifier != 'snatched' && status.identifier == 'snatched'))
self.last_release = release;
}
else if(!self.next_release && status.identifier == 'available'){
self.next_release = release;
}
var update_handle = function(notification) {
if(notification.data.id != release.id) return;
var q = self.movie.quality.getElement('.q_id' + release.quality_id),
status = Status.get(release.status_id),
new_status = Status.get(notification.data.status_id);
release.status_id = new_status.id
release.el.set('class', 'item ' + new_status.identifier);
var status_el = release.el.getElement('.release_status');
status_el.set('class', 'release_status ' + new_status.identifier);
status_el.set('text', new_status.identifier);
if(!q && (new_status.identifier == 'snatched' || new_status.identifier == 'seeding' || new_status.identifier == 'done'))
var q = self.addQuality(release.quality_id);
if(new_status && q && !q.hasClass(new_status.identifier)) {
q.removeClass(status.identifier).addClass(new_status.identifier);
q.set('title', q.get('title').replace(status.label, new_status.label));
}
}
App.on('release.update_status', update_handle);
});
if(self.last_release)
self.release_container.getElements('#release_'+self.last_release.id).addClass('last_release');
if(self.next_release)
self.release_container.getElements('#release_'+self.next_release.id).addClass('next_release');
if(self.next_release || (self.last_release && ['ignored', 'failed'].indexOf(self.last_release.status.identifier) === false)){
self.trynext_container = new Element('div.buttons.try_container').inject(self.release_container, 'top');
var nr = self.next_release,
lr = self.last_release;
self.trynext_container.adopt(
new Element('span.or', {
'text': 'This movie is snatched, if anything went wrong, download'
}),
lr ? new Element('a.button.orange', {
'text': 'the same release again',
'events': {
'click': function(){
self.download(lr);
}
}
}) : null,
nr && lr ? new Element('span.or', {
'text': ','
}) : null,
nr ? [new Element('a.button.green', {
'text': lr ? 'another release' : 'the best release',
'events': {
'click': function(){
self.download(nr);
}
}
}),
new Element('span.or', {
'text': 'or pick one below'
})] : null
)
}
self.last_release = null;
self.next_release = null;
}
// Show it
self.options_container.inject(self.movie, 'top');
self.movie.slide('in', self.options_container);
},
showHelper: function(e){
var self = this;
if(e)
(e).preventDefault();
var has_available = false,
has_snatched = false;
self.movie.data.releases.each(function(release){
if(has_available && has_snatched) return;
var status = Status.get(release.status_id);
if(['snatched', 'downloaded', 'seeding'].contains(status.identifier))
has_snatched = true;
if(['available'].contains(status.identifier))
has_available = true;
});
if(has_available || has_snatched){
self.trynext_container = new Element('div.buttons.trynext').inject(self.movie.info_container);
self.trynext_container.adopt(
has_available ? [new Element('a.icon2.readd', {
'text': has_snatched ? 'Download another release' : 'Download the best release',
'events': {
'click': self.tryNextRelease.bind(self)
}
}),
new Element('a.icon2.download', {
'text': 'pick one yourself',
'events': {
'click': function(){
self.movie.quality.fireEvent('click');
}
}
})] : null,
new Element('a.icon2.completed', {
'text': 'mark this movie done',
'events': {
'click': self.markMovieDone.bind(self)
}
})
)
}
},
get: function(release, type){
return release.info[type] !== undefined ? release.info[type] : 'n/a'
},
download: function(release){
var self = this;
var release_el = self.release_container.getElement('#release_'+release.id),
icon = release_el.getElement('.download.icon2');
if(icon)
icon.addClass('icon spinner').removeClass('download');
Api.request('release.manual_download', {
'data': {
'id': release.id
},
'onComplete': function(json){
if(icon)
icon.removeClass('icon spinner');
if(json.success){
if(icon)
icon.addClass('completed');
release_el.getElement('.release_status').set('text', 'snatched');
}
else
if(icon)
icon.addClass('attention').set('title', 'Something went wrong when downloading, please check logs.');
}
});
},
ignore: function(release){
var self = this;
Api.request('release.ignore', {
'data': {
'id': release.id
},
})
},
markMovieDone: function(){
var self = this;
Api.request('media.delete', {
'data': {
'id': self.movie.get('id'),
'delete_from': 'wanted'
},
'onComplete': function(){
var movie = $(self.movie);
movie.set('tween', {
'duration': 300,
'onComplete': function(){
self.movie.destroy()
}
});
movie.tween('height', 0);
}
});
},
tryNextRelease: function(){
var self = this;
Api.request('movie.searcher.try_next', {
'data': {
'id': self.movie.get('id')
}
});
}
});
// Inline trailer player: looks the trailer up via the YouTube data feed and
// embeds a YT.Player that slides open inside the movie row.
MA.Trailer = new Class({

    Extends: MovieAction,
    id: null,

    create: function(){
        var self = this;

        self.el = new Element('a.trailer', {
            'title': 'Watch the trailer of ' + self.getTitle(),
            'events': {
                'click': self.watch.bind(self)
            }
        });

    },

    watch: function(offset){
        var self = this;

        // NOTE(review): when invoked through the bound click handler above,
        // `offset` is actually the click event; the template below contains no
        // {offset} placeholder, so the substitution is a no-op — confirm
        // before relying on it for paging.
        var data_url = 'https://gdata.youtube.com/feeds/videos?vq="{title}" {year} trailer&max-results=1&alt=json-in-script&orderby=relevance&sortorder=descending&format=5&fmt=18';
        var url = data_url.substitute({
                'title': encodeURI(self.getTitle()),
                'year': self.get('year'),
                'offset': offset || 1
            }),
            size = $(self.movie).getSize(),
            height = self.options.height || (size.x/16)*9, // default to 16:9
            id = 'trailer-'+randomString();

        self.player_container = new Element('div[id='+id+']');
        self.container = new Element('div.hide.trailer_container')
            .adopt(self.player_container)
            .inject($(self.movie), 'top');

        // Slide the container open to the player height
        self.container.setStyle('height', 0);
        self.container.removeClass('hide');

        self.close_button = new Element('a.hide.hide_trailer', {
            'text': 'Hide trailer',
            'events': {
                'click': self.stop.bind(self)
            }
        }).inject(self.movie);

        self.container.setStyle('height', height);
        $(self.movie).setStyle('height', height);

        new Request.JSONP({
            'url': url,
            'onComplete': function(json){
                // Feed entry id ends in the video id: .../videos/<id>
                var video_url = json.feed.entry[0].id.$t.split('/'),
                    video_id = video_url[video_url.length-1];

                self.player = new YT.Player(id, {
                    'height': height,
                    'width': size.x,
                    'videoId': video_id,
                    'playerVars': {
                        'autoplay': 1,
                        'showsearch': 0,
                        'wmode': 'transparent',
                        'iv_load_policy': 3
                    }
                });

                self.close_button.removeClass('hide');

                // Bump playback quality to 720p once, when the player reaches
                // PLAYING (1) or PAUSED (2).
                // Fixed: condition read (state.data == 1 || state.data || 2),
                // which is truthy for any non-zero state; compare against 2.
                var quality_set = false;
                var change_quality = function(state){
                    if(!quality_set && (state.data == 1 || state.data == 2)){
                        try {
                            self.player.setPlaybackQuality('hd720');
                            quality_set = true;
                        }
                        catch(e){
                        }
                    }
                };
                self.player.addEventListener('onStateChange', change_quality);

            }
        }).send();
    },

    stop: function(){
        var self = this;

        self.player.stopVideo();
        self.container.addClass('hide');
        self.close_button.addClass('hide');
        $(self.movie).setStyle('height', null);

        // Destroy only after the hide transition has finished
        setTimeout(function(){
            self.container.destroy();
            self.close_button.destroy();
        }, 1800);
    }

});
// Inline editor for a movie: change default title, quality profile and
// category, then save & trigger a new search.
// Fixed: terminate the class assignment with a semicolon, consistent with the
// other MA.* classes in this file.
MA.Edit = new Class({

    Extends: MovieAction,

    create: function(){
        var self = this;

        self.el = new Element('a.edit', {
            'title': 'Change movie information, like title and quality.',
            'events': {
                'click': self.editMovie.bind(self)
            }
        });

    },

    editMovie: function(e){
        var self = this;
        (e).preventDefault();

        // Build the form only once; re-open it on later clicks
        if(!self.options_container){
            self.options_container = new Element('div.options').adopt(
                new Element('div.form').adopt(
                    self.title_select = new Element('select', {
                        'name': 'title'
                    }),
                    self.profile_select = new Element('select', {
                        'name': 'profile'
                    }),
                    self.category_select = new Element('select', {
                        'name': 'category'
                    }).grab(
                        new Element('option', {'value': -1, 'text': 'None'})
                    ),
                    new Element('a.button.edit', {
                        'text': 'Save & Search',
                        'events': {
                            'click': self.save.bind(self)
                        }
                    })
                )
            ).inject(self.movie, 'top');

            // Fill titles, preselecting the default one
            Array.each(self.movie.data.library.titles, function(alt){
                new Element('option', {
                    'text': alt.title
                }).inject(self.title_select);

                if(alt['default'])
                    self.title_select.set('value', alt.title);
            });

            // Fill categories
            var categories = CategoryList.getAll();
            if(categories.length == 0)
                self.category_select.hide();
            else {
                self.category_select.show();
                categories.each(function(category){
                    var category_id = category.data.id;

                    new Element('option', {
                        'value': category_id,
                        'text': category.data.label
                    }).inject(self.category_select);

                    if(self.movie.category && self.movie.category.data && self.movie.category.data.id == category_id)
                        self.category_select.set('value', category_id);
                });
            }

            // Fill profiles; hide the select when there is no real choice
            var profiles = Quality.getActiveProfiles();
            if(profiles.length == 1)
                self.profile_select.hide();

            profiles.each(function(profile){
                var profile_id = profile.id ? profile.id : profile.data.id;

                new Element('option', {
                    'value': profile_id,
                    'text': profile.label ? profile.label : profile.data.label
                }).inject(self.profile_select);

                if(self.movie.get('profile_id') == profile_id)
                    self.profile_select.set('value', profile_id);
            });
        }

        self.movie.slide('in', self.options_container);
    },

    save: function(e){
        (e).preventDefault();
        var self = this;

        Api.request('movie.edit', {
            'data': {
                'id': self.movie.get('id'),
                'default_title': self.title_select.get('value'),
                'profile_id': self.profile_select.get('value'),
                'category_id': self.category_select.get('value')
            },
            'useSpinner': true,
            'spinnerTarget': $(self.movie),
            'onComplete': function(){
                // Reflect the new choices in the collapsed row right away
                self.movie.quality.set('text', self.profile_select.getSelected()[0].get('text'));
                self.movie.title.set('text', self.title_select.getSelected()[0].get('text'));
            }
        });

        self.movie.slide('out');
    }

});
// Refresh action: re-fetches movie metadata and forces a new search.
MA.Refresh = new Class({

    Extends: MovieAction,

    create: function(){
        this.el = new Element('a.refresh', {
            'title': 'Refresh the movie info and do a forced search',
            'events': {
                'click': this.doRefresh.bind(this)
            }
        });
    },

    doRefresh: function(e){
        (e).preventDefault();

        Api.request('media.refresh', {
            'data': {
                'id': this.movie.get('id')
            }
        });
    }

});
// Re-add action: only shown when the movie is done, or has at least one
// snatched/downloaded/done release that can be ignored and retried.
MA.Readd = new Class({

    Extends: MovieAction,

    create: function(){
        var self = this,
            movie_done = Status.get(self.movie.data.status_id).identifier == 'done',
            snatched;

        // Count prior snatches only when the movie itself isn't done yet
        if(!movie_done)
            snatched = self.movie.data.releases.filter(function(release){
                return release.status && (release.status.identifier == 'snatched' || release.status.identifier == 'downloaded' || release.status.identifier == 'done');
            }).length;

        if(movie_done || (snatched && snatched > 0))
            self.el = new Element('a.readd', {
                'title': 'Readd the movie and mark all previous snatched/downloaded as ignored',
                'events': {
                    'click': self.doReadd.bind(self)
                }
            });
    },

    doReadd: function(e){
        (e).preventDefault();

        Api.request('movie.add', {
            'data': {
                'identifier': this.movie.get('identifier'),
                'ignore_previous': 1
            }
        });
    }

});
// Two-step delete: the trash icon slides in a confirm bar
// ("Cancel / or / Delete <title>"); confirming removes the movie from the
// current list and collapses its row with a tween.
MA.Delete = new Class({

    Extends: MovieAction,
    Implements: [Chain],

    create: function(){
        var self = this;

        self.el = new Element('a.delete', {
            'title': 'Remove the movie from this CP list',
            'events': {
                'click': self.showConfirm.bind(self)
            }
        });

    },

    // Build the confirm bar once, then slide it into view
    showConfirm: function(e){
        var self = this;
        (e).preventDefault();

        if(!self.delete_container){
            self.delete_container = new Element('div.buttons.delete_container').adopt(
                new Element('a.cancel', {
                    'text': 'Cancel',
                    'events': {
                        'click': self.hideConfirm.bind(self)
                    }
                }),
                new Element('span.or', {
                    'text': 'or'
                }),
                new Element('a.button.delete', {
                    'text': 'Delete ' + self.movie.title.get('text'),
                    'events': {
                        'click': self.del.bind(self)
                    }
                })
            ).inject(self.movie, 'top');
        }

        self.movie.slide('in', self.delete_container);
    },

    hideConfirm: function(e){
        var self = this;
        (e).preventDefault();

        self.movie.removeView();
        self.movie.slide('out');
    },

    del: function(e){
        (e).preventDefault();
        var self = this;

        var movie = $(self.movie);

        // Chain: the first link is a no-op that just advances to the API call.
        // NOTE(review): looks like a leftover animation step — confirm before
        // simplifying, as Chain link order is behavior here.
        self.chain(
            function(){
                self.callChain();
            },
            function(){
                Api.request('media.delete', {
                    'data': {
                        'id': self.movie.get('id'),
                        'delete_from': self.movie.list.options.identifier
                    },
                    'onComplete': function(){
                        // Collapse the row, then destroy the Movie instance
                        movie.set('tween', {
                            'duration': 300,
                            'onComplete': function(){
                                self.movie.destroy()
                            }
                        });
                        movie.tween('height', 0);
                    }
                });
            }
        );

        self.callChain();
    }

});
// Files action: shows a lazily-loaded table of all files known for each
// release of the movie.
// Fixed: removed the trailing comma after the last class member (a syntax
// error in legacy IE) and added a missing statement semicolon.
MA.Files = new Class({

    Extends: MovieAction,

    create: function(){
        var self = this;

        self.el = new Element('a.directory', {
            'title': 'Available files',
            'events': {
                'click': self.show.bind(self)
            }
        });

    },

    show: function(e){
        var self = this;
        (e).preventDefault();

        if(self.releases)
            self.showFiles();
        else {
            // Fetch release/file info only on first open
            self.movie.busy(true);
            Api.request('release.for_movie', {
                'data': {
                    'id': self.movie.data.id
                },
                'onComplete': function(json){
                    self.movie.busy(false, 1);
                    if(json && json.releases){
                        self.releases = json.releases;
                        self.showFiles();
                    }
                    else
                        alert('Something went wrong, check the logs.');
                }
            });
        }
    },

    showFiles: function(){
        var self = this;

        // Build the table once; later clicks just slide it back in
        if(!self.options_container){
            self.options_container = new Element('div.options').adopt(
                self.files_container = new Element('div.files.table')
            ).inject(self.movie, 'top');

            // Header
            new Element('div.item.head').adopt(
                new Element('span.name', {'text': 'File'}),
                new Element('span.type', {'text': 'Type'}),
                new Element('span.is_available', {'text': 'Available'})
            ).inject(self.files_container);

            Array.each(self.releases, function(release){
                var rel = new Element('div.release').inject(self.files_container);

                Array.each(release.files, function(file){
                    new Element('div.file.item').adopt(
                        new Element('span.name', {'text': file.path}),
                        new Element('span.type', {'text': File.Type.get(file.type_id).name}),
                        new Element('span.available', {'text': file.available})
                    ).inject(rel);
                });
            });
        }

        self.movie.slide('in', self.options_container);
    }

});

File diff suppressed because it is too large Load Diff

View File

@@ -1,332 +0,0 @@
// One movie row/tile in a list: renders poster, title/year, quality badges
// and per-movie actions, and keeps itself in sync with server-side events
// (metadata updates, search start/end, release status changes).
var Movie = new Class({

    Extends: BlockBase,

    // Instantiated MovieAction objects, keyed by lowercased action name
    action: {},

    initialize: function(list, options, data){
        var self = this;

        self.data = data;
        self.view = options.view || 'details';
        self.list = list;

        self.el = new Element('div.movie');

        // Fall back to empty objects so later property access is safe
        self.profile = Quality.getProfile(data.profile_id) || {};
        self.category = CategoryList.getCategory(data.category_id) || {};
        self.parent(self, options);

        self.addEvents();
    },

    // Register all App-level listeners; handles are kept in global_events so
    // destroy() can unregister them again.
    addEvents: function(){
        var self = this;
        self.global_events = {}

        // Do refresh with new data
        self.global_events['movie.update'] = function(notification){
            if(self.data.id != notification.data.id) return;

            self.busy(false);
            self.removeView();
            // Delay so the busy-spinner fade can finish first
            self.update.delay(2000, self, notification);
        }
        App.on('movie.update', self.global_events['movie.update']);

        // Add spinner on load / search
        ['movie.busy', 'movie.searcher.started'].each(function(listener){
            self.global_events[listener] = function(notification){
                if(notification.data && self.data.id == notification.data.id)
                    self.busy(true)
            }
            App.on(listener, self.global_events[listener]);
        })

        // Remove spinner
        self.global_events['movie.searcher.ended'] = function(notification){
            if(notification.data && self.data.id == notification.data.id)
                self.busy(false)
        }
        App.on('movie.searcher.ended', self.global_events['movie.searcher.ended']);

        // Reload when releases have updated
        self.global_events['release.update_status'] = function(notification){
            var data = notification.data
            if(data && self.data.id == data.media_id){

                if(!self.data.releases)
                    self.data.releases = [];

                self.data.releases.push({'quality_id': data.quality_id, 'status_id': data.status_id});
                self.updateReleases();
            }
        }
        App.on('release.update_status', self.global_events['release.update_status']);

    },

    // Remove the row from DOM and list bookkeeping, and detach all listeners
    destroy: function(){
        var self = this;

        self.el.destroy();
        delete self.list.movies_added[self.get('id')];
        self.list.movies.erase(self)
        self.list.checkIfEmpty();

        // Remove events
        Object.each(self.global_events, function(handle, listener){
            App.off(listener, handle);
        });
    },

    // Show (set_busy truthy) or fade out (falsy) the spinner mask.
    // timeout tweaks both the hide delay and the fade duration.
    busy: function(set_busy, timeout){
        var self = this;

        if(!set_busy){
            setTimeout(function(){
                if(self.spinner){
                    self.mask.fade('out');
                    setTimeout(function(){
                        if(self.mask)
                            self.mask.destroy();
                        if(self.spinner)
                            self.spinner.el.destroy();
                        self.spinner = null;
                        self.mask = null;
                    }, timeout || 400);
                }
            }, timeout || 1000)
        }
        else if(!self.spinner) {
            self.createMask();
            self.spinner = createSpinner(self.mask);
            self.mask.fade('in');
        }
    },

    createMask: function(){
        var self = this;
        self.mask = new Element('div.mask', {
            'styles': {
                'z-index': 4
            }
        }).inject(self.el, 'top').fade('hide');
    },

    // Size and position the mask over the row; returns the mask element
    positionMask: function(){
        var self = this,
            s = self.el.getSize()

        return self.mask.setStyles({
            'width': s.x,
            'height': s.y
        }).position({
            'relativeTo': self.el
        })
    },

    // Full re-render with fresh data from a 'movie.update' notification
    update: function(notification){
        var self = this;

        self.data = notification.data;
        self.el.empty();
        self.removeView();

        self.profile = Quality.getProfile(self.data.profile_id) || {};
        self.category = CategoryList.getCategory(self.data.category_id) || {};
        self.create();

        self.busy(false);
    },

    // Build the row DOM: checkbox, poster, title/year/plot, quality badges
    // and the action icons configured on the list.
    create: function(){
        var self = this;

        var s = Status.get(self.get('status_id'));
        self.el.addClass('status_'+s.identifier);

        self.el.adopt(
            self.select_checkbox = new Element('input[type=checkbox].inlay', {
                'events': {
                    'change': function(){
                        self.fireEvent('select')
                    }
                }
            }),
            self.thumbnail = File.Select.single('poster', self.data.library.files),
            self.data_container = new Element('div.data.inlay.light').adopt(
                self.info_container = new Element('div.info').adopt(
                    new Element('div.title').adopt(
                        self.title = new Element('span', {
                            'text': self.getTitle() || 'n/a'
                        }),
                        self.year = new Element('div.year', {
                            'text': self.data.library.year || 'n/a'
                        })
                    ),
                    self.description = new Element('div.description', {
                        'text': self.data.library.plot
                    }),
                    self.quality = new Element('div.quality', {
                        'events': {
                            // Clicking the quality badges opens the releases view
                            // NOTE(review): assumes '.actions .releases' exists;
                            // getElement may return null — confirm.
                            'click': function(e){
                                var releases = self.el.getElement('.actions .releases');
                                if(releases.isVisible())
                                    releases.fireEvent('click', [e])
                            }
                        }
                    })
                ),
                self.actions = new Element('div.actions')
            )
        );

        if(self.thumbnail.empty)
            self.el.addClass('no_thumbnail');

        //self.changeView(self.view);
        self.select_checkbox_class = new Form.Check(self.select_checkbox);

        // Add profile
        if(self.profile.data)
            self.profile.getTypes().each(function(type){

                var q = self.addQuality(type.quality_id || type.get('quality_id'));
                if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
                    q.addClass('finish');
                    q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
                }

            });

        // Add releases
        self.updateReleases();

        Object.each(self.options.actions, function(action, key){
            self.action[key.toLowerCase()] = action = new self.options.actions[key](self)
            if(action.el)
                self.actions.adopt(action)
        });

    },

    // Tag existing quality badges (or add new ones) with the status of each
    // known release: snatched/seeding/done.
    updateReleases: function(){
        var self = this;
        if(!self.data.releases || self.data.releases.length == 0) return;

        self.data.releases.each(function(release){

            var q = self.quality.getElement('.q_id'+ release.quality_id),
                status = Status.get(release.status_id);

            if(!q && (status.identifier == 'snatched' || status.identifier == 'seeding' || status.identifier == 'done'))
                var q = self.addQuality(release.quality_id)

            if (status && q && !q.hasClass(status.identifier)){
                q.addClass(status.identifier);
                q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status.label)
            }

        });
    },

    // Append one quality badge span and return it
    addQuality: function(quality_id){
        var self = this;

        var q = Quality.getQuality(quality_id);
        return new Element('span', {
            'text': q.label,
            'class': 'q_'+q.identifier + ' q_id' + q.id,
            'title': ''
        }).inject(self.quality);

    },

    // Default title if set, otherwise the first title, otherwise a fallback
    getTitle: function(){
        var self = this;

        var titles = self.data.library.titles;

        var title = titles.filter(function(title){
            return title['default']
        }).pop()

        if(title)
            return self.getUnprefixedTitle(title.title)
        else if(titles.length > 0)
            return self.getUnprefixedTitle(titles[0].title)

        return 'Unknown movie'
    },

    // "The Movie" -> "Movie, The" for sorting/display
    getUnprefixedTitle: function(t){
        if(t.substr(0, 4).toLowerCase() == 'the ')
            t = t.substr(4) + ', The';
        return t;
    },

    // Slide an options panel in over the data container ('in'), or slide it
    // back out and clean up ('out').
    slide: function(direction, el){
        var self = this;

        if(direction == 'in'){
            self.temp_view = self.view;
            self.changeView('details');

            // Clicking anywhere outside closes the panel again
            self.el.addEvent('outerClick', function(){
                self.removeView();
                self.slide('out')
            })
            el.show();
            self.data_container.addClass('hide_right');
        }
        else {
            self.el.removeEvents('outerClick')

            // Hide panels only after the slide transition has finished
            setTimeout(function(){
                if(self.el)
                    self.el.getElements('> :not(.data):not(.poster):not(.movie_container)').hide();
            }, 600);

            self.data_container.removeClass('hide_right');
        }
    },

    // Swap the CSS view class (e.g. 'details_view' -> 'list_view')
    changeView: function(new_view){
        var self = this;
        if(self.el)
            self.el
                .removeClass(self.view+'_view')
                .addClass(new_view+'_view')
        self.view = new_view;
    },

    removeView: function(){
        var self = this;
        self.el.removeClass(self.view+'_view')
    },

    // Attribute lookup: own data first, then the nested library record
    get: function(attr){
        return this.data[attr] || this.data.library[attr]
    },

    select: function(bool){
        var self = this;
        self.select_checkbox_class[bool ? 'check' : 'uncheck']()
    },

    isSelected: function(){
        return this.select_checkbox.get('checked');
    },

    toElement: function(){
        return this.el;
    }

});

View File

@@ -1,229 +0,0 @@
// One result row in the "add movie" search list: poster + title/year that
// expands into title/profile/category selects and an Add button.
// Fixed: createOptions called self.category_select.movie(), a nonexistent
// method that threw whenever categories exist; it must be .show() (the
// identical branch in MA.Edit uses show()).
Block.Search.MovieItem = new Class({

    Implements: [Options, Events],

    initialize: function(info, options){
        var self = this;
        self.setOptions(options);

        self.info = info;
        self.alternative_titles = [];

        self.create();
    },

    create: function(){
        var self = this,
            info = self.info;

        self.el = new Element('div.media_result', {
            'id': info.imdb
        }).adopt(
            self.thumbnail = info.images && info.images.poster.length > 0 ? new Element('img.thumbnail', {
                'src': info.images.poster[0],
                'height': null,
                'width': null
            }) : null,
            self.options_el = new Element('div.options.inlay'),
            self.data_container = new Element('div.data', {
                'events': {
                    'click': self.showOptions.bind(self)
                }
            }).adopt(
                self.info_container = new Element('div.info').adopt(
                    new Element('h2').adopt(
                        self.title = new Element('span.title', {
                            'text': info.titles && info.titles.length > 0 ? info.titles[0] : 'Unknown'
                        }),
                        self.year = info.year ? new Element('span.year', {
                            'text': info.year
                        }) : null
                    )
                )
            )
        );

        if(info.titles)
            info.titles.each(function(title){
                self.alternativeTitle({
                    'title': title
                });
            });
    },

    alternativeTitle: function(alternative){
        var self = this;
        self.alternative_titles.include(alternative);
    },

    getTitle: function(){
        var self = this;
        try {
            return self.info.original_title ? self.info.original_title : self.info.titles[0];
        }
        catch(e){
            return 'Unknown';
        }
    },

    get: function(key){
        return this.info[key]
    },

    showOptions: function(){
        var self = this;

        self.createOptions();

        self.data_container.addClass('open');
        self.el.addEvent('outerClick', self.closeOptions.bind(self));
    },

    closeOptions: function(){
        var self = this;

        self.data_container.removeClass('open');
        self.el.removeEvents('outerClick');
    },

    add: function(e){
        var self = this;

        if(e)
            (e).preventDefault();

        self.loadingMask();

        Api.request('movie.add', {
            'data': {
                'identifier': self.info.imdb,
                'title': self.title_select.get('value'),
                'profile_id': self.profile_select.get('value'),
                'category_id': self.category_select.get('value')
            },
            'onComplete': function(json){
                self.options_el.empty();
                self.options_el.adopt(
                    new Element('div.message', {
                        'text': json.success ? 'Movie successfully added.' : 'Movie didn\'t add properly. Check logs'
                    })
                );
                self.mask.fade('out');

                self.fireEvent('added');
            },
            'onFailure': function(){
                self.options_el.empty();
                self.options_el.adopt(
                    new Element('div.message', {
                        'text': 'Something went wrong, check the logs for more info.'
                    })
                );
                self.mask.fade('out');
            }
        });
    },

    // Build the title/profile/category selects once. Auto-adds immediately
    // when there is nothing to choose and the movie isn't wanted/owned yet.
    createOptions: function(){
        var self = this,
            info = self.info;

        if(!self.options_el.hasClass('set')){

            // Qualities already present in the library, if any
            var in_library;
            if(self.info.in_library){
                in_library = [];
                self.info.in_library.releases.each(function(release){
                    in_library.include(release.quality.label);
                });
            }

            self.options_el.grab(
                new Element('div', {
                    'class': self.info.in_wanted && self.info.in_wanted.profile_id || in_library ? 'in_library_wanted' : ''
                }).adopt(
                    self.info.in_wanted && self.info.in_wanted.profile_id ? new Element('span.in_wanted', {
                        'text': 'Already in wanted list: ' + Quality.getProfile(self.info.in_wanted.profile_id).get('label')
                    }) : (in_library ? new Element('span.in_library', {
                        'text': 'Already in library: ' + in_library.join(', ')
                    }) : null),
                    self.title_select = new Element('select', {
                        'name': 'title'
                    }),
                    self.profile_select = new Element('select', {
                        'name': 'profile'
                    }),
                    self.category_select = new Element('select', {
                        'name': 'category'
                    }).grab(
                        new Element('option', {'value': -1, 'text': 'None'})
                    ),
                    self.add_button = new Element('a.button', {
                        'text': 'Add',
                        'events': {
                            'click': self.add.bind(self)
                        }
                    })
                )
            );

            Array.each(self.alternative_titles, function(alt){
                new Element('option', {
                    'text': alt.title
                }).inject(self.title_select);
            });

            // Fill categories
            var categories = CategoryList.getAll();
            if(categories.length == 0)
                self.category_select.hide();
            else {
                self.category_select.show();
                categories.each(function(category){
                    new Element('option', {
                        'value': category.data.id,
                        'text': category.data.label
                    }).inject(self.category_select);
                });
            }

            // Fill profiles; hide the select when there is no real choice
            var profiles = Quality.getActiveProfiles();
            if(profiles.length == 1)
                self.profile_select.hide();

            profiles.each(function(profile){
                new Element('option', {
                    'value': profile.id ? profile.id : profile.data.id,
                    'text': profile.label ? profile.label : profile.data.label
                }).inject(self.profile_select);
            });

            self.options_el.addClass('set');

            // Nothing to pick and not already wanted/owned: add right away
            if(categories.length == 0 && self.title_select.getElements('option').length == 1 && profiles.length == 1 &&
                    !(self.info.in_wanted && self.info.in_wanted.profile_id || in_library))
                self.add();

        }

    },

    loadingMask: function(){
        var self = this;

        self.mask = new Element('div.mask').inject(self.el).fade('hide');
        createSpinner(self.mask);
        self.mask.fade('in');
    },

    toElement: function(){
        return this.el
    }

});

View File

@@ -1,6 +0,0 @@
from .main import MovieLibraryPlugin
def start():
    """Plugin entry point: return a fresh MovieLibraryPlugin instance."""
    return MovieLibraryPlugin()

config = []  # this plugin exposes no user-facing settings

View File

@@ -1,73 +0,0 @@
from .main import MovieSearcher
import random
def start():
    """Plugin entry point: return a fresh MovieSearcher instance."""
    return MovieSearcher()
# Settings rendered on the "Searcher" tab. All options are advanced; several
# migrate their stored value from the old generic 'searcher' section.
config = [{
    'name': 'moviesearcher',
    'order': 20,
    'groups': [
        {
            'tab': 'searcher',
            'name': 'movie_searcher',
            'label': 'Movie search',
            'description': 'Search options for movies',
            'advanced': True,
            'options': [
                {
                    'name': 'always_search',
                    'default': False,
                    'migrate_from': 'searcher',
                    'type': 'bool',
                    'label': 'Always search',
                    'description': 'Search for movies even before there is a ETA. Enabling this will probably get you a lot of fakes.',
                },
                {
                    'name': 'run_on_launch',
                    'migrate_from': 'searcher',
                    'label': 'Run on launch',
                    'advanced': True,
                    'default': 0,
                    'type': 'bool',
                    'description': 'Force run the searcher after (re)start.',
                },
                {
                    'name': 'search_on_add',
                    'label': 'Search after add',
                    'advanced': True,
                    'default': 1,
                    'type': 'bool',
                    'description': 'Disable this to only search for movies on cron.',
                },
                # Cron fields follow APScheduler syntax; hour/minute default to
                # random values so users don't all hit providers simultaneously.
                {
                    'name': 'cron_day',
                    'migrate_from': 'searcher',
                    'label': 'Day',
                    'advanced': True,
                    'default': '*',
                    'type': 'string',
                    'description': '<strong>*</strong>: Every day, <strong>*/2</strong>: Every 2 days, <strong>1</strong>: Every first of the month. See <a href="http://packages.python.org/APScheduler/cronschedule.html">APScheduler</a> for details.',
                },
                {
                    'name': 'cron_hour',
                    'migrate_from': 'searcher',
                    'label': 'Hour',
                    'advanced': True,
                    'default': random.randint(0, 23),
                    'type': 'string',
                    'description': '<strong>*</strong>: Every hour, <strong>*/8</strong>: Every 8 hours, <strong>3</strong>: At 3, midnight.',
                },
                {
                    'name': 'cron_minute',
                    'migrate_from': 'searcher',
                    'label': 'Minute',
                    'advanced': True,
                    'default': random.randint(0, 59),
                    'type': 'string',
                    'description': "Just keep it random, so the providers don't get DDOSed by every CP user on a 'full' hour."
                },
            ],
        },
    ],
}]

View File

@@ -1,350 +0,0 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
from couchpotato.core.helpers.encoding import simplifyString
from couchpotato.core.helpers.variable import getTitle, possibleTitles, getImdb
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.searcher.base import SearcherBase
from couchpotato.core.media.movie import MovieTypeBase
from couchpotato.core.settings.model import Media, Release
from couchpotato.environment import Env
from datetime import date
import random
import re
import time
import traceback
log = CPLog(__name__)
class MovieSearcher(SearcherBase, MovieTypeBase):
in_progress = False
def __init__(self):
    """Register the searcher's internal events and HTTP API endpoints."""
    super(MovieSearcher, self).__init__()

    # Internal events other plugins can fire
    addEvent('movie.searcher.all', self.searchAll)
    addEvent('movie.searcher.all_view', self.searchAllView)
    addEvent('movie.searcher.single', self.single)
    addEvent('movie.searcher.try_next_release', self.tryNextRelease)
    addEvent('movie.searcher.could_be_released', self.couldBeReleased)
    addEvent('searcher.correct_release', self.correctRelease)

    # HTTP API endpoints
    addApiView('movie.searcher.try_next', self.tryNextReleaseView, docs = {
        'desc': 'Marks the snatched results as ignored and try the next best release',
        'params': {
            'id': {'desc': 'The id of the movie'},
        },
    })

    addApiView('movie.searcher.full_search', self.searchAllView, docs = {
        'desc': 'Starts a full search for all wanted movies',
    })

    addApiView('movie.searcher.progress', self.getProgress, docs = {
        'desc': 'Get the progress of current full search',
        'return': {'type': 'object', 'example': """{
    'progress': False || object, total & to_go,
}"""},
    })

    # Optionally kick off a full search right after startup
    if self.conf('run_on_launch'):
        addEvent('app.load', self.searchAll)
def searchAllView(self, **kwargs):
    """Kick off a full search in the background.

    Reports success as False when a full search was already running at
    the time of the call.
    """
    fireEventAsync('movie.searcher.all')

    already_running = bool(self.in_progress)
    return {'success': not already_running}
def searchAll(self):
    """Run a full search over every active movie in the library.

    While running, self.in_progress holds a {'total', 'to_go'} dict so
    getProgress/searchAllView can report on it; only one full search can
    run at a time.
    """

    if self.in_progress:
        log.info('Search already in progress')
        fireEvent('notify.frontend', type = 'movie.searcher.already_started', data = True, message = 'Full search already in progress')
        return

    self.in_progress = True
    fireEvent('notify.frontend', type = 'movie.searcher.started', data = True, message = 'Full search started')

    db = get_session()

    movies = db.query(Media).filter(
        Media.status.has(identifier = 'active')
    ).all()
    # Shuffle so the same movie doesn't always get searched first
    random.shuffle(movies)

    self.in_progress = {
        'total': len(movies),
        'to_go': len(movies),
    }

    try:
        search_protocols = fireEvent('searcher.protocols', single = True)

        for movie in movies:
            movie_dict = movie.to_dict({
                'category': {},
                'profile': {'types': {'quality': {}}},
                'releases': {'status': {}, 'quality': {}},
                'library': {'titles': {}, 'files':{}},
                'files': {},
            })

            try:
                self.single(movie_dict, search_protocols)
            except IndexError:
                # A broken library record; force a metadata refresh and move on
                log.error('Forcing library update for %s, if you see this often, please report: %s', (movie_dict['library']['identifier'], traceback.format_exc()))
                fireEvent('library.update.movie', movie_dict['library']['identifier'], force = True)
            except:
                log.error('Search failed for %s: %s', (movie_dict['library']['identifier'], traceback.format_exc()))

            self.in_progress['to_go'] -= 1

            # Break if CP wants to shut down
            if self.shuttingDown():
                break

    except SearchSetupError:
        # NOTE(review): SearchSetupError is not imported in the visible part of
        # this file — confirm it is in scope, otherwise this raises NameError.
        pass

    self.in_progress = False
def single(self, movie, search_protocols = None, manual = False):
    """Search every wanted quality of one movie and snatch the best result.

    movie: media dict (Media.to_dict output) including profile, releases
    and library info. search_protocols: cached 'searcher.protocols' event
    result; looked up when not given. manual: a user-triggered search
    bypasses the "already done" short-circuit.
    Returns True when a release was downloaded, otherwise False/None.

    Fixes: replaced dict.has_key (removed in Python 3) with the `in`
    operator, and `is 0` (identity compare relying on CPython small-int
    interning) with `== 0`; behavior is unchanged.
    """

    # movies don't contain 'type' yet, so just set to default here
    if 'type' not in movie:
        movie['type'] = 'movie'

    # Find out search type
    try:
        if not search_protocols:
            search_protocols = fireEvent('searcher.protocols', single = True)
    except SearchSetupError:
        return

    done_status = fireEvent('status.get', 'done', single = True)

    if not movie['profile'] or (movie['status_id'] == done_status.get('id') and not manual):
        log.debug('Movie doesn\'t have a profile or already done, assuming in manage tab.')
        return

    db = get_session()

    pre_releases = fireEvent('quality.pre_releases', single = True)
    release_dates = fireEvent('library.update.movie.release_date', identifier = movie['library']['identifier'], merge = True)
    available_status, ignored_status, failed_status = fireEvent('status.get', ['available', 'ignored', 'failed'], single = True)

    found_releases = []
    too_early_to_search = []

    default_title = getTitle(movie['library'])
    if not default_title:
        log.error('No proper info found for movie, removing it from library to cause it from having more issues.')
        fireEvent('media.delete', movie['id'], single = True)
        return

    fireEvent('notify.frontend', type = 'movie.searcher.started', data = {'id': movie['id']}, message = 'Searching for "%s"' % default_title)

    ret = False
    for quality_type in movie['profile']['types']:

        # Respect release dates unless the user forces early searching
        if not self.conf('always_search') and not self.couldBeReleased(quality_type['quality']['identifier'] in pre_releases, release_dates, movie['library']['year']):
            too_early_to_search.append(quality_type['quality']['identifier'])
            continue

        has_better_quality = 0

        # See if better quality is available
        for release in movie['releases']:
            if release['quality']['order'] <= quality_type['quality']['order'] and release['status_id'] not in [available_status.get('id'), ignored_status.get('id'), failed_status.get('id')]:
                has_better_quality += 1

        # Don't search for quality lower then already available.
        if has_better_quality == 0:

            log.info('Search for %s in %s', (default_title, quality_type['quality']['label']))
            quality = fireEvent('quality.single', identifier = quality_type['quality']['identifier'], single = True)

            results = fireEvent('searcher.search', search_protocols, movie, quality, single = True) or []
            if len(results) == 0:
                log.debug('Nothing found for %s in %s', (default_title, quality_type['quality']['label']))

            # Check if movie isn't deleted while searching
            if not db.query(Media).filter_by(id = movie.get('id')).first():
                break

            # Add them to this movie releases list
            found_releases += fireEvent('release.create_from_search', results, movie, quality_type, single = True)

            # Try find a valid result and download it
            if fireEvent('release.try_download_result', results, movie, quality_type, manual, single = True):
                ret = True

            # Remove releases that aren't found anymore
            for release in movie.get('releases', []):
                if release.get('status_id') == available_status.get('id') and release.get('identifier') not in found_releases:
                    fireEvent('release.delete', release.get('id'), single = True)

        else:
            log.info('Better quality (%s) already available or snatched for %s', (quality_type['quality']['label'], default_title))
            fireEvent('media.restatus', movie['id'])
            break

        # Break if CP wants to shut down
        if self.shuttingDown() or ret:
            break

    if len(too_early_to_search) > 0:
        log.info2('Too early to search for %s, %s', (too_early_to_search, default_title))

    fireEvent('notify.frontend', type = 'movie.searcher.ended', data = {'id': movie['id']})

    return ret
def correctRelease(self, nzb = None, media = None, quality = None, **kwargs):
    """Validate a single search result against the wanted movie/quality.

    Returns True when the release matches, False when any check rejects it,
    and None when the media isn't a movie (so other handlers can respond).
    """
    if media.get('type') != 'movie': return

    media_title = fireEvent('library.title', media['library'], single = True)

    imdb_results = kwargs.get('imdb_results', False)
    retention = Env.setting('retention', section = 'nzb')

    # Usenet only: torrents carry a seeders count and aren't retention bound.
    if nzb.get('seeders') is None and 0 < retention < nzb.get('age', 0):
        log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (nzb['age'], retention, nzb['name']))
        return False

    # Check for required and ignored words
    if not fireEvent('searcher.correct_words', nzb['name'], media, single = True):
        return False

    preferred_quality = fireEvent('quality.single', identifier = quality['identifier'], single = True)

    # Contains lower quality string
    if fireEvent('searcher.contains_other_quality', nzb, movie_year = media['library']['year'], preferred_quality = preferred_quality, single = True):
        log.info2('Wrong: %s, looking for %s', (nzb['name'], quality['label']))
        return False

    # File to small
    if nzb['size'] and preferred_quality['size_min'] > nzb['size']:
        log.info2('Wrong: "%s" is too small to be %s. %sMB instead of the minimal of %sMB.', (nzb['name'], preferred_quality['label'], nzb['size'], preferred_quality['size_min']))
        return False

    # File to large
    if nzb['size'] and preferred_quality.get('size_max') < nzb['size']:
        log.info2('Wrong: "%s" is too large to be %s. %sMB instead of the maximum of %sMB.', (nzb['name'], preferred_quality['label'], nzb['size'], preferred_quality['size_max']))
        return False

    # Provider specific functions
    get_more = nzb.get('get_more_info')
    if get_more:
        get_more(nzb)

    extra_check = nzb.get('extra_check')
    if extra_check and not extra_check(nzb):
        return False

    if imdb_results:
        return True

    # Check if nzb contains imdb link
    if getImdb(nzb.get('description', '')) == media['library']['identifier']:
        return True

    for raw_title in media['library']['titles']:
        for movie_title in possibleTitles(raw_title['title']):
            # BUG FIX: pattern was '\W+' without the raw prefix; '\W' is an
            # invalid string escape (warning, later an error) in modern Python.
            movie_words = re.split(r'\W+', simplifyString(movie_title))

            if fireEvent('searcher.correct_name', nzb['name'], movie_title, single = True):
                # if no IMDB link, at least check year range 1
                if len(movie_words) > 2 and fireEvent('searcher.correct_year', nzb['name'], media['library']['year'], 1, single = True):
                    return True

                # if no IMDB link, at least check year
                if len(movie_words) <= 2 and fireEvent('searcher.correct_year', nzb['name'], media['library']['year'], 0, single = True):
                    return True

    log.info("Wrong: %s, undetermined naming. Looking for '%s (%s)'", (nzb['name'], media_title, media['library']['year']))
    return False
def couldBeReleased(self, is_pre_release, dates, year = None):
    """Tell whether a movie could realistically be downloadable by now.

    Release-date timestamps (epoch seconds) are compared against the current
    time, with grace windows: a week before theater date for pre-releases,
    12 weeks after theater release, and 4 weeks before the dvd date.
    """
    ts_now = int(time.time())
    this_year = date.today().year

    no_known_dates = not dates or (dates.get('theater', 0) == 0 and dates.get('dvd', 0) == 0)

    # Nothing to go on: older (or unknown-year) movies without any dates are
    # assumed to be out already.
    if no_known_dates and (year is None or year < this_year - 1):
        return True

    # Movies dated too far in the future can never be released yet.
    if year is not None and year > this_year + 1:
        return False

    # Negative timestamps mean a pre-epoch release (before the 1970s).
    if not dates or dates.get('theater', 0) < 0 or dates.get('dvd', 0) < 0:
        return True

    if is_pre_release:
        # Prereleases tend to surface about a week before the theater date.
        return dates.get('theater') - 604800 < ts_now

    # 12 weeks after the theater release.
    theater_ts = dates.get('theater')
    if theater_ts > 0 and theater_ts + 7257600 < ts_now:
        return True

    dvd_ts = dates.get('dvd')
    if dvd_ts > 0:
        # 4 weeks before the dvd date, or any time after it.
        if dvd_ts - 2419200 < ts_now or dvd_ts < ts_now:
            return True

    return False
def tryNextReleaseView(self, id = None, **kwargs):
    """API wrapper: force a manual "try next release" for one movie."""
    return {'success': self.tryNextRelease(id, manual = True)}
def tryNextRelease(self, media_id, manual = False):
    """Ignore every snatched/done release of a movie and search again.

    Returns True when the re-search was fired, False on any failure.
    """
    snatched_status, done_status, ignored_status = fireEvent('status.get', ['snatched', 'done', 'ignored'], single = True)

    try:
        db = get_session()
        rels = db.query(Release) \
            .filter_by(media_id = media_id) \
            .filter(Release.status_id.in_([snatched_status.get('id'), done_status.get('id')])) \
            .all()

        # Flag previously grabbed releases as ignored so the searcher is
        # free to pick a different one this time around.
        for rel in rels:
            rel.status_id = ignored_status.get('id')
        db.commit()

        movie_dict = fireEvent('media.get', media_id = media_id, single = True)
        log.info('Trying next release for: %s', getTitle(movie_dict['library']))
        fireEvent('movie.searcher.single', movie_dict, manual = manual)

        return True
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt during shutdown; narrowed to Exception.
        log.error('Failed searching for next release: %s', traceback.format_exc())
        return False
class SearchSetupError(Exception):
    # NOTE(review): not raised anywhere in this chunk; presumably used by the
    # searcher setup code elsewhere in the project -- confirm before removing.
    pass

View File

@@ -1,107 +0,0 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Media, Library
from couchpotato.environment import Env
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql.expression import or_
class Suggestion(Plugin):
    """Movie suggestion API.

    Serves a cached list of suggestions ("You might like these") based on
    the user's active/done movies, and maintains it as the user ignores,
    marks seen, or adds individual suggestions.
    """

    def __init__(self):
        addApiView('suggestion.view', self.suggestView)
        addApiView('suggestion.ignore', self.ignoreView)

    def suggestView(self, limit = 6, **kwargs):
        """Return up to `limit` suggestions, generating and caching a fresh
        list when none is cached.

        Optional kwargs (comma separated id strings): movies, ignored, seen.
        """
        movies = splitString(kwargs.get('movies', ''))
        ignored = splitString(kwargs.get('ignored', ''))
        seen = splitString(kwargs.get('seen', ''))

        cached_suggestion = self.getCache('suggestion_cached')
        if cached_suggestion:
            suggestions = cached_suggestion
        else:

            # No explicit seed list: seed with all active/done movies.
            if not movies or len(movies) == 0:
                db = get_session()
                active_movies = db.query(Media) \
                    .options(joinedload_all('library')) \
                    .filter(or_(*[Media.status.has(identifier = s) for s in ['active', 'done']])).all()
                movies = [x.library.identifier for x in active_movies]

            if not ignored or len(ignored) == 0:
                ignored = splitString(Env.prop('suggest_ignore', default = ''))
            if not seen or len(seen) == 0:
                movies.extend(splitString(Env.prop('suggest_seen', default = '')))

            suggestions = fireEvent('movie.suggest', movies = movies, ignore = ignored, single = True)
            self.setCache('suggestion_cached', suggestions, timeout = 6048000)  # Cache for 10 weeks

        return {
            'success': True,
            'count': len(suggestions),
            'suggestions': suggestions[:int(limit)]
        }

    def ignoreView(self, imdb = None, limit = 6, remove_only = False, mark_seen = False, **kwargs):
        """Drop one suggestion (permanently ignore it, mark it seen, or just
        remove it after adding), then top the cache back up.
        """
        # BUG FIX: API parameters arrive as strings; suggestView already casts
        # its limit, but the slice below broke on a str limit.
        limit = int(limit)

        ignored = splitString(Env.prop('suggest_ignore', default = ''))
        seen = splitString(Env.prop('suggest_seen', default = ''))

        new_suggestions = []
        if imdb:
            if mark_seen:
                seen.append(imdb)
                Env.prop('suggest_seen', ','.join(set(seen)))
            elif not remove_only:
                ignored.append(imdb)
                Env.prop('suggest_ignore', ','.join(set(ignored)))

            new_suggestions = self.updateSuggestionCache(ignore_imdb = imdb, limit = limit, ignored = ignored, seen = seen)

        return {
            'result': True,
            'ignore_count': len(ignored),
            # Only the replacement suggestion, to fill the freed spot.
            'suggestions': new_suggestions[limit - 1:limit]
        }

    def updateSuggestionCache(self, ignore_imdb = None, limit = 6, ignored = None, seen = None):
        """Rebuild the cached suggestion list without `ignore_imdb`, fetching
        fresh suggestions when the remainder runs short of `limit`.
        """
        # Combine with previous suggestion_cache
        cached_suggestion = self.getCache('suggestion_cached') or []
        new_suggestions = []
        ignored = [] if not ignored else ignored
        seen = [] if not seen else seen

        if ignore_imdb:
            for cs in cached_suggestion:
                if cs.get('imdb') != ignore_imdb:
                    new_suggestions.append(cs)

        # Get new results and add them
        if len(new_suggestions) - 1 < limit:
            active_status, done_status = fireEvent('status.get', ['active', 'done'], single = True)

            db = get_session()
            active_movies = db.query(Media) \
                .join(Library) \
                .with_entities(Library.identifier) \
                .filter(Media.status_id.in_([active_status.get('id'), done_status.get('id')])).all()
            movies = [x[0] for x in active_movies]
            movies.extend(seen)

            # Don't re-suggest anything we still have cached.
            ignored.extend([x.get('imdb') for x in cached_suggestion])
            suggestions = fireEvent('movie.suggest', movies = movies, ignore = list(set(ignored)), single = True)
            if suggestions:
                new_suggestions.extend(suggestions)

        self.setCache('suggestion_cached', new_suggestions, timeout = 3024000)  # Cache for 5 weeks
        return new_suggestions

View File

@@ -1,160 +0,0 @@
/* Styles for the "You might like these" suggestions panel. */
.suggestions {
}

.suggestions > h2 {
	height: 40px;
}

/* Three-across grid of suggestion cards, collapsing responsively below. */
.suggestions .media_result {
	display: inline-block;
	width: 33.333%;
	height: 150px;
}

@media all and (max-width: 960px) {
	.suggestions .media_result {
		width: 50%;
	}
}

@media all and (max-width: 600px) {
	.suggestions .media_result {
		width: 100%;
	}
}

/* Info block sits to the right of the 100px-wide poster thumbnail. */
.suggestions .media_result .data {
	left: 100px;
	background: #4e5969;
	border: none;
}

.suggestions .media_result .data .info {
	top: 10px;
	left: 15px;
	right: 15px;
	bottom: 10px;
	overflow: hidden;
}

.suggestions .media_result .data .info h2 {
	white-space: normal;
	max-height: 120px;
	font-size: 18px;
	line-height: 18px;
}

/* Rating / genres / year flow in-document instead of absolute positioning. */
.suggestions .media_result .data .info .rating,
.suggestions .media_result .data .info .genres,
.suggestions .media_result .data .info .year {
	position: static;
	display: block;
	padding: 0;
	opacity: .6;
}

.suggestions .media_result .data .info .year {
	margin: 10px 0 0;
}

.suggestions .media_result .data .info .rating {
	font-size: 20px;
	float: right;
	margin-top: -20px;
}

/* Star glyph (Elusive icon font) in front of the rating number. */
.suggestions .media_result .data .info .rating:before {
	content: "\e031";
	font-family: 'Elusive-Icons';
	font-size: 14px;
	margin: 0 5px 0 0;
	vertical-align: bottom;
}

.suggestions .media_result .data .info .genres {
	font-size: 11px;
	font-style: italic;
	text-align: right;
}

/* Collapsed plot teaser; expands over the card when toggled to .full. */
.suggestions .media_result .data .info .plot {
	display: block;
	font-size: 11px;
	overflow: hidden;
	text-align: justify;
	height: 100%;
	z-index: 2;
	top: 64px;
	position: absolute;
	background: #4e5969;
	cursor: pointer;
	transition: all .4s ease-in-out;
	padding: 0 3px 10px 0;
}

/* Fade-out gradient above the card's bottom edge.
   NOTE(review): "bottom" is declared twice in this rule; the later 10px
   wins -- confirm the first declaration isn't a leftover. */
.suggestions .media_result .data:before {
	bottom: 0;
	content: '';
	display: block;
	height: 10px;
	right: 0;
	left: 0;
	bottom: 10px;
	position: absolute;
	background: linear-gradient(
		0deg,
		rgba(78, 89, 105, 1) 0%,
		rgba(78, 89, 105, 0) 100%
	);
	z-index: 3;
	pointer-events: none;
}

.suggestions .media_result .data .info .plot.full {
	top: 0;
	overflow: auto;
}

.suggestions .media_result .data {
	cursor: default;
}

/* Add-movie option dropdowns, aligned with the data block. */
.suggestions .media_result .options {
	left: 100px;
}

.suggestions .media_result .options select[name=title] { width: 100%; }
.suggestions .media_result .options select[name=profile] { width: 100%; }
.suggestions .media_result .options select[name=category] { width: 100%; }

.suggestions .media_result .button {
	position: absolute;
	margin: 2px 0 0 0;
	right: 15px;
	bottom: 15px;
}

.suggestions .media_result .thumbnail {
	width: 100px;
}

/* Hover-only action icons (add / imdb / trailer / ignore / seen). */
.suggestions .media_result .actions {
	position: absolute;
	top: 10px;
	right: 10px;
	display: none;
	width: 140px;
}

.suggestions .media_result:hover .actions {
	display: block;
}

.suggestions .media_result:hover h2 .title {
	opacity: 0;
}

.suggestions .media_result .data.open .actions {
	display: none;
}

.suggestions .media_result .actions a {
	margin-left: 10px;
	vertical-align: middle;
}

View File

@@ -1,153 +0,0 @@
/**
 * SuggestList: renders the "You might like these" block and keeps it
 * filled through the suggestion.view / suggestion.ignore API endpoints.
 */
var SuggestList = new Class({

	Implements: [Options, Events],

	initialize: function(options){
		var self = this;

		self.setOptions(options);
		self.create();
	},

	create: function(){
		var self = this;

		self.el = new Element('div.suggestions', {
			'events': {

				// Trash icon: never suggest this movie again.
				'click:relay(a.delete)': function(e, el){
					(e).stop();

					$(el).getParent('.media_result').destroy();

					Api.request('suggestion.ignore', {
						'data': {
							'imdb': el.get('data-ignore')
						},
						'onComplete': self.fill.bind(self)
					});
				},

				// Eye icon: mark as seen (liked it, but don't add it).
				'click:relay(a.eye-open)': function(e, el){
					(e).stop();

					$(el).getParent('.media_result').destroy();

					Api.request('suggestion.ignore', {
						'data': {
							'imdb': el.get('data-seen'),
							'mark_seen': 1
						},
						'onComplete': self.fill.bind(self)
					});
				}
			}
		}).grab(
			new Element('h2', {
				'text': 'You might like these'
			})
		);

		// Initial fill from the cached/generated suggestion list.
		self.api_request = Api.request('suggestion.view', {
			'onComplete': self.fill.bind(self)
		});
	},

	fill: function(json){

		var self = this;

		// Hide the whole block when the API returned nothing.
		if(!json || json.count == 0){
			self.el.hide();
		}
		else {

			Object.each(json.suggestions, function(movie){

				var m = new Block.Search.MovieItem(movie, {
					'onAdded': function(){
						self.afterAdded(m, movie)
					}
				});

				// Hover action icons: add / imdb / trailer / ignore / seen.
				m.data_container.grab(
					new Element('div.actions').adopt(
						new Element('a.add.icon2', {
							'title': 'Add movie with your default quality',
							'data-add': movie.imdb,
							'events': {
								'click': m.showOptions.bind(m)
							}
						}),
						$(new MA.IMDB(m)),
						$(new MA.Trailer(m, {
							'height': 150
						})),
						new Element('a.delete.icon2', {
							'title': 'Don\'t suggest this movie again',
							'data-ignore': movie.imdb
						}),
						new Element('a.eye-open.icon2', {
							'title': 'Seen it, like it, don\'t add',
							'data-seen': movie.imdb
						})
					)
				);
				// Card click is reserved for the plot toggle / actions.
				m.data_container.removeEvents('click');

				var plot = false;
				if(m.info.plot && m.info.plot.length > 0)
					plot = m.info.plot;

				// Add rating, genres and an expandable plot to the info block.
				m.info_container.adopt(
					m.rating = m.info.rating && m.info.rating.imdb.length == 2 && parseFloat(m.info.rating.imdb[0]) > 0 ? new Element('span.rating', {
						'text': parseFloat(m.info.rating.imdb[0]),
						'title': parseInt(m.info.rating.imdb[1]) + ' votes'
					}) : null,
					m.genre = m.info.genres && m.info.genres.length > 0 ? new Element('span.genres', {
						'text': m.info.genres.slice(0, 3).join(', ')
					}) : null,
					m.plot = plot ? new Element('span.plot', {
						'text': plot,
						'events': {
							'click': function(){
								this.toggleClass('full')
							}
						}
					}) : null
				)

				$(m).inject(self.el);

			});

		}

		self.fireEvent('loaded');
	},

	// After a movie was added: give the user 3s to see the confirmation,
	// then swap the card for a fresh suggestion (remove_only keeps it out
	// of the ignore list).
	afterAdded: function(m, movie){
		var self = this;

		setTimeout(function(){
			$(m).destroy();

			Api.request('suggestion.ignore', {
				'data': {
					'imdb': movie.imdb,
					'remove_only': true
				},
				'onComplete': self.fill.bind(self)
			});

		}, 3000);
	},

	toElement: function(){
		return this.el;
	}

})

View File

@@ -1,6 +0,0 @@
from .main import ShowBase
def start():
    # Plugin entry point: instantiate the show media handler.
    return ShowBase()

# No user-configurable settings for this plugin.
config = []

View File

@@ -1,239 +0,0 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media import MediaBase
from couchpotato.core.settings.model import Media
import time
log = CPLog(__name__)
class ShowBase(MediaBase):
    """Media handler for TV shows: adds a show plus all of its seasons and
    episodes (tvdb numbering) to the database and kicks off searches."""

    _type = 'show'

    def __init__(self):
        super(ShowBase, self).__init__()
        addApiView('show.add', self.addView, docs = {
            'desc': 'Add new movie to the wanted list',
            'params': {
                'identifier': {'desc': 'IMDB id of the movie your want to add.'},
                'profile_id': {'desc': 'ID of quality profile you want the add the movie in. If empty will use the default profile.'},
                'title': {'desc': 'Movie title to use for searches. Has to be one of the titles returned by movie.search.'},
            }
        })
        addEvent('show.add', self.add)

    def addView(self, **kwargs):
        """API endpoint wrapping add(); reports the added show dict."""
        add_dict = self.add(params = kwargs)

        return {
            'success': True if add_dict else False,
            'show': add_dict,
        }

    def add(self, params = None, force_readd = True, search_after = True, update_library = False, status_id = None):
        """Add a show and all of its seasons/episodes.

        Example params::

            {'category_id': u'-1',
             'identifier': u'tt1519931',
             'profile_id': u'12',
             'thetvdb_id': u'158661',
             'title': u'Haven'}
        """
        # BUG FIX: mutable default argument ({}) was shared between calls.
        if not params: params = {}
        log.debug("show.add")

        # Add show parent to db first; need to update library so maps will be in place (if any)
        parent = self.addToDatabase(params = params, update_library = True, type = 'show')

        # TODO: add by airdate

        # Add by Season/Episode numbers.
        # BUG FIX: the result was previously dropped, so addView always saw
        # None and reported 'success': False; return it instead.
        return self.addBySeasonEpisode(parent,
            params = params,
            force_readd = force_readd,
            search_after = search_after,
            update_library = update_library,
            status_id = status_id
        )

    def addBySeasonEpisode(self, parent, params = None, force_readd = True, search_after = True, update_library = False, status_id = None):
        """Walk the show's seasons and episodes and add each to the database,
        then optionally start searching for the whole tree. Returns parent."""
        # BUG FIX: mutable default argument ({}) was shared between calls.
        if not params: params = {}

        identifier = params.get('id')

        # 'tvdb' will always be the master for our purpose. All mapped data can be mapped
        # to another source for downloading, but it will always be remapped back to tvdb numbering
        # when renamed so media can be used in media players that use tvdb for info provider
        #
        # This currently means the episode must actually exist in tvdb in order to be found but
        # the numbering can be different
        # TODO: auto mode. if anime exists use it. if scene exists use it else use tvdb
        # XXX: We should abort adding show, etc if either tvdb or xem is down or we will have incorrent mappings

        # Only the master should return results here; all other info providers should just
        # return False since we are just interested in the structure at this point.
        seasons = fireEvent('season.info', merge = True, identifier = identifier)
        if seasons is not None:
            for season in seasons:

                # Make sure we are only dealing with 'tvdb' responses at this point
                if season.get('primary_provider', None) != 'thetvdb':
                    continue

                season_id = season.get('id', None)
                if season_id is None: continue

                season_params = {'season_identifier': season_id}
                # Calling all info providers; merge your info now for individual season
                single_season = fireEvent('season.info', merge = True, identifier = identifier, params = season_params)
                single_season['category_id'] = params.get('category_id')
                single_season['profile_id'] = params.get('profile_id')
                single_season['title'] = single_season.get('original_title', None)
                single_season['identifier'] = season_id
                single_season['parent_identifier'] = identifier
                # Consistent with the file's lazy log formatting (was "%" interpolation).
                log.info('Adding Season %s', season_id)
                self.addToDatabase(params = single_season, type = "season")

                episode_params = {'season_identifier': season_id}
                episodes = fireEvent('episode.info', merge = True, identifier = identifier, params = episode_params)
                if episodes is not None:
                    for episode in episodes:

                        # Make sure we are only dealing with 'tvdb' responses at this point
                        if episode.get('primary_provider', None) != 'thetvdb':
                            continue

                        episode_id = episode.get('id', None)
                        if episode_id is None: continue

                        try:
                            episode_number = int(episode.get('episodenumber', None))
                        except (ValueError, TypeError):
                            continue
                        try:
                            absolute_number = int(episode.get('absolute_number', None))
                        except (ValueError, TypeError):
                            absolute_number = None

                        episode_params = {'season_identifier': season_id,
                                          'episode_identifier': episode_id,
                                          'episode': episode_number}
                        if absolute_number:
                            episode_params['absolute'] = absolute_number

                        # Calling all info providers; merge your info now for individual episode
                        single_episode = fireEvent('episode.info', merge = True, identifier = identifier, params = episode_params)
                        single_episode['category_id'] = params.get('category_id')
                        single_episode['profile_id'] = params.get('profile_id')
                        single_episode['title'] = single_episode.get('original_title', None)
                        single_episode['identifier'] = episode_id
                        single_episode['parent_identifier'] = single_season['identifier']
                        log.info('Adding [%sx%s] %s - %s', (season_id,
                                                           episode_number,
                                                           params['title'],
                                                           single_episode.get('original_title', '')))

                        self.addToDatabase(params = single_episode, type = "episode")

        # Start searching now that all the media has been added
        if search_after:
            onComplete = self.createOnComplete(parent['id'])
            onComplete()

        return parent

    def addToDatabase(self, params = None, type = "show", force_readd = True, search_after = False, update_library = False, status_id = None):
        """Create or re-activate a single Media row (show/season/episode).

        Returns the media dict on success, False when no identifier was given
        or the library couldn't be created.
        """
        # BUG FIX: mutable default argument ({}) was shared between calls.
        if not params: params = {}
        log.debug("show.addToDatabase")

        if not params.get('identifier'):
            msg = 'Can\'t add show without imdb identifier.'
            log.error(msg)
            fireEvent('notify.frontend', type = 'show.is_tvshow', message = msg)
            return False

        library = fireEvent('library.add.%s' % type, single = True, attrs = params, update_after = update_library)
        if not library:
            return False

        # Status
        status_active, snatched_status, ignored_status, done_status, downloaded_status = \
            fireEvent('status.get', ['active', 'snatched', 'ignored', 'done', 'downloaded'], single = True)

        default_profile = fireEvent('profile.default', single = True)
        cat_id = params.get('category_id', None)

        db = get_session()

        m = db.query(Media).filter_by(library_id = library.get('id')).first()
        added = True
        do_search = False
        if not m:
            m = Media(
                type = type,
                library_id = library.get('id'),
                profile_id = params.get('profile_id', default_profile.get('id')),
                status_id = status_id if status_id else status_active.get('id'),
                category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else None,
            )
            db.add(m)
            db.commit()

            onComplete = None
            if search_after:
                onComplete = self.createOnComplete(m.id)
                # Search is triggered by the async library update instead.
                search_after = False

            fireEventAsync('library.update.%s' % type, params.get('identifier'), default_title = params.get('title', ''), on_complete = onComplete)
        elif force_readd:
            # Clean snatched history
            for release in m.releases:
                if release.status_id in [downloaded_status.get('id'), snatched_status.get('id'), done_status.get('id')]:
                    if params.get('ignore_previous', False):
                        release.status_id = ignored_status.get('id')
                    else:
                        fireEvent('release.delete', release.id, single = True)

            m.profile_id = params.get('profile_id', default_profile.get('id'))
            m.category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else None
        else:
            log.debug('Show already exists, not updating: %s', params)
            added = False

        if force_readd:
            m.status_id = status_id if status_id else status_active.get('id')
            m.last_edit = int(time.time())
            do_search = True

        db.commit()

        # Remove available releases; they'll be rediscovered by the search.
        available_status = fireEvent('status.get', 'available', single = True)
        for rel in m.releases:
            # BUG FIX: was "is", an identity check between ints that only
            # worked by CPython small-int interning; use equality.
            if rel.status_id == available_status.get('id'):
                db.delete(rel)
                db.commit()

        show_dict = m.to_dict(self.default_dict)

        if do_search and search_after:
            onComplete = self.createOnComplete(m.id)
            onComplete()

        if added:
            fireEvent('notify.frontend', type = 'show.added', data = show_dict, message = 'Successfully added "%s" to your wanted list.' % params.get('title', ''))

        db.expire_all()
        return show_dict

View File

@@ -1,232 +0,0 @@
/**
 * Block.Search.ShowItem: one show search result card with an expandable
 * options panel (title/profile/category selects) and a show.add request.
 */
Block.Search.ShowItem = new Class({

	Implements: [Options, Events],

	initialize: function(info, options){
		var self = this;

		self.setOptions(options);

		self.info = info;
		self.alternative_titles = [];

		self.create();
	},

	create: function(){
		var self = this,
			info = self.info;

		// Card: poster thumbnail + options panel + clickable data block.
		self.el = new Element('div.media_result', {
			'id': info.id
		}).adopt(
			self.thumbnail = info.images && info.images.poster.length > 0 ? new Element('img.thumbnail', {
				'src': info.images.poster[0],
				'height': null,
				'width': null
			}) : null,
			self.options_el = new Element('div.options.inlay'),
			self.data_container = new Element('div.data', {
				'events': {
					'click': self.showOptions.bind(self)
				}
			}).adopt(
				self.info_container = new Element('div.info').adopt(
					new Element('h2').adopt(
						self.title = new Element('span.title', {
							'text': info.titles && info.titles.length > 0 ? info.titles[0] : 'Unknown'
						}),
						self.year = info.year ? new Element('span.year', {
							'text': info.year
						}) : null
					)
				)
			)
		)

		// Every known title becomes an option in the title select.
		if(info.titles)
			info.titles.each(function(title){
				self.alternativeTitle({
					'title': title
				});
			})
	},

	alternativeTitle: function(alternative){
		var self = this;

		self.alternative_titles.include(alternative);
	},

	getTitle: function(){
		var self = this;
		try {
			return self.info.original_title ? self.info.original_title : self.info.titles[0];
		}
		catch(e){
			return 'Unknown';
		}
	},

	get: function(key){
		return this.info[key]
	},

	// Open the add-options panel; closes on any click outside the card.
	showOptions: function(){
		var self = this;

		self.createOptions();

		self.data_container.addClass('open');
		self.el.addEvent('outerClick', self.closeOptions.bind(self))
	},

	closeOptions: function(){
		var self = this;

		self.data_container.removeClass('open');
		self.el.removeEvents('outerClick')
	},

	// Fire the show.add API call with the selected title/profile/category.
	add: function(e){
		var self = this;

		if(e)
			(e).preventDefault();

		self.loadingMask();

		Api.request('show.add', {
			'data': {
				'identifier': self.info.id,
				'id': self.info.id,
				'type': self.info.type,
				'primary_provider': self.info.primary_provider,
				'title': self.title_select.get('value'),
				'profile_id': self.profile_select.get('value'),
				'category_id': self.category_select.get('value')
			},
			'onComplete': function(json){
				self.options_el.empty();
				self.options_el.adopt(
					new Element('div.message', {
						'text': json.added ? 'Show successfully added.' : 'Show didn\'t add properly. Check logs'
					})
				);
				self.mask.fade('out');
				self.fireEvent('added');
			},
			'onFailure': function(){
				self.options_el.empty();
				self.options_el.adopt(
					new Element('div.message', {
						'text': 'Something went wrong, check the logs for more info.'
					})
				);
				self.mask.fade('out');
			}
		});
	},

	// Build the options panel once (guarded by the 'set' class).
	createOptions: function(){
		var self = this,
			info = self.info;

		if(!self.options_el.hasClass('set')){

			// NOTE(review): in_library is only assigned inside this branch;
			// later references rely on var hoisting leaving it undefined
			// (falsy) when info.in_library is missing -- confirm intended.
			if(self.info.in_library){
				var in_library = [];
				self.info.in_library.releases.each(function(release){
					in_library.include(release.quality.label)
				});
			}

			self.options_el.grab(
				new Element('div', {
					'class': self.info.in_wanted && self.info.in_wanted.profile_id || in_library ? 'in_library_wanted' : ''
				}).adopt(
					self.info.in_wanted && self.info.in_wanted.profile_id ? new Element('span.in_wanted', {
						'text': 'Already in wanted list: ' + Quality.getProfile(self.info.in_wanted.profile_id).get('label')
					}) : (in_library ? new Element('span.in_library', {
						'text': 'Already in library: ' + in_library.join(', ')
					}) : null),
					self.title_select = new Element('select', {
						'name': 'title'
					}),
					self.profile_select = new Element('select', {
						'name': 'profile'
					}),
					self.category_select = new Element('select', {
						'name': 'category'
					}).grab(
						new Element('option', {'value': -1, 'text': 'None'})
					),
					self.add_button = new Element('a.button', {
						'text': 'Add',
						'events': {
							'click': self.add.bind(self)
						}
					})
				)
			);

			Array.each(self.alternative_titles, function(alt){
				new Element('option', {
					'text': alt.title
				}).inject(self.title_select)
			})

			// Fill categories
			var categories = CategoryList.getAll();

			if(categories.length == 0)
				self.category_select.hide();
			else {
				self.category_select.show();
				categories.each(function(category){
					new Element('option', {
						'value': category.data.id,
						'text': category.data.label
					}).inject(self.category_select);
				});
			}

			// Fill profiles
			var profiles = Quality.getActiveProfiles();
			if(profiles.length == 1)
				self.profile_select.hide();

			profiles.each(function(profile){
				new Element('option', {
					'value': profile.id ? profile.id : profile.data.id,
					'text': profile.label ? profile.label : profile.data.label
				}).inject(self.profile_select)
			});

			self.options_el.addClass('set');

			// Nothing to choose and not already present: add straight away.
			if(categories.length == 0 && self.title_select.getElements('option').length == 1 && profiles.length == 1 &&
					!(self.info.in_wanted && self.info.in_wanted.profile_id || in_library))
				self.add();

		}
	},

	loadingMask: function(){
		var self = this;

		self.mask = new Element('div.mask').inject(self.el).fade('hide')

		createSpinner(self.mask)
		self.mask.fade('in')
	},

	toElement: function(){
		return this.el
	}

});

View File

@@ -1,6 +0,0 @@
from .main import EpisodeLibraryPlugin
def start():
    # Plugin entry point: instantiate the episode library handler.
    return EpisodeLibraryPlugin()

# No user-configurable settings for this plugin.
config = []

View File

@@ -1,266 +0,0 @@
from couchpotato import get_session
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
from couchpotato.core.helpers.encoding import toUnicode, simplifyString
from couchpotato.core.logger import CPLog
from couchpotato.core.settings.model import EpisodeLibrary, SeasonLibrary, LibraryTitle, File
from couchpotato.core.media._base.library.base import LibraryBase
from couchpotato.core.helpers.variable import tryInt
from string import ascii_letters
import time
import traceback
log = CPLog(__name__)
class EpisodeLibraryPlugin(LibraryBase):
default_dict = {'titles': {}, 'files':{}}
def __init__(self):
addEvent('library.query', self.query)
addEvent('library.identifier', self.identifier)
addEvent('library.add.episode', self.add)
addEvent('library.update.episode', self.update)
addEvent('library.update.episode_release_date', self.updateReleaseDate)
def query(self, library, first = True, condense = True, include_identifier = True, **kwargs):
if library is list or library.get('type') != 'episode':
return
# Get the titles of the season
if not library.get('related_libraries', {}).get('season', []):
log.warning('Invalid library, unable to determine title.')
return
titles = fireEvent(
'library.query',
library['related_libraries']['season'][0],
first=False,
include_identifier=include_identifier,
condense=condense,
single=True
)
identifier = fireEvent('library.identifier', library, single = True)
# Add episode identifier to titles
if include_identifier and identifier.get('episode'):
titles = [title + ('E%02d' % identifier['episode']) for title in titles]
if first:
return titles[0] if titles else None
return titles
def identifier(self, library):
if library.get('type') != 'episode':
return
identifier = {
'season': None,
'episode': None
}
scene_map = library['info'].get('map_episode', {}).get('scene')
if scene_map:
# Use scene mappings if they are available
identifier['season'] = scene_map.get('season')
identifier['episode'] = scene_map.get('episode')
else:
# Fallback to normal season/episode numbers
identifier['season'] = library.get('season_number')
identifier['episode'] = library.get('episode_number')
# Cast identifiers to integers
# TODO this will need changing to support identifiers with trailing 'a', 'b' characters
identifier['season'] = tryInt(identifier['season'], None)
identifier['episode'] = tryInt(identifier['episode'], None)
return identifier
def add(self, attrs = {}, update_after = True):
type = attrs.get('type', 'episode')
primary_provider = attrs.get('primary_provider', 'thetvdb')
db = get_session()
parent_identifier = attrs.get('parent_identifier', None)
parent = None
if parent_identifier:
parent = db.query(SeasonLibrary).filter_by(primary_provider = primary_provider, identifier = attrs.get('parent_identifier')).first()
l = db.query(EpisodeLibrary).filter_by(type = type, identifier = attrs.get('identifier')).first()
if not l:
status = fireEvent('status.get', 'needs_update', single = True)
l = EpisodeLibrary(
type = type,
primary_provider = primary_provider,
year = attrs.get('year'),
identifier = attrs.get('identifier'),
plot = toUnicode(attrs.get('plot')),
tagline = toUnicode(attrs.get('tagline')),
status_id = status.get('id'),
info = {},
parent = parent,
season_number = tryInt(attrs.get('seasonnumber', None)),
episode_number = tryInt(attrs.get('episodenumber', None)),
absolute_number = tryInt(attrs.get('absolute_number', None))
)
title = LibraryTitle(
title = toUnicode(attrs.get('title')),
simple_title = self.simplifyTitle(attrs.get('title')),
)
l.titles.append(title)
db.add(l)
db.commit()
# Update library info
if update_after is not False:
handle = fireEventAsync if update_after is 'async' else fireEvent
handle('library.update.episode', identifier = l.identifier, default_title = toUnicode(attrs.get('title', '')))
library_dict = l.to_dict(self.default_dict)
db.expire_all()
return library_dict
def update(self, identifier, default_title = '', force = False):
if self.shuttingDown():
return
db = get_session()
library = db.query(EpisodeLibrary).filter_by(identifier = identifier).first()
done_status = fireEvent('status.get', 'done', single = True)
if library:
library_dict = library.to_dict(self.default_dict)
do_update = True
parent_identifier = None
if library.parent is not None:
parent_identifier = library.parent.identifier
if library.status_id == done_status.get('id') and not force:
do_update = False
episode_params = {'season_identifier': parent_identifier,
'episode_identifier': identifier,
'episode': library.episode_number,
'absolute': library.absolute_number,}
info = fireEvent('episode.info', merge = True, params = episode_params)
# Don't need those here
try: del info['in_wanted']
except: pass
try: del info['in_library']
except: pass
if not info or len(info) == 0:
log.error('Could not update, no movie info to work with: %s', identifier)
return False
# Main info
if do_update:
library.plot = toUnicode(info.get('plot', ''))
library.tagline = toUnicode(info.get('tagline', ''))
library.year = info.get('year', 0)
library.status_id = done_status.get('id')
library.season_number = tryInt(info.get('seasonnumber', None))
library.episode_number = tryInt(info.get('episodenumber', None))
library.absolute_number = tryInt(info.get('absolute_number', None))
try:
library.last_updated = int(info.get('lastupdated'))
except:
library.last_updated = int(time.time())
library.info.update(info)
db.commit()
# Titles
[db.delete(title) for title in library.titles]
db.commit()
titles = info.get('titles', [])
log.debug('Adding titles: %s', titles)
counter = 0
for title in titles:
if not title:
continue
title = toUnicode(title)
t = LibraryTitle(
title = title,
simple_title = self.simplifyTitle(title),
default = (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == u'' and toUnicode(titles[0]) == title)
)
library.titles.append(t)
counter += 1
db.commit()
# Files
images = info.get('images', [])
for image_type in ['poster']:
for image in images.get(image_type, []):
if not isinstance(image, (str, unicode)):
continue
file_path = fireEvent('file.download', url = image, single = True)
if file_path:
file_obj = fireEvent('file.add', path = file_path, type_tuple = ('image', image_type), single = True)
try:
file_obj = db.query(File).filter_by(id = file_obj.get('id')).one()
library.files.append(file_obj)
db.commit()
break
except:
log.debug('Failed to attach to library: %s', traceback.format_exc())
library_dict = library.to_dict(self.default_dict)
db.expire_all()
return library_dict
    def updateReleaseDate(self, identifier):
        '''XXX: Not sure what this is for yet in relation to an episode'''
        # Stub: registered for 'library.update.episode_release_date' but not
        # implemented yet. The commented-out draft below mirrors the movie
        # plugin's release-date refresh and is kept for reference.
        pass

        #db = get_session()
        #library = db.query(EpisodeLibrary).filter_by(identifier = identifier).first()

        #if not library.info:
            #library_dict = self.update(identifier, force = True)
            #dates = library_dict.get('info', {}).get('release_date')
        #else:
            #dates = library.info.get('release_date')

        #if dates and dates.get('expires', 0) < time.time() or not dates:
            #dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
            #library.info.update({'release_date': dates })
            #db.commit()

        #db.expire_all()
        #return dates
#TODO: Add to base class
def simplifyTitle(self, title):
title = toUnicode(title)
nr_prefix = '' if title[0] in ascii_letters else '#'
title = simplifyString(title)
for prefix in ['the ']:
if prefix == title[:len(prefix)]:
title = title[len(prefix):]
break
return nr_prefix + title

View File

@@ -1,6 +0,0 @@
from .main import SeasonLibraryPlugin


def start():
    """Entry point used by the plugin loader to build the plugin instance."""
    return SeasonLibraryPlugin()


# This plugin exposes no user-configurable settings
config = []

View File

@@ -1,242 +0,0 @@
from couchpotato import get_session
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
from couchpotato.core.helpers.encoding import toUnicode, simplifyString
from couchpotato.core.logger import CPLog
from couchpotato.core.settings.model import SeasonLibrary, ShowLibrary, LibraryTitle, File
from couchpotato.core.media._base.library.base import LibraryBase
from couchpotato.core.helpers.variable import tryInt
from string import ascii_letters
import time
import traceback
log = CPLog(__name__)
class SeasonLibraryPlugin(LibraryBase):
    """Library plugin handling season-level metadata (titles, info, images)."""

    # Shape handed to Library.to_dict() when serializing a season library
    default_dict = {'titles': {}, 'files': {}}

    def __init__(self):
        addEvent('library.query', self.query)
        addEvent('library.identifier', self.identifier)
        addEvent('library.add.season', self.add)
        addEvent('library.update.season', self.update)
        addEvent('library.update.season_release_date', self.updateReleaseDate)

    def query(self, library, first = True, condense = True, include_identifier = True, **kwargs):
        """Build search title(s) for a season from its parent show's titles.

        :param first: return only the first title instead of the full list.
        :param include_identifier: append ' SXX' to every returned title.
        """
        # BUG FIX: `library is list` compared against the list *type* (always
        # False); use isinstance so list input is actually skipped.
        if isinstance(library, list) or library.get('type') != 'season':
            return

        # Get the titles of the show
        if not library.get('related_libraries', {}).get('show', []):
            log.warning('Invalid library, unable to determine title.')
            return

        titles = fireEvent(
            'library.query',
            library['related_libraries']['show'][0],
            first = False,
            condense = condense,
            single = True
        )

        # Add season map_names if they exist
        if 'map_names' in library['info']:
            season_names = library['info']['map_names'].get(str(library['season_number']), {})

            # Add titles from all locations
            # TODO only add name maps from a specific location
            for location, names in season_names.items():
                titles += [name for name in names if name and name not in titles]

        identifier = fireEvent('library.identifier', library, single = True)

        # Add season identifier to titles
        if include_identifier and identifier.get('season') is not None:
            titles = [title + (' S%02d' % identifier['season']) for title in titles]

        if first:
            return titles[0] if titles else None

        return titles

    def identifier(self, library):
        """Return the matcher identifier ({'season': n}) for a season library."""
        if library.get('type') != 'season':
            return

        return {
            'season': tryInt(library['season_number'], None)
        }

    def add(self, attrs = None, update_after = True):
        """Create (or fetch) the SeasonLibrary row described by *attrs* and
        optionally trigger a metadata update afterwards."""
        # BUG FIX: avoid the shared mutable default argument ({})
        if not attrs: attrs = {}

        type = attrs.get('type', 'season')
        primary_provider = attrs.get('primary_provider', 'thetvdb')

        db = get_session()

        parent_identifier = attrs.get('parent_identifier', None)
        parent = None
        if parent_identifier:
            parent = db.query(ShowLibrary).filter_by(primary_provider = primary_provider, identifier = attrs.get('parent_identifier')).first()

        l = db.query(SeasonLibrary).filter_by(type = type, identifier = attrs.get('identifier')).first()
        if not l:
            status = fireEvent('status.get', 'needs_update', single = True)
            l = SeasonLibrary(
                type = type,
                primary_provider = primary_provider,
                year = attrs.get('year'),
                identifier = attrs.get('identifier'),
                plot = toUnicode(attrs.get('plot')),
                tagline = toUnicode(attrs.get('tagline')),
                status_id = status.get('id'),
                info = {},
                parent = parent,
            )

            title = LibraryTitle(
                title = toUnicode(attrs.get('title')),
                simple_title = self.simplifyTitle(attrs.get('title')),
            )

            l.titles.append(title)

            db.add(l)
            db.commit()

        # Update library info
        if update_after is not False:
            # BUG FIX: `is 'async'` relied on CPython string interning; compare by value
            handle = fireEventAsync if update_after == 'async' else fireEvent
            handle('library.update.season', identifier = l.identifier, default_title = toUnicode(attrs.get('title', '')))

        library_dict = l.to_dict(self.default_dict)
        db.expire_all()
        return library_dict

    def update(self, identifier, default_title = '', force = False):
        """Refresh stored metadata for a season; returns the library dict, or
        False when the row is missing or no info could be fetched."""
        if self.shuttingDown():
            return

        db = get_session()
        library = db.query(SeasonLibrary).filter_by(identifier = identifier).first()
        if not library:
            # Robustness: a missing row used to fall through and crash with AttributeError
            log.error('Unable to find library with identifier: %s', identifier)
            return False

        done_status = fireEvent('status.get', 'done', single = True)
        library_dict = library.to_dict(self.default_dict)

        do_update = True

        parent_identifier = None
        if library.parent is not None:
            parent_identifier = library.parent.identifier

        # Skip the expensive refresh when already done, unless forced
        if library.status_id == done_status.get('id') and not force:
            do_update = False

        season_params = {'season_identifier': identifier}
        info = fireEvent('season.info', merge = True, identifier = parent_identifier, params = season_params)

        # Don't need those here (TypeError also covers info being None)
        try: del info['in_wanted']
        except (KeyError, TypeError): pass
        try: del info['in_library']
        except (KeyError, TypeError): pass

        if not info:
            # BUG FIX: message said 'movie'; this is the season library
            log.error('Could not update, no season info to work with: %s', identifier)
            return False

        # Main info
        if do_update:
            library.plot = toUnicode(info.get('plot', ''))
            library.tagline = toUnicode(info.get('tagline', ''))
            library.year = info.get('year', 0)
            library.status_id = done_status.get('id')
            library.season_number = tryInt(info.get('seasonnumber', None))

            library.info.update(info)
            db.commit()

            # Titles: replace all existing rows with the freshly fetched ones
            for title in library.titles:
                db.delete(title)
            db.commit()

            titles = info.get('titles', [])
            log.debug('Adding titles: %s', titles)

            for title in titles:
                if not title:
                    continue
                title = toUnicode(title)
                t = LibraryTitle(
                    title = title,
                    simple_title = self.simplifyTitle(title),
                    # XXX: default was None; quick hack since we don't really need titles for seasons anyway
                    default = True,
                )
                library.titles.append(t)

            db.commit()

            # Files: download the first usable poster and attach it
            images = info.get('images', [])
            for image_type in ['poster']:
                for image in images.get(image_type, []):
                    if not isinstance(image, (str, unicode)):
                        continue
                    file_path = fireEvent('file.download', url = image, single = True)
                    if file_path:
                        file_obj = fireEvent('file.add', path = file_path, type_tuple = ('image', image_type), single = True)
                        try:
                            file_obj = db.query(File).filter_by(id = file_obj.get('id')).one()
                            library.files.append(file_obj)
                            db.commit()
                            break
                        except Exception:
                            log.debug('Failed to attach to library: %s', traceback.format_exc())

        library_dict = library.to_dict(self.default_dict)
        db.expire_all()
        return library_dict

    def updateReleaseDate(self, identifier):
        '''XXX: Not sure what this is for yet in relation to a tvshow'''
        pass

        #db = get_session()
        #library = db.query(SeasonLibrary).filter_by(identifier = identifier).first()

        #if not library.info:
            #library_dict = self.update(identifier, force = True)
            #dates = library_dict.get('info', {}).get('release_date')
        #else:
            #dates = library.info.get('release_date')

        #if dates and dates.get('expires', 0) < time.time() or not dates:
            #dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
            #library.info.update({'release_date': dates })
            #db.commit()

        #db.expire_all()
        #return dates

    #TODO: Add to base class
    def simplifyTitle(self, title):
        """Return a simplified, comparison-friendly form of *title*."""
        title = toUnicode(title)

        # Guard: title[:1] avoids an IndexError on empty titles (was title[0])
        nr_prefix = '' if title[:1] and title[0] in ascii_letters else '#'
        title = simplifyString(title)

        for prefix in ['the ']:
            if prefix == title[:len(prefix)]:
                title = title[len(prefix):]
                break

        return nr_prefix + title

View File

@@ -1,6 +0,0 @@
from .main import ShowLibraryPlugin


def start():
    """Entry point used by the plugin loader to build the plugin instance."""
    return ShowLibraryPlugin()


# This plugin exposes no user-configurable settings
config = []

View File

@@ -1,229 +0,0 @@
from couchpotato import get_session
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
from couchpotato.core.helpers.encoding import toUnicode, simplifyString
from couchpotato.core.logger import CPLog
from couchpotato.core.settings.model import ShowLibrary, LibraryTitle, File
from couchpotato.core.media._base.library.base import LibraryBase
from qcond.helpers import simplify
from qcond import QueryCondenser
from string import ascii_letters
import time
import traceback
log = CPLog(__name__)
class ShowLibraryPlugin(LibraryBase):
    """Library plugin handling show-level metadata (titles, info, images)."""

    # Shape handed to Library.to_dict() when serializing a show library
    default_dict = {'titles': {}, 'files': {}}

    def __init__(self):
        self.query_condenser = QueryCondenser()

        addEvent('library.query', self.query)
        addEvent('library.add.show', self.add)
        addEvent('library.update.show', self.update)
        addEvent('library.update.show_release_date', self.updateReleaseDate)

    def query(self, library, first = True, condense = True, **kwargs):
        """Return search title(s) for a show library dict.

        :param first: return only the best title instead of the full list.
        :param condense: condense titles into optimal search terms.
        """
        # BUG FIX: `library is list` compared against the list *type* (always
        # False); use isinstance so list input is actually skipped.
        if isinstance(library, list) or library.get('type') != 'show':
            return

        titles = [title['title'] for title in library['titles']]

        if condense:
            # Use QueryCondenser to build a list of optimal search titles
            condensed_titles = self.query_condenser.distinct(titles)

            if condensed_titles:
                # Use condensed titles if we got a valid result
                titles = condensed_titles
            else:
                # Fallback to simplifying titles
                titles = [simplify(title) for title in titles]

        if first:
            return titles[0] if titles else None

        return titles

    def add(self, attrs = None, update_after = True):
        """Create (or fetch) the ShowLibrary row described by *attrs* and
        optionally trigger a metadata update afterwards."""
        # BUG FIX: avoid the shared mutable default argument ({})
        if not attrs: attrs = {}

        type = attrs.get('type', 'show')
        primary_provider = attrs.get('primary_provider', 'thetvdb')

        db = get_session()

        l = db.query(ShowLibrary).filter_by(type = type, identifier = attrs.get('identifier')).first()
        if not l:
            status = fireEvent('status.get', 'needs_update', single = True)
            l = ShowLibrary(
                type = type,
                primary_provider = primary_provider,
                year = attrs.get('year'),
                identifier = attrs.get('identifier'),
                plot = toUnicode(attrs.get('plot')),
                tagline = toUnicode(attrs.get('tagline')),
                status_id = status.get('id'),
                info = {},
                parent = None,
            )

            title = LibraryTitle(
                title = toUnicode(attrs.get('title')),
                simple_title = self.simplifyTitle(attrs.get('title')),
            )

            l.titles.append(title)

            db.add(l)
            db.commit()

        # Update library info
        if update_after is not False:
            # BUG FIX: `is 'async'` relied on CPython string interning; compare by value
            handle = fireEventAsync if update_after == 'async' else fireEvent
            handle('library.update.show', identifier = l.identifier, default_title = toUnicode(attrs.get('title', '')))

        library_dict = l.to_dict(self.default_dict)
        db.expire_all()
        return library_dict

    def update(self, identifier, default_title = '', force = False):
        """Refresh stored metadata for a show; returns the library dict, or
        False when the row is missing or no info could be fetched."""
        if self.shuttingDown():
            return

        db = get_session()
        library = db.query(ShowLibrary).filter_by(identifier = identifier).first()
        if not library:
            # Robustness: a missing row used to fall through and crash with AttributeError
            log.error('Unable to find library with identifier: %s', identifier)
            return False

        done_status = fireEvent('status.get', 'done', single = True)
        library_dict = library.to_dict(self.default_dict)

        do_update = True

        info = fireEvent('show.info', merge = True, identifier = identifier)

        # Don't need those here (TypeError also covers info being None)
        try: del info['in_wanted']
        except (KeyError, TypeError): pass
        try: del info['in_library']
        except (KeyError, TypeError): pass

        if not info:
            log.error('Could not update, no show info to work with: %s', identifier)
            return False

        # Main info
        if do_update:
            library.plot = toUnicode(info.get('plot', ''))
            library.tagline = toUnicode(info.get('tagline', ''))
            library.year = info.get('year', 0)
            library.status_id = done_status.get('id')
            library.show_status = toUnicode(info.get('status', '').lower())
            library.airs_time = info.get('airs_time', None)

            # Bits: air day encoded as a bitmask ('Daily' = all seven days)
            days_of_week_map = {
                u'Monday': 1,
                u'Tuesday': 2,
                u'Wednesday': 4,
                u'Thursday': 8,
                u'Friday': 16,
                u'Saturday': 32,
                u'Sunday': 64,
                u'Daily': 127,
            }
            # BUG FIX: dict.get never raises, so the old try/except fallback to 0
            # was dead and an unknown day stored None; use get's default instead.
            library.airs_dayofweek = days_of_week_map.get(info.get('airs_dayofweek'), 0)

            try:
                library.last_updated = int(info.get('lastupdated'))
            except (TypeError, ValueError):
                # Missing or non-numeric timestamp: fall back to "now"
                library.last_updated = int(time.time())

            library.info.update(info)
            db.commit()

            # Titles: replace all existing rows with the freshly fetched ones
            for title in library.titles:
                db.delete(title)
            db.commit()

            titles = info.get('titles', [])
            log.debug('Adding titles: %s', titles)

            counter = 0
            for title in titles:
                if not title:
                    continue
                title = toUnicode(title)
                t = LibraryTitle(
                    title = title,
                    simple_title = self.simplifyTitle(title),
                    # First title is the default unless an explicit default_title matches
                    default = (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == u'' and toUnicode(titles[0]) == title)
                )
                library.titles.append(t)
                counter += 1

            db.commit()

            # Files: download the first usable poster and attach it
            images = info.get('images', [])
            for image_type in ['poster']:
                for image in images.get(image_type, []):
                    if not isinstance(image, (str, unicode)):
                        continue
                    file_path = fireEvent('file.download', url = image, single = True)
                    if file_path:
                        file_obj = fireEvent('file.add', path = file_path, type_tuple = ('image', image_type), single = True)
                        try:
                            file_obj = db.query(File).filter_by(id = file_obj.get('id')).one()
                            library.files.append(file_obj)
                            db.commit()
                            break
                        except Exception:
                            log.debug('Failed to attach to library: %s', traceback.format_exc())

        library_dict = library.to_dict(self.default_dict)
        db.expire_all()
        return library_dict

    def updateReleaseDate(self, identifier):
        '''XXX: Not sure what this is for yet in relation to a show'''
        pass

        #db = get_session()
        #library = db.query(ShowLibrary).filter_by(identifier = identifier).first()

        #if not library.info:
            #library_dict = self.update(identifier, force = True)
            #dates = library_dict.get('info', {}).get('release_date')
        #else:
            #dates = library.info.get('release_date')

        #if dates and dates.get('expires', 0) < time.time() or not dates:
            #dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
            #library.info.update({'release_date': dates })
            #db.commit()

        #db.expire_all()
        #return dates

    #TODO: Add to base class
    def simplifyTitle(self, title):
        """Return a simplified, comparison-friendly form of *title*."""
        title = toUnicode(title)

        # Guard: title[:1] avoids an IndexError on empty titles (was title[0])
        nr_prefix = '' if title[:1] and title[0] in ascii_letters else '#'
        title = simplifyString(title)

        for prefix in ['the ']:
            if prefix == title[:len(prefix)]:
                title = title[len(prefix):]
                break

        return nr_prefix + title

View File

@@ -1,6 +0,0 @@
from .main import ShowMatcher


def start():
    """Entry point used by the plugin loader to build the matcher instance."""
    return ShowMatcher()


# This plugin exposes no user-configurable settings
config = []

View File

@@ -1,127 +0,0 @@
from couchpotato import CPLog
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.variable import dictIsSubset, tryInt, toIterable
from couchpotato.core.media._base.matcher.base import MatcherBase
from couchpotato.core.providers.base import MultiProvider
log = CPLog(__name__)
class ShowMatcher(MultiProvider):
    """Matcher entry point for show media."""

    def getTypes(self):
        # Season and Episode are the concrete matcher classes defined later
        # in this module; MultiProvider consumes this list.
        return [Season, Episode]
class Base(MatcherBase):
    """Shared matching logic for season/episode releases.

    Subclasses must define `type` ('season' or 'episode') and implement
    correctIdentifier().
    """

    # TODO come back to this later, think this could be handled better, this is starting to get out of hand....
    # Maps a quality identifier to the resolution/source tokens a release must
    # parse to. NOTE(review): nested lists like ['web', 'dl'] presumably mean
    # split-token variants -- confirm against matcher.correct_quality.
    quality_map = {
        'bluray_1080p': {'resolution': ['1080p'], 'source': ['bluray']},
        'bluray_720p': {'resolution': ['720p'], 'source': ['bluray']},

        'bdrip_1080p': {'resolution': ['1080p'], 'source': ['BDRip']},
        'bdrip_720p': {'resolution': ['720p'], 'source': ['BDRip']},

        'brrip_1080p': {'resolution': ['1080p'], 'source': ['BRRip']},
        'brrip_720p': {'resolution': ['720p'], 'source': ['BRRip']},

        'webdl_1080p': {'resolution': ['1080p'], 'source': ['webdl', ['web', 'dl']]},
        'webdl_720p': {'resolution': ['720p'], 'source': ['webdl', ['web', 'dl']]},
        'webdl_480p': {'resolution': ['480p'], 'source': ['webdl', ['web', 'dl']]},

        'hdtv_720p': {'resolution': ['720p'], 'source': ['hdtv']},
        'hdtv_sd': {'resolution': ['480p', None], 'source': ['hdtv']},
    }

    def __init__(self):
        super(Base, self).__init__()

        # Registers e.g. 'episode.matcher.correct_identifier' for the subclass type
        addEvent('%s.matcher.correct_identifier' % self.type, self.correctIdentifier)

    def correct(self, chain, release, media, quality):
        """Return True when the parsed release chain passes the quality,
        identifier and title checks."""
        log.info("Checking if '%s' is valid", release['name'])
        log.info2('Release parsed as: %s', chain.info)

        if not fireEvent('matcher.correct_quality', chain, quality, self.quality_map, single = True):
            log.info('Wrong: %s, quality does not match', release['name'])
            return False

        if not fireEvent('%s.matcher.correct_identifier' % self.type, chain, media):
            log.info('Wrong: %s, identifier does not match', release['name'])
            return False

        if not fireEvent('matcher.correct_title', chain, media):
            log.info("Wrong: '%s', undetermined naming.", (' '.join(chain.info['show_name'])))
            return False

        return True

    def correctIdentifier(self, chain, media):
        # Subclass responsibility (see Season / Episode)
        raise NotImplementedError()

    def getChainIdentifier(self, chain):
        """Flatten chain.info['identifier'] into a dict of ints; returns None
        when absent or ambiguous (multiple season/episode values)."""
        if 'identifier' not in chain.info:
            return None

        identifier = self.flattenInfo(chain.info['identifier'])

        # Try cast values to integers
        for key, value in identifier.items():
            if isinstance(value, list):
                # NOTE(review): an empty list here would raise IndexError on
                # value[0] -- confirm flattenInfo never produces empty lists.
                if len(value) <= 1:
                    value = value[0]
                else:
                    log.warning('Wrong: identifier contains multiple season or episode values, unsupported')
                    return None

            identifier[key] = tryInt(value, value)

        return identifier
class Episode(Base):
    # Event namespace; Base.__init__ registers 'episode.matcher.correct_identifier'
    type = 'episode'

    def correctIdentifier(self, chain, media):
        """Check that the release's parsed identifier exactly matches the
        wanted episode's identifier."""
        identifier = self.getChainIdentifier(chain)
        if not identifier:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are multi-part episodes
        if any([x in identifier for x in ['episode_from', 'episode_to']]):
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        required = fireEvent('library.identifier', media['library'], single = True)

        # TODO - Support air by date episodes
        # TODO - Support episode parts
        if identifier != required:
            log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required, identifier))
            return False

        return True
class Season(Base):
    # Event namespace; Base.__init__ registers 'season.matcher.correct_identifier'
    type = 'season'

    def correctIdentifier(self, chain, media):
        """Check that the release's parsed identifier exactly matches the
        wanted season's identifier."""
        identifier = self.getChainIdentifier(chain)
        if not identifier:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are season packs
        if any([x in identifier for x in ['episode_from', 'episode_to']]):
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        required = fireEvent('library.identifier', media['library'], single = True)

        if identifier != required:
            log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required, identifier))
            return False

        return True

View File

@@ -1,7 +0,0 @@
from .main import ShowSearcher


# BUG FIX: removed unused `import random` (nothing in this module used it)

def start():
    """Entry point used by the plugin loader to build the searcher instance."""
    return ShowSearcher()


# This plugin exposes no user-configurable settings
config = []

View File

@@ -1,189 +0,0 @@
from couchpotato import Env, get_session
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.variable import getTitle, toIterable
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.searcher.main import SearchSetupError
from couchpotato.core.media.show._base import ShowBase
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Media
from qcond import QueryCondenser
from qcond.helpers import simplify
log = CPLog(__name__)
class ShowSearcher(Plugin):
    """Dispatches searches for shows, seasons and episodes and validates
    candidate releases."""

    # Media types this searcher registers '<type>.searcher.single' for
    type = ['show', 'season', 'episode']

    in_progress = False

    def __init__(self):
        super(ShowSearcher, self).__init__()

        self.query_condenser = QueryCondenser()

        # Renamed loop variable: the original shadowed the builtin `type`
        for media_type in toIterable(self.type):
            addEvent('%s.searcher.single' % media_type, self.single)

        addEvent('searcher.correct_release', self.correctRelease)

    def single(self, media, search_protocols = None, manual = False):
        """Search for one media item; show searches fan out to their seasons,
        season searches fall back to per-episode searches when no pack is found.

        Returns True when a download was started."""
        show, season, episode = self.getLibraries(media['library'])

        db = get_session()

        if media['type'] == 'show':
            for library in season:
                # TODO ideally we shouldn't need to fetch the media for each season library here
                m = db.query(Media).filter_by(library_id = library['library_id']).first()
                fireEvent('season.searcher.single', m.to_dict(ShowBase.search_dict))

            return

        # Find out search type
        try:
            if not search_protocols:
                search_protocols = fireEvent('searcher.protocols', single = True)
        except SearchSetupError:
            return

        done_status, available_status, ignored_status, failed_status = fireEvent('status.get', ['done', 'available', 'ignored', 'failed'], single = True)

        if not media['profile'] or media['status_id'] == done_status.get('id'):
            log.debug('Episode doesn\'t have a profile or already done, assuming in manage tab.')
            return

        #pre_releases = fireEvent('quality.pre_releases', single = True)

        found_releases = []
        too_early_to_search = []

        default_title = fireEvent('library.query', media['library'], condense = False, single = True)
        if not default_title:
            log.error('No proper info found for episode, removing it from library to cause it from having more issues.')
            #fireEvent('episode.delete', episode['id'], single = True)
            return

        if not show or not season:
            log.error('Unable to find show or season library in database, missing required data for searching')
            return

        fireEvent('notify.frontend', type = 'show.searcher.started.%s' % media['id'], data = True, message = 'Searching for "%s"' % default_title)

        ret = False
        has_better_quality = None

        for quality_type in media['profile']['types']:
            # TODO check air date?
            #if not self.conf('always_search') and not self.couldBeReleased(quality_type['quality']['identifier'] in pre_releases, release_dates, movie['library']['year']):
            #    too_early_to_search.append(quality_type['quality']['identifier'])
            #    continue

            has_better_quality = 0

            # See if better quality is available
            for release in media['releases']:
                if release['quality']['order'] <= quality_type['quality']['order'] and release['status_id'] not in [available_status.get('id'), ignored_status.get('id'), failed_status.get('id')]:
                    has_better_quality += 1

            # Don't search for quality lower then already available.
            # BUG FIX: was `has_better_quality is 0` -- identity comparison with
            # an int literal; compare by value.
            if has_better_quality == 0:

                log.info('Search for %s S%02d%s in %s', (
                    getTitle(show),
                    season['season_number'],
                    "E%02d" % episode['episode_number'] if episode and len(episode) == 1 else "",
                    quality_type['quality']['label'])
                )
                quality = fireEvent('quality.single', identifier = quality_type['quality']['identifier'], single = True)

                results = fireEvent('searcher.search', search_protocols, media, quality, single = True)
                if len(results) == 0:
                    log.debug('Nothing found for %s in %s', (default_title, quality_type['quality']['label']))

                # Check if movie isn't deleted while searching
                if not db.query(Media).filter_by(id = media.get('id')).first():
                    break

                # Add them to this movie releases list
                found_releases += fireEvent('release.create_from_search', results, media, quality_type, single = True)

                # Try find a valid result and download it
                if fireEvent('release.try_download_result', results, media, quality_type, manual, single = True):
                    ret = True

                # Remove releases that aren't found anymore
                for release in media.get('releases', []):
                    if release.get('status_id') == available_status.get('id') and release.get('identifier') not in found_releases:
                        fireEvent('release.delete', release.get('id'), single = True)
            else:
                log.info('Better quality (%s) already available or snatched for %s', (quality_type['quality']['label'], default_title))
                fireEvent('media.restatus', media['id'])
                break

            # Break if CP wants to shut down
            if self.shuttingDown() or ret:
                break

        if len(too_early_to_search) > 0:
            log.info2('Too early to search for %s, %s', (too_early_to_search, default_title))
        # BUG FIX: was `has_better_quality is 0`; compare by value
        elif media['type'] == 'season' and not ret and has_better_quality == 0:
            # If nothing was found, start searching for episodes individually
            log.info('No season pack found, starting individual episode search')

            for library in episode:
                # TODO ideally we shouldn't need to fetch the media for each episode library here
                m = db.query(Media).filter_by(library_id = library['library_id']).first()

                fireEvent('episode.searcher.single', m.to_dict(ShowBase.search_dict))

        fireEvent('notify.frontend', type = 'show.searcher.ended.%s' % media['id'], data = True)

        return ret

    def correctRelease(self, release = None, media = None, quality = None, **kwargs):
        """Validate a single candidate release; returns the match weight for a
        valid release, False otherwise."""
        if media.get('type') not in ['season', 'episode']: return

        retention = Env.setting('retention', section = 'nzb')

        # Seeders is None for usenet results; torrents are exempt from retention
        if release.get('seeders') is None and 0 < retention < release.get('age', 0):
            log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (release['age'], retention, release['name']))
            return False

        # Check for required and ignored words
        if not fireEvent('searcher.correct_words', release['name'], media, single = True):
            return False

        # TODO Matching is quite costly, maybe we should be caching release matches somehow? (also look at caper optimizations)
        match = fireEvent('matcher.match', release, media, quality, single = True)
        if match:
            return match.weight

        return False

    def getLibraries(self, library):
        """Split a media library dict into (show, season, episode) parts.

        Parts above the subject's own type collapse to a single dict; parts at
        or below it stay lists (or None when missing)."""
        if 'related_libraries' not in library:
            log.warning("'related_libraries' missing from media library, unable to continue searching")
            return None, None, None

        libraries = library['related_libraries']

        # Show always collapses as there can never be any multiples
        show = libraries.get('show', [])
        show = show[0] if len(show) else None

        # Season collapses if the subject is a season or episode
        season = libraries.get('season', [])
        if library['type'] in ['season', 'episode']:
            season = season[0] if len(season) else None

        # Episode collapses if the subject is a episode
        episode = libraries.get('episode', [])
        if library['type'] == 'episode':
            episode = episode[0] if len(episode) else None

        return show, season, episode

View File

@@ -1,25 +0,0 @@
from migrate.changeset.schema import create_column
from sqlalchemy.schema import MetaData, Column, Table, Index
from sqlalchemy.types import Integer
meta = MetaData()
def upgrade(migrate_engine):
    """Add a `last_edit` column (with index) to `release`, and index
    `movie.last_edit`."""
    meta.bind = migrate_engine

    # Change release, add last_edit and index
    last_edit_column = Column('last_edit', Integer)
    release = Table('release', meta, last_edit_column)
    create_column(last_edit_column, release)

    Index('ix_release_last_edit', release.c.last_edit).create()

    # Change movie last_edit
    # NOTE(review): unlike the release branch above, create_column() is never
    # called for movie.last_edit -- presumably the column already exists from
    # an earlier migration; confirm, otherwise this index creation fails.
    last_edit_column = Column('last_edit', Integer)
    movie = Table('movie', meta, last_edit_column)

    Index('ix_movie_last_edit', movie.c.last_edit).create()

def downgrade(migrate_engine):
    # Intentional no-op: the schema change is not reverted
    pass

View File

@@ -1,17 +0,0 @@
from migrate.changeset.schema import create_column
from sqlalchemy.schema import MetaData, Column, Table, Index
from sqlalchemy.types import Integer
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
category_column = Column('category_id', Integer)
movie = Table('movie', meta, category_column)
create_column(category_column, movie)
Index('ix_movie_category_id', movie.c.category_id).create()
def downgrade(migrate_engine):
pass

View File

@@ -1,4 +1,4 @@
config = [{
config = {
'name': 'notification_providers',
'groups': [
{
@@ -10,4 +10,4 @@ config = [{
'options': [],
},
],
}]
}

View File

@@ -1,15 +1,14 @@
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.request import jsonified
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.base import Provider
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
log = CPLog(__name__)
class Notification(Provider):
type = 'notification'
class Notification(Plugin):
default_title = Env.get('appname')
test_message = 'ZOMG Lazors Pewpewpew!'
@@ -17,12 +16,11 @@ class Notification(Provider):
listen_to = [
'renamer.after', 'movie.snatched',
'updater.available', 'updater.updated',
'core.message.important',
]
dont_listen_to = []
def __init__(self):
addEvent('notify.%s' % self.getName().lower(), self._notify)
addEvent('notify.%s' % self.getName().lower(), self.notify)
addApiView(self.testNotifyName(), self.test)
@@ -32,41 +30,29 @@ class Notification(Provider):
addEvent(listener, self.createNotifyHandler(listener))
def createNotifyHandler(self, listener):
def notify(message = None, group = None, data = None):
if not group: group = {}
def notify(message = None, group = {}, data = None):
if not self.conf('on_snatch', default = True) and listener == 'movie.snatched':
return
return self._notify(message = message, data = data if data else group, listener = listener)
return self.notify(message = message, data = data if data else group, listener = listener)
return notify
def getNotificationImage(self, size = 'small'):
return 'https://raw.github.com/RuudBurger/CouchPotatoServer/master/couchpotato/static/images/notify.couch.%s.png' % size
def notify(self, message = '', data = {}, listener = None):
pass
def _notify(self, *args, **kwargs):
if self.isEnabled():
return self.notify(*args, **kwargs)
return False
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
def test(self, **kwargs):
def test(self):
test_type = self.testNotifyName()
log.info('Sending test to %s', test_type)
success = self._notify(
success = self.notify(
message = self.test_message,
data = {},
listener = 'test'
)
return {
'success': success
}
return jsonified({'success': success})
def testNotifyName(self):
return 'notify.%s.test' % self.getName().lower()

View File

@@ -10,20 +10,20 @@ class Boxcar(Notification):
url = 'https://boxcar.io/devices/providers/7MNNXY3UIzVBwvzkKwkC/notifications'
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
def notify(self, message = '', data = {}, listener = None):
if self.isDisabled(): return
try:
message = message.strip()
data = {
params = {
'email': self.conf('email'),
'notification[from_screen_name]': self.default_title,
'notification[message]': toUnicode(message),
'notification[from_remote_service_id]': int(time.time()),
}
self.urlopen(self.url, data = data)
self.urlopen(self.url, params = params)
except:
log.error('Check your email and added services on boxcar.io')
return False

View File

@@ -1,17 +1,15 @@
from couchpotato import get_session
from couchpotato.api import addApiView, addNonBlockApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.request import jsonified, getParam
from couchpotato.core.helpers.variable import tryInt, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from couchpotato.core.settings.model import Notification as Notif
from couchpotato.environment import Env
from operator import itemgetter
from sqlalchemy.sql.expression import or_
import threading
import time
import traceback
import uuid
log = CPLog(__name__)
@@ -19,12 +17,13 @@ log = CPLog(__name__)
class CoreNotifier(Notification):
m_lock = None
m_lock = threading.Lock()
messages = []
listeners = []
listen_to = [
'renamer.after', 'movie.snatched',
'updater.available', 'updater.updated',
'core.message', 'core.message.important',
]
def __init__(self):
@@ -55,15 +54,7 @@ class CoreNotifier(Notification):
addNonBlockApiView('notification.listener', (self.addListener, self.removeListener))
addApiView('notification.listener', self.listener)
fireEvent('schedule.interval', 'core.check_messages', self.checkMessages, hours = 12, single = True)
fireEvent('schedule.interval', 'core.clean_messages', self.cleanMessages, seconds = 15, single = True)
addEvent('app.load', self.clean)
addEvent('app.load', self.checkMessages)
self.messages = []
self.listeners = []
self.m_lock = threading.Lock()
def clean(self):
@@ -72,9 +63,11 @@ class CoreNotifier(Notification):
db.commit()
def markAsRead(self, ids = None, **kwargs):
def markAsRead(self):
ids = splitString(ids) if ids else None
ids = None
if getParam('ids'):
ids = splitString(getParam('ids'))
db = get_session()
@@ -87,13 +80,14 @@ class CoreNotifier(Notification):
db.commit()
return {
return jsonified({
'success': True
}
})
def listView(self, limit_offset = None, **kwargs):
def listView(self):
db = get_session()
limit_offset = getParam('limit_offset', None)
q = db.query(Notif)
@@ -112,33 +106,13 @@ class CoreNotifier(Notification):
ndict['type'] = 'notification'
notifications.append(ndict)
return {
return jsonified({
'success': True,
'empty': len(notifications) == 0,
'notifications': notifications
}
})
def checkMessages(self):
prop_name = 'messages.last_check'
last_check = tryInt(Env.prop(prop_name, default = 0))
messages = fireEvent('cp.messages', last_check = last_check, single = True) or []
for message in messages:
if message.get('time') > last_check:
message['sticky'] = True # Always sticky core messages
message_type = 'core.message.important' if message.get('important') else 'core.message'
fireEvent(message_type, message = message.get('message'), data = message)
if last_check < message.get('time'):
last_check = message.get('time')
Env.prop(prop_name, value = last_check)
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
def notify(self, message = '', data = {}, listener = None):
db = get_session()
@@ -159,10 +133,7 @@ class CoreNotifier(Notification):
return True
def frontend(self, type = 'notification', data = None, message = None):
if not data: data = {}
log.debug('Notifying frontend')
def frontend(self, type = 'notification', data = {}, message = None):
self.m_lock.acquire()
notification = {
@@ -182,12 +153,10 @@ class CoreNotifier(Notification):
'result': [notification],
})
except:
log.debug('Failed sending to listener: %s', traceback.format_exc())
break
self.listeners = []
self.m_lock.release()
log.debug('Done notifying frontend')
self.cleanMessages()
def addListener(self, callback, last_id = None):
@@ -199,75 +168,59 @@ class CoreNotifier(Notification):
'result': messages,
})
self.m_lock.acquire()
self.listeners.append((callback, last_id))
self.m_lock.release()
def removeListener(self, callback):
self.m_lock.acquire()
new_listeners = []
for list_tuple in self.listeners:
try:
listener, last_id = list_tuple
if listener != callback:
new_listeners.append(list_tuple)
if listener == callback:
self.listeners.remove(list_tuple)
except:
log.debug('Failed removing listener: %s', traceback.format_exc())
self.listeners = new_listeners
self.m_lock.release()
pass
def cleanMessages(self):
if len(self.messages) == 0:
return
log.debug('Cleaning messages')
self.m_lock.acquire()
time_ago = (time.time() - 15)
self.messages[:] = [m for m in self.messages if (m['time'] > time_ago)]
for message in self.messages:
if message['time'] < (time.time() - 15):
self.messages.remove(message)
self.m_lock.release()
log.debug('Done cleaning messages')
def getMessages(self, last_id):
log.debug('Getting messages with id: %s', last_id)
self.m_lock.acquire()
recent = []
try:
index = map(itemgetter('message_id'), self.messages).index(last_id)
recent = self.messages[index + 1:]
except:
pass
index = 0
for i in xrange(len(self.messages)):
index = len(self.messages) - i - 1
if self.messages[index]["message_id"] == last_id: break
recent = self.messages[index:]
self.m_lock.release()
log.debug('Returning for %s %s messages', (last_id, len(recent)))
return recent
return recent or []
def listener(self, init = False, **kwargs):
def listener(self):
messages = []
# Get unread
if init:
if getParam('init'):
db = get_session()
notifications = db.query(Notif) \
.filter(or_(Notif.read == False, Notif.added > (time.time() - 259200))) \
.all()
for n in notifications:
ndict = n.to_dict()
ndict['type'] = 'notification'
messages.append(ndict)
return {
return jsonified({
'success': True,
'result': messages,
}
})

View File

@@ -10,8 +10,8 @@ var NotificationBase = new Class({
// Listener
App.addEvent('unload', self.stopPoll.bind(self));
App.addEvent('reload', self.startInterval.bind(self, [true]));
App.on('notification', self.notify.bind(self));
App.on('message', self.showMessage.bind(self));
App.addEvent('notification', self.notify.bind(self));
App.addEvent('message', self.showMessage.bind(self));
// Add test buttons to settings page
App.addEvent('load', self.addTestButtons.bind(self));
@@ -21,18 +21,21 @@ var NotificationBase = new Class({
App.addEvent('load', function(){
App.block.notification = new Block.Menu(self, {
'button_class': 'icon2.eye-open',
'class': 'notification_menu',
'onOpen': self.markAsRead.bind(self)
})
$(App.block.notification).inject(App.getBlock('search'), 'after');
self.badge = new Element('div.badge').inject(App.block.notification, 'top').hide();
/* App.getBlock('notification').addLink(new Element('a.more', {
'href': App.createUrl('notifications'),
'text': 'Show older notifications'
})); */
});
window.addEvent('load', function(){
self.startInterval.delay($(window).getSize().x <= 480 ? 2000 : 100, self);
})
self.startInterval.delay(Browser.safari ? 100 : 0, self)
});
},
@@ -44,19 +47,14 @@ var NotificationBase = new Class({
result.el = App.getBlock('notification').addLink(
new Element('span.'+(result.read ? 'read' : '' )).adopt(
new Element('span.message', {'html': result.message}),
new Element('span.message', {'text': result.message}),
new Element('span.added', {'text': added.timeDiffInWords(), 'title': added})
)
, 'top');
self.notifications.include(result);
if((result.data.important !== undefined || result.data.sticky !== undefined) && !result.read){
var sticky = true
App.trigger('message', [result.message, sticky, result])
}
else if(!result.read){
if(!result.read)
self.setBadge(self.notifications.filter(function(n){ return !n.read}).length)
}
},
@@ -66,26 +64,20 @@ var NotificationBase = new Class({
self.badge[value ? 'show' : 'hide']()
},
markAsRead: function(force_ids){
var self = this,
ids = force_ids;
markAsRead: function(){
var self = this;
if(!force_ids) {
var rn = self.notifications.filter(function(n){
return !n.read && n.data.important === undefined
})
var rn = self.notifications.filter(function(n){
return !n.read
})
var ids = []
rn.each(function(n){
ids.include(n.id)
})
}
var ids = []
rn.each(function(n){
ids.include(n.id)
})
if(ids.length > 0)
Api.request('notification.markread', {
'data': {
'ids': ids.join(',')
},
'onSuccess': function(){
self.setBadge('')
}
@@ -101,20 +93,11 @@ var NotificationBase = new Class({
return;
}
self.request = Api.request('notification.listener', {
Api.request('notification.listener', {
'data': {'init':true},
'onSuccess': self.processData.bind(self)
}).send()
setInterval(function(){
if(self.request && self.request.isRunning()){
self.request.cancel();
self.startPoll()
}
}, 120000);
},
startPoll: function(){
@@ -147,7 +130,7 @@ var NotificationBase = new Class({
// Process data
if(json){
Array.each(json.result, function(result){
App.trigger(result.type, result);
App.fireEvent(result.type, result);
if(result.message && result.read === undefined)
self.showMessage(result.message);
})
@@ -157,44 +140,29 @@ var NotificationBase = new Class({
}
// Restart poll
self.startPoll.delay(1500, self);
self.startPoll()
},
showMessage: function(message, sticky, data){
showMessage: function(message){
var self = this;
if(!self.message_container)
self.message_container = new Element('div.messages').inject(document.body);
var new_message = new Element('div', {
'class': 'message' + (sticky ? ' sticky' : ''),
'html': message
}).inject(self.message_container, 'top');
var new_message = new Element('div.message', {
'text': message
}).inject(self.message_container);
setTimeout(function(){
new_message.addClass('show')
}, 10);
var hide_message = function(){
setTimeout(function(){
new_message.addClass('hide')
setTimeout(function(){
new_message.destroy();
}, 1000);
}
if(sticky)
new_message.grab(
new Element('a.close.icon2', {
'events': {
'click': function(){
self.markAsRead([data.id]);
hide_message();
}
}
})
);
else
setTimeout(hide_message, 4000);
}, 4000);
},
@@ -210,14 +178,11 @@ var NotificationBase = new Class({
},
addTestButton: function(fieldset, plugin_name){
var self = this,
button_name = self.testButtonName(fieldset);
if(button_name.contains('Notifications')) return;
var self = this;
new Element('.ctrlHolder.test_button').adopt(
new Element('a.button', {
'text': button_name,
'text': self.testButtonName(fieldset),
'events': {
'click': function(){
var button = fieldset.getElement('.test_button .button');
@@ -226,7 +191,7 @@ var NotificationBase = new Class({
Api.request('notify.'+plugin_name+'.test', {
'onComplete': function(json){
button.set('text', button_name);
button.set('text', self.testButtonName(fieldset));
if(json.success){
var message = new Element('span.success', {

View File

@@ -28,23 +28,12 @@ config = [{
'name': 'smtp_server',
'label': 'SMTP server',
},
{ 'name': 'smtp_port',
'label': 'SMTP server port',
'default': '25',
'type': 'int',
},
{
'name': 'ssl',
'label': 'Enable SSL',
'default': 0,
'type': 'bool',
},
{
'name': 'starttls',
'label': 'Enable StartTLS',
'default': 0,
'type': 'bool',
},
{
'name': 'smtp_user',
'label': 'SMTP user',

View File

@@ -1,10 +1,7 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from couchpotato.environment import Env
from email.mime.text import MIMEText
from email.utils import formatdate, make_msgid
import smtplib
import traceback
@@ -13,8 +10,8 @@ log = CPLog(__name__)
class Email(Notification):
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
def notify(self, message = '', data = {}, listener = None):
if self.isDisabled(): return
# Extract all the settings from settings
from_address = self.conf('from')
@@ -23,30 +20,18 @@ class Email(Notification):
smtp_server = self.conf('smtp_server')
smtp_user = self.conf('smtp_user')
smtp_pass = self.conf('smtp_pass')
smtp_port = self.conf('smtp_port')
starttls = self.conf('starttls')
# Make the basic message
message = MIMEText(toUnicode(message), _charset = Env.get('encoding'))
message = MIMEText(toUnicode(message))
message['Subject'] = self.default_title
message['From'] = from_address
message['To'] = to_address
message['Date'] = formatdate(localtime = 1)
message['Message-ID'] = make_msgid()
try:
# Open the SMTP connection, via SSL if requested
log.debug("Connecting to host %s on port %s" % (smtp_server, smtp_port))
log.debug("SMTP over SSL %s", ("enabled" if ssl == 1 else "disabled"))
mailserver = smtplib.SMTP_SSL(smtp_server) if ssl == 1 else smtplib.SMTP(smtp_server)
if (starttls):
log.debug("Using StartTLS to initiate the connection with the SMTP server")
mailserver.starttls()
# Say hello to the server
mailserver.ehlo()
# Check too see if an login attempt should be attempted
if len(smtp_user) > 0:
log.debug("Logging on to SMTP server using username \'%s\'%s", (smtp_user, " and a password" if len(smtp_pass) > 0 else ""))
@@ -54,7 +39,7 @@ class Email(Notification):
# Send the e-mail
log.debug("Sending the email")
mailserver.sendmail(from_address, splitString(to_address), message.as_string())
mailserver.sendmail(from_address, to_address, message.as_string())
# Close the SMTP connection
mailserver.quit()
@@ -64,5 +49,6 @@ class Email(Notification):
return True
except:
log.error('E-mail failed: %s', traceback.format_exc())
return False
return False

View File

@@ -43,8 +43,8 @@ class Growl(Notification):
else:
log.error('Failed register of growl: %s', traceback.format_exc())
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
def notify(self, message = '', data = {}, listener = None):
if self.isDisabled(): return
self.register()

View File

@@ -1,6 +1,7 @@
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.request import getParams, jsonified
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
import re
@@ -21,17 +22,21 @@ class NMJ(Notification):
addApiView(self.testNotifyName(), self.test)
addApiView('notify.nmj.auto_config', self.autoConfig)
def autoConfig(self, host = 'localhost', **kwargs):
def autoConfig(self):
params = getParams()
host = params.get('host', 'localhost')
database = ''
mount = ''
try:
terminal = telnetlib.Telnet(host)
except Exception:
log.error('Warning: unable to get a telnet session to %s', host)
log.error('Warning: unable to get a telnet session to %s', (host))
return self.failed()
log.debug('Connected to %s via telnet', host)
log.debug('Connected to %s via telnet', (host))
terminal.read_until('sh-3.00# ')
terminal.write('cat /tmp/source\n')
terminal.write('cat /tmp/netshare\n')
@@ -45,7 +50,7 @@ class NMJ(Notification):
device = match.group(2)
log.info('Found NMJ database %s on device %s', (database, device))
else:
log.error('Could not get current NMJ database on %s, NMJ is probably not running!', host)
log.error('Could not get current NMJ database on %s, NMJ is probably not running!', (host))
return self.failed()
if device.startswith('NETWORK_SHARE/'):
@@ -53,29 +58,28 @@ class NMJ(Notification):
if match:
mount = match.group().replace('127.0.0.1', host)
log.info('Found mounting url on the Popcorn Hour in configuration: %s', mount)
log.info('Found mounting url on the Popcorn Hour in configuration: %s', (mount))
else:
log.error('Detected a network share on the Popcorn Hour, but could not get the mounting url')
return self.failed()
return {
return jsonified({
'success': True,
'database': database,
'mount': mount,
}
})
def addToLibrary(self, message = None, group = None):
def addToLibrary(self, message = None, group = {}):
if self.isDisabled(): return
if not group: group = {}
host = self.conf('host')
mount = self.conf('mount')
database = self.conf('database')
if mount:
log.debug('Try to mount network drive via url: %s', mount)
log.debug('Try to mount network drive via url: %s', (mount))
try:
self.urlopen(mount)
data = self.urlopen(mount)
except:
return False
@@ -98,24 +102,20 @@ class NMJ(Notification):
et = etree.fromstring(response)
result = et.findtext('returnValue')
except SyntaxError, e:
log.error('Unable to parse XML returned from the Popcorn Hour: %s', e)
log.error('Unable to parse XML returned from the Popcorn Hour: %s', (e))
return False
if int(result) > 0:
log.error('Popcorn Hour returned an errorcode: %s', result)
log.error('Popcorn Hour returned an errorcode: %s', (result))
return False
else:
log.info('NMJ started background scan')
return True
def failed(self):
return {
'success': False
}
return jsonified({'success': False})
def test(self, **kwargs):
return {
'success': self.addToLibrary()
}
def test(self):
return jsonified({'success': self.addToLibrary()})

View File

@@ -1,15 +1,16 @@
from .main import Pushbullet
from .main import Notifo
def start():
return Pushbullet()
return Notifo()
config = [{
'name': 'pushbullet',
'name': 'notifo',
'groups': [
{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'pushbullet',
'name': 'notifo',
'description': 'Keep in mind that Notifo service will end soon.',
'options': [
{
'name': 'enabled',
@@ -17,14 +18,10 @@ config = [{
'type': 'enabler',
},
{
'name': 'api_key',
'label': 'User API Key'
'name': 'username',
},
{
'name': 'devices',
'default': '',
'advanced': True,
'description': 'IDs of devices to send notifications to, empty = all devices'
'name': 'api_key',
},
{
'name': 'on_snatch',

View File

@@ -0,0 +1,39 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from flask.helpers import json
import base64
import traceback
log = CPLog(__name__)
class Notifo(Notification):
url = 'https://api.notifo.com/v1/send_notification'
def notify(self, message = '', data = {}, listener = None):
if self.isDisabled(): return
try:
params = {
'label': self.default_title,
'msg': toUnicode(message),
}
headers = {
'Authorization': "Basic %s" % base64.encodestring('%s:%s' % (self.conf('username'), self.conf('api_key')))[:-1]
}
handle = self.urlopen(self.url, params = params, headers = headers)
result = json.loads(handle)
if result['status'] != 'success' or result['response_message'] != 'OK':
raise Exception
except:
log.error('Notification failed: %s', traceback.format_exc())
return False
log.info('Notifo notification successful.')
return True

View File

@@ -8,17 +8,20 @@ log = CPLog(__name__)
class NotifyMyAndroid(Notification):
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
def notify(self, message = '', data = {}, listener = None):
if self.isDisabled(): return
nma = pynma.PyNMA()
keys = splitString(self.conf('api_key'))
nma.addkey(keys)
nma.developerkey(self.conf('dev_key'))
# hacky fix for the event type
# as it seems to be part of the message now
self.event = message.split(' ')[0]
response = nma.push(
application = self.default_title,
event = message.split(' ')[0],
event = self.event,
description = message,
priority = self.conf('priority'),
batch_mode = len(keys) > 1

View File

@@ -9,6 +9,7 @@ log = CPLog(__name__)
class NotifyMyWP(Notification):
def notify(self, message = '', data = {}, listener = None):
if self.isDisabled(): return
keys = splitString(self.conf('api_key'))
p = PyNMWP(keys, self.conf('dev_key'))

16
couchpotato/core/notifications/plex/__init__.py Executable file → Normal file
View File

@@ -17,22 +17,10 @@ config = [{
'type': 'enabler',
},
{
'name': 'media_server',
'label': 'Media Server',
'name': 'host',
'default': 'localhost',
'description': 'Hostname/IP, default localhost'
},
{
'name': 'clients',
'default': '',
'description': 'Comma separated list of client names\'s (computer names). Top right when you start Plex'
},
{
'name': 'on_snatch',
'default': 0,
'type': 'bool',
'description': 'Default should be on localhost',
'advanced': True,
'description': 'Also send message when movie is snatched.',
},
],
}

View File

@@ -1,85 +0,0 @@
import json
from couchpotato import CPLog
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import tryUrlencode
import requests
log = CPLog(__name__)
class PlexClientProtocol(object):
def __init__(self, plex):
self.plex = plex
addEvent('notify.plex.notifyClient', self.notify)
def notify(self, client, message):
raise NotImplementedError()
class PlexClientHTTP(PlexClientProtocol):
def request(self, command, client):
url = 'http://%s:%s/xbmcCmds/xbmcHttp/?%s' % (
client['address'],
client['port'],
tryUrlencode(command)
)
headers = {}
try:
self.plex.urlopen(url, headers = headers, timeout = 3, show_error = False)
except Exception, err:
log.error("Couldn't sent command to Plex: %s", err)
return False
return True
def notify(self, client, message):
if client.get('protocol') != 'xbmchttp':
return None
data = {
'command': 'ExecBuiltIn',
'parameter': 'Notification(CouchPotato, %s)' % message
}
return self.request(data, client)
class PlexClientJSON(PlexClientProtocol):
def request(self, method, params, client):
log.debug('sendJSON("%s", %s, %s)', (method, params, client))
url = 'http://%s:%s/jsonrpc' % (
client['address'],
client['port']
)
headers = {
'Content-Type': 'application/json'
}
request = {
'id': 1,
'jsonrpc': '2.0',
'method': method,
'params': params
}
try:
requests.post(url, headers = headers, timeout = 3, data = json.dumps(request))
except Exception, err:
log.error("Couldn't sent command to Plex: %s", err)
return False
return True
def notify(self, client, message):
if client.get('protocol') not in ['xbmcjson', 'plex']:
return None
params = {
'title': 'CouchPotato',
'message': message
}
return self.request('GUI.ShowNotification', params, client)

99
couchpotato/core/notifications/plex/main.py Executable file → Normal file
View File

@@ -1,77 +1,90 @@
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.request import jsonified
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from .client import PlexClientHTTP, PlexClientJSON
from .server import PlexServer
from urllib2 import URLError
from xml.dom import minidom
import traceback
log = CPLog(__name__)
class Plex(Notification):
http_time_between_calls = 0
def __init__(self):
super(Plex, self).__init__()
self.server = PlexServer(self)
self.client_protocols = {
'http': PlexClientHTTP(self),
'json': PlexClientJSON(self)
}
addEvent('renamer.after', self.addToLibrary)
def addToLibrary(self, message = None, group = {}):
if self.isDisabled(): return
return self.server.refresh()
log.info('Sending notification to Plex')
hosts = [cleanHost(x.strip() + ':32400') for x in self.conf('host').split(",")]
def getClientNames(self):
return [
x.strip().lower()
for x in self.conf('clients').split(',')
]
for host in hosts:
def notifyClients(self, message, client_names):
success = True
source_type = ['movie']
base_url = '%slibrary/sections' % host
refresh_url = '%s/%%s/refresh' % base_url
for client_name in client_names:
try:
sections_xml = self.urlopen(base_url)
xml_sections = minidom.parseString(sections_xml)
sections = xml_sections.getElementsByTagName('Directory')
client_success = False
client = self.server.clients.get(client_name)
for s in sections:
if s.getAttribute('type') in source_type:
url = refresh_url % s.getAttribute('key')
x = self.urlopen(url)
if client and client['found']:
client_success = fireEvent('notify.plex.notifyClient', client, message, single = True)
except:
log.error('Plex library update failed for %s, Media Server not running: %s', (host, traceback.format_exc(1)))
return False
if not client_success:
if self.server.staleClients() or not client:
log.info('Failed to send notification to client "%s". '
'Client list is stale, updating the client list and retrying.', client_name)
self.server.updateClients(self.getClientNames())
else:
log.warning('Failed to send notification to client %s, skipping this time', client_name)
success = False
return success
return True
def notify(self, message = '', data = {}, listener = None):
return self.notifyClients(message, self.getClientNames())
if self.isDisabled(): return
def test(self, **kwargs):
hosts = [x.strip() + ':3000' for x in self.conf('host').split(",")]
successful = 0
for host in hosts:
if self.send({'command': 'ExecBuiltIn', 'parameter': 'Notification(CouchPotato, %s)' % message}, host):
successful += 1
return successful == len(hosts)
def send(self, command, host):
url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, tryUrlencode(command))
headers = {}
try:
self.urlopen(url, headers = headers, show_error = False)
except URLError:
log.error("Couldn't sent command to Plex, probably just running Media Server")
return False
except:
log.error("Couldn't sent command to Plex: %s", traceback.format_exc())
return False
log.info('Plex notification to %s successful.', host)
return True
def test(self):
test_type = self.testNotifyName()
log.info('Sending test to %s', test_type)
notify_success = self.notify(
success = self.notify(
message = self.test_message,
data = {},
listener = 'test'
)
success2 = self.addToLibrary()
refresh_success = self.addToLibrary()
return {'success': notify_success or refresh_success}
return jsonified({'success': success or success2})

Some files were not shown because too many files have changed in this diff Show More