Compare commits
132 Commits
tv
...
build/2.5.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0622e6e5ab | ||
|
|
f16931906f | ||
|
|
68dcba8853 | ||
|
|
ae8f66df1a | ||
|
|
5237ead5cb | ||
|
|
c4aaa10308 | ||
|
|
d10536a829 | ||
|
|
1e7fa82e11 | ||
|
|
fbccba77a7 | ||
|
|
d3efda74b2 | ||
|
|
66b849cb29 | ||
|
|
b19f98ef5b | ||
|
|
c389790cf2 | ||
|
|
d7445dfa80 | ||
|
|
36782768a4 | ||
|
|
2c9d487614 | ||
|
|
b9a724c8bb | ||
|
|
68d826ca1c | ||
|
|
d6921882e1 | ||
|
|
2cfff73486 | ||
|
|
0c7dda8d44 | ||
|
|
dbaa377770 | ||
|
|
47d2b81d1c | ||
|
|
f79fcda27f | ||
|
|
cdbcad2238 | ||
|
|
5d913e87c3 | ||
|
|
16f02bda27 | ||
|
|
8d108b92bf | ||
|
|
46783028b1 | ||
|
|
d08c7c57a8 | ||
|
|
eeeb845ef3 | ||
|
|
651a063f94 | ||
|
|
f20aaa2d9d | ||
|
|
ba925ec191 | ||
|
|
3b7376fd18 | ||
|
|
c31b10c798 | ||
|
|
acda664686 | ||
|
|
e2852407ea | ||
|
|
88e738c6cd | ||
|
|
eaae8bdb0b | ||
|
|
821f68909d | ||
|
|
2b8dfed475 | ||
|
|
607b5ea766 | ||
|
|
88579cd71a | ||
|
|
6c57316ce6 | ||
|
|
6702683da3 | ||
|
|
1ed58586a1 | ||
|
|
f08ccd4fd8 | ||
|
|
312562a9f5 | ||
|
|
9e260a89af | ||
|
|
d233e4d22e | ||
|
|
23893dbcb9 | ||
|
|
506871b506 | ||
|
|
6115917660 | ||
|
|
21df8819d3 | ||
|
|
fb3f3e11f6 | ||
|
|
178c8942c3 | ||
|
|
51e747049d | ||
|
|
0582f7d694 | ||
|
|
fa7cac7538 | ||
|
|
9a314cfbc4 | ||
|
|
5941d0bf77 | ||
|
|
d326c1c25c | ||
|
|
96472a9a8f | ||
|
|
27252561e2 | ||
|
|
c9e732651f | ||
|
|
7849e7170d | ||
|
|
087894eb4e | ||
|
|
25f1b8c7a7 | ||
|
|
e71da1f14d | ||
|
|
938b14ba18 | ||
|
|
d6522d8f38 | ||
|
|
78eab890e7 | ||
|
|
1a56191f83 | ||
|
|
41c0f34d95 | ||
|
|
37bf205d7a | ||
|
|
aa1fa3eb9a | ||
|
|
0e2f8a612c | ||
|
|
465e7b2abc | ||
|
|
578fb45785 | ||
|
|
96995bbbe5 | ||
|
|
4cfdafebbc | ||
|
|
b97acb8ef5 | ||
|
|
d68d2dfdb6 | ||
|
|
39b269a454 | ||
|
|
ac081d3e10 | ||
|
|
5d4efb60cf | ||
|
|
cc408b980c | ||
|
|
59590b3ac9 | ||
|
|
ff759dacf3 | ||
|
|
a328e44130 | ||
|
|
7924cac5f9 | ||
|
|
1cef3b0c93 | ||
|
|
3cd59edc8b | ||
|
|
0d624af01d | ||
|
|
a09132570c | ||
|
|
ee3fc38432 | ||
|
|
dbf0192c8e | ||
|
|
6962cfc3f5 | ||
|
|
e096ec3b5b | ||
|
|
b30a74ae0c | ||
|
|
978eeb16c9 | ||
|
|
e5c9d91657 | ||
|
|
fa81c3a07a | ||
|
|
9cdd520d41 | ||
|
|
55d7898771 | ||
|
|
b8256bef97 | ||
|
|
5be9dc0b4a | ||
|
|
7d0be0cefb | ||
|
|
f7ce1edb13 | ||
|
|
5ad9280b60 | ||
|
|
2b353f1b20 | ||
|
|
75ab90b87b | ||
|
|
0219296120 | ||
|
|
20032b3a31 | ||
|
|
ea9e9a8c90 | ||
|
|
f7b0ee145b | ||
|
|
cc866738ee | ||
|
|
eadccf6e33 | ||
|
|
b70b66e567 | ||
|
|
5b6792dc20 | ||
|
|
f498e7343a | ||
|
|
6962f441e6 | ||
|
|
1def62b1b1 | ||
|
|
a4a4a6a185 | ||
|
|
d4c9469c1a | ||
|
|
3e2d4c5d7b | ||
|
|
d03f711d69 | ||
|
|
44dd8d9b96 | ||
|
|
549a3be0d8 | ||
|
|
1bb2edf8ec | ||
|
|
84c6f36315 |
@@ -10,6 +10,7 @@ import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
import time
|
||||
|
||||
# Root path
|
||||
base_path = dirname(os.path.abspath(__file__))
|
||||
|
||||
232
Desktop.py
Normal file
232
Desktop.py
Normal file
@@ -0,0 +1,232 @@
|
||||
from esky.util import appdir_from_executable #@UnresolvedImport
|
||||
from threading import Thread
|
||||
from version import VERSION
|
||||
from wx.lib.softwareupdate import SoftwareUpdate
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import webbrowser
|
||||
import wx
|
||||
|
||||
# Include proper dirs
|
||||
if hasattr(sys, 'frozen'):
|
||||
import libs
|
||||
base_path = os.path.dirname(os.path.dirname(os.path.abspath(libs.__file__)))
|
||||
else:
|
||||
base_path = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
lib_dir = os.path.join(base_path, 'libs')
|
||||
|
||||
sys.path.insert(0, base_path)
|
||||
sys.path.insert(0, lib_dir)
|
||||
|
||||
from couchpotato.environment import Env
|
||||
|
||||
class TaskBarIcon(wx.TaskBarIcon):
|
||||
|
||||
TBMENU_OPEN = wx.NewId()
|
||||
TBMENU_SETTINGS = wx.NewId()
|
||||
TBMENU_EXIT = wx.ID_EXIT
|
||||
|
||||
closed = False
|
||||
menu = False
|
||||
enabled = False
|
||||
|
||||
def __init__(self, frame):
|
||||
wx.TaskBarIcon.__init__(self)
|
||||
self.frame = frame
|
||||
|
||||
icon = wx.Icon('icon.png', wx.BITMAP_TYPE_PNG)
|
||||
self.SetIcon(icon)
|
||||
|
||||
self.Bind(wx.EVT_TASKBAR_LEFT_UP, self.OnTaskBarClick)
|
||||
self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.OnTaskBarClick)
|
||||
|
||||
self.Bind(wx.EVT_MENU, self.onOpen, id = self.TBMENU_OPEN)
|
||||
self.Bind(wx.EVT_MENU, self.onSettings, id = self.TBMENU_SETTINGS)
|
||||
self.Bind(wx.EVT_MENU, self.onTaskBarClose, id = self.TBMENU_EXIT)
|
||||
|
||||
def OnTaskBarClick(self, evt):
|
||||
menu = self.CreatePopupMenu()
|
||||
self.PopupMenu(menu)
|
||||
menu.Destroy()
|
||||
|
||||
def enable(self):
|
||||
self.enabled = True
|
||||
|
||||
if self.menu:
|
||||
self.open_menu.Enable(True)
|
||||
self.setting_menu.Enable(True)
|
||||
|
||||
self.open_menu.SetText('Open')
|
||||
|
||||
def CreatePopupMenu(self):
|
||||
|
||||
if not self.menu:
|
||||
self.menu = wx.Menu()
|
||||
self.open_menu = self.menu.Append(self.TBMENU_OPEN, 'Open')
|
||||
self.setting_menu = self.menu.Append(self.TBMENU_SETTINGS, 'About')
|
||||
self.exit_menu = self.menu.Append(self.TBMENU_EXIT, 'Quit')
|
||||
|
||||
if not self.enabled:
|
||||
self.open_menu.Enable(False)
|
||||
self.setting_menu.Enable(False)
|
||||
|
||||
self.open_menu.SetText('Loading...')
|
||||
|
||||
return self.menu
|
||||
|
||||
def onOpen(self, event):
|
||||
url = self.frame.parent.getSetting('base_url')
|
||||
webbrowser.open(url)
|
||||
|
||||
def onSettings(self, event):
|
||||
url = self.frame.parent.getSetting('base_url') + 'settings/about/'
|
||||
webbrowser.open(url)
|
||||
|
||||
def onTaskBarClose(self, evt):
|
||||
if self.closed:
|
||||
return
|
||||
|
||||
self.closed = True
|
||||
|
||||
self.RemoveIcon()
|
||||
wx.CallAfter(self.frame.Close)
|
||||
|
||||
|
||||
def makeIcon(self, img):
|
||||
if "wxMSW" in wx.PlatformInfo:
|
||||
img = img.Scale(16, 16)
|
||||
elif "wxGTK" in wx.PlatformInfo:
|
||||
img = img.Scale(22, 22)
|
||||
|
||||
icon = wx.IconFromBitmap(img.CopyFromBitmap())
|
||||
return icon
|
||||
|
||||
|
||||
class MainFrame(wx.Frame):
|
||||
|
||||
def __init__(self, parent):
|
||||
wx.Frame.__init__(self, None, style = wx.FRAME_NO_TASKBAR)
|
||||
|
||||
self.parent = parent
|
||||
self.tbicon = TaskBarIcon(self)
|
||||
|
||||
|
||||
class WorkerThread(Thread):
|
||||
|
||||
def __init__(self, desktop):
|
||||
Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self._desktop = desktop
|
||||
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
|
||||
# Get options via arg
|
||||
from couchpotato.runner import getOptions
|
||||
args = ['--quiet']
|
||||
self.options = getOptions(args)
|
||||
|
||||
# Load settings
|
||||
settings = Env.get('settings')
|
||||
settings.setFile(self.options.config_file)
|
||||
|
||||
# Create data dir if needed
|
||||
self.data_dir = os.path.expanduser(Env.setting('data_dir'))
|
||||
if self.data_dir == '':
|
||||
from couchpotato.core.helpers.variable import getDataDir
|
||||
self.data_dir = getDataDir()
|
||||
|
||||
if not os.path.isdir(self.data_dir):
|
||||
os.makedirs(self.data_dir)
|
||||
|
||||
# Create logging dir
|
||||
self.log_dir = os.path.join(self.data_dir, 'logs');
|
||||
if not os.path.isdir(self.log_dir):
|
||||
os.mkdir(self.log_dir)
|
||||
|
||||
try:
|
||||
from couchpotato.runner import runCouchPotato
|
||||
runCouchPotato(self.options, base_path, args, data_dir = self.data_dir, log_dir = self.log_dir, Env = Env, desktop = self._desktop)
|
||||
except:
|
||||
pass
|
||||
|
||||
self._desktop.frame.Close()
|
||||
self._desktop.ExitMainLoop()
|
||||
|
||||
|
||||
class CouchPotatoApp(wx.App, SoftwareUpdate):
|
||||
|
||||
settings = {}
|
||||
events = {}
|
||||
restart = False
|
||||
closing = False
|
||||
|
||||
def OnInit(self):
|
||||
|
||||
# Updater
|
||||
base_url = 'https://api.couchpota.to/updates/%s'
|
||||
self.InitUpdates(base_url % VERSION + '/', 'https://couchpota.to/updates/%s' % 'changelog.html',
|
||||
icon = wx.Icon('icon.png'))
|
||||
|
||||
self.frame = MainFrame(self)
|
||||
self.frame.Bind(wx.EVT_CLOSE, self.onClose)
|
||||
|
||||
# CouchPotato thread
|
||||
self.worker = WorkerThread(self)
|
||||
|
||||
return True
|
||||
|
||||
def onAppLoad(self):
|
||||
self.frame.tbicon.enable()
|
||||
|
||||
def setSettings(self, settings = {}):
|
||||
self.settings = settings
|
||||
|
||||
def getSetting(self, name):
|
||||
return self.settings.get(name)
|
||||
|
||||
def addEvents(self, events = {}):
|
||||
for name in events.iterkeys():
|
||||
self.events[name] = events[name]
|
||||
|
||||
def onClose(self, event):
|
||||
|
||||
if not self.closing:
|
||||
self.closing = True
|
||||
self.frame.tbicon.onTaskBarClose(event)
|
||||
|
||||
onClose = self.events.get('onClose')
|
||||
onClose(event)
|
||||
|
||||
def afterShutdown(self, restart = False):
|
||||
self.frame.Destroy()
|
||||
self.restart = restart
|
||||
self.ExitMainLoop()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
app = CouchPotatoApp(redirect = False)
|
||||
app.MainLoop()
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
if app.restart:
|
||||
|
||||
def appexe_from_executable(exepath):
|
||||
appdir = appdir_from_executable(exepath)
|
||||
exename = os.path.basename(exepath)
|
||||
|
||||
if sys.platform == "darwin":
|
||||
if os.path.isdir(os.path.join(appdir, "Contents", "MacOS")):
|
||||
return os.path.join(appdir, "Contents", "MacOS", exename)
|
||||
|
||||
return os.path.join(appdir, exename)
|
||||
|
||||
exe = appexe_from_executable(sys.executable)
|
||||
os.chdir(os.path.dirname(exe))
|
||||
|
||||
os.execv(exe, [exe] + sys.argv[1:])
|
||||
20
README.md
20
README.md
@@ -29,25 +29,19 @@ OS X:
|
||||
* Then do `python CouchPotatoServer/CouchPotato.py`
|
||||
* Your browser should open up, but if it doesn't go to `http://localhost:5050/`
|
||||
|
||||
Linux:
|
||||
Linux (Ubuntu / Debian):
|
||||
|
||||
* (Ubuntu / Debian) Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
||||
* (Fedora / CentOS) Install [GIT](http://git-scm.com/) with `yum install git`
|
||||
* Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
||||
* 'cd' to the folder of your choosing.
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||
* Then do `python CouchPotatoServer/CouchPotato.py` to start
|
||||
* (Ubuntu / Debian) To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
||||
* (Ubuntu / Debian) Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato`
|
||||
* (Ubuntu / Debian) Change the paths inside the default file `sudo nano /etc/default/couchpotato`
|
||||
* (Ubuntu / Debian) Make it executable `sudo chmod +x /etc/init.d/couchpotato`
|
||||
* (Ubuntu / Debian) Add it to defaults `sudo update-rc.d couchpotato defaults`
|
||||
* (systemd) To run on boot copy the systemd config `sudo cp CouchPotatoServer/init/couchpotato.fedora.service /etc/systemd/system/couchpotato.service`
|
||||
* (systemd) Update the systemd config file with your user and path to CouchPotato.py
|
||||
* (systemd) Enable it at boot with `sudo systemctl enable couchpotato`
|
||||
* To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
||||
* Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato`
|
||||
* Change the paths inside the default file `sudo nano /etc/default/couchpotato`
|
||||
* Make it executable `sudo chmod +x /etc/init.d/couchpotato`
|
||||
* Add it to defaults `sudo update-rc.d couchpotato defaults`
|
||||
* Open your browser and go to `http://localhost:5050/`
|
||||
|
||||
Docker:
|
||||
* You can use [razorgirl's Dockerfile](https://github.com/razorgirl/docker-couchpotato) to quickly build your own isolated app container. It's based on the Linux instructions above. For more info about Docker check out the [official website](https://www.docker.com).
|
||||
|
||||
FreeBSD :
|
||||
|
||||
|
||||
@@ -13,8 +13,6 @@ Lastly, for anything related to CouchPotato, feel free to stop by the [forum](ht
|
||||
## Issues
|
||||
Issues are intended for reporting bugs and weird behaviour or suggesting improvements to CouchPotatoServer.
|
||||
Before you submit an issue, please go through the following checklist:
|
||||
* **FILL IN ALL THE FIELDS ASKED FOR**
|
||||
* **POST MORE THAN A SINGLE LINE LOG**, if you do, you'd better have a easy reproducable bug
|
||||
* Search through existing issues (*including closed issues!*) first: you might be able to get your answer there.
|
||||
* Double check your issue manually, because it could be an external issue.
|
||||
* Post logs with your issue: Without seeing what is going on, the developers can't reproduce the error.
|
||||
@@ -27,14 +25,12 @@ Before you submit an issue, please go through the following checklist:
|
||||
* What hardware / OS are you using and what are its limitations? For example: NAS can be slow and maybe have a different version of python installed than when you use CP on OS X or Windows.
|
||||
* Your issue might be marked with the "can't reproduce" tag. Don't ask why your issue was closed if it says so in the tag.
|
||||
* If you're running on a NAS (QNAP, Austor, Synology etc.) with pre-made packages, make sure these are set up to use our source repository (RuudBurger/CouchPotatoServer) and nothing else!
|
||||
* Do not "bump" issues with "Any updates on this" or whatever. Yes I've seen it, you don't have to remind me of it. There will be an update when the code is done or I need information. If you feel the need to do so, you'd better have more info on the issue.
|
||||
|
||||
The more relevant information you provide, the more likely that your issue will be resolved.
|
||||
If you don't follow any of the checks above, I'll close the issue. If you are wondering why (and ask) I'll block you from posting new issues and the repo.
|
||||
|
||||
## Pull Requests
|
||||
Pull requests are intended for contributing code or documentation to the project. Before you submit a pull request, consider the following:
|
||||
* Make sure your pull request is made for the *develop* branch (or relevant feature branch).
|
||||
* Have you tested your PR? If not, why?
|
||||
* Does your PR have any limitations I should know of?
|
||||
* Does your PR have any limitations we should know of?
|
||||
* Is your PR up-to-date with the branch you're trying to push into?
|
||||
|
||||
@@ -143,8 +143,6 @@ class ApiHandler(RequestHandler):
|
||||
else:
|
||||
self.write(result)
|
||||
self.finish()
|
||||
except UnicodeDecodeError:
|
||||
log.error('Failed proper encode: %s', traceback.format_exc())
|
||||
except:
|
||||
log.debug('Failed doing request, probably already closed: %s', (traceback.format_exc()))
|
||||
try: self.finish({'success': False, 'error': 'Failed returning results'})
|
||||
|
||||
@@ -181,13 +181,13 @@ class Core(Plugin):
|
||||
return '%sapi/%s' % (self.createBaseUrl(), Env.setting('api_key'))
|
||||
|
||||
def version(self):
|
||||
ver = fireEvent('updater.info', single = True) or {'version': {}}
|
||||
ver = fireEvent('updater.info', single = True)
|
||||
|
||||
if os.name == 'nt': platf = 'windows'
|
||||
elif 'Darwin' in platform.platform(): platf = 'osx'
|
||||
else: platf = 'linux'
|
||||
|
||||
return '%s - %s-%s - v2' % (platf, ver.get('version').get('type') or 'unknown', ver.get('version').get('hash') or 'unknown')
|
||||
return '%s - %s-%s - v2' % (platf, ver.get('version')['type'], ver.get('version')['hash'])
|
||||
|
||||
def versionView(self, **kwargs):
|
||||
return {
|
||||
@@ -286,13 +286,13 @@ config = [{
|
||||
'name': 'permission_folder',
|
||||
'default': '0755',
|
||||
'label': 'Folder CHMOD',
|
||||
'description': 'Can be either decimal (493) or octal (leading zero: 0755). <a target="_blank" href="http://permissions-calculator.org/">Calculate the correct value</a>',
|
||||
'description': 'Can be either decimal (493) or octal (leading zero: 0755)',
|
||||
},
|
||||
{
|
||||
'name': 'permission_file',
|
||||
'default': '0644',
|
||||
'default': '0755',
|
||||
'label': 'File CHMOD',
|
||||
'description': 'See Folder CHMOD description, but for files',
|
||||
'description': 'Same as Folder CHMOD but for files',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
@@ -205,28 +205,19 @@ class GitUpdater(BaseUpdater):
|
||||
def getVersion(self):
|
||||
|
||||
if not self.version:
|
||||
|
||||
hash = None
|
||||
date = None
|
||||
branch = self.branch
|
||||
|
||||
try:
|
||||
output = self.repo.getHead() # Yes, please
|
||||
log.debug('Git version output: %s', output.hash)
|
||||
|
||||
hash = output.hash[:8]
|
||||
date = output.getDate()
|
||||
branch = self.repo.getCurrentBranch().name
|
||||
self.version = {
|
||||
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.repo.getCurrentBranch().name or self.branch, output.hash[:8], datetime.fromtimestamp(output.getDate())),
|
||||
'hash': output.hash[:8],
|
||||
'date': output.getDate(),
|
||||
'type': 'git',
|
||||
'branch': self.repo.getCurrentBranch().name
|
||||
}
|
||||
except Exception as e:
|
||||
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
|
||||
|
||||
self.version = {
|
||||
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, branch, hash or 'unknown_hash', datetime.fromtimestamp(date) if date else 'unknown_date'),
|
||||
'hash': hash,
|
||||
'date': date,
|
||||
'type': 'git',
|
||||
'branch': branch
|
||||
}
|
||||
return 'No GIT'
|
||||
|
||||
return self.version
|
||||
|
||||
|
||||
@@ -2,15 +2,13 @@ import json
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
from sqlite3 import OperationalError
|
||||
|
||||
from CodernityDB.database import RecordNotFound
|
||||
from CodernityDB.index import IndexException, IndexNotFoundException, IndexConflict
|
||||
from couchpotato import CPLog
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.encoding import toUnicode, sp
|
||||
from couchpotato.core.helpers.variable import getImdb, tryInt, randomString
|
||||
from couchpotato.core.helpers.variable import getImdb, tryInt
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
@@ -33,7 +31,6 @@ class Database(object):
|
||||
|
||||
addEvent('database.setup.after', self.startup_compact)
|
||||
addEvent('database.setup_index', self.setupIndex)
|
||||
addEvent('database.delete_corrupted', self.deleteCorrupted)
|
||||
|
||||
addEvent('app.migrate', self.migrate)
|
||||
addEvent('app.after_shutdown', self.close)
|
||||
@@ -149,17 +146,6 @@ class Database(object):
|
||||
|
||||
return results
|
||||
|
||||
def deleteCorrupted(self, _id, traceback_error = ''):
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
try:
|
||||
log.debug('Deleted corrupted document "%s": %s', (_id, traceback_error))
|
||||
corrupted = db.get('id', _id, with_storage = False)
|
||||
db._delete_id_index(corrupted.get('_id'), corrupted.get('_rev'), None)
|
||||
except:
|
||||
log.debug('Failed deleting corrupted: %s', traceback.format_exc())
|
||||
|
||||
def reindex(self, **kwargs):
|
||||
|
||||
success = True
|
||||
@@ -240,34 +226,6 @@ class Database(object):
|
||||
from couchpotato import Env
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
# Try fix for migration failures on desktop
|
||||
if Env.get('desktop'):
|
||||
try:
|
||||
list(db.all('profile', with_doc = True))
|
||||
except RecordNotFound:
|
||||
|
||||
failed_location = '%s_failed' % db.path
|
||||
old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db.old')
|
||||
|
||||
if not os.path.isdir(failed_location) and os.path.isfile(old_db):
|
||||
log.error('Corrupt database, trying migrate again')
|
||||
db.close()
|
||||
|
||||
# Rename database folder
|
||||
os.rename(db.path, '%s_failed' % db.path)
|
||||
|
||||
# Rename .old database to try another migrate
|
||||
os.rename(old_db, old_db[:-4])
|
||||
|
||||
fireEventAsync('app.restart')
|
||||
else:
|
||||
log.error('Migration failed and couldn\'t recover database. Please report on GitHub, with this message.')
|
||||
db.reindex()
|
||||
|
||||
return
|
||||
|
||||
# Check size and compact if needed
|
||||
size = db.get_db_details().get('size')
|
||||
prop_name = 'last_db_compact'
|
||||
last_check = int(Env.prop(prop_name, default = 0))
|
||||
@@ -312,328 +270,307 @@ class Database(object):
|
||||
}
|
||||
|
||||
migrate_data = {}
|
||||
rename_old = False
|
||||
|
||||
try:
|
||||
c = conn.cursor()
|
||||
|
||||
c = conn.cursor()
|
||||
for ml in migrate_list:
|
||||
migrate_data[ml] = {}
|
||||
rows = migrate_list[ml]
|
||||
|
||||
for ml in migrate_list:
|
||||
migrate_data[ml] = {}
|
||||
rows = migrate_list[ml]
|
||||
|
||||
try:
|
||||
c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
|
||||
except:
|
||||
# ignore faulty destination_id database
|
||||
if ml == 'category':
|
||||
migrate_data[ml] = {}
|
||||
else:
|
||||
rename_old = True
|
||||
raise
|
||||
|
||||
for p in c.fetchall():
|
||||
columns = {}
|
||||
for row in migrate_list[ml]:
|
||||
columns[row] = p[rows.index(row)]
|
||||
|
||||
if not migrate_data[ml].get(p[0]):
|
||||
migrate_data[ml][p[0]] = columns
|
||||
else:
|
||||
if not isinstance(migrate_data[ml][p[0]], list):
|
||||
migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
|
||||
migrate_data[ml][p[0]].append(columns)
|
||||
|
||||
conn.close()
|
||||
|
||||
log.info('Getting data took %s', time.time() - migrate_start)
|
||||
|
||||
db = self.getDB()
|
||||
if not db.opened:
|
||||
return
|
||||
|
||||
# Use properties
|
||||
properties = migrate_data['properties']
|
||||
log.info('Importing %s properties', len(properties))
|
||||
for x in properties:
|
||||
property = properties[x]
|
||||
Env.prop(property.get('identifier'), property.get('value'))
|
||||
|
||||
# Categories
|
||||
categories = migrate_data.get('category', [])
|
||||
log.info('Importing %s categories', len(categories))
|
||||
category_link = {}
|
||||
for x in categories:
|
||||
c = categories[x]
|
||||
|
||||
new_c = db.insert({
|
||||
'_t': 'category',
|
||||
'order': c.get('order', 999),
|
||||
'label': toUnicode(c.get('label', '')),
|
||||
'ignored': toUnicode(c.get('ignored', '')),
|
||||
'preferred': toUnicode(c.get('preferred', '')),
|
||||
'required': toUnicode(c.get('required', '')),
|
||||
'destination': toUnicode(c.get('destination', '')),
|
||||
})
|
||||
|
||||
category_link[x] = new_c.get('_id')
|
||||
|
||||
# Profiles
|
||||
log.info('Importing profiles')
|
||||
new_profiles = db.all('profile', with_doc = True)
|
||||
new_profiles_by_label = {}
|
||||
for x in new_profiles:
|
||||
|
||||
# Remove default non core profiles
|
||||
if not x['doc'].get('core'):
|
||||
db.delete(x['doc'])
|
||||
try:
|
||||
c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
|
||||
except:
|
||||
# ignore faulty destination_id database
|
||||
if ml == 'category':
|
||||
migrate_data[ml] = {}
|
||||
else:
|
||||
new_profiles_by_label[x['doc']['label']] = x['_id']
|
||||
raise
|
||||
|
||||
profiles = migrate_data['profile']
|
||||
profile_link = {}
|
||||
for x in profiles:
|
||||
p = profiles[x]
|
||||
for p in c.fetchall():
|
||||
columns = {}
|
||||
for row in migrate_list[ml]:
|
||||
columns[row] = p[rows.index(row)]
|
||||
|
||||
exists = new_profiles_by_label.get(p.get('label'))
|
||||
|
||||
# Update existing with order only
|
||||
if exists and p.get('core'):
|
||||
profile = db.get('id', exists)
|
||||
profile['order'] = tryInt(p.get('order'))
|
||||
profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
|
||||
db.update(profile)
|
||||
|
||||
profile_link[x] = profile.get('_id')
|
||||
if not migrate_data[ml].get(p[0]):
|
||||
migrate_data[ml][p[0]] = columns
|
||||
else:
|
||||
if not isinstance(migrate_data[ml][p[0]], list):
|
||||
migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
|
||||
migrate_data[ml][p[0]].append(columns)
|
||||
|
||||
new_profile = {
|
||||
'_t': 'profile',
|
||||
'label': p.get('label'),
|
||||
'order': int(p.get('order', 999)),
|
||||
'core': p.get('core', False),
|
||||
'qualities': [],
|
||||
'wait_for': [],
|
||||
'finish': []
|
||||
conn.close()
|
||||
|
||||
log.info('Getting data took %s', time.time() - migrate_start)
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
# Use properties
|
||||
properties = migrate_data['properties']
|
||||
log.info('Importing %s properties', len(properties))
|
||||
for x in properties:
|
||||
property = properties[x]
|
||||
Env.prop(property.get('identifier'), property.get('value'))
|
||||
|
||||
# Categories
|
||||
categories = migrate_data.get('category', [])
|
||||
log.info('Importing %s categories', len(categories))
|
||||
category_link = {}
|
||||
for x in categories:
|
||||
c = categories[x]
|
||||
|
||||
new_c = db.insert({
|
||||
'_t': 'category',
|
||||
'order': c.get('order', 999),
|
||||
'label': toUnicode(c.get('label', '')),
|
||||
'ignored': toUnicode(c.get('ignored', '')),
|
||||
'preferred': toUnicode(c.get('preferred', '')),
|
||||
'required': toUnicode(c.get('required', '')),
|
||||
'destination': toUnicode(c.get('destination', '')),
|
||||
})
|
||||
|
||||
category_link[x] = new_c.get('_id')
|
||||
|
||||
# Profiles
|
||||
log.info('Importing profiles')
|
||||
new_profiles = db.all('profile', with_doc = True)
|
||||
new_profiles_by_label = {}
|
||||
for x in new_profiles:
|
||||
|
||||
# Remove default non core profiles
|
||||
if not x['doc'].get('core'):
|
||||
db.delete(x['doc'])
|
||||
else:
|
||||
new_profiles_by_label[x['doc']['label']] = x['_id']
|
||||
|
||||
profiles = migrate_data['profile']
|
||||
profile_link = {}
|
||||
for x in profiles:
|
||||
p = profiles[x]
|
||||
|
||||
exists = new_profiles_by_label.get(p.get('label'))
|
||||
|
||||
# Update existing with order only
|
||||
if exists and p.get('core'):
|
||||
profile = db.get('id', exists)
|
||||
profile['order'] = tryInt(p.get('order'))
|
||||
profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
|
||||
db.update(profile)
|
||||
|
||||
profile_link[x] = profile.get('_id')
|
||||
else:
|
||||
|
||||
new_profile = {
|
||||
'_t': 'profile',
|
||||
'label': p.get('label'),
|
||||
'order': int(p.get('order', 999)),
|
||||
'core': p.get('core', False),
|
||||
'qualities': [],
|
||||
'wait_for': [],
|
||||
'finish': []
|
||||
}
|
||||
|
||||
types = migrate_data['profiletype']
|
||||
for profile_type in types:
|
||||
p_type = types[profile_type]
|
||||
if types[profile_type]['profile_id'] == p['id']:
|
||||
if p_type['quality_id']:
|
||||
new_profile['finish'].append(p_type['finish'])
|
||||
new_profile['wait_for'].append(p_type['wait_for'])
|
||||
new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])
|
||||
|
||||
if len(new_profile['qualities']) > 0:
|
||||
new_profile.update(db.insert(new_profile))
|
||||
profile_link[x] = new_profile.get('_id')
|
||||
else:
|
||||
log.error('Corrupt profile list for "%s", using default.', p.get('label'))
|
||||
|
||||
# Qualities
|
||||
log.info('Importing quality sizes')
|
||||
new_qualities = db.all('quality', with_doc = True)
|
||||
new_qualities_by_identifier = {}
|
||||
for x in new_qualities:
|
||||
new_qualities_by_identifier[x['doc']['identifier']] = x['_id']
|
||||
|
||||
qualities = migrate_data['quality']
|
||||
quality_link = {}
|
||||
for x in qualities:
|
||||
q = qualities[x]
|
||||
q_id = new_qualities_by_identifier[q.get('identifier')]
|
||||
|
||||
quality = db.get('id', q_id)
|
||||
quality['order'] = q.get('order')
|
||||
quality['size_min'] = tryInt(q.get('size_min'))
|
||||
quality['size_max'] = tryInt(q.get('size_max'))
|
||||
db.update(quality)
|
||||
|
||||
quality_link[x] = quality
|
||||
|
||||
# Titles
|
||||
titles = migrate_data['librarytitle']
|
||||
titles_by_library = {}
|
||||
for x in titles:
|
||||
title = titles[x]
|
||||
if title.get('default'):
|
||||
titles_by_library[title.get('libraries_id')] = title.get('title')
|
||||
|
||||
# Releases
|
||||
releaseinfos = migrate_data['releaseinfo']
|
||||
for x in releaseinfos:
|
||||
info = releaseinfos[x]
|
||||
|
||||
# Skip if release doesn't exist for this info
|
||||
if not migrate_data['release'].get(info.get('release_id')):
|
||||
continue
|
||||
|
||||
if not migrate_data['release'][info.get('release_id')].get('info'):
|
||||
migrate_data['release'][info.get('release_id')]['info'] = {}
|
||||
|
||||
migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')
|
||||
|
||||
releases = migrate_data['release']
|
||||
releases_by_media = {}
|
||||
for x in releases:
|
||||
release = releases[x]
|
||||
if not releases_by_media.get(release.get('movie_id')):
|
||||
releases_by_media[release.get('movie_id')] = []
|
||||
|
||||
releases_by_media[release.get('movie_id')].append(release)
|
||||
|
||||
# Type ids
|
||||
types = migrate_data['filetype']
|
||||
type_by_id = {}
|
||||
for t in types:
|
||||
type = types[t]
|
||||
type_by_id[type.get('id')] = type
|
||||
|
||||
# Media
|
||||
log.info('Importing %s media items', len(migrate_data['movie']))
|
||||
statuses = migrate_data['status']
|
||||
libraries = migrate_data['library']
|
||||
library_files = migrate_data['library_files__file_library']
|
||||
releases_files = migrate_data['release_files__file_release']
|
||||
all_files = migrate_data['file']
|
||||
poster_type = migrate_data['filetype']['poster']
|
||||
medias = migrate_data['movie']
|
||||
for x in medias:
|
||||
m = medias[x]
|
||||
|
||||
status = statuses.get(m['status_id']).get('identifier')
|
||||
l = libraries.get(m['library_id'])
|
||||
|
||||
# Only migrate wanted movies, Skip if no identifier present
|
||||
if not l or not getImdb(l.get('identifier')): continue
|
||||
|
||||
profile_id = profile_link.get(m['profile_id'])
|
||||
category_id = category_link.get(m['category_id'])
|
||||
title = titles_by_library.get(m['library_id'])
|
||||
releases = releases_by_media.get(x, [])
|
||||
info = json.loads(l.get('info', ''))
|
||||
|
||||
files = library_files.get(m['library_id'], [])
|
||||
if not isinstance(files, list):
|
||||
files = [files]
|
||||
|
||||
added_media = fireEvent('movie.add', {
|
||||
'info': info,
|
||||
'identifier': l.get('identifier'),
|
||||
'profile_id': profile_id,
|
||||
'category_id': category_id,
|
||||
'title': title
|
||||
}, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)
|
||||
|
||||
if not added_media:
|
||||
log.error('Failed adding media %s: %s', (l.get('identifier'), info))
|
||||
continue
|
||||
|
||||
added_media['files'] = added_media.get('files', {})
|
||||
for f in files:
|
||||
ffile = all_files[f.get('file_id')]
|
||||
|
||||
# Only migrate posters
|
||||
if ffile.get('type_id') == poster_type.get('id'):
|
||||
if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
|
||||
added_media['files']['image_poster'] = [ffile.get('path')]
|
||||
break
|
||||
|
||||
if 'image_poster' in added_media['files']:
|
||||
db.update(added_media)
|
||||
|
||||
for rel in releases:
|
||||
|
||||
empty_info = False
|
||||
if not rel.get('info'):
|
||||
empty_info = True
|
||||
rel['info'] = {}
|
||||
|
||||
quality = quality_link.get(rel.get('quality_id'))
|
||||
if not quality:
|
||||
continue
|
||||
|
||||
release_status = statuses.get(rel.get('status_id')).get('identifier')
|
||||
|
||||
if rel['info'].get('download_id'):
|
||||
status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
|
||||
rel['info']['download_info'] = {
|
||||
'id': rel['info'].get('download_id'),
|
||||
'downloader': rel['info'].get('download_downloader'),
|
||||
'status_support': status_support,
|
||||
}
|
||||
|
||||
types = migrate_data['profiletype']
|
||||
for profile_type in types:
|
||||
p_type = types[profile_type]
|
||||
if types[profile_type]['profile_id'] == p['id']:
|
||||
if p_type['quality_id']:
|
||||
new_profile['finish'].append(p_type['finish'])
|
||||
new_profile['wait_for'].append(p_type['wait_for'])
|
||||
new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])
|
||||
# Add status to keys
|
||||
rel['info']['status'] = release_status
|
||||
if not empty_info:
|
||||
fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
|
||||
else:
|
||||
release = {
|
||||
'_t': 'release',
|
||||
'identifier': rel.get('identifier'),
|
||||
'media_id': added_media.get('_id'),
|
||||
'quality': quality.get('identifier'),
|
||||
'status': release_status,
|
||||
'last_edit': int(time.time()),
|
||||
'files': {}
|
||||
}
|
||||
|
||||
if len(new_profile['qualities']) > 0:
|
||||
new_profile.update(db.insert(new_profile))
|
||||
profile_link[x] = new_profile.get('_id')
|
||||
else:
|
||||
log.error('Corrupt profile list for "%s", using default.', p.get('label'))
|
||||
# Add downloader info if provided
|
||||
try:
|
||||
release['download_info'] = rel['info']['download_info']
|
||||
del rel['download_info']
|
||||
except:
|
||||
pass
|
||||
|
||||
# Qualities
|
||||
log.info('Importing quality sizes')
|
||||
new_qualities = db.all('quality', with_doc = True)
|
||||
new_qualities_by_identifier = {}
|
||||
for x in new_qualities:
|
||||
new_qualities_by_identifier[x['doc']['identifier']] = x['_id']
|
||||
# Add files
|
||||
release_files = releases_files.get(rel.get('id'), [])
|
||||
if not isinstance(release_files, list):
|
||||
release_files = [release_files]
|
||||
|
||||
qualities = migrate_data['quality']
|
||||
quality_link = {}
|
||||
for x in qualities:
|
||||
q = qualities[x]
|
||||
q_id = new_qualities_by_identifier[q.get('identifier')]
|
||||
|
||||
quality = db.get('id', q_id)
|
||||
quality['order'] = q.get('order')
|
||||
quality['size_min'] = tryInt(q.get('size_min'))
|
||||
quality['size_max'] = tryInt(q.get('size_max'))
|
||||
db.update(quality)
|
||||
|
||||
quality_link[x] = quality
|
||||
|
||||
# Titles
|
||||
titles = migrate_data['librarytitle']
|
||||
titles_by_library = {}
|
||||
for x in titles:
|
||||
title = titles[x]
|
||||
if title.get('default'):
|
||||
titles_by_library[title.get('libraries_id')] = title.get('title')
|
||||
|
||||
# Releases
|
||||
releaseinfos = migrate_data['releaseinfo']
|
||||
for x in releaseinfos:
|
||||
info = releaseinfos[x]
|
||||
|
||||
# Skip if release doesn't exist for this info
|
||||
if not migrate_data['release'].get(info.get('release_id')):
|
||||
continue
|
||||
|
||||
if not migrate_data['release'][info.get('release_id')].get('info'):
|
||||
migrate_data['release'][info.get('release_id')]['info'] = {}
|
||||
|
||||
migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')
|
||||
|
||||
releases = migrate_data['release']
|
||||
releases_by_media = {}
|
||||
for x in releases:
|
||||
release = releases[x]
|
||||
if not releases_by_media.get(release.get('movie_id')):
|
||||
releases_by_media[release.get('movie_id')] = []
|
||||
|
||||
releases_by_media[release.get('movie_id')].append(release)
|
||||
|
||||
# Type ids
|
||||
types = migrate_data['filetype']
|
||||
type_by_id = {}
|
||||
for t in types:
|
||||
type = types[t]
|
||||
type_by_id[type.get('id')] = type
|
||||
|
||||
# Media
|
||||
log.info('Importing %s media items', len(migrate_data['movie']))
|
||||
statuses = migrate_data['status']
|
||||
libraries = migrate_data['library']
|
||||
library_files = migrate_data['library_files__file_library']
|
||||
releases_files = migrate_data['release_files__file_release']
|
||||
all_files = migrate_data['file']
|
||||
poster_type = migrate_data['filetype']['poster']
|
||||
medias = migrate_data['movie']
|
||||
for x in medias:
|
||||
m = medias[x]
|
||||
|
||||
status = statuses.get(m['status_id']).get('identifier')
|
||||
l = libraries.get(m['library_id'])
|
||||
|
||||
# Only migrate wanted movies, Skip if no identifier present
|
||||
if not l or not getImdb(l.get('identifier')): continue
|
||||
|
||||
profile_id = profile_link.get(m['profile_id'])
|
||||
category_id = category_link.get(m['category_id'])
|
||||
title = titles_by_library.get(m['library_id'])
|
||||
releases = releases_by_media.get(x, [])
|
||||
info = json.loads(l.get('info', ''))
|
||||
|
||||
files = library_files.get(m['library_id'], [])
|
||||
if not isinstance(files, list):
|
||||
files = [files]
|
||||
|
||||
added_media = fireEvent('movie.add', {
|
||||
'info': info,
|
||||
'identifier': l.get('identifier'),
|
||||
'profile_id': profile_id,
|
||||
'category_id': category_id,
|
||||
'title': title
|
||||
}, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)
|
||||
|
||||
if not added_media:
|
||||
log.error('Failed adding media %s: %s', (l.get('identifier'), info))
|
||||
continue
|
||||
|
||||
added_media['files'] = added_media.get('files', {})
|
||||
for f in files:
|
||||
ffile = all_files[f.get('file_id')]
|
||||
|
||||
# Only migrate posters
|
||||
if ffile.get('type_id') == poster_type.get('id'):
|
||||
if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
|
||||
added_media['files']['image_poster'] = [ffile.get('path')]
|
||||
break
|
||||
|
||||
if 'image_poster' in added_media['files']:
|
||||
db.update(added_media)
|
||||
|
||||
for rel in releases:
|
||||
|
||||
empty_info = False
|
||||
if not rel.get('info'):
|
||||
empty_info = True
|
||||
rel['info'] = {}
|
||||
|
||||
quality = quality_link.get(rel.get('quality_id'))
|
||||
if not quality:
|
||||
if len(release_files) == 0:
|
||||
continue
|
||||
|
||||
release_status = statuses.get(rel.get('status_id')).get('identifier')
|
||||
for f in release_files:
|
||||
rfile = all_files[f.get('file_id')]
|
||||
file_type = type_by_id.get(rfile.get('type_id')).get('identifier')
|
||||
|
||||
if rel['info'].get('download_id'):
|
||||
status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
|
||||
rel['info']['download_info'] = {
|
||||
'id': rel['info'].get('download_id'),
|
||||
'downloader': rel['info'].get('download_downloader'),
|
||||
'status_support': status_support,
|
||||
}
|
||||
if not release['files'].get(file_type):
|
||||
release['files'][file_type] = []
|
||||
|
||||
# Add status to keys
|
||||
rel['info']['status'] = release_status
|
||||
if not empty_info:
|
||||
fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
|
||||
else:
|
||||
release = {
|
||||
'_t': 'release',
|
||||
'identifier': rel.get('identifier'),
|
||||
'media_id': added_media.get('_id'),
|
||||
'quality': quality.get('identifier'),
|
||||
'status': release_status,
|
||||
'last_edit': int(time.time()),
|
||||
'files': {}
|
||||
}
|
||||
release['files'][file_type].append(rfile.get('path'))
|
||||
|
||||
# Add downloader info if provided
|
||||
try:
|
||||
release['download_info'] = rel['info']['download_info']
|
||||
del rel['download_info']
|
||||
except:
|
||||
pass
|
||||
|
||||
# Add files
|
||||
release_files = releases_files.get(rel.get('id'), [])
|
||||
if not isinstance(release_files, list):
|
||||
release_files = [release_files]
|
||||
|
||||
if len(release_files) == 0:
|
||||
continue
|
||||
|
||||
for f in release_files:
|
||||
rfile = all_files.get(f.get('file_id'))
|
||||
if not rfile:
|
||||
continue
|
||||
|
||||
file_type = type_by_id.get(rfile.get('type_id')).get('identifier')
|
||||
|
||||
if not release['files'].get(file_type):
|
||||
release['files'][file_type] = []
|
||||
|
||||
release['files'][file_type].append(rfile.get('path'))
|
||||
|
||||
try:
|
||||
rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
|
||||
rls.update(release)
|
||||
db.update(rls)
|
||||
except:
|
||||
db.insert(release)
|
||||
|
||||
log.info('Total migration took %s', time.time() - migrate_start)
|
||||
log.info('=' * 30)
|
||||
|
||||
rename_old = True
|
||||
|
||||
except OperationalError:
|
||||
log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
|
||||
|
||||
rename_old = True
|
||||
except:
|
||||
log.error('Migration failed: %s', traceback.format_exc())
|
||||
try:
|
||||
rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
|
||||
rls.update(release)
|
||||
db.update(rls)
|
||||
except:
|
||||
db.insert(release)
|
||||
|
||||
log.info('Total migration took %s', time.time() - migrate_start)
|
||||
log.info('=' * 30)
|
||||
|
||||
# rename old database
|
||||
if rename_old:
|
||||
random = randomString()
|
||||
log.info('Renaming old database to %s ', '%s.%s_old' % (old_db, random))
|
||||
os.rename(old_db, '%s.%s_old' % (old_db, random))
|
||||
log.info('Renaming old database to %s ', old_db + '.old')
|
||||
os.rename(old_db, old_db + '.old')
|
||||
|
||||
if os.path.isfile(old_db + '-wal'):
|
||||
os.rename(old_db + '-wal', '%s-wal.%s_old' % (old_db, random))
|
||||
if os.path.isfile(old_db + '-shm'):
|
||||
os.rename(old_db + '-shm', '%s-shm.%s_old' % (old_db, random))
|
||||
if os.path.isfile(old_db + '-wal'):
|
||||
os.rename(old_db + '-wal', old_db + '-wal.old')
|
||||
if os.path.isfile(old_db + '-shm'):
|
||||
os.rename(old_db + '-shm', old_db + '-shm.old')
|
||||
|
||||
@@ -27,11 +27,6 @@ class Deluge(DownloaderBase):
|
||||
def connect(self, reconnect = False):
|
||||
# Load host from config and split out port.
|
||||
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||
|
||||
# Force host assignment
|
||||
if len(host) == 1:
|
||||
host.append(80)
|
||||
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
from base64 import b64encode
|
||||
import os
|
||||
from urllib2 import URLError
|
||||
from uuid import uuid4
|
||||
import hashlib
|
||||
import httplib
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
import ssl
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from requests import HTTPError
|
||||
import urllib2
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, sp
|
||||
@@ -29,17 +35,13 @@ class NZBVortex(DownloaderBase):
|
||||
|
||||
# Send the nzb
|
||||
try:
|
||||
nzb_filename = self.createFileName(data, filedata, media, unique_tag = True)
|
||||
response = self.call('nzb/add', files = {'file': (nzb_filename, filedata, 'application/octet-stream')}, parameters = {
|
||||
'name': nzb_filename,
|
||||
'groupname': self.conf('group')
|
||||
})
|
||||
nzb_filename = self.createFileName(data, filedata, media)
|
||||
self.call('nzb/add', files = {'file': (nzb_filename, filedata)})
|
||||
|
||||
if response and response.get('result', '').lower() == 'ok':
|
||||
return self.downloadReturnId(nzb_filename)
|
||||
|
||||
log.error('Something went wrong sending the NZB file. Response: %s', response)
|
||||
return False
|
||||
time.sleep(10)
|
||||
raw_statuses = self.call('nzb')
|
||||
nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if os.path.basename(nzb['nzbFileName']) == nzb_filename][0]
|
||||
return self.downloadReturnId(nzb_id)
|
||||
except:
|
||||
log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
|
||||
return False
|
||||
@@ -58,8 +60,7 @@ class NZBVortex(DownloaderBase):
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
for nzb in raw_statuses.get('nzbs', []):
|
||||
nzb_id = os.path.basename(nzb['nzbFileName'])
|
||||
if nzb_id in ids:
|
||||
if nzb['id'] in ids:
|
||||
|
||||
# Check status
|
||||
status = 'busy'
|
||||
@@ -69,8 +70,7 @@ class NZBVortex(DownloaderBase):
|
||||
status = 'failed'
|
||||
|
||||
release_downloads.append({
|
||||
'temp_id': nzb['id'],
|
||||
'id': nzb_id,
|
||||
'id': nzb['id'],
|
||||
'name': nzb['uiTitle'],
|
||||
'status': status,
|
||||
'original_status': nzb['state'],
|
||||
@@ -85,7 +85,7 @@ class NZBVortex(DownloaderBase):
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
|
||||
try:
|
||||
self.call('nzb/%s/cancel' % release_download['temp_id'])
|
||||
self.call('nzb/%s/cancel' % release_download['id'])
|
||||
except:
|
||||
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||
return False
|
||||
@@ -114,7 +114,7 @@ class NZBVortex(DownloaderBase):
|
||||
log.error('Login failed, please check you api-key')
|
||||
return False
|
||||
|
||||
def call(self, call, parameters = None, is_repeat = False, auth = True, *args, **kwargs):
|
||||
def call(self, call, parameters = None, repeat = False, auth = True, *args, **kwargs):
|
||||
|
||||
# Login first
|
||||
if not parameters: parameters = {}
|
||||
@@ -127,20 +127,19 @@ class NZBVortex(DownloaderBase):
|
||||
|
||||
params = tryUrlencode(parameters)
|
||||
|
||||
url = cleanHost(self.conf('host')) + 'api/' + call
|
||||
url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api/' + call
|
||||
|
||||
try:
|
||||
data = self.getJsonData('%s%s' % (url, '?' + params if params else ''), *args, cache_timeout = 0, show_error = False, **kwargs)
|
||||
data = self.urlopen('%s?%s' % (url, params), *args, **kwargs)
|
||||
|
||||
if data:
|
||||
return data
|
||||
except HTTPError as e:
|
||||
sc = e.response.status_code
|
||||
if sc == 403:
|
||||
return json.loads(data)
|
||||
except URLError as e:
|
||||
if hasattr(e, 'code') and e.code == 403:
|
||||
# Try login and do again
|
||||
if not is_repeat:
|
||||
if not repeat:
|
||||
self.login()
|
||||
return self.call(call, parameters = parameters, is_repeat = True, **kwargs)
|
||||
return self.call(call, parameters = parameters, repeat = True, **kwargs)
|
||||
|
||||
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||
except:
|
||||
@@ -152,12 +151,13 @@ class NZBVortex(DownloaderBase):
|
||||
|
||||
if not self.api_level:
|
||||
|
||||
url = cleanHost(self.conf('host')) + 'api/app/apilevel'
|
||||
|
||||
try:
|
||||
data = self.call('app/apilevel', auth = False)
|
||||
self.api_level = float(data.get('apilevel'))
|
||||
except HTTPError as e:
|
||||
sc = e.response.status_code
|
||||
if sc == 403:
|
||||
data = self.urlopen(url, show_error = False)
|
||||
self.api_level = float(json.loads(data).get('apilevel'))
|
||||
except URLError as e:
|
||||
if hasattr(e, 'code') and e.code == 403:
|
||||
log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
|
||||
else:
|
||||
log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))
|
||||
@@ -169,6 +169,29 @@ class NZBVortex(DownloaderBase):
|
||||
return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()
|
||||
|
||||
|
||||
class HTTPSConnection(httplib.HTTPSConnection):
|
||||
def __init__(self, *args, **kwargs):
|
||||
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
|
||||
|
||||
def connect(self):
|
||||
sock = socket.create_connection((self.host, self.port), self.timeout)
|
||||
if sys.version_info < (2, 6, 7):
|
||||
if hasattr(self, '_tunnel_host'):
|
||||
self.sock = sock
|
||||
self._tunnel()
|
||||
else:
|
||||
if self._tunnel_host:
|
||||
self.sock = sock
|
||||
self._tunnel()
|
||||
|
||||
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version = ssl.PROTOCOL_TLSv1)
|
||||
|
||||
|
||||
class HTTPSHandler(urllib2.HTTPSHandler):
|
||||
def https_open(self, req):
|
||||
return self.do_open(HTTPSConnection, req)
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'nzbvortex',
|
||||
'groups': [
|
||||
@@ -188,18 +211,20 @@ config = [{
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'https://localhost:4321',
|
||||
'description': 'Hostname with port. Usually <strong>https://localhost:4321</strong>',
|
||||
'default': 'localhost:4321',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:4321</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'ssl',
|
||||
'default': 1,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'api_key',
|
||||
'label': 'Api Key',
|
||||
},
|
||||
{
|
||||
'name': 'group',
|
||||
'label': 'Group',
|
||||
'description': 'The group CP places the nzb in. Make sure to create it in NZBVortex.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': False,
|
||||
|
||||
@@ -23,14 +23,16 @@ class Transmission(DownloaderBase):
|
||||
log = CPLog(__name__)
|
||||
trpc = None
|
||||
|
||||
def connect(self):
|
||||
def connect(self, reconnect = False):
|
||||
# Load host from config and split out port.
|
||||
host = cleanHost(self.conf('host')).rstrip('/').rsplit(':', 1)
|
||||
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
|
||||
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))
|
||||
if not self.trpc or reconnect:
|
||||
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))
|
||||
|
||||
return self.trpc
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
@@ -78,17 +80,15 @@ class Transmission(DownloaderBase):
|
||||
log.error('Failed sending torrent to Transmission')
|
||||
return False
|
||||
|
||||
data = remote_torrent.get('torrent-added') or remote_torrent.get('torrent-duplicate')
|
||||
|
||||
# Change settings of added torrents
|
||||
if torrent_params:
|
||||
self.trpc.set_torrent(data['hashString'], torrent_params)
|
||||
self.trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)
|
||||
|
||||
log.info('Torrent sent to Transmission successfully.')
|
||||
return self.downloadReturnId(data['hashString'])
|
||||
return self.downloadReturnId(remote_torrent['torrent-added']['hashString'])
|
||||
|
||||
def test(self):
|
||||
if self.connect() and self.trpc.get_session():
|
||||
if self.connect(True) and self.trpc.get_session():
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -164,11 +164,11 @@ class Transmission(DownloaderBase):
|
||||
class TransmissionRPC(object):
|
||||
|
||||
"""TransmissionRPC lite library"""
|
||||
def __init__(self, host = 'http://localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
|
||||
def __init__(self, host = 'localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
|
||||
|
||||
super(TransmissionRPC, self).__init__()
|
||||
|
||||
self.url = host + ':' + str(port) + '/' + rpc_url + '/rpc'
|
||||
self.url = 'http://' + host + ':' + str(port) + '/' + rpc_url + '/rpc'
|
||||
self.tag = 0
|
||||
self.session_id = 0
|
||||
self.session = {}
|
||||
@@ -276,8 +276,8 @@ config = [{
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'http://localhost:9091',
|
||||
'description': 'Hostname with port. Usually <strong>http://localhost:9091</strong>',
|
||||
'default': 'localhost:9091',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:9091</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'rpc_url',
|
||||
|
||||
@@ -90,7 +90,7 @@ def fireEvent(name, *args, **kwargs):
|
||||
|
||||
else:
|
||||
|
||||
e = Event(name = name, threads = 10, exc_info = True, traceback = True)
|
||||
e = Event(name = name, threads = 10, exc_info = True, traceback = True, lock = threading.RLock())
|
||||
|
||||
for event in events[name]:
|
||||
e.handle(event['handler'], priority = event['priority'])
|
||||
|
||||
@@ -5,7 +5,6 @@ import re
|
||||
import traceback
|
||||
import unicodedata
|
||||
|
||||
from chardet import detect
|
||||
from couchpotato.core.logger import CPLog
|
||||
import six
|
||||
|
||||
@@ -36,9 +35,6 @@ def toUnicode(original, *args):
|
||||
return six.text_type(original, *args)
|
||||
except:
|
||||
try:
|
||||
detected = detect(original)
|
||||
if detected.get('encoding') == 'utf-8':
|
||||
return original.decode('utf-8')
|
||||
return ek(original, *args)
|
||||
except:
|
||||
raise
|
||||
@@ -56,10 +52,7 @@ def ss(original, *args):
|
||||
return u_original.encode(Env.get('encoding'))
|
||||
except Exception as e:
|
||||
log.debug('Failed ss encoding char, force UTF8: %s', e)
|
||||
try:
|
||||
return u_original.encode(Env.get('encoding'), 'replace')
|
||||
except:
|
||||
return u_original.encode('utf-8', 'replace')
|
||||
return u_original.encode('UTF-8')
|
||||
|
||||
|
||||
def sp(path, *args):
|
||||
|
||||
34
couchpotato/core/helpers/variable.py
Executable file → Normal file
34
couchpotato/core/helpers/variable.py
Executable file → Normal file
@@ -41,11 +41,11 @@ def symlink(src, dst):
|
||||
def getUserDir():
|
||||
try:
|
||||
import pwd
|
||||
os.environ['HOME'] = sp(pwd.getpwuid(os.geteuid()).pw_dir)
|
||||
os.environ['HOME'] = pwd.getpwuid(os.geteuid()).pw_dir
|
||||
except:
|
||||
pass
|
||||
|
||||
return sp(os.path.expanduser('~'))
|
||||
return os.path.expanduser('~')
|
||||
|
||||
|
||||
def getDownloadDir():
|
||||
@@ -380,33 +380,3 @@ def getFreeSpace(directories):
|
||||
free_space[folder] = size
|
||||
|
||||
return free_space
|
||||
|
||||
|
||||
def getSize(paths):
|
||||
|
||||
single = not isinstance(paths, (tuple, list))
|
||||
if single:
|
||||
paths = [paths]
|
||||
|
||||
total_size = 0
|
||||
for path in paths:
|
||||
path = sp(path)
|
||||
|
||||
if os.path.isdir(path):
|
||||
total_size = 0
|
||||
for dirpath, _, filenames in os.walk(path):
|
||||
for f in filenames:
|
||||
total_size += os.path.getsize(sp(os.path.join(dirpath, f)))
|
||||
|
||||
elif os.path.isfile(path):
|
||||
total_size += os.path.getsize(path)
|
||||
|
||||
return total_size / 1048576 # MB
|
||||
|
||||
|
||||
def find(func, iterable):
|
||||
for item in iterable:
|
||||
if func(item):
|
||||
return item
|
||||
|
||||
return None
|
||||
|
||||
@@ -59,14 +59,15 @@ class CPLog(object):
|
||||
msg = ss(msg)
|
||||
|
||||
try:
|
||||
if isinstance(replace_tuple, tuple):
|
||||
msg = msg % tuple([ss(x) if not isinstance(x, (int, float)) else x for x in list(replace_tuple)])
|
||||
elif isinstance(replace_tuple, dict):
|
||||
msg = msg % dict((k, ss(v) if not isinstance(v, (int, float)) else v) for k, v in replace_tuple.iteritems())
|
||||
else:
|
||||
msg = msg % ss(replace_tuple)
|
||||
except Exception as e:
|
||||
self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, e))
|
||||
msg = msg % replace_tuple
|
||||
except:
|
||||
try:
|
||||
if isinstance(replace_tuple, tuple):
|
||||
msg = msg % tuple([ss(x) for x in list(replace_tuple)])
|
||||
else:
|
||||
msg = msg % ss(replace_tuple)
|
||||
except Exception as e:
|
||||
self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, e))
|
||||
|
||||
self.setup()
|
||||
if not self.is_develop:
|
||||
|
||||
23
couchpotato/core/media/__init__.py
Executable file → Normal file
23
couchpotato/core/media/__init__.py
Executable file → Normal file
@@ -26,9 +26,9 @@ class MediaBase(Plugin):
|
||||
def onComplete():
|
||||
try:
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if media:
|
||||
event_name = '%s.searcher.single' % media.get('type')
|
||||
fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True)
|
||||
event_name = '%s.searcher.single' % media.get('type')
|
||||
|
||||
fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True)
|
||||
except:
|
||||
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||
|
||||
@@ -39,9 +39,9 @@ class MediaBase(Plugin):
|
||||
def notifyFront():
|
||||
try:
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if media:
|
||||
event_name = '%s.update' % media.get('type')
|
||||
fireEvent('notify.frontend', type = event_name, data = media)
|
||||
event_name = '%s.update' % media.get('type')
|
||||
|
||||
fireEvent('notify.frontend', type = event_name, data = media)
|
||||
except:
|
||||
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||
|
||||
@@ -65,13 +65,10 @@ class MediaBase(Plugin):
|
||||
|
||||
return def_title or 'UNKNOWN'
|
||||
|
||||
def getPoster(self, media, image_urls):
|
||||
if 'files' not in media:
|
||||
media['files'] = {}
|
||||
|
||||
existing_files = media['files']
|
||||
|
||||
def getPoster(self, image_urls, existing_files):
|
||||
image_type = 'poster'
|
||||
|
||||
# Remove non-existing files
|
||||
file_type = 'image_%s' % image_type
|
||||
|
||||
# Make existing unique
|
||||
@@ -95,7 +92,7 @@ class MediaBase(Plugin):
|
||||
if file_type not in existing_files or len(existing_files.get(file_type, [])) == 0:
|
||||
file_path = fireEvent('file.download', url = image, single = True)
|
||||
if file_path:
|
||||
existing_files[file_type] = [toUnicode(file_path)]
|
||||
existing_files[file_type] = [file_path]
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
110
couchpotato/core/media/_base/library/main.py
Executable file → Normal file
110
couchpotato/core/media/_base/library/main.py
Executable file → Normal file
@@ -1,47 +1,10 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Library(LibraryBase):
|
||||
def __init__(self):
|
||||
addEvent('library.title', self.title)
|
||||
addEvent('library.related', self.related)
|
||||
addEvent('library.tree', self.tree)
|
||||
|
||||
addEvent('library.root', self.root)
|
||||
|
||||
addApiView('library.query', self.queryView)
|
||||
addApiView('library.related', self.relatedView)
|
||||
addApiView('library.tree', self.treeView)
|
||||
|
||||
def queryView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.query', media, single = True)
|
||||
}
|
||||
|
||||
def relatedView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.related', media, single = True)
|
||||
}
|
||||
|
||||
def treeView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.tree', media, single = True)
|
||||
}
|
||||
|
||||
def title(self, library):
|
||||
return fireEvent(
|
||||
@@ -53,76 +16,3 @@ class Library(LibraryBase):
|
||||
include_identifier = False,
|
||||
single = True
|
||||
)
|
||||
|
||||
def related(self, media):
|
||||
result = {self.key(media['type']): media}
|
||||
|
||||
db = get_db()
|
||||
cur = media
|
||||
|
||||
while cur and cur.get('parent_id'):
|
||||
cur = db.get('id', cur['parent_id'])
|
||||
|
||||
result[self.key(cur['type'])] = cur
|
||||
|
||||
children = db.get_many('media_children', media['_id'], with_doc = True)
|
||||
|
||||
for item in children:
|
||||
key = self.key(item['doc']['type']) + 's'
|
||||
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
|
||||
result[key].append(item['doc'])
|
||||
|
||||
return result
|
||||
|
||||
def root(self, media):
|
||||
db = get_db()
|
||||
cur = media
|
||||
|
||||
while cur and cur.get('parent_id'):
|
||||
cur = db.get('id', cur['parent_id'])
|
||||
|
||||
return cur
|
||||
|
||||
def tree(self, media = None, media_id = None):
|
||||
db = get_db()
|
||||
|
||||
if media:
|
||||
result = media
|
||||
elif media_id:
|
||||
result = db.get('id', media_id, with_doc = True)
|
||||
else:
|
||||
return None
|
||||
|
||||
# Find children
|
||||
items = db.get_many('media_children', result['_id'], with_doc = True)
|
||||
keys = []
|
||||
|
||||
# Build children arrays
|
||||
for item in items:
|
||||
key = self.key(item['doc']['type']) + 's'
|
||||
|
||||
if key not in result:
|
||||
result[key] = {}
|
||||
elif type(result[key]) is not dict:
|
||||
result[key] = {}
|
||||
|
||||
if key not in keys:
|
||||
keys.append(key)
|
||||
|
||||
result[key][item['_id']] = fireEvent('library.tree', item['doc'], single = True)
|
||||
|
||||
# Unique children
|
||||
for key in keys:
|
||||
result[key] = result[key].values()
|
||||
|
||||
# Include releases
|
||||
result['releases'] = fireEvent('release.for_media', result['_id'], single = True)
|
||||
|
||||
return result
|
||||
|
||||
def key(self, media_type):
|
||||
parts = media_type.split('.')
|
||||
return parts[-1]
|
||||
|
||||
@@ -40,7 +40,7 @@ class Matcher(MatcherBase):
|
||||
return False
|
||||
|
||||
def correctTitle(self, chain, media):
|
||||
root = fireEvent('library.root', media, single = True)
|
||||
root_library = media['library']['root_library']
|
||||
|
||||
if 'show_name' not in chain.info or not len(chain.info['show_name']):
|
||||
log.info('Wrong: missing show name in parsed result')
|
||||
@@ -50,10 +50,10 @@ class Matcher(MatcherBase):
|
||||
chain_words = [x.lower() for x in chain.info['show_name']]
|
||||
|
||||
# Build a list of possible titles of the media we are searching for
|
||||
titles = root['info']['titles']
|
||||
titles = root_library['info']['titles']
|
||||
|
||||
# Add year suffix titles (will result in ['<name_one>', '<name_one> <suffix_one>', '<name_two>', ...])
|
||||
suffixes = [None, root['info']['year']]
|
||||
suffixes = [None, root_library['info']['year']]
|
||||
|
||||
titles = [
|
||||
title + ((' %s' % suffix) if suffix else '')
|
||||
|
||||
156
couchpotato/core/media/_base/media/main.py
Executable file → Normal file
156
couchpotato/core/media/_base/media/main.py
Executable file → Normal file
@@ -1,9 +1,10 @@
|
||||
from datetime import timedelta
|
||||
from operator import itemgetter
|
||||
import time
|
||||
import traceback
|
||||
from string import ascii_lowercase
|
||||
|
||||
from CodernityDB.database import RecordNotFound, RecordDeleted
|
||||
from CodernityDB.database import RecordNotFound
|
||||
from couchpotato import tryInt, get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
||||
@@ -43,15 +44,15 @@ class MediaPlugin(MediaBase):
|
||||
'desc': 'List media',
|
||||
'params': {
|
||||
'type': {'type': 'string', 'desc': 'Media type to filter on.'},
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter media by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter media by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the media list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all media starting with the letter "a"'},
|
||||
'search': {'desc': 'Search media title'},
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter movie by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter movie by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the movie list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all movies starting with the letter "a"'},
|
||||
'search': {'desc': 'Search movie title'},
|
||||
},
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'empty': bool, any media returned or not,
|
||||
'empty': bool, any movies returned or not,
|
||||
'media': array, media found,
|
||||
}"""}
|
||||
})
|
||||
@@ -77,7 +78,6 @@ class MediaPlugin(MediaBase):
|
||||
addEvent('app.load', self.addSingleListView, priority = 100)
|
||||
addEvent('app.load', self.addSingleCharView, priority = 100)
|
||||
addEvent('app.load', self.addSingleDeleteView, priority = 100)
|
||||
addEvent('app.load', self.cleanupFaults)
|
||||
|
||||
addEvent('media.get', self.get)
|
||||
addEvent('media.with_status', self.withStatus)
|
||||
@@ -88,18 +88,6 @@ class MediaPlugin(MediaBase):
|
||||
addEvent('media.tag', self.tag)
|
||||
addEvent('media.untag', self.unTag)
|
||||
|
||||
# Wrongly tagged media files
|
||||
def cleanupFaults(self):
|
||||
medias = fireEvent('media.with_status', 'ignored', single = True) or []
|
||||
|
||||
db = get_db()
|
||||
for media in medias:
|
||||
try:
|
||||
media['status'] = 'done'
|
||||
db.update(media)
|
||||
except:
|
||||
pass
|
||||
|
||||
def refresh(self, id = '', **kwargs):
|
||||
handlers = []
|
||||
ids = splitString(id)
|
||||
@@ -121,7 +109,7 @@ class MediaPlugin(MediaBase):
|
||||
|
||||
try:
|
||||
media = get_db().get('id', media_id)
|
||||
event = '%s.update' % media.get('type')
|
||||
event = '%s.update_info' % media.get('type')
|
||||
|
||||
def handler():
|
||||
fireEvent(event, media_id = media_id, on_complete = self.createOnComplete(media_id))
|
||||
@@ -158,7 +146,7 @@ class MediaPlugin(MediaBase):
|
||||
|
||||
return media
|
||||
|
||||
except (RecordNotFound, RecordDeleted):
|
||||
except RecordNotFound:
|
||||
log.error('Media with id "%s" not found', media_id)
|
||||
except:
|
||||
raise
|
||||
@@ -172,13 +160,10 @@ class MediaPlugin(MediaBase):
|
||||
'media': media,
|
||||
}
|
||||
|
||||
def withStatus(self, status, types = None, with_doc = True):
|
||||
def withStatus(self, status, with_doc = True):
|
||||
|
||||
db = get_db()
|
||||
|
||||
if types and not isinstance(types, (list, tuple)):
|
||||
types = [types]
|
||||
|
||||
status = list(status if isinstance(status, (list, tuple)) else [status])
|
||||
|
||||
for s in status:
|
||||
@@ -186,29 +171,24 @@ class MediaPlugin(MediaBase):
|
||||
if with_doc:
|
||||
try:
|
||||
doc = db.get('id', ms['_id'])
|
||||
|
||||
if types and doc.get('type') not in types:
|
||||
continue
|
||||
|
||||
yield doc
|
||||
except (RecordDeleted, RecordNotFound):
|
||||
except RecordNotFound:
|
||||
log.debug('Record not found, skipping: %s', ms['_id'])
|
||||
except (ValueError, EOFError):
|
||||
fireEvent('database.delete_corrupted', ms.get('_id'), traceback_error = traceback.format_exc(0))
|
||||
else:
|
||||
yield ms
|
||||
|
||||
def withIdentifiers(self, identifiers, with_doc = False):
|
||||
|
||||
db = get_db()
|
||||
|
||||
for x in identifiers:
|
||||
try:
|
||||
return db.get('media', '%s-%s' % (x, identifiers[x]), with_doc = with_doc)
|
||||
media = db.get('media', '%s-%s' % (x, identifiers[x]), with_doc = with_doc)
|
||||
return media
|
||||
except:
|
||||
pass
|
||||
|
||||
log.debug('No media found with identifiers: %s', identifiers)
|
||||
return False
|
||||
|
||||
def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, with_tags = None, starts_with = None, search = None):
|
||||
|
||||
@@ -273,6 +253,10 @@ class MediaPlugin(MediaBase):
|
||||
for x in filter_by:
|
||||
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||
|
||||
total_count = len(media_ids)
|
||||
if total_count == 0:
|
||||
return 0, []
|
||||
|
||||
offset = 0
|
||||
limit = -1
|
||||
if limit_offset:
|
||||
@@ -291,10 +275,6 @@ class MediaPlugin(MediaBase):
|
||||
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
|
||||
# Skip if no media has been found
|
||||
if not media:
|
||||
continue
|
||||
|
||||
# Merge releases with movie dict
|
||||
medias.append(media)
|
||||
|
||||
@@ -302,30 +282,11 @@ class MediaPlugin(MediaBase):
|
||||
media_ids.remove(media_id)
|
||||
if len(media_ids) == 0 or len(medias) == limit: break
|
||||
|
||||
# Sort media by type and return result
|
||||
result = {}
|
||||
|
||||
# Create keys for media types we are listing
|
||||
if types:
|
||||
for media_type in types:
|
||||
result['%ss' % media_type] = []
|
||||
else:
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
result['%ss' % media_type] = []
|
||||
|
||||
total_count = len(medias)
|
||||
|
||||
if total_count == 0:
|
||||
return 0, result
|
||||
|
||||
for kind in medias:
|
||||
result['%ss' % kind['type']].append(kind)
|
||||
|
||||
return total_count, result
|
||||
return total_count, medias
|
||||
|
||||
def listView(self, **kwargs):
|
||||
|
||||
total_count, result = self.list(
|
||||
total_movies, movies = self.list(
|
||||
types = splitString(kwargs.get('type')),
|
||||
status = splitString(kwargs.get('status')),
|
||||
release_status = splitString(kwargs.get('release_status')),
|
||||
@@ -336,32 +297,19 @@ class MediaPlugin(MediaBase):
|
||||
search = kwargs.get('search')
|
||||
)
|
||||
|
||||
results = result
|
||||
results['success'] = True
|
||||
results['empty'] = len(result) == 0
|
||||
results['total'] = total_count
|
||||
|
||||
return results
|
||||
return {
|
||||
'success': True,
|
||||
'empty': len(movies) == 0,
|
||||
'total': total_movies,
|
||||
'movies': movies,
|
||||
}
|
||||
|
||||
def addSingleListView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempList = lambda *args, **kwargs : self.listView(type = media_type, **kwargs)
|
||||
addApiView('%s.list' % media_type, tempList, docs = {
|
||||
'desc': 'List media',
|
||||
'params': {
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the ' + media_type + ' list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all ' + media_type + 's starting with the letter "a"'},
|
||||
'search': {'desc': 'Search ' + media_type + ' title'},
|
||||
},
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'empty': bool, any """ + media_type + """s returned or not,
|
||||
'media': array, media found,
|
||||
}"""}
|
||||
})
|
||||
def tempList(*args, **kwargs):
|
||||
return self.listView(types = media_type, **kwargs)
|
||||
addApiView('%s.list' % media_type, tempList)
|
||||
|
||||
def availableChars(self, types = None, status = None, release_status = None):
|
||||
|
||||
@@ -407,7 +355,7 @@ class MediaPlugin(MediaBase):
|
||||
if x['_id'] in media_ids:
|
||||
chars.add(x['key'])
|
||||
|
||||
if len(chars) == 27:
|
||||
if len(chars) == 25:
|
||||
break
|
||||
|
||||
return list(chars)
|
||||
@@ -428,7 +376,8 @@ class MediaPlugin(MediaBase):
|
||||
def addSingleCharView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempChar = lambda *args, **kwargs : self.charView(type = media_type, **kwargs)
|
||||
def tempChar(*args, **kwargs):
|
||||
return self.charView(types = media_type, **kwargs)
|
||||
addApiView('%s.available_chars' % media_type, tempChar)
|
||||
|
||||
def delete(self, media_id, delete_from = None):
|
||||
@@ -466,16 +415,11 @@ class MediaPlugin(MediaBase):
|
||||
db.delete(release)
|
||||
total_deleted += 1
|
||||
|
||||
if (total_releases == total_deleted) or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'):
|
||||
if (total_releases == total_deleted and media['status'] != 'active') or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'):
|
||||
db.delete(media)
|
||||
deleted = True
|
||||
elif new_media_status:
|
||||
media['status'] = new_media_status
|
||||
|
||||
# Remove profile (no use for in manage)
|
||||
if new_media_status == 'done':
|
||||
media['profile_id'] = None
|
||||
|
||||
db.update(media)
|
||||
|
||||
fireEvent('media.untag', media['_id'], 'recent', single = True)
|
||||
@@ -502,16 +446,11 @@ class MediaPlugin(MediaBase):
|
||||
def addSingleDeleteView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempDelete = lambda *args, **kwargs : self.deleteView(type = media_type, **kwargs)
|
||||
addApiView('%s.delete' % media_type, tempDelete, docs = {
|
||||
'desc': 'Delete a ' + media_type + ' from the wanted list',
|
||||
'params': {
|
||||
'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
|
||||
'delete_from': {'desc': 'Delete ' + media_type + ' from this page', 'type': 'string: all (default), wanted, manage'},
|
||||
}
|
||||
})
|
||||
def tempDelete(*args, **kwargs):
|
||||
return self.deleteView(types = media_type, *args, **kwargs)
|
||||
addApiView('%s.delete' % media_type, tempDelete)
|
||||
|
||||
def restatus(self, media_id, tag_recent = True, allowed_restatus = None):
|
||||
def restatus(self, media_id):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
@@ -531,13 +470,12 @@ class MediaPlugin(MediaBase):
|
||||
done_releases = [release for release in media_releases if release.get('status') == 'done']
|
||||
|
||||
if done_releases:
|
||||
# Only look at latest added release
|
||||
release = sorted(done_releases, key = itemgetter('last_edit'), reverse = True)[0]
|
||||
|
||||
# Check if we are finished with the media
|
||||
for release in done_releases:
|
||||
if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True):
|
||||
m['status'] = 'done'
|
||||
break
|
||||
|
||||
if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True):
|
||||
m['status'] = 'done'
|
||||
elif previous_status == 'done':
|
||||
m['status'] = 'done'
|
||||
|
||||
@@ -546,26 +484,22 @@ class MediaPlugin(MediaBase):
|
||||
m['status'] = previous_status
|
||||
|
||||
# Only update when status has changed
|
||||
if previous_status != m['status'] and (not allowed_restatus or m['status'] in allowed_restatus):
|
||||
if previous_status != m['status']:
|
||||
db.update(m)
|
||||
|
||||
# Tag media as recent
|
||||
if tag_recent:
|
||||
self.tag(media_id, 'recent', update_edited = True)
|
||||
self.tag(media_id, 'recent')
|
||||
|
||||
return m['status']
|
||||
except:
|
||||
log.error('Failed restatus: %s', traceback.format_exc())
|
||||
|
||||
def tag(self, media_id, tag, update_edited = False):
|
||||
def tag(self, media_id, tag):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
m = db.get('id', media_id)
|
||||
|
||||
if update_edited:
|
||||
m['last_edit'] = int(time.time())
|
||||
|
||||
tags = m.get('tags') or []
|
||||
if tag not in tags:
|
||||
tags.append(tag)
|
||||
|
||||
@@ -45,7 +45,7 @@ class Base(NZBProvider, RSS):
|
||||
def _searchOnHost(self, host, media, quality, results):
|
||||
|
||||
query = self.buildUrl(media, host)
|
||||
url = '%s%s' % (self.getUrl(host['host']), query)
|
||||
url = '%s&%s' % (self.getUrl(host['host']), query)
|
||||
nzbs = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})
|
||||
|
||||
for nzb in nzbs:
|
||||
@@ -83,7 +83,7 @@ class Base(NZBProvider, RSS):
|
||||
try:
|
||||
# Get details for extended description to retrieve passwords
|
||||
query = self.buildDetailsUrl(nzb_id, host['api_key'])
|
||||
url = '%s%s' % (self.getUrl(host['host']), query)
|
||||
url = '%s&%s' % (self.getUrl(host['host']), query)
|
||||
nzb_details = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})[0]
|
||||
|
||||
description = self.getTextElement(nzb_details, 'description')
|
||||
@@ -187,12 +187,11 @@ class Base(NZBProvider, RSS):
|
||||
self.limits_reached[host] = False
|
||||
return data
|
||||
except HTTPError as e:
|
||||
sc = e.response.status_code
|
||||
if sc in [503, 429]:
|
||||
if e.code == 503:
|
||||
response = e.read().lower()
|
||||
if sc == 429 or 'maximum api' in response or 'download limit' in response:
|
||||
if 'maximum api' in response or 'download limit' in response:
|
||||
if not self.limits_reached.get(host):
|
||||
log.error('Limit reached / to many requests for newznab provider: %s', host)
|
||||
log.error('Limit reached for newznab provider: %s', host)
|
||||
self.limits_reached[host] = time.time()
|
||||
return 'try_next'
|
||||
|
||||
@@ -221,7 +220,7 @@ config = [{
|
||||
'description': 'Enable <a href="http://newznab.com/" target="_blank">NewzNab</a> such as <a href="https://nzb.su" target="_blank">NZB.su</a>, \
|
||||
<a href="https://nzbs.org" target="_blank">NZBs.org</a>, <a href="http://dognzb.cr/" target="_blank">DOGnzb.cr</a>, \
|
||||
<a href="https://github.com/spotweb/spotweb" target="_blank">Spotweb</a>, <a href="https://nzbgeek.info/" target="_blank">NZBGeek</a>, \
|
||||
<a href="https://www.nzbfinder.ws" target="_blank">NZBFinder</a>',
|
||||
<a href="https://smackdownonyou.com" target="_blank">SmackDown</a>, <a href="https://www.nzbfinder.ws" target="_blank">NZBFinder</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAgMAAABinRfyAAAACVBMVEVjhwD///86aRovd/sBAAAAMklEQVQI12NgAIPQUCCRmQkjssDEShiRuRIqwZqZGcDAGBrqANUhGgIkWAOABKMDxCAA24UK50b26SAAAAAASUVORK5CYII=',
|
||||
'options': [
|
||||
@@ -232,30 +231,30 @@ config = [{
|
||||
},
|
||||
{
|
||||
'name': 'use',
|
||||
'default': '0,0,0,0,0'
|
||||
'default': '0,0,0,0,0,0'
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://api.nzbgeek.info,https://www.nzbfinder.ws',
|
||||
'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://index.nzbgeek.info, https://smackdownonyou.com, https://www.nzbfinder.ws',
|
||||
'description': 'The hostname of your newznab provider',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'default': '0,0,0,0,0',
|
||||
'default': '0,0,0,0,0,0',
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
},
|
||||
{
|
||||
'name': 'custom_tag',
|
||||
'advanced': True,
|
||||
'label': 'Custom tag',
|
||||
'default': ',,,,',
|
||||
'default': ',,,,,',
|
||||
'description': 'Add custom tags, for example add rls=1 to get only scene releases from nzbs.org',
|
||||
},
|
||||
{
|
||||
'name': 'api_key',
|
||||
'default': ',,,,',
|
||||
'default': ',,,,,',
|
||||
'label': 'Api Key',
|
||||
'description': 'Can be found on your profile page',
|
||||
'type': 'combined',
|
||||
|
||||
126
couchpotato/core/media/_base/providers/nzb/nzbindex.py
Normal file
126
couchpotato/core/media/_base/providers/nzb/nzbindex.py
Normal file
@@ -0,0 +1,126 @@
|
||||
import re
|
||||
import time
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||
from dateutil.parser import parse
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(NZBProvider, RSS):
|
||||
|
||||
urls = {
|
||||
'download': 'https://www.nzbindex.com/download/',
|
||||
'search': 'https://www.nzbindex.com/rss/?%s',
|
||||
}
|
||||
|
||||
http_time_between_calls = 1 # Seconds
|
||||
|
||||
def _search(self, media, quality, results):
|
||||
|
||||
nzbs = self.getRSSData(self.urls['search'] % self.buildUrl(media, quality))
|
||||
|
||||
for nzb in nzbs:
|
||||
|
||||
enclosure = self.getElement(nzb, 'enclosure').attrib
|
||||
nzbindex_id = int(self.getTextElement(nzb, "link").split('/')[4])
|
||||
|
||||
title = self.getTextElement(nzb, "title")
|
||||
|
||||
match = fireEvent('matcher.parse', title, parser='usenet', single = True)
|
||||
if not match.chains:
|
||||
log.info('Unable to parse release with title "%s"', title)
|
||||
continue
|
||||
|
||||
# TODO should we consider other lower-weight chains here?
|
||||
info = fireEvent('matcher.flatten_info', match.chains[0].info, single = True)
|
||||
|
||||
release_name = fireEvent('matcher.construct_from_raw', info.get('release_name'), single = True)
|
||||
|
||||
file_name = info.get('detail', {}).get('file_name')
|
||||
file_name = file_name[0] if file_name else None
|
||||
|
||||
title = release_name or file_name
|
||||
|
||||
# Strip extension from parsed title (if one exists)
|
||||
ext_pos = title.rfind('.')
|
||||
|
||||
# Assume extension if smaller than 4 characters
|
||||
# TODO this should probably be done a better way
|
||||
if len(title[ext_pos + 1:]) <= 4:
|
||||
title = title[:ext_pos]
|
||||
|
||||
if not title:
|
||||
log.info('Unable to find release name from match')
|
||||
continue
|
||||
|
||||
try:
|
||||
description = self.getTextElement(nzb, "description")
|
||||
except:
|
||||
description = ''
|
||||
|
||||
def extra_check(item):
|
||||
if '#c20000' in item['description'].lower():
|
||||
log.info('Wrong: Seems to be passworded: %s', item['name'])
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
results.append({
|
||||
'id': nzbindex_id,
|
||||
'name': title,
|
||||
'age': self.calculateAge(int(time.mktime(parse(self.getTextElement(nzb, "pubDate")).timetuple()))),
|
||||
'size': tryInt(enclosure['length']) / 1024 / 1024,
|
||||
'url': enclosure['url'],
|
||||
'detail_url': enclosure['url'].replace('/download/', '/release/'),
|
||||
'description': description,
|
||||
'get_more_info': self.getMoreInfo,
|
||||
'extra_check': extra_check,
|
||||
})
|
||||
|
||||
def getMoreInfo(self, item):
|
||||
try:
|
||||
if '/nfo/' in item['description'].lower():
|
||||
nfo_url = re.search('href=\"(?P<nfo>.+)\" ', item['description']).group('nfo')
|
||||
full_description = self.getCache('nzbindex.%s' % item['id'], url = nfo_url, cache_timeout = 25920000)
|
||||
html = BeautifulSoup(full_description)
|
||||
item['description'] = toUnicode(html.find('pre', attrs = {'id': 'nfo0'}).text)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'nzbindex',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'nzb_providers',
|
||||
'name': 'nzbindex',
|
||||
'description': 'Free provider, less accurate. See <a href="https://www.nzbindex.com/">NZBIndex</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAo0lEQVR42t2SQQ2AMBAEcUCwUAv94QMLfHliAQtYqIVawEItYAG6yZFMLkUANNlk79Kbbtp2P1j9uKxVV9VWFeStl+Wh3fWK9hNwEoADZkJtMD49AqS5AUjWGx6A+m+ARICGrM5W+wSTB0gETKzdHZwCEZAJ8PGZQN4AiQAmkR9s06EBAugJiBoAAPFfAQcBgZcIHzwA6TYP4JsXeSg3P9L31w3eksbH3zMb/wAAAABJRU5ErkJggg==',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': True,
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 0,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -61,7 +61,7 @@ class Base(TorrentProvider):
|
||||
'name': re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) %s' % (name, year, torrent_desc)),
|
||||
'url': self.urls['download'] % (torrent_id, authkey, self.conf('passkey')),
|
||||
'detail_url': self.urls['detail'] % torrent_id,
|
||||
'size': tryInt(entry.find('size').get_text()) / 1048576,
|
||||
'size': self.parseSize(entry.find('size').get_text()),
|
||||
'seeders': tryInt(entry.find('seeders').get_text()),
|
||||
'leechers': tryInt(entry.find('leechers').get_text()),
|
||||
'score': torrentscore
|
||||
|
||||
@@ -22,9 +22,6 @@ class Base(TorrentProvider):
|
||||
http_time_between_calls = 1 # Seconds
|
||||
only_tables_tags = SoupStrainer('table')
|
||||
|
||||
torrent_name_cell = 1
|
||||
torrent_download_cell = 2
|
||||
|
||||
def _searchOnTitle(self, title, movie, quality, results):
|
||||
|
||||
url = self.urls['search'] % self.buildUrl(title, movie, quality)
|
||||
@@ -43,8 +40,8 @@ class Base(TorrentProvider):
|
||||
|
||||
all_cells = result.find_all('td')
|
||||
|
||||
torrent = all_cells[self.torrent_name_cell].find('a')
|
||||
download = all_cells[self.torrent_download_cell].find('a')
|
||||
torrent = all_cells[1].find('a')
|
||||
download = all_cells[3].find('a')
|
||||
|
||||
torrent_id = torrent['href']
|
||||
torrent_id = torrent_id.replace('details.php?id=', '')
|
||||
@@ -52,9 +49,9 @@ class Base(TorrentProvider):
|
||||
|
||||
torrent_name = torrent.getText()
|
||||
|
||||
torrent_size = self.parseSize(all_cells[8].getText())
|
||||
torrent_seeders = tryInt(all_cells[10].getText())
|
||||
torrent_leechers = tryInt(all_cells[11].getText())
|
||||
torrent_size = self.parseSize(all_cells[7].getText())
|
||||
torrent_seeders = tryInt(all_cells[9].getText())
|
||||
torrent_leechers = tryInt(all_cells[10].getText())
|
||||
torrent_url = self.urls['baseurl'] % download['href']
|
||||
torrent_detail_url = self.urls['baseurl'] % torrent['href']
|
||||
|
||||
|
||||
@@ -34,7 +34,8 @@ class Base(TorrentMagnetProvider):
|
||||
'http://kickass.pw',
|
||||
'http://kickassto.come.in',
|
||||
'http://katproxy.ws',
|
||||
'http://kickass.bitproxy.eu',
|
||||
'http://www.kickassunblock.info',
|
||||
'http://www.kickassproxy.info',
|
||||
'http://katph.eu',
|
||||
'http://kickassto.come.in',
|
||||
]
|
||||
|
||||
@@ -64,10 +64,6 @@ class Base(TorrentProvider):
|
||||
torrentdesc += ' HQ'
|
||||
if self.conf('prefer_golden'):
|
||||
torrentscore += 5000
|
||||
if 'FreeleechType' in torrent:
|
||||
torrentdesc += ' Freeleech'
|
||||
if self.conf('prefer_freeleech'):
|
||||
torrentscore += 7000
|
||||
if 'Scene' in torrent and torrent['Scene']:
|
||||
torrentdesc += ' Scene'
|
||||
if self.conf('prefer_scene'):
|
||||
@@ -227,14 +223,6 @@ config = [{
|
||||
'default': 1,
|
||||
'description': 'Favors Golden Popcorn-releases over all other releases.'
|
||||
},
|
||||
{
|
||||
'name': 'prefer_freeleech',
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'label': 'Prefer Freeleech',
|
||||
'default': 1,
|
||||
'description': 'Favors torrents marked as freeleech over all other releases.'
|
||||
},
|
||||
{
|
||||
'name': 'prefer_scene',
|
||||
'advanced': True,
|
||||
|
||||
@@ -24,16 +24,16 @@ class Base(TorrentMagnetProvider):
|
||||
http_time_between_calls = 0
|
||||
|
||||
proxy_list = [
|
||||
'https://dieroschtibay.org',
|
||||
'https://nobay.net',
|
||||
'https://thebay.al',
|
||||
'https://thepiratebay.se',
|
||||
'http://thepiratebay.se.net',
|
||||
'http://thepiratebay.cd',
|
||||
'http://thebootlegbay.com',
|
||||
'http://tpb.ninja.so',
|
||||
'http://proxybay.fr',
|
||||
'http://www.tpb.gr',
|
||||
'http://tpbproxy.co.uk',
|
||||
'http://pirateproxy.in',
|
||||
'http://piratebay.skey.sk',
|
||||
'http://pirateproxy.be',
|
||||
'http://www.getpirate.com',
|
||||
'http://piratebay.io',
|
||||
'http://bayproxy.li',
|
||||
'http://proxybay.pw',
|
||||
]
|
||||
|
||||
126
couchpotato/core/media/_base/providers/torrent/torrentleech.py
Normal file
126
couchpotato/core/media/_base/providers/torrent/torrentleech.py
Normal file
@@ -0,0 +1,126 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'http://www.torrentleech.org/',
|
||||
'login': 'http://www.torrentleech.org/user/account/login/',
|
||||
'login_check': 'http://torrentleech.org/user/messages',
|
||||
'detail': 'http://www.torrentleech.org/torrent/%s',
|
||||
'search': 'http://www.torrentleech.org/torrents/browse/index/query/%s/categories/%d',
|
||||
'download': 'http://www.torrentleech.org%s',
|
||||
}
|
||||
|
||||
http_time_between_calls = 1 # Seconds
|
||||
cat_backup_id = None
|
||||
|
||||
def _searchOnTitle(self, title, media, quality, results):
|
||||
|
||||
url = self.urls['search'] % self.buildUrl(title, media, quality)
|
||||
|
||||
data = self.getHTMLData(url)
|
||||
|
||||
if data:
|
||||
html = BeautifulSoup(data)
|
||||
|
||||
try:
|
||||
result_table = html.find('table', attrs = {'id': 'torrenttable'})
|
||||
if not result_table:
|
||||
return
|
||||
|
||||
entries = result_table.find_all('tr')
|
||||
|
||||
for result in entries[1:]:
|
||||
|
||||
link = result.find('td', attrs = {'class': 'name'}).find('a')
|
||||
url = result.find('td', attrs = {'class': 'quickdownload'}).find('a')
|
||||
details = result.find('td', attrs = {'class': 'name'}).find('a')
|
||||
|
||||
results.append({
|
||||
'id': link['href'].replace('/torrent/', ''),
|
||||
'name': six.text_type(link.string),
|
||||
'url': self.urls['download'] % url['href'],
|
||||
'detail_url': self.urls['download'] % details['href'],
|
||||
'size': self.parseSize(result.find_all('td')[4].string),
|
||||
'seeders': tryInt(result.find('td', attrs = {'class': 'seeders'}).string),
|
||||
'leechers': tryInt(result.find('td', attrs = {'class': 'leechers'}).string),
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
def getLoginParams(self):
|
||||
return {
|
||||
'username': self.conf('username'),
|
||||
'password': self.conf('password'),
|
||||
'remember_me': 'on',
|
||||
'login': 'submit',
|
||||
}
|
||||
|
||||
def loginSuccess(self, output):
|
||||
return '/user/account/logout' in output.lower() or 'welcome back' in output.lower()
|
||||
|
||||
loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'torrentleech',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'TorrentLeech',
|
||||
'description': '<a href="http://torrentleech.org">TorrentLeech</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACHUlEQVR4AZVSO48SYRSdGTCBEMKzILLAWiybkKAGMZRUUJEoDZX7B9zsbuQPYEEjNLTQkYgJDwsoSaxspEBsCITXjjNAIKi8AkzceXgmbHQ1NJ5iMufmO9/9zrmXlCSJ+B8o75J8Pp/NZj0eTzweBy0Wi4PBYD6f12o1r9ebTCZx+22HcrnMsuxms7m6urTZ7LPZDMVYLBZ8ZV3yo8aq9Pq0wzCMTqe77dDv9y8uLyAWBH6xWOyL0K/56fcb+rrPgPZ6PZfLRe1fsl6vCUmGKIqoqNXqdDr9Dbjps9znUV0uTqdTjuPkDoVCIfcuJ4gizjMMm8u9vW+1nr04czqdK56c37CbKY9j2+1WEARZ0Gq1RFHAz2q1qlQqXxoN69HRcDjUarW8ZD6QUigUOnY8uKYH8N1sNkul9yiGw+F6vS4Rxn8EsodEIqHRaOSnq9T7ajQazWQycEIR1AEBYDabSZJyHDucJyegwWBQr9ebTCaKvHd4cCQANUU9evwQ1Ofz4YvUKUI43GE8HouSiFiNRhOowWBIpVLyHITJkuW3PwgAEf3pgIwxF5r+OplMEsk3CPT5szCMnY7EwUdhwUh/CXiej0Qi3idPz89fdrpdbsfBzH7S3Q9K5pP4c0sAKpVKoVAQGO1ut+t0OoFAQHkH2Da/3/+but3uarWK0ZMQoNdyucRutdttmqZxMTzY7XaYxsrgtUjEZrNhkSwWyy/0NCatZumrNQAAAABJRU5ErkJggg==',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False,
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 20,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -13,12 +13,12 @@ log = CPLog(__name__)
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'http://torrentshack.eu/',
|
||||
'login': 'http://torrentshack.eu/login.php',
|
||||
'login_check': 'http://torrentshack.eu/inbox.php',
|
||||
'detail': 'http://torrentshack.eu/torrent/%s',
|
||||
'search': 'http://torrentshack.eu/torrents.php?action=advanced&searchstr=%s&scene=%s&filter_cat[%d]=1',
|
||||
'download': 'http://torrentshack.eu/%s',
|
||||
'test': 'https://torrentshack.net/',
|
||||
'login': 'https://torrentshack.net/login.php',
|
||||
'login_check': 'https://torrentshack.net/inbox.php',
|
||||
'detail': 'https://torrentshack.net/torrent/%s',
|
||||
'search': 'https://torrentshack.net/torrents.php?action=advanced&searchstr=%s&scene=%s&filter_cat[%d]=1',
|
||||
'download': 'https://torrentshack.net/%s',
|
||||
}
|
||||
|
||||
http_time_between_calls = 1 # Seconds
|
||||
@@ -42,7 +42,6 @@ class Base(TorrentProvider):
|
||||
|
||||
link = result.find('span', attrs = {'class': 'torrent_name_link'}).parent
|
||||
url = result.find('td', attrs = {'class': 'torrent_td'}).find('a')
|
||||
tds = result.find_all('td')
|
||||
|
||||
results.append({
|
||||
'id': link['href'].replace('torrents.php?torrentid=', ''),
|
||||
@@ -50,8 +49,8 @@ class Base(TorrentProvider):
|
||||
'url': self.urls['download'] % url['href'],
|
||||
'detail_url': self.urls['download'] % link['href'],
|
||||
'size': self.parseSize(result.find_all('td')[5].string),
|
||||
'seeders': tryInt(tds[len(tds)-2].string),
|
||||
'leechers': tryInt(tds[len(tds)-1].string),
|
||||
'seeders': tryInt(result.find_all('td')[7].string),
|
||||
'leechers': tryInt(result.find_all('td')[8].string),
|
||||
})
|
||||
|
||||
except:
|
||||
@@ -81,7 +80,7 @@ config = [{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'TorrentShack',
|
||||
'description': '<a href="http://torrentshack.eu/">TorrentShack</a>',
|
||||
'description': '<a href="https://www.torrentshack.net/">TorrentShack</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABmElEQVQoFQXBzY2cVRiE0afqvd84CQiAnxWWtyxsS6ThINBYg2Dc7mZBMEjE4mzs6e9WcY5+ePNuVFJJodQAoLo+SaWCy9rcV8cmjah3CI6iYu7oRU30kE5xxELRfamklY3k1NL19sSm7vPzP/ZdNZzKVDaY2sPZJBh9fv5ITrmG2+Vp4e1sPchVqTCQZJnVXi+/L4uuAJGly1+Pw8CprLbi8Om7tbT19/XRqJUk11JP9uHj9ulxhXbvJbI9qJvr5YkGXFG2IBT8tXczt+sfzDZCp3765f3t9tHEHGEDACma77+8o4oATKk+/PfW9YmHruRFjWoVSFsVsGu1YSKq6Oc37+n98unPZSRlY7vsKDqN+92X3yR9+PdXee3iJNKMStqdcZqoTJbUSi5JOkpfRlhSI0mSpEmCFKoU7FqSNOLAk54uGwCStMUCgLrVic62g7oDoFmmdI+P3S0pDe1xvDqb6XrZqbtzShWNoh9fv/XQHaDdM9OqrZi2M7M3UrB2vlkPS1IbdEBk7UiSoD6VlZ6aKWer4aH4f/AvKoHUTjuyAAAAAElFTkSuQmCC',
|
||||
'options': [
|
||||
|
||||
@@ -73,24 +73,4 @@ config = [{
|
||||
],
|
||||
},
|
||||
],
|
||||
}, {
|
||||
'name': 'torrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'name': 'searcher',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'minimum_seeders',
|
||||
'advanced': True,
|
||||
'label': 'Minimum seeders',
|
||||
'description': 'Ignore torrents with seeders below this number',
|
||||
'default': 1,
|
||||
'type': 'int',
|
||||
'unit': 'seeders'
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
|
||||
@@ -129,11 +129,7 @@ class Searcher(SearcherBase):
|
||||
# Try guessing via quality tags
|
||||
guess = fireEvent('quality.guess', [nzb.get('name')], single = True)
|
||||
|
||||
if guess:
|
||||
return threed == guess.get('is_3d')
|
||||
# If no quality guess, assume not 3d
|
||||
else:
|
||||
return threed == False
|
||||
return threed == guess.get('is_3d')
|
||||
|
||||
def correctYear(self, haystack, year, year_range):
|
||||
|
||||
@@ -178,25 +174,6 @@ class Searcher(SearcherBase):
|
||||
|
||||
return False
|
||||
|
||||
def containsWords(self, rel_name, rel_words, conf, media):
|
||||
|
||||
# Make sure it has required words
|
||||
words = splitString(self.conf('%s_words' % conf, section = 'searcher').lower())
|
||||
try: words = removeDuplicate(words + splitString(media['category'][conf].lower()))
|
||||
except: pass
|
||||
|
||||
req_match = 0
|
||||
for req_set in words:
|
||||
if len(req_set) >= 2 and (req_set[:1] + req_set[-1:]) == '//':
|
||||
if re.search(req_set[1:-1], rel_name):
|
||||
log.debug('Regex match: %s', req_set[1:-1])
|
||||
req_match += 1
|
||||
else:
|
||||
req = splitString(req_set, '&')
|
||||
req_match += len(list(set(rel_words) & set(req))) == len(req)
|
||||
|
||||
return words, req_match > 0
|
||||
|
||||
def correctWords(self, rel_name, media):
|
||||
media_title = fireEvent('searcher.get_search_title', media, single = True)
|
||||
media_words = re.split('\W+', simplifyString(media_title))
|
||||
@@ -204,13 +181,31 @@ class Searcher(SearcherBase):
|
||||
rel_name = simplifyString(rel_name)
|
||||
rel_words = re.split('\W+', rel_name)
|
||||
|
||||
required_words, contains_required = self.containsWords(rel_name, rel_words, 'required', media)
|
||||
if len(required_words) > 0 and not contains_required:
|
||||
# Make sure it has required words
|
||||
required_words = splitString(self.conf('required_words', section = 'searcher').lower())
|
||||
try: required_words = removeDuplicate(required_words + splitString(media['category']['required'].lower()))
|
||||
except: pass
|
||||
|
||||
req_match = 0
|
||||
for req_set in required_words:
|
||||
req = splitString(req_set, '&')
|
||||
req_match += len(list(set(rel_words) & set(req))) == len(req)
|
||||
|
||||
if len(required_words) > 0 and req_match == 0:
|
||||
log.info2('Wrong: Required word missing: %s', rel_name)
|
||||
return False
|
||||
|
||||
ignored_words, contains_ignored = self.containsWords(rel_name, rel_words, 'ignored', media)
|
||||
if len(ignored_words) > 0 and contains_ignored:
|
||||
# Ignore releases
|
||||
ignored_words = splitString(self.conf('ignored_words', section = 'searcher').lower())
|
||||
try: ignored_words = removeDuplicate(ignored_words + splitString(media['category']['ignored'].lower()))
|
||||
except: pass
|
||||
|
||||
ignored_match = 0
|
||||
for ignored_set in ignored_words:
|
||||
ignored = splitString(ignored_set, '&')
|
||||
ignored_match += len(list(set(rel_words) & set(ignored))) == len(ignored)
|
||||
|
||||
if len(ignored_words) > 0 and ignored_match:
|
||||
log.info2("Wrong: '%s' contains 'ignored words'", rel_name)
|
||||
return False
|
||||
|
||||
|
||||
57
couchpotato/core/media/movie/_base/main.py
Executable file → Normal file
57
couchpotato/core/media/movie/_base/main.py
Executable file → Normal file
@@ -1,3 +1,4 @@
|
||||
import os
|
||||
import traceback
|
||||
import time
|
||||
|
||||
@@ -27,10 +28,6 @@ class MovieBase(MovieTypeBase):
|
||||
|
||||
addApiView('movie.add', self.addView, docs = {
|
||||
'desc': 'Add new movie to the wanted list',
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'movie': object
|
||||
}"""},
|
||||
'params': {
|
||||
'identifier': {'desc': 'IMDB id of the movie your want to add.'},
|
||||
'profile_id': {'desc': 'ID of quality profile you want the add the movie in. If empty will use the default profile.'},
|
||||
@@ -49,7 +46,7 @@ class MovieBase(MovieTypeBase):
|
||||
})
|
||||
|
||||
addEvent('movie.add', self.add)
|
||||
addEvent('movie.update', self.update)
|
||||
addEvent('movie.update_info', self.updateInfo)
|
||||
addEvent('movie.update_release_dates', self.updateReleaseDate)
|
||||
|
||||
def add(self, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):
|
||||
@@ -154,7 +151,8 @@ class MovieBase(MovieTypeBase):
|
||||
for release in fireEvent('release.for_media', m['_id'], single = True):
|
||||
if release.get('status') in ['downloaded', 'snatched', 'seeding', 'done']:
|
||||
if params.get('ignore_previous', False):
|
||||
fireEvent('release.update_status', release['_id'], status = 'ignored')
|
||||
release['status'] = 'ignored'
|
||||
db.update(release)
|
||||
else:
|
||||
fireEvent('release.delete', release['_id'], single = True)
|
||||
|
||||
@@ -174,7 +172,7 @@ class MovieBase(MovieTypeBase):
|
||||
# Trigger update info
|
||||
if added and update_after:
|
||||
# Do full update to get images etc
|
||||
fireEventAsync('movie.update', m['_id'], default_title = params.get('title'), on_complete = onComplete)
|
||||
fireEventAsync('movie.update_info', m['_id'], default_title = params.get('title'), on_complete = onComplete)
|
||||
|
||||
# Remove releases
|
||||
for rel in fireEvent('release.for_media', m['_id'], single = True):
|
||||
@@ -182,9 +180,6 @@ class MovieBase(MovieTypeBase):
|
||||
db.delete(rel)
|
||||
|
||||
movie_dict = fireEvent('media.get', m['_id'], single = True)
|
||||
if not movie_dict:
|
||||
log.debug('Failed adding media, can\'t find it anymore')
|
||||
return False
|
||||
|
||||
if do_search and search_after:
|
||||
onComplete = self.createOnComplete(m['_id'])
|
||||
@@ -261,7 +256,7 @@ class MovieBase(MovieTypeBase):
|
||||
'success': False,
|
||||
}
|
||||
|
||||
def update(self, media_id = None, identifier = None, default_title = None, extended = False):
|
||||
def updateInfo(self, media_id = None, identifier = None, default_title = None, extended = False):
|
||||
"""
|
||||
Update movie information inside media['doc']['info']
|
||||
|
||||
@@ -274,10 +269,6 @@ class MovieBase(MovieTypeBase):
|
||||
if self.shuttingDown():
|
||||
return
|
||||
|
||||
lock_key = 'media.get.%s' % media_id if media_id else identifier
|
||||
self.acquireLock(lock_key)
|
||||
|
||||
media = {}
|
||||
try:
|
||||
db = get_db()
|
||||
|
||||
@@ -321,16 +312,42 @@ class MovieBase(MovieTypeBase):
|
||||
media['title'] = def_title
|
||||
|
||||
# Files
|
||||
image_urls = info.get('images', [])
|
||||
images = info.get('images', [])
|
||||
media['files'] = media.get('files', {})
|
||||
for image_type in ['poster']:
|
||||
|
||||
self.getPoster(media, image_urls)
|
||||
# Remove non-existing files
|
||||
file_type = 'image_%s' % image_type
|
||||
existing_files = list(set(media['files'].get(file_type, [])))
|
||||
for ef in media['files'].get(file_type, []):
|
||||
if not os.path.isfile(ef):
|
||||
existing_files.remove(ef)
|
||||
|
||||
# Replace new files list
|
||||
media['files'][file_type] = existing_files
|
||||
if len(existing_files) == 0:
|
||||
del media['files'][file_type]
|
||||
|
||||
# Loop over type
|
||||
for image in images.get(image_type, []):
|
||||
if not isinstance(image, (str, unicode)):
|
||||
continue
|
||||
|
||||
if file_type not in media['files'] or len(media['files'].get(file_type, [])) == 0:
|
||||
file_path = fireEvent('file.download', url = image, single = True)
|
||||
if file_path:
|
||||
media['files'][file_type] = [file_path]
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
db.update(media)
|
||||
|
||||
return media
|
||||
except:
|
||||
log.error('Failed update media: %s', traceback.format_exc())
|
||||
|
||||
self.releaseLock(lock_key)
|
||||
return media
|
||||
return {}
|
||||
|
||||
def updateReleaseDate(self, media_id):
|
||||
"""
|
||||
@@ -346,7 +363,7 @@ class MovieBase(MovieTypeBase):
|
||||
media = db.get('id', media_id)
|
||||
|
||||
if not media.get('info'):
|
||||
media = self.update(media_id)
|
||||
media = self.updateInfo(media_id)
|
||||
dates = media.get('info', {}).get('release_date')
|
||||
else:
|
||||
dates = media.get('info').get('release_date')
|
||||
|
||||
@@ -115,15 +115,8 @@ MA.Release = new Class({
|
||||
|
||||
self.releases = null;
|
||||
if(self.options_container){
|
||||
// Releases are currently displayed
|
||||
if(self.options_container.isDisplayed()){
|
||||
self.options_container.destroy();
|
||||
self.createReleases();
|
||||
}
|
||||
else {
|
||||
self.options_container.destroy();
|
||||
self.options_container = null;
|
||||
}
|
||||
self.options_container.destroy();
|
||||
self.options_container = null;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -138,10 +131,10 @@ MA.Release = new Class({
|
||||
|
||||
},
|
||||
|
||||
createReleases: function(refresh){
|
||||
createReleases: function(){
|
||||
var self = this;
|
||||
|
||||
if(!self.options_container || refresh){
|
||||
if(!self.options_container){
|
||||
self.options_container = new Element('div.options').grab(
|
||||
self.release_container = new Element('div.releases.table')
|
||||
);
|
||||
|
||||
@@ -54,21 +54,13 @@ var Movie = new Class({
|
||||
// Reload when releases have updated
|
||||
self.global_events['release.update_status'] = function(notification){
|
||||
var data = notification.data;
|
||||
if(data && self.data._id == data.media_id){
|
||||
if(data && self.data._id == data.movie_id){
|
||||
|
||||
if(!self.data.releases)
|
||||
self.data.releases = [];
|
||||
|
||||
var updated = false;
|
||||
self.data.releases.each(function(release){
|
||||
if(release._id == data._id){
|
||||
release['status'] = data.status;
|
||||
updated = true;
|
||||
}
|
||||
});
|
||||
|
||||
if(updated)
|
||||
self.updateReleases();
|
||||
self.data.releases.push({'quality': data.quality, 'status': data.status});
|
||||
self.updateReleases();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -167,7 +159,7 @@ var Movie = new Class({
|
||||
}
|
||||
}
|
||||
}),
|
||||
self.thumbnail = (self.data.files && self.data.files.image_poster && self.data.files.image_poster.length > 0) ? new Element('img', {
|
||||
self.thumbnail = (self.data.files && self.data.files.image_poster) ? new Element('img', {
|
||||
'class': 'type_image poster',
|
||||
'src': Api.createUrl('file.cache') + self.data.files.image_poster[0].split(Api.getOption('path_sep')).pop()
|
||||
}): null,
|
||||
|
||||
@@ -21,6 +21,13 @@ config = [{
|
||||
'type': 'int',
|
||||
'description': 'Maximum number of items displayed from each chart.',
|
||||
},
|
||||
{
|
||||
'name': 'update_interval',
|
||||
'default': 12,
|
||||
'type': 'int',
|
||||
'advanced': True,
|
||||
'description': '(hours)',
|
||||
},
|
||||
{
|
||||
'name': 'hide_wanted',
|
||||
'default': False,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import time
|
||||
|
||||
from couchpotato import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent,fireEvent
|
||||
@@ -12,14 +13,13 @@ log = CPLog(__name__)
|
||||
class Charts(Plugin):
|
||||
|
||||
update_in_progress = False
|
||||
update_interval = 72 # hours
|
||||
|
||||
def __init__(self):
|
||||
addApiView('charts.view', self.automationView)
|
||||
addEvent('app.load', self.setCrons)
|
||||
|
||||
def setCrons(self):
|
||||
fireEvent('schedule.interval', 'charts.update_cache', self.updateViewCache, hours = self.update_interval)
|
||||
fireEvent('schedule.interval', 'charts.update_cache', self.updateViewCache, hours = self.conf('update_interval', default = 12))
|
||||
|
||||
def automationView(self, force_update = False, **kwargs):
|
||||
|
||||
@@ -52,7 +52,7 @@ class Charts(Plugin):
|
||||
for chart in charts:
|
||||
chart['hide_wanted'] = self.conf('hide_wanted')
|
||||
chart['hide_library'] = self.conf('hide_library')
|
||||
self.setCache('charts_cached', charts, timeout = self.update_interval * 3600)
|
||||
self.setCache('charts_cached', charts, timeout = 7200 * tryInt(self.conf('update_interval', default = 12)))
|
||||
except:
|
||||
log.error('Failed refreshing charts')
|
||||
|
||||
|
||||
@@ -264,11 +264,3 @@
|
||||
height: 40px;
|
||||
}
|
||||
|
||||
@media all and (max-width: 480px) {
|
||||
.toggle_menu h2 {
|
||||
font-size: 16px;
|
||||
text-align: center;
|
||||
height: 30px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,8 +2,6 @@ var Charts = new Class({
|
||||
|
||||
Implements: [Options, Events],
|
||||
|
||||
shown_once: false,
|
||||
|
||||
initialize: function(options){
|
||||
var self = this;
|
||||
self.setOptions(options);
|
||||
@@ -42,13 +40,17 @@ var Charts = new Class({
|
||||
)
|
||||
);
|
||||
|
||||
if( Cookie.read('suggestions_charts_menu_selected') === 'charts'){
|
||||
self.show();
|
||||
self.fireEvent.delay(0, self, 'created');
|
||||
}
|
||||
if( Cookie.read('suggestions_charts_menu_selected') === 'charts')
|
||||
self.el.show();
|
||||
else
|
||||
self.el.hide();
|
||||
|
||||
self.api_request = Api.request('charts.view', {
|
||||
'onComplete': self.fill.bind(self)
|
||||
});
|
||||
|
||||
self.fireEvent.delay(0, self, 'created');
|
||||
|
||||
},
|
||||
|
||||
fill: function(json){
|
||||
@@ -155,24 +157,6 @@ var Charts = new Class({
|
||||
|
||||
},
|
||||
|
||||
show: function(){
|
||||
var self = this;
|
||||
|
||||
self.el.show();
|
||||
|
||||
if(!self.shown_once){
|
||||
self.api_request = Api.request('charts.view', {
|
||||
'onComplete': self.fill.bind(self)
|
||||
});
|
||||
|
||||
self.shown_once = true;
|
||||
}
|
||||
},
|
||||
|
||||
hide: function(){
|
||||
this.el.hide();
|
||||
},
|
||||
|
||||
afterAdded: function(m){
|
||||
|
||||
$(m).getElement('div.chart_number')
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato import fireEvent
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
@@ -7,7 +5,6 @@ from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.movie.providers.automation.base import Automation
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Bluray'
|
||||
@@ -37,49 +34,27 @@ class Bluray(Automation, RSS):
|
||||
|
||||
try:
|
||||
# Stop if the release year is before the minimal year
|
||||
brk = False
|
||||
h3s = soup.body.find_all('h3')
|
||||
for h3 in h3s:
|
||||
if h3.parent.name != 'a':
|
||||
|
||||
try:
|
||||
page_year = tryInt(h3.get_text()[-4:])
|
||||
if page_year > 0 and page_year < self.getMinimal('year'):
|
||||
brk = True
|
||||
except:
|
||||
log.error('Failed determining page year: %s', traceback.format_exc())
|
||||
brk = True
|
||||
break
|
||||
|
||||
if brk:
|
||||
page_year = soup.body.find_all('center')[3].table.tr.find_all('td', recursive = False)[3].h3.get_text().split(', ')[1]
|
||||
if tryInt(page_year) < self.getMinimal('year'):
|
||||
break
|
||||
|
||||
for h3 in h3s:
|
||||
try:
|
||||
if h3.parent.name == 'a':
|
||||
name = h3.get_text().lower().split('blu-ray')[0].strip()
|
||||
for table in soup.body.find_all('center')[3].table.tr.find_all('td', recursive = False)[3].find_all('table')[1:20]:
|
||||
name = table.h3.get_text().lower().split('blu-ray')[0].strip()
|
||||
year = table.small.get_text().split('|')[1].strip()
|
||||
|
||||
if not name.find('/') == -1: # make sure it is not a double movie release
|
||||
continue
|
||||
if not name.find('/') == -1: # make sure it is not a double movie release
|
||||
continue
|
||||
|
||||
if not h3.parent.parent.small: # ignore non-movie tables
|
||||
continue
|
||||
if tryInt(year) < self.getMinimal('year'):
|
||||
continue
|
||||
|
||||
year = h3.parent.parent.small.get_text().split('|')[1].strip()
|
||||
imdb = self.search(name, year)
|
||||
|
||||
if tryInt(year) < self.getMinimal('year'):
|
||||
continue
|
||||
|
||||
imdb = self.search(name, year)
|
||||
|
||||
if imdb:
|
||||
if self.isMinimalMovie(imdb):
|
||||
movies.append(imdb['imdb'])
|
||||
except:
|
||||
log.debug('Error parsing movie html: %s', traceback.format_exc())
|
||||
break
|
||||
if imdb:
|
||||
if self.isMinimalMovie(imdb):
|
||||
movies.append(imdb['imdb'])
|
||||
except:
|
||||
log.debug('Error loading page %s: %s', (page, traceback.format_exc()))
|
||||
log.debug('Error loading page: %s', page)
|
||||
break
|
||||
|
||||
self.conf('backlog', value = False)
|
||||
@@ -159,7 +134,7 @@ config = [{
|
||||
{
|
||||
'name': 'backlog',
|
||||
'advanced': True,
|
||||
'description': ('Parses the history until the minimum movie year is reached. (Takes a while)', 'Will be disabled once it has completed'),
|
||||
'description': 'Parses the history until the minimum movie year is reached. (Will be disabled once it has completed)',
|
||||
'default': False,
|
||||
'type': 'bool',
|
||||
},
|
||||
|
||||
@@ -2,7 +2,7 @@ import base64
|
||||
import time
|
||||
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, ss
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.movie.providers.base import MovieProvider
|
||||
from couchpotato.environment import Env
|
||||
@@ -66,7 +66,7 @@ class CouchPotatoApi(MovieProvider):
|
||||
if not name:
|
||||
return
|
||||
|
||||
name_enc = base64.b64encode(ss(name))
|
||||
name_enc = base64.b64encode(name)
|
||||
return self.getJsonData(self.urls['validate'] % name_enc, headers = self.getRequestHeaders())
|
||||
|
||||
def isMovie(self, identifier = None):
|
||||
|
||||
@@ -4,7 +4,6 @@ from couchpotato import tryInt
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.movie.providers.base import MovieProvider
|
||||
from requests import HTTPError
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
@@ -15,7 +14,7 @@ autoload = 'FanartTV'
|
||||
class FanartTV(MovieProvider):
|
||||
|
||||
urls = {
|
||||
'api': 'http://webservice.fanart.tv/v3/movies/%s?api_key=b28b14e9be662e027cfbc7c3dd600405'
|
||||
'api': 'http://api.fanart.tv/webservice/movie/b28b14e9be662e027cfbc7c3dd600405/%s/JSON/all/1/2'
|
||||
}
|
||||
|
||||
MAX_EXTRAFANART = 20
|
||||
@@ -24,23 +23,23 @@ class FanartTV(MovieProvider):
|
||||
def __init__(self):
|
||||
addEvent('movie.info', self.getArt, priority = 1)
|
||||
|
||||
def getArt(self, identifier = None, extended = True, **kwargs):
|
||||
def getArt(self, identifier = None, **kwargs):
|
||||
|
||||
if not identifier or not extended:
|
||||
log.debug("Getting Extra Artwork from Fanart.tv...")
|
||||
if not identifier:
|
||||
return {}
|
||||
|
||||
images = {}
|
||||
|
||||
try:
|
||||
url = self.urls['api'] % identifier
|
||||
fanart_data = self.getJsonData(url, show_error = False)
|
||||
fanart_data = self.getJsonData(url)
|
||||
|
||||
if fanart_data:
|
||||
log.debug('Found images for %s', fanart_data.get('name'))
|
||||
images = self._parseMovie(fanart_data)
|
||||
except HTTPError as e:
|
||||
log.debug('Failed getting extra art for %s: %s',
|
||||
(identifier, e))
|
||||
name, resource = fanart_data.items()[0]
|
||||
log.debug('Found images for %s', name)
|
||||
images = self._parseMovie(resource)
|
||||
|
||||
except:
|
||||
log.error('Failed getting extra art for %s: %s',
|
||||
(identifier, traceback.format_exc()))
|
||||
@@ -96,7 +95,7 @@ class FanartTV(MovieProvider):
|
||||
for image in images:
|
||||
if tryInt(image.get('likes')) > highscore:
|
||||
highscore = tryInt(image.get('likes'))
|
||||
image_url = image.get('url') or image.get('href')
|
||||
image_url = image.get('url')
|
||||
|
||||
return image_url
|
||||
|
||||
@@ -119,9 +118,7 @@ class FanartTV(MovieProvider):
|
||||
if tryInt(image.get('likes')) > highscore:
|
||||
highscore = tryInt(image.get('likes'))
|
||||
best = image
|
||||
url = best.get('url') or best.get('href')
|
||||
if url:
|
||||
image_urls.append(url)
|
||||
image_urls.append(best.get('url'))
|
||||
pool.remove(best)
|
||||
|
||||
return image_urls
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.encoding import toUnicode, ss, tryUrlencode
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toUnicode, ss
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.movie.providers.base import MovieProvider
|
||||
import tmdb3
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
@@ -12,66 +13,54 @@ autoload = 'TheMovieDb'
|
||||
|
||||
|
||||
class TheMovieDb(MovieProvider):
|
||||
|
||||
http_time_between_calls = .35
|
||||
|
||||
configuration = {
|
||||
'images': {
|
||||
'secure_base_url': 'https://image.tmdb.org/t/p/',
|
||||
},
|
||||
}
|
||||
MAX_EXTRATHUMBS = 4
|
||||
|
||||
def __init__(self):
|
||||
addEvent('info.search', self.search, priority = 3)
|
||||
addEvent('movie.search', self.search, priority = 3)
|
||||
addEvent('movie.info', self.getInfo, priority = 3)
|
||||
addEvent('movie.info_by_tmdb', self.getInfo)
|
||||
addEvent('app.load', self.config)
|
||||
|
||||
def config(self):
|
||||
configuration = self.request('configuration')
|
||||
if configuration:
|
||||
self.configuration = configuration
|
||||
# Configure TMDB settings
|
||||
tmdb3.set_key(self.conf('api_key'))
|
||||
tmdb3.set_cache('null')
|
||||
|
||||
def search(self, q, limit = 3):
|
||||
def search(self, q, limit = 12):
|
||||
""" Find movie by name """
|
||||
|
||||
if self.isDisabled():
|
||||
return False
|
||||
|
||||
log.debug('Searching for movie: %s', q)
|
||||
search_string = simplifyString(q)
|
||||
cache_key = 'tmdb.cache.%s.%s' % (search_string, limit)
|
||||
results = self.getCache(cache_key)
|
||||
|
||||
raw = None
|
||||
try:
|
||||
name_year = fireEvent('scanner.name_year', q, single = True)
|
||||
raw = self.request('search/movie', {
|
||||
'query': name_year.get('name', q),
|
||||
'year': name_year.get('year'),
|
||||
'search_type': 'ngram' if limit > 1 else 'phrase'
|
||||
}, return_key = 'results')
|
||||
except:
|
||||
log.error('Failed searching TMDB for "%s": %s', (q, traceback.format_exc()))
|
||||
if not results:
|
||||
log.debug('Searching for movie: %s', q)
|
||||
|
||||
results = []
|
||||
if raw:
|
||||
raw = None
|
||||
try:
|
||||
nr = 0
|
||||
raw = tmdb3.searchMovie(search_string)
|
||||
except:
|
||||
log.error('Failed searching TMDB for "%s": %s', (search_string, traceback.format_exc()))
|
||||
|
||||
for movie in raw:
|
||||
parsed_movie = self.parseMovie(movie, extended = False)
|
||||
if parsed_movie:
|
||||
results.append(parsed_movie)
|
||||
results = []
|
||||
if raw:
|
||||
try:
|
||||
nr = 0
|
||||
|
||||
nr += 1
|
||||
if nr == limit:
|
||||
break
|
||||
for movie in raw:
|
||||
results.append(self.parseMovie(movie, extended = False))
|
||||
|
||||
log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])
|
||||
nr += 1
|
||||
if nr == limit:
|
||||
break
|
||||
|
||||
return results
|
||||
except SyntaxError as e:
|
||||
log.error('Failed to parse XML response: %s', e)
|
||||
return False
|
||||
log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])
|
||||
|
||||
self.setCache(cache_key, results)
|
||||
return results
|
||||
except SyntaxError as e:
|
||||
log.error('Failed to parse XML response: %s', e)
|
||||
return False
|
||||
|
||||
return results
|
||||
|
||||
@@ -80,91 +69,101 @@ class TheMovieDb(MovieProvider):
|
||||
if not identifier:
|
||||
return {}
|
||||
|
||||
result = self.parseMovie({
|
||||
'id': identifier
|
||||
}, extended = extended)
|
||||
cache_key = 'tmdb.cache.%s%s' % (identifier, '.ex' if extended else '')
|
||||
result = self.getCache(cache_key)
|
||||
|
||||
return result or {}
|
||||
if not result:
|
||||
try:
|
||||
log.debug('Getting info: %s', cache_key)
|
||||
# noinspection PyArgumentList
|
||||
movie = tmdb3.Movie(identifier)
|
||||
try: exists = movie.title is not None
|
||||
except: exists = False
|
||||
|
||||
if exists:
|
||||
result = self.parseMovie(movie, extended = extended)
|
||||
self.setCache(cache_key, result)
|
||||
else:
|
||||
result = {}
|
||||
except:
|
||||
log.error('Failed getting info for %s: %s', (identifier, traceback.format_exc()))
|
||||
|
||||
return result
|
||||
|
||||
def parseMovie(self, movie, extended = True):
|
||||
|
||||
# Do request, append other items
|
||||
movie = self.request('movie/%s' % movie.get('id'), {
|
||||
'append_to_response': 'alternative_titles' + (',images,casts' if extended else '')
|
||||
})
|
||||
if not movie:
|
||||
return
|
||||
cache_key = 'tmdb.cache.%s%s' % (movie.id, '.ex' if extended else '')
|
||||
movie_data = self.getCache(cache_key)
|
||||
|
||||
# Images
|
||||
poster = self.getImage(movie, type = 'poster', size = 'w154')
|
||||
poster_original = self.getImage(movie, type = 'poster', size = 'original')
|
||||
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original')
|
||||
extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original') if extended else []
|
||||
if not movie_data:
|
||||
|
||||
images = {
|
||||
'poster': [poster] if poster else [],
|
||||
#'backdrop': [backdrop] if backdrop else [],
|
||||
'poster_original': [poster_original] if poster_original else [],
|
||||
'backdrop_original': [backdrop_original] if backdrop_original else [],
|
||||
'actors': {},
|
||||
'extra_thumbs': extra_thumbs
|
||||
}
|
||||
# Images
|
||||
poster = self.getImage(movie, type = 'poster', size = 'w154')
|
||||
poster_original = self.getImage(movie, type = 'poster', size = 'original')
|
||||
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original')
|
||||
extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original', n = self.MAX_EXTRATHUMBS, skipfirst = True)
|
||||
|
||||
# Genres
|
||||
try:
|
||||
genres = [genre.get('name') for genre in movie.get('genres', [])]
|
||||
except:
|
||||
genres = []
|
||||
images = {
|
||||
'poster': [poster] if poster else [],
|
||||
#'backdrop': [backdrop] if backdrop else [],
|
||||
'poster_original': [poster_original] if poster_original else [],
|
||||
'backdrop_original': [backdrop_original] if backdrop_original else [],
|
||||
'actors': {},
|
||||
'extra_thumbs': extra_thumbs
|
||||
}
|
||||
|
||||
# 1900 is the same as None
|
||||
year = str(movie.get('release_date') or '')[:4]
|
||||
if not movie.get('release_date') or year == '1900' or year.lower() == 'none':
|
||||
year = None
|
||||
# Genres
|
||||
try:
|
||||
genres = [genre.name for genre in movie.genres]
|
||||
except:
|
||||
genres = []
|
||||
|
||||
# Gather actors data
|
||||
actors = {}
|
||||
if extended:
|
||||
# 1900 is the same as None
|
||||
year = str(movie.releasedate or '')[:4]
|
||||
if not movie.releasedate or year == '1900' or year.lower() == 'none':
|
||||
year = None
|
||||
|
||||
# Full data
|
||||
cast = movie.get('casts', {}).get('cast', [])
|
||||
# Gather actors data
|
||||
actors = {}
|
||||
if extended:
|
||||
for cast_item in movie.cast:
|
||||
try:
|
||||
actors[toUnicode(cast_item.name)] = toUnicode(cast_item.character)
|
||||
images['actors'][toUnicode(cast_item.name)] = self.getImage(cast_item, type = 'profile', size = 'original')
|
||||
except:
|
||||
log.debug('Error getting cast info for %s: %s', (cast_item, traceback.format_exc()))
|
||||
|
||||
for cast_item in cast:
|
||||
try:
|
||||
actors[toUnicode(cast_item.get('name'))] = toUnicode(cast_item.get('character'))
|
||||
images['actors'][toUnicode(cast_item.get('name'))] = self.getImage(cast_item, type = 'profile', size = 'original')
|
||||
except:
|
||||
log.debug('Error getting cast info for %s: %s', (cast_item, traceback.format_exc()))
|
||||
movie_data = {
|
||||
'type': 'movie',
|
||||
'via_tmdb': True,
|
||||
'tmdb_id': movie.id,
|
||||
'titles': [toUnicode(movie.title)],
|
||||
'original_title': movie.originaltitle,
|
||||
'images': images,
|
||||
'imdb': movie.imdb,
|
||||
'runtime': movie.runtime,
|
||||
'released': str(movie.releasedate),
|
||||
'year': tryInt(year, None),
|
||||
'plot': movie.overview,
|
||||
'genres': genres,
|
||||
'collection': getattr(movie.collection, 'name', None),
|
||||
'actor_roles': actors
|
||||
}
|
||||
|
||||
movie_data = {
|
||||
'type': 'movie',
|
||||
'via_tmdb': True,
|
||||
'tmdb_id': movie.get('id'),
|
||||
'titles': [toUnicode(movie.get('title'))],
|
||||
'original_title': movie.get('original_title'),
|
||||
'images': images,
|
||||
'imdb': movie.get('imdb_id'),
|
||||
'runtime': movie.get('runtime'),
|
||||
'released': str(movie.get('release_date')),
|
||||
'year': tryInt(year, None),
|
||||
'plot': movie.get('overview'),
|
||||
'genres': genres,
|
||||
'collection': getattr(movie.get('belongs_to_collection'), 'name', None),
|
||||
'actor_roles': actors
|
||||
}
|
||||
movie_data = dict((k, v) for k, v in movie_data.items() if v)
|
||||
|
||||
movie_data = dict((k, v) for k, v in movie_data.items() if v)
|
||||
# Add alternative names
|
||||
if movie_data['original_title'] and movie_data['original_title'] not in movie_data['titles']:
|
||||
movie_data['titles'].append(movie_data['original_title'])
|
||||
|
||||
# Add alternative names
|
||||
if movie_data['original_title'] and movie_data['original_title'] not in movie_data['titles']:
|
||||
movie_data['titles'].append(movie_data['original_title'])
|
||||
if extended:
|
||||
for alt in movie.alternate_titles:
|
||||
alt_name = alt.title
|
||||
if alt_name and alt_name not in movie_data['titles'] and alt_name.lower() != 'none' and alt_name is not None:
|
||||
movie_data['titles'].append(alt_name)
|
||||
|
||||
# Add alternative titles
|
||||
alternate_titles = movie.get('alternative_titles', {}).get('titles', [])
|
||||
|
||||
for alt in alternate_titles:
|
||||
alt_name = alt.get('title')
|
||||
if alt_name and alt_name not in movie_data['titles'] and alt_name.lower() != 'none' and alt_name is not None:
|
||||
movie_data['titles'].append(alt_name)
|
||||
# Cache movie parsed
|
||||
self.setCache(cache_key, movie_data)
|
||||
|
||||
return movie_data
|
||||
|
||||
@@ -172,41 +171,36 @@ class TheMovieDb(MovieProvider):
|
||||
|
||||
image_url = ''
|
||||
try:
|
||||
path = movie.get('%s_path' % type)
|
||||
image_url = '%s%s%s' % (self.configuration['images']['secure_base_url'], size, path)
|
||||
image_url = getattr(movie, type).geturl(size = size)
|
||||
except:
|
||||
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))
|
||||
|
||||
return image_url
|
||||
|
||||
def getMultImages(self, movie, type = 'backdrops', size = 'original'):
|
||||
def getMultImages(self, movie, type = 'backdrops', size = 'original', n = -1, skipfirst = False):
|
||||
"""
|
||||
If n < 0, return all images. Otherwise return n images.
|
||||
If n > len(getattr(movie, type)), then return all images.
|
||||
If skipfirst is True, then it will skip getattr(movie, type)[0]. This
|
||||
is because backdrops[0] is typically backdrop.
|
||||
"""
|
||||
|
||||
image_urls = []
|
||||
try:
|
||||
for image in movie.get('images', {}).get(type, [])[1:5]:
|
||||
image_urls.append(self.getImage(image, 'file', size))
|
||||
images = getattr(movie, type)
|
||||
if n < 0 or n > len(images):
|
||||
num_images = len(images)
|
||||
else:
|
||||
num_images = n
|
||||
|
||||
for i in range(int(skipfirst), num_images + int(skipfirst)):
|
||||
image_urls.append(images[i].geturl(size = size))
|
||||
|
||||
except:
|
||||
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))
|
||||
log.debug('Failed getting %i %s.%s for "%s"', (n, type, size, ss(str(movie))))
|
||||
|
||||
return image_urls
|
||||
|
||||
def request(self, call = '', params = {}, return_key = None):
|
||||
|
||||
params = dict((k, v) for k, v in params.items() if v)
|
||||
params = tryUrlencode(params)
|
||||
|
||||
try:
|
||||
url = 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.conf('api_key'), '&%s' % params if params else '')
|
||||
data = self.getJsonData(url, show_error = False)
|
||||
except:
|
||||
log.debug('Movie not found: %s, %s', (call, params))
|
||||
data = None
|
||||
|
||||
if data and return_key and return_key in data:
|
||||
data = data.get(return_key)
|
||||
|
||||
return data
|
||||
|
||||
def isDisabled(self):
|
||||
if self.conf('api_key') == '':
|
||||
log.error('No API key provided.')
|
||||
|
||||
2
couchpotato/core/media/movie/providers/metadata/base.py
Executable file → Normal file
2
couchpotato/core/media/movie/providers/metadata/base.py
Executable file → Normal file
@@ -28,7 +28,7 @@ class MovieMetaData(MetaDataBase):
|
||||
|
||||
# Update library to get latest info
|
||||
try:
|
||||
group['media'] = fireEvent('movie.update', group['media'].get('_id'), identifier = getIdentifier(group['media']), extended = True, single = True)
|
||||
group['media'] = fireEvent('movie.update_info', group['media'].get('_id'), identifier = getIdentifier(group['media']), extended = True, single = True)
|
||||
except:
|
||||
log.error('Failed to update movie, before creating metadata: %s', traceback.format_exc())
|
||||
|
||||
|
||||
30
couchpotato/core/media/movie/providers/nzb/nzbindex.py
Normal file
30
couchpotato/core/media/movie/providers/nzb/nzbindex.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.nzb.nzbindex import Base
|
||||
from couchpotato.core.media.movie.providers.base import MovieProvider
|
||||
from couchpotato.environment import Env
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NzbIndex'
|
||||
|
||||
|
||||
class NzbIndex(MovieProvider, Base):
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
title = fireEvent('library.query', media, include_year = False, single = True)
|
||||
year = media['info']['year']
|
||||
|
||||
query = tryUrlencode({
|
||||
'q': '"%s %s" | "%s (%s)"' % (title, year, title, year),
|
||||
'age': Env.setting('retention', 'nzb'),
|
||||
'sort': 'agedesc',
|
||||
'minsize': quality.get('size_min'),
|
||||
'maxsize': quality.get('size_max'),
|
||||
'rating': 1,
|
||||
'max': 250,
|
||||
'more': 1,
|
||||
'complete': 1,
|
||||
})
|
||||
return query
|
||||
@@ -13,7 +13,7 @@ class IPTorrents(MovieProvider, Base):
|
||||
([87], ['3d']),
|
||||
([48], ['720p', '1080p', 'bd50']),
|
||||
([72], ['cam', 'ts', 'tc', 'r5', 'scr']),
|
||||
([7,48], ['dvdrip', 'brrip']),
|
||||
([7], ['dvdrip', 'brrip']),
|
||||
([6], ['dvdr']),
|
||||
]
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ class PassThePopcorn(MovieProvider, Base):
|
||||
'bd50': {'media': 'Blu-ray', 'format': 'BD50'},
|
||||
'1080p': {'resolution': '1080p'},
|
||||
'720p': {'resolution': '720p'},
|
||||
'brrip': {'resolution': 'anyhd'},
|
||||
'brrip': {'media': 'Blu-ray'},
|
||||
'dvdr': {'resolution': 'anysd'},
|
||||
'dvdrip': {'media': 'DVD'},
|
||||
'scr': {'media': 'DVD-Screener'},
|
||||
@@ -27,7 +27,7 @@ class PassThePopcorn(MovieProvider, Base):
|
||||
'bd50': {'Codec': ['BD50']},
|
||||
'1080p': {'Resolution': ['1080p']},
|
||||
'720p': {'Resolution': ['720p']},
|
||||
'brrip': {'Quality': ['High Definition'], 'Container': ['!ISO']},
|
||||
'brrip': {'Source': ['Blu-ray'], 'Quality': ['High Definition'], 'Container': ['!ISO']},
|
||||
'dvdr': {'Codec': ['DVD5', 'DVD9']},
|
||||
'dvdrip': {'Source': ['DVD'], 'Codec': ['!DVD5', '!DVD9']},
|
||||
'scr': {'Source': ['DVD-Screener']},
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.torrentleech import Base
|
||||
from couchpotato.core.media.movie.providers.base import MovieProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentLeech'
|
||||
|
||||
|
||||
class TorrentLeech(MovieProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([13], ['720p', '1080p']),
|
||||
([8], ['cam']),
|
||||
([9], ['ts', 'tc']),
|
||||
([10], ['r5', 'scr']),
|
||||
([11], ['dvdrip']),
|
||||
([14], ['brrip']),
|
||||
([12], ['dvdr']),
|
||||
]
|
||||
|
||||
def buildUrl(self, title, media, quality):
|
||||
return (
|
||||
tryUrlencode(title.replace(':', '')),
|
||||
self.getCatId(quality)[0]
|
||||
)
|
||||
@@ -3,7 +3,7 @@ import re
|
||||
|
||||
from bs4 import SoupStrainer, BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.helpers.variable import mergeDicts, getTitle, getIdentifier
|
||||
from couchpotato.core.helpers.variable import mergeDicts, getTitle
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.movie.providers.trailer.base import TrailerProvider
|
||||
from requests import HTTPError
|
||||
@@ -29,7 +29,7 @@ class HDTrailers(TrailerProvider):
|
||||
|
||||
url = self.urls['api'] % self.movieUrlName(movie_name)
|
||||
try:
|
||||
data = self.getCache('hdtrailers.%s' % getIdentifier(group), url, show_error = False)
|
||||
data = self.getCache('hdtrailers.%s' % group['identifier'], url, show_error = False)
|
||||
except HTTPError:
|
||||
log.debug('No page found for: %s', movie_name)
|
||||
data = None
|
||||
@@ -59,7 +59,7 @@ class HDTrailers(TrailerProvider):
|
||||
|
||||
url = "%s?%s" % (self.urls['backup'], tryUrlencode({'s':movie_name}))
|
||||
try:
|
||||
data = self.getCache('hdtrailers.alt.%s' % getIdentifier(group), url, show_error = False)
|
||||
data = self.getCache('hdtrailers.alt.%s' % group['identifier'], url, show_error = False)
|
||||
except HTTPError:
|
||||
log.debug('No alternative page found for: %s', movie_name)
|
||||
data = None
|
||||
@@ -68,7 +68,7 @@ class HDTrailers(TrailerProvider):
|
||||
return results
|
||||
|
||||
try:
|
||||
html = BeautifulSoup(data, parse_only = self.only_tables_tags)
|
||||
html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)
|
||||
result_table = html.find_all('h2', text = re.compile(movie_name))
|
||||
|
||||
for h2 in result_table:
|
||||
@@ -90,7 +90,7 @@ class HDTrailers(TrailerProvider):
|
||||
|
||||
results = {'480p':[], '720p':[], '1080p':[]}
|
||||
try:
|
||||
html = BeautifulSoup(data, parse_only = self.only_tables_tags)
|
||||
html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)
|
||||
result_table = html.find('table', attrs = {'class':'bottomTable'})
|
||||
|
||||
for tr in result_table.find_all('tr'):
|
||||
|
||||
@@ -25,6 +25,6 @@ class Filmstarts(UserscriptBase):
|
||||
name = html.find("meta", {"property":"og:title"})['content']
|
||||
|
||||
# Year of production is not available in the meta data, so get it from the table
|
||||
year = table.find(text="Produktionsjahr").parent.parent.next_sibling.text
|
||||
year = table.find("tr", text="Produktionsjahr").parent.parent.parent.td.text
|
||||
|
||||
return self.search(name, year)
|
||||
return self.search(name, year)
|
||||
60
couchpotato/core/media/movie/searcher.py
Executable file → Normal file
60
couchpotato/core/media/movie/searcher.py
Executable file → Normal file
@@ -74,7 +74,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
self.in_progress = True
|
||||
fireEvent('notify.frontend', type = 'movie.searcher.started', data = True, message = 'Full search started')
|
||||
|
||||
medias = [x['_id'] for x in fireEvent('media.with_status', 'active', types = 'movie', with_doc = False, single = True)]
|
||||
medias = [x['_id'] for x in fireEvent('media.with_status', 'active', with_doc = False, single = True)]
|
||||
random.shuffle(medias)
|
||||
|
||||
total = len(medias)
|
||||
@@ -89,13 +89,12 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
for media_id in medias:
|
||||
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if not media: continue
|
||||
|
||||
try:
|
||||
self.single(media, search_protocols, manual = manual)
|
||||
except IndexError:
|
||||
log.error('Forcing library update for %s, if you see this often, please report: %s', (getIdentifier(media), traceback.format_exc()))
|
||||
fireEvent('movie.update', media_id)
|
||||
fireEvent('movie.update_info', media_id)
|
||||
except:
|
||||
log.error('Search failed for %s: %s', (getIdentifier(media), traceback.format_exc()))
|
||||
|
||||
@@ -141,17 +140,17 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
previous_releases = movie.get('releases', [])
|
||||
too_early_to_search = []
|
||||
outside_eta_results = 0
|
||||
always_search = self.conf('always_search')
|
||||
alway_search = self.conf('always_search')
|
||||
ignore_eta = manual
|
||||
total_result_count = 0
|
||||
|
||||
fireEvent('notify.frontend', type = 'movie.searcher.started', data = {'_id': movie['_id']}, message = 'Searching for "%s"' % default_title)
|
||||
|
||||
# Ignore eta once every 7 days
|
||||
if not always_search:
|
||||
if not alway_search:
|
||||
prop_name = 'last_ignored_eta.%s' % movie['_id']
|
||||
last_ignored_eta = float(Env.prop(prop_name, default = 0))
|
||||
if last_ignored_eta < time.time() - 604800:
|
||||
if last_ignored_eta > time.time() - 604800:
|
||||
ignore_eta = True
|
||||
Env.prop(prop_name, value = time.time())
|
||||
|
||||
@@ -166,12 +165,11 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
'quality': q_identifier,
|
||||
'finish': profile['finish'][index],
|
||||
'wait_for': tryInt(profile['wait_for'][index]),
|
||||
'3d': profile['3d'][index] if profile.get('3d') else False,
|
||||
'minimum_score': profile.get('minimum_score', 1),
|
||||
'3d': profile['3d'][index] if profile.get('3d') else False
|
||||
}
|
||||
|
||||
could_not_be_released = not self.couldBeReleased(q_identifier in pre_releases, release_dates, movie['info']['year'])
|
||||
if not always_search and could_not_be_released:
|
||||
if not alway_search and could_not_be_released:
|
||||
too_early_to_search.append(q_identifier)
|
||||
|
||||
# Skip release, if ETA isn't ignored
|
||||
@@ -197,20 +195,13 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
break
|
||||
|
||||
quality = fireEvent('quality.single', identifier = q_identifier, single = True)
|
||||
log.info('Search for %s in %s%s', (default_title, quality['label'], ' ignoring ETA' if always_search or ignore_eta else ''))
|
||||
log.info('Search for %s in %s%s', (default_title, quality['label'], ' ignoring ETA' if alway_search or ignore_eta else ''))
|
||||
|
||||
# Extend quality with profile customs
|
||||
quality['custom'] = quality_custom
|
||||
|
||||
results = fireEvent('searcher.search', search_protocols, movie, quality, single = True) or []
|
||||
|
||||
# Check if movie isn't deleted while searching
|
||||
if not fireEvent('media.get', movie.get('_id'), single = True):
|
||||
break
|
||||
|
||||
# Add them to this movie releases list
|
||||
found_releases += fireEvent('release.create_from_search', results, movie, quality, single = True)
|
||||
results_count = len(found_releases)
|
||||
results_count = len(results)
|
||||
total_result_count += results_count
|
||||
if results_count == 0:
|
||||
log.debug('Nothing found for %s in %s', (default_title, quality['label']))
|
||||
@@ -218,12 +209,20 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
# Keep track of releases found outside ETA window
|
||||
outside_eta_results += results_count if could_not_be_released else 0
|
||||
|
||||
# Check if movie isn't deleted while searching
|
||||
if not fireEvent('media.get', movie.get('_id'), single = True):
|
||||
break
|
||||
|
||||
# Add them to this movie releases list
|
||||
found_releases += fireEvent('release.create_from_search', results, movie, quality, single = True)
|
||||
|
||||
# Don't trigger download, but notify user of available releases
|
||||
if could_not_be_released and results_count > 0:
|
||||
log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title))
|
||||
if could_not_be_released:
|
||||
if results_count > 0:
|
||||
log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title))
|
||||
|
||||
# Try find a valid result and download it
|
||||
if (force_download or not could_not_be_released or always_search) and fireEvent('release.try_download_result', results, movie, quality_custom, single = True):
|
||||
if (force_download or not could_not_be_released or alway_search) and fireEvent('release.try_download_result', results, movie, quality_custom, single = True):
|
||||
ret = True
|
||||
|
||||
# Remove releases that aren't found anymore
|
||||
@@ -241,7 +240,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
break
|
||||
|
||||
if total_result_count > 0:
|
||||
fireEvent('media.tag', movie['_id'], 'recent', update_edited = True, single = True)
|
||||
fireEvent('media.tag', movie['_id'], 'recent', single = True)
|
||||
|
||||
if len(too_early_to_search) > 0:
|
||||
log.info2('Too early to search for %s, %s', (too_early_to_search, default_title))
|
||||
@@ -278,7 +277,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
|
||||
# Contains lower quality string
|
||||
contains_other = fireEvent('searcher.contains_other_quality', nzb, movie_year = media['info']['year'], preferred_quality = preferred_quality, single = True)
|
||||
if contains_other and isinstance(contains_other, dict):
|
||||
if contains_other != False:
|
||||
log.info2('Wrong: %s, looking for %s, found %s', (nzb['name'], quality['label'], [x for x in contains_other] if contains_other else 'no quality'))
|
||||
return False
|
||||
|
||||
@@ -382,17 +381,16 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
def tryNextRelease(self, media_id, manual = False, force_download = False):
|
||||
|
||||
try:
|
||||
|
||||
rels = fireEvent('release.for_media', media_id, single = True)
|
||||
db = get_db()
|
||||
rels = fireEvent('media.with_status', ['snatched', 'done'], single = True)
|
||||
|
||||
for rel in rels:
|
||||
if rel.get('status') in ['snatched', 'done']:
|
||||
fireEvent('release.update_status', rel.get('_id'), status = 'ignored')
|
||||
rel['status'] = 'ignored'
|
||||
db.update(rel)
|
||||
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if media:
|
||||
log.info('Trying next release for: %s', getTitle(media))
|
||||
self.single(media, manual = manual, force_download = force_download)
|
||||
movie_dict = fireEvent('media.get', media_id, single = True)
|
||||
log.info('Trying next release for: %s', getTitle(movie_dict))
|
||||
self.single(movie_dict, manual = manual, force_download = force_download)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
2
couchpotato/core/media/movie/suggestion/main.py
Executable file → Normal file
2
couchpotato/core/media/movie/suggestion/main.py
Executable file → Normal file
@@ -27,7 +27,7 @@ class Suggestion(Plugin):
|
||||
else:
|
||||
|
||||
if not movies or len(movies) == 0:
|
||||
active_movies = fireEvent('media.with_status', ['active', 'done'], types = 'movie', single = True)
|
||||
active_movies = fireEvent('media.with_status', ['active', 'done'], single = True)
|
||||
movies = [getIdentifier(x) for x in active_movies]
|
||||
|
||||
if not ignored or len(ignored) == 0:
|
||||
|
||||
@@ -2,8 +2,6 @@ var SuggestList = new Class({
|
||||
|
||||
Implements: [Options, Events],
|
||||
|
||||
shown_once: false,
|
||||
|
||||
initialize: function(options){
|
||||
var self = this;
|
||||
self.setOptions(options);
|
||||
@@ -46,13 +44,12 @@ var SuggestList = new Class({
|
||||
}
|
||||
});
|
||||
|
||||
var cookie_menu_select = Cookie.read('suggestions_charts_menu_selected') || 'suggestions';
|
||||
if( cookie_menu_select === 'suggestions')
|
||||
self.show();
|
||||
else
|
||||
self.hide();
|
||||
var cookie_menu_select = Cookie.read('suggestions_charts_menu_selected');
|
||||
if( cookie_menu_select === 'suggestions' || cookie_menu_select === null ) self.el.show(); else self.el.hide();
|
||||
|
||||
self.fireEvent('created');
|
||||
self.api_request = Api.request('suggestion.view', {
|
||||
'onComplete': self.fill.bind(self)
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
@@ -148,24 +145,6 @@ var SuggestList = new Class({
|
||||
|
||||
},
|
||||
|
||||
show: function(){
|
||||
var self = this;
|
||||
|
||||
self.el.show();
|
||||
|
||||
if(!self.shown_once){
|
||||
self.api_request = Api.request('suggestion.view', {
|
||||
'onComplete': self.fill.bind(self)
|
||||
});
|
||||
|
||||
self.shown_once = true;
|
||||
}
|
||||
},
|
||||
|
||||
hide: function(){
|
||||
this.el.hide();
|
||||
},
|
||||
|
||||
toElement: function(){
|
||||
return this.el;
|
||||
}
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
class ShowTypeBase(MediaBase):
|
||||
_type = 'show'
|
||||
|
||||
def getType(self):
|
||||
if hasattr(self, 'type') and self.type != self._type:
|
||||
return '%s.%s' % (self._type, self.type)
|
||||
|
||||
return self._type
|
||||
@@ -1,4 +0,0 @@
|
||||
from .main import ShowBase
|
||||
|
||||
def autoload():
|
||||
return ShowBase()
|
||||
@@ -1,109 +0,0 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Episode'
|
||||
|
||||
|
||||
class Episode(MediaBase):
|
||||
|
||||
def __init__(self):
|
||||
addEvent('show.episode.add', self.add)
|
||||
addEvent('show.episode.update', self.update)
|
||||
addEvent('show.episode.update_extras', self.updateExtras)
|
||||
|
||||
def add(self, parent_id, info = None, update_after = True, status = None):
|
||||
if not info: info = {}
|
||||
|
||||
identifiers = info.pop('identifiers', None)
|
||||
|
||||
if not identifiers:
|
||||
log.warning('Unable to add episode, missing identifiers (info provider mismatch?)')
|
||||
return
|
||||
|
||||
# Add Season
|
||||
episode_info = {
|
||||
'_t': 'media',
|
||||
'type': 'show.episode',
|
||||
'identifiers': identifiers,
|
||||
'status': status if status else 'active',
|
||||
'parent_id': parent_id,
|
||||
'info': info, # Returned dict by providers
|
||||
}
|
||||
|
||||
# Check if season already exists
|
||||
existing_episode = fireEvent('media.with_identifiers', identifiers, with_doc = True, single = True)
|
||||
|
||||
db = get_db()
|
||||
|
||||
if existing_episode:
|
||||
s = existing_episode['doc']
|
||||
s.update(episode_info)
|
||||
|
||||
episode = db.update(s)
|
||||
else:
|
||||
episode = db.insert(episode_info)
|
||||
|
||||
# Update library info
|
||||
if update_after is not False:
|
||||
handle = fireEventAsync if update_after is 'async' else fireEvent
|
||||
handle('show.episode.update_extras', episode, info, store = True, single = True)
|
||||
|
||||
return episode
|
||||
|
||||
def update(self, media_id = None, identifiers = None, info = None):
|
||||
if not info: info = {}
|
||||
|
||||
if self.shuttingDown():
|
||||
return
|
||||
|
||||
db = get_db()
|
||||
|
||||
episode = db.get('id', media_id)
|
||||
|
||||
# Get new info
|
||||
if not info:
|
||||
season = db.get('id', episode['parent_id'])
|
||||
show = db.get('id', season['parent_id'])
|
||||
|
||||
info = fireEvent(
|
||||
'episode.info', show.get('identifiers'), {
|
||||
'season_identifiers': season.get('identifiers'),
|
||||
'season_number': season.get('info', {}).get('number'),
|
||||
|
||||
'episode_identifiers': episode.get('identifiers'),
|
||||
'episode_number': episode.get('info', {}).get('number'),
|
||||
|
||||
'absolute_number': episode.get('info', {}).get('absolute_number')
|
||||
},
|
||||
merge = True
|
||||
)
|
||||
|
||||
info['season_number'] = season.get('info', {}).get('number')
|
||||
|
||||
identifiers = info.pop('identifiers', None) or identifiers
|
||||
|
||||
# Update/create media
|
||||
episode['identifiers'].update(identifiers)
|
||||
episode.update({'info': info})
|
||||
|
||||
self.updateExtras(episode, info)
|
||||
|
||||
db.update(episode)
|
||||
return episode
|
||||
|
||||
def updateExtras(self, episode, info, store=False):
|
||||
db = get_db()
|
||||
|
||||
# Get images
|
||||
image_urls = info.get('images', [])
|
||||
existing_files = episode.get('files', {})
|
||||
self.getPoster(image_urls, existing_files)
|
||||
|
||||
if store:
|
||||
db.update(episode)
|
||||
@@ -1,291 +0,0 @@
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from couchpotato import get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
||||
from couchpotato.core.helpers.variable import getTitle, find
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class ShowBase(MediaBase):
|
||||
|
||||
_type = 'show'
|
||||
|
||||
def __init__(self):
|
||||
super(ShowBase, self).__init__()
|
||||
self.initType()
|
||||
|
||||
addApiView('show.add', self.addView, docs = {
|
||||
'desc': 'Add new show to the wanted list',
|
||||
'params': {
|
||||
'identifier': {'desc': 'IMDB id of the show your want to add.'},
|
||||
'profile_id': {'desc': 'ID of quality profile you want the add the show in. If empty will use the default profile.'},
|
||||
'category_id': {'desc': 'ID of category you want the add the show in.'},
|
||||
'title': {'desc': 'Title of the show to use for search and renaming'},
|
||||
}
|
||||
})
|
||||
|
||||
addEvent('show.add', self.add)
|
||||
addEvent('show.update', self.update)
|
||||
addEvent('show.update_extras', self.updateExtras)
|
||||
|
||||
def addView(self, **kwargs):
|
||||
add_dict = self.add(params = kwargs)
|
||||
|
||||
return {
|
||||
'success': True if add_dict else False,
|
||||
'show': add_dict,
|
||||
}
|
||||
|
||||
def add(self, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):
|
||||
if not params: params = {}
|
||||
|
||||
# Identifiers
|
||||
if not params.get('identifiers'):
|
||||
msg = 'Can\'t add show without at least 1 identifier.'
|
||||
log.error(msg)
|
||||
fireEvent('notify.frontend', type = 'show.no_identifier', message = msg)
|
||||
return False
|
||||
|
||||
info = params.get('info')
|
||||
if not info or (info and len(info.get('titles', [])) == 0):
|
||||
info = fireEvent('show.info', merge = True, identifiers = params.get('identifiers'))
|
||||
|
||||
# Add Show
|
||||
try:
|
||||
m, added = self.create(info, params, force_readd, search_after, update_after)
|
||||
|
||||
result = fireEvent('media.get', m['_id'], single = True)
|
||||
|
||||
if added and notify_after:
|
||||
if params.get('title'):
|
||||
message = 'Successfully added "%s" to your wanted list.' % params.get('title', '')
|
||||
else:
|
||||
title = getTitle(m)
|
||||
if title:
|
||||
message = 'Successfully added "%s" to your wanted list.' % title
|
||||
else:
|
||||
message = 'Successfully added to your wanted list.'
|
||||
|
||||
fireEvent('notify.frontend', type = 'show.added', data = result, message = message)
|
||||
|
||||
return result
|
||||
except:
|
||||
log.error('Failed adding media: %s', traceback.format_exc())
|
||||
|
||||
def create(self, info, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):
|
||||
# Set default title
|
||||
def_title = self.getDefaultTitle(info)
|
||||
|
||||
# Default profile and category
|
||||
default_profile = {}
|
||||
if not params.get('profile_id'):
|
||||
default_profile = fireEvent('profile.default', single = True)
|
||||
|
||||
cat_id = params.get('category_id')
|
||||
|
||||
media = {
|
||||
'_t': 'media',
|
||||
'type': 'show',
|
||||
'title': def_title,
|
||||
'identifiers': info.get('identifiers'),
|
||||
'status': status if status else 'active',
|
||||
'profile_id': params.get('profile_id', default_profile.get('_id')),
|
||||
'category_id': cat_id if cat_id is not None and len(cat_id) > 0 and cat_id != '-1' else None
|
||||
}
|
||||
|
||||
identifiers = info.pop('identifiers', {})
|
||||
seasons = info.pop('seasons', {})
|
||||
|
||||
# Update media with info
|
||||
self.updateInfo(media, info)
|
||||
|
||||
existing_show = fireEvent('media.with_identifiers', params.get('identifiers'), with_doc = True)
|
||||
|
||||
db = get_db()
|
||||
|
||||
if existing_show:
|
||||
s = existing_show['doc']
|
||||
s.update(media)
|
||||
|
||||
show = db.update(s)
|
||||
else:
|
||||
show = db.insert(media)
|
||||
|
||||
# Update dict to be usable
|
||||
show.update(media)
|
||||
|
||||
added = True
|
||||
do_search = False
|
||||
search_after = search_after and self.conf('search_on_add', section = 'showsearcher')
|
||||
onComplete = None
|
||||
|
||||
if existing_show:
|
||||
if search_after:
|
||||
onComplete = self.createOnComplete(show['_id'])
|
||||
|
||||
search_after = False
|
||||
elif force_readd:
|
||||
# Clean snatched history
|
||||
for release in fireEvent('release.for_media', show['_id'], single = True):
|
||||
if release.get('status') in ['downloaded', 'snatched', 'done']:
|
||||
if params.get('ignore_previous', False):
|
||||
release['status'] = 'ignored'
|
||||
db.update(release)
|
||||
else:
|
||||
fireEvent('release.delete', release['_id'], single = True)
|
||||
|
||||
show['profile_id'] = params.get('profile_id', default_profile.get('id'))
|
||||
show['category_id'] = media.get('category_id')
|
||||
show['last_edit'] = int(time.time())
|
||||
|
||||
do_search = True
|
||||
db.update(show)
|
||||
else:
|
||||
params.pop('info', None)
|
||||
log.debug('Show already exists, not updating: %s', params)
|
||||
added = False
|
||||
|
||||
# Create episodes
|
||||
self.createEpisodes(show, seasons)
|
||||
|
||||
# Trigger update info
|
||||
if added and update_after:
|
||||
# Do full update to get images etc
|
||||
fireEventAsync('show.update_extras', show.copy(), info, store = True, on_complete = onComplete)
|
||||
|
||||
# Remove releases
|
||||
for rel in fireEvent('release.for_media', show['_id'], single = True):
|
||||
if rel['status'] is 'available':
|
||||
db.delete(rel)
|
||||
|
||||
if do_search and search_after:
|
||||
onComplete = self.createOnComplete(show['_id'])
|
||||
onComplete()
|
||||
|
||||
return show, added
|
||||
|
||||
def createEpisodes(self, m, seasons_info):
|
||||
# Add Seasons
|
||||
for season_nr in seasons_info:
|
||||
season_info = seasons_info[season_nr]
|
||||
episodes = season_info.get('episodes', {})
|
||||
|
||||
season = fireEvent('show.season.add', m.get('_id'), season_info, update_after = False, single = True)
|
||||
|
||||
# Add Episodes
|
||||
for episode_nr in episodes:
|
||||
episode_info = episodes[episode_nr]
|
||||
episode_info['season_number'] = season_nr
|
||||
|
||||
fireEvent('show.episode.add', season.get('_id'), episode_info, update_after = False, single = True)
|
||||
|
||||
def update(self, media_id = None, media = None, identifiers = None, info = None):
|
||||
"""
|
||||
Update movie information inside media['doc']['info']
|
||||
|
||||
@param media_id: document id
|
||||
@param identifiers: identifiers from multiple providers
|
||||
{
|
||||
'thetvdb': 123,
|
||||
'imdb': 'tt123123',
|
||||
..
|
||||
}
|
||||
@param extended: update with extended info (parses more info, actors, images from some info providers)
|
||||
@return: dict, with media
|
||||
"""
|
||||
|
||||
if not info: info = {}
|
||||
if not identifiers: identifiers = {}
|
||||
|
||||
db = get_db()
|
||||
|
||||
if self.shuttingDown():
|
||||
return
|
||||
|
||||
if media is None and media_id:
|
||||
media = db.get('id', media_id)
|
||||
else:
|
||||
log.error('missing "media" and "media_id" parameters, unable to update')
|
||||
return
|
||||
|
||||
if not info:
|
||||
info = fireEvent('show.info', identifiers = media.get('identifiers'), merge = True)
|
||||
|
||||
try:
|
||||
identifiers = info.pop('identifiers', {})
|
||||
seasons = info.pop('seasons', {})
|
||||
|
||||
self.updateInfo(media, info)
|
||||
self.updateEpisodes(media, seasons)
|
||||
self.updateExtras(media, info)
|
||||
|
||||
db.update(media)
|
||||
return media
|
||||
except:
|
||||
log.error('Failed update media: %s', traceback.format_exc())
|
||||
|
||||
return {}
|
||||
|
||||
def updateInfo(self, media, info):
|
||||
db = get_db()
|
||||
|
||||
# Remove season info for later use (save separately)
|
||||
info.pop('in_wanted', None)
|
||||
info.pop('in_library', None)
|
||||
|
||||
if not info or len(info) == 0:
|
||||
log.error('Could not update, no show info to work with: %s', media.get('identifier'))
|
||||
return False
|
||||
|
||||
# Update basic info
|
||||
media['info'] = info
|
||||
|
||||
def updateEpisodes(self, media, seasons):
|
||||
# Fetch current season/episode tree
|
||||
show_tree = fireEvent('library.tree', media_id = media['_id'], single = True)
|
||||
|
||||
# Update seasons
|
||||
for season_num in seasons:
|
||||
season_info = seasons[season_num]
|
||||
episodes = season_info.get('episodes', {})
|
||||
|
||||
# Find season that matches number
|
||||
season = find(lambda s: s.get('info', {}).get('number', 0) == season_num, show_tree.get('seasons', []))
|
||||
|
||||
if not season:
|
||||
log.warning('Unable to find season "%s"', season_num)
|
||||
continue
|
||||
|
||||
# Update season
|
||||
fireEvent('show.season.update', season['_id'], info = season_info, single = True)
|
||||
|
||||
# Update episodes
|
||||
for episode_num in episodes:
|
||||
episode_info = episodes[episode_num]
|
||||
episode_info['season_number'] = season_num
|
||||
|
||||
# Find episode that matches number
|
||||
episode = find(lambda s: s.get('info', {}).get('number', 0) == episode_num, season.get('episodes', []))
|
||||
|
||||
if not episode:
|
||||
log.debug('Creating new episode %s in season %s', (episode_num, season_num))
|
||||
fireEvent('show.episode.add', season.get('_id'), episode_info, update_after = False, single = True)
|
||||
continue
|
||||
|
||||
fireEvent('show.episode.update', episode['_id'], info = episode_info, single = True)
|
||||
|
||||
def updateExtras(self, media, info, store=False):
    """Refresh artwork for *media* from provider *info*.

    :param media: media document (dict) to attach artwork to
    :param info: provider info dict; only its 'images' list is read
    :param store: when True, write the document back to the database
    """
    database = get_db()

    # Fetch/refresh the poster from the provider's image URLs.
    self.getPoster(media, info.get('images', []))

    # Persist only when explicitly requested by the caller.
    if store:
        database.update(media)
|
||||
@@ -1,94 +0,0 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Season'
|
||||
|
||||
|
||||
class Season(MediaBase):
    """Season media type: glue between the event bus and season documents.

    Registers the `show.season.*` events that create and refresh
    `show.season` media documents in the database.
    """

    def __init__(self):
        addEvent('show.season.add', self.add)
        addEvent('show.season.update', self.update)
        addEvent('show.season.update_extras', self.updateExtras)

    def add(self, parent_id, info = None, update_after = True, status = None):
        """Insert (or refresh) a season document under a show.

        :param parent_id: _id of the parent show document
        :param info: provider info dict; 'identifiers' and 'episodes'
            are stripped before storage (episodes get their own docs)
        :param update_after: False = skip the extras update, 'async' =
            fire it asynchronously, anything else = fire synchronously
        :param status: initial status, defaults to 'active'
        :return: the stored season document
        """
        if not info: info = {}

        identifiers = info.pop('identifiers', None)
        info.pop('episodes', None)

        # Add Season
        season_info = {
            '_t': 'media',
            'type': 'show.season',
            'identifiers': identifiers,
            'status': status if status else 'active',
            'parent_id': parent_id,
            'info': info,  # Returned dict by providers
        }

        # Check if season already exists
        existing_season = fireEvent('media.with_identifiers', identifiers, with_doc = True, single = True)

        db = get_db()

        if existing_season:
            s = existing_season['doc']
            s.update(season_info)

            season = db.update(s)
        else:
            season = db.insert(season_info)

        # Update library info. Fixed: compare the 'async' flag with ==
        # instead of `is`; identity checks against string literals are a
        # CPython implementation detail (SyntaxWarning on modern Python).
        if update_after is not False:
            handle = fireEventAsync if update_after == 'async' else fireEvent
            handle('show.season.update_extras', season, info, store = True, single = True)

        return season

    def update(self, media_id = None, identifiers = None, info = None):
        """Refresh a stored season document with (new) provider info.

        When no *info* is given it is fetched via the 'season.info'
        event using the parent show's identifiers.

        :return: the updated season document, or None while shutting down
        """
        if not info: info = {}

        if self.shuttingDown():
            return

        db = get_db()

        season = db.get('id', media_id)
        show = db.get('id', season['parent_id'])

        # Get new info when the caller did not supply any
        if not info:
            info = fireEvent('season.info', show.get('identifiers'), {
                'season_number': season.get('info', {}).get('number', 0)
            }, merge = True)

        identifiers = info.pop('identifiers', None) or identifiers
        info.pop('episodes', None)

        # Update/create media. Guard against a missing identifiers dict:
        # dict.update(None) would raise a TypeError when neither the
        # provider info nor the caller supplied identifiers.
        if identifiers:
            season['identifiers'].update(identifiers)
        season.update({'info': info})

        self.updateExtras(season, info)

        db.update(season)
        return season

    def updateExtras(self, season, info, store=False):
        """Refresh season artwork and optionally persist the document.

        :param store: when True, write the season document back to the db
        """
        db = get_db()

        # Get images
        image_urls = info.get('images', [])
        existing_files = season.get('files', {})
        self.getPoster(image_urls, existing_files)

        if store:
            db.update(season)
|
||||
@@ -1,28 +0,0 @@
|
||||
// Shows overview page: lazily builds the "wanted" show list on first visit.
Page.Shows = new Class({

	Extends: PageBase,

	name: 'shows',
	title: 'Gimmy gimmy gimmy!',
	folder_browser: null,

	indexAction: function(){
		var self = this;

		// The list is created once and reused on later visits.
		if(self.wanted) return;

		// Wanted movies
		self.wanted = new ShowList({
			'identifier': 'wanted',
			'status': 'active',
			'type': 'show',
			'actions': [MA.IMDB, MA.Trailer, MA.Release, MA.Edit, MA.Refresh, MA.Readd, MA.Delete],
			'add_new': true,
			'on_empty_element': App.createUserscriptButtons().addClass('empty_wanted')
		});

		$(self.wanted).inject(self.el);
	}

});
|
||||
@@ -1,474 +0,0 @@
|
||||
// Base class for the small per-episode action buttons (IMDB link,
// release list, refresh, ...). Subclasses implement create() and set
// this.el; an action without an element is simply not rendered.
var EpisodeAction = new Class({

	Implements: [Options],

	class_name: 'item-action icon2',

	initialize: function(episode, options){
		var self = this;

		self.setOptions(options);
		self.episode = episode;
		self.show = episode.show;

		// Let the subclass build its element, then tag it with the
		// shared action styling.
		self.create();
		if(self.el)
			self.el.addClass(self.class_name)
	},

	// Subclasses override this to build this.el.
	create: function(){},

	// Grey the action out.
	disable: function(){
		var self = this;
		if(self.el)
			self.el.addClass('disable')
	},

	// Re-enable a previously disabled action.
	enable: function(){
		var self = this;
		if(self.el)
			self.el.removeClass('disable')
	},

	// Best-effort title of the show this action belongs to.
	getTitle: function(){
		var self = this;

		try {
			return self.show.getTitle();
		}
		catch(e){
			try {
				return self.show.original_title || self.show.titles[0];
			}
			catch(e){
				return 'Unknown';
			}
		}
	},

	// Read a value from the show, falling back to a raw property.
	get: function(key){
		var self = this;

		try {
			return self.show.get(key)
		}
		catch(e){
			return self.show[key]
		}
	},

	// Overlay mask shown while an action is busy.
	createMask: function(){
		var self = this;

		self.mask = new Element('div.mask', {
			'styles': {
				'z-index': '1'
			}
		}).inject(self.show, 'top').fade('hide');
	},

	toElement: function(){
		return this.el || null
	}

});
|
||||
|
||||
// Namespace for the episode-level action classes (EA.IMDB, EA.Release, EA.Refresh, ...)
var EA = {};
|
||||
|
||||
// Action: external link to the show's IMDB page.
EA.IMDB = new Class({

	Extends: EpisodeAction,
	id: null,

	create: function(){
		var self = this;

		// Prefer the show's own identifier lookup, fall back to raw info.
		self.id = self.show.getIdentifier ? self.show.getIdentifier() : self.get('imdb');

		self.el = new Element('a.imdb', {
			'title': 'Go to the IMDB page of ' + self.getTitle(),
			'href': 'http://www.imdb.com/title/'+self.id+'/',
			'target': '_blank'
		});

		// Without an id the link would be dead, so grey it out.
		if(!self.id)
			self.disable();
	}

});
|
||||
|
||||
// Action: expandable list of releases found for this episode, with
// per-release download/ignore buttons and "try next" shortcuts.
EA.Release = new Class({

	Extends: EpisodeAction,

	create: function(){
		var self = this;

		self.el = new Element('a.releases.download', {
			'title': 'Show the releases that are available for ' + self.getTitle(),
			'events': {
				'click': self.toggle.bind(self)
			}
		});

		// NOTE(review): this overwrites the Options-mixin `options`
		// object set in initialize() with a DOM element — looks
		// intentional (the original code reads self.options as the
		// expandable container below), but worth confirming.
		self.options = new Element('div.episode-options').inject(self.episode.el);

		if(!self.episode.data.releases || self.episode.data.releases.length == 0)
			self.el.hide();
		else
			self.showHelper();

		// Rebuild the release list after a search for this show ends.
		App.on('show.searcher.ended', function(notification){
			if(self.show.data._id != notification.data._id) return;

			self.releases = null;
			if(self.options_container){
				self.options_container.destroy();
				self.options_container = null;
			}
		});

	},

	// Expand/collapse the release table.
	toggle: function(e){
		var self = this;

		if(self.options && self.options.hasClass('expanded')) {
			self.close();
		} else {
			self.open();
		}
	},

	open: function(e){
		var self = this;

		if(e)
			(e).preventDefault();

		self.createReleases();

	},

	close: function(e) {
		var self = this;

		if(e)
			(e).preventDefault();

		self.options.setStyle('height', 0)
			.removeClass('expanded');
	},

	// Build (once) and expand the release table.
	createReleases: function(){
		var self = this;

		if(!self.releases_table){
			self.options.adopt(
				self.releases_table = new Element('div.releases.table')
			);

			// Header
			new Element('div.item.head').adopt(
				new Element('span.name', {'text': 'Release name'}),
				new Element('span.status', {'text': 'Status'}),
				new Element('span.quality', {'text': 'Quality'}),
				new Element('span.size', {'text': 'Size'}),
				new Element('span.age', {'text': 'Age'}),
				new Element('span.score', {'text': 'Score'}),
				new Element('span.provider', {'text': 'Provider'})
			).inject(self.releases_table);

			if(self.episode.data.releases)
				self.episode.data.releases.each(function(release){

					var quality = Quality.getQuality(release.quality) || {},
						info = release.info || {},
						provider = self.get(release, 'provider') + (info['provider_extra'] ? self.get(release, 'provider_extra') : '');

					// Prefer the actual movie file name when one exists.
					var release_name = self.get(release, 'name');
					if(release.files && release.files.length > 0){
						try {
							var movie_file = release.files.filter(function(file){
								var type = File.Type.get(file.type_id);
								return type && type.identifier == 'movie'
							}).pick();
							release_name = movie_file.path.split(Api.getOption('path_sep')).getLast();
						}
						catch(e){}
					}

					// Create release row
					release['el'] = new Element('div', {
						'class': 'item '+release.status,
						'id': 'release_'+release._id
					}).adopt(
						new Element('span.name', {'text': release_name, 'title': release_name}),
						new Element('span.status', {'text': release.status, 'class': 'status '+release.status}),
						new Element('span.quality', {'text': quality.label + (release.is_3d ? ' 3D' : '') || 'n/a'}),
						new Element('span.size', {'text': info['size'] ? Math.floor(self.get(release, 'size')) : 'n/a'}),
						new Element('span.age', {'text': self.get(release, 'age')}),
						new Element('span.score', {'text': self.get(release, 'score')}),
						new Element('span.provider', { 'text': provider, 'title': provider }),
						info['detail_url'] ? new Element('a.info.icon2', {
							'href': info['detail_url'],
							'target': '_blank'
						}) : new Element('a'),
						new Element('a.download.icon2', {
							'events': {
								'click': function(e){
									(e).preventDefault();
									if(!this.hasClass('completed'))
										self.download(release);
								}
							}
						}),
						new Element('a.delete.icon2', {
							'events': {
								'click': function(e){
									(e).preventDefault();
									self.ignore(release);
								}
							}
						})
					).inject(self.releases_table);

					// Remember the most relevant "last tried" and "next
					// to try" releases for the shortcut buttons below.
					if(release.status == 'ignored' || release.status == 'failed' || release.status == 'snatched'){
						if(!self.last_release || (self.last_release && self.last_release.status != 'snatched' && release.status == 'snatched'))
							self.last_release = release;
					}
					else if(!self.next_release && release.status == 'available'){
						self.next_release = release;
					}

					// Keep this row (and the quality flag) in sync with
					// server-side status changes.
					var update_handle = function(notification) {
						if(notification.data._id != release._id) return;

						var q = self.show.quality.getElement('.q_' + release.quality),
							new_status = notification.data.status;

						release.el.set('class', 'item ' + new_status);

						// Fixed: the row is built with span.status (see
						// above), so look it up — and keep — that class;
						// the old '.release_status' selector never
						// matched and crashed on null.
						var status_el = release.el.getElement('.status');
						status_el.set('class', 'status ' + new_status);
						status_el.set('text', new_status);

						// NOTE(review): addQuality is not defined on this
						// action class — presumably it lives on the
						// episode/show object; verify before relying on
						// this branch.
						if(!q && (new_status == 'snatched' || new_status == 'seeding' || new_status == 'done'))
							q = self.addQuality(release.quality_id);

						if(q && !q.hasClass(new_status)) {
							q.removeClass(release.status).addClass(new_status);
							q.set('title', q.get('title').replace(release.status, new_status));
						}
					};

					App.on('release.update_status', update_handle);

				});

			if(self.last_release)
				self.releases_table.getElements('#release_'+self.last_release._id).addClass('last_release');

			if(self.next_release)
				self.releases_table.getElements('#release_'+self.next_release._id).addClass('next_release');

			// Fixed: Array.indexOf returns a number (-1 when missing),
			// never `false`; the old `=== false` comparison made this
			// branch depend on next_release only.
			if(self.next_release || (self.last_release && ['ignored', 'failed'].indexOf(self.last_release.status) === -1)){

				self.trynext_container = new Element('div.buttons.try_container').inject(self.releases_table, 'top');

				var nr = self.next_release,
					lr = self.last_release;

				self.trynext_container.adopt(
					new Element('span.or', {
						'text': 'If anything went wrong, download'
					}),
					lr ? new Element('a.button.orange', {
						'text': 'the same release again',
						'events': {
							'click': function(){
								self.download(lr);
							}
						}
					}) : null,
					nr && lr ? new Element('span.or', {
						'text': ','
					}) : null,
					nr ? [new Element('a.button.green', {
						'text': lr ? 'another release' : 'the best release',
						'events': {
							'click': function(){
								self.download(nr);
							}
						}
					}),
					new Element('span.or', {
						'text': 'or pick one below'
					})] : null
				)
			}

			self.last_release = null;
			self.next_release = null;

			// Collapse when clicking anywhere outside the episode row.
			self.episode.el.addEvent('outerClick', function(){
				self.close();
			});
		}

		self.options.setStyle('height', self.releases_table.getSize().y)
			.addClass('expanded');

	},

	// Show the "try next / pick one / mark done" shortcut buttons when
	// the episode has usable releases.
	showHelper: function(e){
		var self = this;
		if(e)
			(e).preventDefault();

		var has_available = false,
			has_snatched = false;

		if(self.episode.data.releases)
			self.episode.data.releases.each(function(release){
				if(has_available && has_snatched) return;

				if(['snatched', 'downloaded', 'seeding'].contains(release.status))
					has_snatched = true;

				if(['available'].contains(release.status))
					has_available = true;

			});

		if(has_available || has_snatched){

			self.trynext_container = new Element('div.buttons.trynext').inject(self.show.info_container);

			self.trynext_container.adopt(
				has_available ? [new Element('a.icon2.readd', {
					'text': has_snatched ? 'Download another release' : 'Download the best release',
					'events': {
						'click': self.tryNextRelease.bind(self)
					}
				}),
				new Element('a.icon2.download', {
					'text': 'pick one yourself',
					'events': {
						'click': function(){
							self.show.quality.fireEvent('click');
						}
					}
				})] : null,
				new Element('a.icon2.completed', {
					'text': 'mark this movie done',
					'events': {
						'click': self.markMovieDone.bind(self)
					}
				})
			)
		}

	},

	// Read a field from the release's info blob; 'n/a' when absent.
	// (Intentionally shadows EpisodeAction.get with a 2-arg variant.)
	get: function(release, type){
		return (release.info && release.info[type] !== undefined) ? release.info[type] : 'n/a'
	},

	// Ask the server to download a specific release, with spinner/result
	// feedback on the row's download icon.
	download: function(release){
		var self = this;

		var release_el = self.releases_table.getElement('#release_'+release._id),
			icon = release_el.getElement('.download.icon2');

		if(icon)
			icon.addClass('icon spinner').removeClass('download');

		Api.request('release.manual_download', {
			'data': {
				'id': release._id
			},
			'onComplete': function(json){
				if(icon)
					icon.removeClass('icon spinner');

				if(json.success){
					if(icon)
						icon.addClass('completed');
					// Fixed: rows carry span.status (see createReleases),
					// the old '.release_status' selector matched nothing.
					release_el.getElement('.status').set('text', 'snatched');
				}
				else
					if(icon)
						icon.addClass('attention').set('title', 'Something went wrong when downloading, please check logs.');
			}
		});
	},

	// Mark a release as ignored on the server (fire-and-forget).
	ignore: function(release){

		Api.request('release.ignore', {
			'data': {
				'id': release._id
			}
		})

	},

	// Remove the show from the wanted list and animate it away.
	markMovieDone: function(){
		var self = this;

		Api.request('media.delete', {
			'data': {
				'id': self.show.get('_id'),
				'delete_from': 'wanted'
			},
			'onComplete': function(){
				var movie = $(self.show);
				movie.set('tween', {
					'duration': 300,
					'onComplete': function(){
						self.show.destroy()
					}
				});
				movie.tween('height', 0);
			}
		});

	},

	// Ask the searcher to grab the next best release for this show.
	tryNextRelease: function(){
		var self = this;

		Api.request('movie.searcher.try_next', {
			'data': {
				'media_id': self.show.get('_id')
			}
		});

	}

});
|
||||
|
||||
// Action: trigger a forced info refresh + search for this episode.
EA.Refresh = new Class({

	Extends: EpisodeAction,

	create: function(){
		var self = this;

		self.el = new Element('a.refresh', {
			'title': 'Refresh the movie info and do a forced search',
			'events': {
				'click': self.doRefresh.bind(self)
			}
		});
	},

	doRefresh: function(e){
		var self = this;

		(e).preventDefault();

		// Fire-and-forget; the UI updates via pushed notifications.
		Api.request('media.refresh', {
			'data': {
				'id': self.episode.get('_id')
			}
		});
	}

});
|
||||
@@ -1,128 +0,0 @@
|
||||
// One row in a season's episode list: number, title, air date, quality
// flags and the per-episode action buttons (EA.*).
var Episode = new Class({

	Extends: BlockBase,

	action: {},

	initialize: function(show, options, data){
		var self = this;
		self.setOptions(options);

		self.show = show;
		self.options = options;
		self.data = data;

		// Quality profile is shared with the parent show.
		self.profile = self.show.profile;

		self.el = new Element('div.item.episode').adopt(
			self.detail = new Element('div.item.data')
		);

		self.create();
	},

	// Build the row's DOM, the profile quality flags and the actions.
	create: function(){
		var self = this;

		self.detail.set('id', 'episode_'+self.data._id);

		self.detail.adopt(
			new Element('span.episode', {'text': (self.data.info.number || 0)}),
			new Element('span.name', {'text': self.getTitle()}),
			new Element('span.firstaired', {'text': self.data.info.firstaired}),

			// Clicking the quality flags opens the release list (if shown).
			self.quality = new Element('span.quality', {
				'events': {
					'click': function(e){
						var releases = self.detail.getElement('.item-actions .releases');

						if(releases.isVisible())
							releases.fireEvent('click', [e])
					}
				}
			}),
			self.actions = new Element('div.item-actions')
		);

		// Add profile: one flag per quality in the show's profile
		if(self.profile.data) {
			self.profile.getTypes().each(function(type){
				var q = self.addQuality(type.get('quality'), type.get('3d'));

				if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
					q.addClass('finish');
					q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
				}
			});
		}

		// Add releases: colour the flags by existing release statuses
		self.updateReleases();

		// Instantiate the configured EA.* action classes; actions
		// without an element are tracked but not rendered.
		Object.each(self.options.actions, function(action, key){
			self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
			if(action.el)
				self.actions.adopt(action)
		});
	},

	// Reflect each release's status on the matching quality flag,
	// creating a flag for snatched/seeding/done qualities when missing.
	updateReleases: function(){
		var self = this;
		if(!self.data.releases || self.data.releases.length == 0) return;

		self.data.releases.each(function(release){

			var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
				status = release.status;

			if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
				q = self.addQuality(release.quality, release.is_3d || false);

			if (q && !q.hasClass(status)){
				q.addClass(status);
				q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
			}

		});
	},

	// Append one quality flag element and return it.
	addQuality: function(quality, is_3d){
		var self = this,
			q = Quality.getQuality(quality);

		return new Element('span', {
			'text': q.label + (is_3d ? ' 3D' : ''),
			'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
			'title': ''
		}).inject(self.quality);
	},

	// First known title, or a "Episode N" fallback.
	getTitle: function(){
		var self = this;

		var title = '';

		if(self.data.info.titles && self.data.info.titles.length > 0) {
			title = self.data.info.titles[0];
		} else {
			title = 'Episode ' + self.data.info.number;
		}

		return title;
	},

	// IMDB id from the identifiers map, falling back to a flat field.
	getIdentifier: function(){
		var self = this;

		try {
			return self.get('identifiers').imdb;
		}
		catch (e){ }

		return self.get('imdb');
	},

	// Read an attribute from the document, falling back to its info blob.
	get: function(attr){
		return this.data[attr] || this.data.info[attr]
	}
});
|
||||
@@ -1,636 +0,0 @@
|
||||
var ShowList = new Class({
|
||||
|
||||
Implements: [Events, Options],
|
||||
|
||||
options: {
|
||||
navigation: true,
|
||||
limit: 50,
|
||||
load_more: true,
|
||||
loader: true,
|
||||
menu: [],
|
||||
add_new: false,
|
||||
force_view: false
|
||||
},
|
||||
|
||||
movies: [],
|
||||
movies_added: {},
|
||||
total_movies: 0,
|
||||
letters: {},
|
||||
filter: null,
|
||||
|
||||
initialize: function(options){
|
||||
var self = this;
|
||||
self.setOptions(options);
|
||||
|
||||
self.offset = 0;
|
||||
self.filter = self.options.filter || {
|
||||
'starts_with': null,
|
||||
'search': null
|
||||
};
|
||||
|
||||
self.el = new Element('div.shows').adopt(
|
||||
self.title = self.options.title ? new Element('h2', {
|
||||
'text': self.options.title,
|
||||
'styles': {'display': 'none'}
|
||||
}) : null,
|
||||
self.description = self.options.description ? new Element('div.description', {
|
||||
'html': self.options.description,
|
||||
'styles': {'display': 'none'}
|
||||
}) : null,
|
||||
self.movie_list = new Element('div.list'),
|
||||
self.load_more = self.options.load_more ? new Element('a.load_more', {
|
||||
'events': {
|
||||
'click': self.loadMore.bind(self)
|
||||
}
|
||||
}) : null
|
||||
);
|
||||
|
||||
if($(window).getSize().x <= 480 && !self.options.force_view)
|
||||
self.changeView('list');
|
||||
else
|
||||
self.changeView(self.getSavedView() || self.options.view || 'details');
|
||||
|
||||
self.getMovies();
|
||||
|
||||
App.on('movie.added', self.movieAdded.bind(self));
|
||||
App.on('movie.deleted', self.movieDeleted.bind(self))
|
||||
},
|
||||
|
||||
movieDeleted: function(notification){
|
||||
var self = this;
|
||||
|
||||
if(self.movies_added[notification.data._id]){
|
||||
self.movies.each(function(movie){
|
||||
if(movie.get('_id') == notification.data._id){
|
||||
movie.destroy();
|
||||
delete self.movies_added[notification.data._id];
|
||||
self.setCounter(self.counter_count-1);
|
||||
self.total_movies--;
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
self.checkIfEmpty();
|
||||
},
|
||||
|
||||
movieAdded: function(notification){
|
||||
var self = this;
|
||||
|
||||
self.fireEvent('movieAdded', notification);
|
||||
if(self.options.add_new && !self.movies_added[notification.data._id] && notification.data.status == self.options.status){
|
||||
window.scroll(0,0);
|
||||
self.createShow(notification.data, 'top');
|
||||
self.setCounter(self.counter_count+1);
|
||||
|
||||
self.checkIfEmpty();
|
||||
}
|
||||
},
|
||||
|
||||
create: function(){
|
||||
var self = this;
|
||||
|
||||
// Create the alphabet nav
|
||||
if(self.options.navigation)
|
||||
self.createNavigation();
|
||||
|
||||
if(self.options.load_more)
|
||||
self.scrollspy = new ScrollSpy({
|
||||
min: function(){
|
||||
var c = self.load_more.getCoordinates();
|
||||
return c.top - window.document.getSize().y - 300
|
||||
},
|
||||
onEnter: self.loadMore.bind(self)
|
||||
});
|
||||
|
||||
self.created = true;
|
||||
},
|
||||
|
||||
addMovies: function(movies, total){
|
||||
var self = this;
|
||||
|
||||
if(!self.created) self.create();
|
||||
|
||||
// do scrollspy
|
||||
if(movies.length < self.options.limit && self.scrollspy){
|
||||
self.load_more.hide();
|
||||
self.scrollspy.stop();
|
||||
}
|
||||
|
||||
Object.each(movies, function(movie){
|
||||
self.createShow(movie);
|
||||
});
|
||||
|
||||
self.total_movies += total;
|
||||
self.setCounter(total);
|
||||
|
||||
},
|
||||
|
||||
setCounter: function(count){
|
||||
var self = this;
|
||||
|
||||
if(!self.navigation_counter) return;
|
||||
|
||||
self.counter_count = count;
|
||||
self.navigation_counter.set('text', (count || 0) + ' shows');
|
||||
|
||||
if (self.empty_message) {
|
||||
self.empty_message.destroy();
|
||||
self.empty_message = null;
|
||||
}
|
||||
|
||||
if(self.total_movies && count == 0 && !self.empty_message){
|
||||
var message = (self.filter.search ? 'for "'+self.filter.search+'"' : '') +
|
||||
(self.filter.starts_with ? ' in <strong>'+self.filter.starts_with+'</strong>' : '');
|
||||
|
||||
self.empty_message = new Element('.message', {
|
||||
'html': 'No shows found ' + message + '.<br/>'
|
||||
}).grab(
|
||||
new Element('a', {
|
||||
'text': 'Reset filter',
|
||||
'events': {
|
||||
'click': function(){
|
||||
self.filter = {
|
||||
'starts_with': null,
|
||||
'search': null
|
||||
};
|
||||
self.navigation_search_input.set('value', '');
|
||||
self.reset();
|
||||
self.activateLetter();
|
||||
self.getMovies(true);
|
||||
self.last_search_value = '';
|
||||
}
|
||||
}
|
||||
})
|
||||
).inject(self.movie_list);
|
||||
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
createShow: function(show, inject_at){
|
||||
var self = this;
|
||||
var m = new Show(self, {
|
||||
'actions': self.options.actions,
|
||||
'view': self.current_view,
|
||||
'onSelect': self.calculateSelected.bind(self)
|
||||
}, show);
|
||||
|
||||
$(m).inject(self.movie_list, inject_at || 'bottom');
|
||||
|
||||
m.fireEvent('injected');
|
||||
|
||||
self.movies.include(m);
|
||||
self.movies_added[show._id] = true;
|
||||
},
|
||||
|
||||
createNavigation: function(){
|
||||
var self = this;
|
||||
var chars = '#ABCDEFGHIJKLMNOPQRSTUVWXYZ';
|
||||
|
||||
self.el.addClass('with_navigation');
|
||||
|
||||
self.navigation = new Element('div.alph_nav').adopt(
|
||||
self.mass_edit_form = new Element('div.mass_edit_form').adopt(
|
||||
new Element('span.select').adopt(
|
||||
self.mass_edit_select = new Element('input[type=checkbox].inlay', {
|
||||
'events': {
|
||||
'change': self.massEditToggleAll.bind(self)
|
||||
}
|
||||
}),
|
||||
self.mass_edit_selected = new Element('span.count', {'text': 0}),
|
||||
self.mass_edit_selected_label = new Element('span', {'text': 'selected'})
|
||||
),
|
||||
new Element('div.quality').adopt(
|
||||
self.mass_edit_quality = new Element('select'),
|
||||
new Element('a.button.orange', {
|
||||
'text': 'Change quality',
|
||||
'events': {
|
||||
'click': self.changeQualitySelected.bind(self)
|
||||
}
|
||||
})
|
||||
),
|
||||
new Element('div.delete').adopt(
|
||||
new Element('span[text=or]'),
|
||||
new Element('a.button.red', {
|
||||
'text': 'Delete',
|
||||
'events': {
|
||||
'click': self.deleteSelected.bind(self)
|
||||
}
|
||||
})
|
||||
),
|
||||
new Element('div.refresh').adopt(
|
||||
new Element('span[text=or]'),
|
||||
new Element('a.button.green', {
|
||||
'text': 'Refresh',
|
||||
'events': {
|
||||
'click': self.refreshSelected.bind(self)
|
||||
}
|
||||
})
|
||||
)
|
||||
),
|
||||
new Element('div.menus').adopt(
|
||||
self.navigation_counter = new Element('span.counter[title=Total]'),
|
||||
self.filter_menu = new Block.Menu(self, {
|
||||
'class': 'filter'
|
||||
}),
|
||||
self.navigation_actions = new Element('ul.actions', {
|
||||
'events': {
|
||||
'click:relay(li)': function(e, el){
|
||||
var a = 'active';
|
||||
self.navigation_actions.getElements('.'+a).removeClass(a);
|
||||
self.changeView(el.get('data-view'));
|
||||
this.addClass(a);
|
||||
|
||||
el.inject(el.getParent(), 'top');
|
||||
el.getSiblings().hide();
|
||||
setTimeout(function(){
|
||||
el.getSiblings().setStyle('display', null);
|
||||
}, 100)
|
||||
}
|
||||
}
|
||||
}),
|
||||
self.navigation_menu = new Block.Menu(self, {
|
||||
'class': 'extra'
|
||||
})
|
||||
)
|
||||
).inject(self.el, 'top');
|
||||
|
||||
// Mass edit
|
||||
self.mass_edit_select_class = new Form.Check(self.mass_edit_select);
|
||||
Quality.getActiveProfiles().each(function(profile){
|
||||
new Element('option', {
|
||||
'value': profile.get('_id'),
|
||||
'text': profile.get('label')
|
||||
}).inject(self.mass_edit_quality)
|
||||
});
|
||||
|
||||
self.filter_menu.addLink(
|
||||
self.navigation_search_input = new Element('input', {
|
||||
'title': 'Search through ' + self.options.identifier,
|
||||
'placeholder': 'Search through ' + self.options.identifier,
|
||||
'events': {
|
||||
'keyup': self.search.bind(self),
|
||||
'change': self.search.bind(self)
|
||||
}
|
||||
})
|
||||
).addClass('search');
|
||||
|
||||
var available_chars;
|
||||
self.filter_menu.addEvent('open', function(){
|
||||
self.navigation_search_input.focus();
|
||||
|
||||
// Get available chars and highlight
|
||||
if(!available_chars && (self.navigation.isDisplayed() || self.navigation.isVisible()))
|
||||
Api.request('media.available_chars', {
|
||||
'data': Object.merge({
|
||||
'type': 'show',
|
||||
'status': self.options.status
|
||||
}, self.filter),
|
||||
'onSuccess': function(json){
|
||||
available_chars = json.chars;
|
||||
|
||||
available_chars.each(function(c){
|
||||
self.letters[c.capitalize()].addClass('available')
|
||||
})
|
||||
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
self.filter_menu.addLink(
|
||||
self.navigation_alpha = new Element('ul.numbers', {
|
||||
'events': {
|
||||
'click:relay(li.available)': function(e, el){
|
||||
self.activateLetter(el.get('data-letter'));
|
||||
self.getMovies(true)
|
||||
}
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
// Actions
|
||||
['mass_edit', 'details', 'list'].each(function(view){
|
||||
var current = self.current_view == view;
|
||||
new Element('li', {
|
||||
'class': 'icon2 ' + view + (current ? ' active ' : ''),
|
||||
'data-view': view
|
||||
}).inject(self.navigation_actions, current ? 'top' : 'bottom');
|
||||
});
|
||||
|
||||
// All
|
||||
self.letters['all'] = new Element('li.letter_all.available.active', {
|
||||
'text': 'ALL'
|
||||
}).inject(self.navigation_alpha);
|
||||
|
||||
// Chars
|
||||
chars.split('').each(function(c){
|
||||
self.letters[c] = new Element('li', {
|
||||
'text': c,
|
||||
'class': 'letter_'+c,
|
||||
'data-letter': c
|
||||
}).inject(self.navigation_alpha);
|
||||
});
|
||||
|
||||
// Add menu or hide
|
||||
if (self.options.menu.length > 0)
|
||||
self.options.menu.each(function(menu_item){
|
||||
self.navigation_menu.addLink(menu_item);
|
||||
});
|
||||
else
|
||||
self.navigation_menu.hide();
|
||||
|
||||
},
|
||||
|
||||
calculateSelected: function(){
|
||||
var self = this;
|
||||
|
||||
var selected = 0,
|
||||
movies = self.movies.length;
|
||||
self.movies.each(function(movie){
|
||||
selected += movie.isSelected() ? 1 : 0
|
||||
});
|
||||
|
||||
var indeterminate = selected > 0 && selected < movies,
|
||||
checked = selected == movies && selected > 0;
|
||||
|
||||
self.mass_edit_select.set('indeterminate', indeterminate);
|
||||
|
||||
self.mass_edit_select_class[checked ? 'check' : 'uncheck']();
|
||||
self.mass_edit_select_class.element[indeterminate ? 'addClass' : 'removeClass']('indeterminate');
|
||||
|
||||
self.mass_edit_selected.set('text', selected);
|
||||
},
|
||||
|
||||
deleteSelected: function(){
	// Confirm with the user, then delete every selected movie from this
	// list via the API and remove the corresponding rows from the DOM.
	var self = this,
		ids = self.getSelectedMovies(),
		// The warning depends on which list we are in: deleting from
		// "wanted" cancels the download, elsewhere only the reference
		// in the manage list is removed.
		// NOTE(review): this reads self.identifier while the API call
		// below uses self.options.identifier — confirm self.identifier
		// is actually set on this class.
		help_msg = self.identifier == 'wanted' ? 'If you do, you won\'t be able to watch them, as they won\'t get downloaded!' : 'Your files will be safe, this will only delete the reference from the CouchPotato manage list';

	var qObj = new Question('Are you sure you want to delete '+ids.length+' movie'+ (ids.length != 1 ? 's' : '') +'?', help_msg, [{
		'text': 'Yes, delete '+(ids.length != 1 ? 'them' : 'it'),
		'class': 'delete',
		'events': {
			'click': function(e){
				(e).preventDefault();
				this.set('text', 'Deleting..');
				Api.request('media.delete', {
					'method': 'post',
					'data': {
						'id': ids.join(','),
						'delete_from': self.options.identifier
					},
					'onSuccess': function(){
						qObj.close();

						// Collect first, erase afterwards: removing items
						// from self.movies while iterating it would skip
						// entries.
						var erase_movies = [];
						self.movies.each(function(movie){
							if (movie.isSelected()){
								$(movie).destroy();
								erase_movies.include(movie);
							}
						});

						erase_movies.each(function(movie){
							self.movies.erase(movie);
							movie.destroy();
							self.setCounter(self.counter_count-1);
							self.total_movies--;
						});

						self.calculateSelected();
					}
				});

			}
		}
	}, {
		'text': 'Cancel',
		'cancel': true
	}]);

},
|
||||
|
||||
changeQualitySelected: function(){
|
||||
var self = this;
|
||||
var ids = self.getSelectedMovies();
|
||||
|
||||
Api.request('movie.edit', {
|
||||
'method': 'post',
|
||||
'data': {
|
||||
'id': ids.join(','),
|
||||
'profile_id': self.mass_edit_quality.get('value')
|
||||
},
|
||||
'onSuccess': self.search.bind(self)
|
||||
});
|
||||
},
|
||||
|
||||
refreshSelected: function(){
|
||||
var self = this;
|
||||
var ids = self.getSelectedMovies();
|
||||
|
||||
Api.request('media.refresh', {
|
||||
'method': 'post',
|
||||
'data': {
|
||||
'id': ids.join(',')
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
getSelectedMovies: function(){
|
||||
var self = this;
|
||||
|
||||
var ids = [];
|
||||
self.movies.each(function(movie){
|
||||
if (movie.isSelected())
|
||||
ids.include(movie.get('_id'))
|
||||
});
|
||||
|
||||
return ids
|
||||
},
|
||||
|
||||
massEditToggleAll: function(){
|
||||
var self = this;
|
||||
|
||||
var select = self.mass_edit_select.get('checked');
|
||||
|
||||
self.movies.each(function(movie){
|
||||
movie.select(select)
|
||||
});
|
||||
|
||||
self.calculateSelected()
|
||||
},
|
||||
|
||||
reset: function(){
	// Clear all transient list state before a fresh load: rows,
	// selection UI, letter-filter highlight, and paging offset.
	var self = this;

	self.movies = [];
	if(self.mass_edit_select)
		self.calculateSelected();
	if(self.navigation_alpha)
		self.navigation_alpha.getElements('.active').removeClass('active');

	self.offset = 0;
	if(self.scrollspy){
		//self.load_more.show();
		// Re-arm infinite scrolling for the new result set.
		self.scrollspy.start();
	}
},
|
||||
|
||||
activateLetter: function(letter){
|
||||
var self = this;
|
||||
|
||||
self.reset();
|
||||
|
||||
self.letters[letter || 'all'].addClass('active');
|
||||
self.filter.starts_with = letter;
|
||||
|
||||
},
|
||||
|
||||
changeView: function(new_view){
|
||||
var self = this;
|
||||
|
||||
self.el
|
||||
.removeClass(self.current_view+'_list')
|
||||
.addClass(new_view+'_list');
|
||||
|
||||
self.current_view = new_view;
|
||||
Cookie.write(self.options.identifier+'_view2', new_view, {duration: 1000});
|
||||
},
|
||||
|
||||
getSavedView: function(){
|
||||
var self = this;
|
||||
return Cookie.read(self.options.identifier+'_view2');
|
||||
},
|
||||
|
||||
search: function(){
	// Debounced text search: restart a 250ms timer on every call and
	// only hit the API once typing pauses.
	var self = this;

	if(self.search_timer) clearTimeout(self.search_timer);
	// MooTools Function.delay returns a timer id usable by clearTimeout.
	self.search_timer = (function(){
		var search_value = self.navigation_search_input.get('value');
		// Skip the request when the text hasn't changed since last time.
		if (search_value == self.last_search_value) return;

		self.reset();

		// A text search spans all letters, so clear the letter filter.
		self.activateLetter();
		self.filter.search = search_value;

		self.getMovies(true);

		self.last_search_value = search_value;

	}).delay(250);

},
|
||||
|
||||
update: function(){
|
||||
var self = this;
|
||||
|
||||
self.reset();
|
||||
self.getMovies(true);
|
||||
},
|
||||
|
||||
getMovies: function(reset){
	// Fetch one page of media from the API and append it to the list.
	// `reset` empties the current rows before adding the new ones.
	var self = this;

	// Pause infinite scrolling while a request is in flight.
	if(self.scrollspy){
		self.scrollspy.stop();
		self.load_more.set('text', 'loading...');
	}

	// First load: show a block-level loading message with a spinner.
	if(self.movies.length == 0 && self.options.loader){

		self.loader_first = new Element('div.loading').adopt(
			new Element('div.message', {'text': self.options.title ? 'Loading \'' + self.options.title + '\'' : 'Loading...'})
		).inject(self.el, 'top');

		createSpinner(self.loader_first, {
			radius: 4,
			length: 4,
			width: 1
		});

		// Reserve space so the layout doesn't collapse while loading.
		self.el.setStyle('min-height', 93);

	}

	Api.request(self.options.api_call || 'media.list', {
		'data': Object.merge({
			'type': self.options.type || 'movie',
			'status': self.options.status,
			// "limit,offset" paging string; null fetches everything.
			'limit_offset': self.options.limit ? self.options.limit + ',' + self.offset : null
		}, self.filter),
		'onSuccess': function(json){

			if(reset)
				self.movie_list.empty();

			// Fade the first-load indicator out; destroy the element much
			// later so the hide transition can finish first.
			if(self.loader_first){
				var lf = self.loader_first;
				self.loader_first.addClass('hide');
				self.loader_first = null;
				setTimeout(function(){
					lf.destroy();
				}, 20000);
				self.el.setStyle('min-height', null);
			}

			// NOTE(review): the response is read from json.shows while the
			// rest of this class talks about movies — confirm the endpoint
			// used really returns a "shows" key.
			self.store(json.shows);
			self.addMovies(json.shows, json.total || json.shows.length);
			if(self.scrollspy) {
				self.load_more.set('text', 'load more movies');
				self.scrollspy.start();
			}

			self.checkIfEmpty();
			self.fireEvent('loaded');
		}
	});
},
|
||||
|
||||
loadMore: function(){
|
||||
var self = this;
|
||||
if(self.offset >= self.options.limit)
|
||||
self.getMovies()
|
||||
},
|
||||
|
||||
store: function(movies){
|
||||
var self = this;
|
||||
|
||||
self.offset += movies.length;
|
||||
|
||||
},
|
||||
|
||||
checkIfEmpty: function(){
	// Toggle between the normal list chrome and the configured
	// "empty list" placeholder element.
	var self = this;

	var is_empty = self.movies.length == 0 && (self.total_movies == 0 || self.total_movies === undefined);

	if(self.title)
		self.title[is_empty ? 'hide' : 'show']();

	if(self.description)
		self.description.setStyle('display', [is_empty ? 'none' : '']);

	if(is_empty && self.options.on_empty_element){
		// Insert the placeholder near the top of the list and hide the
		// navigation while there is nothing to filter.
		self.options.on_empty_element.inject(self.loader_first || self.title || self.movie_list, 'after');

		if(self.navigation)
			self.navigation.hide();

		self.empty_element = self.options.on_empty_element;
	}
	else if(self.empty_element){
		// List has content again: drop the placeholder, restore navigation.
		self.empty_element.destroy();

		if(self.navigation)
			self.navigation.show();
	}

},
|
||||
|
||||
toElement: function(){
	// Lets $(this) resolve to the list's root element (MooTools protocol).
	return this.el;
}
|
||||
|
||||
});
|
||||
@@ -1,230 +0,0 @@
|
||||
// One result row in the "add new show" search dropdown: renders the
// poster/title/year and an expandable options panel used to add the
// show with a chosen title, quality profile and category.
Block.Search.ShowItem = new Class({

	Implements: [Options, Events],

	initialize: function(info, options){
		var self = this;
		self.setOptions(options);

		self.info = info;
		self.alternative_titles = [];

		self.create();
	},

	create: function(){
		// Build the static part of the row; the options panel content is
		// created lazily in createOptions() on first click.
		var self = this,
			info = self.info;

		self.el = new Element('div.media_result', {
			'id': info.id
		}).adopt(
			// Guard each level: some providers return no images object, or
			// an images object without a poster list (fix: previously only
			// info.images was checked, so a missing poster array threw).
			self.thumbnail = info.images && info.images.poster && info.images.poster.length > 0 ? new Element('img.thumbnail', {
				'src': info.images.poster[0],
				'height': null,
				'width': null
			}) : null,
			self.options_el = new Element('div.options.inlay'),
			self.data_container = new Element('div.data', {
				'events': {
					'click': self.showOptions.bind(self)
				}
			}).adopt(
				self.info_container = new Element('div.info').adopt(
					new Element('h2').adopt(
						self.title = new Element('span.title', {
							'text': info.titles && info.titles.length > 0 ? info.titles[0] : 'Unknown'
						}),
						self.year = info.year ? new Element('span.year', {
							'text': info.year
						}) : null
					)
				)
			)
		)

		// Remember every known title so the add-panel can offer a choice.
		if(info.titles)
			info.titles.each(function(title){
				self.alternativeTitle({
					'title': title
				});
			})
	},

	alternativeTitle: function(alternative){
		// Register an extra title ({'title': ...}) for the title dropdown.
		var self = this;

		self.alternative_titles.include(alternative);
	},

	getTitle: function(){
		// Preferred display title; falls back to 'Unknown' when neither
		// original_title nor titles[0] is available.
		var self = this;
		try {
			return self.info.original_title ? self.info.original_title : self.info.titles[0];
		}
		catch(e){
			return 'Unknown';
		}
	},

	get: function(key){
		// Raw attribute access on the provider info object.
		return this.info[key]
	},

	showOptions: function(){
		// Expand the add-options panel; clicking outside closes it again.
		var self = this;

		self.createOptions();

		self.data_container.addClass('open');
		self.el.addEvent('outerClick', self.closeOptions.bind(self))

	},

	closeOptions: function(){
		var self = this;

		self.data_container.removeClass('open');
		self.el.removeEvents('outerClick')
	},

	add: function(e){
		// Submit the chosen title/profile/category to the backend and show
		// the outcome message in the options panel.
		var self = this;

		if(e)
			(e).preventDefault();

		self.loadingMask();

		Api.request('show.add', {
			'data': {
				'identifiers': self.info.identifiers,
				'type': self.info.type,
				'title': self.title_select.get('value'),
				'profile_id': self.profile_select.get('value'),
				'category_id': self.category_select.get('value')
			},
			'onComplete': function(json){
				self.options_el.empty();
				self.options_el.adopt(
					new Element('div.message', {
						'text': json.success ? 'Show successfully added.' : 'Show didn\'t add properly. Check logs'
					})
				);
				self.mask.fade('out');

				self.fireEvent('added');
			},
			'onFailure': function(){
				self.options_el.empty();
				self.options_el.adopt(
					new Element('div.message', {
						'text': 'Something went wrong, check the logs for more info.'
					})
				);
				self.mask.fade('out');
			}
		});
	},

	createOptions: function(){
		// Lazily build the options panel (title/profile/category selects
		// and the Add button); runs only once per row.
		var self = this,
			info = self.info;

		if(!self.options_el.hasClass('set')){

			// Summarize which qualities are already in the library, if any.
			if(self.info.in_library){
				var in_library = [];
				self.info.in_library.releases.each(function(release){
					in_library.include(release.quality.label)
				});
			}

			self.options_el.grab(
				new Element('div', {
					'class': self.info.in_wanted && self.info.in_wanted.profile_id || in_library ? 'in_library_wanted' : ''
				}).adopt(
					self.info.in_wanted && self.info.in_wanted.profile_id ? new Element('span.in_wanted', {
						'text': 'Already in wanted list: ' + Quality.getProfile(self.info.in_wanted.profile_id).get('label')
					}) : (in_library ? new Element('span.in_library', {
						'text': 'Already in library: ' + in_library.join(', ')
					}) : null),
					self.title_select = new Element('select', {
						'name': 'title'
					}),
					self.profile_select = new Element('select', {
						'name': 'profile'
					}),
					self.category_select = new Element('select', {
						'name': 'category'
					}).grab(
						new Element('option', {'value': -1, 'text': 'None'})
					),
					self.add_button = new Element('a.button', {
						'text': 'Add',
						'events': {
							'click': self.add.bind(self)
						}
					})
				)
			);

			// One <option> per known alternative title.
			Array.each(self.alternative_titles, function(alt){
				new Element('option', {
					'text': alt.title
				}).inject(self.title_select)
			})


			// Fill categories; hide the select when there is nothing to pick.
			var categories = CategoryList.getAll();

			if(categories.length == 0)
				self.category_select.hide();
			else {
				self.category_select.show();
				categories.each(function(category){
					new Element('option', {
						'value': category.data._id,
						'text': category.data.label
					}).inject(self.category_select);
				});
			}

			// Fill profiles; with a single profile there is no choice to make.
			var profiles = Quality.getActiveProfiles();
			if(profiles.length == 1)
				self.profile_select.hide();

			profiles.each(function(profile){
				new Element('option', {
					'value': profile.get('_id'),
					'text': profile.get('label')
				}).inject(self.profile_select)
			});

			self.options_el.addClass('set');

			// Nothing to choose and not already known: add immediately.
			if(categories.length == 0 && self.title_select.getElements('option').length == 1 && profiles.length == 1 &&
					!(self.info.in_wanted && self.info.in_wanted.profile_id || in_library))
				self.add();

		}

	},

	loadingMask: function(){
		// Overlay the row with a spinner while the add request runs.
		var self = this;

		self.mask = new Element('div.mask').inject(self.el).fade('hide')

		createSpinner(self.mask)
		self.mask.fade('in')

	},

	toElement: function(){
		// Lets $(this) resolve to the row's root element (MooTools protocol).
		return this.el
	}

});
|
||||
@@ -1,127 +0,0 @@
|
||||
// One season row inside a show's episode overview: season title,
// per-quality status badges and the configured item-action widgets.
var Season = new Class({

	Extends: BlockBase,

	action: {},

	initialize: function(show, options, data){
		var self = this;
		self.setOptions(options);

		self.show = show;
		self.options = options;
		self.data = data;

		// Seasons inherit the quality profile of their parent show.
		self.profile = self.show.profile;

		self.el = new Element('div.item.season').adopt(
			self.detail = new Element('div.item.data')
		);

		self.create();
	},

	create: function(){
		var self = this;

		self.detail.set('id', 'season_'+self.data._id);

		self.detail.adopt(
			new Element('span.name', {'text': self.getTitle()}),

			self.quality = new Element('span.quality', {
				'events': {
					// Clicking the quality badges opens the releases list
					// (delegated to the "releases" action when visible).
					'click': function(e){
						var releases = self.detail.getElement('.item-actions .releases');

						if(releases.isVisible())
							releases.fireEvent('click', [e])
					}
				}
			}),
			self.actions = new Element('div.item-actions')
		);

		// One badge per quality configured in the active profile.
		if(self.profile.data) {
			self.profile.getTypes().each(function(type){
				var q = self.addQuality(type.get('quality'), type.get('3d'));

				if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
					q.addClass('finish');
					// NOTE(review): message says "movie" but this is a
					// season row; wording looks copy-pasted.
					q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
				}
			});
		}

		// Reflect any already-known releases on the badges.
		self.updateReleases();

		// Instantiate the configured action widgets for this row.
		Object.each(self.options.actions, function(action, key){
			self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
			if(action.el)
				self.actions.adopt(action)
		});
	},

	updateReleases: function(){
		// Mark the quality badges with each release's status class; add a
		// badge on the fly for snatched/seeding/done releases that aren't
		// part of the profile.
		var self = this;
		if(!self.data.releases || self.data.releases.length == 0) return;

		self.data.releases.each(function(release){

			var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
				status = release.status;

			if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
				q = self.addQuality(release.quality, release.is_3d || false);

			if (q && !q.hasClass(status)){
				q.addClass(status);
				q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
			}

		});
	},

	addQuality: function(quality, is_3d){
		// Create one quality badge span inside the quality container.
		var self = this,
			q = Quality.getQuality(quality);

		return new Element('span', {
			'text': q.label + (is_3d ? ' 3D' : ''),
			'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
			'title': ''
		}).inject(self.quality);
	},

	getTitle: function(){
		// "Season <n>", or "Specials" for season 0 / unnumbered seasons.
		var self = this;

		var title = '';

		if(self.data.info.number) {
			title = 'Season ' + self.data.info.number;
		} else {
			// Season 0 / Specials
			title = 'Specials';
		}

		return title;
	},

	getIdentifier: function(){
		// Prefer the imdb id from the identifiers map, fall back to a
		// plain "imdb" attribute.
		var self = this;

		try {
			return self.get('identifiers').imdb;
		}
		catch (e){ }

		return self.get('imdb');
	},

	get: function(attr){
		// Attribute from the media document, falling back to nested info.
		return this.data[attr] || this.data.info[attr]
	}
});
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,92 +0,0 @@
|
||||
// Slide-in panel listing all seasons and episodes of a show, loaded
// lazily from the library-tree API on first open.
var Episodes = new Class({
	initialize: function(show, options) {
		var self = this;

		self.show = show;
		self.options = options;
	},

	open: function(){
		var self = this;

		// Build the container and fetch the tree only once; later opens
		// just slide the cached panel back in.
		if(!self.container){
			self.container = new Element('div.options').grab(
				self.episodes_container = new Element('div.episodes.table')
			);

			self.container.inject(self.show, 'top');

			Api.request('library.tree', {
				'data': {
					'media_id': self.show.data._id
				},
				'onComplete': function(json){
					self.data = json.result;

					self.createEpisodes();
				}
			});
		}

		self.show.slide('in', self.container, true);
	},

	createEpisodes: function() {
		// Render every season followed by its (sorted) episodes.
		var self = this;

		self.data.seasons.sort(self.sortSeasons);
		self.data.seasons.each(function(season) {
			self.createSeason(season);

			season.episodes.sort(self.sortEpisodes);
			season.episodes.each(function(episode) {
				self.createEpisode(episode);
			});
		});
	},

	createSeason: function(season) {
		var self = this,
			s = new Season(self.show, self.options, season);

		$(s).inject(self.episodes_container);
	},

	createEpisode: function(episode){
		var self = this,
			e = new Episode(self.show, self.options, episode);

		$(e).inject(self.episodes_container);
	},

	sortSeasons: function(a, b) {
		// Move "Specials" (no season number) to the bottom of the list
		if(!a.info.number) {
			return 1;
		}

		if(!b.info.number) {
			return -1;
		}

		// Order the remaining seasons ascending by number
		if(a.info.number < b.info.number)
			return -1;

		if(a.info.number > b.info.number)
			return 1;

		return 0;
	},

	sortEpisodes: function(a, b) {
		// Order episodes ascending by number
		if(a.info.number < b.info.number)
			return -1;

		if(a.info.number > b.info.number)
			return 1;

		return 0;
	}
});
|
||||
@@ -1,370 +0,0 @@
|
||||
// A single show row/tile in the media list: renders poster + details,
// listens to global App events for busy/update notifications, and opens
// the Episodes panel when the title is clicked.
var Show = new Class({

	Extends: BlockBase,

	action: {},

	initialize: function(list, options, data){
		var self = this;

		self.data = data;
		self.view = options.view || 'details';
		self.list = list;

		self.el = new Element('div.show');

		self.episodes = new Episodes(self, {
			'actions': [EA.IMDB, EA.Release, EA.Refresh]
		});

		self.profile = Quality.getProfile(data.profile_id) || {};
		self.category = CategoryList.getCategory(data.category_id) || {};
		self.parent(self, options);

		self.addEvents();
	},

	addEvents: function(){
		var self = this;

		// Keep the handlers so destroy() can unbind them again.
		self.global_events = {};

		// Do refresh with new data
		self.global_events['movie.update'] = function(notification){
			if(self.data._id != notification.data._id) return;

			self.busy(false);
			self.removeView();
			self.update.delay(2000, self, notification);
		};
		App.on('movie.update', self.global_events['movie.update']);

		// Add spinner on load / search; the notification may carry one id
		// or an array of ids.
		['media.busy', 'movie.searcher.started'].each(function(listener){
			self.global_events[listener] = function(notification){
				if(notification.data && (self.data._id == notification.data._id || (typeOf(notification.data._id) == 'array' && notification.data._id.indexOf(self.data._id) > -1)))
					self.busy(true);
			};
			App.on(listener, self.global_events[listener]);
		});

		// Remove spinner
		self.global_events['movie.searcher.ended'] = function(notification){
			if(notification.data && self.data._id == notification.data._id)
				self.busy(false)
		};
		App.on('movie.searcher.ended', self.global_events['movie.searcher.ended']);

		// Reload when releases have updated
		self.global_events['release.update_status'] = function(notification){
			var data = notification.data;
			if(data && self.data._id == data.movie_id){

				if(!self.data.releases)
					self.data.releases = [];

				self.data.releases.push({'quality': data.quality, 'status': data.status});
				self.updateReleases();
			}
		};

		App.on('release.update_status', self.global_events['release.update_status']);

	},

	destroy: function(){
		// Drop the DOM node, unregister from the parent list, and unbind
		// all global App listeners registered in addEvents().
		var self = this;

		self.el.destroy();
		delete self.list.movies_added[self.get('id')];
		self.list.movies.erase(self);

		self.list.checkIfEmpty();

		// Remove events
		Object.each(self.global_events, function(handle, listener){
			App.off(listener, handle);
		});
	},

	busy: function(set_busy, timeout){
		// Show or hide a spinner overlay; hiding is delayed so quick
		// busy/idle flips don't flicker.
		var self = this;

		if(!set_busy){
			setTimeout(function(){
				if(self.spinner){
					self.mask.fade('out');
					setTimeout(function(){
						if(self.mask)
							self.mask.destroy();
						if(self.spinner)
							self.spinner.el.destroy();
						self.spinner = null;
						self.mask = null;
					}, timeout || 400);
				}
			}, timeout || 1000)
		}
		else if(!self.spinner) {
			self.createMask();
			self.spinner = createSpinner(self.mask);
			self.mask.fade('in');
		}
	},

	createMask: function(){
		var self = this;
		self.mask = new Element('div.mask', {
			'styles': {
				'z-index': 4
			}
		}).inject(self.el, 'top').fade('hide');
	},

	update: function(notification){
		// Re-render the row from fresh notification data.
		var self = this;

		self.data = notification.data;
		self.el.empty();
		self.removeView();

		self.profile = Quality.getProfile(self.data.profile_id) || {};
		self.category = CategoryList.getCategory(self.data.category_id) || {};
		self.create();

		self.busy(false);
	},

	create: function(){
		var self = this;

		self.el.addClass('status_'+self.get('status'));

		// Pick the release date (dvd or theater) closest to "now" as ETA.
		var eta = null,
			eta_date = null,
			now = Math.round(+new Date()/1000);

		if(self.data.info.release_date)
			[self.data.info.release_date.dvd, self.data.info.release_date.theater].each(function(timestamp){
				if (timestamp > 0 && (eta == null || Math.abs(timestamp - now) < Math.abs(eta - now)))
					eta = timestamp;
			});

		if(eta){
			eta_date = new Date(eta * 1000);
			eta_date = eta_date.toLocaleString('en-us', { month: "long" }) + ' ' + eta_date.getFullYear();
		}

		self.el.adopt(
			self.select_checkbox = new Element('input[type=checkbox].inlay', {
				'events': {
					'change': function(){
						self.fireEvent('select')
					}
				}
			}),
			self.thumbnail = (self.data.files && self.data.files.image_poster) ? new Element('img', {
				'class': 'type_image poster',
				'src': Api.createUrl('file.cache') + self.data.files.image_poster[0].split(Api.getOption('path_sep')).pop()
			}): null,
			self.data_container = new Element('div.data.inlay.light').adopt(
				self.info_container = new Element('div.info').adopt(
					new Element('div.title').adopt(
						self.title = new Element('a', {
							'events': {
								// Title click opens the episodes panel.
								'click': function(e){
									self.episodes.open();
								}
							},
							'text': self.getTitle() || 'n/a'
						}),
						self.year = new Element('div.year', {
							'text': self.data.info.year || 'n/a'
						})
					),
					self.description = new Element('div.description.tiny_scroll', {
						'text': self.data.info.plot
					}),
					// Only show the ETA when it lies within 8035200s (~93
					// days) ahead of now.
					self.eta = eta_date && (now+8035200 > eta) ? new Element('div.eta', {
						'text': eta_date,
						'title': 'ETA'
					}) : null,
					self.quality = new Element('div.quality', {
						'events': {
							// Quality badges delegate to the releases action.
							'click': function(e){
								var releases = self.el.getElement('.actions .releases');
								if(releases.isVisible())
									releases.fireEvent('click', [e])
							}
						}
					})
				),
				self.actions = new Element('div.actions')
			)
		);

		if(!self.thumbnail)
			self.el.addClass('no_thumbnail');

		//self.changeView(self.view);
		self.select_checkbox_class = new Form.Check(self.select_checkbox);

		// Add profile
		if(self.profile.data)
			self.profile.getTypes().each(function(type){

				var q = self.addQuality(type.get('quality'), type.get('3d'));
				if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
					q.addClass('finish');
					q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
				}

			});

		// Add releases
		self.updateReleases();

		// Instantiate the configured action widgets for this row.
		Object.each(self.options.actions, function(action, key){
			self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
			if(action.el)
				self.actions.adopt(action)
		});

	},

	updateReleases: function(){
		// Mark the quality badges with each release's status class; add a
		// badge on the fly for snatched/seeding/done releases outside the
		// profile. Same logic as Season.updateReleases.
		var self = this;
		if(!self.data.releases || self.data.releases.length == 0) return;

		self.data.releases.each(function(release){

			var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
				status = release.status;

			if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
				q = self.addQuality(release.quality, release.is_3d || false);

			if (q && !q.hasClass(status)){
				q.addClass(status);
				q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
			}

		});
	},

	addQuality: function(quality, is_3d){
		// Create one quality badge span inside the quality container.
		var self = this;

		var q = Quality.getQuality(quality);
		return new Element('span', {
			'text': q.label + (is_3d ? ' 3D' : ''),
			'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
			'title': ''
		}).inject(self.quality);

	},

	getTitle: function(){
		var self = this;

		if(self.data.title)
			return self.getUnprefixedTitle(self.data.title);
		else if(self.data.info.titles.length > 0)
			return self.getUnprefixedTitle(self.data.info.titles[0]);

		return 'Unknown movie'
	},

	getUnprefixedTitle: function(t){
		// Move a leading article to the end: "The X" -> "X, The".
		if(t.substr(0, 4).toLowerCase() == 'the ')
			t = t.substr(4) + ', The';
		else if(t.substr(0, 3).toLowerCase() == 'an ')
			t = t.substr(3) + ', An';
		else if(t.substr(0, 2).toLowerCase() == 'a ')
			t = t.substr(2) + ', A';
		return t;
	},

	slide: function(direction, el, expand){
		// Slide a detail panel (e.g. the episodes list) in over the row,
		// or slide it back out.
		var self = this;

		if(direction == 'in'){
			self.temp_view = self.view;
			self.changeView('details');

			// Clicking outside the row closes the panel again.
			self.el.addEvent('outerClick', function(){
				self.removeView();
				self.slide('out')
			});
			el.show();


			if(expand === true) {
				self.el.addClass('expanded');
				self.el.getElements('.table').addClass('expanded');
			}

			self.data_container.addClass('hide_right');
		}
		else {
			self.el.removeEvents('outerClick');

			// Hide the extra panels only after the slide-out transition
			// has had time to finish.
			setTimeout(function(){
				if(self.el)
				{
					self.el.getElements('> :not(.data):not(.poster):not(.movie_container)').hide();
					self.el.getElements('.table').removeClass('expanded');
				}
			}, 600);

			self.el.removeClass('expanded');
			self.data_container.removeClass('hide_right');
		}
	},

	changeView: function(new_view){
		var self = this;

		if(self.el)
			self.el
				.removeClass(self.view+'_view')
				.addClass(new_view+'_view');

		self.view = new_view;
	},

	removeView: function(){
		var self = this;

		self.el.removeClass(self.view+'_view')
	},

	getIdentifier: function(){
		// Prefer the imdb id from the identifiers map, fall back to a
		// plain "imdb" attribute.
		var self = this;

		try {
			return self.get('identifiers').imdb;
		}
		catch (e){ }

		return self.get('imdb');
	},

	get: function(attr){
		// Attribute from the media document, falling back to nested info.
		return this.data[attr] || this.data.info[attr]
	},

	select: function(bool){
		var self = this;
		self.select_checkbox_class[bool ? 'check' : 'uncheck']()
	},

	isSelected: function(){
		return this.select_checkbox.get('checked');
	},

	toElement: function(){
		// Lets $(this) resolve to the row's root element (MooTools protocol).
		return this.el;
	}

});
|
||||
@@ -1,71 +0,0 @@
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'EpisodeLibraryPlugin'
|
||||
|
||||
|
||||
class EpisodeLibraryPlugin(LibraryBase):
    """Builds search titles and season/episode identifiers for episode media.

    Registers itself on the 'library.query' and 'library.identifier'
    event buses; each handler is a no-op for non-episode media.
    """

    def __init__(self):
        addEvent('library.query', self.query)
        addEvent('library.identifier', self.identifier)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for an episode.

        Titles are derived from the parent season's titles with an
        'E<nn>' suffix appended. Returns a list, or just the first title
        (or None) when `first` is true.
        """
        if media.get('type') != 'show.episode':
            return

        related = fireEvent('library.related', media, single = True)

        # Episode titles are built on top of the parent season's titles.
        titles = fireEvent(
            'library.query', related['season'],
            first = False,
            include_identifier = include_identifier,
            condense = condense,
            single = True
        )

        # Append the episode number, e.g. 'Show S01' -> 'Show S01E02'.
        if include_identifier:
            identifier = fireEvent('library.identifier', media, single = True)

            if identifier and identifier.get('episode'):
                suffix = 'E%02d' % identifier['episode']
                titles = [title + suffix for title in titles]

        return (titles[0] if titles else None) if first else titles

    def identifier(self, media):
        """Return {'season': int|None, 'episode': int|None} for an episode."""
        if media.get('type') != 'show.episode':
            return

        # TODO identifier mapping: scene numbering
        # (media['info'].get('map_episode', {}).get('scene')) should take
        # precedence over the plain season/episode numbers when available.

        info = media['info']

        # Cast to integers.
        # TODO this will need changing to support identifiers with
        # trailing 'a', 'b' characters.
        return {
            'season': tryInt(info.get('season_number'), None),
            'episode': tryInt(info.get('number'), None)
        }
|
||||
@@ -1,52 +0,0 @@
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'SeasonLibraryPlugin'
|
||||
|
||||
|
||||
class SeasonLibraryPlugin(LibraryBase):
    """Builds search titles and season identifiers for season media.

    Registers itself on the 'library.query' and 'library.identifier'
    event buses; each handler is a no-op for non-season media.
    """

    def __init__(self):
        addEvent('library.query', self.query)
        addEvent('library.identifier', self.identifier)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for a season.

        Titles are derived from the parent show's titles with a ' S<nn>'
        suffix appended. Returns a list, or just the first title (or
        None) when `first` is true.
        """
        if media.get('type') != 'show.season':
            return

        related = fireEvent('library.related', media, single = True)

        # Season titles are built on top of the parent show's titles.
        titles = fireEvent(
            'library.query', related['show'],
            first = False,
            condense = condense,
            single = True
        )

        # TODO map_names

        # Append the season number, e.g. 'Show' -> 'Show S01'.
        if include_identifier:
            identifier = fireEvent('library.identifier', media, single = True)

            if identifier and identifier.get('season') is not None:
                suffix = ' S%02d' % identifier['season']
                titles = [title + suffix for title in titles]

        return (titles[0] if titles else None) if first else titles

    def identifier(self, media):
        """Return {'season': int|None} for a season."""
        if media.get('type') != 'show.season':
            return

        return {
            'season': tryInt(media['info']['number'], None)
        }
|
||||
@@ -1,38 +0,0 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
from qcond import QueryCondenser
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ShowLibraryPlugin'
|
||||
|
||||
|
||||
class ShowLibraryPlugin(LibraryBase):
    """Builds the list of search titles for a show."""

    query_condenser = QueryCondenser()

    def __init__(self):
        addEvent('library.query', self.query)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return optimal search titles for a show (first title only by default)."""
        if media.get('type') != 'show':
            return

        titles = media['info']['titles']

        if condense:
            # Prefer the distinct titles produced by QueryCondenser;
            # fall back to plain simplification when condensing fails.
            distinct = self.query_condenser.distinct(titles)
            titles = distinct if distinct else [simplifyString(t) for t in titles]

        if not first:
            return titles

        return titles[0] if titles else None
|
||||
@@ -1,7 +0,0 @@
|
||||
from .main import ShowMatcher
|
||||
|
||||
|
||||
def autoload():
    """Entry point used by the plugin loader to create the show matcher."""
    matcher = ShowMatcher()
    return matcher
|
||||
|
||||
config = []  # This plugin exposes no user-configurable settings
|
||||
@@ -1,72 +0,0 @@
|
||||
from couchpotato import fireEvent, CPLog, tryInt
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.media._base.matcher.base import MatcherBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(MatcherBase):
    """Shared matcher logic for show seasons and episodes.

    Subclasses set `type` ('show.season' / 'show.episode') and implement
    correctIdentifier().
    """

    # TODO come back to this later, think this could be handled better, this is starting to get out of hand....
    quality_map = {
        'bluray_1080p': {'resolution': ['1080p'], 'source': ['bluray']},
        'bluray_720p': {'resolution': ['720p'], 'source': ['bluray']},

        'bdrip_1080p': {'resolution': ['1080p'], 'source': ['BDRip']},
        'bdrip_720p': {'resolution': ['720p'], 'source': ['BDRip']},

        'brrip_1080p': {'resolution': ['1080p'], 'source': ['BRRip']},
        'brrip_720p': {'resolution': ['720p'], 'source': ['BRRip']},

        'webdl_1080p': {'resolution': ['1080p'], 'source': ['webdl', ['web', 'dl']]},
        'webdl_720p': {'resolution': ['720p'], 'source': ['webdl', ['web', 'dl']]},
        'webdl_480p': {'resolution': ['480p'], 'source': ['webdl', ['web', 'dl']]},

        'hdtv_720p': {'resolution': ['720p'], 'source': ['hdtv']},
        'hdtv_sd': {'resolution': ['480p', None], 'source': ['hdtv']},
    }

    def __init__(self):
        super(Base, self).__init__()

        addEvent('%s.matcher.correct_identifier' % self.type, self.correctIdentifier)

    def correct(self, chain, release, media, quality):
        """Return True when the parsed release matches quality, identifier and title."""
        log.info("Checking if '%s' is valid", release['name'])
        log.info2('Release parsed as: %s', chain.info)

        if not fireEvent('matcher.correct_quality', chain, quality, self.quality_map, single = True):
            log.info('Wrong: %s, quality does not match', release['name'])
            return False

        if not fireEvent('%s.matcher.correct_identifier' % self.type, chain, media):
            log.info('Wrong: %s, identifier does not match', release['name'])
            return False

        if not fireEvent('matcher.correct_title', chain, media):
            log.info("Wrong: '%s', undetermined naming.", (' '.join(chain.info['show_name'])))
            return False

        return True

    def correctIdentifier(self, chain, media):
        # Implemented by the Season/Episode subclasses
        raise NotImplementedError()

    def getChainIdentifier(self, chain):
        """Flatten the chain's identifier info and cast its values to integers.

        Returns None when the identifier is missing or ambiguous.
        """
        if 'identifier' not in chain.info:
            return None

        identifier = self.flattenInfo(chain.info['identifier'])

        # Try cast values to integers
        for key, value in identifier.items():
            if isinstance(value, list):
                if len(value) > 1:
                    log.warning('Wrong: identifier contains multiple season or episode values, unsupported')
                    return None

                if not value:
                    # BUGFIX: the old `len(value) <= 1` guard indexed value[0]
                    # on an empty list, raising IndexError
                    log.warning('Wrong: identifier contains an empty value list')
                    return None

                value = value[0]

            identifier[key] = tryInt(value, value)

        return identifier
|
||||
@@ -1,30 +0,0 @@
|
||||
from couchpotato import fireEvent, CPLog
|
||||
from couchpotato.core.media.show.matcher.base import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Episode(Base):
    """Matcher for single show episodes."""

    type = 'show.episode'

    def correctIdentifier(self, chain, media):
        """Return True when the release's episode identifier matches the media's."""
        identifier = self.getChainIdentifier(chain)
        if not identifier:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are multi-part episodes
        has_range = any(key in identifier for key in ['episode_from', 'episode_to'])
        if has_range:
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        required = fireEvent('library.identifier', media, single = True)

        # TODO - Support air by date episodes
        # TODO - Support episode parts

        if identifier == required:
            return True

        log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required, identifier))
        return False
|
||||
@@ -1,9 +0,0 @@
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.matcher.episode import Episode
|
||||
from couchpotato.core.media.show.matcher.season import Season
|
||||
|
||||
|
||||
class ShowMatcher(MultiProvider):
    """Bundles the season and episode matchers into a single provider."""

    def getTypes(self):
        # Order matters to callers iterating the types: season first.
        types = [Season, Episode]
        return types
|
||||
@@ -1,27 +0,0 @@
|
||||
from couchpotato import fireEvent, CPLog
|
||||
from couchpotato.core.media.show.matcher.base import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Season(Base):
    """Matcher for complete show seasons."""

    type = 'show.season'

    def correctIdentifier(self, chain, media):
        """Return True when the release's season identifier matches the media's."""
        identifier = self.getChainIdentifier(chain)
        if not identifier:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are season packs
        has_range = any(key in identifier for key in ['episode_from', 'episode_to'])
        if has_range:
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        required = fireEvent('library.identifier', media, single = True)

        if identifier == required:
            return True

        log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required, identifier))
        return False
|
||||
@@ -1,13 +0,0 @@
|
||||
from couchpotato.core.media._base.providers.info.base import BaseInfoProvider
|
||||
|
||||
|
||||
class ShowProvider(BaseInfoProvider):
    """Base class for info providers operating on whole shows."""
    type = 'show'  # media type handled by this provider
|
||||
|
||||
|
||||
class SeasonProvider(BaseInfoProvider):
    """Base class for info providers operating on show seasons."""
    type = 'show.season'  # media type handled by this provider
|
||||
|
||||
|
||||
class EpisodeProvider(BaseInfoProvider):
    """Base class for info providers operating on single episodes."""
    type = 'show.episode'  # media type handled by this provider
|
||||
@@ -1,372 +0,0 @@
|
||||
from datetime import datetime
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato import Env
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toUnicode
|
||||
from couchpotato.core.helpers.variable import splitString, tryInt, tryFloat
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.show.providers.base import ShowProvider
|
||||
from tvdb_api import tvdb_exceptions
|
||||
from tvdb_api.tvdb_api import Tvdb, Show
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TheTVDb'
|
||||
|
||||
|
||||
class TheTVDb(ShowProvider):
    """Show/season/episode info provider backed by thetvdb.com via tvdb_api.

    Handles the 'show.info', 'season.info' and 'episode.info' events and
    converts tvdb_api objects into the internal media dict format.
    """

    # TODO: Consider grabbing zips to put less strain on tvdb
    # TODO: Unicode stuff (check)
    # TODO: Notify frontend on error (tvdb down at the moment)
    # TODO: Expose apikey in setting so it can be changed by user

    def __init__(self):
        addEvent('show.info', self.getShowInfo, priority = 1)
        addEvent('season.info', self.getSeasonInfo, priority = 1)
        addEvent('episode.info', self.getEpisodeInfo, priority = 1)

        self.tvdb_api_parms = {
            'apikey': self.conf('api_key'),
            'banners': True,
            'language': 'en',
            'cache': os.path.join(Env.get('cache_dir'), 'thetvdb_api'),
        }
        self._setup()

    def _setup(self):
        # Build the tvdb_api client once; it maintains its own on-disk cache
        self.tvdb = Tvdb(**self.tvdb_api_parms)
        self.valid_languages = self.tvdb.config['valid_languages']

    def getShow(self, identifier = None):
        """Fetch the tvdb_api Show for a TheTVDB id, or None on failure."""
        show = None
        try:
            log.debug('Getting show: %s', identifier)
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError):
            log.error('Failed to getShowInfo for show id "%s": %s', (identifier, traceback.format_exc()))
            return None

        return show

    def getShowInfo(self, identifiers = None):
        """
        @param identifiers: dict with identifiers per provider
        @return: Full show info including season and episode info
        """

        if not identifiers or not identifiers.get('thetvdb'):
            return None

        identifier = tryInt(identifiers.get('thetvdb'))

        cache_key = 'thetvdb.cache.show.%s' % identifier
        result = None  # self.getCache(cache_key) -- cache read intentionally disabled here
        if result:
            return result

        show = self.getShow(identifier = identifier)
        if show:
            result = self._parseShow(show)
            self.setCache(cache_key, result)

        return result or {}

    def getSeasonInfo(self, identifiers = None, params = None):
        """Either return a list of all seasons or a single season by number.

        identifier is the show 'id'
        """
        if not identifiers or not identifiers.get('thetvdb'):
            return None

        # Avoid the shared mutable-default-argument pitfall
        params = params or {}

        season_number = params.get('season_number', None)
        identifier = tryInt(identifiers.get('thetvdb'))

        cache_key = 'thetvdb.cache.%s.%s' % (identifier, season_number)
        log.debug('Getting SeasonInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError):
            # BUGFIX: the old handler logged `show`, which is unbound when the
            # lookup itself raised, causing a NameError inside the except block
            log.error('Failed parsing TheTVDB SeasonInfo for id "%s": %s', (identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            if season_number is not None and number == season_number:
                # Requested season found: return it alone
                result = self._parseSeason(show, number, season)
                self.setCache(cache_key, result)
                return result
            else:
                # NOTE(review): when season_number is set but never matches,
                # this still accumulates and returns the other seasons --
                # kept as-is to preserve existing behaviour.
                result.append(self._parseSeason(show, number, season))

        self.setCache(cache_key, result)
        return result

    def getEpisodeInfo(self, identifier = None, params = None):
        """Either return a list of all episodes or a single episode.

        If episode_identifiers contains an episode id, only that episode is returned.
        """
        params = params or {}

        season_number = self.getIdentifier(params.get('season_number', None))
        episode_identifier = self.getIdentifier(params.get('episode_identifiers', None))
        identifier = self.getIdentifier(identifier)

        if not identifier and season_number is None:
            return False

        # season_identifier must contain the 'show id : season number' since there is no tvdb id
        # for season and we need a reference to both the show id and season number
        if not identifier and season_number:
            try:
                identifier, season_number = season_number.split(':')
                season_number = int(season_number)
            except:
                return None

        cache_key = 'thetvdb.cache.%s.%s.%s' % (identifier, episode_identifier, season_number)
        log.debug('Getting EpisodeInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError):
            # BUGFIX: same unbound-`show` problem as in getSeasonInfo
            log.error('Failed parsing TheTVDB EpisodeInfo for id "%s": %s', (identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            if season_number is not None and number != season_number:
                continue

            for episode in season.values():
                if episode_identifier is not None and episode['id'] == toUnicode(episode_identifier):
                    result = self._parseEpisode(episode)
                    self.setCache(cache_key, result)
                    return result
                else:
                    result.append(self._parseEpisode(episode))

        self.setCache(cache_key, result)
        return result

    def getIdentifier(self, value):
        """Accept either a raw id or an identifiers dict; return the thetvdb id."""
        if type(value) is dict:
            return value.get('thetvdb')

        return value

    def _parseShow(self, show):
        """Convert a tvdb_api Show (or a plain search-result dict) to the internal format."""

        # NOTE: a tvdb_api Show object only supports item access (show['id'],
        # not show.get('id')); plain search results are dicts supporting .get()
        def get(name):
            return show.get(name) if not hasattr(show, 'search') else show[name]

        ## Images
        poster = get('poster')
        backdrop = get('fanart')

        genres = splitString(get('genre'), '|')
        if get('firstaired') is not None:
            try: year = datetime.strptime(get('firstaired'), '%Y-%m-%d').year
            except: year = None
        else:
            year = None

        show_data = {
            'identifiers': {
                'thetvdb': tryInt(get('id')),
                'imdb': get('imdb_id'),
                'zap2it': get('zap2it_id'),
            },
            'type': 'show',
            'titles': [get('seriesname')],
            'images': {
                'poster': [poster] if poster else [],
                'backdrop': [backdrop] if backdrop else [],
                'poster_original': [],
                'backdrop_original': [],
            },
            'year': year,
            'genres': genres,
            'network': get('network'),
            'plot': get('overview'),
            'networkid': get('networkid'),
            'air_day': (get('airs_dayofweek') or '').lower(),
            'air_time': self.parseTime(get('airs_time')),
            'firstaired': get('firstaired'),
            'runtime': tryInt(get('runtime')),
            'contentrating': get('contentrating'),
            'rating': {},
            'actors': splitString(get('actors'), '|'),
            'status': get('status'),
            'language': get('language'),
        }

        if tryFloat(get('rating')):
            # BUGFIX: a stray trailing comma used to wrap this list in a 1-tuple
            show_data['rating']['thetvdb'] = [tryFloat(get('rating')), tryInt(get('ratingcount'))]

        show_data = dict((k, v) for k, v in show_data.iteritems() if v)

        # Only load season info when available
        if type(show) == Show:

            # Parse season and episode data
            show_data['seasons'] = {}

            for season_nr in show:
                season = self._parseSeason(show, season_nr, show[season_nr])
                season['episodes'] = {}

                for episode_nr in show[season_nr]:
                    season['episodes'][episode_nr] = self._parseEpisode(show[season_nr][episode_nr])

                show_data['seasons'][season_nr] = season

        return show_data

    def _parseSeason(self, show, number, season):
        """Build the internal season dict; the `season` object itself carries no data."""

        poster = []
        try:
            # Pick the banner with the highest rating * ratingcount for this
            # season in the configured language
            temp_poster = {}
            for id, data in show.data['_banners']['season']['season'].items():
                if data.get('season') == str(number) and data.get('language') == self.tvdb_api_parms['language']:
                    temp_poster[tryFloat(data.get('rating')) * tryInt(data.get('ratingcount'))] = data.get('_bannerpath')
            poster.append(temp_poster[sorted(temp_poster, reverse = True)[0]])
        except:
            pass

        season_data = {
            'identifiers': {
                'thetvdb': show['id'] if show.get('id') else show[number][1]['seasonid']
            },
            'number': tryInt(number),
            'images': {
                'poster': poster,
            },
        }

        season_data = dict((k, v) for k, v in season_data.iteritems() if v)
        return season_data

    def _parseEpisode(self, episode):
        """Convert a raw tvdb_api episode record to the internal episode dict.

        Raw records carry keys such as 'episodenumber', 'episodename',
        'seasonnumber', 'firstaired', 'filename' (thumbnail URL), 'overview',
        'absolute_number' and 'imdb_id'.
        """

        def get(name, default = None):
            return episode.get(name, default)

        poster = get('filename', [])

        episode_data = {
            'number': tryInt(get('episodenumber')),
            'absolute_number': tryInt(get('absolute_number')),
            'identifiers': {
                'thetvdb': tryInt(episode['id'])
            },
            'type': 'episode',
            'titles': [get('episodename')] if get('episodename') else [],
            'images': {
                'poster': [poster] if poster else [],
            },
            'released': get('firstaired'),
            'plot': get('overview'),
            'firstaired': get('firstaired'),
            'language': get('language'),
        }

        if get('imdb_id'):
            episode_data['identifiers']['imdb'] = get('imdb_id')

        episode_data = dict((k, v) for k, v in episode_data.iteritems() if v)
        return episode_data

    def parseTime(self, time):
        # Placeholder: air-time strings are currently passed through untouched
        return time

    def isDisabled(self):
        """The provider is unusable without an API key."""
        if self.conf('api_key') == '':
            log.error('No API key provided.')
            return True
        else:
            return False
|
||||
|
||||
|
||||
# Settings block for the TheTVDB info provider (hidden from the UI)
config = [{
    'name': 'thetvdb',
    'groups': [
        {
            'tab': 'providers',
            # NOTE(review): group name reads 'tmdb' -- looks like a copy/paste
            # slip, but renaming it may invalidate saved settings; confirm
            # against the settings storage before changing.
            'name': 'tmdb',
            'label': 'TheTVDB',
            'hidden': True,
            'description': 'Used for all calls to TheTVDB.',
            'options': [
                {
                    'name': 'api_key',
                    'default': '7966C02F860586D2',
                    'label': 'Api Key',
                },
            ],
        },
    ],
}]
|
||||
@@ -1,86 +0,0 @@
|
||||
import urllib
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.show.providers.base import ShowProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Trakt'
|
||||
|
||||
|
||||
class Trakt(ShowProvider):
    """Show search provider backed by the trakt.tv JSON API (v1)."""

    api_key = 'c043de5ada9d180028c10229d2a3ea5b'
    base_url = 'http://api.trakt.tv/%%s.json/%s' % api_key

    def __init__(self):
        addEvent('info.search', self.search, priority = 1)
        addEvent('show.search', self.search, priority = 1)

    def search(self, q, limit = 12):
        """Search trakt for shows matching `q`; returns a list of parsed show dicts."""
        if self.isDisabled():
            return False

        # Check for cached result
        cache_key = 'trakt.cache.search.%s.%s' % (q, limit)
        results = self.getCache(cache_key) or []

        if results:
            return results

        # Search
        log.debug('Searching for show: "%s"', q)
        response = self._request('search/shows', query = q, limit = limit)

        if not response:
            return []

        # Parse search results
        for show in response:
            results.append(self._parseShow(show))

        log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])

        self.setCache(cache_key, results)
        return results

    def _request(self, action, **kwargs):
        """GET base_url % action, with kwargs urlencoded as the query string."""
        url = self.base_url % action

        if kwargs:
            url += '?' + urllib.urlencode(kwargs)

        return self.getJsonData(url)

    def _parseShow(self, show):
        """Convert a raw trakt show dict into the internal show format."""
        # Images
        images = show.get('images', {})

        poster = images.get('poster')
        backdrop = images.get('backdrop')

        # Rating (percentage 0-100, may be missing)
        rating = show.get('ratings', {}).get('percentage')

        # Build show dict
        show_data = {
            'identifiers': {
                'thetvdb': show.get('tvdb_id'),
                'imdb': show.get('imdb_id'),
                'tvrage': show.get('tvrage_id'),
            },
            'type': 'show',
            'titles': [show.get('title')],
            'images': {
                'poster': [poster] if poster else [],
                'backdrop': [backdrop] if backdrop else [],
                'poster_original': [],
                'backdrop_original': [],
            },
            'year': show.get('year'),
        }

        # BUGFIX: a missing rating used to crash with float(None); now the
        # rating key is simply omitted when trakt has no percentage
        if rating is not None:
            show_data['rating'] = {'trakt': float(rating) / 10}

        return dict((k, v) for k, v in show_data.iteritems() if v)
|
||||
@@ -1,216 +0,0 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||
from couchpotato.core.media.show.providers.base import ShowProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Xem'
|
||||
|
||||
|
||||
class Xem(ShowProvider):
    """Scene <-> tvdb numbering and alias-title provider backed by thexem.de.

    The xem API (all responses are JSON with a 'result' status and 'data'):
      /map/single?id=&origin=&episode=&season=&absolute=  -- map one episode
      /map/all?id=&origin=                                -- full episode mapping
      /map/names?id=&origin=                              -- alias names per show
      /map/allNames?origin=&season=&language=             -- all known names
    See http://thexem.de for the full parameter documentation.
    """

    def __init__(self):
        addEvent('show.info', self.getShowInfo, priority = 5)
        addEvent('episode.info', self.getEpisodeInfo, priority = 5)

        self.config = {}
        self.config['base_url'] = "http://thexem.de"
        self.config['url_single'] = u"%(base_url)s/map/single?" % self.config
        self.config['url_all'] = u"%(base_url)s/map/all?" % self.config
        self.config['url_names'] = u"%(base_url)s/map/names?" % self.config
        self.config['url_all_names'] = u"%(base_url)s/map/allNames?" % self.config

    def getShowInfo(self, identifiers = None):
        """Return {'seasons': ..., 'title_map': ...} mapping info for a tvdb show id."""
        if self.isDisabled():
            return {}

        identifier = identifiers.get('thetvdb')

        if not identifier:
            return {}

        cache_key = 'xem.cache.%s' % identifier
        log.debug('Getting showInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        result['seasons'] = {}

        # Create season/episode and absolute mappings
        url = self.config['url_all'] + "id=%s&origin=tvdb" % tryUrlencode(identifier)
        response = self.getJsonData(url)

        if response and response.get('result') == 'success':
            data = response.get('data', None)
            self.parseMaps(result, data)

        # Create name alias mappings
        url = self.config['url_names'] + "id=%s&origin=tvdb" % tryUrlencode(identifier)
        response = self.getJsonData(url)

        if response and response.get('result') == 'success':
            data = response.get('data', None)
            self.parseNames(result, data)

        self.setCache(cache_key, result)
        return result

    def getEpisodeInfo(self, identifiers = None, params = None):
        """Return the mapping info for a single episode, or False when unknown."""
        # Avoid the shared mutable-default-argument pitfall
        params = params or {}

        episode_num = params.get('episode_number', None)
        if episode_num is None:
            return False

        season_num = params.get('season_number', None)
        if season_num is None:
            return False

        result = self.getShowInfo(identifiers)

        if not result:
            return False

        # Find season
        if season_num not in result['seasons']:
            return False

        season = result['seasons'][season_num]

        # Find episode
        # BUGFIX: a season created by parseNames alone has no 'episodes' key;
        # direct season['episodes'] access used to raise KeyError here
        episodes = season.get('episodes') or {}
        if episode_num not in episodes:
            return False

        return episodes[episode_num]

    def parseMaps(self, result, data, master = 'tvdb'):
        """Parse the xem /map/all response into result['seasons'].

        To retrieve a scene mapping afterwards:
            if 'scene' in map['map_episode'][1][1]:
                print map['map_episode'][1][1]['scene']['season']
        """
        if not isinstance(data, list):
            return

        for episode_map in data:
            origin = episode_map.pop(master, None)
            if origin is None:
                continue  # No master origin to map to

            o_season = origin['season']
            o_episode = origin['episode']

            # Create season info
            if o_season not in result['seasons']:
                result['seasons'][o_season] = {}

            season = result['seasons'][o_season]

            if 'episodes' not in season:
                season['episodes'] = {}

            # Create episode info
            if o_episode not in season['episodes']:
                season['episodes'][o_episode] = {}

            episode = season['episodes'][o_episode]
            episode['episode_map'] = episode_map

    def parseNames(self, result, data):
        """Attach alias titles (overall and per season) onto `result`."""
        # BUGFIX: a 'success' response may still carry data = None;
        # data.pop(...) used to raise AttributeError in that case
        if not isinstance(data, dict):
            return

        result['title_map'] = data.pop('all', None)

        for season, title_map in data.items():
            season = int(season)

            # Create season info
            if season not in result['seasons']:
                result['seasons'][season] = {}

            result['seasons'][season]['title_map'] = title_map

    def isDisabled(self):
        # Always enabled when the module is run directly (manual testing)
        if __name__ == '__main__':
            return False
        if self.conf('enabled'):
            return False
        else:
            return True
|
||||
|
||||
|
||||
# Settings block for the TheXem mapping provider (hidden from the UI)
config = [{
    'name': 'xem',
    'groups': [
        {
            'tab': 'providers',
            'name': 'xem',
            'label': 'TheXem',
            'hidden': True,
            'description': 'Used for all calls to TheXem.',
            'options': [
                {
                    'name': 'enabled',
                    'default': True,
                    'label': 'Enabled',
                },
            ],
        },
    ],
}]
|
||||
@@ -1,51 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media._base.providers.nzb.binsearch import Base
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.environment import Env
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'BinSearch'
|
||||
|
||||
|
||||
class BinSearch(MultiProvider):
    """Exposes the BinSearch NZB provider for both seasons and episodes."""

    def getTypes(self):
        types = [Season, Episode]
        return types
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """Season-pack searches against BinSearch."""

    def buildUrl(self, media, quality):
        """Build the BinSearch advanced-search query string for a season."""
        params = {
            'q': fireEvent('media.search_query', media, single = True),
            'm': 'n',
            'max': 400,
            'adv_age': Env.setting('retention', 'nzb'),
            'adv_sort': 'date',
            'adv_col': 'on',
            'adv_nfo': 'on',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
        }
        return tryUrlencode(params)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """Single-episode searches against BinSearch."""

    def buildUrl(self, media, quality):
        """Build the BinSearch advanced-search query string for an episode."""
        params = {
            'q': fireEvent('media.search_query', media, single = True),
            'm': 'n',
            'max': 400,
            'adv_age': Env.setting('retention', 'nzb'),
            'adv_sort': 'date',
            'adv_col': 'on',
            'adv_nfo': 'on',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
        }
        return tryUrlencode(params)
|
||||
@@ -1,49 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media._base.providers.nzb.newznab import Base
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Newznab'
|
||||
|
||||
|
||||
class Newznab(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
def buildUrl(self, media, host):
|
||||
related = fireEvent('library.related', media, single = True)
|
||||
identifier = fireEvent('library.identifier', media, single = True)
|
||||
|
||||
query = tryUrlencode({
|
||||
't': 'tvsearch',
|
||||
'apikey': host['api_key'],
|
||||
'q': related['show']['title'],
|
||||
'season': identifier['season'],
|
||||
'extended': 1
|
||||
})
|
||||
return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
def buildUrl(self, media, host):
|
||||
related = fireEvent('library.related', media, single = True)
|
||||
identifier = fireEvent('library.identifier', media, single = True)
|
||||
query = tryUrlencode({
|
||||
't': 'tvsearch',
|
||||
'apikey': host['api_key'],
|
||||
'q': related['show']['title'],
|
||||
'season': identifier['season'],
|
||||
'ep': identifier['episode'],
|
||||
'extended': 1
|
||||
})
|
||||
|
||||
return query
|
||||
@@ -1,52 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.nzb.nzbclub import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NZBClub'
|
||||
|
||||
|
||||
class NZBClub(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
def buildUrl(self, media):
|
||||
|
||||
q = tryUrlencode({
|
||||
'q': fireEvent('media.search_query', media, single = True),
|
||||
})
|
||||
|
||||
query = tryUrlencode({
|
||||
'ig': 1,
|
||||
'rpp': 200,
|
||||
'st': 5,
|
||||
'sp': 1,
|
||||
'ns': 1,
|
||||
})
|
||||
return '%s&%s' % (q, query)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
def buildUrl(self, media):
|
||||
|
||||
q = tryUrlencode({
|
||||
'q': fireEvent('media.search_query', media, single = True),
|
||||
})
|
||||
|
||||
query = tryUrlencode({
|
||||
'ig': 1,
|
||||
'rpp': 200,
|
||||
'st': 5,
|
||||
'sp': 1,
|
||||
'ns': 1,
|
||||
})
|
||||
return '%s&%s' % (q, query)
|
||||
@@ -1,51 +0,0 @@
|
||||
from couchpotato import Env
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.nzb.nzbindex import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NzbIndex'
|
||||
|
||||
|
||||
class NzbIndex(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
query = tryUrlencode({
|
||||
'q': fireEvent('media.search_query', media, single = True),
|
||||
'age': Env.setting('retention', 'nzb'),
|
||||
'sort': 'agedesc',
|
||||
'minsize': quality.get('size_min'),
|
||||
'maxsize': quality.get('size_max'),
|
||||
'rating': 1,
|
||||
'max': 250,
|
||||
'more': 1,
|
||||
'complete': 1,
|
||||
})
|
||||
return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
query = tryUrlencode({
|
||||
'q': fireEvent('media.search_query', media, single = True),
|
||||
'age': Env.setting('retention', 'nzb'),
|
||||
'sort': 'agedesc',
|
||||
'minsize': quality.get('size_min'),
|
||||
'maxsize': quality.get('size_max'),
|
||||
'rating': 1,
|
||||
'max': 250,
|
||||
'more': 1,
|
||||
'complete': 1,
|
||||
})
|
||||
return query
|
||||
@@ -1,36 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.bithdtv import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'BiTHDTV'
|
||||
|
||||
|
||||
class BiTHDTV(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
def buildUrl(self, media):
|
||||
query = tryUrlencode({
|
||||
'search': fireEvent('media.search_query', media, single = True),
|
||||
'cat': 12 # Season cat
|
||||
})
|
||||
return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
def buildUrl(self, media):
|
||||
query = tryUrlencode({
|
||||
'search': fireEvent('media.search_query', media, single = True),
|
||||
'cat': 10 # Episode cat
|
||||
})
|
||||
return query
|
||||
@@ -1,41 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.bitsoup import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Bitsoup'
|
||||
|
||||
|
||||
class Bitsoup(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
# For season bundles, bitsoup currently only has one category
|
||||
def buildUrl(self, media, quality):
|
||||
query = tryUrlencode({
|
||||
'search': fireEvent('media.search_query', media, single = True),
|
||||
'cat': 45 # TV-Packs Category
|
||||
})
|
||||
return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
cat_ids = [
|
||||
([42], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
|
||||
([49], ['hdtv_sd', 'webdl_480p'])
|
||||
]
|
||||
cat_backup_id = 0
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
query = tryUrlencode({
|
||||
'search': fireEvent('media.search_query', media, single = True),
|
||||
'cat': self.getCatId(quality['identifier'])[0],
|
||||
})
|
||||
return query
|
||||
@@ -1,37 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.iptorrents import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'IPTorrents'
|
||||
|
||||
|
||||
class IPTorrents(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
# TODO come back to this later, a better quality system needs to be created
|
||||
cat_ids = [
|
||||
([65], [
|
||||
'bluray_1080p', 'bluray_720p',
|
||||
'bdrip_1080p', 'bdrip_720p',
|
||||
'brrip_1080p', 'brrip_720p',
|
||||
'webdl_1080p', 'webdl_720p', 'webdl_480p',
|
||||
'hdtv_720p', 'hdtv_sd'
|
||||
]),
|
||||
]
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
# TODO come back to this later, a better quality system needs to be created
|
||||
cat_ids = [
|
||||
([5], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
([4, 78, 79], ['hdtv_sd'])
|
||||
]
|
||||
@@ -1,27 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.publichd import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'PublicHD'
|
||||
|
||||
|
||||
class PublicHD(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
def buildUrl(self, media):
|
||||
return fireEvent('media.search_query', media, single = True)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
def buildUrl(self, media):
|
||||
return fireEvent('media.search_query', media, single = True)
|
||||
@@ -1,60 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.sceneaccess import Base
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'SceneAccess'
|
||||
|
||||
|
||||
class SceneAccess(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([26], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
]
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
url = self.urls['archive'] % (
|
||||
self.getCatId(quality['identifier'])[0],
|
||||
self.getCatId(quality['identifier'])[0]
|
||||
)
|
||||
|
||||
arguments = tryUrlencode({
|
||||
'search': fireEvent('media.search_query', media, single = True),
|
||||
'method': 3,
|
||||
})
|
||||
query = "%s&%s" % (url, arguments)
|
||||
|
||||
return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([27], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
([17, 11], ['hdtv_sd'])
|
||||
]
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
url = self.urls['search'] % (
|
||||
self.getCatId(quality['identifier'])[0],
|
||||
self.getCatId(quality['identifier'])[0]
|
||||
)
|
||||
|
||||
arguments = tryUrlencode({
|
||||
'search': fireEvent('media.search_query', media, single = True),
|
||||
'method': 3,
|
||||
})
|
||||
query = "%s&%s" % (url, arguments)
|
||||
|
||||
return query
|
||||
@@ -1,46 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.thepiratebay import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ThePirateBay'
|
||||
|
||||
|
||||
class ThePirateBay(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([208], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
([205], ['hdtv_sd'])
|
||||
]
|
||||
|
||||
def buildUrl(self, media, page, cats):
|
||||
return (
|
||||
tryUrlencode('"%s"' % fireEvent('media.search_query', media, single = True)),
|
||||
page,
|
||||
','.join(str(x) for x in cats)
|
||||
)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([208], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
([205], ['hdtv_sd'])
|
||||
]
|
||||
|
||||
def buildUrl(self, media, page, cats):
|
||||
return (
|
||||
tryUrlencode('"%s"' % fireEvent('media.search_query', media, single = True)),
|
||||
page,
|
||||
','.join(str(x) for x in cats)
|
||||
)
|
||||
@@ -1,34 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentday import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentDay'
|
||||
|
||||
|
||||
class TorrentDay(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([14], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
]
|
||||
def buildUrl(self, media):
|
||||
return fireEvent('media.search_query', media, single = True)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
cat_ids = [
|
||||
([7], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
([2], [24], [26], ['hdtv_sd'])
|
||||
]
|
||||
def buildUrl(self, media):
|
||||
return fireEvent('media.search_query', media, single = True)
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
from couchpotato import fireEvent
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentleech import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentLeech'
|
||||
|
||||
|
||||
class TorrentLeech(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([27], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
]
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
return (
|
||||
tryUrlencode(fireEvent('media.search_query', media, single = True)),
|
||||
self.getCatId(quality['identifier'])[0]
|
||||
)
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([32], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
|
||||
([26], ['hdtv_sd'])
|
||||
]
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
return (
|
||||
tryUrlencode(fireEvent('media.search_query', media, single = True)),
|
||||
self.getCatId(quality['identifier'])[0]
|
||||
)
|
||||
@@ -1,38 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentpotato import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentPotato'
|
||||
|
||||
|
||||
class TorrentPotato(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
|
||||
def buildUrl(self, media, host):
|
||||
arguments = tryUrlencode({
|
||||
'user': host['name'],
|
||||
'passkey': host['pass_key'],
|
||||
'search': fireEvent('media.search_query', media, single = True)
|
||||
})
|
||||
return '%s?%s' % (host['host'], arguments)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
|
||||
def buildUrl(self, media, host):
|
||||
arguments = tryUrlencode({
|
||||
'user': host['name'],
|
||||
'passkey': host['pass_key'],
|
||||
'search': fireEvent('media.search_query', media, single = True)
|
||||
})
|
||||
return '%s?%s' % (host['host'], arguments)
|
||||
@@ -1,52 +0,0 @@
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentshack import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentShack'
|
||||
|
||||
|
||||
class TorrentShack(MultiProvider):
|
||||
|
||||
def getTypes(self):
|
||||
return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
|
||||
# TorrentShack tv season search categories
|
||||
# TV-SD Pack - 980
|
||||
# TV-HD Pack - 981
|
||||
# Full Blu-ray - 970
|
||||
cat_ids = [
|
||||
([980], ['hdtv_sd']),
|
||||
([981], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
|
||||
([970], ['bluray_1080p', 'bluray_720p']),
|
||||
]
|
||||
cat_backup_id = 980
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
query = (tryUrlencode(fireEvent('media.search_query', media, single = True)),
|
||||
self.getCatId(quality['identifier'])[0],
|
||||
self.getSceneOnly())
|
||||
return query
|
||||
|
||||
class Episode(EpisodeProvider, Base):
|
||||
# TorrentShack tv episode search categories
|
||||
# TV/x264-HD - 600
|
||||
# TV/x264-SD - 620
|
||||
# TV/DVDrip - 700
|
||||
cat_ids = [
|
||||
([600], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
|
||||
([620], ['hdtv_sd'])
|
||||
]
|
||||
cat_backup_id = 620
|
||||
|
||||
def buildUrl(self, media, quality):
|
||||
query = (tryUrlencode(fireEvent('media.search_query', media, single = True)),
|
||||
self.getCatId(quality['identifier'])[0],
|
||||
self.getSceneOnly())
|
||||
return query
|
||||
@@ -1,152 +0,0 @@
|
||||
from couchpotato import fireEvent, get_db, Env
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEventAsync
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.searcher.base import SearcherBase
|
||||
from couchpotato.core.media._base.searcher.main import SearchSetupError
|
||||
from couchpotato.core.media.show import ShowTypeBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'EpisodeSearcher'
|
||||
|
||||
|
||||
class EpisodeSearcher(SearcherBase, ShowTypeBase):
|
||||
type = 'episode'
|
||||
|
||||
in_progress = False
|
||||
|
||||
def __init__(self):
|
||||
super(EpisodeSearcher, self).__init__()
|
||||
|
||||
addEvent('%s.searcher.all' % self.getType(), self.searchAll)
|
||||
addEvent('%s.searcher.single' % self.getType(), self.single)
|
||||
addEvent('searcher.correct_release', self.correctRelease)
|
||||
|
||||
addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
|
||||
'desc': 'Starts a full search for all wanted shows',
|
||||
})
|
||||
|
||||
addApiView('%s.searcher.single' % self.getType(), self.singleView)
|
||||
|
||||
def searchAllView(self, **kwargs):
|
||||
fireEventAsync('%s.searcher.all' % self.getType(), manual = True)
|
||||
|
||||
return {
|
||||
'success': not self.in_progress
|
||||
}
|
||||
|
||||
def searchAll(self, manual = False):
|
||||
pass
|
||||
|
||||
def singleView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('%s.searcher.single' % self.getType(), media, single = True)
|
||||
}
|
||||
|
||||
def single(self, media, profile = None, quality_order = None, search_protocols = None, manual = False):
|
||||
db = get_db()
|
||||
|
||||
related = fireEvent('library.related', media, single = True)
|
||||
|
||||
# TODO search_protocols, profile, quality_order can be moved to a base method
|
||||
# Find out search type
|
||||
try:
|
||||
if not search_protocols:
|
||||
search_protocols = fireEvent('searcher.protocols', single = True)
|
||||
except SearchSetupError:
|
||||
return
|
||||
|
||||
if not profile and related['show']['profile_id']:
|
||||
profile = db.get('id', related['show']['profile_id'])
|
||||
|
||||
if not quality_order:
|
||||
quality_order = fireEvent('quality.order', single = True)
|
||||
|
||||
# TODO: check episode status
|
||||
# TODO: check air date
|
||||
#if not self.conf('always_search') and not self.couldBeReleased(quality_type['quality']['identifier'] in pre_releases, release_dates, movie['library']['year']):
|
||||
# too_early_to_search.append(quality_type['quality']['identifier'])
|
||||
# return
|
||||
|
||||
ret = False
|
||||
has_better_quality = None
|
||||
found_releases = []
|
||||
too_early_to_search = []
|
||||
|
||||
releases = fireEvent('release.for_media', media['_id'], single = True)
|
||||
query = fireEvent('library.query', media, condense = False, single = True)
|
||||
|
||||
index = 0
|
||||
for q_identifier in profile.get('qualities'):
|
||||
quality_custom = {
|
||||
'quality': q_identifier,
|
||||
'finish': profile['finish'][index],
|
||||
'wait_for': profile['wait_for'][index],
|
||||
'3d': profile['3d'][index] if profile.get('3d') else False
|
||||
}
|
||||
|
||||
has_better_quality = 0
|
||||
|
||||
# See if better quality is available
|
||||
for release in releases:
|
||||
if quality_order.index(release['quality']) <= quality_order.index(q_identifier) and release['status'] not in ['available', 'ignored', 'failed']:
|
||||
has_better_quality += 1
|
||||
|
||||
# Don't search for quality lower then already available.
|
||||
if has_better_quality is 0:
|
||||
|
||||
log.info('Searching for %s in %s', (query, q_identifier))
|
||||
quality = fireEvent('quality.single', identifier = q_identifier, single = True)
|
||||
quality['custom'] = quality_custom
|
||||
|
||||
results = fireEvent('searcher.search', search_protocols, media, quality, single = True)
|
||||
if len(results) == 0:
|
||||
log.debug('Nothing found for %s in %s', (query, q_identifier))
|
||||
|
||||
# Add them to this movie releases list
|
||||
found_releases += fireEvent('release.create_from_search', results, media, quality, single = True)
|
||||
|
||||
# Try find a valid result and download it
|
||||
if fireEvent('release.try_download_result', results, media, quality, single = True):
|
||||
ret = True
|
||||
|
||||
# Remove releases that aren't found anymore
|
||||
for release in releases:
|
||||
if release.get('status') == 'available' and release.get('identifier') not in found_releases:
|
||||
fireEvent('release.delete', release.get('_id'), single = True)
|
||||
else:
|
||||
log.info('Better quality (%s) already available or snatched for %s', (q_identifier, query))
|
||||
fireEvent('media.restatus', media['_id'])
|
||||
break
|
||||
|
||||
# Break if CP wants to shut down
|
||||
if self.shuttingDown() or ret:
|
||||
break
|
||||
|
||||
if len(too_early_to_search) > 0:
|
||||
log.info2('Too early to search for %s, %s', (too_early_to_search, query))
|
||||
|
||||
def correctRelease(self, release = None, media = None, quality = None, **kwargs):
|
||||
if media.get('type') != 'show.episode':
|
||||
return
|
||||
|
||||
retention = Env.setting('retention', section = 'nzb')
|
||||
|
||||
if release.get('seeders') is None and 0 < retention < release.get('age', 0):
|
||||
log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (release['age'], retention, release['name']))
|
||||
return False
|
||||
|
||||
# Check for required and ignored words
|
||||
if not fireEvent('searcher.correct_words', release['name'], media, single = True):
|
||||
return False
|
||||
|
||||
# TODO Matching is quite costly, maybe we should be caching release matches somehow? (also look at caper optimizations)
|
||||
match = fireEvent('matcher.match', release, media, quality, single = True)
|
||||
if match:
|
||||
return match.weight
|
||||
|
||||
return False
|
||||
@@ -1,172 +0,0 @@
|
||||
from couchpotato import get_db, Env
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.searcher.base import SearcherBase
|
||||
from couchpotato.core.media.movie.searcher import SearchSetupError
|
||||
from couchpotato.core.media.show import ShowTypeBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'SeasonSearcher'
|
||||
|
||||
|
||||
class SeasonSearcher(SearcherBase, ShowTypeBase):
|
||||
type = 'season'
|
||||
|
||||
in_progress = False
|
||||
|
||||
def __init__(self):
|
||||
super(SeasonSearcher, self).__init__()
|
||||
|
||||
addEvent('%s.searcher.all' % self.getType(), self.searchAll)
|
||||
addEvent('%s.searcher.single' % self.getType(), self.single)
|
||||
addEvent('searcher.correct_release', self.correctRelease)
|
||||
|
||||
addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
|
||||
'desc': 'Starts a full search for all wanted seasons',
|
||||
})
|
||||
|
||||
def searchAllView(self, **kwargs):
|
||||
fireEventAsync('%s.searcher.all' % self.getType(), manual = True)
|
||||
|
||||
return {
|
||||
'success': not self.in_progress
|
||||
}
|
||||
|
||||
def searchAll(self, manual = False):
|
||||
pass
|
||||
|
||||
def single(self, media, profile = None, quality_order = None, search_protocols = None, manual = False):
|
||||
db = get_db()
|
||||
|
||||
related = fireEvent('library.related', media, single = True)
|
||||
|
||||
# TODO search_protocols, profile, quality_order can be moved to a base method
|
||||
# Find out search type
|
||||
try:
|
||||
if not search_protocols:
|
||||
search_protocols = fireEvent('searcher.protocols', single = True)
|
||||
except SearchSetupError:
|
||||
return
|
||||
|
||||
if not profile and related['show']['profile_id']:
|
||||
profile = db.get('id', related['show']['profile_id'])
|
||||
|
||||
if not quality_order:
|
||||
quality_order = fireEvent('quality.order', single = True)
|
||||
|
||||
# Find 'active' episodes
|
||||
episodes = related['episodes']
|
||||
episodes_active = []
|
||||
|
||||
for episode in episodes:
|
||||
if episode.get('status') != 'active':
|
||||
continue
|
||||
|
||||
episodes_active.append(episode)
|
||||
|
||||
if len(episodes_active) == len(episodes):
|
||||
# All episodes are 'active', try and search for full season
|
||||
if self.search(media, profile, quality_order, search_protocols):
|
||||
# Success, end season search
|
||||
return True
|
||||
else:
|
||||
log.info('Unable to find season pack, searching for individual episodes...')
|
||||
|
||||
# Search for each episode individually
|
||||
for episode in episodes_active:
|
||||
fireEvent('show.episode.searcher.single', episode, profile, quality_order, search_protocols, manual)
|
||||
|
||||
# TODO (testing) only grab one episode
|
||||
return True
|
||||
|
||||
return True
|
||||
|
||||
def search(self, media, profile, quality_order, search_protocols):
|
||||
# TODO: check episode status
|
||||
# TODO: check air date
|
||||
#if not self.conf('always_search') and not self.couldBeReleased(quality_type['quality']['identifier'] in pre_releases, release_dates, movie['library']['year']):
|
||||
# too_early_to_search.append(quality_type['quality']['identifier'])
|
||||
# return
|
||||
|
||||
ret = False
|
||||
has_better_quality = None
|
||||
found_releases = []
|
||||
too_early_to_search = []
|
||||
|
||||
releases = fireEvent('release.for_media', media['_id'], single = True)
|
||||
query = fireEvent('library.query', media, condense = False, single = True)
|
||||
|
||||
index = 0
|
||||
for q_identifier in profile.get('qualities'):
|
||||
quality_custom = {
|
||||
'quality': q_identifier,
|
||||
'finish': profile['finish'][index],
|
||||
'wait_for': profile['wait_for'][index],
|
||||
'3d': profile['3d'][index] if profile.get('3d') else False
|
||||
}
|
||||
|
||||
has_better_quality = 0
|
||||
|
||||
# See if better quality is available
|
||||
for release in releases:
|
||||
if quality_order.index(release['quality']) <= quality_order.index(q_identifier) and release['status'] not in ['available', 'ignored', 'failed']:
|
||||
has_better_quality += 1
|
||||
|
||||
# Don't search for quality lower then already available.
|
||||
if has_better_quality is 0:
|
||||
|
||||
log.info('Searching for %s in %s', (query, q_identifier))
|
||||
quality = fireEvent('quality.single', identifier = q_identifier, single = True)
|
||||
quality['custom'] = quality_custom
|
||||
|
||||
results = fireEvent('searcher.search', search_protocols, media, quality, single = True)
|
||||
if len(results) == 0:
|
||||
log.debug('Nothing found for %s in %s', (query, q_identifier))
|
||||
|
||||
# Add them to this movie releases list
|
||||
found_releases += fireEvent('release.create_from_search', results, media, quality, single = True)
|
||||
|
||||
# Try find a valid result and download it
|
||||
if fireEvent('release.try_download_result', results, media, quality, single = True):
|
||||
ret = True
|
||||
|
||||
# Remove releases that aren't found anymore
|
||||
for release in releases:
|
||||
if release.get('status') == 'available' and release.get('identifier') not in found_releases:
|
||||
fireEvent('release.delete', release.get('_id'), single = True)
|
||||
else:
|
||||
log.info('Better quality (%s) already available or snatched for %s', (q_identifier, query))
|
||||
fireEvent('media.restatus', media['_id'])
|
||||
break
|
||||
|
||||
# Break if CP wants to shut down
|
||||
if self.shuttingDown() or ret:
|
||||
break
|
||||
|
||||
if len(too_early_to_search) > 0:
|
||||
log.info2('Too early to search for %s, %s', (too_early_to_search, query))
|
||||
|
||||
return len(found_releases) > 0
|
||||
|
||||
def correctRelease(self, release = None, media = None, quality = None, **kwargs):
|
||||
if media.get('type') != 'show.season':
|
||||
return
|
||||
|
||||
retention = Env.setting('retention', section = 'nzb')
|
||||
|
||||
if release.get('seeders') is None and 0 < retention < release.get('age', 0):
|
||||
log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (release['age'], retention, release['name']))
|
||||
return False
|
||||
|
||||
# Check for required and ignored words
|
||||
if not fireEvent('searcher.correct_words', release['name'], media, single = True):
|
||||
return False
|
||||
|
||||
# TODO Matching is quite costly, maybe we should be caching release matches somehow? (also look at caper optimizations)
|
||||
match = fireEvent('matcher.match', release, media, quality, single = True)
|
||||
if match:
|
||||
return match.weight
|
||||
|
||||
return False
|
||||
@@ -1,88 +0,0 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, addEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.variable import getTitle
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.searcher.base import SearcherBase
|
||||
from couchpotato.core.media._base.searcher.main import SearchSetupError
|
||||
from couchpotato.core.media.show import ShowTypeBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ShowSearcher'
|
||||
|
||||
|
||||
class ShowSearcher(SearcherBase, ShowTypeBase):
    """Searcher plugin for whole shows: walks a show's season tree and
    delegates each searchable season to the season searcher via events."""

    # Media type this searcher handles (used by getType() in the bases —
    # assumed from the '%s.searcher.*' event names below; TODO confirm).
    type = 'show'

    # Flag reported by searchAllView; nothing in this class ever sets it
    # to True (searchAll is a stub), so the API always reports success.
    in_progress = False

    def __init__(self):
        super(ShowSearcher, self).__init__()

        # Register event handlers for full and single show searches,
        # plus the hook that resolves a search title for any show media.
        addEvent('%s.searcher.all' % self.getType(), self.searchAll)
        addEvent('%s.searcher.single' % self.getType(), self.single)
        addEvent('searcher.get_search_title', self.getSearchTitle)

        addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
            'desc': 'Starts a full search for all wanted episodes',
        })

    def searchAllView(self, **kwargs):
        """API endpoint: kick off a full search asynchronously and report
        whether one was already running."""
        fireEventAsync('%s.searcher.all' % self.getType(), manual = True)

        return {
            'success': not self.in_progress
        }

    def searchAll(self, manual = False):
        # Not implemented yet; the api view above still fires this event.
        pass

    def single(self, media, search_protocols = None, manual = False):
        """Search for a single show: resolve protocols, then hand each
        season of the show's library tree to the season searcher."""

        # Find out search type
        try:
            if not search_protocols:
                search_protocols = fireEvent('searcher.protocols', single = True)
        except SearchSetupError:
            # No usable download protocols configured; nothing to do.
            return

        # Shows without a profile, or already completed, are assumed to be
        # managed manually (manage tab) and are skipped.
        if not media['profile_id'] or media['status'] == 'done':
            log.debug('Show doesn\'t have a profile or already done, assuming in manage tab.')
            return

        show_title = fireEvent('media.search_query', media, condense = False, single = True)

        fireEvent('notify.frontend', type = 'show.searcher.started.%s' % media['_id'], data = True, message = 'Searching for "%s"' % show_title)

        # Full show -> seasons -> episodes structure from the library.
        show_tree = fireEvent('library.tree', media, single = True)

        db = get_db()

        profile = db.get('id', media['profile_id'])
        quality_order = fireEvent('quality.order', single = True)

        for season in show_tree.get('seasons', []):
            if not season.get('info'):
                continue

            # Skip specials (and seasons missing 'number') for now
            # TODO: set status for specials to skipped by default
            if not season['info'].get('number'):
                continue

            # Check if full season can be downloaded
            fireEvent('show.season.searcher.single', season, profile, quality_order, search_protocols, manual)

            # TODO (testing) only snatch one season
            # NOTE(review): this early return means the 'ended' notification
            # below only fires when no searchable season was found.
            return

        fireEvent('notify.frontend', type = 'show.searcher.ended.%s' % media['_id'], data = True)

    def getSearchTitle(self, media):
        """Resolve the title to search with: for non-show media (season,
        episode) climb to the related parent show first."""
        if media.get('type') != 'show':
            related = fireEvent('library.related', media, single = True)
            show = related['show']
        else:
            show = media

        return getTitle(show)
|
||||
Some files were not shown because too many files have changed in this diff. Show More
Reference in New Issue
Block a user