Compare commits
206 Commits
tv
...
build/2.6.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
86edf5eb04 | ||
|
|
92f9743d3c | ||
|
|
1b151fbd97 | ||
|
|
0567504394 | ||
|
|
c8a3b64624 | ||
|
|
c657d6d70b | ||
|
|
d307d343e5 | ||
|
|
f2ab59e384 | ||
|
|
55f201040b | ||
|
|
476a5cc3dd | ||
|
|
342a4ad885 | ||
|
|
12159a1b7b | ||
|
|
b773f7b71c | ||
|
|
41aba6b19c | ||
|
|
96def8563b | ||
|
|
bf46a937c0 | ||
|
|
2edb6caa97 | ||
|
|
9e125a361a | ||
|
|
2252ed710c | ||
|
|
07a790e9b2 | ||
|
|
bb6fefd010 | ||
|
|
55e489cc51 | ||
|
|
7fe5a271dc | ||
|
|
ea92c503bb | ||
|
|
6942126b7f | ||
|
|
a6d37bf9c2 | ||
|
|
37c6bc7612 | ||
|
|
d6a264aaed | ||
|
|
108f3292c3 | ||
|
|
fc60727e82 | ||
|
|
49cd8fbc2c | ||
|
|
1991792291 | ||
|
|
29290022e6 | ||
|
|
04aa2e5fa4 | ||
|
|
6772b9d965 | ||
|
|
5df14d67e1 | ||
|
|
73abd1f022 | ||
|
|
e75a8529c9 | ||
|
|
07a7f8cbcf | ||
|
|
9b35a0fb20 | ||
|
|
0622e6e5ab | ||
|
|
f16931906f | ||
|
|
68dcba8853 | ||
|
|
ae8f66df1a | ||
|
|
5237ead5cb | ||
|
|
45b2dff6d2 | ||
|
|
30d56b5d2c | ||
|
|
5ff6824ae9 | ||
|
|
0210859155 | ||
|
|
665478db13 | ||
|
|
84c366ab54 | ||
|
|
908e5eae77 | ||
|
|
c4aaa10308 | ||
|
|
d10536a829 | ||
|
|
1e7fa82e11 | ||
|
|
1d448f3d9c | ||
|
|
338b5f427a | ||
|
|
59e3e73c4c | ||
|
|
cb2614127c | ||
|
|
fdbd826917 | ||
|
|
31daf4915e | ||
|
|
4ca7691afd | ||
|
|
64d3ecd9b8 | ||
|
|
d55df3240f | ||
|
|
52214e4938 | ||
|
|
b45307e493 | ||
|
|
4320369448 | ||
|
|
f560dc093c | ||
|
|
d26a2b1480 | ||
|
|
e11b07b559 | ||
|
|
b6ee8ef4d4 | ||
|
|
f80559d380 | ||
|
|
8530b00e7b | ||
|
|
5851e1e69f | ||
|
|
686bfd62eb | ||
|
|
9b82603c26 | ||
|
|
f41792915f | ||
|
|
2fa77fb610 | ||
|
|
e64d0e33fc | ||
|
|
b168643600 | ||
|
|
240283405e | ||
|
|
b69f8b7ed5 | ||
|
|
fbccba77a7 | ||
|
|
d3efda74b2 | ||
|
|
66b849cb29 | ||
|
|
b19f98ef5b | ||
|
|
c389790cf2 | ||
|
|
d7445dfa80 | ||
|
|
36782768a4 | ||
|
|
2c9d487614 | ||
|
|
b9a724c8bb | ||
|
|
68d826ca1c | ||
|
|
d6921882e1 | ||
|
|
2cfff73486 | ||
|
|
0c7dda8d44 | ||
|
|
dbaa377770 | ||
|
|
47d2b81d1c | ||
|
|
f79fcda27f | ||
|
|
cdbcad2238 | ||
|
|
5d913e87c3 | ||
|
|
16f02bda27 | ||
|
|
8d108b92bf | ||
|
|
46783028b1 | ||
|
|
d08c7c57a8 | ||
|
|
eeeb845ef3 | ||
|
|
651a063f94 | ||
|
|
f20aaa2d9d | ||
|
|
ba925ec191 | ||
|
|
3b7376fd18 | ||
|
|
c31b10c798 | ||
|
|
acda664686 | ||
|
|
e2852407ea | ||
|
|
88e738c6cd | ||
|
|
eaae8bdb0b | ||
|
|
821f68909d | ||
|
|
2b8dfed475 | ||
|
|
607b5ea766 | ||
|
|
88579cd71a | ||
|
|
6c57316ce6 | ||
|
|
6702683da3 | ||
|
|
1ed58586a1 | ||
|
|
f08ccd4fd8 | ||
|
|
312562a9f5 | ||
|
|
9e260a89af | ||
|
|
d233e4d22e | ||
|
|
23893dbcb9 | ||
|
|
506871b506 | ||
|
|
6115917660 | ||
|
|
21df8819d3 | ||
|
|
fb3f3e11f6 | ||
|
|
178c8942c3 | ||
|
|
51e747049d | ||
|
|
0582f7d694 | ||
|
|
fa7cac7538 | ||
|
|
9a314cfbc4 | ||
|
|
5941d0bf77 | ||
|
|
d326c1c25c | ||
|
|
96472a9a8f | ||
|
|
27252561e2 | ||
|
|
c9e732651f | ||
|
|
7849e7170d | ||
|
|
087894eb4e | ||
|
|
25f1b8c7a7 | ||
|
|
e71da1f14d | ||
|
|
938b14ba18 | ||
|
|
d6522d8f38 | ||
|
|
78eab890e7 | ||
|
|
1a56191f83 | ||
|
|
41c0f34d95 | ||
|
|
37bf205d7a | ||
|
|
aa1fa3eb9a | ||
|
|
0e2f8a612c | ||
|
|
465e7b2abc | ||
|
|
578fb45785 | ||
|
|
96995bbbe5 | ||
|
|
4cfdafebbc | ||
|
|
b97acb8ef5 | ||
|
|
d68d2dfdb6 | ||
|
|
39b269a454 | ||
|
|
ac081d3e10 | ||
|
|
5d4efb60cf | ||
|
|
cc408b980c | ||
|
|
59590b3ac9 | ||
|
|
ff759dacf3 | ||
|
|
a328e44130 | ||
|
|
7924cac5f9 | ||
|
|
1cef3b0c93 | ||
|
|
3cd59edc8b | ||
|
|
0d624af01d | ||
|
|
a09132570c | ||
|
|
ee3fc38432 | ||
|
|
dbf0192c8e | ||
|
|
6962cfc3f5 | ||
|
|
e096ec3b5b | ||
|
|
b30a74ae0c | ||
|
|
978eeb16c9 | ||
|
|
e5c9d91657 | ||
|
|
fa81c3a07a | ||
|
|
9cdd520d41 | ||
|
|
55d7898771 | ||
|
|
b8256bef97 | ||
|
|
5be9dc0b4a | ||
|
|
7d0be0cefb | ||
|
|
f7ce1edb13 | ||
|
|
5ad9280b60 | ||
|
|
2b353f1b20 | ||
|
|
75ab90b87b | ||
|
|
0219296120 | ||
|
|
20032b3a31 | ||
|
|
ea9e9a8c90 | ||
|
|
f7b0ee145b | ||
|
|
cc866738ee | ||
|
|
eadccf6e33 | ||
|
|
b70b66e567 | ||
|
|
5b6792dc20 | ||
|
|
f498e7343a | ||
|
|
6962f441e6 | ||
|
|
1def62b1b1 | ||
|
|
a4a4a6a185 | ||
|
|
d4c9469c1a | ||
|
|
3e2d4c5d7b | ||
|
|
d03f711d69 | ||
|
|
44dd8d9b96 | ||
|
|
549a3be0d8 | ||
|
|
1bb2edf8ec | ||
|
|
84c6f36315 |
241
Desktop.py
Normal file
241
Desktop.py
Normal file
@@ -0,0 +1,241 @@
|
||||
from esky.util import appdir_from_executable #@UnresolvedImport
|
||||
from threading import Thread
|
||||
from version import VERSION
|
||||
from wx.lib.softwareupdate import SoftwareUpdate
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import webbrowser
|
||||
import wx
|
||||
|
||||
# Include proper dirs
|
||||
if hasattr(sys, 'frozen'):
|
||||
import libs
|
||||
base_path = os.path.dirname(os.path.dirname(os.path.abspath(libs.__file__)))
|
||||
else:
|
||||
base_path = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
def icon():
|
||||
icon = 'icon_windows.png'
|
||||
if os.path.isfile('icon_mac.png'):
|
||||
icon = 'icon_mac.png'
|
||||
|
||||
return wx.Icon(icon, wx.BITMAP_TYPE_PNG)
|
||||
|
||||
lib_dir = os.path.join(base_path, 'libs')
|
||||
|
||||
sys.path.insert(0, base_path)
|
||||
sys.path.insert(0, lib_dir)
|
||||
|
||||
from couchpotato.environment import Env
|
||||
|
||||
class TaskBarIcon(wx.TaskBarIcon):
|
||||
|
||||
TBMENU_OPEN = wx.NewId()
|
||||
TBMENU_SETTINGS = wx.NewId()
|
||||
TBMENU_EXIT = wx.ID_EXIT
|
||||
|
||||
closed = False
|
||||
menu = False
|
||||
enabled = False
|
||||
|
||||
def __init__(self, frame):
|
||||
wx.TaskBarIcon.__init__(self)
|
||||
self.frame = frame
|
||||
|
||||
self.SetIcon(icon())
|
||||
|
||||
self.Bind(wx.EVT_TASKBAR_LEFT_UP, self.OnTaskBarClick)
|
||||
self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.OnTaskBarClick)
|
||||
|
||||
self.Bind(wx.EVT_MENU, self.onOpen, id = self.TBMENU_OPEN)
|
||||
self.Bind(wx.EVT_MENU, self.onSettings, id = self.TBMENU_SETTINGS)
|
||||
self.Bind(wx.EVT_MENU, self.onTaskBarClose, id = self.TBMENU_EXIT)
|
||||
|
||||
def OnTaskBarClick(self, evt):
|
||||
menu = self.CreatePopupMenu()
|
||||
self.PopupMenu(menu)
|
||||
menu.Destroy()
|
||||
|
||||
def enable(self):
|
||||
self.enabled = True
|
||||
|
||||
if self.menu:
|
||||
self.open_menu.Enable(True)
|
||||
self.setting_menu.Enable(True)
|
||||
|
||||
self.open_menu.SetText('Open')
|
||||
|
||||
def CreatePopupMenu(self):
|
||||
|
||||
if not self.menu:
|
||||
self.menu = wx.Menu()
|
||||
self.open_menu = self.menu.Append(self.TBMENU_OPEN, 'Open')
|
||||
self.setting_menu = self.menu.Append(self.TBMENU_SETTINGS, 'About')
|
||||
self.exit_menu = self.menu.Append(self.TBMENU_EXIT, 'Quit')
|
||||
|
||||
if not self.enabled:
|
||||
self.open_menu.Enable(False)
|
||||
self.setting_menu.Enable(False)
|
||||
|
||||
self.open_menu.SetText('Loading...')
|
||||
|
||||
return self.menu
|
||||
|
||||
def onOpen(self, event):
|
||||
url = self.frame.parent.getSetting('base_url')
|
||||
webbrowser.open(url)
|
||||
|
||||
def onSettings(self, event):
|
||||
url = self.frame.parent.getSetting('base_url') + 'settings/about/'
|
||||
webbrowser.open(url)
|
||||
|
||||
def onTaskBarClose(self, evt):
|
||||
if self.closed:
|
||||
return
|
||||
|
||||
self.closed = True
|
||||
|
||||
self.RemoveIcon()
|
||||
wx.CallAfter(self.frame.Close)
|
||||
|
||||
|
||||
def makeIcon(self, img):
|
||||
if "wxMSW" in wx.PlatformInfo:
|
||||
img = img.Scale(16, 16)
|
||||
elif "wxGTK" in wx.PlatformInfo:
|
||||
img = img.Scale(22, 22)
|
||||
|
||||
icon = wx.IconFromBitmap(img.CopyFromBitmap())
|
||||
return icon
|
||||
|
||||
|
||||
class MainFrame(wx.Frame):
|
||||
|
||||
def __init__(self, parent):
|
||||
wx.Frame.__init__(self, None, style = wx.FRAME_NO_TASKBAR)
|
||||
|
||||
self.parent = parent
|
||||
self.tbicon = TaskBarIcon(self)
|
||||
|
||||
|
||||
class WorkerThread(Thread):
|
||||
|
||||
def __init__(self, desktop):
|
||||
Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self._desktop = desktop
|
||||
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
|
||||
# Get options via arg
|
||||
from couchpotato.runner import getOptions
|
||||
args = ['--quiet']
|
||||
self.options = getOptions(args)
|
||||
|
||||
# Load settings
|
||||
settings = Env.get('settings')
|
||||
settings.setFile(self.options.config_file)
|
||||
|
||||
# Create data dir if needed
|
||||
self.data_dir = os.path.expanduser(Env.setting('data_dir'))
|
||||
if self.data_dir == '':
|
||||
from couchpotato.core.helpers.variable import getDataDir
|
||||
self.data_dir = getDataDir()
|
||||
|
||||
if not os.path.isdir(self.data_dir):
|
||||
os.makedirs(self.data_dir)
|
||||
|
||||
# Create logging dir
|
||||
self.log_dir = os.path.join(self.data_dir, 'logs');
|
||||
if not os.path.isdir(self.log_dir):
|
||||
os.mkdir(self.log_dir)
|
||||
|
||||
try:
|
||||
from couchpotato.runner import runCouchPotato
|
||||
runCouchPotato(self.options, base_path, args, data_dir = self.data_dir, log_dir = self.log_dir, Env = Env, desktop = self._desktop)
|
||||
except:
|
||||
pass
|
||||
|
||||
self._desktop.frame.Close()
|
||||
self._desktop.ExitMainLoop()
|
||||
|
||||
|
||||
class CouchPotatoApp(wx.App, SoftwareUpdate):
|
||||
|
||||
settings = {}
|
||||
events = {}
|
||||
restart = False
|
||||
closing = False
|
||||
triggered_onClose = False
|
||||
|
||||
def OnInit(self):
|
||||
|
||||
# Updater
|
||||
base_url = 'https://api.couchpota.to/updates/%s'
|
||||
self.InitUpdates(base_url % VERSION + '/', 'https://couchpota.to/updates/%s' % 'changelog.html',
|
||||
icon = icon())
|
||||
|
||||
self.frame = MainFrame(self)
|
||||
self.frame.Bind(wx.EVT_CLOSE, self.onClose)
|
||||
|
||||
# CouchPotato thread
|
||||
self.worker = WorkerThread(self)
|
||||
|
||||
return True
|
||||
|
||||
def onAppLoad(self):
|
||||
self.frame.tbicon.enable()
|
||||
|
||||
def setSettings(self, settings = {}):
|
||||
self.settings = settings
|
||||
|
||||
def getSetting(self, name):
|
||||
return self.settings.get(name)
|
||||
|
||||
def addEvents(self, events = {}):
|
||||
for name in events.iterkeys():
|
||||
self.events[name] = events[name]
|
||||
|
||||
def onClose(self, event):
|
||||
|
||||
if not self.closing:
|
||||
self.closing = True
|
||||
self.frame.tbicon.onTaskBarClose(event)
|
||||
|
||||
onClose = self.events.get('onClose')
|
||||
if onClose and not self.triggered_onClose:
|
||||
self.triggered_onClose = True
|
||||
onClose(event)
|
||||
|
||||
def afterShutdown(self, restart = False):
|
||||
self.frame.Destroy()
|
||||
self.restart = restart
|
||||
self.ExitMainLoop()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
app = CouchPotatoApp(redirect = False)
|
||||
app.MainLoop()
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
if app.restart:
|
||||
|
||||
def appexe_from_executable(exepath):
|
||||
appdir = appdir_from_executable(exepath)
|
||||
exename = os.path.basename(exepath)
|
||||
|
||||
if sys.platform == "darwin":
|
||||
if os.path.isdir(os.path.join(appdir, "Contents", "MacOS")):
|
||||
return os.path.join(appdir, "Contents", "MacOS", exename)
|
||||
|
||||
return os.path.join(appdir, exename)
|
||||
|
||||
exe = appexe_from_executable(sys.executable)
|
||||
os.chdir(os.path.dirname(exe))
|
||||
|
||||
os.execv(exe, [exe] + sys.argv[1:])
|
||||
@@ -621,8 +621,6 @@ class Database(object):
|
||||
|
||||
except OperationalError:
|
||||
log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
|
||||
|
||||
rename_old = True
|
||||
except:
|
||||
log.error('Migration failed: %s', traceback.format_exc())
|
||||
|
||||
|
||||
@@ -27,11 +27,6 @@ class Deluge(DownloaderBase):
|
||||
def connect(self, reconnect = False):
|
||||
# Load host from config and split out port.
|
||||
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||
|
||||
# Force host assignment
|
||||
if len(host) == 1:
|
||||
host.append(80)
|
||||
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
|
||||
@@ -62,7 +62,7 @@ class CPLog(object):
|
||||
if isinstance(replace_tuple, tuple):
|
||||
msg = msg % tuple([ss(x) if not isinstance(x, (int, float)) else x for x in list(replace_tuple)])
|
||||
elif isinstance(replace_tuple, dict):
|
||||
msg = msg % dict((k, ss(v) if not isinstance(v, (int, float)) else v) for k, v in replace_tuple.iteritems())
|
||||
msg = msg % dict((k, ss(v)) for k, v in replace_tuple.iteritems())
|
||||
else:
|
||||
msg = msg % ss(replace_tuple)
|
||||
except Exception as e:
|
||||
|
||||
@@ -273,6 +273,10 @@ class MediaPlugin(MediaBase):
|
||||
for x in filter_by:
|
||||
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||
|
||||
total_count = len(media_ids)
|
||||
if total_count == 0:
|
||||
return 0, []
|
||||
|
||||
offset = 0
|
||||
limit = -1
|
||||
if limit_offset:
|
||||
@@ -302,30 +306,11 @@ class MediaPlugin(MediaBase):
|
||||
media_ids.remove(media_id)
|
||||
if len(media_ids) == 0 or len(medias) == limit: break
|
||||
|
||||
# Sort media by type and return result
|
||||
result = {}
|
||||
|
||||
# Create keys for media types we are listing
|
||||
if types:
|
||||
for media_type in types:
|
||||
result['%ss' % media_type] = []
|
||||
else:
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
result['%ss' % media_type] = []
|
||||
|
||||
total_count = len(medias)
|
||||
|
||||
if total_count == 0:
|
||||
return 0, result
|
||||
|
||||
for kind in medias:
|
||||
result['%ss' % kind['type']].append(kind)
|
||||
|
||||
return total_count, result
|
||||
return total_count, medias
|
||||
|
||||
def listView(self, **kwargs):
|
||||
|
||||
total_count, result = self.list(
|
||||
total_movies, movies = self.list(
|
||||
types = splitString(kwargs.get('type')),
|
||||
status = splitString(kwargs.get('status')),
|
||||
release_status = splitString(kwargs.get('release_status')),
|
||||
@@ -336,12 +321,12 @@ class MediaPlugin(MediaBase):
|
||||
search = kwargs.get('search')
|
||||
)
|
||||
|
||||
results = result
|
||||
results['success'] = True
|
||||
results['empty'] = len(result) == 0
|
||||
results['total'] = total_count
|
||||
|
||||
return results
|
||||
return {
|
||||
'success': True,
|
||||
'empty': len(movies) == 0,
|
||||
'total': total_movies,
|
||||
'movies': movies,
|
||||
}
|
||||
|
||||
def addSingleListView(self):
|
||||
|
||||
|
||||
126
couchpotato/core/media/_base/providers/torrent/torrentleech.py
Normal file
126
couchpotato/core/media/_base/providers/torrent/torrentleech.py
Normal file
@@ -0,0 +1,126 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'https://www.torrentleech.org/',
|
||||
'login': 'https://www.torrentleech.org/user/account/login/',
|
||||
'login_check': 'https://torrentleech.org/user/messages',
|
||||
'detail': 'https://www.torrentleech.org/torrent/%s',
|
||||
'search': 'https://www.torrentleech.org/torrents/browse/index/query/%s/categories/%d',
|
||||
'download': 'https://www.torrentleech.org%s',
|
||||
}
|
||||
|
||||
http_time_between_calls = 1 # Seconds
|
||||
cat_backup_id = None
|
||||
|
||||
def _searchOnTitle(self, title, media, quality, results):
|
||||
|
||||
url = self.urls['search'] % self.buildUrl(title, media, quality)
|
||||
|
||||
data = self.getHTMLData(url)
|
||||
|
||||
if data:
|
||||
html = BeautifulSoup(data)
|
||||
|
||||
try:
|
||||
result_table = html.find('table', attrs = {'id': 'torrenttable'})
|
||||
if not result_table:
|
||||
return
|
||||
|
||||
entries = result_table.find_all('tr')
|
||||
|
||||
for result in entries[1:]:
|
||||
|
||||
link = result.find('td', attrs = {'class': 'name'}).find('a')
|
||||
url = result.find('td', attrs = {'class': 'quickdownload'}).find('a')
|
||||
details = result.find('td', attrs = {'class': 'name'}).find('a')
|
||||
|
||||
results.append({
|
||||
'id': link['href'].replace('/torrent/', ''),
|
||||
'name': six.text_type(link.string),
|
||||
'url': self.urls['download'] % url['href'],
|
||||
'detail_url': self.urls['download'] % details['href'],
|
||||
'size': self.parseSize(result.find_all('td')[4].string),
|
||||
'seeders': tryInt(result.find('td', attrs = {'class': 'seeders'}).string),
|
||||
'leechers': tryInt(result.find('td', attrs = {'class': 'leechers'}).string),
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
def getLoginParams(self):
|
||||
return {
|
||||
'username': self.conf('username'),
|
||||
'password': self.conf('password'),
|
||||
'remember_me': 'on',
|
||||
'login': 'submit',
|
||||
}
|
||||
|
||||
def loginSuccess(self, output):
|
||||
return '/user/account/logout' in output.lower() or 'welcome back' in output.lower()
|
||||
|
||||
loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'torrentleech',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'TorrentLeech',
|
||||
'description': '<a href="http://torrentleech.org">TorrentLeech</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACHUlEQVR4AZVSO48SYRSdGTCBEMKzILLAWiybkKAGMZRUUJEoDZX7B9zsbuQPYEEjNLTQkYgJDwsoSaxspEBsCITXjjNAIKi8AkzceXgmbHQ1NJ5iMufmO9/9zrmXlCSJ+B8o75J8Pp/NZj0eTzweBy0Wi4PBYD6f12o1r9ebTCZx+22HcrnMsuxms7m6urTZ7LPZDMVYLBZ8ZV3yo8aq9Pq0wzCMTqe77dDv9y8uLyAWBH6xWOyL0K/56fcb+rrPgPZ6PZfLRe1fsl6vCUmGKIqoqNXqdDr9Dbjps9znUV0uTqdTjuPkDoVCIfcuJ4gizjMMm8u9vW+1nr04czqdK56c37CbKY9j2+1WEARZ0Gq1RFHAz2q1qlQqXxoN69HRcDjUarW8ZD6QUigUOnY8uKYH8N1sNkul9yiGw+F6vS4Rxn8EsodEIqHRaOSnq9T7ajQazWQycEIR1AEBYDabSZJyHDucJyegwWBQr9ebTCaKvHd4cCQANUU9evwQ1Ofz4YvUKUI43GE8HouSiFiNRhOowWBIpVLyHITJkuW3PwgAEf3pgIwxF5r+OplMEsk3CPT5szCMnY7EwUdhwUh/CXiej0Qi3idPz89fdrpdbsfBzH7S3Q9K5pP4c0sAKpVKoVAQGO1ut+t0OoFAQHkH2Da/3/+but3uarWK0ZMQoNdyucRutdttmqZxMTzY7XaYxsrgtUjEZrNhkSwWyy/0NCatZumrNQAAAABJRU5ErkJggg==',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False,
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 20,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -264,11 +264,3 @@
|
||||
height: 40px;
|
||||
}
|
||||
|
||||
@media all and (max-width: 480px) {
|
||||
.toggle_menu h2 {
|
||||
font-size: 16px;
|
||||
text-align: center;
|
||||
height: 30px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ from couchpotato import tryInt
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.movie.providers.base import MovieProvider
|
||||
from requests import HTTPError
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
@@ -33,14 +32,12 @@ class FanartTV(MovieProvider):
|
||||
|
||||
try:
|
||||
url = self.urls['api'] % identifier
|
||||
fanart_data = self.getJsonData(url, show_error = False)
|
||||
fanart_data = self.getJsonData(url)
|
||||
|
||||
if fanart_data:
|
||||
log.debug('Found images for %s', fanart_data.get('name'))
|
||||
images = self._parseMovie(fanart_data)
|
||||
except HTTPError as e:
|
||||
log.debug('Failed getting extra art for %s: %s',
|
||||
(identifier, e))
|
||||
|
||||
except:
|
||||
log.error('Failed getting extra art for %s: %s',
|
||||
(identifier, traceback.format_exc()))
|
||||
|
||||
@@ -59,8 +59,7 @@ class TheMovieDb(MovieProvider):
|
||||
|
||||
for movie in raw:
|
||||
parsed_movie = self.parseMovie(movie, extended = False)
|
||||
if parsed_movie:
|
||||
results.append(parsed_movie)
|
||||
results.append(parsed_movie)
|
||||
|
||||
nr += 1
|
||||
if nr == limit:
|
||||
@@ -84,7 +83,7 @@ class TheMovieDb(MovieProvider):
|
||||
'id': identifier
|
||||
}, extended = extended)
|
||||
|
||||
return result or {}
|
||||
return result
|
||||
|
||||
def parseMovie(self, movie, extended = True):
|
||||
|
||||
@@ -92,8 +91,6 @@ class TheMovieDb(MovieProvider):
|
||||
movie = self.request('movie/%s' % movie.get('id'), {
|
||||
'append_to_response': 'alternative_titles' + (',images,casts' if extended else '')
|
||||
})
|
||||
if not movie:
|
||||
return
|
||||
|
||||
# Images
|
||||
poster = self.getImage(movie, type = 'poster', size = 'w154')
|
||||
@@ -195,12 +192,8 @@ class TheMovieDb(MovieProvider):
|
||||
params = dict((k, v) for k, v in params.items() if v)
|
||||
params = tryUrlencode(params)
|
||||
|
||||
try:
|
||||
url = 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.conf('api_key'), '&%s' % params if params else '')
|
||||
data = self.getJsonData(url, show_error = False)
|
||||
except:
|
||||
log.debug('Movie not found: %s, %s', (call, params))
|
||||
data = None
|
||||
url = 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.conf('api_key'), '&%s' % params if params else '')
|
||||
data = self.getJsonData(url)
|
||||
|
||||
if data and return_key and return_key in data:
|
||||
data = data.get(return_key)
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.torrentleech import Base
|
||||
from couchpotato.core.media.movie.providers.base import MovieProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentLeech'
|
||||
|
||||
|
||||
class TorrentLeech(MovieProvider, Base):
|
||||
|
||||
cat_ids = [
|
||||
([13], ['720p', '1080p', 'bd50']),
|
||||
([8], ['cam']),
|
||||
([9], ['ts', 'tc']),
|
||||
([10], ['r5', 'scr']),
|
||||
([11], ['dvdrip']),
|
||||
([14], ['brrip']),
|
||||
([12], ['dvdr']),
|
||||
]
|
||||
|
||||
def buildUrl(self, title, media, quality):
|
||||
return (
|
||||
tryUrlencode(title.replace(':', '')),
|
||||
self.getCatId(quality)[0]
|
||||
)
|
||||
@@ -203,14 +203,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
quality['custom'] = quality_custom
|
||||
|
||||
results = fireEvent('searcher.search', search_protocols, movie, quality, single = True) or []
|
||||
|
||||
# Check if movie isn't deleted while searching
|
||||
if not fireEvent('media.get', movie.get('_id'), single = True):
|
||||
break
|
||||
|
||||
# Add them to this movie releases list
|
||||
found_releases += fireEvent('release.create_from_search', results, movie, quality, single = True)
|
||||
results_count = len(found_releases)
|
||||
results_count = len(results)
|
||||
total_result_count += results_count
|
||||
if results_count == 0:
|
||||
log.debug('Nothing found for %s in %s', (default_title, quality['label']))
|
||||
@@ -218,9 +211,17 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
|
||||
# Keep track of releases found outside ETA window
|
||||
outside_eta_results += results_count if could_not_be_released else 0
|
||||
|
||||
# Check if movie isn't deleted while searching
|
||||
if not fireEvent('media.get', movie.get('_id'), single = True):
|
||||
break
|
||||
|
||||
# Add them to this movie releases list
|
||||
found_releases += fireEvent('release.create_from_search', results, movie, quality, single = True)
|
||||
|
||||
# Don't trigger download, but notify user of available releases
|
||||
if could_not_be_released and results_count > 0:
|
||||
log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title))
|
||||
if could_not_be_released:
|
||||
if results_count > 0:
|
||||
log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title))
|
||||
|
||||
# Try find a valid result and download it
|
||||
if (force_download or not could_not_be_released or always_search) and fireEvent('release.try_download_result', results, movie, quality_custom, single = True):
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
class ShowTypeBase(MediaBase):
|
||||
_type = 'show'
|
||||
|
||||
def getType(self):
|
||||
if hasattr(self, 'type') and self.type != self._type:
|
||||
return '%s.%s' % (self._type, self.type)
|
||||
|
||||
return self._type
|
||||
@@ -1,4 +0,0 @@
|
||||
from .main import ShowBase
|
||||
|
||||
def autoload():
|
||||
return ShowBase()
|
||||
@@ -1,109 +0,0 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Episode'
|
||||
|
||||
|
||||
class Episode(MediaBase):
|
||||
|
||||
def __init__(self):
|
||||
addEvent('show.episode.add', self.add)
|
||||
addEvent('show.episode.update', self.update)
|
||||
addEvent('show.episode.update_extras', self.updateExtras)
|
||||
|
||||
def add(self, parent_id, info = None, update_after = True, status = None):
|
||||
if not info: info = {}
|
||||
|
||||
identifiers = info.pop('identifiers', None)
|
||||
|
||||
if not identifiers:
|
||||
log.warning('Unable to add episode, missing identifiers (info provider mismatch?)')
|
||||
return
|
||||
|
||||
# Add Season
|
||||
episode_info = {
|
||||
'_t': 'media',
|
||||
'type': 'show.episode',
|
||||
'identifiers': identifiers,
|
||||
'status': status if status else 'active',
|
||||
'parent_id': parent_id,
|
||||
'info': info, # Returned dict by providers
|
||||
}
|
||||
|
||||
# Check if season already exists
|
||||
existing_episode = fireEvent('media.with_identifiers', identifiers, with_doc = True, single = True)
|
||||
|
||||
db = get_db()
|
||||
|
||||
if existing_episode:
|
||||
s = existing_episode['doc']
|
||||
s.update(episode_info)
|
||||
|
||||
episode = db.update(s)
|
||||
else:
|
||||
episode = db.insert(episode_info)
|
||||
|
||||
# Update library info
|
||||
if update_after is not False:
|
||||
handle = fireEventAsync if update_after is 'async' else fireEvent
|
||||
handle('show.episode.update_extras', episode, info, store = True, single = True)
|
||||
|
||||
return episode
|
||||
|
||||
def update(self, media_id = None, identifiers = None, info = None):
|
||||
if not info: info = {}
|
||||
|
||||
if self.shuttingDown():
|
||||
return
|
||||
|
||||
db = get_db()
|
||||
|
||||
episode = db.get('id', media_id)
|
||||
|
||||
# Get new info
|
||||
if not info:
|
||||
season = db.get('id', episode['parent_id'])
|
||||
show = db.get('id', season['parent_id'])
|
||||
|
||||
info = fireEvent(
|
||||
'episode.info', show.get('identifiers'), {
|
||||
'season_identifiers': season.get('identifiers'),
|
||||
'season_number': season.get('info', {}).get('number'),
|
||||
|
||||
'episode_identifiers': episode.get('identifiers'),
|
||||
'episode_number': episode.get('info', {}).get('number'),
|
||||
|
||||
'absolute_number': episode.get('info', {}).get('absolute_number')
|
||||
},
|
||||
merge = True
|
||||
)
|
||||
|
||||
info['season_number'] = season.get('info', {}).get('number')
|
||||
|
||||
identifiers = info.pop('identifiers', None) or identifiers
|
||||
|
||||
# Update/create media
|
||||
episode['identifiers'].update(identifiers)
|
||||
episode.update({'info': info})
|
||||
|
||||
self.updateExtras(episode, info)
|
||||
|
||||
db.update(episode)
|
||||
return episode
|
||||
|
||||
def updateExtras(self, episode, info, store=False):
|
||||
db = get_db()
|
||||
|
||||
# Get images
|
||||
image_urls = info.get('images', [])
|
||||
existing_files = episode.get('files', {})
|
||||
self.getPoster(image_urls, existing_files)
|
||||
|
||||
if store:
|
||||
db.update(episode)
|
||||
@@ -1,291 +0,0 @@
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from couchpotato import get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
||||
from couchpotato.core.helpers.variable import getTitle, find
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class ShowBase(MediaBase):
|
||||
|
||||
_type = 'show'
|
||||
|
||||
def __init__(self):
|
||||
super(ShowBase, self).__init__()
|
||||
self.initType()
|
||||
|
||||
addApiView('show.add', self.addView, docs = {
|
||||
'desc': 'Add new show to the wanted list',
|
||||
'params': {
|
||||
'identifier': {'desc': 'IMDB id of the show your want to add.'},
|
||||
'profile_id': {'desc': 'ID of quality profile you want the add the show in. If empty will use the default profile.'},
|
||||
'category_id': {'desc': 'ID of category you want the add the show in.'},
|
||||
'title': {'desc': 'Title of the show to use for search and renaming'},
|
||||
}
|
||||
})
|
||||
|
||||
addEvent('show.add', self.add)
|
||||
addEvent('show.update', self.update)
|
||||
addEvent('show.update_extras', self.updateExtras)
|
||||
|
||||
def addView(self, **kwargs):
|
||||
add_dict = self.add(params = kwargs)
|
||||
|
||||
return {
|
||||
'success': True if add_dict else False,
|
||||
'show': add_dict,
|
||||
}
|
||||
|
||||
def add(self, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):
|
||||
if not params: params = {}
|
||||
|
||||
# Identifiers
|
||||
if not params.get('identifiers'):
|
||||
msg = 'Can\'t add show without at least 1 identifier.'
|
||||
log.error(msg)
|
||||
fireEvent('notify.frontend', type = 'show.no_identifier', message = msg)
|
||||
return False
|
||||
|
||||
info = params.get('info')
|
||||
if not info or (info and len(info.get('titles', [])) == 0):
|
||||
info = fireEvent('show.info', merge = True, identifiers = params.get('identifiers'))
|
||||
|
||||
# Add Show
|
||||
try:
|
||||
m, added = self.create(info, params, force_readd, search_after, update_after)
|
||||
|
||||
result = fireEvent('media.get', m['_id'], single = True)
|
||||
|
||||
if added and notify_after:
|
||||
if params.get('title'):
|
||||
message = 'Successfully added "%s" to your wanted list.' % params.get('title', '')
|
||||
else:
|
||||
title = getTitle(m)
|
||||
if title:
|
||||
message = 'Successfully added "%s" to your wanted list.' % title
|
||||
else:
|
||||
message = 'Successfully added to your wanted list.'
|
||||
|
||||
fireEvent('notify.frontend', type = 'show.added', data = result, message = message)
|
||||
|
||||
return result
|
||||
except:
|
||||
log.error('Failed adding media: %s', traceback.format_exc())
|
||||
|
||||
def create(self, info, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):
    """Create or re-activate the show media document.

    :param info: merged provider info; 'identifiers' and 'seasons' are
                 stripped out and stored/handled separately.
    :param params: request parameters ('profile_id', 'category_id',
                   'identifiers', 'ignore_previous', ...).
    :param force_readd: clean snatched history and re-activate on re-add.
    :param search_after: trigger a release search when creation finishes.
    :param update_after: fetch extended info (images etc.) asynchronously.
    :param notify_after: unused here; notification is handled by add().
    :param status: initial document status (default 'active').
    :return: (show document, added flag) tuple.
    """
    # Bug fix: params defaults to None but is read below; normalize first
    if not params: params = {}

    # Set default title
    def_title = self.getDefaultTitle(info)

    # Default profile and category
    default_profile = {}
    if not params.get('profile_id'):
        default_profile = fireEvent('profile.default', single = True)

    cat_id = params.get('category_id')

    media = {
        '_t': 'media',
        'type': 'show',
        'title': def_title,
        'identifiers': info.get('identifiers'),
        'status': status if status else 'active',
        'profile_id': params.get('profile_id', default_profile.get('_id')),
        # '' and '-1' both mean "no category selected"
        'category_id': cat_id if cat_id is not None and len(cat_id) > 0 and cat_id != '-1' else None
    }

    # Seasons and identifiers are stored separately from the info blob
    identifiers = info.pop('identifiers', {})
    seasons = info.pop('seasons', {})

    # Update media with info
    self.updateInfo(media, info)

    existing_show = fireEvent('media.with_identifiers', params.get('identifiers'), with_doc = True)

    db = get_db()

    if existing_show:
        s = existing_show['doc']
        s.update(media)

        show = db.update(s)
    else:
        show = db.insert(media)

    # Update dict to be usable
    show.update(media)

    added = True
    do_search = False
    search_after = search_after and self.conf('search_on_add', section = 'showsearcher')
    onComplete = None

    # NOTE(review): branch polarity looks inverted compared to the movie
    # media base (whose first branch keys on a *new* document); behavior is
    # kept as-is here -- confirm against the movie implementation.
    if existing_show:
        if search_after:
            onComplete = self.createOnComplete(show['_id'])

        search_after = False
    elif force_readd:
        # Clean snatched history
        for release in fireEvent('release.for_media', show['_id'], single = True):
            if release.get('status') in ['downloaded', 'snatched', 'done']:
                if params.get('ignore_previous', False):
                    release['status'] = 'ignored'
                    db.update(release)
                else:
                    fireEvent('release.delete', release['_id'], single = True)

        # Bug fix: profile documents use '_id' (as above), not 'id'
        show['profile_id'] = params.get('profile_id', default_profile.get('_id'))
        show['category_id'] = media.get('category_id')
        show['last_edit'] = int(time.time())

        do_search = True
        db.update(show)
    else:
        params.pop('info', None)
        log.debug('Show already exists, not updating: %s', params)
        added = False

    # Create episodes
    self.createEpisodes(show, seasons)

    # Trigger update info
    if added and update_after:
        # Do full update to get images etc
        fireEventAsync('show.update_extras', show.copy(), info, store = True, on_complete = onComplete)

    # Remove stale "available" releases; the next search will re-find them
    for rel in fireEvent('release.for_media', show['_id'], single = True):
        # Bug fix: compare strings with ==; `is` checks object identity
        if rel['status'] == 'available':
            db.delete(rel)

    if do_search and search_after:
        onComplete = self.createOnComplete(show['_id'])
        onComplete()

    return show, added
def createEpisodes(self, m, seasons_info):
    """Create season and episode documents for show `m` from provider info."""
    for season_nr, season_info in seasons_info.items():
        episodes = season_info.get('episodes', {})

        # Create the season first so episodes can reference its id
        season = fireEvent('show.season.add', m.get('_id'), season_info, update_after = False, single = True)

        for episode_nr, episode_info in episodes.items():
            # Tag each episode with its season for later lookups
            episode_info['season_number'] = season_nr

            fireEvent('show.episode.add', season.get('_id'), episode_info, update_after = False, single = True)
def update(self, media_id = None, media = None, identifiers = None, info = None):
    """
    Update show information inside media['info'].

    Either `media` (the document itself) or `media_id` must be supplied.

    @param media_id: document id (used when `media` is not supplied)
    @param media: the media document to update in place
    @param identifiers: identifiers from multiple providers
        {
            'thetvdb': 123,
            'imdb': 'tt123123',
            ..
        }
    @param info: provider info; fetched via 'show.info' when empty
    @return: dict, with media (empty dict on failure, None on bad arguments
             or shutdown)
    """

    if not info: info = {}
    if not identifiers: identifiers = {}

    db = get_db()

    if self.shuttingDown():
        return

    # Bug fix: a directly-passed `media` document used to fall into the
    # error branch and be rejected; only hit the db when it was not supplied
    if media is None:
        if not media_id:
            log.error('missing "media" and "media_id" parameters, unable to update')
            return
        media = db.get('id', media_id)

    if not info:
        info = fireEvent('show.info', identifiers = media.get('identifiers'), merge = True)

    try:
        identifiers = info.pop('identifiers', {})
        seasons = info.pop('seasons', {})

        self.updateInfo(media, info)
        self.updateEpisodes(media, seasons)
        self.updateExtras(media, info)

        db.update(media)
        return media
    except Exception:
        log.error('Failed update media: %s', traceback.format_exc())

    return {}
def updateInfo(self, media, info):
    """Store provider `info` on the media document (in memory only).

    Frontend list-membership flags are stripped first since they are not
    show metadata. Returns False when there is no info left to store.
    """
    # These flags describe frontend state, not show metadata
    info.pop('in_wanted', None)
    info.pop('in_library', None)

    # Simplified: `not info` already covers the empty-dict case
    if not info:
        log.error('Could not update, no show info to work with: %s', media.get('identifier'))
        return False

    # Update basic info
    media['info'] = info
def updateEpisodes(self, media, seasons):
    """Sync stored season/episode documents with fresh provider `seasons` info.

    Existing seasons and episodes are updated in place; episodes missing from
    the stored tree are created. Seasons missing from the tree are only
    logged and skipped -- they are not created here.
    """
    # Fetch current season/episode tree
    show_tree = fireEvent('library.tree', media_id = media['_id'], single = True)

    # Update seasons
    for season_num in seasons:
        season_info = seasons[season_num]
        episodes = season_info.get('episodes', {})

        # Find season that matches number
        season = find(lambda s: s.get('info', {}).get('number', 0) == season_num, show_tree.get('seasons', []))

        if not season:
            log.warning('Unable to find season "%s"', season_num)
            continue

        # Update season
        fireEvent('show.season.update', season['_id'], info = season_info, single = True)

        # Update episodes
        for episode_num in episodes:
            episode_info = episodes[episode_num]
            episode_info['season_number'] = season_num

            # Find episode that matches number
            episode = find(lambda s: s.get('info', {}).get('number', 0) == episode_num, season.get('episodes', []))

            if not episode:
                # Episode is new: create it under the matched season
                log.debug('Creating new episode %s in season %s', (episode_num, season_num))
                fireEvent('show.episode.add', season.get('_id'), episode_info, update_after = False, single = True)
                continue

            fireEvent('show.episode.update', episode['_id'], info = episode_info, single = True)
def updateExtras(self, media, info, store = False):
    """Update derived extras (poster image) for `media` from provider `info`.

    Persists the document only when `store` is truthy.
    """
    db = get_db()

    # Grab the poster from the provider-supplied image urls
    self.getPoster(media, info.get('images', []))

    # Persist only when explicitly requested
    if store:
        db.update(media)
@@ -1,94 +0,0 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.media import MediaBase
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Season'
|
||||
|
||||
|
||||
class Season(MediaBase):
    """Season media type.

    A season is stored as its own media document, linked to its show through
    'parent_id'. Episodes are stored separately as children of the season.
    """

    def __init__(self):
        addEvent('show.season.add', self.add)
        addEvent('show.season.update', self.update)
        addEvent('show.season.update_extras', self.updateExtras)

    def add(self, parent_id, info = None, update_after = True, status = None):
        """Create or update a season document under show `parent_id`.

        :param parent_id: document id of the parent show.
        :param info: provider info; 'identifiers' and 'episodes' are stripped
                     and handled separately.
        :param update_after: False to skip the extras update, 'async' to run
                             it in the background, any other truthy value
                             runs it synchronously.
        :param status: initial status (default 'active').
        :return: the stored season document.
        """
        if not info: info = {}

        identifiers = info.pop('identifiers', None)
        # Episodes are stored as separate documents, never inside the season
        info.pop('episodes', None)

        # Add Season
        season_info = {
            '_t': 'media',
            'type': 'show.season',
            'identifiers': identifiers,
            'status': status if status else 'active',
            'parent_id': parent_id,
            'info': info,  # Returned dict by providers
        }

        # Check if season already exists
        existing_season = fireEvent('media.with_identifiers', identifiers, with_doc = True, single = True)

        db = get_db()

        if existing_season:
            s = existing_season['doc']
            s.update(season_info)

            season = db.update(s)
        else:
            season = db.insert(season_info)

        # Update library info
        if update_after is not False:
            # Bug fix: compare strings with ==; `is` checks identity and is
            # implementation dependent for string literals
            handle = fireEventAsync if update_after == 'async' else fireEvent
            handle('show.season.update_extras', season, info, store = True, single = True)

        return season

    def update(self, media_id = None, identifiers = None, info = None):
        """Refresh a stored season document with new provider info.

        :param media_id: document id of the season to update.
        :param identifiers: optional identifiers to merge into the document.
        :param info: provider info; fetched via 'season.info' when empty.
        :return: the updated season document (None when shutting down).
        """
        if not info: info = {}

        if self.shuttingDown():
            return

        db = get_db()

        season = db.get('id', media_id)
        show = db.get('id', season['parent_id'])

        # Get new info
        if not info:
            info = fireEvent('season.info', show.get('identifiers'), {
                'season_number': season.get('info', {}).get('number', 0)
            }, merge = True)

        identifiers = info.pop('identifiers', None) or identifiers
        info.pop('episodes', None)

        # Update/create media
        # Bug fix: identifiers can be None when neither the provider info nor
        # the caller supplied any; merging None raises a TypeError
        if identifiers:
            season['identifiers'].update(identifiers)
        season.update({'info': info})

        self.updateExtras(season, info)

        db.update(season)
        return season

    def updateExtras(self, season, info, store = False):
        """Update derived extras (poster) for the season; optionally persist."""
        db = get_db()

        # Get images
        image_urls = info.get('images', [])
        existing_files = season.get('files', {})
        # NOTE(review): argument order differs from Show.updateExtras, which
        # calls getPoster(media, image_urls) -- confirm getPoster's signature
        self.getPoster(image_urls, existing_files)

        if store:
            db.update(season)
@@ -1,28 +0,0 @@
|
||||
// Shows overview page: lazily builds the wanted-shows list on first index view.
Page.Shows = new Class({

	Extends: PageBase,

	name: 'shows',
	title: 'Gimmy gimmy gimmy!',
	// Placeholder for a folder browser; not created in this class
	folder_browser: null,

	indexAction: function(){
		var self = this;

		if(!self.wanted){

			// Wanted shows list (created once, then reused on later visits)
			self.wanted = new ShowList({
				'identifier': 'wanted',
				'status': 'active',
				'type': 'show',
				'actions': [MA.IMDB, MA.Trailer, MA.Release, MA.Edit, MA.Refresh, MA.Readd, MA.Delete],
				'add_new': true,
				'on_empty_element': App.createUserscriptButtons().addClass('empty_wanted')
			});
			$(self.wanted).inject(self.el);
		}

	}

});
@@ -1,474 +0,0 @@
|
||||
// Base class for per-episode action buttons/links (IMDB, releases, refresh...).
// Subclasses override create() to build `this.el`.
var EpisodeAction = new Class({

	Implements: [Options],

	// CSS classes applied to every action element
	class_name: 'item-action icon2',

	initialize: function(episode, options){
		var self = this;
		self.setOptions(options);

		self.show = episode.show;
		self.episode = episode;

		// Subclass hook; not every action creates an element
		self.create();
		if(self.el)
			self.el.addClass(self.class_name)
	},

	// Overridden by subclasses to build the action element
	create: function(){},

	disable: function(){
		if(this.el)
			this.el.addClass('disable')
	},

	enable: function(){
		if(this.el)
			this.el.removeClass('disable')
	},

	// Best-effort title of the parent show, falling back to raw data
	getTitle: function(){
		var self = this;

		try {
			return self.show.getTitle();
		}
		catch(e){
			try {
				return self.show.original_title ? self.show.original_title : self.show.titles[0];
			}
			catch(e){
				return 'Unknown';
			}
		}
	},

	// Read a property from the show, via its getter when available
	get: function(key){
		var self = this;
		try {
			return self.show.get(key)
		}
		catch(e){
			return self.show[key]
		}
	},

	// Hidden overlay injected on top of the show element
	createMask: function(){
		var self = this;
		self.mask = new Element('div.mask', {
			'styles': {
				'z-index': '1'
			}
		}).inject(self.show, 'top').fade('hide');
	},

	toElement: function(){
		return this.el || null
	}

});
|
||||
// Namespace object for the episode actions below
var EA = {};

// Link to the show's IMDB page; disabled when no IMDB id is known
EA.IMDB = new Class({

	Extends: EpisodeAction,
	id: null,

	create: function(){
		var self = this;

		self.id = self.show.getIdentifier ? self.show.getIdentifier() : self.get('imdb');

		self.el = new Element('a.imdb', {
			'title': 'Go to the IMDB page of ' + self.getTitle(),
			'href': 'http://www.imdb.com/title/'+self.id+'/',
			'target': '_blank'
		});

		if(!self.id) self.disable();
	}

});
|
||||
// Release list action: toggles an expandable table of available/snatched
// releases for the episode, with download/ignore controls per release.
EA.Release = new Class({

	Extends: EpisodeAction,

	create: function(){
		var self = this;

		self.el = new Element('a.releases.download', {
			'title': 'Show the releases that are available for ' + self.getTitle(),
			'events': {
				'click': self.toggle.bind(self)
			}
		});

		// Container the release table expands into
		self.options = new Element('div.episode-options').inject(self.episode.el);

		if(!self.episode.data.releases || self.episode.data.releases.length == 0)
			self.el.hide();
		else
			self.showHelper();

		// Invalidate cached release elements when a search for this show ends
		App.on('show.searcher.ended', function(notification){
			if(self.show.data._id != notification.data._id) return;

			self.releases = null;
			if(self.options_container){
				self.options_container.destroy();
				self.options_container = null;
			}
		});

	},

	toggle: function(e){
		var self = this;

		if(self.options && self.options.hasClass('expanded')) {
			self.close();
		} else {
			self.open();
		}
	},

	open: function(e){
		var self = this;

		if(e)
			(e).preventDefault();

		self.createReleases();

	},

	close: function(e) {
		var self = this;

		if(e)
			(e).preventDefault();

		self.options.setStyle('height', 0)
			.removeClass('expanded');
	},

	// Build the release table (once) and expand the options container
	createReleases: function(){
		var self = this;

		if(!self.releases_table){
			self.options.adopt(
				self.releases_table = new Element('div.releases.table')
			);

			// Header
			new Element('div.item.head').adopt(
				new Element('span.name', {'text': 'Release name'}),
				new Element('span.status', {'text': 'Status'}),
				new Element('span.quality', {'text': 'Quality'}),
				new Element('span.size', {'text': 'Size'}),
				new Element('span.age', {'text': 'Age'}),
				new Element('span.score', {'text': 'Score'}),
				new Element('span.provider', {'text': 'Provider'})
			).inject(self.releases_table);

			if(self.episode.data.releases)
				self.episode.data.releases.each(function(release){

					var quality = Quality.getQuality(release.quality) || {},
						info = release.info || {},
						provider = self.get(release, 'provider') + (info['provider_extra'] ? self.get(release, 'provider_extra') : '');

					// Prefer the actual downloaded file name when available
					var release_name = self.get(release, 'name');
					if(release.files && release.files.length > 0){
						try {
							var movie_file = release.files.filter(function(file){
								var type = File.Type.get(file.type_id);
								return type && type.identifier == 'movie'
							}).pick();
							release_name = movie_file.path.split(Api.getOption('path_sep')).getLast();
						}
						catch(e){}
					}

					// Create release
					release['el'] = new Element('div', {
						'class': 'item '+release.status,
						'id': 'release_'+release._id
					}).adopt(
						new Element('span.name', {'text': release_name, 'title': release_name}),
						new Element('span.status', {'text': release.status, 'class': 'status '+release.status}),
						new Element('span.quality', {'text': quality.label + (release.is_3d ? ' 3D' : '') || 'n/a'}),
						new Element('span.size', {'text': info['size'] ? Math.floor(self.get(release, 'size')) : 'n/a'}),
						new Element('span.age', {'text': self.get(release, 'age')}),
						new Element('span.score', {'text': self.get(release, 'score')}),
						new Element('span.provider', { 'text': provider, 'title': provider }),
						info['detail_url'] ? new Element('a.info.icon2', {
							'href': info['detail_url'],
							'target': '_blank'
						}) : new Element('a'),
						new Element('a.download.icon2', {
							'events': {
								'click': function(e){
									(e).preventDefault();
									if(!this.hasClass('completed'))
										self.download(release);
								}
							}
						}),
						new Element('a.delete.icon2', {
							'events': {
								'click': function(e){
									(e).preventDefault();
									self.ignore(release);
								}
							}
						})
					).inject(self.releases_table);

					// Track the most relevant last/next release for quick actions
					if(release.status == 'ignored' || release.status == 'failed' || release.status == 'snatched'){
						if(!self.last_release || (self.last_release && self.last_release.status != 'snatched' && release.status == 'snatched'))
							self.last_release = release;
					}
					else if(!self.next_release && release.status == 'available'){
						self.next_release = release;
					}

					var update_handle = function(notification) {
						if(notification.data._id != release._id) return;

						var q = self.show.quality.getElement('.q_' + release.quality),
							new_status = notification.data.status;

						release.el.set('class', 'item ' + new_status);

						// Bug fix: the status element is created with class
						// 'status', not 'release_status' -- the old selector
						// returned null and .set() threw
						var status_el = release.el.getElement('.status');
						status_el.set('class', 'status ' + new_status);
						status_el.set('text', new_status);

						if(!q && (new_status == 'snatched' || new_status == 'seeding' || new_status == 'done'))
							q = self.addQuality(release.quality_id);

						if(q && !q.hasClass(new_status)) {
							q.removeClass(release.status).addClass(new_status);
							q.set('title', q.get('title').replace(release.status, new_status));
						}
					};

					App.on('release.update_status', update_handle);

				});

			if(self.last_release)
				self.releases_table.getElements('#release_'+self.last_release._id).addClass('last_release');

			if(self.next_release)
				self.releases_table.getElements('#release_'+self.next_release._id).addClass('next_release');

			// Bug fix: indexOf returns -1 when missing, never `false`; the
			// old `=== false` comparison made the last_release half of this
			// condition unreachable
			if(self.next_release || (self.last_release && ['ignored', 'failed'].indexOf(self.last_release.status) < 0)){

				self.trynext_container = new Element('div.buttons.try_container').inject(self.releases_table, 'top');

				var nr = self.next_release,
					lr = self.last_release;

				self.trynext_container.adopt(
					new Element('span.or', {
						'text': 'If anything went wrong, download'
					}),
					lr ? new Element('a.button.orange', {
						'text': 'the same release again',
						'events': {
							'click': function(){
								self.download(lr);
							}
						}
					}) : null,
					nr && lr ? new Element('span.or', {
						'text': ','
					}) : null,
					nr ? [new Element('a.button.green', {
						'text': lr ? 'another release' : 'the best release',
						'events': {
							'click': function(){
								self.download(nr);
							}
						}
					}),
					new Element('span.or', {
						'text': 'or pick one below'
					})] : null
				)
			}

			self.last_release = null;
			self.next_release = null;

			// Collapse when clicking anywhere outside the episode row
			self.episode.el.addEvent('outerClick', function(){
				self.close();
			});
		}

		self.options.setStyle('height', self.releases_table.getSize().y)
			.addClass('expanded');

	},

	// Show the quick "try next / pick one / mark done" helper buttons
	showHelper: function(e){
		var self = this;
		if(e)
			(e).preventDefault();

		var has_available = false,
			has_snatched = false;

		if(self.episode.data.releases)
			self.episode.data.releases.each(function(release){
				if(has_available && has_snatched) return;

				if(['snatched', 'downloaded', 'seeding'].contains(release.status))
					has_snatched = true;

				if(['available'].contains(release.status))
					has_available = true;

			});

		if(has_available || has_snatched){

			self.trynext_container = new Element('div.buttons.trynext').inject(self.show.info_container);

			self.trynext_container.adopt(
				has_available ? [new Element('a.icon2.readd', {
					'text': has_snatched ? 'Download another release' : 'Download the best release',
					'events': {
						'click': self.tryNextRelease.bind(self)
					}
				}),
				new Element('a.icon2.download', {
					'text': 'pick one yourself',
					'events': {
						'click': function(){
							self.show.quality.fireEvent('click');
						}
					}
				})] : null,
				new Element('a.icon2.completed', {
					'text': 'mark this movie done',
					'events': {
						'click': self.markMovieDone.bind(self)
					}
				})
			)
		}

	},

	// Read a field from release.info with an 'n/a' fallback
	get: function(release, type){
		return (release.info && release.info[type] !== undefined) ? release.info[type] : 'n/a'
	},

	download: function(release){
		var self = this;

		var release_el = self.releases_table.getElement('#release_'+release._id),
			icon = release_el.getElement('.download.icon2');

		if(icon)
			icon.addClass('icon spinner').removeClass('download');

		Api.request('release.manual_download', {
			'data': {
				'id': release._id
			},
			'onComplete': function(json){
				if(icon)
					icon.removeClass('icon spinner');

				if(json.success){
					if(icon)
						icon.addClass('completed');
					// Bug fix: status span uses class 'status' (see row
					// creation above); '.release_status' never matched
					release_el.getElement('.status').set('text', 'snatched');
				}
				else
					if(icon)
						icon.addClass('attention').set('title', 'Something went wrong when downloading, please check logs.');
			}
		});
	},

	ignore: function(release){

		Api.request('release.ignore', {
			'data': {
				'id': release._id
			}
		})

	},

	// Remove the show from the wanted list with a collapse animation
	markMovieDone: function(){
		var self = this;

		Api.request('media.delete', {
			'data': {
				'id': self.show.get('_id'),
				'delete_from': 'wanted'
			},
			'onComplete': function(){
				var movie = $(self.show);
				movie.set('tween', {
					'duration': 300,
					'onComplete': function(){
						self.show.destroy()
					}
				});
				movie.tween('height', 0);
			}
		});

	},

	tryNextRelease: function(){
		var self = this;

		// NOTE(review): endpoint name says 'movie.searcher' -- presumably the
		// shared searcher handles shows too; confirm server-side routing
		Api.request('movie.searcher.try_next', {
			'data': {
				'media_id': self.show.get('_id')
			}
		});

	}

});
||||
// Refresh button: triggers a media refresh (info update + forced search)
// for this episode via the API.
EA.Refresh = new Class({

	Extends: EpisodeAction,

	create: function(){
		var self = this;

		self.el = new Element('a.refresh', {
			'title': 'Refresh the movie info and do a forced search',
			'events': {
				'click': self.doRefresh.bind(self)
			}
		});

	},

	doRefresh: function(e){
		var self = this;
		(e).preventDefault();

		Api.request('media.refresh', {
			'data': {
				'id': self.episode.get('_id')
			}
		});
	}

});
@@ -1,128 +0,0 @@
|
||||
// Single episode row inside a season: number, title, air date, quality
// markers, release list and the configured action buttons.
var Episode = new Class({

	Extends: BlockBase,

	// Instantiated action objects, keyed by lowercased action name
	action: {},

	initialize: function(show, options, data){
		var self = this;
		self.setOptions(options);

		self.show = show;
		self.options = options;
		self.data = data;

		// Episodes share the quality profile of their show
		self.profile = self.show.profile;

		self.el = new Element('div.item.episode').adopt(
			self.detail = new Element('div.item.data')
		);

		self.create();
	},

	create: function(){
		var self = this;

		self.detail.set('id', 'episode_'+self.data._id);

		self.detail.adopt(
			new Element('span.episode', {'text': (self.data.info.number || 0)}),
			new Element('span.name', {'text': self.getTitle()}),
			new Element('span.firstaired', {'text': self.data.info.firstaired}),

			// Clicking the quality span forwards to the releases toggle
			self.quality = new Element('span.quality', {
				'events': {
					'click': function(e){
						var releases = self.detail.getElement('.item-actions .releases');

						if(releases.isVisible())
							releases.fireEvent('click', [e])
					}
				}
			}),
			self.actions = new Element('div.item-actions')
		);

		// Add profile
		if(self.profile.data) {
			self.profile.getTypes().each(function(type){
				var q = self.addQuality(type.get('quality'), type.get('3d'));

				if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
					q.addClass('finish');
					q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
				}
			});
		}

		// Add releases
		self.updateReleases();

		// Instantiate configured action classes and attach their elements
		Object.each(self.options.actions, function(action, key){
			self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
			if(action.el)
				self.actions.adopt(action)
		});
	},

	// Reflect each release's status on the matching quality marker
	updateReleases: function(){
		var self = this;
		if(!self.data.releases || self.data.releases.length == 0) return;

		self.data.releases.each(function(release){

			var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
				status = release.status;

			// Snatched/seeding/done releases get a marker even when the
			// quality is not part of the profile
			if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
				q = self.addQuality(release.quality, release.is_3d || false);

			if (q && !q.hasClass(status)){
				q.addClass(status);
				q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
			}

		});
	},

	// Create a quality marker element inside the quality span
	addQuality: function(quality, is_3d){
		var self = this,
			q = Quality.getQuality(quality);

		return new Element('span', {
			'text': q.label + (is_3d ? ' 3D' : ''),
			'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
			'title': ''
		}).inject(self.quality);
	},

	// Episode title: first provider title, or "Episode <nr>" as fallback
	getTitle: function(){
		var self = this;

		var title = '';

		if(self.data.info.titles && self.data.info.titles.length > 0) {
			title = self.data.info.titles[0];
		} else {
			title = 'Episode ' + self.data.info.number;
		}

		return title;
	},

	getIdentifier: function(){
		var self = this;

		try {
			return self.get('identifiers').imdb;
		}
		catch (e){ }

		return self.get('imdb');
	},

	// Raw data accessor; falls back to the nested info blob
	get: function(attr){
		return this.data[attr] || this.data.info[attr]
	}
});
||||
@@ -1,636 +0,0 @@
|
||||
var ShowList = new Class({
|
||||
|
||||
Implements: [Events, Options],
|
||||
|
||||
options: {
|
||||
navigation: true,
|
||||
limit: 50,
|
||||
load_more: true,
|
||||
loader: true,
|
||||
menu: [],
|
||||
add_new: false,
|
||||
force_view: false
|
||||
},
|
||||
|
||||
movies: [],
|
||||
movies_added: {},
|
||||
total_movies: 0,
|
||||
letters: {},
|
||||
filter: null,
|
||||
|
||||
initialize: function(options){
|
||||
var self = this;
|
||||
self.setOptions(options);
|
||||
|
||||
self.offset = 0;
|
||||
self.filter = self.options.filter || {
|
||||
'starts_with': null,
|
||||
'search': null
|
||||
};
|
||||
|
||||
self.el = new Element('div.shows').adopt(
|
||||
self.title = self.options.title ? new Element('h2', {
|
||||
'text': self.options.title,
|
||||
'styles': {'display': 'none'}
|
||||
}) : null,
|
||||
self.description = self.options.description ? new Element('div.description', {
|
||||
'html': self.options.description,
|
||||
'styles': {'display': 'none'}
|
||||
}) : null,
|
||||
self.movie_list = new Element('div.list'),
|
||||
self.load_more = self.options.load_more ? new Element('a.load_more', {
|
||||
'events': {
|
||||
'click': self.loadMore.bind(self)
|
||||
}
|
||||
}) : null
|
||||
);
|
||||
|
||||
if($(window).getSize().x <= 480 && !self.options.force_view)
|
||||
self.changeView('list');
|
||||
else
|
||||
self.changeView(self.getSavedView() || self.options.view || 'details');
|
||||
|
||||
self.getMovies();
|
||||
|
||||
App.on('movie.added', self.movieAdded.bind(self));
|
||||
App.on('movie.deleted', self.movieDeleted.bind(self))
|
||||
},
|
||||
|
||||
movieDeleted: function(notification){
|
||||
var self = this;
|
||||
|
||||
if(self.movies_added[notification.data._id]){
|
||||
self.movies.each(function(movie){
|
||||
if(movie.get('_id') == notification.data._id){
|
||||
movie.destroy();
|
||||
delete self.movies_added[notification.data._id];
|
||||
self.setCounter(self.counter_count-1);
|
||||
self.total_movies--;
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
self.checkIfEmpty();
|
||||
},
|
||||
|
||||
movieAdded: function(notification){
|
||||
var self = this;
|
||||
|
||||
self.fireEvent('movieAdded', notification);
|
||||
if(self.options.add_new && !self.movies_added[notification.data._id] && notification.data.status == self.options.status){
|
||||
window.scroll(0,0);
|
||||
self.createShow(notification.data, 'top');
|
||||
self.setCounter(self.counter_count+1);
|
||||
|
||||
self.checkIfEmpty();
|
||||
}
|
||||
},
|
||||
|
||||
create: function(){
|
||||
var self = this;
|
||||
|
||||
// Create the alphabet nav
|
||||
if(self.options.navigation)
|
||||
self.createNavigation();
|
||||
|
||||
if(self.options.load_more)
|
||||
self.scrollspy = new ScrollSpy({
|
||||
min: function(){
|
||||
var c = self.load_more.getCoordinates();
|
||||
return c.top - window.document.getSize().y - 300
|
||||
},
|
||||
onEnter: self.loadMore.bind(self)
|
||||
});
|
||||
|
||||
self.created = true;
|
||||
},
|
||||
|
||||
addMovies: function(movies, total){
|
||||
var self = this;
|
||||
|
||||
if(!self.created) self.create();
|
||||
|
||||
// do scrollspy
|
||||
if(movies.length < self.options.limit && self.scrollspy){
|
||||
self.load_more.hide();
|
||||
self.scrollspy.stop();
|
||||
}
|
||||
|
||||
Object.each(movies, function(movie){
|
||||
self.createShow(movie);
|
||||
});
|
||||
|
||||
self.total_movies += total;
|
||||
self.setCounter(total);
|
||||
|
||||
},
|
||||
|
||||
setCounter: function(count){
|
||||
var self = this;
|
||||
|
||||
if(!self.navigation_counter) return;
|
||||
|
||||
self.counter_count = count;
|
||||
self.navigation_counter.set('text', (count || 0) + ' shows');
|
||||
|
||||
if (self.empty_message) {
|
||||
self.empty_message.destroy();
|
||||
self.empty_message = null;
|
||||
}
|
||||
|
||||
if(self.total_movies && count == 0 && !self.empty_message){
|
||||
var message = (self.filter.search ? 'for "'+self.filter.search+'"' : '') +
|
||||
(self.filter.starts_with ? ' in <strong>'+self.filter.starts_with+'</strong>' : '');
|
||||
|
||||
self.empty_message = new Element('.message', {
|
||||
'html': 'No shows found ' + message + '.<br/>'
|
||||
}).grab(
|
||||
new Element('a', {
|
||||
'text': 'Reset filter',
|
||||
'events': {
|
||||
'click': function(){
|
||||
self.filter = {
|
||||
'starts_with': null,
|
||||
'search': null
|
||||
};
|
||||
self.navigation_search_input.set('value', '');
|
||||
self.reset();
|
||||
self.activateLetter();
|
||||
self.getMovies(true);
|
||||
self.last_search_value = '';
|
||||
}
|
||||
}
|
||||
})
|
||||
).inject(self.movie_list);
|
||||
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
createShow: function(show, inject_at){
|
||||
var self = this;
|
||||
var m = new Show(self, {
|
||||
'actions': self.options.actions,
|
||||
'view': self.current_view,
|
||||
'onSelect': self.calculateSelected.bind(self)
|
||||
}, show);
|
||||
|
||||
$(m).inject(self.movie_list, inject_at || 'bottom');
|
||||
|
||||
m.fireEvent('injected');
|
||||
|
||||
self.movies.include(m);
|
||||
self.movies_added[show._id] = true;
|
||||
},
|
||||
|
||||
// Build the navigation bar: mass-edit controls, counter, filter menu
// (search box + A–Z letter list), view switcher and the extra menu.
// Called once; wires all navigation event handlers.
createNavigation: function(){
	var self = this;
	// '#' bucket plus the alphabet for the letter filter.
	var chars = '#ABCDEFGHIJKLMNOPQRSTUVWXYZ';

	self.el.addClass('with_navigation');

	self.navigation = new Element('div.alph_nav').adopt(
		// Mass-edit toolbar: select-all checkbox, selection counter,
		// and the quality / delete / refresh bulk actions.
		self.mass_edit_form = new Element('div.mass_edit_form').adopt(
			new Element('span.select').adopt(
				self.mass_edit_select = new Element('input[type=checkbox].inlay', {
					'events': {
						'change': self.massEditToggleAll.bind(self)
					}
				}),
				self.mass_edit_selected = new Element('span.count', {'text': 0}),
				self.mass_edit_selected_label = new Element('span', {'text': 'selected'})
			),
			new Element('div.quality').adopt(
				self.mass_edit_quality = new Element('select'),
				new Element('a.button.orange', {
					'text': 'Change quality',
					'events': {
						'click': self.changeQualitySelected.bind(self)
					}
				})
			),
			new Element('div.delete').adopt(
				new Element('span[text=or]'),
				new Element('a.button.red', {
					'text': 'Delete',
					'events': {
						'click': self.deleteSelected.bind(self)
					}
				})
			),
			new Element('div.refresh').adopt(
				new Element('span[text=or]'),
				new Element('a.button.green', {
					'text': 'Refresh',
					'events': {
						'click': self.refreshSelected.bind(self)
					}
				})
			)
		),
		new Element('div.menus').adopt(
			self.navigation_counter = new Element('span.counter[title=Total]'),
			self.filter_menu = new Block.Menu(self, {
				'class': 'filter'
			}),
			// View switcher (mass_edit / details / list). The clicked item is
			// moved to the top; siblings are hidden briefly to avoid a flicker
			// while the menu collapses.
			self.navigation_actions = new Element('ul.actions', {
				'events': {
					'click:relay(li)': function(e, el){
						var a = 'active';
						self.navigation_actions.getElements('.'+a).removeClass(a);
						self.changeView(el.get('data-view'));
						this.addClass(a);

						el.inject(el.getParent(), 'top');
						el.getSiblings().hide();
						setTimeout(function(){
							el.getSiblings().setStyle('display', null);
						}, 100)
					}
				}
			}),
			self.navigation_menu = new Block.Menu(self, {
				'class': 'extra'
			})
		)
	).inject(self.el, 'top');

	// Mass edit: style the select-all checkbox and fill the quality
	// dropdown with the active profiles.
	self.mass_edit_select_class = new Form.Check(self.mass_edit_select);
	Quality.getActiveProfiles().each(function(profile){
		new Element('option', {
			'value': profile.get('_id'),
			'text': profile.get('label')
		}).inject(self.mass_edit_quality)
	});

	// Search input lives inside the filter menu.
	self.filter_menu.addLink(
		self.navigation_search_input = new Element('input', {
			'title': 'Search through ' + self.options.identifier,
			'placeholder': 'Search through ' + self.options.identifier,
			'events': {
				'keyup': self.search.bind(self),
				'change': self.search.bind(self)
			}
		})
	).addClass('search');

	// Fetched once per page view; caches which letters have content.
	var available_chars;
	self.filter_menu.addEvent('open', function(){
		self.navigation_search_input.focus();

		// Get available chars and highlight
		if(!available_chars && (self.navigation.isDisplayed() || self.navigation.isVisible()))
			Api.request('media.available_chars', {
				'data': Object.merge({
					'type': 'show',
					'status': self.options.status
				}, self.filter),
				'onSuccess': function(json){
					available_chars = json.chars;

					available_chars.each(function(c){
						self.letters[c.capitalize()].addClass('available')
					})

				}
			});
	});

	// Letter list; only letters marked 'available' are clickable.
	self.filter_menu.addLink(
		self.navigation_alpha = new Element('ul.numbers', {
			'events': {
				'click:relay(li.available)': function(e, el){
					self.activateLetter(el.get('data-letter'));
					self.getMovies(true)
				}
			}
		})
	);

	// Actions: one <li> per view mode, active view first.
	['mass_edit', 'details', 'list'].each(function(view){
		var current = self.current_view == view;
		new Element('li', {
			'class': 'icon2 ' + view + (current ? ' active ' : ''),
			'data-view': view
		}).inject(self.navigation_actions, current ? 'top' : 'bottom');
	});

	// All: the default "no letter filter" entry.
	self.letters['all'] = new Element('li.letter_all.available.active', {
		'text': 'ALL'
	}).inject(self.navigation_alpha);

	// Chars: one entry per letter of the alphabet (plus '#').
	chars.split('').each(function(c){
		self.letters[c] = new Element('li', {
			'text': c,
			'class': 'letter_'+c,
			'data-letter': c
		}).inject(self.navigation_alpha);
	});

	// Add menu or hide
	if (self.options.menu.length > 0)
		self.options.menu.each(function(menu_item){
			self.navigation_menu.addLink(menu_item);
		});
	else
		self.navigation_menu.hide();

},
|
||||
|
||||
calculateSelected: function(){
|
||||
var self = this;
|
||||
|
||||
var selected = 0,
|
||||
movies = self.movies.length;
|
||||
self.movies.each(function(movie){
|
||||
selected += movie.isSelected() ? 1 : 0
|
||||
});
|
||||
|
||||
var indeterminate = selected > 0 && selected < movies,
|
||||
checked = selected == movies && selected > 0;
|
||||
|
||||
self.mass_edit_select.set('indeterminate', indeterminate);
|
||||
|
||||
self.mass_edit_select_class[checked ? 'check' : 'uncheck']();
|
||||
self.mass_edit_select_class.element[indeterminate ? 'addClass' : 'removeClass']('indeterminate');
|
||||
|
||||
self.mass_edit_selected.set('text', selected);
|
||||
},
|
||||
|
||||
deleteSelected: function(){
|
||||
var self = this,
|
||||
ids = self.getSelectedMovies(),
|
||||
help_msg = self.identifier == 'wanted' ? 'If you do, you won\'t be able to watch them, as they won\'t get downloaded!' : 'Your files will be safe, this will only delete the reference from the CouchPotato manage list';
|
||||
|
||||
var qObj = new Question('Are you sure you want to delete '+ids.length+' movie'+ (ids.length != 1 ? 's' : '') +'?', help_msg, [{
|
||||
'text': 'Yes, delete '+(ids.length != 1 ? 'them' : 'it'),
|
||||
'class': 'delete',
|
||||
'events': {
|
||||
'click': function(e){
|
||||
(e).preventDefault();
|
||||
this.set('text', 'Deleting..');
|
||||
Api.request('media.delete', {
|
||||
'method': 'post',
|
||||
'data': {
|
||||
'id': ids.join(','),
|
||||
'delete_from': self.options.identifier
|
||||
},
|
||||
'onSuccess': function(){
|
||||
qObj.close();
|
||||
|
||||
var erase_movies = [];
|
||||
self.movies.each(function(movie){
|
||||
if (movie.isSelected()){
|
||||
$(movie).destroy();
|
||||
erase_movies.include(movie);
|
||||
}
|
||||
});
|
||||
|
||||
erase_movies.each(function(movie){
|
||||
self.movies.erase(movie);
|
||||
movie.destroy();
|
||||
self.setCounter(self.counter_count-1);
|
||||
self.total_movies--;
|
||||
});
|
||||
|
||||
self.calculateSelected();
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
}, {
|
||||
'text': 'Cancel',
|
||||
'cancel': true
|
||||
}]);
|
||||
|
||||
},
|
||||
|
||||
// Apply the quality profile chosen in the mass-edit dropdown to every
// selected show, then re-run the current search to refresh the view.
changeQualitySelected: function(){
	var self = this;
	var ids = self.getSelectedMovies();

	Api.request('movie.edit', {
		'method': 'post',
		'data': {
			'id': ids.join(','),
			'profile_id': self.mass_edit_quality.get('value')
		},
		// Reload the list so the new profiles are reflected.
		'onSuccess': self.search.bind(self)
	});
},
|
||||
|
||||
// Trigger a metadata/search refresh for every selected show.
// Fire-and-forget: progress is reported via app-level notifications.
refreshSelected: function(){
	var self = this;
	var ids = self.getSelectedMovies();

	Api.request('media.refresh', {
		'method': 'post',
		'data': {
			'id': ids.join(',')
		}
	});
},
|
||||
|
||||
getSelectedMovies: function(){
|
||||
var self = this;
|
||||
|
||||
var ids = [];
|
||||
self.movies.each(function(movie){
|
||||
if (movie.isSelected())
|
||||
ids.include(movie.get('_id'))
|
||||
});
|
||||
|
||||
return ids
|
||||
},
|
||||
|
||||
massEditToggleAll: function(){
|
||||
var self = this;
|
||||
|
||||
var select = self.mass_edit_select.get('checked');
|
||||
|
||||
self.movies.each(function(movie){
|
||||
movie.select(select)
|
||||
});
|
||||
|
||||
self.calculateSelected()
|
||||
},
|
||||
|
||||
// Clear all client-side list state (rendered shows, selection, letter
// highlight, paging offset) and re-arm infinite scroll. Does NOT fetch —
// callers follow up with getMovies(true).
reset: function(){
	var self = this;

	self.movies = [];

	// Guard: navigation may not have been created yet.
	if(self.mass_edit_select)
		self.calculateSelected();
	if(self.navigation_alpha)
		self.navigation_alpha.getElements('.active').removeClass('active');

	self.offset = 0;

	if(self.scrollspy){
		//self.load_more.show();
		self.scrollspy.start();
	}
},
|
||||
|
||||
// Switch the letter filter. `letter` is a single character or undefined;
// undefined selects the 'all' entry and clears the starts_with filter
// (starts_with becomes undefined, which is falsy like null).
activateLetter: function(letter){
	var self = this;

	// reset() clears the previous letter's 'active' class along with the list.
	self.reset();

	self.letters[letter || 'all'].addClass('active');
	self.filter.starts_with = letter;

},
|
||||
|
||||
// Switch the list's view mode (e.g. details/list/mass_edit): swap the
// CSS class on the root element and persist the choice in a cookie so
// getSavedView can restore it later.
changeView: function(new_view){
	var self = this;

	self.el
		.removeClass(self.current_view+'_list')
		.addClass(new_view+'_list');

	self.current_view = new_view;
	// duration is in days (MooTools Cookie.write).
	Cookie.write(self.options.identifier+'_view2', new_view, {duration: 1000});
},
|
||||
|
||||
getSavedView: function(){
|
||||
var self = this;
|
||||
return Cookie.read(self.options.identifier+'_view2');
|
||||
},
|
||||
|
||||
// Debounced search handler (bound to keyup/change on the search input).
// Waits 250ms after the last event, then reloads the list if the query
// actually changed since the previous search.
search: function(){
	var self = this;

	// Restart the debounce window on every keystroke.
	if(self.search_timer) clearTimeout(self.search_timer);
	self.search_timer = (function(){
		var search_value = self.navigation_search_input.get('value');
		// Skip redundant reloads for an unchanged query.
		if (search_value == self.last_search_value) return;

		self.reset();

		// A new search clears the letter filter before applying the query.
		self.activateLetter();
		self.filter.search = search_value;

		self.getMovies(true);

		self.last_search_value = search_value;

	}).delay(250);

},
|
||||
|
||||
update: function(){
|
||||
var self = this;
|
||||
|
||||
self.reset();
|
||||
self.getMovies(true);
|
||||
},
|
||||
|
||||
// Fetch a page of media from the API and append it to the list.
// reset: when true, the list DOM is emptied before the new results are
// added (used when the filter changes). Paging is driven by self.offset,
// which store() advances as batches arrive.
getMovies: function(reset){
	var self = this;

	// Pause infinite scroll while a request is in flight.
	if(self.scrollspy){
		self.scrollspy.stop();
		self.load_more.set('text', 'loading...');
	}

	// First load: show a full-size loading placeholder with a spinner.
	if(self.movies.length == 0 && self.options.loader){

		self.loader_first = new Element('div.loading').adopt(
			new Element('div.message', {'text': self.options.title ? 'Loading \'' + self.options.title + '\'' : 'Loading...'})
		).inject(self.el, 'top');

		createSpinner(self.loader_first, {
			radius: 4,
			length: 4,
			width: 1
		});

		// Reserve space so the layout doesn't jump while loading.
		self.el.setStyle('min-height', 93);

	}

	Api.request(self.options.api_call || 'media.list', {
		'data': Object.merge({
			'type': self.options.type || 'movie',
			'status': self.options.status,
			// "limit,offset" pair, or null for an unpaged request.
			'limit_offset': self.options.limit ? self.options.limit + ',' + self.offset : null
		}, self.filter),
		'onSuccess': function(json){

			if(reset)
				self.movie_list.empty();

			// Fade the first-load placeholder out; destroy it later so the
			// CSS transition can finish.
			if(self.loader_first){
				var lf = self.loader_first;
				self.loader_first.addClass('hide');
				self.loader_first = null;
				setTimeout(function(){
					lf.destroy();
				}, 20000);
				self.el.setStyle('min-height', null);
			}

			// Advance paging, then render the batch.
			self.store(json.shows);
			self.addMovies(json.shows, json.total || json.shows.length);
			// Re-arm infinite scroll for the next page.
			if(self.scrollspy) {
				self.load_more.set('text', 'load more movies');
				self.scrollspy.start();
			}

			self.checkIfEmpty();
			self.fireEvent('loaded');
		}
	});
},
|
||||
|
||||
// ScrollSpy callback: fetch the next page. The guard skips the fetch
// until at least one full page has already been loaded, preventing a
// duplicate request while the initial load is still in flight.
loadMore: function(){
	var self = this;
	if(self.offset >= self.options.limit)
		self.getMovies()
},
|
||||
|
||||
store: function(movies){
|
||||
var self = this;
|
||||
|
||||
self.offset += movies.length;
|
||||
|
||||
},
|
||||
|
||||
// Toggle the "library is empty" presentation: hide the title/description
// and show the configured on_empty_element when nothing has ever been
// loaded; restore the normal chrome once content exists.
checkIfEmpty: function(){
	var self = this;

	// Empty means no rendered shows AND no known total (undefined covers
	// the state before the first response arrives with a count).
	var is_empty = self.movies.length == 0 && (self.total_movies == 0 || self.total_movies === undefined);

	if(self.title)
		self.title[is_empty ? 'hide' : 'show']();

	if(self.description)
		self.description.setStyle('display', [is_empty ? 'none' : '']);

	if(is_empty && self.options.on_empty_element){
		// Place the empty-state element right after whatever header exists.
		self.options.on_empty_element.inject(self.loader_first || self.title || self.movie_list, 'after');

		if(self.navigation)
			self.navigation.hide();

		self.empty_element = self.options.on_empty_element;
	}
	// Content (re)appeared: tear the empty-state down again.
	else if(self.empty_element){
		self.empty_element.destroy();

		if(self.navigation)
			self.navigation.show();
	}

},
|
||||
|
||||
// MooTools protocol: lets $(list) resolve to the root element.
toElement: function(){
	return this.el;
}
|
||||
|
||||
});
|
||||
@@ -1,230 +0,0 @@
|
||||
// One result row in the "add new show" search dropdown: renders the
// poster/title/year, and on click expands an options panel (title,
// profile and category selects) from which the show can be added.
Block.Search.ShowItem = new Class({

	Implements: [Options, Events],

	// info: search-result data from the API; options: standard MooTools options.
	initialize: function(info, options){
		var self = this;
		self.setOptions(options);

		self.info = info;
		self.alternative_titles = [];

		self.create();
	},

	// Build the row's DOM. Clicking the data area opens the options panel.
	create: function(){
		var self = this,
			info = self.info;

		self.el = new Element('div.media_result', {
			'id': info.id
		}).adopt(
			// FIX: also guard `info.images.poster` itself — results can carry
			// an `images` object without a poster list, which crashed here.
			self.thumbnail = info.images && info.images.poster && info.images.poster.length > 0 ? new Element('img.thumbnail', {
				'src': info.images.poster[0],
				'height': null,
				'width': null
			}) : null,
			self.options_el = new Element('div.options.inlay'),
			self.data_container = new Element('div.data', {
				'events': {
					'click': self.showOptions.bind(self)
				}
			}).adopt(
				self.info_container = new Element('div.info').adopt(
					new Element('h2').adopt(
						self.title = new Element('span.title', {
							'text': info.titles && info.titles.length > 0 ? info.titles[0] : 'Unknown'
						}),
						self.year = info.year ? new Element('span.year', {
							'text': info.year
						}) : null
					)
				)
			)
		)

		// Register every known title as a selectable alternative.
		if(info.titles)
			info.titles.each(function(title){
				self.alternativeTitle({
					'title': title
				});
			})
	},

	// Remember an alternative title ({'title': ...}) for the title dropdown.
	alternativeTitle: function(alternative){
		var self = this;

		self.alternative_titles.include(alternative);
	},

	// Best display title: original_title when present, else the first title.
	// Falls back to 'Unknown' when neither exists (try/catch covers missing
	// `titles` entirely).
	getTitle: function(){
		var self = this;
		try {
			return self.info.original_title ? self.info.original_title : self.info.titles[0];
		}
		catch(e){
			return 'Unknown';
		}
	},

	// Raw accessor into the search-result data.
	get: function(key){
		return this.info[key]
	},

	// Expand the options panel; clicking outside the row closes it again.
	showOptions: function(){
		var self = this;

		self.createOptions();

		self.data_container.addClass('open');
		self.el.addEvent('outerClick', self.closeOptions.bind(self))

	},

	closeOptions: function(){
		var self = this;

		self.data_container.removeClass('open');
		self.el.removeEvents('outerClick')
	},

	// Submit the show to the backend with the selected title/profile/category.
	// e: optional click event (default-prevented when present).
	add: function(e){
		var self = this;

		if(e)
			(e).preventDefault();

		self.loadingMask();

		Api.request('show.add', {
			'data': {
				'identifiers': self.info.identifiers,
				'type': self.info.type,
				'title': self.title_select.get('value'),
				'profile_id': self.profile_select.get('value'),
				'category_id': self.category_select.get('value')
			},
			'onComplete': function(json){
				// Replace the options panel with a success/failure message.
				self.options_el.empty();
				self.options_el.adopt(
					new Element('div.message', {
						'text': json.success ? 'Show successfully added.' : 'Show didn\'t add properly. Check logs'
					})
				);
				self.mask.fade('out');

				self.fireEvent('added');
			},
			'onFailure': function(){
				self.options_el.empty();
				self.options_el.adopt(
					new Element('div.message', {
						'text': 'Something went wrong, check the logs for more info.'
					})
				);
				self.mask.fade('out');
			}
		});
	},

	// Lazily build the options panel (runs once, marked by the 'set' class):
	// existing-library notice, title/profile/category selects, Add button.
	// Auto-submits when there is effectively nothing to choose.
	createOptions: function(){
		var self = this,
			info = self.info;

		if(!self.options_el.hasClass('set')){

			// Collect the quality labels already present in the library, if any.
			// (var is function-scoped, so in_library is visible below.)
			if(self.info.in_library){
				var in_library = [];
				self.info.in_library.releases.each(function(release){
					in_library.include(release.quality.label)
				});
			}

			self.options_el.grab(
				new Element('div', {
					'class': self.info.in_wanted && self.info.in_wanted.profile_id || in_library ? 'in_library_wanted' : ''
				}).adopt(
					self.info.in_wanted && self.info.in_wanted.profile_id ? new Element('span.in_wanted', {
						'text': 'Already in wanted list: ' + Quality.getProfile(self.info.in_wanted.profile_id).get('label')
					}) : (in_library ? new Element('span.in_library', {
						'text': 'Already in library: ' + in_library.join(', ')
					}) : null),
					self.title_select = new Element('select', {
						'name': 'title'
					}),
					self.profile_select = new Element('select', {
						'name': 'profile'
					}),
					self.category_select = new Element('select', {
						'name': 'category'
					}).grab(
						new Element('option', {'value': -1, 'text': 'None'})
					),
					self.add_button = new Element('a.button', {
						'text': 'Add',
						'events': {
							'click': self.add.bind(self)
						}
					})
				)
			);

			// Fill the title dropdown with every alternative title.
			Array.each(self.alternative_titles, function(alt){
				new Element('option', {
					'text': alt.title
				}).inject(self.title_select)
			})


			// Fill categories
			var categories = CategoryList.getAll();

			if(categories.length == 0)
				self.category_select.hide();
			else {
				self.category_select.show();
				categories.each(function(category){
					new Element('option', {
						'value': category.data._id,
						'text': category.data.label
					}).inject(self.category_select);
				});
			}

			// Fill profiles
			var profiles = Quality.getActiveProfiles();
			if(profiles.length == 1)
				self.profile_select.hide();

			profiles.each(function(profile){
				new Element('option', {
					'value': profile.get('_id'),
					'text': profile.get('label')
				}).inject(self.profile_select)
			});

			self.options_el.addClass('set');

			// Nothing to pick (single title/profile, no categories) and not
			// already in the library or wanted list: add immediately.
			if(categories.length == 0 && self.title_select.getElements('option').length == 1 && profiles.length == 1 &&
				!(self.info.in_wanted && self.info.in_wanted.profile_id || in_library))
				self.add();

		}

	},

	// Overlay a semi-transparent mask with a spinner while the add request runs.
	loadingMask: function(){
		var self = this;

		self.mask = new Element('div.mask').inject(self.el).fade('hide')

		createSpinner(self.mask)
		self.mask.fade('in')

	},

	// MooTools protocol: lets $(item) resolve to the root element.
	toElement: function(){
		return this.el
	}

});
|
||||
@@ -1,127 +0,0 @@
|
||||
// One season row inside a show's episode tree: shows the season title,
// its quality badges (from the profile and from known releases) and the
// per-season action buttons.
var Season = new Class({

	Extends: BlockBase,

	// key -> instantiated action, filled in create().
	action: {},

	// show: parent Show block; options: includes the `actions` classes;
	// data: season media record from the API.
	initialize: function(show, options, data){
		var self = this;
		self.setOptions(options);

		self.show = show;
		self.options = options;
		self.data = data;

		// Seasons inherit the parent show's quality profile.
		self.profile = self.show.profile;

		self.el = new Element('div.item.season').adopt(
			self.detail = new Element('div.item.data')
		);

		self.create();
	},

	// Build the row DOM: name, quality badges, action buttons.
	create: function(){
		var self = this;

		self.detail.set('id', 'season_'+self.data._id);

		self.detail.adopt(
			new Element('span.name', {'text': self.getTitle()}),

			// Clicking the quality area forwards to the releases action,
			// when that action is visible.
			self.quality = new Element('span.quality', {
				'events': {
					'click': function(e){
						var releases = self.detail.getElement('.item-actions .releases');

						if(releases.isVisible())
							releases.fireEvent('click', [e])
					}
				}
			}),
			self.actions = new Element('div.item-actions')
		);

		// Add profile: one badge per quality in the profile; mark the
		// qualities that stop the search once found.
		if(self.profile.data) {
			self.profile.getTypes().each(function(type){
				var q = self.addQuality(type.get('quality'), type.get('3d'));

				if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
					q.addClass('finish');
					q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
				}
			});
		}

		// Add releases
		self.updateReleases();

		// Instantiate the configured action classes and mount those that
		// render an element.
		Object.each(self.options.actions, function(action, key){
			self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
			if(action.el)
				self.actions.adopt(action)
		});
	},

	// Overlay release status (snatched/seeding/done/...) onto the quality
	// badges, creating a badge when a successful release has a quality the
	// profile didn't list.
	updateReleases: function(){
		var self = this;
		if(!self.data.releases || self.data.releases.length == 0) return;

		self.data.releases.each(function(release){

			// Match the badge for this quality, distinguishing 3D from flat.
			var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
				status = release.status;

			if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
				q = self.addQuality(release.quality, release.is_3d || false);

			if (q && !q.hasClass(status)){
				q.addClass(status);
				q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
			}

		});
	},

	// Create and mount one quality badge; returns the new element.
	addQuality: function(quality, is_3d){
		var self = this,
			q = Quality.getQuality(quality);

		return new Element('span', {
			'text': q.label + (is_3d ? ' 3D' : ''),
			'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
			'title': ''
		}).inject(self.quality);
	},

	// Display title: "Season N", or "Specials" for season 0 / no number.
	getTitle: function(){
		var self = this;

		var title = '';

		if(self.data.info.number) {
			title = 'Season ' + self.data.info.number;
		} else {
			// Season 0 / Specials
			title = 'Specials';
		}

		return title;
	},

	// IMDB id: prefer the nested identifiers object, fall back to a flat
	// 'imdb' attribute when identifiers is absent.
	getIdentifier: function(){
		var self = this;

		try {
			return self.get('identifiers').imdb;
		}
		catch (e){ }

		return self.get('imdb');
	},

	// Attribute lookup: top-level data first, then the nested info object.
	get: function(attr){
		return this.data[attr] || this.data.info[attr]
	}
});
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,92 +0,0 @@
|
||||
// Slide-down panel listing a show's seasons and episodes. Lazily fetches
// the media tree from the API on first open and caches the built DOM.
var Episodes = new Class({
	// show: parent Show block; options: passed through to Season/Episode.
	initialize: function(show, options) {
		var self = this;

		self.show = show;
		self.options = options;
	},

	// Open the panel; builds the container and fetches the tree only once.
	open: function(){
		var self = this;

		if(!self.container){
			self.container = new Element('div.options').grab(
				self.episodes_container = new Element('div.episodes.table')
			);

			self.container.inject(self.show, 'top');

			Api.request('library.tree', {
				'data': {
					'media_id': self.show.data._id
				},
				'onComplete': function(json){
					self.data = json.result;

					self.createEpisodes();
				}
			});
		}

		self.show.slide('in', self.container, true);
	},

	// Render all seasons (sorted, Specials last) with their episodes.
	createEpisodes: function() {
		var self = this;

		self.data.seasons.sort(self.sortSeasons);
		self.data.seasons.each(function(season) {
			self.createSeason(season);

			season.episodes.sort(self.sortEpisodes);
			season.episodes.each(function(episode) {
				self.createEpisode(episode);
			});
		});
	},

	// Build and mount one Season block.
	createSeason: function(season) {
		var self = this,
			s = new Season(self.show, self.options, season);

		$(s).inject(self.episodes_container);
	},

	// Build and mount one Episode block.
	createEpisode: function(episode){
		var self = this,
			e = new Episode(self.show, self.options, episode);

		$(e).inject(self.episodes_container);
	},

	// Comparator for Array.sort over seasons.
	sortSeasons: function(a, b) {
		// Move "Specials" (no season number) to the bottom of the list
		if(!a.info.number) {
			return 1;
		}

		if(!b.info.number) {
			return -1;
		}

		// Order seasons ascending by number
		// (original comment said "descending", but a<b => -1 is ascending).
		if(a.info.number < b.info.number)
			return -1;

		if(a.info.number > b.info.number)
			return 1;

		return 0;
	},

	// Comparator for Array.sort over episodes.
	sortEpisodes: function(a, b) {
		// Order episodes ascending by number
		if(a.info.number < b.info.number)
			return -1;

		if(a.info.number > b.info.number)
			return 1;

		return 0;
	}
});
|
||||
@@ -1,370 +0,0 @@
|
||||
var Show = new Class({
|
||||
|
||||
Extends: BlockBase,
|
||||
|
||||
action: {},
|
||||
|
||||
initialize: function(list, options, data){
|
||||
var self = this;
|
||||
|
||||
self.data = data;
|
||||
self.view = options.view || 'details';
|
||||
self.list = list;
|
||||
|
||||
self.el = new Element('div.show');
|
||||
|
||||
self.episodes = new Episodes(self, {
|
||||
'actions': [EA.IMDB, EA.Release, EA.Refresh]
|
||||
});
|
||||
|
||||
self.profile = Quality.getProfile(data.profile_id) || {};
|
||||
self.category = CategoryList.getCategory(data.category_id) || {};
|
||||
self.parent(self, options);
|
||||
|
||||
self.addEvents();
|
||||
},
|
||||
|
||||
addEvents: function(){
|
||||
var self = this;
|
||||
|
||||
self.global_events = {};
|
||||
|
||||
// Do refresh with new data
|
||||
self.global_events['movie.update'] = function(notification){
|
||||
if(self.data._id != notification.data._id) return;
|
||||
|
||||
self.busy(false);
|
||||
self.removeView();
|
||||
self.update.delay(2000, self, notification);
|
||||
};
|
||||
App.on('movie.update', self.global_events['movie.update']);
|
||||
|
||||
// Add spinner on load / search
|
||||
['media.busy', 'movie.searcher.started'].each(function(listener){
|
||||
self.global_events[listener] = function(notification){
|
||||
if(notification.data && (self.data._id == notification.data._id || (typeOf(notification.data._id) == 'array' && notification.data._id.indexOf(self.data._id) > -1)))
|
||||
self.busy(true);
|
||||
};
|
||||
App.on(listener, self.global_events[listener]);
|
||||
});
|
||||
|
||||
// Remove spinner
|
||||
self.global_events['movie.searcher.ended'] = function(notification){
|
||||
if(notification.data && self.data._id == notification.data._id)
|
||||
self.busy(false)
|
||||
};
|
||||
App.on('movie.searcher.ended', self.global_events['movie.searcher.ended']);
|
||||
|
||||
// Reload when releases have updated
|
||||
self.global_events['release.update_status'] = function(notification){
|
||||
var data = notification.data;
|
||||
if(data && self.data._id == data.movie_id){
|
||||
|
||||
if(!self.data.releases)
|
||||
self.data.releases = [];
|
||||
|
||||
self.data.releases.push({'quality': data.quality, 'status': data.status});
|
||||
self.updateReleases();
|
||||
}
|
||||
};
|
||||
|
||||
App.on('release.update_status', self.global_events['release.update_status']);
|
||||
|
||||
},
|
||||
|
||||
destroy: function(){
|
||||
var self = this;
|
||||
|
||||
self.el.destroy();
|
||||
delete self.list.movies_added[self.get('id')];
|
||||
self.list.movies.erase(self);
|
||||
|
||||
self.list.checkIfEmpty();
|
||||
|
||||
// Remove events
|
||||
Object.each(self.global_events, function(handle, listener){
|
||||
App.off(listener, handle);
|
||||
});
|
||||
},
|
||||
|
||||
busy: function(set_busy, timeout){
|
||||
var self = this;
|
||||
|
||||
if(!set_busy){
|
||||
setTimeout(function(){
|
||||
if(self.spinner){
|
||||
self.mask.fade('out');
|
||||
setTimeout(function(){
|
||||
if(self.mask)
|
||||
self.mask.destroy();
|
||||
if(self.spinner)
|
||||
self.spinner.el.destroy();
|
||||
self.spinner = null;
|
||||
self.mask = null;
|
||||
}, timeout || 400);
|
||||
}
|
||||
}, timeout || 1000)
|
||||
}
|
||||
else if(!self.spinner) {
|
||||
self.createMask();
|
||||
self.spinner = createSpinner(self.mask);
|
||||
self.mask.fade('in');
|
||||
}
|
||||
},
|
||||
|
||||
createMask: function(){
|
||||
var self = this;
|
||||
self.mask = new Element('div.mask', {
|
||||
'styles': {
|
||||
'z-index': 4
|
||||
}
|
||||
}).inject(self.el, 'top').fade('hide');
|
||||
},
|
||||
|
||||
update: function(notification){
|
||||
var self = this;
|
||||
|
||||
self.data = notification.data;
|
||||
self.el.empty();
|
||||
self.removeView();
|
||||
|
||||
self.profile = Quality.getProfile(self.data.profile_id) || {};
|
||||
self.category = CategoryList.getCategory(self.data.category_id) || {};
|
||||
self.create();
|
||||
|
||||
self.busy(false);
|
||||
},
|
||||
|
||||
create: function(){
|
||||
var self = this;
|
||||
|
||||
self.el.addClass('status_'+self.get('status'));
|
||||
|
||||
var eta = null,
|
||||
eta_date = null,
|
||||
now = Math.round(+new Date()/1000);
|
||||
|
||||
if(self.data.info.release_date)
|
||||
[self.data.info.release_date.dvd, self.data.info.release_date.theater].each(function(timestamp){
|
||||
if (timestamp > 0 && (eta == null || Math.abs(timestamp - now) < Math.abs(eta - now)))
|
||||
eta = timestamp;
|
||||
});
|
||||
|
||||
if(eta){
|
||||
eta_date = new Date(eta * 1000);
|
||||
eta_date = eta_date.toLocaleString('en-us', { month: "long" }) + ' ' + eta_date.getFullYear();
|
||||
}
|
||||
|
||||
self.el.adopt(
|
||||
self.select_checkbox = new Element('input[type=checkbox].inlay', {
|
||||
'events': {
|
||||
'change': function(){
|
||||
self.fireEvent('select')
|
||||
}
|
||||
}
|
||||
}),
|
||||
self.thumbnail = (self.data.files && self.data.files.image_poster) ? new Element('img', {
|
||||
'class': 'type_image poster',
|
||||
'src': Api.createUrl('file.cache') + self.data.files.image_poster[0].split(Api.getOption('path_sep')).pop()
|
||||
}): null,
|
||||
self.data_container = new Element('div.data.inlay.light').adopt(
|
||||
self.info_container = new Element('div.info').adopt(
|
||||
new Element('div.title').adopt(
|
||||
self.title = new Element('a', {
|
||||
'events': {
|
||||
'click': function(e){
|
||||
self.episodes.open();
|
||||
}
|
||||
},
|
||||
'text': self.getTitle() || 'n/a'
|
||||
}),
|
||||
self.year = new Element('div.year', {
|
||||
'text': self.data.info.year || 'n/a'
|
||||
})
|
||||
),
|
||||
self.description = new Element('div.description.tiny_scroll', {
|
||||
'text': self.data.info.plot
|
||||
}),
|
||||
self.eta = eta_date && (now+8035200 > eta) ? new Element('div.eta', {
|
||||
'text': eta_date,
|
||||
'title': 'ETA'
|
||||
}) : null,
|
||||
self.quality = new Element('div.quality', {
|
||||
'events': {
|
||||
'click': function(e){
|
||||
var releases = self.el.getElement('.actions .releases');
|
||||
if(releases.isVisible())
|
||||
releases.fireEvent('click', [e])
|
||||
}
|
||||
}
|
||||
})
|
||||
),
|
||||
self.actions = new Element('div.actions')
|
||||
)
|
||||
);
|
||||
|
||||
if(!self.thumbnail)
|
||||
self.el.addClass('no_thumbnail');
|
||||
|
||||
//self.changeView(self.view);
|
||||
self.select_checkbox_class = new Form.Check(self.select_checkbox);
|
||||
|
||||
// Add profile
|
||||
if(self.profile.data)
|
||||
self.profile.getTypes().each(function(type){
|
||||
|
||||
var q = self.addQuality(type.get('quality'), type.get('3d'));
|
||||
if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
|
||||
q.addClass('finish');
|
||||
q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
// Add releases
|
||||
self.updateReleases();
|
||||
|
||||
Object.each(self.options.actions, function(action, key){
|
||||
self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
|
||||
if(action.el)
|
||||
self.actions.adopt(action)
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
// Decorate the quality tags with the status of each known release for this
// movie (adding missing tags for releases that were actually grabbed).
updateReleases: function(){
    var self = this;
    if(!self.data.releases || self.data.releases.length == 0) return;

    self.data.releases.each(function(release){

        // Find the tag matching this release's quality and 2D/3D flavour.
        var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
            status = release.status;

        // Qualities outside the profile still get a tag once a release of
        // that quality has been snatched/seeded/downloaded.
        if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
            q = self.addQuality(release.quality, release.is_3d || false);

        // Reflect the release status as a CSS class and in the tooltip.
        if (q && !q.hasClass(status)){
            q.addClass(status);
            q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
        }

    });
},
|
||||
|
||||
addQuality: function(quality, is_3d){
|
||||
var self = this;
|
||||
|
||||
var q = Quality.getQuality(quality);
|
||||
return new Element('span', {
|
||||
'text': q.label + (is_3d ? ' 3D' : ''),
|
||||
'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
|
||||
'title': ''
|
||||
}).inject(self.quality);
|
||||
|
||||
},
|
||||
|
||||
getTitle: function(){
|
||||
var self = this;
|
||||
|
||||
if(self.data.title)
|
||||
return self.getUnprefixedTitle(self.data.title);
|
||||
else if(self.data.info.titles.length > 0)
|
||||
return self.getUnprefixedTitle(self.data.info.titles[0]);
|
||||
|
||||
return 'Unknown movie'
|
||||
},
|
||||
|
||||
getUnprefixedTitle: function(t){
|
||||
if(t.substr(0, 4).toLowerCase() == 'the ')
|
||||
t = t.substr(4) + ', The';
|
||||
else if(t.substr(0, 3).toLowerCase() == 'an ')
|
||||
t = t.substr(3) + ', An';
|
||||
else if(t.substr(0, 2).toLowerCase() == 'a ')
|
||||
t = t.substr(2) + ', A';
|
||||
return t;
|
||||
},
|
||||
|
||||
// Slide the item's detail panel in (direction == 'in') or out (anything else).
// `el` is the panel element to reveal; `expand` === true additionally widens
// the row and its tables.
slide: function(direction, el, expand){
    var self = this;

    if(direction == 'in'){
        // Remember the current view so it can be restored after closing.
        self.temp_view = self.view;
        self.changeView('details');

        // Clicking anywhere outside the row closes the panel again.
        self.el.addEvent('outerClick', function(){
            self.removeView();
            self.slide('out')
        });
        el.show();

        if(expand === true) {
            self.el.addClass('expanded');
            self.el.getElements('.table').addClass('expanded');
        }

        self.data_container.addClass('hide_right');
    }
    else {
        self.el.removeEvents('outerClick');

        // Wait for the slide-out CSS transition (~600ms) before hiding the
        // children; guard against the element having been destroyed meanwhile.
        setTimeout(function(){
            if(self.el)
            {
                self.el.getElements('> :not(.data):not(.poster):not(.movie_container)').hide();
                self.el.getElements('.table').removeClass('expanded');
            }
        }, 600);

        self.el.removeClass('expanded');
        self.data_container.removeClass('hide_right');
    }
},
|
||||
|
||||
changeView: function(new_view){
|
||||
var self = this;
|
||||
|
||||
if(self.el)
|
||||
self.el
|
||||
.removeClass(self.view+'_view')
|
||||
.addClass(new_view+'_view');
|
||||
|
||||
self.view = new_view;
|
||||
},
|
||||
|
||||
removeView: function(){
|
||||
var self = this;
|
||||
|
||||
self.el.removeClass(self.view+'_view')
|
||||
},
|
||||
|
||||
getIdentifier: function(){
|
||||
var self = this;
|
||||
|
||||
try {
|
||||
return self.get('identifiers').imdb;
|
||||
}
|
||||
catch (e){ }
|
||||
|
||||
return self.get('imdb');
|
||||
},
|
||||
|
||||
get: function(attr){
|
||||
return this.data[attr] || this.data.info[attr]
|
||||
},
|
||||
|
||||
select: function(bool){
|
||||
var self = this;
|
||||
self.select_checkbox_class[bool ? 'check' : 'uncheck']()
|
||||
},
|
||||
|
||||
// Whether the row's checkbox is currently checked.
isSelected: function(){
    return this.select_checkbox.get('checked');
},
|
||||
|
||||
// MooTools protocol: lets this object be used wherever a DOM element is
// expected (e.g. $(item) / adopt()).
toElement: function(){
    return this.el;
}
|
||||
|
||||
});
|
||||
@@ -1,71 +0,0 @@
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'EpisodeLibraryPlugin'
|
||||
|
||||
|
||||
class EpisodeLibraryPlugin(LibraryBase):
    """Builds search titles and season/episode identifiers for show.episode media."""

    def __init__(self):
        addEvent('library.query', self.query)
        addEvent('library.identifier', self.identifier)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for an episode, e.g. "<season title>E<nn>".

        Returns None for non-episode media; with first=True, a single title
        (or None when there are no titles), otherwise the full list.
        """
        if media.get('type') != 'show.episode':
            return

        related = fireEvent('library.related', media, single = True)

        # Episode titles are derived from the season titles.
        titles = fireEvent(
            'library.query', related['season'],
            first = False,
            include_identifier = include_identifier,
            condense = condense,
            single = True
        )

        if include_identifier:
            identifier = fireEvent('library.identifier', media, single = True)
            episode_nr = identifier.get('episode') if identifier else None

            if episode_nr:
                suffix = 'E%02d' % episode_nr
                titles = [title + suffix for title in titles]

        if not first:
            return titles

        return titles[0] if titles else None

    def identifier(self, media):
        """Return {'season': int|None, 'episode': int|None} for an episode."""
        if media.get('type') != 'show.episode':
            return

        info = media['info']

        # TODO identifier mapping: scene mappings (map_episode in media info)
        # could override the plain season/episode numbers here.
        # TODO this will need changing to support identifiers with trailing
        # 'a', 'b' characters.
        return {
            'season': tryInt(info.get('season_number'), None),
            'episode': tryInt(info.get('number'), None)
        }
|
||||
@@ -1,52 +0,0 @@
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'SeasonLibraryPlugin'
|
||||
|
||||
|
||||
class SeasonLibraryPlugin(LibraryBase):
    """Builds search titles and season identifiers for show.season media."""

    def __init__(self):
        addEvent('library.query', self.query)
        addEvent('library.identifier', self.identifier)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for a season, e.g. "<show title> S<nn>".

        Returns None for non-season media; with first=True, a single title
        (or None when there are no titles), otherwise the full list.
        """
        if media.get('type') != 'show.season':
            return

        related = fireEvent('library.related', media, single = True)

        # Season titles are derived from the show titles.
        titles = fireEvent(
            'library.query', related['show'],
            first = False,
            condense = condense,
            single = True
        )

        # TODO map_names

        if include_identifier:
            identifier = fireEvent('library.identifier', media, single = True)

            if identifier and identifier.get('season') is not None:
                suffix = ' S%02d' % identifier['season']
                titles = [title + suffix for title in titles]

        if not first:
            return titles

        return titles[0] if titles else None

    def identifier(self, media):
        """Return {'season': int|None} for a season."""
        if media.get('type') != 'show.season':
            return

        return {
            'season': tryInt(media['info']['number'], None)
        }
|
||||
@@ -1,38 +0,0 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
from qcond import QueryCondenser
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ShowLibraryPlugin'
|
||||
|
||||
|
||||
class ShowLibraryPlugin(LibraryBase):
    """Builds search titles for show media."""

    # Shared condenser used to reduce the title list to distinct search queries.
    query_condenser = QueryCondenser()

    def __init__(self):
        addEvent('library.query', self.query)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for a show.

        With condense=True the titles are reduced via QueryCondenser, falling
        back to simplified raw titles when condensing yields nothing. With
        first=True a single title (or None) is returned, otherwise the list.
        """
        if media.get('type') != 'show':
            return

        titles = media['info']['titles']

        if condense:
            # Prefer the condensed list of optimal search titles.
            condensed_titles = self.query_condenser.distinct(titles)

            if condensed_titles:
                titles = condensed_titles
            else:
                # Fallback: simplify each raw title instead.
                titles = [simplifyString(title) for title in titles]

        if not first:
            return titles

        return titles[0] if titles else None
|
||||
@@ -1,7 +0,0 @@
|
||||
from .main import ShowMatcher
|
||||
|
||||
|
||||
def autoload():
    # Plugin entry point: instantiate the show matcher multi-provider.
    return ShowMatcher()


# No user-facing settings for this plugin.
config = []
|
||||
@@ -1,72 +0,0 @@
|
||||
from couchpotato import fireEvent, CPLog, tryInt
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.media._base.matcher.base import MatcherBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(MatcherBase):
    """Shared release-matching logic for show seasons and episodes.

    Subclasses set `type` (e.g. 'show.episode') and implement
    `correctIdentifier`.
    """

    # TODO come back to this later, think this could be handled better, this is starting to get out of hand....
    # Maps CouchPotato quality identifiers to the resolution/source tokens the
    # release-name parser may emit for them (nested lists = multi-word tokens).
    quality_map = {
        'bluray_1080p': {'resolution': ['1080p'], 'source': ['bluray']},
        'bluray_720p': {'resolution': ['720p'], 'source': ['bluray']},

        'bdrip_1080p': {'resolution': ['1080p'], 'source': ['BDRip']},
        'bdrip_720p': {'resolution': ['720p'], 'source': ['BDRip']},

        'brrip_1080p': {'resolution': ['1080p'], 'source': ['BRRip']},
        'brrip_720p': {'resolution': ['720p'], 'source': ['BRRip']},

        'webdl_1080p': {'resolution': ['1080p'], 'source': ['webdl', ['web', 'dl']]},
        'webdl_720p': {'resolution': ['720p'], 'source': ['webdl', ['web', 'dl']]},
        'webdl_480p': {'resolution': ['480p'], 'source': ['webdl', ['web', 'dl']]},

        'hdtv_720p': {'resolution': ['720p'], 'source': ['hdtv']},
        'hdtv_sd': {'resolution': ['480p', None], 'source': ['hdtv']},
    }

    def __init__(self):
        super(Base, self).__init__()

        addEvent('%s.matcher.correct_identifier' % self.type, self.correctIdentifier)

    def correct(self, chain, release, media, quality):
        """Return True when the parsed release matches quality, identifier and title."""
        log.info("Checking if '%s' is valid", release['name'])
        log.info2('Release parsed as: %s', chain.info)

        if not fireEvent('matcher.correct_quality', chain, quality, self.quality_map, single = True):
            log.info('Wrong: %s, quality does not match', release['name'])
            return False

        if not fireEvent('%s.matcher.correct_identifier' % self.type, chain, media):
            log.info('Wrong: %s, identifier does not match', release['name'])
            return False

        if not fireEvent('matcher.correct_title', chain, media):
            log.info("Wrong: '%s', undetermined naming.", (' '.join(chain.info['show_name'])))
            return False

        return True

    def correctIdentifier(self, chain, media):
        """Subclass hook: validate the release identifier against the media."""
        raise NotImplementedError()

    def getChainIdentifier(self, chain):
        """Extract and normalise the season/episode identifier from a parsed chain.

        Returns a flat dict with values cast to int where possible, or None
        when the chain has no identifier or carries multiple values for a field.
        """
        if 'identifier' not in chain.info:
            return None

        identifier = self.flattenInfo(chain.info['identifier'])

        # Try cast values to integers
        for key, value in identifier.items():
            if isinstance(value, list):
                if len(value) > 1:
                    log.warning('Wrong: identifier contains multiple season or episode values, unsupported')
                    return None

                # Fix: the original indexed value[0] unconditionally, which
                # raises IndexError for an empty list; treat empty as "no value".
                value = value[0] if value else None

            identifier[key] = tryInt(value, value)

        return identifier
|
||||
@@ -1,30 +0,0 @@
|
||||
from couchpotato import fireEvent, CPLog
|
||||
from couchpotato.core.media.show.matcher.base import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Episode(Base):
    """Matcher for single-episode releases."""

    type = 'show.episode'

    def correctIdentifier(self, chain, media):
        """Check that the parsed release identifier matches the episode's own."""
        release_id = self.getChainIdentifier(chain)
        if not release_id:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are multi-part episodes
        if 'episode_from' in release_id or 'episode_to' in release_id:
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        required_id = fireEvent('library.identifier', media, single = True)

        # TODO - Support air by date episodes
        # TODO - Support episode parts

        if release_id != required_id:
            log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required_id, release_id))
            return False

        return True
|
||||
@@ -1,9 +0,0 @@
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.matcher.episode import Episode
|
||||
from couchpotato.core.media.show.matcher.season import Season
|
||||
|
||||
|
||||
class ShowMatcher(MultiProvider):
    """Multi-provider wrapper combining the season and episode matchers."""

    def getTypes(self):
        # One matcher per show media sub-type.
        return [Season, Episode]
|
||||
@@ -1,27 +0,0 @@
|
||||
from couchpotato import fireEvent, CPLog
|
||||
from couchpotato.core.media.show.matcher.base import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Season(Base):
    """Matcher for season-pack releases."""

    type = 'show.season'

    def correctIdentifier(self, chain, media):
        """Check that the parsed release identifier matches the season's own."""
        release_id = self.getChainIdentifier(chain)
        if not release_id:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are season packs
        if 'episode_from' in release_id or 'episode_to' in release_id:
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        required_id = fireEvent('library.identifier', media, single = True)

        if release_id != required_id:
            log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required_id, release_id))
            return False

        return True
|
||||
@@ -1,13 +0,0 @@
|
||||
from couchpotato.core.media._base.providers.info.base import BaseInfoProvider
|
||||
|
||||
|
||||
class ShowProvider(BaseInfoProvider):
    """Info-provider base for whole-show metadata."""
    type = 'show'
|
||||
|
||||
|
||||
class SeasonProvider(BaseInfoProvider):
    """Info-provider base for season metadata."""
    type = 'show.season'
|
||||
|
||||
|
||||
class EpisodeProvider(BaseInfoProvider):
    """Info-provider base for episode metadata."""
    type = 'show.episode'
|
||||
@@ -1,372 +0,0 @@
|
||||
from datetime import datetime
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato import Env
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toUnicode
|
||||
from couchpotato.core.helpers.variable import splitString, tryInt, tryFloat
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.show.providers.base import ShowProvider
|
||||
from tvdb_api import tvdb_exceptions
|
||||
from tvdb_api.tvdb_api import Tvdb, Show
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TheTVDb'
|
||||
|
||||
|
||||
class TheTVDb(ShowProvider):
    """Show/season/episode info provider backed by thetvdb.com via tvdb_api.

    NOTE(review): this module uses Python 2 syntax (`except X, e`, `iteritems`).
    """

    # TODO: Consider grabbing zips to put less strain on tvdb
    # TODO: Unicode stuff (check)
    # TODO: Notigy frontend on error (tvdb down at monent)
    # TODO: Expose apikey in setting so it can be changed by user

    def __init__(self):
        # Priority 1: primary info source for shows.
        addEvent('show.info', self.getShowInfo, priority = 1)
        addEvent('season.info', self.getSeasonInfo, priority = 1)
        addEvent('episode.info', self.getEpisodeInfo, priority = 1)

        self.tvdb_api_parms = {
            'apikey': self.conf('api_key'),
            'banners': True,
            'language': 'en',
            'cache': os.path.join(Env.get('cache_dir'), 'thetvdb_api'),
        }
        self._setup()

    def _setup(self):
        # Build the tvdb_api client and remember the languages it supports.
        self.tvdb = Tvdb(**self.tvdb_api_parms)
        self.valid_languages = self.tvdb.config['valid_languages']

    def getShow(self, identifier = None):
        """Fetch the raw tvdb_api Show object for a tvdb id, or None on error."""
        show = None
        try:
            log.debug('Getting show: %s', identifier)
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError), e:
            log.error('Failed to getShowInfo for show id "%s": %s', (identifier, traceback.format_exc()))
            return None

        return show

    def getShowInfo(self, identifiers = None):
        """
        Return full parsed show info (including seasons/episodes) as a dict.

        @param identifiers: dict with identifiers per provider
        @return: Full show info including season and episode info
        """

        if not identifiers or not identifiers.get('thetvdb'):
            return None

        identifier = tryInt(identifiers.get('thetvdb'))

        cache_key = 'thetvdb.cache.show.%s' % identifier
        # NOTE(review): the cache *read* is deliberately disabled here (the
        # getCache call is commented out); results are always rebuilt.
        result = None #self.getCache(cache_key)
        if result:
            return result

        show = self.getShow(identifier = identifier)
        if show:
            result = self._parseShow(show)
            self.setCache(cache_key, result)

        return result or {}

    def getSeasonInfo(self, identifiers = None, params = {}):
        """Either return a list of all seasons or a single season by number.
        identifier is the show 'id'

        NOTE(review): `params = {}` is a mutable default argument; harmless
        only because params is never mutated here.
        """
        if not identifiers or not identifiers.get('thetvdb'):
            return None

        season_number = params.get('season_number', None)
        identifier = tryInt(identifiers.get('thetvdb'))

        cache_key = 'thetvdb.cache.%s.%s' % (identifier, season_number)
        log.debug('Getting SeasonInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError), e:
            # NOTE(review): `show` is unbound when the lookup above raised, so
            # this log call itself raises NameError — confirm and fix upstream.
            log.error('Failed parsing TheTVDB SeasonInfo for "%s" id "%s": %s', (show, identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            # Single-season request: return just that season's parsed dict.
            if season_number is not None and number == season_number:
                result = self._parseSeason(show, number, season)
                self.setCache(cache_key, result)
                return result
            else:
                result.append(self._parseSeason(show, number, season))

        self.setCache(cache_key, result)
        return result

    def getEpisodeInfo(self, identifier = None, params = {}):
        """Either return a list of all episodes or a single episode.
        If episode_identifer contains an episode number to search for

        NOTE(review): `params = {}` is a mutable default argument; harmless
        only because params is never mutated here.
        """
        season_number = self.getIdentifier(params.get('season_number', None))
        episode_identifier = self.getIdentifier(params.get('episode_identifiers', None))
        identifier = self.getIdentifier(identifier)

        if not identifier and season_number is None:
            return False

        # season_identifier must contain the 'show id : season number' since there is no tvdb id
        # for season and we need a reference to both the show id and season number
        if not identifier and season_number:
            try:
                identifier, season_number = season_number.split(':')
                season_number = int(season_number)
            except: return None

        cache_key = 'thetvdb.cache.%s.%s.%s' % (identifier, episode_identifier, season_number)
        log.debug('Getting EpisodeInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError), e:
            # NOTE(review): `show` is unbound when the lookup above raised, so
            # this log call itself raises NameError — confirm and fix upstream.
            log.error('Failed parsing TheTVDB EpisodeInfo for "%s" id "%s": %s', (show, identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            if season_number is not None and number != season_number:
                continue

            for episode in season.values():
                # Requested episode found: return just its parsed dict.
                # NOTE(review): non-matching episodes are still appended to
                # `result` on the way — if the id is never matched the caller
                # gets the full list instead; confirm this is intended.
                if episode_identifier is not None and episode['id'] == toUnicode(episode_identifier):
                    result = self._parseEpisode(episode)
                    self.setCache(cache_key, result)
                    return result
                else:
                    result.append(self._parseEpisode(episode))

        self.setCache(cache_key, result)
        return result

    def getIdentifier(self, value):
        # Accept either a plain value or an identifiers dict keyed by provider.
        if type(value) is dict:
            return value.get('thetvdb')

        return value

    def _parseShow(self, show):
        """Map a tvdb_api show (or a plain search-result dict) onto the
        internal show dict; empty values are stripped from the result."""

        #
        # NOTE: show object only allows direct access via
        # show['id'], not show.get('id')
        #
        def get(name):
            # Show objects have a 'search' attribute; plain dicts do not.
            return show.get(name) if not hasattr(show, 'search') else show[name]

        ## Images
        poster = get('poster')
        backdrop = get('fanart')

        genres = splitString(get('genre'), '|')
        if get('firstaired') is not None:
            try: year = datetime.strptime(get('firstaired'), '%Y-%m-%d').year
            except: year = None
        else:
            year = None

        show_data = {
            'identifiers': {
                'thetvdb': tryInt(get('id')),
                'imdb': get('imdb_id'),
                'zap2it': get('zap2it_id'),
            },
            'type': 'show',
            'titles': [get('seriesname')],
            'images': {
                'poster': [poster] if poster else [],
                'backdrop': [backdrop] if backdrop else [],
                'poster_original': [],
                'backdrop_original': [],
            },
            'year': year,
            'genres': genres,
            'network': get('network'),
            'plot': get('overview'),
            'networkid': get('networkid'),
            'air_day': (get('airs_dayofweek') or '').lower(),
            'air_time': self.parseTime(get('airs_time')),
            'firstaired': get('firstaired'),
            'runtime': tryInt(get('runtime')),
            'contentrating': get('contentrating'),
            'rating': {},
            'actors': splitString(get('actors'), '|'),
            'status': get('status'),
            'language': get('language'),
        }

        if tryFloat(get('rating')):
            # NOTE(review): the trailing comma wraps the list in a tuple —
            # the stored value is ([rating, votes],) — almost certainly a bug.
            show_data['rating']['thetvdb'] = [tryFloat(get('rating')), tryInt(get('ratingcount'))],

        # Strip falsy values (Python 2 iteritems).
        show_data = dict((k, v) for k, v in show_data.iteritems() if v)

        # Only load season info when available
        if type(show) == Show:

            # Parse season and episode data
            show_data['seasons'] = {}

            for season_nr in show:
                season = self._parseSeason(show, season_nr, show[season_nr])
                season['episodes'] = {}

                for episode_nr in show[season_nr]:
                    season['episodes'][episode_nr] = self._parseEpisode(show[season_nr][episode_nr])

                show_data['seasons'][season_nr] = season

        # Add alternative titles
        # try:
        #     raw = self.tvdb.search(show['seriesname'])
        #     if raw:
        #         for show_info in raw:
        #             print show_info
        #             if show_info['id'] == show_data['id'] and show_info.get('aliasnames', None):
        #                 for alt_name in show_info['aliasnames'].split('|'):
        #                     show_data['titles'].append(toUnicode(alt_name))
        # except (tvdb_exceptions.tvdb_error, IOError), e:
        #     log.error('Failed searching TheTVDB for "%s": %s', (show['seriesname'], traceback.format_exc()))

        return show_data

    def _parseSeason(self, show, number, season):
        """Map one season onto the internal season dict.

        `season` itself contains no data used here; the poster is picked from
        the show's banner data (best rating * votes wins). Any failure while
        selecting a poster is swallowed and leaves the poster list empty.
        """

        poster = []
        try:
            temp_poster = {}
            for id, data in show.data['_banners']['season']['season'].items():
                if data.get('season') == str(number) and data.get('language') == self.tvdb_api_parms['language']:
                    temp_poster[tryFloat(data.get('rating')) * tryInt(data.get('ratingcount'))] = data.get('_bannerpath')
                    #break
            poster.append(temp_poster[sorted(temp_poster, reverse = True)[0]])
        except:
            pass

        season_data = {
            'identifiers': {
                # Fall back to the first episode's seasonid when the show dict
                # carries no id of its own.
                'thetvdb': show['id'] if show.get('id') else show[number][1]['seasonid']
            },
            'number': tryInt(number),
            'images': {
                'poster': poster,
            },
        }

        # Strip falsy values (Python 2 iteritems).
        season_data = dict((k, v) for k, v in season_data.iteritems() if v)
        return season_data

    def _parseEpisode(self, episode):
        """Map a tvdb_api episode record onto the internal episode dict.

        Source keys used here: 'episodenumber', 'absolute_number', 'id',
        'episodename', 'overview', 'firstaired', 'filename' (thumbnail URL),
        'imdb_id', 'language'. Empty values are stripped from the result.
        """

        def get(name, default = None):
            return episode.get(name, default)

        poster = get('filename', [])

        episode_data = {
            'number': tryInt(get('episodenumber')),
            'absolute_number': tryInt(get('absolute_number')),
            'identifiers': {
                'thetvdb': tryInt(episode['id'])
            },
            'type': 'episode',
            'titles': [get('episodename')] if get('episodename') else [],
            'images': {
                'poster': [poster] if poster else [],
            },
            'released': get('firstaired'),
            'plot': get('overview'),
            'firstaired': get('firstaired'),
            'language': get('language'),
        }

        if get('imdb_id'):
            episode_data['identifiers']['imdb'] = get('imdb_id')

        # Strip falsy values (Python 2 iteritems).
        episode_data = dict((k, v) for k, v in episode_data.iteritems() if v)
        return episode_data

    def parseTime(self, time):
        # Placeholder: air times are currently passed through unparsed.
        return time

    def isDisabled(self):
        # Provider is unusable without an API key.
        if self.conf('api_key') == '':
            log.error('No API key provided.')
            return True
        else:
            return False
|
||||
|
||||
|
||||
# Settings block: a hidden provider entry exposing only the TheTVDB API key.
# NOTE(review): the group 'name' is 'tmdb', which looks like a copy/paste slip
# from the TMDB provider — confirm before renaming; other code may key on it.
config = [{
    'name': 'thetvdb',
    'groups': [
        {
            'tab': 'providers',
            'name': 'tmdb',
            'label': 'TheTVDB',
            'hidden': True,
            'description': 'Used for all calls to TheTVDB.',
            'options': [
                {
                    'name': 'api_key',
                    'default': '7966C02F860586D2',
                    'label': 'Api Key',
                },
            ],
        },
    ],
}]
|
||||
@@ -1,86 +0,0 @@
|
||||
import urllib
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.show.providers.base import ShowProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Trakt'
|
||||
|
||||
|
||||
class Trakt(ShowProvider):
    """Show search provider backed by the (v1-style) Trakt.tv JSON API.

    NOTE(review): Python 2 code (`urllib.urlencode`, `iteritems`) with an
    embedded API key.
    """

    api_key = 'c043de5ada9d180028c10229d2a3ea5b'
    # '%%s' survives the first interpolation, leaving '%s' for the action name.
    base_url = 'http://api.trakt.tv/%%s.json/%s' % api_key

    def __init__(self):
        # Priority 1: primary search backend for shows.
        addEvent('info.search', self.search, priority = 1)
        addEvent('show.search', self.search, priority = 1)

    def search(self, q, limit = 12):
        """Search Trakt for shows matching `q`.

        Returns a (cached) list of parsed show dicts, [] when nothing was
        found, or False when the provider is disabled.
        """
        if self.isDisabled():
            return False

        # Check for cached result
        cache_key = 'trakt.cache.search.%s.%s' % (q, limit)
        results = self.getCache(cache_key) or []

        if results:
            return results

        # Search
        log.debug('Searching for show: "%s"', q)
        response = self._request('search/shows', query=q, limit=limit)

        if not response:
            return []

        # Parse search results
        for show in response:
            results.append(self._parseShow(show))

        log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])

        self.setCache(cache_key, results)
        return results

    def _request(self, action, **kwargs):
        # Build '<base>/<action>.json/<api_key>?<query>' and fetch it as JSON.
        url = self.base_url % action

        if kwargs:
            url += '?' + urllib.urlencode(kwargs)

        return self.getJsonData(url)

    def _parseShow(self, show):
        """Map a raw Trakt search entry onto the internal show dict;
        empty values are stripped from the result."""
        # Images
        images = show.get('images', {})

        poster = images.get('poster')
        backdrop = images.get('backdrop')

        # Rating
        # NOTE(review): when 'ratings' lacks 'percentage', rating is None and
        # the division below raises TypeError — confirm the API always sends it.
        rating = show.get('ratings', {}).get('percentage')

        # Build show dict
        show_data = {
            'identifiers': {
                'thetvdb': show.get('tvdb_id'),
                'imdb': show.get('imdb_id'),
                'tvrage': show.get('tvrage_id'),
            },
            'type': 'show',
            'titles': [show.get('title')],
            'images': {
                'poster': [poster] if poster else [],
                'backdrop': [backdrop] if backdrop else [],
                'poster_original': [],
                'backdrop_original': [],
            },
            'year': show.get('year'),
            'rating': {
                # Trakt percentage (0-100) scaled to 0-10.
                'trakt': float(rating) / 10
            },
        }

        # Strip falsy values (Python 2 iteritems).
        return dict((k, v) for k, v in show_data.iteritems() if v)
|
||||
@@ -1,216 +0,0 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||
from couchpotato.core.media.show.providers.base import ShowProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Xem'
|
||||
|
||||
|
||||
class Xem(ShowProvider):
|
||||
'''
|
||||
Mapping Information
|
||||
===================
|
||||
|
||||
Single
|
||||
------
|
||||
You will need the id / identifier of the show e.g. tvdb-id for American Dad! is 73141
|
||||
the origin is the name of the site/entity the episode, season (and/or absolute) numbers are based on
|
||||
|
||||
http://thexem.de/map/single?id=&origin=&episode=&season=&absolute=
|
||||
|
||||
episode, season and absolute are all optional but it wont work if you don't provide either episode and season OR absolute in
|
||||
addition you can provide destination as the name of the wished destination, if not provided it will output all available
|
||||
|
||||
When a destination has two or more addresses another entry will be added as _ ... for now the second address gets the index "2"
|
||||
(the first index is omitted) and so on
|
||||
|
||||
http://thexem.de/map/single?id=7529&origin=anidb&season=1&episode=2&destination=trakt
|
||||
{
|
||||
"result":"success",
|
||||
"data":{
|
||||
"trakt": {"season":1,"episode":3,"absolute":3},
|
||||
"trakt_2":{"season":1,"episode":4,"absolute":4}
|
||||
},
|
||||
"message":"single mapping for 7529 on anidb."
|
||||
}
|
||||
|
||||
All
|
||||
---
|
||||
Basically same as "single" just a little easier
|
||||
The origin address is added into the output too!!
|
||||
|
||||
http://thexem.de/map/all?id=7529&origin=anidb
|
||||
|
||||
All Names
|
||||
---------
|
||||
Get all names xem has to offer
|
||||
non optional params: origin(an entity string like 'tvdb')
|
||||
optional params: season, language
|
||||
- season: a season number or a list like: 1,3,5 or a compare operator like ne,gt,ge,lt,le,eq and a season number. default would
|
||||
return all
|
||||
- language: a language string like 'us' or 'jp' default is all
|
||||
- defaultNames: 1(yes) or 0(no) should the default names be added to the list ? default is 0(no)
|
||||
|
||||
http://thexem.de/map/allNames?origin=tvdb&season=le1
|
||||
|
||||
{
|
||||
"result": "success",
|
||||
"data": {
|
||||
"248812": ["Dont Trust the Bitch in Apartment 23", "Don't Trust the Bitch in Apartment 23"],
|
||||
"257571": ["Nazo no Kanojo X"],
|
||||
"257875": ["Lupin III - Mine Fujiko to Iu Onna", "Lupin III Fujiko to Iu Onna", "Lupin the Third - Mine Fujiko to Iu Onna"]
|
||||
},
|
||||
"message": ""
|
||||
}
|
||||
'''
|
||||
|
||||
def __init__(self):
    """Register this plugin as a show/episode info source and build the XEM API urls."""
    addEvent('show.info', self.getShowInfo, priority = 5)
    addEvent('episode.info', self.getEpisodeInfo, priority = 5)

    base_url = "http://thexem.de"
    self.config = {
        'base_url': base_url,
        'url_single': u"%s/map/single?" % base_url,
        'url_all': u"%s/map/all?" % base_url,
        'url_names': u"%s/map/names?" % base_url,
        'url_all_names': u"%s/map/allNames?" % base_url,
    }
|
||||
|
||||
def getShowInfo(self, identifiers = None):
    """Fetch (and cache) the XEM season/episode and name mappings for a show.

    :param identifiers: dict of provider ids; only the 'thetvdb' key is used here.
    :return: dict with a 'seasons' mapping (and 'title_map' when names were
             returned), or {} when disabled or no usable identifier was given.
    """
    if self.isDisabled():
        return {}

    # BUG FIX: the default for `identifiers` is None, so calling .get on it
    # directly raised AttributeError; guard before the lookup.
    identifier = identifiers.get('thetvdb') if identifiers else None

    if not identifier:
        return {}

    cache_key = 'xem.cache.%s' % identifier
    log.debug('Getting showInfo: %s', cache_key)

    # Serve from cache when a previous lookup already populated it
    result = self.getCache(cache_key) or {}
    if result:
        return result

    result['seasons'] = {}

    # Create season/episode and absolute mappings
    url = self.config['url_all'] + "id=%s&origin=tvdb" % tryUrlencode(identifier)
    response = self.getJsonData(url)

    if response and response.get('result') == 'success':
        data = response.get('data', None)
        self.parseMaps(result, data)

    # Create name alias mappings
    url = self.config['url_names'] + "id=%s&origin=tvdb" % tryUrlencode(identifier)
    response = self.getJsonData(url)

    if response and response.get('result') == 'success':
        data = response.get('data', None)
        self.parseNames(result, data)

    self.setCache(cache_key, result)
    return result
|
||||
|
||||
def getEpisodeInfo(self, identifiers = None, params = None):
    """Look up the XEM mapping info for a single episode.

    :param identifiers: provider id dict, forwarded to getShowInfo.
    :param params: dict with 'season_number' and 'episode_number' keys.
    :return: the episode info dict, or False when it cannot be resolved.
    """
    # BUG FIX: the default was the shared mutable `params = {}`.
    params = params or {}

    episode_num = params.get('episode_number')
    if episode_num is None:
        return False

    season_num = params.get('season_number')
    if season_num is None:
        return False

    result = self.getShowInfo(identifiers)
    if not result:
        return False

    # Find season
    season = result['seasons'].get(season_num)
    if season is None:
        return False

    # Find episode. A season created from name data alone has no 'episodes'
    # key, which previously raised KeyError here; treat it as "not found".
    episodes = season.get('episodes') or {}
    if episode_num not in episodes:
        return False

    return episodes[episode_num]
|
||||
|
||||
def parseMaps(self, result, data, master = 'tvdb'):
    """Fold the xem "all" map list into result['seasons'].

    Each entry of `data` describes one episode across naming schemes. The
    `master` scheme's position is popped out and used as the season/episode
    key; the remaining schemes are stored under 'episode_map', e.g.::

        result['seasons'][1]['episodes'][1]['episode_map']['scene']['season']

    Note: the entries of `data` are mutated (the master key is popped).
    """
    if not isinstance(data, list):
        return

    seasons = result['seasons']

    for entry in data:
        master_position = entry.pop(master, None)
        if master_position is None:
            # No master origin to anchor this mapping to
            continue

        season = seasons.setdefault(master_position['season'], {})
        episodes = season.setdefault('episodes', {})

        episode = episodes.setdefault(master_position['episode'], {})
        episode['episode_map'] = entry
|
||||
|
||||
def parseNames(self, result, data):
    """Fold the xem name payload into result.

    The 'all' key becomes result['title_map']; every other key is a season
    number (as a string) whose titles are stored on that season.

    Note: `data` is mutated (the 'all' key is popped).
    """
    # BUG FIX: the API can return no data (callers pass response.get('data',
    # None) straight through); parseMaps guards with isinstance, this didn't.
    if not isinstance(data, dict):
        return

    result['title_map'] = data.pop('all', None)

    for season_number, title_map in data.items():
        season_number = int(season_number)

        # Create season info
        if season_number not in result['seasons']:
            result['seasons'][season_number] = {}

        result['seasons'][season_number]['title_map'] = title_map
|
||||
|
||||
def isDisabled(self):
    """Return True when XEM lookups should be skipped.

    When the module is executed directly (``__main__``) it is always treated
    as enabled — presumably for manual testing; confirm before relying on it.
    """
    if __name__ == '__main__':
        return False
    return not self.conf('enabled')
|
||||
|
||||
|
||||
# Plugin configuration picked up by the loader. The group is hidden from the
# settings UI ('hidden': True) but still provides the 'enabled' switch that
# isDisabled() reads to turn all TheXem calls on or off.
config = [{
    'name': 'xem',
    'groups': [
        {
            'tab': 'providers',
            'name': 'xem',
            'label': 'TheXem',
            'hidden': True,
            'description': 'Used for all calls to TheXem.',
            'options': [
                {
                    'name': 'enabled',
                    'default': True,
                    'label': 'Enabled',
                },
            ],
        },
    ],
}]
|
||||
@@ -1,51 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media._base.providers.nzb.binsearch import Base
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.environment import Env
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'BinSearch'
|
||||
|
||||
|
||||
class BinSearch(MultiProvider):
    """Exposes the season and episode BinSearch providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """BinSearch season-pack searches."""

    def buildUrl(self, media, quality):
        # Assemble the BinSearch advanced-search query string
        query = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
            'm': 'n',
            'max': 400,
            'adv_age': Env.setting('retention', 'nzb'),
            'adv_sort': 'date',
            'adv_col': 'on',
            'adv_nfo': 'on',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
        })
        return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """BinSearch single-episode searches (same query shape as Season)."""

    def buildUrl(self, media, quality):
        query = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
            'm': 'n',
            'max': 400,
            'adv_age': Env.setting('retention', 'nzb'),
            'adv_sort': 'date',
            'adv_col': 'on',
            'adv_nfo': 'on',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
        })
        return query
|
||||
@@ -1,49 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media._base.providers.nzb.newznab import Base
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Newznab'
|
||||
|
||||
|
||||
class Newznab(MultiProvider):
    """Exposes the season and episode Newznab providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """Newznab season searches via the 'tvsearch' API function."""

    def buildUrl(self, media, host):
        related = fireEvent('library.related', media, single = True)
        identifier = fireEvent('library.identifier', media, single = True)

        # 'extended' requests extra result attributes from the indexer
        query = tryUrlencode({
            't': 'tvsearch',
            'apikey': host['api_key'],
            'q': related['show']['title'],
            'season': identifier['season'],
            'extended': 1
        })
        return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """Newznab single-episode searches; like Season plus an 'ep' filter."""

    def buildUrl(self, media, host):
        related = fireEvent('library.related', media, single = True)
        identifier = fireEvent('library.identifier', media, single = True)
        query = tryUrlencode({
            't': 'tvsearch',
            'apikey': host['api_key'],
            'q': related['show']['title'],
            'season': identifier['season'],
            'ep': identifier['episode'],
            'extended': 1
        })

        return query
|
||||
@@ -1,52 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.nzb.nzbclub import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NZBClub'
|
||||
|
||||
|
||||
class NZBClub(MultiProvider):
    """Exposes the season and episode NZBClub providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """NZBClub season-pack searches."""

    def buildUrl(self, media):

        # Search-term part of the query
        q = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
        })

        # Fixed result/filter options (site-specific flags; semantics not
        # documented here — confirm against the NZBClub search form)
        query = tryUrlencode({
            'ig': 1,
            'rpp': 200,
            'st': 5,
            'sp': 1,
            'ns': 1,
        })
        return '%s&%s' % (q, query)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """NZBClub single-episode searches (same query shape as Season)."""

    def buildUrl(self, media):

        q = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
        })

        query = tryUrlencode({
            'ig': 1,
            'rpp': 200,
            'st': 5,
            'sp': 1,
            'ns': 1,
        })
        return '%s&%s' % (q, query)
|
||||
@@ -1,51 +0,0 @@
|
||||
from couchpotato import Env
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.nzb.nzbindex import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NzbIndex'
|
||||
|
||||
|
||||
class NzbIndex(MultiProvider):
    """Exposes the season and episode NzbIndex providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """NzbIndex season-pack searches."""

    def buildUrl(self, media, quality):
        # Filter by the configured usenet retention and the profile's size range
        query = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
            'age': Env.setting('retention', 'nzb'),
            'sort': 'agedesc',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
            'rating': 1,
            'max': 250,
            'more': 1,
            'complete': 1,
        })
        return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """NzbIndex single-episode searches (same query shape as Season)."""

    def buildUrl(self, media, quality):
        query = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
            'age': Env.setting('retention', 'nzb'),
            'sort': 'agedesc',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
            'rating': 1,
            'max': 250,
            'more': 1,
            'complete': 1,
        })
        return query
|
||||
@@ -1,36 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.bithdtv import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'BiTHDTV'
|
||||
|
||||
|
||||
class BiTHDTV(MultiProvider):
    """Exposes the season and episode BiT-HDTV providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """BiT-HDTV season searches."""

    def buildUrl(self, media):
        query = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'cat': 12 # Season cat
        })
        return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """BiT-HDTV single-episode searches."""

    def buildUrl(self, media):
        query = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'cat': 10 # Episode cat
        })
        return query
|
||||
@@ -1,41 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.bitsoup import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Bitsoup'
|
||||
|
||||
|
||||
class Bitsoup(MultiProvider):
    """Exposes the season and episode Bitsoup providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """Bitsoup season-pack searches."""

    # For season bundles, bitsoup currently only has one category
    def buildUrl(self, media, quality):
        query = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'cat': 45 # TV-Packs Category
        })
        return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """Bitsoup single-episode searches."""

    # Site category ids mapped to the quality identifiers they serve
    cat_ids = [
        ([42], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
        ([49], ['hdtv_sd', 'webdl_480p'])
    ]
    # Fallback category when no quality matches
    cat_backup_id = 0

    def buildUrl(self, media, quality):
        query = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'cat': self.getCatId(quality['identifier'])[0],
        })
        return query
|
||||
@@ -1,37 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.iptorrents import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'IPTorrents'
|
||||
|
||||
|
||||
class IPTorrents(MultiProvider):
    """Exposes the season and episode IPTorrents providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """IPTorrents season searches (category mapping only; url logic lives in Base)."""

    # TODO come back to this later, a better quality system needs to be created
    cat_ids = [
        ([65], [
            'bluray_1080p', 'bluray_720p',
            'bdrip_1080p', 'bdrip_720p',
            'brrip_1080p', 'brrip_720p',
            'webdl_1080p', 'webdl_720p', 'webdl_480p',
            'hdtv_720p', 'hdtv_sd'
        ]),
    ]
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """IPTorrents single-episode searches (category mapping only)."""

    # TODO come back to this later, a better quality system needs to be created
    cat_ids = [
        ([5], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([4, 78, 79], ['hdtv_sd'])
    ]
|
||||
@@ -1,27 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.publichd import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'PublicHD'
|
||||
|
||||
|
||||
class PublicHD(MultiProvider):
    """Exposes the season and episode PublicHD providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """PublicHD season searches; the query is just the plain search string."""

    def buildUrl(self, media):
        return fireEvent('media.search_query', media, single = True)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """PublicHD single-episode searches; the query is just the plain search string."""

    def buildUrl(self, media):
        return fireEvent('media.search_query', media, single = True)
|
||||
@@ -1,60 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.sceneaccess import Base
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'SceneAccess'
|
||||
|
||||
|
||||
class SceneAccess(MultiProvider):
    """Exposes the season and episode SceneAccess providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """SceneAccess season searches via the 'archive' url template."""

    cat_ids = [
        ([26], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
    ]

    def buildUrl(self, media, quality):
        # The archive url template takes the same category id twice
        url = self.urls['archive'] % (
            self.getCatId(quality['identifier'])[0],
            self.getCatId(quality['identifier'])[0]
        )

        arguments = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'method': 3,
        })
        query = "%s&%s" % (url, arguments)

        return query
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """SceneAccess single-episode searches via the 'search' url template."""

    cat_ids = [
        ([27], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([17, 11], ['hdtv_sd'])
    ]

    def buildUrl(self, media, quality):
        # The search url template takes the same category id twice
        url = self.urls['search'] % (
            self.getCatId(quality['identifier'])[0],
            self.getCatId(quality['identifier'])[0]
        )

        arguments = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'method': 3,
        })
        query = "%s&%s" % (url, arguments)

        return query
|
||||
@@ -1,46 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.thepiratebay import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ThePirateBay'
|
||||
|
||||
|
||||
class ThePirateBay(MultiProvider):
    """Exposes the season and episode ThePirateBay providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """ThePirateBay season searches."""

    cat_ids = [
        ([208], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([205], ['hdtv_sd'])
    ]

    def buildUrl(self, media, page, cats):
        # Returns (quoted query, page number, comma-separated category ids)
        return (
            tryUrlencode('"%s"' % fireEvent('media.search_query', media, single = True)),
            page,
            ','.join(str(x) for x in cats)
        )
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """ThePirateBay single-episode searches (same url parts as Season)."""

    cat_ids = [
        ([208], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([205], ['hdtv_sd'])
    ]

    def buildUrl(self, media, page, cats):
        return (
            tryUrlencode('"%s"' % fireEvent('media.search_query', media, single = True)),
            page,
            ','.join(str(x) for x in cats)
        )
|
||||
@@ -1,34 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentday import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentDay'
|
||||
|
||||
|
||||
class TorrentDay(MultiProvider):
    """Exposes the season and episode TorrentDay providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """TorrentDay season searches."""

    cat_ids = [
        ([14], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
    ]

    def buildUrl(self, media):
        # TorrentDay only needs the plain search query string
        return fireEvent('media.search_query', media, single = True)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """TorrentDay single-episode searches."""

    # Site category ids mapped to the quality identifiers they serve.
    # BUG FIX: the SD entry was written as the 4-tuple
    # ([2], [24], [26], ['hdtv_sd']) instead of the ([ids], [qualities])
    # pair used by every sibling provider, so getCatId would unpack it wrong.
    cat_ids = [
        ([7], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([2, 24, 26], ['hdtv_sd'])
    ]

    def buildUrl(self, media):
        # TorrentDay only needs the plain search query string
        return fireEvent('media.search_query', media, single = True)
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
from couchpotato import fireEvent
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentleech import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentLeech'
|
||||
|
||||
|
||||
class TorrentLeech(MultiProvider):
    """Exposes the season and episode TorrentLeech providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """TorrentLeech season searches."""

    cat_ids = [
        ([27], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
    ]

    def buildUrl(self, media, quality):
        # Returns (encoded query, category id)
        return (
            tryUrlencode(fireEvent('media.search_query', media, single = True)),
            self.getCatId(quality['identifier'])[0]
        )
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """TorrentLeech single-episode searches."""

    cat_ids = [
        ([32], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([26], ['hdtv_sd'])
    ]

    def buildUrl(self, media, quality):
        # Returns (encoded query, category id)
        return (
            tryUrlencode(fireEvent('media.search_query', media, single = True)),
            self.getCatId(quality['identifier'])[0]
        )
|
||||
@@ -1,38 +0,0 @@
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentpotato import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentPotato'
|
||||
|
||||
|
||||
class TorrentPotato(MultiProvider):
    """Exposes the season and episode TorrentPotato providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """TorrentPotato season searches against a user-configured host."""

    def buildUrl(self, media, host):
        arguments = tryUrlencode({
            'user': host['name'],
            'passkey': host['pass_key'],
            'search': fireEvent('media.search_query', media, single = True)
        })
        return '%s?%s' % (host['host'], arguments)
|
||||
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """TorrentPotato single-episode searches (same url shape as Season)."""

    def buildUrl(self, media, host):
        arguments = tryUrlencode({
            'user': host['name'],
            'passkey': host['pass_key'],
            'search': fireEvent('media.search_query', media, single = True)
        })
        return '%s?%s' % (host['host'], arguments)
|
||||
@@ -1,52 +0,0 @@
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import MultiProvider
|
||||
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider
|
||||
from couchpotato.core.media._base.providers.torrent.torrentshack import Base
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'TorrentShack'
|
||||
|
||||
|
||||
class TorrentShack(MultiProvider):
    """Exposes the season and episode TorrentShack providers as one plugin."""

    def getTypes(self):
        # Concrete provider classes this multi-provider wraps
        return [Season, Episode]
|
||||
|
||||
|
||||
class Season(SeasonProvider, Base):
    """TorrentShack season searches."""

    # TorrentShack tv season search categories
    # TV-SD Pack - 980
    # TV-HD Pack - 981
    # Full Blu-ray - 970
    cat_ids = [
        ([980], ['hdtv_sd']),
        ([981], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
        ([970], ['bluray_1080p', 'bluray_720p']),
    ]
    cat_backup_id = 980

    def buildUrl(self, media, quality):
        # (encoded query, category id, scene-only flag)
        query = (tryUrlencode(fireEvent('media.search_query', media, single = True)),
                 self.getCatId(quality['identifier'])[0],
                 self.getSceneOnly())
        return query
|
||||
|
||||
class Episode(EpisodeProvider, Base):
    """TorrentShack single-episode searches."""

    # TorrentShack tv episode search categories
    # TV/x264-HD - 600
    # TV/x264-SD - 620
    # TV/DVDrip - 700
    cat_ids = [
        ([600], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
        ([620], ['hdtv_sd'])
    ]
    cat_backup_id = 620

    def buildUrl(self, media, quality):
        # (encoded query, category id, scene-only flag)
        query = (tryUrlencode(fireEvent('media.search_query', media, single = True)),
                 self.getCatId(quality['identifier'])[0],
                 self.getSceneOnly())
        return query
|
||||
@@ -1,152 +0,0 @@
|
||||
from couchpotato import fireEvent, get_db, Env
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEventAsync
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.searcher.base import SearcherBase
|
||||
from couchpotato.core.media._base.searcher.main import SearchSetupError
|
||||
from couchpotato.core.media.show import ShowTypeBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'EpisodeSearcher'
|
||||
|
||||
|
||||
class EpisodeSearcher(SearcherBase, ShowTypeBase):
    """Searches releases for single episodes and tries to download the best match."""

    type = 'episode'

    # Guards the API view's 'success' flag; not updated anywhere visible here
    in_progress = False

    def __init__(self):
        super(EpisodeSearcher, self).__init__()

        addEvent('%s.searcher.all' % self.getType(), self.searchAll)
        addEvent('%s.searcher.single' % self.getType(), self.single)
        addEvent('searcher.correct_release', self.correctRelease)

        addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
            'desc': 'Starts a full search for all wanted shows',
        })

        addApiView('%s.searcher.single' % self.getType(), self.singleView)

    def searchAllView(self, **kwargs):
        # Kick off the full search asynchronously; success only means no
        # search was already in progress when this was called.
        fireEventAsync('%s.searcher.all' % self.getType(), manual = True)

        return {
            'success': not self.in_progress
        }

    def searchAll(self, manual = False):
        # Not implemented yet
        pass

    def singleView(self, media_id, **kwargs):
        # Run a synchronous single search for one media document
        db = get_db()
        media = db.get('id', media_id)

        return {
            'result': fireEvent('%s.searcher.single' % self.getType(), media, single = True)
        }

    def single(self, media, profile = None, quality_order = None, search_protocols = None, manual = False):
        """Search for one episode, walking the profile qualities in order.

        Stops as soon as a download was started, a better quality is already
        snatched/done, or the application is shutting down.
        """
        db = get_db()

        related = fireEvent('library.related', media, single = True)

        # TODO search_protocols, profile, quality_order can be moved to a base method
        # Find out search type
        try:
            if not search_protocols:
                search_protocols = fireEvent('searcher.protocols', single = True)
        except SearchSetupError:
            return

        if not profile and related['show']['profile_id']:
            profile = db.get('id', related['show']['profile_id'])

        if not quality_order:
            quality_order = fireEvent('quality.order', single = True)

        # TODO: check episode status
        # TODO: check air date
        #if not self.conf('always_search') and not self.couldBeReleased(quality_type['quality']['identifier'] in pre_releases, release_dates, movie['library']['year']):
        #    too_early_to_search.append(quality_type['quality']['identifier'])
        #    return

        ret = False
        has_better_quality = None
        found_releases = []
        too_early_to_search = []

        releases = fireEvent('release.for_media', media['_id'], single = True)
        query = fireEvent('library.query', media, condense = False, single = True)

        # BUG FIX: `index` was initialised to 0 and never advanced, so every
        # quality read finish[0]/wait_for[0]; enumerate keeps them in step.
        for index, q_identifier in enumerate(profile.get('qualities')):
            quality_custom = {
                'quality': q_identifier,
                'finish': profile['finish'][index],
                'wait_for': profile['wait_for'][index],
                '3d': profile['3d'][index] if profile.get('3d') else False
            }

            has_better_quality = 0

            # See if better quality is available
            for release in releases:
                if quality_order.index(release['quality']) <= quality_order.index(q_identifier) and release['status'] not in ['available', 'ignored', 'failed']:
                    has_better_quality += 1

            # Don't search for quality lower then already available.
            # BUG FIX: was `has_better_quality is 0` — identity comparison
            # against an int literal; use equality.
            if has_better_quality == 0:

                log.info('Searching for %s in %s', (query, q_identifier))
                quality = fireEvent('quality.single', identifier = q_identifier, single = True)
                quality['custom'] = quality_custom

                results = fireEvent('searcher.search', search_protocols, media, quality, single = True)
                if len(results) == 0:
                    log.debug('Nothing found for %s in %s', (query, q_identifier))

                # Add them to this movie releases list
                found_releases += fireEvent('release.create_from_search', results, media, quality, single = True)

                # Try find a valid result and download it
                if fireEvent('release.try_download_result', results, media, quality, single = True):
                    ret = True

                # Remove releases that aren't found anymore
                for release in releases:
                    if release.get('status') == 'available' and release.get('identifier') not in found_releases:
                        fireEvent('release.delete', release.get('_id'), single = True)
            else:
                log.info('Better quality (%s) already available or snatched for %s', (q_identifier, query))
                fireEvent('media.restatus', media['_id'])
                break

            # Break if CP wants to shut down
            if self.shuttingDown() or ret:
                break

        if len(too_early_to_search) > 0:
            log.info2('Too early to search for %s, %s', (too_early_to_search, query))

    def correctRelease(self, release = None, media = None, quality = None, **kwargs):
        """Score a release for this episode; returns the match weight, or False to skip it."""
        if media.get('type') != 'show.episode':
            return

        retention = Env.setting('retention', section = 'nzb')

        # Usenet-only check: torrents carry a 'seeders' field and skip this
        if release.get('seeders') is None and 0 < retention < release.get('age', 0):
            log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (release['age'], retention, release['name']))
            return False

        # Check for required and ignored words
        if not fireEvent('searcher.correct_words', release['name'], media, single = True):
            return False

        # TODO Matching is quite costly, maybe we should be caching release matches somehow? (also look at caper optimizations)
        match = fireEvent('matcher.match', release, media, quality, single = True)
        if match:
            return match.weight

        return False
|
||||
@@ -1,172 +0,0 @@
|
||||
from couchpotato import get_db, Env
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.searcher.base import SearcherBase
|
||||
from couchpotato.core.media.movie.searcher import SearchSetupError
|
||||
from couchpotato.core.media.show import ShowTypeBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'SeasonSearcher'
|
||||
|
||||
|
||||
class SeasonSearcher(SearcherBase, ShowTypeBase):
|
||||
type = 'season'
|
||||
|
||||
in_progress = False
|
||||
|
||||
def __init__(self):
    # Wire up the season-level search events and the API endpoint
    super(SeasonSearcher, self).__init__()

    addEvent('%s.searcher.all' % self.getType(), self.searchAll)
    addEvent('%s.searcher.single' % self.getType(), self.single)
    addEvent('searcher.correct_release', self.correctRelease)

    addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
        'desc': 'Starts a full search for all wanted seasons',
    })
|
||||
|
||||
def searchAllView(self, **kwargs):
    # Fire the full-season search asynchronously; success only means no
    # search was already in progress when this was called.
    fireEventAsync('%s.searcher.all' % self.getType(), manual = True)

    return {
        'success': not self.in_progress
    }
|
||||
|
||||
def searchAll(self, manual = False):
    # Not implemented yet
    pass
|
||||
|
||||
def single(self, media, profile = None, quality_order = None, search_protocols = None, manual = False):
|
||||
db = get_db()
|
||||
|
||||
related = fireEvent('library.related', media, single = True)
|
||||
|
||||
# TODO search_protocols, profile, quality_order can be moved to a base method
|
||||
# Find out search type
|
||||
try:
|
||||
if not search_protocols:
|
||||
search_protocols = fireEvent('searcher.protocols', single = True)
|
||||
except SearchSetupError:
|
||||
return
|
||||
|
||||
if not profile and related['show']['profile_id']:
|
||||
profile = db.get('id', related['show']['profile_id'])
|
||||
|
||||
if not quality_order:
|
||||
quality_order = fireEvent('quality.order', single = True)
|
||||
|
||||
# Find 'active' episodes
|
||||
episodes = related['episodes']
|
||||
episodes_active = []
|
||||
|
||||
for episode in episodes:
|
||||
if episode.get('status') != 'active':
|
||||
continue
|
||||
|
||||
episodes_active.append(episode)
|
||||
|
||||
if len(episodes_active) == len(episodes):
|
||||
# All episodes are 'active', try and search for full season
|
||||
if self.search(media, profile, quality_order, search_protocols):
|
||||
# Success, end season search
|
||||
return True
|
||||
else:
|
||||
log.info('Unable to find season pack, searching for individual episodes...')
|
||||
|
||||
# Search for each episode individually
|
||||
for episode in episodes_active:
|
||||
fireEvent('show.episode.searcher.single', episode, profile, quality_order, search_protocols, manual)
|
||||
|
||||
# TODO (testing) only grab one episode
|
||||
return True
|
||||
|
||||
return True
|
||||
|
||||
def search(self, media, profile, quality_order, search_protocols):
|
||||
# TODO: check episode status
|
||||
# TODO: check air date
|
||||
#if not self.conf('always_search') and not self.couldBeReleased(quality_type['quality']['identifier'] in pre_releases, release_dates, movie['library']['year']):
|
||||
# too_early_to_search.append(quality_type['quality']['identifier'])
|
||||
# return
|
||||
|
||||
ret = False
|
||||
has_better_quality = None
|
||||
found_releases = []
|
||||
too_early_to_search = []
|
||||
|
||||
releases = fireEvent('release.for_media', media['_id'], single = True)
|
||||
query = fireEvent('library.query', media, condense = False, single = True)
|
||||
|
||||
index = 0
|
||||
for q_identifier in profile.get('qualities'):
|
||||
quality_custom = {
|
||||
'quality': q_identifier,
|
||||
'finish': profile['finish'][index],
|
||||
'wait_for': profile['wait_for'][index],
|
||||
'3d': profile['3d'][index] if profile.get('3d') else False
|
||||
}
|
||||
|
||||
has_better_quality = 0
|
||||
|
||||
# See if better quality is available
|
||||
for release in releases:
|
||||
if quality_order.index(release['quality']) <= quality_order.index(q_identifier) and release['status'] not in ['available', 'ignored', 'failed']:
|
||||
has_better_quality += 1
|
||||
|
||||
# Don't search for quality lower then already available.
|
||||
if has_better_quality is 0:
|
||||
|
||||
log.info('Searching for %s in %s', (query, q_identifier))
|
||||
quality = fireEvent('quality.single', identifier = q_identifier, single = True)
|
||||
quality['custom'] = quality_custom
|
||||
|
||||
results = fireEvent('searcher.search', search_protocols, media, quality, single = True)
|
||||
if len(results) == 0:
|
||||
log.debug('Nothing found for %s in %s', (query, q_identifier))
|
||||
|
||||
# Add them to this movie releases list
|
||||
found_releases += fireEvent('release.create_from_search', results, media, quality, single = True)
|
||||
|
||||
# Try find a valid result and download it
|
||||
if fireEvent('release.try_download_result', results, media, quality, single = True):
|
||||
ret = True
|
||||
|
||||
# Remove releases that aren't found anymore
|
||||
for release in releases:
|
||||
if release.get('status') == 'available' and release.get('identifier') not in found_releases:
|
||||
fireEvent('release.delete', release.get('_id'), single = True)
|
||||
else:
|
||||
log.info('Better quality (%s) already available or snatched for %s', (q_identifier, query))
|
||||
fireEvent('media.restatus', media['_id'])
|
||||
break
|
||||
|
||||
# Break if CP wants to shut down
|
||||
if self.shuttingDown() or ret:
|
||||
break
|
||||
|
||||
if len(too_early_to_search) > 0:
|
||||
log.info2('Too early to search for %s, %s', (too_early_to_search, query))
|
||||
|
||||
return len(found_releases) > 0
|
||||
|
||||
def correctRelease(self, release = None, media = None, quality = None, **kwargs):
|
||||
if media.get('type') != 'show.season':
|
||||
return
|
||||
|
||||
retention = Env.setting('retention', section = 'nzb')
|
||||
|
||||
if release.get('seeders') is None and 0 < retention < release.get('age', 0):
|
||||
log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (release['age'], retention, release['name']))
|
||||
return False
|
||||
|
||||
# Check for required and ignored words
|
||||
if not fireEvent('searcher.correct_words', release['name'], media, single = True):
|
||||
return False
|
||||
|
||||
# TODO Matching is quite costly, maybe we should be caching release matches somehow? (also look at caper optimizations)
|
||||
match = fireEvent('matcher.match', release, media, quality, single = True)
|
||||
if match:
|
||||
return match.weight
|
||||
|
||||
return False
|
||||
@@ -1,88 +0,0 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, addEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.variable import getTitle
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.searcher.base import SearcherBase
|
||||
from couchpotato.core.media._base.searcher.main import SearchSetupError
|
||||
from couchpotato.core.media.show import ShowTypeBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ShowSearcher'
|
||||
|
||||
|
||||
class ShowSearcher(SearcherBase, ShowTypeBase):
    """Searcher for whole shows: walks the show's library tree and fires a
    season search for every regular (non-special) season."""

    type = 'show'

    # Guards against overlapping full searches; reported by searchAllView.
    in_progress = False

    def __init__(self):
        super(ShowSearcher, self).__init__()

        addEvent('%s.searcher.all' % self.getType(), self.searchAll)
        addEvent('%s.searcher.single' % self.getType(), self.single)
        addEvent('searcher.get_search_title', self.getSearchTitle)

        addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
            'desc': 'Starts a full search for all wanted episodes',
        })

    def searchAllView(self, **kwargs):
        """API endpoint: kick off a full show search asynchronously.

        Returns a success flag that is False when a search is already running.
        """
        fireEventAsync('%s.searcher.all' % self.getType(), manual = True)

        return {
            'success': not self.in_progress
        }

    def searchAll(self, manual = False):
        # Not implemented yet.
        pass

    def single(self, media, search_protocols = None, manual = False):
        """Search for a single show by delegating each season to the season
        searcher.  Returns None early when search setup fails or the show has
        no profile / is already done."""
        # Find out search type
        try:
            if not search_protocols:
                search_protocols = fireEvent('searcher.protocols', single = True)
        except SearchSetupError:
            return

        if not media['profile_id'] or media['status'] == 'done':
            log.debug('Show doesn\'t have a profile or already done, assuming in manage tab.')
            return

        show_title = fireEvent('media.search_query', media, condense = False, single = True)

        fireEvent('notify.frontend', type = 'show.searcher.started.%s' % media['_id'], data = True, message = 'Searching for "%s"' % show_title)

        show_tree = fireEvent('library.tree', media, single = True)

        db = get_db()

        profile = db.get('id', media['profile_id'])
        quality_order = fireEvent('quality.order', single = True)

        for season in show_tree.get('seasons', []):
            if not season.get('info'):
                continue

            # Skip specials (and seasons missing 'number') for now
            # TODO: set status for specials to skipped by default
            if not season['info'].get('number'):
                continue

            # Check if full season can be downloaded
            fireEvent('show.season.searcher.single', season, profile, quality_order, search_protocols, manual)

            # TODO (testing) only snatch one season
            # NOTE: this returns after the first season, so the 'ended'
            # notification below is skipped whenever any season was searched.
            return

        fireEvent('notify.frontend', type = 'show.searcher.ended.%s' % media['_id'], data = True)

    def getSearchTitle(self, media):
        """Return the search title for *media*, resolving episode/season
        media to its parent show first."""
        if media.get('type') != 'show':
            related = fireEvent('library.related', media, single = True)
            show = related['show']
        else:
            show = media

        return getTitle(show)
|
||||
0
couchpotato/core/plugins/dashboard.py
Executable file → Normal file
0
couchpotato/core/plugins/dashboard.py
Executable file → Normal file
@@ -33,25 +33,7 @@ class QualityPlugin(Plugin):
|
||||
{'identifier': 'r5', 'size': (600, 1000), 'median_size': 700, 'label': 'R5', 'alternative': ['r6'], 'allow': ['dvdr', '720p', '1080p'], 'ext':[]},
|
||||
{'identifier': 'tc', 'size': (600, 1000), 'median_size': 700, 'label': 'TeleCine', 'alternative': ['telecine'], 'allow': ['720p', '1080p'], 'ext':[]},
|
||||
{'identifier': 'ts', 'size': (600, 1000), 'median_size': 700, 'label': 'TeleSync', 'alternative': ['telesync', 'hdts'], 'allow': ['720p', '1080p'], 'ext':[]},
|
||||
{'identifier': 'cam', 'size': (600, 1000), 'median_size': 700, 'label': 'Cam', 'alternative': ['camrip', 'hdcam'], 'allow': ['720p', '1080p'], 'ext':[]},
|
||||
|
||||
# TODO come back to this later, think this could be handled better, this is starting to get out of hand....
|
||||
# BluRay
|
||||
{'identifier': 'bluray_1080p', 'hd': True, 'size': (800, 5000), 'label': 'BluRay - 1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
{'identifier': 'bluray_720p', 'hd': True, 'size': (800, 5000), 'label': 'BluRay - 720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
# BDRip
|
||||
{'identifier': 'bdrip_1080p', 'hd': True, 'size': (800, 5000), 'label': 'BDRip - 1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
{'identifier': 'bdrip_720p', 'hd': True, 'size': (800, 5000), 'label': 'BDRip - 720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
# BRRip
|
||||
{'identifier': 'brrip_1080p', 'hd': True, 'size': (800, 5000), 'label': 'BRRip - 1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
{'identifier': 'brrip_720p', 'hd': True, 'size': (800, 5000), 'label': 'BRRip - 720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
# WEB-DL
|
||||
{'identifier': 'webdl_1080p', 'hd': True, 'size': (800, 5000), 'label': 'WEB-DL - 1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
{'identifier': 'webdl_720p', 'hd': True, 'size': (800, 5000), 'label': 'WEB-DL - 720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
{'identifier': 'webdl_480p', 'hd': True, 'size': (100, 5000), 'label': 'WEB-DL - 480p', 'width': 720, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
# HDTV
|
||||
{'identifier': 'hdtv_720p', 'hd': True, 'size': (800, 5000), 'label': 'HDTV - 720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv']},
|
||||
{'identifier': 'hdtv_sd', 'hd': False, 'size': (100, 1000), 'label': 'HDTV - SD', 'width': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'mp4', 'avi']},
|
||||
{'identifier': 'cam', 'size': (600, 1000), 'median_size': 700, 'label': 'Cam', 'alternative': ['camrip', 'hdcam'], 'allow': ['720p', '1080p'], 'ext':[]}
|
||||
]
|
||||
pre_releases = ['cam', 'ts', 'tc', 'r5', 'scr']
|
||||
threed_tags = {
|
||||
|
||||
@@ -441,6 +441,7 @@ class Release(Plugin):
|
||||
for rel in search_results:
|
||||
|
||||
rel_identifier = md5(rel['url'])
|
||||
found_releases.append(rel_identifier)
|
||||
|
||||
release = {
|
||||
'_t': 'release',
|
||||
@@ -481,9 +482,6 @@ class Release(Plugin):
|
||||
# Update release in search_results
|
||||
rel['status'] = rls.get('status')
|
||||
|
||||
if rel['status'] == 'available':
|
||||
found_releases.append(rel_identifier)
|
||||
|
||||
return found_releases
|
||||
except:
|
||||
log.error('Failed: %s', traceback.format_exc())
|
||||
|
||||
@@ -220,10 +220,6 @@ class Renamer(Plugin):
|
||||
nfo_name = self.conf('nfo_name')
|
||||
separator = self.conf('separator')
|
||||
|
||||
if len(file_name) == 0:
|
||||
log.error('Please fill in the filename option under renamer settings. Forcing it on <original>.<ext> to keep the same name as source file.')
|
||||
file_name = '<original>.<ext>'
|
||||
|
||||
cd_keys = ['<cd>','<cd_nr>', '<original>']
|
||||
if not any(x in folder_name for x in cd_keys) and not any(x in file_name for x in cd_keys):
|
||||
log.error('Missing `cd` or `cd_nr` in the renamer. This will cause multi-file releases of being renamed to the same file. '
|
||||
@@ -795,7 +791,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)
|
||||
dest = sp(dest)
|
||||
try:
|
||||
|
||||
if os.path.exists(dest) and os.path.isfile(dest):
|
||||
if os.path.exists(dest):
|
||||
raise Exception('Destination "%s" already exists' % dest)
|
||||
|
||||
move_type = self.conf('file_action')
|
||||
|
||||
19
couchpotato/core/plugins/score/main.py
Executable file → Normal file
19
couchpotato/core/plugins/score/main.py
Executable file → Normal file
@@ -1,4 +1,4 @@
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import getTitle, splitString, removeDuplicate
|
||||
from couchpotato.core.logger import CPLog
|
||||
@@ -16,20 +16,17 @@ class Score(Plugin):
|
||||
def __init__(self):
|
||||
addEvent('score.calculate', self.calculate)
|
||||
|
||||
def calculate(self, nzb, media):
|
||||
def calculate(self, nzb, movie):
|
||||
""" Calculate the score of a NZB, used for sorting later """
|
||||
|
||||
# Fetch root media item (movie, show)
|
||||
root = fireEvent('library.root', media, single = True)
|
||||
|
||||
# Merge global and category
|
||||
preferred_words = splitString(Env.setting('preferred_words', section = 'searcher').lower())
|
||||
try: preferred_words = removeDuplicate(preferred_words + splitString(media['category']['preferred'].lower()))
|
||||
try: preferred_words = removeDuplicate(preferred_words + splitString(movie['category']['preferred'].lower()))
|
||||
except: pass
|
||||
|
||||
score = nameScore(toUnicode(nzb['name']), root['info'].get('year'), preferred_words)
|
||||
score = nameScore(toUnicode(nzb['name']), movie['info']['year'], preferred_words)
|
||||
|
||||
for movie_title in root['info']['titles']:
|
||||
for movie_title in movie['info']['titles']:
|
||||
score += nameRatioScore(toUnicode(nzb['name']), toUnicode(movie_title))
|
||||
score += namePositionScore(toUnicode(nzb['name']), toUnicode(movie_title))
|
||||
|
||||
@@ -47,15 +44,15 @@ class Score(Plugin):
|
||||
score += providerScore(nzb['provider'])
|
||||
|
||||
# Duplicates in name
|
||||
score += duplicateScore(nzb['name'], getTitle(root))
|
||||
score += duplicateScore(nzb['name'], getTitle(movie))
|
||||
|
||||
# Merge global and category
|
||||
ignored_words = splitString(Env.setting('ignored_words', section = 'searcher').lower())
|
||||
try: ignored_words = removeDuplicate(ignored_words + splitString(media['category']['ignored'].lower()))
|
||||
try: ignored_words = removeDuplicate(ignored_words + splitString(movie['category']['ignored'].lower()))
|
||||
except: pass
|
||||
|
||||
# Partial ignored words
|
||||
score += partialIgnoredScore(nzb['name'], getTitle(root), ignored_words)
|
||||
score += partialIgnoredScore(nzb['name'], getTitle(movie), ignored_words)
|
||||
|
||||
# Ignore single downloads from multipart
|
||||
score += halfMultipartScore(nzb['name'])
|
||||
|
||||
@@ -117,7 +117,7 @@ var AboutSettingTab = new Class({
|
||||
var self = this;
|
||||
var date = new Date(json.version.date * 1000);
|
||||
self.version_text.set('text', json.version.hash + (json.version.date ? ' ('+date.toLocaleString()+')' : ''));
|
||||
self.updater_type.set('text', json.version.type + ', ' + json.branch);
|
||||
self.updater_type.set('text', (json.version.type != json.branch) ? (json.version.type + ', ' + json.branch) : json.branch);
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
BIN
icon_mac.png
Normal file
BIN
icon_mac.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 435 B |
BIN
icon_windows.png
Normal file
BIN
icon_windows.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 367 B |
52
installer.iss
Normal file
52
installer.iss
Normal file
@@ -0,0 +1,52 @@
|
||||
#define MyAppName "CouchPotato"
|
||||
#define MyAppVer "2.6.0"
|
||||
#define MyAppBit "win32"
|
||||
//#define MyAppBit "win-amd64"
|
||||
|
||||
[Setup]
|
||||
AppName={#MyAppName}
|
||||
AppVersion=2
|
||||
AppVerName={#MyAppName}
|
||||
DefaultDirName={userappdata}\{#MyAppName}\application
|
||||
DisableProgramGroupPage=yes
|
||||
DisableDirPage=yes
|
||||
UninstallDisplayIcon=./icon.ico
|
||||
SetupIconFile=./icon.ico
|
||||
OutputDir=./dist
|
||||
OutputBaseFilename={#MyAppName}-{#MyAppVer}.{#MyAppBit}.installer
|
||||
AppPublisher=Your Mom
|
||||
AppPublisherURL=http://couchpota.to
|
||||
PrivilegesRequired=none
|
||||
WizardSmallImageFile=installer_icon.bmp
|
||||
WizardImageFile=installer_banner.bmp
|
||||
UsePreviousAppDir=no
|
||||
|
||||
[Messages]
|
||||
WelcomeLabel1=Installing [name]!
|
||||
WelcomeLabel2=This wizard will install [name] to your AppData folder. It does this so it can use the build in updater without needing admin rights.
|
||||
|
||||
[CustomMessages]
|
||||
LaunchProgram=Launch {#MyAppName} right now.
|
||||
|
||||
[Files]
|
||||
Source: "./dist/{#MyAppName}-{#MyAppVer}.{#MyAppBit}/*"; Flags: recursesubdirs; DestDir: "{app}"
|
||||
|
||||
[Icons]
|
||||
Name: "{commonprograms}\{#MyAppName}"; Filename: "{app}\{#MyAppName}.exe"
|
||||
Name: "{userstartup}\{#MyAppName}"; Filename: "{app}\{#MyAppName}.exe"; Tasks: startup
|
||||
|
||||
[Tasks]
|
||||
Name: "startup"; Description: "Run {#MyAppName} at startup"; Flags: unchecked
|
||||
|
||||
[Run]
|
||||
Filename: {app}\{#MyAppName}.exe; Description: {cm:LaunchProgram,{#MyAppName}}; Flags: nowait postinstall skipifsilent
|
||||
|
||||
|
||||
[UninstallDelete]
|
||||
Type: filesandordirs; Name: "{app}\appdata"
|
||||
Type: filesandordirs; Name: "{app}\Microsoft.VC90.CRT"
|
||||
Type: filesandordirs; Name: "{app}\updates"
|
||||
Type: filesandordirs; Name: "{app}\CouchPotato*"
|
||||
Type: filesandordirs; Name: "{app}\python27.dll"
|
||||
Type: filesandordirs; Name: "{app}\unins000.dat"
|
||||
Type: filesandordirs; Name: "{app}\unins000.exe"
|
||||
BIN
installer_banner.bmp
Normal file
BIN
installer_banner.bmp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 151 KiB |
BIN
installer_icon.bmp
Normal file
BIN
installer_icon.bmp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 9.6 KiB |
@@ -1,42 +0,0 @@
|
||||
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from qcond.transformers.merge import MergeTransformer
|
||||
from qcond.transformers.slice import SliceTransformer
|
||||
from qcond.transformers.strip_common import StripCommonTransformer
|
||||
|
||||
|
||||
__version_info__ = ('0', '1', '0')
__version_branch__ = 'master'

# e.g. "0.1.0-master"; the branch suffix is dropped when no branch is set.
__version__ = "%s%s" % (
    '.'.join(__version_info__),
    '-' + __version_branch__ if __version_branch__ else ''
)


class QueryCondenser(object):
    """Condense a list of titles into a smaller set of distinct queries."""

    def __init__(self):
        # Pipeline order matters: merge similar titles, slice near-duplicates,
        # then strip the parts common to all of them.
        self.transformers = [
            MergeTransformer(),
            SliceTransformer(),
            StripCommonTransformer()
        ]

    def distinct(self, titles):
        """Feed *titles* through every transformer in order and return the result."""
        result = titles

        for step in self.transformers:
            result = step.run(result)

        return result
|
||||
@@ -1,23 +0,0 @@
|
||||
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import sys
|
||||
|
||||
# True when running on Python 3, where the lazy integer range is just `range`.
PY3 = (sys.version_info[0] == 3)

# Re-export a single `xrange` name so callers work on either major version.
xrange = range if PY3 else xrange
|
||||
@@ -1,84 +0,0 @@
|
||||
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from difflib import SequenceMatcher
|
||||
import re
|
||||
import sys
|
||||
from logr import Logr
|
||||
from qcond.compat import xrange
|
||||
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
|
||||
def simplify(s):
    """Lower-case *s* and collapse word-internal apostrophes (don't -> dont)."""
    return re.sub(r"(\w)'(\w)", r"\1\2", s.lower())
|
||||
|
||||
|
||||
def strip(s):
    """Trim leading and trailing non-word characters from *s*."""
    # The pattern always matches (every group can be empty), so group(2) is safe.
    return re.match(r"^(\W*)(.*?)(\W*)$", s).group(2)
|
||||
|
||||
|
||||
def create_matcher(a, b, swap_longest = True, case_sensitive = False):
    """Build a SequenceMatcher over *a* and *b*.

    When *swap_longest* is set, the longer string always ends up as `a`.
    Unless *case_sensitive*, both strings are upper-cased before matching.
    """
    if swap_longest and len(b) > len(a):
        a, b = b, a

    if not case_sensitive:
        a, b = a.upper(), b.upper()

    return SequenceMatcher(None, a, b)
|
||||
|
||||
|
||||
def first(function_or_none, sequence):
    """Return the first item of *sequence* accepted by *function_or_none*.

    Mirrors `filter` semantics: when *function_or_none* is None, the first
    truthy item is returned.  Returns None when nothing matches.

    Replaces the duplicated PY2/PY3 branches: iterating `filter(...)` works
    on both versions (list on py2, iterator on py3), and the old py2 path
    needlessly materialized the entire filtered list.
    """
    for item in filter(function_or_none, sequence):
        return item

    return None
|
||||
|
||||
def sorted_append(sequence, item, func):
    """Insert *item* into *sequence* before the first element for which
    func(element) is truthy; append when the sequence is empty or no
    element matches.

    Replaces the index loop (dead `x = 0`, `xrange` compat dependency) with
    `enumerate`; behavior is identical — insert(0, item) on an empty list
    is the same as append.
    """
    for position, existing in enumerate(sequence):
        if func(existing):
            sequence.insert(position, item)
            return

    sequence.append(item)
|
||||
|
||||
def itemsMatch(L1, L2):
    """True when both sequences hold exactly the same items, in any order."""
    if len(L1) != len(L2):
        return False

    return sorted(L1) == sorted(L2)
|
||||
|
||||
def distinct(sequence):
    """Return *sequence* with duplicates removed, first occurrence winning.

    Membership is tested with `==` on a list (not a set) so unhashable
    items are supported; order is preserved.
    """
    unique = []

    for candidate in sequence:
        if candidate not in unique:
            unique.append(candidate)

    return unique
|
||||
@@ -1,21 +0,0 @@
|
||||
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
class Transformer(object):
    """Abstract base for title transformers; subclasses implement run()."""

    def __init__(self):
        pass

    def run(self, titles):
        """Transform *titles* and return the resulting list."""
        raise NotImplementedError()
|
||||
@@ -1,241 +0,0 @@
|
||||
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from operator import itemgetter
|
||||
from logr import Logr
|
||||
from qcond.helpers import simplify, strip, first, sorted_append, distinct
|
||||
from qcond.transformers.base import Transformer
|
||||
from qcond.compat import xrange
|
||||
|
||||
|
||||
class MergeTransformer(Transformer):
    """Condenses titles by building a word-prefix tree and merging branches.

    Each title is split into words and threaded into a tree of DNode
    objects; sibling branches are merged, and every surviving tail node is
    scored by the average weight along its path back to the root.
    """

    def __init__(self):
        super(MergeTransformer, self).__init__()

    def run(self, titles):
        # Normalize (lower-case, strip word-internal apostrophes) and
        # de-duplicate before building the tree.
        titles = distinct([simplify(title) for title in titles])

        Logr.info(str(titles))

        Logr.debug("------------------------------------------------------------")

        root, tails = self.parse(titles)

        Logr.debug("--------------------------PARSE-----------------------------")

        for node in root:
            print_tree(node)

        Logr.debug("--------------------------MERGE-----------------------------")

        self.merge(root)

        Logr.debug("--------------------------FINAL-----------------------------")

        for node in root:
            print_tree(node)

        Logr.debug("--------------------------RESULT-----------------------------")

        scores = {}
        results = []

        # Accumulate a score per distinct condensed value; the first
        # original title seen for a value is kept as its tiebreaker.
        for tail in tails:
            score, value, original_value = tail.full_value()

            if value in scores:
                scores[value] += score
            else:
                results.append((value, original_value))
                scores[value] = score

            Logr.debug("%s %s %s", score, value, original_value)

        # Highest score first; original title breaks ties.
        sorted_results = sorted(results, key=lambda item: (scores[item[0]], item[1]), reverse = True)

        return [result[0] for result in sorted_results]

    def parse(self, titles):
        """Build the prefix tree.

        Returns (root, tails): root-level DNodes, and the tail DNode reached
        for each title (one entry per title).
        """
        root = []
        tails = []

        for title in titles:
            Logr.debug(title)

            cur = None
            words = title.split(' ')

            for wx in xrange(len(words)):
                word = strip(words[wx])

                if cur is None:
                    # First word of the title: find or create a root node.
                    cur = find_node(root, word)

                    if cur is None:
                        cur = DNode(word, None, num_children=len(words) - wx, original_value=title)
                        root.append(cur)
                else:
                    parent = cur
                    parent.weight += 1

                    cur = find_node(parent.right, word)

                    if cur is None:
                        Logr.debug("%s %d", word, len(words) - wx)
                        cur = DNode(word, parent, num_children=len(words) - wx)
                        # Children are kept ordered by descending num_children.
                        sorted_append(parent.right, cur, lambda a: a.num_children < cur.num_children)
                    else:
                        cur.weight += 1

            tails.append(cur)

        return root, tails

    def merge(self, root):
        """Merge each root node's children in place; returns *root*."""
        for x in range(len(root)):
            Logr.debug(root[x])
            root[x].right = self._merge(root[x].right)
            Logr.debug('=================================================================')

        return root

    def get_nodes_right(self, value):
        """Collect *value* (a node or list of nodes) plus all of its
        right-hand descendants, depth-first."""
        if type(value) is not list:
            value = [value]

        nodes = []

        for node in value:
            nodes.append(node)

            for child in self.get_nodes_right(node.right):
                nodes.append(child)

        return nodes

    def destroy_nodes_right(self, value):
        """Mark *value* and all of its right-hand descendants as dead."""
        nodes = self.get_nodes_right(value)

        for node in nodes:
            node.value = None
            node.dead = True

    def _merge(self, nodes, depth = 0):
        """Merge sibling *nodes* into the first one, then recurse right.

        Dead nodes are filtered out before recursing.
        """
        Logr.debug(str('\t' * depth) + str(nodes))

        if not len(nodes):
            return []

        top = nodes[0]

        # Merge into top
        for x in range(len(nodes)):
            # Merge extra results into top
            if x > 0:
                top.value = None
                top.weight += nodes[x].weight
                self.destroy_nodes_right(top.right)

                if len(nodes[x].right):
                    top.join_right(nodes[x].right)

                Logr.debug("= %s joined %s", nodes[x], top)

                nodes[x].dead = True

        nodes = [n for n in nodes if not n.dead]

        # Traverse further
        for node in nodes:
            if len(node.right):
                node.right = self._merge(node.right, depth + 1)

        return nodes
|
||||
|
||||
|
||||
def print_tree(node, depth = 0):
    """Log *node* and its right-hand descendants, indented by *depth* tabs."""
    Logr.debug('\t' * depth + str(node))

    # Leaf nodes log their accumulated value instead of recursing.
    if not node.right:
        Logr.debug(node.full_value()[1])
        return

    for child in node.right:
        print_tree(child, depth + 1)
|
||||
|
||||
|
||||
def find_node(node_list, value):
    """Return the first node in *node_list* whose value equals *value*, else None."""
    matches = (node for node in node_list if node.value == value)
    return next(matches, None)
|
||||
|
||||
|
||||
class DNode(object):
    """Node in the word-prefix tree built by MergeTransformer.

    `right` holds child nodes, `weight` counts how many titles passed
    through this word, and `dead` marks nodes removed by merging.
    """

    def __init__(self, value, parent, right=None, weight=1, num_children=None, original_value=None):
        self.value = value
        self.parent = parent

        # Avoid a shared mutable default for the child list.
        self.right = [] if right is None else right

        self.weight = weight

        self.original_value = original_value
        self.num_children = num_children

        self.dead = False

    def join_right(self, nodes):
        """Graft *nodes* onto this node's child list, merging duplicates.

        A node whose value already exists contributes its weight and
        children to the existing child instead of being appended.
        """
        for node in nodes:
            duplicate = first(lambda x: x.value == node.value, self.right)

            if duplicate:
                duplicate.weight += node.weight
                duplicate.join_right(node.right)
            else:
                node.parent = self
                self.right.append(node)

    def full_value(self):
        """Walk up to the root and return
        (average weight, space-joined words, root's original_value).

        Dead or valueless ancestors are skipped.
        """
        words = []
        total_score = 0

        cur, root = self, None

        while cur is not None:
            if cur.value and not cur.dead:
                words.insert(0, cur.value)
                total_score += cur.weight

            if cur.parent is None:
                root = cur

            cur = cur.parent

        return float(total_score) / len(words), ' '.join(words), root.original_value if root else None

    def __repr__(self):
        return '<%s value:"%s", weight: %s, num_children: %s%s%s>' % (
            'DNode',
            self.value,
            self.weight,
            self.num_children,
            (', original_value: %s' % self.original_value) if self.original_value else '',
            ' REMOVING' if self.dead else ''
        )
|
||||
@@ -1,280 +0,0 @@
|
||||
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from logr import Logr
|
||||
from qcond.helpers import create_matcher
|
||||
from qcond.transformers.base import Transformer
|
||||
|
||||
|
||||
class SliceTransformer(Transformer):
    """Transformer that collapses near-duplicate titles by building a
    similarity graph between all titles and merging similar nodes."""

    def __init__(self):
        super(SliceTransformer, self).__init__()

    def run(self, titles):
        """Return *titles* reduced to merged representatives, ordered by
        how many merges each surviving title absorbed."""
        # One graph node per title.
        nodes = [SimNode(title) for title in titles]

        # Link every node to every other node with similarity metadata.
        for node in nodes:
            calculate_sim_links(node, [other for other in nodes if other != node])

        # First pass: collapse nearly-identical titles.
        kill_nodes_above(nodes, 0.90)

        Logr.debug('---------------------------------------------------------------------')

        print_link_tree(nodes)
        Logr.debug('%s %s', len(nodes), [n.value for n in nodes])

        Logr.debug('---------------------------------------------------------------------')

        # Second pass: collapse titles that merely extend another title.
        kill_trailing_nodes(nodes)

        Logr.debug('---------------------------------------------------------------------')

        # Most-merged (most representative) titles first.
        nodes = sorted(nodes, key=lambda node: node.num_merges, reverse=True)

        print_link_tree(nodes)

        Logr.debug('---------------------------------------------------------------------')

        Logr.debug('%s %s', len(nodes), [n.value for n in nodes])

        return [n.value for n in nodes]
|
||||
|
||||
|
||||
class SimLink(object):
    """Similarity edge between two SimNodes."""

    def __init__(self, similarity, opcodes, stats):
        self.similarity = similarity  # ratio from the sequence matcher
        self.opcodes = opcodes        # matcher opcodes for this direction
        self.stats = stats            # aggregated opcode statistics
|
||||
|
||||
|
||||
class SimNode(object):
    """Graph node wrapping one title and its similarity links."""

    def __init__(self, value):
        self.value = value

        self.dead = False       # flagged True when merged into another node
        self.num_merges = 0     # how many other nodes this one absorbed

        self.links = {}  # {<other SimNode>: <SimLink>}
|
||||
|
||||
|
||||
def kill_nodes(nodes, killed_nodes):
    """Delete *killed_nodes* from the root list *nodes* (in place) and
    drop any link a surviving node still holds to a killed node."""
    # Remove killed nodes from root list
    for dead in killed_nodes:
        if dead in nodes:
            nodes.remove(dead)

    # Remove killed nodes from links
    for dead in killed_nodes:
        for survivor in nodes:
            survivor.links.pop(dead, None)
|
||||
|
||||
|
||||
def kill_nodes_above(nodes, above_sim):
    # Merge pairs of nodes whose similarity meets/exceeds *above_sim*.
    # Within a highly-similar pair, the node with the LONGER value survives
    # and gains a merge credit; the shorter one is flagged dead and is
    # removed afterwards via kill_nodes().
    killed_nodes = []

    for node in nodes:
        if node.dead:
            continue

        Logr.debug(node.value)

        for link_node, link in node.links.items():
            # Skip partners already killed earlier in this pass.
            if link_node.dead:
                continue

            Logr.debug('\t%0.2f -- %s', link.similarity, link_node.value)

            if link.similarity >= above_sim:
                if len(link_node.value) > len(node.value):
                    Logr.debug('\t\tvery similar, killed this node')

                    link_node.dead = True
                    node.num_merges += 1
                    killed_nodes.append(link_node)
                else:
                    Logr.debug('\t\tvery similar, killed owner')

                    node.dead = True
                    link_node.num_merges += 1
                    # NOTE(review): iteration over this node's links continues
                    # even after it dies, so it may keep crediting partners and
                    # can be appended to killed_nodes more than once — confirm
                    # this is intended.
                    killed_nodes.append(node)

    kill_nodes(nodes, killed_nodes)
|
||||
|
||||
|
||||
def print_link_tree(nodes):
    """Log every node, its merge count, and its similarity links."""
    for node in nodes:
        Logr.debug(node.value)
        Logr.debug('\tnum_merges: %s', node.num_merges)

        if not node.links:
            continue

        Logr.debug('\t========== LINKS ==========')
        for link_node, link in node.links.items():
            Logr.debug('\t%0.2f -- %s', link.similarity, link_node.value)
        Logr.debug('\t---------------------------')
|
||||
|
||||
|
||||
def kill_trailing_nodes(nodes):
    # Merge a node with partners that merely EXTEND it: a partner is killed
    # when the diff from node -> partner is made only of boundary-valid
    # insertions (no deletions, no replacements) and at least half of the
    # node's characters match the partner.
    killed_nodes = []

    for node in nodes:
        if node.dead:
            continue

        Logr.debug(node.value)

        for link_node, link in node.links.items():
            if link_node.dead:
                continue

            # 'valid' means every insert/delete fell on a word boundary
            # (computed in get_opcode_stats).
            is_valid = link.stats.get('valid', False)

            has_deletions = False
            has_insertions = False
            has_replacements = False

            for opcode in link.opcodes:
                if opcode[0] == 'delete':
                    has_deletions = True
                if opcode[0] == 'insert':
                    has_insertions = True
                if opcode[0] == 'replace':
                    has_replacements = True

            # Fractions are relative to THIS node's length, so insert_perc
            # can exceed 1.0 when the partner is much longer.
            equal_perc = link.stats.get('equal', 0) / float(len(node.value))
            insert_perc = link.stats.get('insert', 0) / float(len(node.value))

            Logr.debug('\t({0:<24}) [{1:02d}:{2:02d} = {3:02d} {4:3.0f}% {5:3.0f}%] -- {6:<45}'.format(
                'd:%s, i:%s, r:%s' % (has_deletions, has_insertions, has_replacements),
                len(node.value), len(link_node.value), link.stats.get('equal', 0),
                equal_perc * 100, insert_perc * 100,
                '"{0}"'.format(link_node.value)
            ))

            Logr.debug('\t\t%s', link.stats)

            # Kill the partner only for a "pure extension": boundary-valid,
            # mostly-matching, insertions present, nothing removed or changed.
            kill = all([
                is_valid,
                equal_perc >= 0.5,
                insert_perc < 2,
                has_insertions,
                not has_deletions,
                not has_replacements
            ])

            if kill:
                Logr.debug('\t\tkilled this node')

                link_node.dead = True
                node.num_merges += 1
                killed_nodes.append(link_node)

    kill_nodes(nodes, killed_nodes)
|
||||
|
||||
# Log template for one sequence-matcher opcode: tag, (i1:i2), (j1:j2).
stats_print_format = "\t{0:<8} ({1:2d}:{2:2d}) ({3:2d}:{4:2d})"
|
||||
|
||||
|
||||
def get_index_values(iterable, a, b):
    """Return ``(iterable[a], iterable[b])``, substituting ``None`` for any
    index that is ``None``.

    Fix: the original tested truthiness (``if a``), which wrongly mapped the
    valid index 0 to ``None``. Companion ``get_indices`` never yields 0, so
    behavior is unchanged for its output, but standalone calls with index 0
    now return the real element.
    """
    return (
        iterable[a] if a is not None else None,
        iterable[b] if b is not None else None
    )
|
||||
|
||||
|
||||
def get_indices(iterable, a, b):
    """Validate indices *a* and *b* against *iterable*, replacing any index
    outside the open interval (0, len(iterable)) with ``None``.

    Note that index 0 is deliberately treated as out of range: position 0
    represents the string edge for the boundary checks in get_opcode_stats.
    """
    upper = len(iterable)
    checked_a = a if 0 < a < upper else None
    checked_b = b if 0 < b < upper else None
    return checked_a, checked_b
|
||||
|
||||
|
||||
def get_opcode_stats(for_node, node, opcodes):
    # Aggregate sequence-matcher opcodes into a stats dict of the form
    # {'valid': bool, <tag>: affected_char_count, ...}. 'valid' ends up True
    # only if EVERY insert/delete happened at a word boundary (the adjacent
    # character on at least one side is the string edge [None] or a space).
    stats = {}

    for tag, i1, i2, j1, j2 in opcodes:
        Logr.debug(stats_print_format.format(
            tag, i1, i2, j1, j2
        ))

        if tag in ['insert', 'delete']:
            ax = None, None
            bx = None, None

            if tag == 'insert':
                # Characters bracketing the insertion point in for_node, and
                # the first/last inserted characters from node.
                ax = get_indices(for_node.value, i1 - 1, i1)
                bx = get_indices(node.value, j1, j2 - 1)

            if tag == 'delete':
                # NOTE(review): i/j roles look swapped relative to the
                # 'insert' branch (j indexes for_node here) — confirm this
                # matches the matcher's argument order for deletions.
                ax = get_indices(for_node.value, j1 - 1, j1)
                bx = get_indices(node.value, i1, i2 - 1)

            av = get_index_values(for_node.value, *ax)
            bv = get_index_values(node.value, *bx)

            Logr.debug(
                '\t\t%s %s [%s><%s] <---> %s %s [%s><%s]',
                ax, av, av[0], av[1],
                bx, bv, bv[0], bv[1]
            )

            # The change is boundary-valid if its head and tail both touch a
            # string edge (None) or a space in either string.
            head_valid = av[0] in [None, ' '] or bv[0] in [None, ' ']
            tail_valid = av[1] in [None, ' '] or bv[1] in [None, ' ']
            valid = head_valid and tail_valid

            # Latch: once any opcode is invalid, 'valid' stays False.
            if 'valid' not in stats or (stats['valid'] and not valid):
                stats['valid'] = valid

            Logr.debug('\t\t' + ('VALID' if valid else 'INVALID'))

        # Accumulate affected character counts per opcode tag.
        if tag not in stats:
            stats[tag] = 0

        stats[tag] += (i2 - i1) or (j2 - j1)

    return stats
|
||||
|
||||
|
||||
def calculate_sim_links(for_node, other_nodes):
    """Create symmetric SimLink entries between *for_node* and every node in
    *other_nodes* that is not already linked."""
    for node in other_nodes:
        if node in for_node.links:
            continue  # linked during an earlier node's pass

        Logr.debug('calculating similarity between "%s" and "%s"', for_node.value, node.value)

        # Overall similarity score for the pair.
        similarity = create_matcher(for_node.value, node.value).quick_ratio()

        # Opcodes/stats for the for_node -> node direction (swap_longest
        # disabled so argument order is preserved).
        forward_opcodes = create_matcher(for_node.value, node.value, swap_longest = False).get_opcodes()
        forward_stats = get_opcode_stats(for_node, node, forward_opcodes)

        Logr.debug('-' * 100)

        # Opcodes/stats for the node -> for_node direction.
        reverse_opcodes = create_matcher(node.value, for_node.value, swap_longest = False).get_opcodes()
        reverse_stats = get_opcode_stats(for_node, node, reverse_opcodes)

        for_node.links[node] = SimLink(similarity, forward_opcodes, forward_stats)
        node.links[for_node] = SimLink(similarity, reverse_opcodes, reverse_stats)
|
||||
@@ -1,26 +0,0 @@
|
||||
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from qcond.transformers.base import Transformer
|
||||
|
||||
|
||||
# Titles that are nothing but one of these words are dropped; comparison is
# done against the lowercased title.
COMMON_WORDS = [
    'the'
]
|
||||
|
||||
|
||||
class StripCommonTransformer(Transformer):
    """Transformer that removes titles consisting solely of a common word."""

    def run(self, titles):
        """Return *titles* without entries whose lowercased form appears in
        COMMON_WORDS."""
        common = frozenset(COMMON_WORDS)
        return [title for title in titles if title.lower() not in common]
|
||||
@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement
|
||||
# is zero for an official release, positive for a development branch,
|
||||
# or negative for a release candidate or beta (after the base version
|
||||
# number has been incremented)
|
||||
version = "4.1.dev1"
|
||||
version_info = (4, 1, 0, -100)
|
||||
version = "4.0.1"
|
||||
version_info = (4, 0, 1, -100)
|
||||
|
||||
@@ -76,7 +76,7 @@ from tornado import escape
|
||||
from tornado.httputil import url_concat
|
||||
from tornado.log import gen_log
|
||||
from tornado.stack_context import ExceptionStackContext
|
||||
from tornado.util import u, unicode_type, ArgReplacer
|
||||
from tornado.util import bytes_type, u, unicode_type, ArgReplacer
|
||||
|
||||
try:
|
||||
import urlparse # py2
|
||||
@@ -333,7 +333,7 @@ class OAuthMixin(object):
|
||||
|
||||
The ``callback_uri`` may be omitted if you have previously
|
||||
registered a callback URI with the third-party service. For
|
||||
some services (including Friendfeed), you must use a
|
||||
some sevices (including Friendfeed), you must use a
|
||||
previously-registered callback URI and cannot specify a
|
||||
callback via this method.
|
||||
|
||||
@@ -1112,7 +1112,7 @@ class FacebookMixin(object):
|
||||
args["cancel_url"] = urlparse.urljoin(
|
||||
self.request.full_url(), cancel_uri)
|
||||
if extended_permissions:
|
||||
if isinstance(extended_permissions, (unicode_type, bytes)):
|
||||
if isinstance(extended_permissions, (unicode_type, bytes_type)):
|
||||
extended_permissions = [extended_permissions]
|
||||
args["req_perms"] = ",".join(extended_permissions)
|
||||
self.redirect("http://www.facebook.com/login.php?" +
|
||||
|
||||
@@ -29,7 +29,6 @@ import sys
|
||||
|
||||
from tornado.stack_context import ExceptionStackContext, wrap
|
||||
from tornado.util import raise_exc_info, ArgReplacer
|
||||
from tornado.log import app_log
|
||||
|
||||
try:
|
||||
from concurrent import futures
|
||||
@@ -174,11 +173,8 @@ class Future(object):
|
||||
def _set_done(self):
|
||||
self._done = True
|
||||
for cb in self._callbacks:
|
||||
try:
|
||||
cb(self)
|
||||
except Exception:
|
||||
app_log.exception('exception calling callback %r for %r',
|
||||
cb, self)
|
||||
# TODO: error handling
|
||||
cb(self)
|
||||
self._callbacks = None
|
||||
|
||||
TracebackFuture = Future
|
||||
|
||||
@@ -19,12 +19,10 @@
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import collections
|
||||
import functools
|
||||
import logging
|
||||
import pycurl
|
||||
import threading
|
||||
import time
|
||||
from io import BytesIO
|
||||
|
||||
from tornado import httputil
|
||||
from tornado import ioloop
|
||||
@@ -33,6 +31,12 @@ from tornado import stack_context
|
||||
|
||||
from tornado.escape import utf8, native_str
|
||||
from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main
|
||||
from tornado.util import bytes_type
|
||||
|
||||
try:
|
||||
from io import BytesIO # py3
|
||||
except ImportError:
|
||||
from cStringIO import StringIO as BytesIO # py2
|
||||
|
||||
|
||||
class CurlAsyncHTTPClient(AsyncHTTPClient):
|
||||
@@ -41,7 +45,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
|
||||
self._multi = pycurl.CurlMulti()
|
||||
self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
|
||||
self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
|
||||
self._curls = [self._curl_create() for i in range(max_clients)]
|
||||
self._curls = [_curl_create() for i in range(max_clients)]
|
||||
self._free_list = self._curls[:]
|
||||
self._requests = collections.deque()
|
||||
self._fds = {}
|
||||
@@ -207,8 +211,8 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
|
||||
"callback": callback,
|
||||
"curl_start_time": time.time(),
|
||||
}
|
||||
self._curl_setup_request(curl, request, curl.info["buffer"],
|
||||
curl.info["headers"])
|
||||
_curl_setup_request(curl, request, curl.info["buffer"],
|
||||
curl.info["headers"])
|
||||
self._multi.add_handle(curl)
|
||||
|
||||
if not started:
|
||||
@@ -255,206 +259,6 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
|
||||
def handle_callback_exception(self, callback):
|
||||
self.io_loop.handle_callback_exception(callback)
|
||||
|
||||
def _curl_create(self):
|
||||
curl = pycurl.Curl()
|
||||
if gen_log.isEnabledFor(logging.DEBUG):
|
||||
curl.setopt(pycurl.VERBOSE, 1)
|
||||
curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
|
||||
return curl
|
||||
|
||||
def _curl_setup_request(self, curl, request, buffer, headers):
|
||||
curl.setopt(pycurl.URL, native_str(request.url))
|
||||
|
||||
# libcurl's magic "Expect: 100-continue" behavior causes delays
|
||||
# with servers that don't support it (which include, among others,
|
||||
# Google's OpenID endpoint). Additionally, this behavior has
|
||||
# a bug in conjunction with the curl_multi_socket_action API
|
||||
# (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
|
||||
# which increases the delays. It's more trouble than it's worth,
|
||||
# so just turn off the feature (yes, setting Expect: to an empty
|
||||
# value is the official way to disable this)
|
||||
if "Expect" not in request.headers:
|
||||
request.headers["Expect"] = ""
|
||||
|
||||
# libcurl adds Pragma: no-cache by default; disable that too
|
||||
if "Pragma" not in request.headers:
|
||||
request.headers["Pragma"] = ""
|
||||
|
||||
curl.setopt(pycurl.HTTPHEADER,
|
||||
["%s: %s" % (native_str(k), native_str(v))
|
||||
for k, v in request.headers.get_all()])
|
||||
|
||||
curl.setopt(pycurl.HEADERFUNCTION,
|
||||
functools.partial(self._curl_header_callback,
|
||||
headers, request.header_callback))
|
||||
if request.streaming_callback:
|
||||
write_function = lambda chunk: self.io_loop.add_callback(
|
||||
request.streaming_callback, chunk)
|
||||
else:
|
||||
write_function = buffer.write
|
||||
if bytes is str: # py2
|
||||
curl.setopt(pycurl.WRITEFUNCTION, write_function)
|
||||
else: # py3
|
||||
# Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
|
||||
# a fork/port. That version has a bug in which it passes unicode
|
||||
# strings instead of bytes to the WRITEFUNCTION. This means that
|
||||
# if you use a WRITEFUNCTION (which tornado always does), you cannot
|
||||
# download arbitrary binary data. This needs to be fixed in the
|
||||
# ported pycurl package, but in the meantime this lambda will
|
||||
# make it work for downloading (utf8) text.
|
||||
curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
|
||||
curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
|
||||
curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
|
||||
curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
|
||||
curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
|
||||
if request.user_agent:
|
||||
curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
|
||||
else:
|
||||
curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
|
||||
if request.network_interface:
|
||||
curl.setopt(pycurl.INTERFACE, request.network_interface)
|
||||
if request.decompress_response:
|
||||
curl.setopt(pycurl.ENCODING, "gzip,deflate")
|
||||
else:
|
||||
curl.setopt(pycurl.ENCODING, "none")
|
||||
if request.proxy_host and request.proxy_port:
|
||||
curl.setopt(pycurl.PROXY, request.proxy_host)
|
||||
curl.setopt(pycurl.PROXYPORT, request.proxy_port)
|
||||
if request.proxy_username:
|
||||
credentials = '%s:%s' % (request.proxy_username,
|
||||
request.proxy_password)
|
||||
curl.setopt(pycurl.PROXYUSERPWD, credentials)
|
||||
else:
|
||||
curl.setopt(pycurl.PROXY, '')
|
||||
curl.unsetopt(pycurl.PROXYUSERPWD)
|
||||
if request.validate_cert:
|
||||
curl.setopt(pycurl.SSL_VERIFYPEER, 1)
|
||||
curl.setopt(pycurl.SSL_VERIFYHOST, 2)
|
||||
else:
|
||||
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
|
||||
curl.setopt(pycurl.SSL_VERIFYHOST, 0)
|
||||
if request.ca_certs is not None:
|
||||
curl.setopt(pycurl.CAINFO, request.ca_certs)
|
||||
else:
|
||||
# There is no way to restore pycurl.CAINFO to its default value
|
||||
# (Using unsetopt makes it reject all certificates).
|
||||
# I don't see any way to read the default value from python so it
|
||||
# can be restored later. We'll have to just leave CAINFO untouched
|
||||
# if no ca_certs file was specified, and require that if any
|
||||
# request uses a custom ca_certs file, they all must.
|
||||
pass
|
||||
|
||||
if request.allow_ipv6 is False:
|
||||
# Curl behaves reasonably when DNS resolution gives an ipv6 address
|
||||
# that we can't reach, so allow ipv6 unless the user asks to disable.
|
||||
curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
|
||||
else:
|
||||
curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
|
||||
|
||||
# Set the request method through curl's irritating interface which makes
|
||||
# up names for almost every single method
|
||||
curl_options = {
|
||||
"GET": pycurl.HTTPGET,
|
||||
"POST": pycurl.POST,
|
||||
"PUT": pycurl.UPLOAD,
|
||||
"HEAD": pycurl.NOBODY,
|
||||
}
|
||||
custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
|
||||
for o in curl_options.values():
|
||||
curl.setopt(o, False)
|
||||
if request.method in curl_options:
|
||||
curl.unsetopt(pycurl.CUSTOMREQUEST)
|
||||
curl.setopt(curl_options[request.method], True)
|
||||
elif request.allow_nonstandard_methods or request.method in custom_methods:
|
||||
curl.setopt(pycurl.CUSTOMREQUEST, request.method)
|
||||
else:
|
||||
raise KeyError('unknown method ' + request.method)
|
||||
|
||||
# Handle curl's cryptic options for every individual HTTP method
|
||||
if request.method == "GET":
|
||||
if request.body is not None:
|
||||
raise ValueError('Body must be None for GET request')
|
||||
elif request.method in ("POST", "PUT") or request.body:
|
||||
if request.body is None:
|
||||
raise ValueError(
|
||||
'Body must not be None for "%s" request'
|
||||
% request.method)
|
||||
|
||||
request_buffer = BytesIO(utf8(request.body))
|
||||
def ioctl(cmd):
|
||||
if cmd == curl.IOCMD_RESTARTREAD:
|
||||
request_buffer.seek(0)
|
||||
curl.setopt(pycurl.READFUNCTION, request_buffer.read)
|
||||
curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
|
||||
if request.method == "POST":
|
||||
curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
|
||||
else:
|
||||
curl.setopt(pycurl.UPLOAD, True)
|
||||
curl.setopt(pycurl.INFILESIZE, len(request.body))
|
||||
|
||||
if request.auth_username is not None:
|
||||
userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
|
||||
|
||||
if request.auth_mode is None or request.auth_mode == "basic":
|
||||
curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
|
||||
elif request.auth_mode == "digest":
|
||||
curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
|
||||
else:
|
||||
raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
|
||||
|
||||
curl.setopt(pycurl.USERPWD, native_str(userpwd))
|
||||
gen_log.debug("%s %s (username: %r)", request.method, request.url,
|
||||
request.auth_username)
|
||||
else:
|
||||
curl.unsetopt(pycurl.USERPWD)
|
||||
gen_log.debug("%s %s", request.method, request.url)
|
||||
|
||||
if request.client_cert is not None:
|
||||
curl.setopt(pycurl.SSLCERT, request.client_cert)
|
||||
|
||||
if request.client_key is not None:
|
||||
curl.setopt(pycurl.SSLKEY, request.client_key)
|
||||
|
||||
if threading.activeCount() > 1:
|
||||
# libcurl/pycurl is not thread-safe by default. When multiple threads
|
||||
# are used, signals should be disabled. This has the side effect
|
||||
# of disabling DNS timeouts in some environments (when libcurl is
|
||||
# not linked against ares), so we don't do it when there is only one
|
||||
# thread. Applications that use many short-lived threads may need
|
||||
# to set NOSIGNAL manually in a prepare_curl_callback since
|
||||
# there may not be any other threads running at the time we call
|
||||
# threading.activeCount.
|
||||
curl.setopt(pycurl.NOSIGNAL, 1)
|
||||
if request.prepare_curl_callback is not None:
|
||||
request.prepare_curl_callback(curl)
|
||||
|
||||
def _curl_header_callback(self, headers, header_callback, header_line):
|
||||
header_line = native_str(header_line)
|
||||
if header_callback is not None:
|
||||
self.io_loop.add_callback(header_callback, header_line)
|
||||
# header_line as returned by curl includes the end-of-line characters.
|
||||
header_line = header_line.strip()
|
||||
if header_line.startswith("HTTP/"):
|
||||
headers.clear()
|
||||
try:
|
||||
(__, __, reason) = httputil.parse_response_start_line(header_line)
|
||||
header_line = "X-Http-Reason: %s" % reason
|
||||
except httputil.HTTPInputError:
|
||||
return
|
||||
if not header_line:
|
||||
return
|
||||
headers.parse_line(header_line)
|
||||
|
||||
def _curl_debug(self, debug_type, debug_msg):
|
||||
debug_types = ('I', '<', '>', '<', '>')
|
||||
if debug_type == 0:
|
||||
gen_log.debug('%s', debug_msg.strip())
|
||||
elif debug_type in (1, 2):
|
||||
for line in debug_msg.splitlines():
|
||||
gen_log.debug('%s %s', debug_types[debug_type], line)
|
||||
elif debug_type == 4:
|
||||
gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
|
||||
|
||||
|
||||
class CurlError(HTTPError):
|
||||
def __init__(self, errno, message):
|
||||
@@ -462,6 +266,212 @@ class CurlError(HTTPError):
|
||||
self.errno = errno
|
||||
|
||||
|
||||
def _curl_create():
    """Create a pycurl.Curl handle; when DEBUG logging is enabled, turn on
    curl's verbose output and route it through _curl_debug."""
    curl = pycurl.Curl()
    if gen_log.isEnabledFor(logging.DEBUG):
        curl.setopt(pycurl.VERBOSE, 1)
        curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)
    return curl
|
||||
|
||||
|
||||
def _curl_setup_request(curl, request, buffer, headers):
    """Configure the (reused) pycurl handle *curl* to perform *request*.

    The response body is written into *buffer* unless the request supplies a
    streaming_callback; response header lines are accumulated into *headers*
    via _curl_header_callback unless the request has its own header_callback.
    Raises AssertionError for body/method mismatches, KeyError for unknown
    methods, and ValueError for unsupported auth modes.
    """
    curl.setopt(pycurl.URL, native_str(request.url))

    # libcurl's magic "Expect: 100-continue" behavior causes delays
    # with servers that don't support it (which include, among others,
    # Google's OpenID endpoint). Additionally, this behavior has
    # a bug in conjunction with the curl_multi_socket_action API
    # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
    # which increases the delays. It's more trouble than it's worth,
    # so just turn off the feature (yes, setting Expect: to an empty
    # value is the official way to disable this)
    if "Expect" not in request.headers:
        request.headers["Expect"] = ""

    # libcurl adds Pragma: no-cache by default; disable that too
    if "Pragma" not in request.headers:
        request.headers["Pragma"] = ""

    # Request headers may be either a regular dict or HTTPHeaders object
    if isinstance(request.headers, httputil.HTTPHeaders):
        curl.setopt(pycurl.HTTPHEADER,
                    [native_str("%s: %s" % i) for i in request.headers.get_all()])
    else:
        curl.setopt(pycurl.HTTPHEADER,
                    [native_str("%s: %s" % i) for i in request.headers.items()])

    # Route raw header lines either to the user's callback or into *headers*.
    if request.header_callback:
        curl.setopt(pycurl.HEADERFUNCTION,
                    lambda line: request.header_callback(native_str(line)))
    else:
        curl.setopt(pycurl.HEADERFUNCTION,
                    lambda line: _curl_header_callback(headers,
                                                       native_str(line)))
    if request.streaming_callback:
        write_function = request.streaming_callback
    else:
        write_function = buffer.write
    if bytes_type is str:  # py2
        curl.setopt(pycurl.WRITEFUNCTION, write_function)
    else:  # py3
        # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
        # a fork/port. That version has a bug in which it passes unicode
        # strings instead of bytes to the WRITEFUNCTION. This means that
        # if you use a WRITEFUNCTION (which tornado always does), you cannot
        # download arbitrary binary data. This needs to be fixed in the
        # ported pycurl package, but in the meantime this lambda will
        # make it work for downloading (utf8) text.
        curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
    curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
    curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
    # Request timeouts are in seconds; curl's *_MS options want milliseconds.
    curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
    curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
    if request.user_agent:
        curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
    else:
        curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
    if request.network_interface:
        curl.setopt(pycurl.INTERFACE, request.network_interface)
    if request.decompress_response:
        curl.setopt(pycurl.ENCODING, "gzip,deflate")
    else:
        curl.setopt(pycurl.ENCODING, "none")
    if request.proxy_host and request.proxy_port:
        curl.setopt(pycurl.PROXY, request.proxy_host)
        curl.setopt(pycurl.PROXYPORT, request.proxy_port)
        if request.proxy_username:
            credentials = '%s:%s' % (request.proxy_username,
                                     request.proxy_password)
            curl.setopt(pycurl.PROXYUSERPWD, credentials)
    else:
        # Clear any proxy settings left over from a previous request on
        # this reused handle.
        curl.setopt(pycurl.PROXY, '')
        curl.unsetopt(pycurl.PROXYUSERPWD)
    if request.validate_cert:
        curl.setopt(pycurl.SSL_VERIFYPEER, 1)
        curl.setopt(pycurl.SSL_VERIFYHOST, 2)
    else:
        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
        curl.setopt(pycurl.SSL_VERIFYHOST, 0)
    if request.ca_certs is not None:
        curl.setopt(pycurl.CAINFO, request.ca_certs)
    else:
        # There is no way to restore pycurl.CAINFO to its default value
        # (Using unsetopt makes it reject all certificates).
        # I don't see any way to read the default value from python so it
        # can be restored later. We'll have to just leave CAINFO untouched
        # if no ca_certs file was specified, and require that if any
        # request uses a custom ca_certs file, they all must.
        pass

    if request.allow_ipv6 is False:
        # Curl behaves reasonably when DNS resolution gives an ipv6 address
        # that we can't reach, so allow ipv6 unless the user asks to disable.
        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
    else:
        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)

    # Set the request method through curl's irritating interface which makes
    # up names for almost every single method
    curl_options = {
        "GET": pycurl.HTTPGET,
        "POST": pycurl.POST,
        "PUT": pycurl.UPLOAD,
        "HEAD": pycurl.NOBODY,
    }
    custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
    # Reset every method flag first, since this handle is reused.
    for o in curl_options.values():
        curl.setopt(o, False)
    if request.method in curl_options:
        curl.unsetopt(pycurl.CUSTOMREQUEST)
        curl.setopt(curl_options[request.method], True)
    elif request.allow_nonstandard_methods or request.method in custom_methods:
        curl.setopt(pycurl.CUSTOMREQUEST, request.method)
    else:
        raise KeyError('unknown method ' + request.method)

    # Handle curl's cryptic options for every individual HTTP method
    if request.method in ("POST", "PUT"):
        if request.body is None:
            raise AssertionError(
                'Body must not be empty for "%s" request'
                % request.method)

        request_buffer = BytesIO(utf8(request.body))
        curl.setopt(pycurl.READFUNCTION, request_buffer.read)
        if request.method == "POST":
            def ioctl(cmd):
                # Rewind the body buffer so curl can re-send it
                # (e.g. after a redirect).
                if cmd == curl.IOCMD_RESTARTREAD:
                    request_buffer.seek(0)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
        else:
            curl.setopt(pycurl.INFILESIZE, len(request.body))
    elif request.method == "GET":
        if request.body is not None:
            raise AssertionError('Body must be empty for GET request')

    if request.auth_username is not None:
        userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')

        if request.auth_mode is None or request.auth_mode == "basic":
            curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
        elif request.auth_mode == "digest":
            curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
        else:
            raise ValueError("Unsupported auth_mode %s" % request.auth_mode)

        curl.setopt(pycurl.USERPWD, native_str(userpwd))
        gen_log.debug("%s %s (username: %r)", request.method, request.url,
                      request.auth_username)
    else:
        # Clear credentials left over from a previous request on this handle.
        curl.unsetopt(pycurl.USERPWD)
        gen_log.debug("%s %s", request.method, request.url)

    if request.client_cert is not None:
        curl.setopt(pycurl.SSLCERT, request.client_cert)

    if request.client_key is not None:
        curl.setopt(pycurl.SSLKEY, request.client_key)

    if threading.activeCount() > 1:
        # libcurl/pycurl is not thread-safe by default. When multiple threads
        # are used, signals should be disabled. This has the side effect
        # of disabling DNS timeouts in some environments (when libcurl is
        # not linked against ares), so we don't do it when there is only one
        # thread. Applications that use many short-lived threads may need
        # to set NOSIGNAL manually in a prepare_curl_callback since
        # there may not be any other threads running at the time we call
        # threading.activeCount.
        curl.setopt(pycurl.NOSIGNAL, 1)
    if request.prepare_curl_callback is not None:
        request.prepare_curl_callback(curl)
|
||||
|
||||
|
||||
def _curl_header_callback(headers, header_line):
    """Parse one raw header line from curl into *headers*.

    A new "HTTP/..." status line clears the collection (e.g. the headers of
    an intermediate redirect response); its reason phrase is preserved as a
    synthetic X-Http-Reason header.
    """
    # header_line as returned by curl includes the end-of-line characters.
    header_line = header_line.strip()
    if header_line.startswith("HTTP/"):
        headers.clear()
        try:
            (__, __, reason) = httputil.parse_response_start_line(header_line)
            header_line = "X-Http-Reason: %s" % reason
        except httputil.HTTPInputError:
            # Malformed status line: ignore it entirely.
            return
    if not header_line:
        # Blank line separating headers from body; nothing to record.
        return
    headers.parse_line(header_line)
|
||||
|
||||
|
||||
def _curl_debug(debug_type, debug_msg):
    """pycurl DEBUGFUNCTION: forward curl's debug output to gen_log.

    NOTE(review): the numeric codes appear to follow libcurl's curl_infotype
    (0 = informational text, 1/2 = header in/out, 4 = data) — confirm against
    the pycurl documentation.
    """
    debug_types = ('I', '<', '>', '<', '>')
    if debug_type == 0:
        gen_log.debug('%s', debug_msg.strip())
    elif debug_type in (1, 2):
        for line in debug_msg.splitlines():
            gen_log.debug('%s %s', debug_types[debug_type], line)
    elif debug_type == 4:
        # Possibly-binary payload: log with %r to keep it printable.
        gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
|
||||
|
||||
# Script entry point: running this module directly uses the curl-based client
# for tornado.httpclient's command-line `main()` helper.
if __name__ == "__main__":
    AsyncHTTPClient.configure(CurlAsyncHTTPClient)
    main()
|
||||
|
||||
@@ -25,7 +25,7 @@ from __future__ import absolute_import, division, print_function, with_statement
|
||||
import re
|
||||
import sys
|
||||
|
||||
from tornado.util import unicode_type, basestring_type, u
|
||||
from tornado.util import bytes_type, unicode_type, basestring_type, u
|
||||
|
||||
try:
|
||||
from urllib.parse import parse_qs as _parse_qs # py3
|
||||
@@ -187,7 +187,7 @@ else:
|
||||
return encoded
|
||||
|
||||
|
||||
_UTF8_TYPES = (bytes, type(None))
|
||||
_UTF8_TYPES = (bytes_type, type(None))
|
||||
|
||||
|
||||
def utf8(value):
|
||||
@@ -215,7 +215,7 @@ def to_unicode(value):
|
||||
"""
|
||||
if isinstance(value, _TO_UNICODE_TYPES):
|
||||
return value
|
||||
if not isinstance(value, bytes):
|
||||
if not isinstance(value, bytes_type):
|
||||
raise TypeError(
|
||||
"Expected bytes, unicode, or None; got %r" % type(value)
|
||||
)
|
||||
@@ -246,7 +246,7 @@ def to_basestring(value):
|
||||
"""
|
||||
if isinstance(value, _BASESTRING_TYPES):
|
||||
return value
|
||||
if not isinstance(value, bytes):
|
||||
if not isinstance(value, bytes_type):
|
||||
raise TypeError(
|
||||
"Expected bytes, unicode, or None; got %r" % type(value)
|
||||
)
|
||||
@@ -264,7 +264,7 @@ def recursive_unicode(obj):
|
||||
return list(recursive_unicode(i) for i in obj)
|
||||
elif isinstance(obj, tuple):
|
||||
return tuple(recursive_unicode(i) for i in obj)
|
||||
elif isinstance(obj, bytes):
|
||||
elif isinstance(obj, bytes_type):
|
||||
return to_unicode(obj)
|
||||
else:
|
||||
return obj
|
||||
|
||||
@@ -109,10 +109,7 @@ def engine(func):
|
||||
raise ReturnValueIgnoredError(
|
||||
"@gen.engine functions cannot return values: %r" %
|
||||
(future.result(),))
|
||||
# The engine interface doesn't give us any way to return
|
||||
# errors but to raise them into the stack context.
|
||||
# Save the stack context here to use when the Future has resolved.
|
||||
future.add_done_callback(stack_context.wrap(final_callback))
|
||||
future.add_done_callback(final_callback)
|
||||
return wrapper
|
||||
|
||||
|
||||
@@ -139,17 +136,6 @@ def coroutine(func, replace_callback=True):
|
||||
|
||||
From the caller's perspective, ``@gen.coroutine`` is similar to
|
||||
the combination of ``@return_future`` and ``@gen.engine``.
|
||||
|
||||
.. warning::
|
||||
|
||||
When exceptions occur inside a coroutine, the exception
|
||||
information will be stored in the `.Future` object. You must
|
||||
examine the result of the `.Future` object, or the exception
|
||||
may go unnoticed by your code. This means yielding the function
|
||||
if called from another coroutine, using something like
|
||||
`.IOLoop.run_sync` for top-level calls, or passing the `.Future`
|
||||
to `.IOLoop.add_future`.
|
||||
|
||||
"""
|
||||
return _make_coroutine_wrapper(func, replace_callback=True)
|
||||
|
||||
@@ -199,18 +185,7 @@ def _make_coroutine_wrapper(func, replace_callback):
|
||||
future.set_exc_info(sys.exc_info())
|
||||
else:
|
||||
Runner(result, future, yielded)
|
||||
try:
|
||||
return future
|
||||
finally:
|
||||
# Subtle memory optimization: if next() raised an exception,
|
||||
# the future's exc_info contains a traceback which
|
||||
# includes this stack frame. This creates a cycle,
|
||||
# which will be collected at the next full GC but has
|
||||
# been shown to greatly increase memory usage of
|
||||
# benchmarks (relative to the refcount-based scheme
|
||||
# used in the absence of cycles). We can avoid the
|
||||
# cycle by clearing the local variable after we return it.
|
||||
future = None
|
||||
return future
|
||||
future.set_result(result)
|
||||
return future
|
||||
return wrapper
|
||||
|
||||
@@ -306,8 +306,6 @@ class HTTP1Connection(httputil.HTTPConnection):
|
||||
self._clear_callbacks()
|
||||
stream = self.stream
|
||||
self.stream = None
|
||||
if not self._finish_future.done():
|
||||
self._finish_future.set_result(None)
|
||||
return stream
|
||||
|
||||
def set_body_timeout(self, timeout):
|
||||
@@ -469,7 +467,6 @@ class HTTP1Connection(httputil.HTTPConnection):
|
||||
if start_line.version == "HTTP/1.1":
|
||||
return connection_header != "close"
|
||||
elif ("Content-Length" in headers
|
||||
or headers.get("Transfer-Encoding", "").lower() == "chunked"
|
||||
or start_line.method in ("HEAD", "GET")):
|
||||
return connection_header == "keep-alive"
|
||||
return False
|
||||
@@ -486,11 +483,7 @@ class HTTP1Connection(httputil.HTTPConnection):
|
||||
self._finish_future.set_result(None)
|
||||
|
||||
def _parse_headers(self, data):
|
||||
# The lstrip removes newlines that some implementations sometimes
|
||||
# insert between messages of a reused connection. Per RFC 7230,
|
||||
# we SHOULD ignore at least one empty line before the request.
|
||||
# http://tools.ietf.org/html/rfc7230#section-3.5
|
||||
data = native_str(data.decode('latin1')).lstrip("\r\n")
|
||||
data = native_str(data.decode('latin1'))
|
||||
eol = data.find("\r\n")
|
||||
start_line = data[:eol]
|
||||
try:
|
||||
|
||||
@@ -63,12 +63,7 @@ class HTTPClient(object):
|
||||
response = http_client.fetch("http://www.google.com/")
|
||||
print response.body
|
||||
except httpclient.HTTPError as e:
|
||||
# HTTPError is raised for non-200 responses; the response
|
||||
# can be found in e.response.
|
||||
print("Error: " + str(e))
|
||||
except Exception as e:
|
||||
# Other errors are possible, such as IOError.
|
||||
print("Error: " + str(e))
|
||||
print "Error:", e
|
||||
http_client.close()
|
||||
"""
|
||||
def __init__(self, async_client_class=None, **kwargs):
|
||||
|
||||
@@ -50,13 +50,12 @@ class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate):
|
||||
|
||||
import tornado.httpserver
|
||||
import tornado.ioloop
|
||||
from tornado import httputil
|
||||
|
||||
def handle_request(request):
|
||||
message = "You requested %s\n" % request.uri
|
||||
request.connection.write_headers(
|
||||
httputil.ResponseStartLine('HTTP/1.1', 200, 'OK'),
|
||||
httputil.HTTPHeaders({"Content-Length": str(len(message))}))
|
||||
{"Content-Length": str(len(message))})
|
||||
request.connection.write(message)
|
||||
request.connection.finish()
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ import time
|
||||
|
||||
from tornado.escape import native_str, parse_qs_bytes, utf8
|
||||
from tornado.log import gen_log
|
||||
from tornado.util import ObjectDict
|
||||
from tornado.util import ObjectDict, bytes_type
|
||||
|
||||
try:
|
||||
import Cookie # py2
|
||||
@@ -335,7 +335,7 @@ class HTTPServerRequest(object):
|
||||
|
||||
# set remote IP and protocol
|
||||
context = getattr(connection, 'context', None)
|
||||
self.remote_ip = getattr(context, 'remote_ip', None)
|
||||
self.remote_ip = getattr(context, 'remote_ip')
|
||||
self.protocol = getattr(context, 'protocol', "http")
|
||||
|
||||
self.host = host or self.headers.get("Host") or "127.0.0.1"
|
||||
@@ -379,7 +379,7 @@ class HTTPServerRequest(object):
|
||||
Use ``request.connection`` and the `.HTTPConnection` methods
|
||||
to write the response.
|
||||
"""
|
||||
assert isinstance(chunk, bytes)
|
||||
assert isinstance(chunk, bytes_type)
|
||||
self.connection.write(chunk, callback=callback)
|
||||
|
||||
def finish(self):
|
||||
@@ -562,18 +562,11 @@ class HTTPConnection(object):
|
||||
|
||||
|
||||
def url_concat(url, args):
|
||||
"""Concatenate url and arguments regardless of whether
|
||||
"""Concatenate url and argument dictionary regardless of whether
|
||||
url has existing query parameters.
|
||||
|
||||
``args`` may be either a dictionary or a list of key-value pairs
|
||||
(the latter allows for multiple values with the same key.
|
||||
|
||||
>>> url_concat("http://example.com/foo", dict(c="d"))
|
||||
'http://example.com/foo?c=d'
|
||||
>>> url_concat("http://example.com/foo?a=b", dict(c="d"))
|
||||
'http://example.com/foo?a=b&c=d'
|
||||
>>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")])
|
||||
'http://example.com/foo?a=b&c=d&c=d2'
|
||||
"""
|
||||
if not args:
|
||||
return url
|
||||
@@ -810,8 +803,6 @@ def parse_response_start_line(line):
|
||||
# _parseparam and _parse_header are copied and modified from python2.7's cgi.py
|
||||
# The original 2.7 version of this code did not correctly support some
|
||||
# combinations of semicolons and double quotes.
|
||||
# It has also been modified to support valueless parameters as seen in
|
||||
# websocket extension negotiations.
|
||||
|
||||
|
||||
def _parseparam(s):
|
||||
@@ -845,31 +836,9 @@ def _parse_header(line):
|
||||
value = value[1:-1]
|
||||
value = value.replace('\\\\', '\\').replace('\\"', '"')
|
||||
pdict[name] = value
|
||||
else:
|
||||
pdict[p] = None
|
||||
return key, pdict
|
||||
|
||||
|
||||
def _encode_header(key, pdict):
|
||||
"""Inverse of _parse_header.
|
||||
|
||||
>>> _encode_header('permessage-deflate',
|
||||
... {'client_max_window_bits': 15, 'client_no_context_takeover': None})
|
||||
'permessage-deflate; client_max_window_bits=15; client_no_context_takeover'
|
||||
"""
|
||||
if not pdict:
|
||||
return key
|
||||
out = [key]
|
||||
# Sort the parameters just to make it easy to test.
|
||||
for k, v in sorted(pdict.items()):
|
||||
if v is None:
|
||||
out.append(k)
|
||||
else:
|
||||
# TODO: quote if necessary.
|
||||
out.append('%s=%s' % (k, v))
|
||||
return '; '.join(out)
|
||||
|
||||
|
||||
def doctests():
|
||||
import doctest
|
||||
return doctest.DocTestSuite()
|
||||
|
||||
@@ -197,7 +197,7 @@ class IOLoop(Configurable):
|
||||
|
||||
An `IOLoop` automatically becomes current for its thread
|
||||
when it is started, but it is sometimes useful to call
|
||||
`make_current` explicitly before starting the `IOLoop`,
|
||||
`make_current` explictly before starting the `IOLoop`,
|
||||
so that code run at startup time can find the right
|
||||
instance.
|
||||
"""
|
||||
@@ -724,7 +724,7 @@ class PollIOLoop(IOLoop):
|
||||
#
|
||||
# If someone has already set a wakeup fd, we don't want to
|
||||
# disturb it. This is an issue for twisted, which does its
|
||||
# SIGCHLD processing in response to its own wakeup fd being
|
||||
# SIGCHILD processing in response to its own wakeup fd being
|
||||
# written to. As long as the wakeup fd is registered on the IOLoop,
|
||||
# the loop will still wake up and everything should work.
|
||||
old_wakeup_fd = None
|
||||
@@ -754,18 +754,17 @@ class PollIOLoop(IOLoop):
|
||||
# Do not run anything until we have determined which ones
|
||||
# are ready, so timeouts that call add_timeout cannot
|
||||
# schedule anything in this iteration.
|
||||
due_timeouts = []
|
||||
if self._timeouts:
|
||||
now = self.time()
|
||||
while self._timeouts:
|
||||
if self._timeouts[0].callback is None:
|
||||
# The timeout was cancelled. Note that the
|
||||
# cancellation check is repeated below for timeouts
|
||||
# that are cancelled by another timeout or callback.
|
||||
# the timeout was cancelled
|
||||
heapq.heappop(self._timeouts)
|
||||
self._cancellations -= 1
|
||||
elif self._timeouts[0].deadline <= now:
|
||||
due_timeouts.append(heapq.heappop(self._timeouts))
|
||||
timeout = heapq.heappop(self._timeouts)
|
||||
callbacks.append(timeout.callback)
|
||||
del timeout
|
||||
else:
|
||||
break
|
||||
if (self._cancellations > 512
|
||||
@@ -779,12 +778,9 @@ class PollIOLoop(IOLoop):
|
||||
|
||||
for callback in callbacks:
|
||||
self._run_callback(callback)
|
||||
for timeout in due_timeouts:
|
||||
if timeout.callback is not None:
|
||||
self._run_callback(timeout.callback)
|
||||
# Closures may be holding on to a lot of memory, so allow
|
||||
# them to be freed before we go into our poll wait.
|
||||
callbacks = callback = due_timeouts = timeout = None
|
||||
callbacks = callback = None
|
||||
|
||||
if self._callbacks:
|
||||
# If any callbacks or timeouts called add_callback,
|
||||
|
||||
@@ -39,7 +39,7 @@ from tornado import ioloop
|
||||
from tornado.log import gen_log, app_log
|
||||
from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError
|
||||
from tornado import stack_context
|
||||
from tornado.util import errno_from_exception
|
||||
from tornado.util import bytes_type, errno_from_exception
|
||||
|
||||
try:
|
||||
from tornado.platform.posix import _set_nonblocking
|
||||
@@ -324,7 +324,7 @@ class BaseIOStream(object):
|
||||
.. versionchanged:: 4.0
|
||||
Now returns a `.Future` if no callback is given.
|
||||
"""
|
||||
assert isinstance(data, bytes)
|
||||
assert isinstance(data, bytes_type)
|
||||
self._check_closed()
|
||||
# We use bool(_write_buffer) as a proxy for write_buffer_size>0,
|
||||
# so never put empty strings in the buffer.
|
||||
@@ -554,7 +554,7 @@ class BaseIOStream(object):
|
||||
# Pretend to have a pending callback so that an EOF in
|
||||
# _read_to_buffer doesn't trigger an immediate close
|
||||
# callback. At the end of this method we'll either
|
||||
# establish a real pending callback via
|
||||
# estabilsh a real pending callback via
|
||||
# _read_from_buffer or run the close callback.
|
||||
#
|
||||
# We need two try statements here so that
|
||||
@@ -993,11 +993,6 @@ class IOStream(BaseIOStream):
|
||||
|
||||
"""
|
||||
self._connecting = True
|
||||
if callback is not None:
|
||||
self._connect_callback = stack_context.wrap(callback)
|
||||
future = None
|
||||
else:
|
||||
future = self._connect_future = TracebackFuture()
|
||||
try:
|
||||
self.socket.connect(address)
|
||||
except socket.error as e:
|
||||
@@ -1013,7 +1008,12 @@ class IOStream(BaseIOStream):
|
||||
gen_log.warning("Connect error on fd %s: %s",
|
||||
self.socket.fileno(), e)
|
||||
self.close(exc_info=True)
|
||||
return future
|
||||
return
|
||||
if callback is not None:
|
||||
self._connect_callback = stack_context.wrap(callback)
|
||||
future = None
|
||||
else:
|
||||
future = self._connect_future = TracebackFuture()
|
||||
self._add_io_state(self.io_loop.WRITE)
|
||||
return future
|
||||
|
||||
@@ -1185,14 +1185,8 @@ class SSLIOStream(IOStream):
|
||||
return self.close(exc_info=True)
|
||||
raise
|
||||
except socket.error as err:
|
||||
# Some port scans (e.g. nmap in -sT mode) have been known
|
||||
# to cause do_handshake to raise EBADF, so make that error
|
||||
# quiet as well.
|
||||
# https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0
|
||||
if (err.args[0] in _ERRNO_CONNRESET or
|
||||
err.args[0] == errno.EBADF):
|
||||
if err.args[0] in _ERRNO_CONNRESET:
|
||||
return self.close(exc_info=True)
|
||||
raise
|
||||
except AttributeError:
|
||||
# On Linux, if the connection was reset before the call to
|
||||
# wrap_socket, do_handshake will fail with an
|
||||
|
||||
@@ -35,11 +35,6 @@ except ImportError:
|
||||
# ssl is not available on Google App Engine
|
||||
ssl = None
|
||||
|
||||
try:
|
||||
xrange # py2
|
||||
except NameError:
|
||||
xrange = range # py3
|
||||
|
||||
if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+
|
||||
ssl_match_hostname = ssl.match_hostname
|
||||
SSLCertificateError = ssl.CertificateError
|
||||
@@ -65,11 +60,8 @@ _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
|
||||
if hasattr(errno, "WSAEWOULDBLOCK"):
|
||||
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,)
|
||||
|
||||
# Default backlog used when calling sock.listen()
|
||||
_DEFAULT_BACKLOG = 128
|
||||
|
||||
def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
|
||||
backlog=_DEFAULT_BACKLOG, flags=None):
|
||||
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
|
||||
"""Creates listening sockets bound to the given port and address.
|
||||
|
||||
Returns a list of socket objects (multiple sockets are returned if
|
||||
@@ -149,7 +141,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
|
||||
return sockets
|
||||
|
||||
if hasattr(socket, 'AF_UNIX'):
|
||||
def bind_unix_socket(file, mode=0o600, backlog=_DEFAULT_BACKLOG):
|
||||
def bind_unix_socket(file, mode=0o600, backlog=128):
|
||||
"""Creates a listening unix socket.
|
||||
|
||||
If a socket with the given name already exists, it will be deleted.
|
||||
@@ -192,18 +184,7 @@ def add_accept_handler(sock, callback, io_loop=None):
|
||||
io_loop = IOLoop.current()
|
||||
|
||||
def accept_handler(fd, events):
|
||||
# More connections may come in while we're handling callbacks;
|
||||
# to prevent starvation of other tasks we must limit the number
|
||||
# of connections we accept at a time. Ideally we would accept
|
||||
# up to the number of connections that were waiting when we
|
||||
# entered this method, but this information is not available
|
||||
# (and rearranging this method to call accept() as many times
|
||||
# as possible before running any callbacks would have adverse
|
||||
# effects on load balancing in multiprocess configurations).
|
||||
# Instead, we use the (default) listen backlog as a rough
|
||||
# heuristic for the number of connections we can reasonably
|
||||
# accept at once.
|
||||
for i in xrange(_DEFAULT_BACKLOG):
|
||||
while True:
|
||||
try:
|
||||
connection, address = sock.accept()
|
||||
except socket.error as e:
|
||||
|
||||
@@ -79,7 +79,7 @@ import sys
|
||||
import os
|
||||
import textwrap
|
||||
|
||||
from tornado.escape import _unicode, native_str
|
||||
from tornado.escape import _unicode
|
||||
from tornado.log import define_logging_options
|
||||
from tornado import stack_context
|
||||
from tornado.util import basestring_type, exec_in
|
||||
@@ -271,14 +271,10 @@ class OptionParser(object):
|
||||
If ``final`` is ``False``, parse callbacks will not be run.
|
||||
This is useful for applications that wish to combine configurations
|
||||
from multiple sources.
|
||||
|
||||
.. versionchanged:: 4.1
|
||||
Config files are now always interpreted as utf-8 instead of
|
||||
the system default encoding.
|
||||
"""
|
||||
config = {}
|
||||
with open(path, 'rb') as f:
|
||||
exec_in(native_str(f.read()), config, config)
|
||||
with open(path) as f:
|
||||
exec_in(f.read(), config, config)
|
||||
for name in config:
|
||||
if name in self._options:
|
||||
self._options[name].set(config[name])
|
||||
|
||||
@@ -10,10 +10,12 @@ unfinished callbacks on the event loop that fail when it resumes)
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
import datetime
|
||||
import functools
|
||||
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado import stack_context
|
||||
from tornado.util import timedelta_to_seconds
|
||||
|
||||
try:
|
||||
# Import the real asyncio module for py33+ first. Older versions of the
|
||||
|
||||
@@ -141,7 +141,7 @@ class TornadoDelayedCall(object):
|
||||
class TornadoReactor(PosixReactorBase):
|
||||
"""Twisted reactor built on the Tornado IOLoop.
|
||||
|
||||
Since it is intended to be used in applications where the top-level
|
||||
Since it is intented to be used in applications where the top-level
|
||||
event loop is ``io_loop.start()`` rather than ``reactor.run()``,
|
||||
it is implemented a little differently than other Twisted reactors.
|
||||
We override `mainLoop` instead of `doIteration` and must implement
|
||||
|
||||
@@ -39,7 +39,7 @@ from tornado.util import errno_from_exception
|
||||
try:
|
||||
import multiprocessing
|
||||
except ImportError:
|
||||
# Multiprocessing is not available on Google App Engine.
|
||||
# Multiprocessing is not availble on Google App Engine.
|
||||
multiprocessing = None
|
||||
|
||||
try:
|
||||
@@ -240,7 +240,7 @@ class Subprocess(object):
|
||||
|
||||
The callback takes one argument, the return code of the process.
|
||||
|
||||
This method uses a ``SIGCHLD`` handler, which is a global setting
|
||||
This method uses a ``SIGCHILD`` handler, which is a global setting
|
||||
and may conflict if you have other libraries trying to handle the
|
||||
same signal. If you are using more than one ``IOLoop`` it may
|
||||
be necessary to call `Subprocess.initialize` first to designate
|
||||
@@ -257,7 +257,7 @@ class Subprocess(object):
|
||||
|
||||
@classmethod
|
||||
def initialize(cls, io_loop=None):
|
||||
"""Initializes the ``SIGCHLD`` handler.
|
||||
"""Initializes the ``SIGCHILD`` handler.
|
||||
|
||||
The signal handler is run on an `.IOLoop` to avoid locking issues.
|
||||
Note that the `.IOLoop` used for signal handling need not be the
|
||||
@@ -275,7 +275,7 @@ class Subprocess(object):
|
||||
|
||||
@classmethod
|
||||
def uninitialize(cls):
|
||||
"""Removes the ``SIGCHLD`` handler."""
|
||||
"""Removes the ``SIGCHILD`` handler."""
|
||||
if not cls._initialized:
|
||||
return
|
||||
signal.signal(signal.SIGCHLD, cls._old_sigchld)
|
||||
|
||||
@@ -19,8 +19,11 @@ import functools
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
from io import BytesIO
|
||||
|
||||
try:
|
||||
from io import BytesIO # python 3
|
||||
except ImportError:
|
||||
from cStringIO import StringIO as BytesIO # python 2
|
||||
|
||||
try:
|
||||
import urlparse # py2
|
||||
@@ -219,7 +222,6 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
|
||||
stack_context.wrap(self._on_timeout))
|
||||
self.tcp_client.connect(host, port, af=af,
|
||||
ssl_options=ssl_options,
|
||||
max_buffer_size=self.max_buffer_size,
|
||||
callback=self._on_connect)
|
||||
|
||||
def _get_ssl_options(self, scheme):
|
||||
@@ -314,18 +316,18 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
|
||||
if self.request.user_agent:
|
||||
self.request.headers["User-Agent"] = self.request.user_agent
|
||||
if not self.request.allow_nonstandard_methods:
|
||||
# Some HTTP methods nearly always have bodies while others
|
||||
# almost never do. Fail in this case unless the user has
|
||||
# opted out of sanity checks with allow_nonstandard_methods.
|
||||
body_expected = self.request.method in ("POST", "PATCH", "PUT")
|
||||
body_present = (self.request.body is not None or
|
||||
self.request.body_producer is not None)
|
||||
if ((body_expected and not body_present) or
|
||||
(body_present and not body_expected)):
|
||||
raise ValueError(
|
||||
'Body must %sbe None for method %s (unelss '
|
||||
'allow_nonstandard_methods is true)' %
|
||||
('not ' if body_expected else '', self.request.method))
|
||||
if self.request.method in ("POST", "PATCH", "PUT"):
|
||||
if (self.request.body is None and
|
||||
self.request.body_producer is None):
|
||||
raise AssertionError(
|
||||
'Body must not be empty for "%s" request'
|
||||
% self.request.method)
|
||||
else:
|
||||
if (self.request.body is not None or
|
||||
self.request.body_producer is not None):
|
||||
raise AssertionError(
|
||||
'Body must be empty for "%s" request'
|
||||
% self.request.method)
|
||||
if self.request.expect_100_continue:
|
||||
self.request.headers["Expect"] = "100-continue"
|
||||
if self.request.body is not None:
|
||||
|
||||
@@ -41,13 +41,13 @@ Example usage::
|
||||
sys.exit(1)
|
||||
|
||||
with StackContext(die_on_error):
|
||||
# Any exception thrown here *or in callback and its descendants*
|
||||
# Any exception thrown here *or in callback and its desendents*
|
||||
# will cause the process to exit instead of spinning endlessly
|
||||
# in the ioloop.
|
||||
http_client.fetch(url, callback)
|
||||
ioloop.start()
|
||||
|
||||
Most applications shouldn't have to work with `StackContext` directly.
|
||||
Most applications shouln't have to work with `StackContext` directly.
|
||||
Here are a few rules of thumb for when it's necessary:
|
||||
|
||||
* If you're writing an asynchronous library that doesn't rely on a
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user