Compare commits
132 Commits
py3...build/2.5.
| SHA1 |
|---|
| 0622e6e5ab |
| f16931906f |
| 68dcba8853 |
| ae8f66df1a |
| 5237ead5cb |
| c4aaa10308 |
| d10536a829 |
| 1e7fa82e11 |
| fbccba77a7 |
| d3efda74b2 |
| 66b849cb29 |
| b19f98ef5b |
| c389790cf2 |
| d7445dfa80 |
| 36782768a4 |
| 2c9d487614 |
| b9a724c8bb |
| 68d826ca1c |
| d6921882e1 |
| 2cfff73486 |
| 0c7dda8d44 |
| dbaa377770 |
| 47d2b81d1c |
| f79fcda27f |
| cdbcad2238 |
| 5d913e87c3 |
| 16f02bda27 |
| 8d108b92bf |
| 46783028b1 |
| d08c7c57a8 |
| eeeb845ef3 |
| 651a063f94 |
| f20aaa2d9d |
| ba925ec191 |
| 3b7376fd18 |
| c31b10c798 |
| acda664686 |
| e2852407ea |
| 88e738c6cd |
| eaae8bdb0b |
| 821f68909d |
| 2b8dfed475 |
| 607b5ea766 |
| 88579cd71a |
| 6c57316ce6 |
| 6702683da3 |
| 1ed58586a1 |
| f08ccd4fd8 |
| 312562a9f5 |
| 9e260a89af |
| d233e4d22e |
| 23893dbcb9 |
| 506871b506 |
| 6115917660 |
| 21df8819d3 |
| fb3f3e11f6 |
| 178c8942c3 |
| 51e747049d |
| 0582f7d694 |
| fa7cac7538 |
| 9a314cfbc4 |
| 5941d0bf77 |
| d326c1c25c |
| 96472a9a8f |
| 27252561e2 |
| c9e732651f |
| 7849e7170d |
| 087894eb4e |
| 25f1b8c7a7 |
| e71da1f14d |
| 938b14ba18 |
| d6522d8f38 |
| 78eab890e7 |
| 1a56191f83 |
| 41c0f34d95 |
| 37bf205d7a |
| aa1fa3eb9a |
| 0e2f8a612c |
| 465e7b2abc |
| 578fb45785 |
| 96995bbbe5 |
| 4cfdafebbc |
| b97acb8ef5 |
| d68d2dfdb6 |
| 39b269a454 |
| ac081d3e10 |
| 5d4efb60cf |
| cc408b980c |
| 59590b3ac9 |
| ff759dacf3 |
| a328e44130 |
| 7924cac5f9 |
| 1cef3b0c93 |
| 3cd59edc8b |
| 0d624af01d |
| a09132570c |
| ee3fc38432 |
| dbf0192c8e |
| 6962cfc3f5 |
| e096ec3b5b |
| b30a74ae0c |
| 978eeb16c9 |
| e5c9d91657 |
| fa81c3a07a |
| 9cdd520d41 |
| 55d7898771 |
| b8256bef97 |
| 5be9dc0b4a |
| 7d0be0cefb |
| f7ce1edb13 |
| 5ad9280b60 |
| 2b353f1b20 |
| 75ab90b87b |
| 0219296120 |
| 20032b3a31 |
| ea9e9a8c90 |
| f7b0ee145b |
| cc866738ee |
| eadccf6e33 |
| b70b66e567 |
| 5b6792dc20 |
| f498e7343a |
| 6962f441e6 |
| 1def62b1b1 |
| a4a4a6a185 |
| d4c9469c1a |
| 3e2d4c5d7b |
| d03f711d69 |
| 44dd8d9b96 |
| 549a3be0d8 |
| 1bb2edf8ec |
| 84c6f36315 |
CouchPotato.py
@@ -10,6 +10,7 @@ import socket
 import subprocess
 import sys
 import traceback
+import time

 # Root path
 base_path = dirname(os.path.abspath(__file__))
@@ -61,7 +62,7 @@ class Loader(object):
         self.log = CPLog(__name__)

         formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%H:%M:%S')
-        hdlr = handlers.RotatingFileHandler(os.path.join(self.log_dir, 'error.log'), 'a', 500000, 10, encoding = 'utf-8')
+        hdlr = handlers.RotatingFileHandler(os.path.join(self.log_dir, 'error.log'), 'a', 500000, 10)
         hdlr.setLevel(logging.CRITICAL)
         hdlr.setFormatter(formatter)
         self.log.logger.addHandler(hdlr)
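The only difference between the two handler lines above is the explicit encoding argument, which the py3 branch passes and build/2.5 drops. A minimal standalone sketch of the same rotating error-log setup (file name and rotation sizes taken from the hunk; the encoding keyword is supported by the stdlib file handlers on both interpreters):

import logging
from logging import handlers

log = logging.getLogger(__name__)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%H:%M:%S')

# Rotate at ~500 kB, keep 10 backups; passing encoding explicitly means
# non-ASCII log messages don't depend on the platform default encoding.
hdlr = handlers.RotatingFileHandler('error.log', 'a', 500000, 10, encoding = 'utf-8')
hdlr.setLevel(logging.CRITICAL)
hdlr.setFormatter(formatter)
log.addHandler(hdlr)

log.critical(u'only CRITICAL records reach error.log')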
232 Desktop.py Normal file
@@ -0,0 +1,232 @@
+from esky.util import appdir_from_executable #@UnresolvedImport
+from threading import Thread
+from version import VERSION
+from wx.lib.softwareupdate import SoftwareUpdate
+import os
+import sys
+import time
+import webbrowser
+import wx
+
+# Include proper dirs
+if hasattr(sys, 'frozen'):
+    import libs
+    base_path = os.path.dirname(os.path.dirname(os.path.abspath(libs.__file__)))
+else:
+    base_path = os.path.dirname(os.path.abspath(__file__))
+
+lib_dir = os.path.join(base_path, 'libs')
+
+sys.path.insert(0, base_path)
+sys.path.insert(0, lib_dir)
+
+from couchpotato.environment import Env
+
+
+class TaskBarIcon(wx.TaskBarIcon):
+
+    TBMENU_OPEN = wx.NewId()
+    TBMENU_SETTINGS = wx.NewId()
+    TBMENU_EXIT = wx.ID_EXIT
+
+    closed = False
+    menu = False
+    enabled = False
+
+    def __init__(self, frame):
+        wx.TaskBarIcon.__init__(self)
+        self.frame = frame
+
+        icon = wx.Icon('icon.png', wx.BITMAP_TYPE_PNG)
+        self.SetIcon(icon)
+
+        self.Bind(wx.EVT_TASKBAR_LEFT_UP, self.OnTaskBarClick)
+        self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.OnTaskBarClick)
+
+        self.Bind(wx.EVT_MENU, self.onOpen, id = self.TBMENU_OPEN)
+        self.Bind(wx.EVT_MENU, self.onSettings, id = self.TBMENU_SETTINGS)
+        self.Bind(wx.EVT_MENU, self.onTaskBarClose, id = self.TBMENU_EXIT)
+
+    def OnTaskBarClick(self, evt):
+        menu = self.CreatePopupMenu()
+        self.PopupMenu(menu)
+        menu.Destroy()
+
+    def enable(self):
+        self.enabled = True
+
+        if self.menu:
+            self.open_menu.Enable(True)
+            self.setting_menu.Enable(True)
+
+            self.open_menu.SetText('Open')
+
+    def CreatePopupMenu(self):
+
+        if not self.menu:
+            self.menu = wx.Menu()
+            self.open_menu = self.menu.Append(self.TBMENU_OPEN, 'Open')
+            self.setting_menu = self.menu.Append(self.TBMENU_SETTINGS, 'About')
+            self.exit_menu = self.menu.Append(self.TBMENU_EXIT, 'Quit')
+
+            if not self.enabled:
+                self.open_menu.Enable(False)
+                self.setting_menu.Enable(False)
+
+                self.open_menu.SetText('Loading...')
+
+        return self.menu
+
+    def onOpen(self, event):
+        url = self.frame.parent.getSetting('base_url')
+        webbrowser.open(url)
+
+    def onSettings(self, event):
+        url = self.frame.parent.getSetting('base_url') + 'settings/about/'
+        webbrowser.open(url)
+
+    def onTaskBarClose(self, evt):
+        if self.closed:
+            return
+
+        self.closed = True
+
+        self.RemoveIcon()
+        wx.CallAfter(self.frame.Close)
+
+    def makeIcon(self, img):
+        if "wxMSW" in wx.PlatformInfo:
+            img = img.Scale(16, 16)
+        elif "wxGTK" in wx.PlatformInfo:
+            img = img.Scale(22, 22)
+
+        icon = wx.IconFromBitmap(img.CopyFromBitmap())
+        return icon
+
+
+class MainFrame(wx.Frame):
+
+    def __init__(self, parent):
+        wx.Frame.__init__(self, None, style = wx.FRAME_NO_TASKBAR)
+
+        self.parent = parent
+        self.tbicon = TaskBarIcon(self)
+
+
+class WorkerThread(Thread):
+
+    def __init__(self, desktop):
+        Thread.__init__(self)
+        self.daemon = True
+        self._desktop = desktop
+
+        self.start()
+
+    def run(self):
+
+        # Get options via arg
+        from couchpotato.runner import getOptions
+        args = ['--quiet']
+        self.options = getOptions(args)
+
+        # Load settings
+        settings = Env.get('settings')
+        settings.setFile(self.options.config_file)
+
+        # Create data dir if needed
+        self.data_dir = os.path.expanduser(Env.setting('data_dir'))
+        if self.data_dir == '':
+            from couchpotato.core.helpers.variable import getDataDir
+            self.data_dir = getDataDir()
+
+        if not os.path.isdir(self.data_dir):
+            os.makedirs(self.data_dir)
+
+        # Create logging dir
+        self.log_dir = os.path.join(self.data_dir, 'logs')
+        if not os.path.isdir(self.log_dir):
+            os.mkdir(self.log_dir)
+
+        try:
+            from couchpotato.runner import runCouchPotato
+            runCouchPotato(self.options, base_path, args, data_dir = self.data_dir, log_dir = self.log_dir, Env = Env, desktop = self._desktop)
+        except:
+            pass
+
+        self._desktop.frame.Close()
+        self._desktop.ExitMainLoop()
+
+
+class CouchPotatoApp(wx.App, SoftwareUpdate):
+
+    settings = {}
+    events = {}
+    restart = False
+    closing = False
+
+    def OnInit(self):
+
+        # Updater
+        base_url = 'https://api.couchpota.to/updates/%s'
+        self.InitUpdates(base_url % VERSION + '/', 'https://couchpota.to/updates/%s' % 'changelog.html',
+                         icon = wx.Icon('icon.png'))
+
+        self.frame = MainFrame(self)
+        self.frame.Bind(wx.EVT_CLOSE, self.onClose)
+
+        # CouchPotato thread
+        self.worker = WorkerThread(self)
+
+        return True
+
+    def onAppLoad(self):
+        self.frame.tbicon.enable()
+
+    def setSettings(self, settings = {}):
+        self.settings = settings
+
+    def getSetting(self, name):
+        return self.settings.get(name)
+
+    def addEvents(self, events = {}):
+        for name in events.iterkeys():
+            self.events[name] = events[name]
+
+    def onClose(self, event):
+
+        if not self.closing:
+            self.closing = True
+            self.frame.tbicon.onTaskBarClose(event)
+
+            onClose = self.events.get('onClose')
+            onClose(event)
+
+    def afterShutdown(self, restart = False):
+        self.frame.Destroy()
+        self.restart = restart
+        self.ExitMainLoop()
+
+
+if __name__ == '__main__':
+
+    app = CouchPotatoApp(redirect = False)
+    app.MainLoop()
+
+    time.sleep(1)
+
+    if app.restart:
+
+        def appexe_from_executable(exepath):
+            appdir = appdir_from_executable(exepath)
+            exename = os.path.basename(exepath)
+
+            if sys.platform == "darwin":
+                if os.path.isdir(os.path.join(appdir, "Contents", "MacOS")):
+                    return os.path.join(appdir, "Contents", "MacOS", exename)
+
+            return os.path.join(appdir, exename)
+
+        exe = appexe_from_executable(sys.executable)
+        os.chdir(os.path.dirname(exe))
+
+        os.execv(exe, [exe] + sys.argv[1:])
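The restart path at the bottom of Desktop.py replaces the running process with a fresh copy of the frozen executable. A stripped-down sketch of that os.execv relaunch pattern (everything here is stdlib; in the real file the executable path is resolved through esky's appdir_from_executable first):

import os
import sys

def relaunch():
    exe = sys.executable
    # chdir first so relative resources (e.g. icon.png) still resolve
    # after the restart.
    os.chdir(os.path.dirname(exe))
    # Replace the current process image; execv never returns on success,
    # so any cleanup must happen before this call.
    os.execv(exe, [exe] + sys.argv[1:])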
20 README.md
@@ -29,25 +29,19 @@ OS X:
 * Then do `python CouchPotatoServer/CouchPotato.py`
 * Your browser should open up, but if it doesn't go to `http://localhost:5050/`

-Linux:
+Linux (Ubuntu / Debian):

-* (Ubuntu / Debian) Install [GIT](http://git-scm.com/) with `apt-get install git-core`
-* (Fedora / CentOS) Install [GIT](http://git-scm.com/) with `yum install git`
+* Install [GIT](http://git-scm.com/) with `apt-get install git-core`
 * 'cd' to the folder of your choosing.
 * Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
 * Then do `python CouchPotatoServer/CouchPotato.py` to start
-* (Ubuntu / Debian) To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
-* (Ubuntu / Debian) Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato`
-* (Ubuntu / Debian) Change the paths inside the default file `sudo nano /etc/default/couchpotato`
-* (Ubuntu / Debian) Make it executable `sudo chmod +x /etc/init.d/couchpotato`
-* (Ubuntu / Debian) Add it to defaults `sudo update-rc.d couchpotato defaults`
-* (systemd) To run on boot copy the systemd config `sudo cp CouchPotatoServer/init/couchpotato.fedora.service /etc/systemd/system/couchpotato.service`
-* (systemd) Update the systemd config file with your user and path to CouchPotato.py
-* (systemd) Enable it at boot with `sudo systemctl enable couchpotato`
+* To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
+* Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato`
+* Change the paths inside the default file `sudo nano /etc/default/couchpotato`
+* Make it executable `sudo chmod +x /etc/init.d/couchpotato`
+* Add it to defaults `sudo update-rc.d couchpotato defaults`
 * Open your browser and go to `http://localhost:5050/`

 Docker:
 * You can use [razorgirl's Dockerfile](https://github.com/razorgirl/docker-couchpotato) to quickly build your own isolated app container. It's based on the Linux instructions above. For more info about Docker check out the [official website](https://www.docker.com).

 FreeBSD :

CONTRIBUTING.md
@@ -13,8 +13,6 @@ Lastly, for anything related to CouchPotato, feel free to stop by the [forum](ht
 ## Issues
 Issues are intended for reporting bugs and weird behaviour or suggesting improvements to CouchPotatoServer.
 Before you submit an issue, please go through the following checklist:
-* **FILL IN ALL THE FIELDS ASKED FOR**
-* **POST MORE THAN A SINGLE LINE LOG**, if you do, you'd better have a easy reproducable bug
 * Search through existing issues (*including closed issues!*) first: you might be able to get your answer there.
 * Double check your issue manually, because it could be an external issue.
 * Post logs with your issue: Without seeing what is going on, the developers can't reproduce the error.
@@ -27,14 +25,12 @@ Before you submit an issue, please go through the following checklist:
 * What hardware / OS are you using and what are its limitations? For example: NAS can be slow and maybe have a different version of python installed than when you use CP on OS X or Windows.
 * Your issue might be marked with the "can't reproduce" tag. Don't ask why your issue was closed if it says so in the tag.
-* If you're running on a NAS (QNAP, Austor, Synology etc.) with pre-made packages, make sure these are set up to use our source repository (RuudBurger/CouchPotatoServer) and nothing else!
-* Do not "bump" issues with "Any updates on this" or whatever. Yes I've seen it, you don't have to remind me of it. There will be an update when the code is done or I need information. If you feel the need to do so, you'd better have more info on the issue.

 The more relevant information you provide, the more likely that your issue will be resolved.
 If you don't follow any of the checks above, I'll close the issue. If you are wondering why (and ask) I'll block you from posting new issues and the repo.

 ## Pull Requests
 Pull requests are intended for contributing code or documentation to the project. Before you submit a pull request, consider the following:
 * Make sure your pull request is made for the *develop* branch (or relevant feature branch).
 * Have you tested your PR? If not, why?
-* Does your PR have any limitations I should know of?
+* Does your PR have any limitations we should know of?
 * Is your PR up-to-date with the branch you're trying to push into?
couchpotato/api.py
@@ -3,7 +3,7 @@ from threading import Thread
 import json
 import threading
 import traceback
-from six.moves import urllib
+import urllib

 from couchpotato.core.helpers.request import getParams
 from couchpotato.core.logger import CPLog
@@ -102,7 +102,7 @@ class ApiHandler(RequestHandler):

         kwargs = {}
         for x in self.request.arguments:
-            kwargs[x] = urllib.parse.unquote(self.get_argument(x))
+            kwargs[x] = urllib.unquote(self.get_argument(x))

         # Split array arguments
         kwargs = getParams(kwargs)
@@ -143,8 +143,6 @@ class ApiHandler(RequestHandler):
             else:
                 self.write(result)
                 self.finish()
-        except UnicodeDecodeError:
-            log.error('Failed proper encode: %s', traceback.format_exc())
         except:
             log.debug('Failed doing request, probably already closed: %s', (traceback.format_exc()))
             try: self.finish({'success': False, 'error': 'Failed returning results'})
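The api.py change above is the recurring pattern of this whole compare: the py3 branch routes urllib calls through six.moves so one spelling works on both interpreters, while build/2.5 calls the py2 module directly. A small illustration of the two spellings and the compatibility shim (six is already a bundled dependency of this codebase):

import six

if six.PY2:
    from urllib import unquote            # py2: flat urllib module
else:
    from urllib.parse import unquote      # py3: moved into urllib.parse

# six.moves hides that split behind one import path:
from six.moves.urllib.parse import unquote as unquote_compat

assert unquote('a%20b') == unquote_compat('a%20b') == 'a b'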
@@ -286,13 +286,13 @@ config = [{
             'name': 'permission_folder',
             'default': '0755',
             'label': 'Folder CHMOD',
-            'description': 'Can be either decimal (493) or octal (leading zero: 0755). <a target="_blank" href="http://permissions-calculator.org/">Calculate the correct value</a>',
+            'description': 'Can be either decimal (493) or octal (leading zero: 0755)',
         },
         {
             'name': 'permission_file',
             'default': '0755',
             'label': 'File CHMOD',
-            'description': 'See Folder CHMOD description, but for files',
+            'description': 'Same as Folder CHMOD but for files',
         },
     ],
 },
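The description being trimmed above refers to one permission value written two ways: decimal 493 and octal 0755 are the same bit pattern (rwxr-xr-x). A quick check of that equivalence, plus the kind of parsing a setting like this implies (parse_chmod is an illustrative helper, not the project's code):

def parse_chmod(value):
    # A leading zero signals octal; anything else reads as decimal.
    s = str(value)
    return int(s, 8) if s.startswith('0') else int(s)

assert parse_chmod('0755') == parse_chmod('493') == 0o755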
couchpotato/core/database.py
@@ -2,15 +2,13 @@ import json
 import os
 import time
 import traceback
 from sqlite3 import OperationalError

-from CodernityDB3.index import Index
+from CodernityDB.index import IndexException, IndexNotFoundException, IndexConflict
 from couchpotato import CPLog
 from couchpotato.api import addApiView
-from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
-from couchpotato.core.helpers.database import IndexException, IndexNotFoundException, IndexConflict, RecordNotFound
+from couchpotato.core.event import addEvent, fireEvent
 from couchpotato.core.helpers.encoding import toUnicode, sp
-from couchpotato.core.helpers.variable import getImdb, tryInt, randomString
+from couchpotato.core.helpers.variable import getImdb, tryInt


 log = CPLog(__name__)
@@ -33,7 +31,6 @@ class Database(object):

         addEvent('database.setup.after', self.startup_compact)
         addEvent('database.setup_index', self.setupIndex)
-        addEvent('database.delete_corrupted', self.deleteCorrupted)

         addEvent('app.migrate', self.migrate)
         addEvent('app.after_shutdown', self.close)
@@ -149,17 +146,6 @@ class Database(object):

         return results

-    def deleteCorrupted(self, _id, traceback_error = ''):
-
-        db = self.getDB()
-
-        try:
-            log.debug('Deleted corrupted document "%s": %s', (_id, traceback_error))
-            corrupted = db.get('id', _id, with_storage = False)
-            db._delete_id_index(corrupted.get('_id'), corrupted.get('_rev'), None)
-        except:
-            log.debug('Failed deleting corrupted: %s', traceback.format_exc())
-
     def reindex(self, **kwargs):

         success = True
@@ -240,34 +226,6 @@ class Database(object):
         from couchpotato import Env

         db = self.getDB()

-        # Try fix for migration failures on desktop
-        if Env.get('desktop'):
-            try:
-                list(db.all('profile', with_doc = True))
-            except RecordNotFound:
-
-                failed_location = '%s_failed' % db.path
-                old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db.old')
-
-                if not os.path.isdir(failed_location) and os.path.isfile(old_db):
-                    log.error('Corrupt database, trying migrate again')
-                    db.close()
-
-                    # Rename database folder
-                    os.rename(db.path, '%s_failed' % db.path)
-
-                    # Rename .old database to try another migrate
-                    os.rename(old_db, old_db[:-4])
-
-                    fireEventAsync('app.restart')
-                else:
-                    log.error('Migration failed and couldn\'t recover database. Please report on GitHub, with this message.')
-                    db.reindex()
-
-                return
-
         # Check size and compact if needed
         size = db.get_db_details().get('size')
         prop_name = 'last_db_compact'
         last_check = int(Env.prop(prop_name, default = 0))
@@ -312,326 +270,307 @@ class Database(object):
         }

         migrate_data = {}
-        rename_old = False

-        try:
-            c = conn.cursor()
+        c = conn.cursor()

         for ml in migrate_list:
             migrate_data[ml] = {}
             rows = migrate_list[ml]

             try:
                 c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
             except:
                 # ignore faulty destination_id database
                 if ml == 'category':
                     migrate_data[ml] = {}
                 else:
-                    rename_old = True
                     raise

             for p in c.fetchall():
                 columns = {}
                 for row in migrate_list[ml]:
                     columns[row] = p[rows.index(row)]

                 if not migrate_data[ml].get(p[0]):
                     migrate_data[ml][p[0]] = columns
                 else:
                     if not isinstance(migrate_data[ml][p[0]], list):
                         migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
                     migrate_data[ml][p[0]].append(columns)

         conn.close()

         log.info('Getting data took %s', time.time() - migrate_start)

         db = self.getDB()
-        if not db.opened:
-            return

         # Use properties
         properties = migrate_data['properties']
         log.info('Importing %s properties', len(properties))
         for x in properties:
             property = properties[x]
             Env.prop(property.get('identifier'), property.get('value'))

         # Categories
         categories = migrate_data.get('category', [])
         log.info('Importing %s categories', len(categories))
         category_link = {}
         for x in categories:
             c = categories[x]

             new_c = db.insert({
                 '_t': 'category',
                 'order': c.get('order', 999),
                 'label': toUnicode(c.get('label', '')),
                 'ignored': toUnicode(c.get('ignored', '')),
                 'preferred': toUnicode(c.get('preferred', '')),
                 'required': toUnicode(c.get('required', '')),
                 'destination': toUnicode(c.get('destination', '')),
             })

             category_link[x] = new_c.get('_id')

         # Profiles
         log.info('Importing profiles')
         new_profiles = db.all('profile', with_doc = True)
         new_profiles_by_label = {}
         for x in new_profiles:

             # Remove default non core profiles
             if not x['doc'].get('core'):
                 db.delete(x['doc'])
             else:
                 new_profiles_by_label[x['doc']['label']] = x['_id']

         profiles = migrate_data['profile']
         profile_link = {}
         for x in profiles:
             p = profiles[x]

             exists = new_profiles_by_label.get(p.get('label'))

             # Update existing with order only
             if exists and p.get('core'):
                 profile = db.get('id', exists)
                 profile['order'] = tryInt(p.get('order'))
                 profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
                 db.update(profile)

                 profile_link[x] = profile.get('_id')
             else:

                 new_profile = {
                     '_t': 'profile',
                     'label': p.get('label'),
                     'order': int(p.get('order', 999)),
                     'core': p.get('core', False),
                     'qualities': [],
                     'wait_for': [],
                     'finish': []
                 }

                 types = migrate_data['profiletype']
                 for profile_type in types:
                     p_type = types[profile_type]
                     if types[profile_type]['profile_id'] == p['id']:
                         if p_type['quality_id']:
                             new_profile['finish'].append(p_type['finish'])
                             new_profile['wait_for'].append(p_type['wait_for'])
                             new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])

                 if len(new_profile['qualities']) > 0:
                     new_profile.update(db.insert(new_profile))
                     profile_link[x] = new_profile.get('_id')
                 else:
                     log.error('Corrupt profile list for "%s", using default.', p.get('label'))

         # Qualities
         log.info('Importing quality sizes')
         new_qualities = db.all('quality', with_doc = True)
         new_qualities_by_identifier = {}
         for x in new_qualities:
             new_qualities_by_identifier[x['doc']['identifier']] = x['_id']

         qualities = migrate_data['quality']
         quality_link = {}
         for x in qualities:
             q = qualities[x]
             q_id = new_qualities_by_identifier[q.get('identifier')]

             quality = db.get('id', q_id)
             quality['order'] = q.get('order')
             quality['size_min'] = tryInt(q.get('size_min'))
             quality['size_max'] = tryInt(q.get('size_max'))
             db.update(quality)

             quality_link[x] = quality

         # Titles
         titles = migrate_data['librarytitle']
         titles_by_library = {}
         for x in titles:
             title = titles[x]
             if title.get('default'):
                 titles_by_library[title.get('libraries_id')] = title.get('title')

         # Releases
         releaseinfos = migrate_data['releaseinfo']
         for x in releaseinfos:
             info = releaseinfos[x]

             # Skip if release doesn't exist for this info
             if not migrate_data['release'].get(info.get('release_id')):
                 continue

             if not migrate_data['release'][info.get('release_id')].get('info'):
                 migrate_data['release'][info.get('release_id')]['info'] = {}

             migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')

         releases = migrate_data['release']
         releases_by_media = {}
         for x in releases:
             release = releases[x]
             if not releases_by_media.get(release.get('movie_id')):
                 releases_by_media[release.get('movie_id')] = []

             releases_by_media[release.get('movie_id')].append(release)

         # Type ids
         types = migrate_data['filetype']
         type_by_id = {}
         for t in types:
             type = types[t]
             type_by_id[type.get('id')] = type

         # Media
         log.info('Importing %s media items', len(migrate_data['movie']))
         statuses = migrate_data['status']
         libraries = migrate_data['library']
         library_files = migrate_data['library_files__file_library']
         releases_files = migrate_data['release_files__file_release']
         all_files = migrate_data['file']
         poster_type = migrate_data['filetype']['poster']
         medias = migrate_data['movie']
         for x in medias:
             m = medias[x]

             status = statuses.get(m['status_id']).get('identifier')
             l = libraries.get(m['library_id'])

             # Only migrate wanted movies, Skip if no identifier present
             if not l or not getImdb(l.get('identifier')): continue

             profile_id = profile_link.get(m['profile_id'])
             category_id = category_link.get(m['category_id'])
             title = titles_by_library.get(m['library_id'])
             releases = releases_by_media.get(x, [])
             info = json.loads(l.get('info', ''))

             files = library_files.get(m['library_id'], [])
             if not isinstance(files, list):
                 files = [files]

             added_media = fireEvent('movie.add', {
                 'info': info,
                 'identifier': l.get('identifier'),
                 'profile_id': profile_id,
                 'category_id': category_id,
                 'title': title
             }, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)

             if not added_media:
                 log.error('Failed adding media %s: %s', (l.get('identifier'), info))
                 continue

             added_media['files'] = added_media.get('files', {})
             for f in files:
                 ffile = all_files[f.get('file_id')]

                 # Only migrate posters
                 if ffile.get('type_id') == poster_type.get('id'):
                     if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
                         added_media['files']['image_poster'] = [ffile.get('path')]
                         break

             if 'image_poster' in added_media['files']:
                 db.update(added_media)

             for rel in releases:

                 empty_info = False
                 if not rel.get('info'):
                     empty_info = True
                     rel['info'] = {}

                 quality = quality_link.get(rel.get('quality_id'))
                 if not quality:
                     continue

                 release_status = statuses.get(rel.get('status_id')).get('identifier')

                 if rel['info'].get('download_id'):
                     status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
                     rel['info']['download_info'] = {
                         'id': rel['info'].get('download_id'),
                         'downloader': rel['info'].get('download_downloader'),
                         'status_support': status_support,
                     }

                 # Add status to keys
                 rel['info']['status'] = release_status
                 if not empty_info:
                     fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
                 else:
                     release = {
                         '_t': 'release',
                         'identifier': rel.get('identifier'),
                         'media_id': added_media.get('_id'),
                         'quality': quality.get('identifier'),
                         'status': release_status,
                         'last_edit': int(time.time()),
                         'files': {}
                     }

                     # Add downloader info if provided
                     try:
                         release['download_info'] = rel['info']['download_info']
                         del rel['download_info']
                     except:
                         pass

                     # Add files
                     release_files = releases_files.get(rel.get('id'), [])
                     if not isinstance(release_files, list):
                         release_files = [release_files]

                     if len(release_files) == 0:
                         continue

                     for f in release_files:
-                        rfile = all_files.get(f.get('file_id'))
-                        if not rfile:
-                            continue
-
+                        rfile = all_files[f.get('file_id')]
                         file_type = type_by_id.get(rfile.get('type_id')).get('identifier')

                         if not release['files'].get(file_type):
                             release['files'][file_type] = []

                         release['files'][file_type].append(rfile.get('path'))

                     try:
                         rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
                         rls.update(release)
                         db.update(rls)
                     except:
                         db.insert(release)

         log.info('Total migration took %s', time.time() - migrate_start)
         log.info('=' * 30)

-            rename_old = True
-
-        except OperationalError:
-            log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
-        except:
-            log.error('Migration failed: %s', traceback.format_exc())
-
         # rename old database
-        if rename_old:
-            random = randomString()
-            log.info('Renaming old database to %s ', '%s.%s_old' % (old_db, random))
-            os.rename(old_db, '%s.%s_old' % (old_db, random))
+        log.info('Renaming old database to %s ', old_db + '.old')
+        os.rename(old_db, old_db + '.old')

-            if os.path.isfile(old_db + '-wal'):
-                os.rename(old_db + '-wal', '%s-wal.%s_old' % (old_db, random))
-            if os.path.isfile(old_db + '-shm'):
-                os.rename(old_db + '-shm', '%s-shm.%s_old' % (old_db, random))
+        if os.path.isfile(old_db + '-wal'):
+            os.rename(old_db + '-wal', old_db + '-wal.old')
+        if os.path.isfile(old_db + '-shm'):
+            os.rename(old_db + '-shm', old_db + '-shm.old')
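Both sides of the migrate hunk share the same extraction loop: select a fixed column list per table, then fold each fetched row back into a dict keyed by column name, with the first selected column doubling as the record key. A self-contained sketch of that pattern (table and values invented for illustration):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE quality (id INTEGER, identifier TEXT, size_min INTEGER)')
conn.execute("INSERT INTO quality VALUES (1, '720p', 3500)")

rows = ['id', 'identifier', 'size_min']
c = conn.cursor()
c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', 'quality'))

data = {}
for p in c.fetchall():
    # zip the positional tuple back into named columns
    columns = dict((row, p[rows.index(row)]) for row in rows)
    data[p[0]] = columns

assert data[1]['identifier'] == '720p'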
@@ -1,10 +1,16 @@
 from base64 import b64encode
-import os
+from urllib2 import URLError
 from uuid import uuid4
 import hashlib
+import httplib
 import json
+import os
+import socket
+import ssl
+import sys
 import time
 import traceback

-from requests import HTTPError
+import urllib2

 from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
 from couchpotato.core.helpers.encoding import tryUrlencode, sp
@@ -29,17 +35,13 @@ class NZBVortex(DownloaderBase):

         # Send the nzb
         try:
-            nzb_filename = self.createFileName(data, filedata, media, unique_tag = True)
-            response = self.call('nzb/add', files = {'file': (nzb_filename, filedata, 'application/octet-stream')}, parameters = {
-                'name': nzb_filename,
-                'groupname': self.conf('group')
-            })
+            nzb_filename = self.createFileName(data, filedata, media)
+            self.call('nzb/add', files = {'file': (nzb_filename, filedata)})

-            if response and response.get('result', '').lower() == 'ok':
-                return self.downloadReturnId(nzb_filename)
-
-            log.error('Something went wrong sending the NZB file. Response: %s', response)
-            return False
+            time.sleep(10)
+            raw_statuses = self.call('nzb')
+            nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if os.path.basename(nzb['nzbFileName']) == nzb_filename][0]
+            return self.downloadReturnId(nzb_id)
         except:
             log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
             return False
@@ -58,8 +60,7 @@ class NZBVortex(DownloaderBase):

         release_downloads = ReleaseDownloadList(self)
         for nzb in raw_statuses.get('nzbs', []):
-            nzb_id = os.path.basename(nzb['nzbFileName'])
-            if nzb_id in ids:
+            if nzb['id'] in ids:

                 # Check status
                 status = 'busy'
@@ -69,8 +70,7 @@ class NZBVortex(DownloaderBase):
                     status = 'failed'

                 release_downloads.append({
-                    'temp_id': nzb['id'],
-                    'id': nzb_id,
+                    'id': nzb['id'],
                     'name': nzb['uiTitle'],
                     'status': status,
                     'original_status': nzb['state'],
@@ -85,7 +85,7 @@ class NZBVortex(DownloaderBase):
             log.info('%s failed downloading, deleting...', release_download['name'])

             try:
-                self.call('nzb/%s/cancel' % release_download['temp_id'])
+                self.call('nzb/%s/cancel' % release_download['id'])
             except:
                 log.error('Failed deleting: %s', traceback.format_exc(0))
                 return False
@@ -114,7 +114,7 @@ class NZBVortex(DownloaderBase):
         log.error('Login failed, please check you api-key')
         return False

-    def call(self, call, parameters = None, is_repeat = False, auth = True, *args, **kwargs):
+    def call(self, call, parameters = None, repeat = False, auth = True, *args, **kwargs):

         # Login first
         if not parameters: parameters = {}
@@ -127,20 +127,19 @@ class NZBVortex(DownloaderBase):

         params = tryUrlencode(parameters)

-        url = cleanHost(self.conf('host')) + 'api/' + call
+        url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api/' + call

         try:
-            data = self.getJsonData('%s%s' % (url, '?' + params if params else ''), *args, cache_timeout = 0, show_error = False, **kwargs)
+            data = self.urlopen('%s?%s' % (url, params), *args, **kwargs)

             if data:
-                return data
-        except HTTPError as e:
-            sc = e.response.status_code
-            if sc == 403:
+                return json.loads(data)
+        except URLError as e:
+            if hasattr(e, 'code') and e.code == 403:
                 # Try login and do again
-                if not is_repeat:
+                if not repeat:
                     self.login()
-                    return self.call(call, parameters = parameters, is_repeat = True, **kwargs)
+                    return self.call(call, parameters = parameters, repeat = True, **kwargs)

             log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
         except:
@@ -152,12 +151,13 @@ class NZBVortex(DownloaderBase):

         if not self.api_level:

+            url = cleanHost(self.conf('host')) + 'api/app/apilevel'
+
             try:
-                data = self.call('app/apilevel', auth = False)
-                self.api_level = float(data.get('apilevel'))
-            except HTTPError as e:
-                sc = e.response.status_code
-                if sc == 403:
+                data = self.urlopen(url, show_error = False)
+                self.api_level = float(json.loads(data).get('apilevel'))
+            except URLError as e:
+                if hasattr(e, 'code') and e.code == 403:
                     log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
                 else:
                     log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))
@@ -169,6 +169,29 @@ class NZBVortex(DownloaderBase):
         return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()


+class HTTPSConnection(httplib.HTTPSConnection):
+    def __init__(self, *args, **kwargs):
+        httplib.HTTPSConnection.__init__(self, *args, **kwargs)
+
+    def connect(self):
+        sock = socket.create_connection((self.host, self.port), self.timeout)
+        if sys.version_info < (2, 6, 7):
+            if hasattr(self, '_tunnel_host'):
+                self.sock = sock
+                self._tunnel()
+        else:
+            if self._tunnel_host:
+                self.sock = sock
+                self._tunnel()
+
+        self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version = ssl.PROTOCOL_TLSv1)
+
+
+class HTTPSHandler(urllib2.HTTPSHandler):
+    def https_open(self, req):
+        return self.do_open(HTTPSConnection, req)
+
+
 config = [{
     'name': 'nzbvortex',
     'groups': [
@@ -188,18 +211,20 @@ config = [{
         },
         {
             'name': 'host',
-            'default': 'https://localhost:4321',
-            'description': 'Hostname with port. Usually <strong>https://localhost:4321</strong>',
+            'default': 'localhost:4321',
+            'description': 'Hostname with port. Usually <strong>localhost:4321</strong>',
+        },
+        {
+            'name': 'ssl',
+            'default': 1,
+            'type': 'bool',
+            'advanced': True,
+            'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
         },
         {
             'name': 'api_key',
             'label': 'Api Key',
         },
-        {
-            'name': 'group',
-            'label': 'Group',
-            'description': 'The group CP places the nzb in. Make sure to create it in NZBVortex.',
-        },
         {
             'name': 'manual',
             'default': False,
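The HTTPSConnection/HTTPSHandler pair added above exists to force a TLSv1 handshake (ssl.wrap_socket with PROTOCOL_TLSv1) for NZBVortex's self-signed HTTPS endpoint. The diff defines the handler but never shows it being wired in; on py2 it would plug into urllib2 roughly like this (assumes the HTTPSHandler class from the hunk above is in scope; the URL is illustrative):

import urllib2

# Route all https:// requests through the TLSv1-forcing connection class.
opener = urllib2.build_opener(HTTPSHandler)
response = opener.open('https://localhost:4321/api/app/apilevel')
body = response.read()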
@@ -1,7 +1,7 @@
 from base64 import b16encode, b32decode
 from datetime import timedelta
 from hashlib import sha1
-from six.moves import urllib
+from urlparse import urlparse
 import os

 from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
@@ -62,7 +62,7 @@ class rTorrent(DownloaderBase):
         if self.conf('ssl') and url.startswith('httprpc://'):
             url = url.replace('httprpc://', 'httprpc+https://')

-        parsed = urllib.urlparse(url)
+        parsed = urlparse(url)

         # rpc_url is only used on http/https scgi pass-through
         if parsed.scheme in ['http', 'https']:
@@ -23,14 +23,16 @@ class Transmission(DownloaderBase):
     log = CPLog(__name__)
     trpc = None

-    def connect(self):
+    def connect(self, reconnect = False):
         # Load host from config and split out port.
-        host = cleanHost(self.conf('host')).rstrip('/').rsplit(':', 1)
+        host = cleanHost(self.conf('host'), protocol = False).split(':')
         if not isInt(host[1]):
             log.error('Config properties are not filled in correctly, port is missing.')
             return False

-        self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))
+        if not self.trpc or reconnect:
+            self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))

         return self.trpc

     def download(self, data = None, media = None, filedata = None):
@@ -86,7 +88,7 @@ class Transmission(DownloaderBase):
         return self.downloadReturnId(remote_torrent['torrent-added']['hashString'])

     def test(self):
-        if self.connect() and self.trpc.get_session():
+        if self.connect(True) and self.trpc.get_session():
             return True
         return False

@@ -162,11 +164,11 @@ class Transmission(DownloaderBase):
 class TransmissionRPC(object):

     """TransmissionRPC lite library"""
-    def __init__(self, host = 'http://localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
+    def __init__(self, host = 'localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):

         super(TransmissionRPC, self).__init__()

-        self.url = host + ':' + str(port) + '/' + rpc_url + '/rpc'
+        self.url = 'http://' + host + ':' + str(port) + '/' + rpc_url + '/rpc'
         self.tag = 0
         self.session_id = 0
         self.session = {}

@@ -274,8 +276,8 @@ config = [{
         },
         {
             'name': 'host',
-            'default': 'http://localhost:9091',
-            'description': 'Hostname with port. Usually <strong>http://localhost:9091</strong>',
+            'default': 'localhost:9091',
+            'description': 'Hostname with port. Usually <strong>localhost:9091</strong>',
         },
         {
             'name': 'rpc_url',
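The connect() rewrite above swaps rsplit(':', 1) on a full URL for a plain split(':') on a bare host:port (cleanHost with protocol = False strips the scheme first). The two only agree when no scheme is present; a small check of the difference:

host = 'http://localhost:9091'
assert host.rsplit(':', 1) == ['http://localhost', '9091']   # port stays at the end
assert host.split(':') == ['http', '//localhost', '9091']    # naive split breaks on the scheme

bare = 'localhost:9091'
assert bare.split(':') == ['localhost', '9091']              # safe once the scheme is stripped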
couchpotato/core/event.py
@@ -90,7 +90,7 @@ def fireEvent(name, *args, **kwargs):

     else:

-        e = Event(name = name, threads = 10, exc_info = True, traceback = True)
+        e = Event(name = name, threads = 10, exc_info = True, traceback = True, lock = threading.RLock())

         for event in events[name]:
             e.handle(event['handler'], priority = event['priority'])
@@ -102,7 +102,7 @@ def fireEvent(name, *args, **kwargs):
     # Fire
     result = e(*args, **kwargs)

-    result_keys = list(result.keys())
+    result_keys = result.keys()
     result_keys.sort(key = natsortKey)

     if options['single'] and not options['merge']:
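The result_keys change above is another py2/py3 seam: dict.keys() returns a plain list on py2, so it can be sorted in place, while py3 returns a view object with no .sort() method. The py3-branch spelling works on both interpreters:

d = {'b': 2, 'a': 1}

keys = list(d.keys())   # valid on py2 and py3
keys.sort()
assert keys == ['a', 'b']

# py2-only equivalent, as restored in this hunk:
#   keys = d.keys()
#   keys.sort()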
couchpotato/core/helpers/database.py
@@ -1,26 +0,0 @@
-from six import PY2
-
-
-if PY2:
-    from CodernityDB.database_super_thread_safe import SuperThreadSafeDatabase
-    from CodernityDB.index import IndexException, IndexConflict, IndexNotFoundException
-    from CodernityDB.database import RecordNotFound, RecordDeleted
-    from CodernityDB.hash_index import HashIndex
-    from CodernityDB.tree_index import MultiTreeBasedIndex, TreeBasedIndex
-else:
-    from CodernityDB3.database_super_thread_safe import SuperThreadSafeDatabase
-    from CodernityDB3.index import IndexException, IndexConflict, IndexNotFoundException
-    from CodernityDB3.database import RecordNotFound, RecordDeleted
-    from CodernityDB3.hash_index import HashIndex
-    from CodernityDB3.tree_index import MultiTreeBasedIndex, TreeBasedIndex
-
-
-SuperThreadSafeDatabase = SuperThreadSafeDatabase
-IndexException = IndexException
-IndexNotFoundException = IndexNotFoundException
-IndexConflict = IndexConflict
-RecordNotFound = RecordNotFound
-HashIndex = HashIndex
-MultiTreeBasedIndex = MultiTreeBasedIndex
-TreeBasedIndex = TreeBasedIndex
-RecordDeleted = RecordDeleted
couchpotato/core/helpers/encoding.py
@@ -1,11 +1,10 @@
 from string import ascii_letters, digits
+from urllib import quote_plus
 import os
 import re
 import traceback
 import unicodedata

 from chardet import detect
-from six.moves import urllib
 from couchpotato.core.logger import CPLog
 import six
@@ -16,7 +15,7 @@ log = CPLog(__name__)
 def toSafeString(original):
     valid_chars = "-_.() %s%s" % (ascii_letters, digits)
     cleaned_filename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
-    valid_string = ''.join(list(six.unichr(c) for c in cleaned_filename if six.unichr(c) in valid_chars))
+    valid_string = ''.join(c for c in cleaned_filename if c in valid_chars)
     return ' '.join(valid_string.split())

@@ -29,16 +28,13 @@ def simplifyString(original):

 def toUnicode(original, *args):
     try:
-        if isinstance(original, six.text_type):
+        if isinstance(original, unicode):
             return original
         else:
             try:
                 return six.text_type(original, *args)
             except:
                 try:
-                    detected = detect(original)
-                    if detected.get('encoding') == 'utf-8':
-                        return original.decode('utf-8')
+                    return ek(original, *args)
                 except:
                     raise
@@ -47,35 +43,16 @@ def toUnicode(original, *args):
         ascii_text = str(original).encode('string_escape')
         return toUnicode(ascii_text)

-
-def toUTF8(original):
-    try:
-        if isinstance(original, six.binary_type) and len(original) > 0:
-            # Try to detect
-            detected = detect(original)
-            return original.decode(detected.get('encoding')).encode('utf-8')
-        else:
-            return original
-    except:
-        #log.error('Failed encoding to UTF8: %s', traceback.format_exc())
-        raise
-
 def ss(original, *args):

     u_original = toUnicode(original, *args)
     try:
-        if isinstance(u_original, six.text_type):
-            u_original = u_original.encode('unicode_escape')
-        else:
-            u_original = u_original
-
-        return six.u(u_original)
+        from couchpotato.environment import Env
+        return u_original.encode(Env.get('encoding'))
     except Exception as e:
         log.debug('Failed ss encoding char, force UTF8: %s', e)
-        try:
-            from couchpotato.environment import Env
-            return u_original.encode(Env.get('encoding'), 'replace')
-        except:
-            return u_original.encode('utf-8', 'replace')
+        return u_original.encode('UTF-8')
@@ -88,7 +65,7 @@ def sp(path, *args):
     if os.path.sep == '/' and '\\' in path:
         path = '/' + path.replace(':', '').replace('\\', '/')

-    path = os.path.normpath(path)
+    path = os.path.normpath(ss(path, *args))

     # Remove any trailing path separators
     if path != os.path.sep:
@@ -130,15 +107,14 @@ def stripAccents(s):
 def tryUrlencode(s):
     new = six.u('')
     if isinstance(s, dict):
-        for key, value in list(s.items()):
+        for key, value in s.items():
             new += six.u('&%s=%s') % (key, tryUrlencode(value))

         return new[1:]
     else:
         for letter in ss(s):
-            letter = six.unichr(letter)
             try:
-                new += urllib.parse.quote_plus(letter)
+                new += quote_plus(letter)
             except:
                 new += letter
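The py3-side helpers removed above lean on chardet to guess a byte string's encoding before decoding it. A minimal sketch of that detect-then-decode fallback (chardet.detect returns a dict with 'encoding' and 'confidence'; the sample bytes are invented):

from chardet import detect

raw = b'caf\xc3\xa9'                         # UTF-8 bytes of "café"
guess = detect(raw)                          # e.g. {'encoding': 'utf-8', 'confidence': 0.75, ...}
text = raw.decode(guess.get('encoding') or 'utf-8', 'replace')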
@@ -1,51 +0,0 @@
-import os
-from chardet import detect
-from couchpotato import Env
-
-fs_enc = Env.get('fs_encoding')
-
-
-def list_dir(path, full_path = True):
-    """
-    List directory don't error when it doesn't exist
-    """
-
-    path = unicode_path(path)
-
-    if os.path.isdir(path):
-        for f in os.listdir(path):
-            if full_path:
-                yield join(path, f)
-            else:
-                yield f
-
-
-def join(*args):
-    """
-    Join path, encode properly before joining
-    """
-
-    return os.path.join(*[safe(x) for x in args])
-
-
-def unicode_path(path):
-    """
-    Convert back to unicode
-    :param path: path string
-    """
-
-    if isinstance(path, str):
-        detected = detect(path)
-        print detected
-        path = path.decode(detected.get('encoding'))
-        path = path.decode('unicode_escape')
-
-    return path
-
-
-def safe(path):
-
-    if isinstance(path, unicode):
-        return path.encode('unicode_escape')
-
-    return path
@@ -1,2 +0,0 @@

class NotSupported(Exception):
    pass
@@ -1,7 +1,7 @@

from urllib import unquote
import re

from couchpotato.core.helpers.encoding import toUnicode
from six.moves import urllib
from couchpotato.core.helpers.variable import natsortKey

@@ -10,7 +10,7 @@ def getParams(params):

    reg = re.compile('^[a-z0-9_\.]+$')

    # Sort keys
    param_keys = list(params.keys())
    param_keys = params.keys()
    param_keys.sort(key = natsortKey)

    temp = {}

@@ -28,7 +28,7 @@ def getParams(params):

            for item in nested:
                if item is nested[-1]:
                    current[item] = toUnicode(urllib.parse.unquote(value))
                    current[item] = toUnicode(unquote(value))
                else:
                    try:
                        current[item]

@@ -37,7 +37,7 @@ def getParams(params):

                    current = current[item]
        else:
            temp[param] = toUnicode(urllib.parse.unquote(value))
            temp[param] = toUnicode(unquote(value))
            if temp[param].lower() in ['true', 'false']:
                temp[param] = temp[param].lower() != 'false'
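These import swaps recur throughout the compare: the py3 branch pulls URL helpers from `six.moves`, which resolves to `urllib.parse` on Python 3 and to `urllib`/`urlparse` on Python 2. A short, self-contained illustration of the portable spelling (not repo code):

```python
# Works unchanged on Python 2 and 3 when six is installed.
from six.moves.urllib.parse import quote_plus, unquote, urlparse

print(quote_plus('fight club (1999)'))  # fight+club+%281999%29
print(unquote('fight%20club'))          # fight club
print(urlparse('https://api.nzb.su/api?t=movie').hostname)  # api.nzb.su
```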
34
couchpotato/core/helpers/variable.py
Executable file → Normal file

@@ -41,11 +41,11 @@ def symlink(src, dst):

def getUserDir():
    try:
        import pwd
        os.environ['HOME'] = sp(pwd.getpwuid(os.geteuid()).pw_dir)
        os.environ['HOME'] = pwd.getpwuid(os.geteuid()).pw_dir
    except:
        pass

    return sp(os.path.expanduser('~'))
    return os.path.expanduser('~')


def getDownloadDir():

@@ -380,33 +380,3 @@ def getFreeSpace(directories):

        free_space[folder] = size

    return free_space


def getSize(paths):

    single = not isinstance(paths, (tuple, list))
    if single:
        paths = [paths]

    total_size = 0
    for path in paths:
        path = sp(path)

        if os.path.isdir(path):
            total_size = 0
            for dirpath, _, filenames in os.walk(path):
                for f in filenames:
                    total_size += os.path.getsize(sp(os.path.join(dirpath, f)))

        elif os.path.isfile(path):
            total_size += os.path.getsize(path)

    return total_size / 1048576 # MB


def find(func, iterable):
    for item in iterable:
        if func(item):
            return item

    return None
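One detail worth noting in the `getSize` shown above: `total_size` is reset to zero inside the per-path loop whenever a directory is encountered, so earlier totals are discarded on multi-path calls. A hedged sketch of the same accumulation without that reset (`dir_size_mb` is a hypothetical name, not a repo helper):

```python
import os

def dir_size_mb(paths):
    # Accept a single path or a list of paths.
    if not isinstance(paths, (tuple, list)):
        paths = [paths]

    total = 0
    for path in paths:
        if os.path.isdir(path):
            for dirpath, _, filenames in os.walk(path):
                for name in filenames:
                    total += os.path.getsize(os.path.join(dirpath, name))
        elif os.path.isfile(path):
            total += os.path.getsize(path)

    return total / 1048576.0  # bytes -> MB
```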
@@ -3,7 +3,6 @@ import sys

import traceback

from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.py3 import NotSupported
from couchpotato.core.logger import CPLog
from importhelper import import_module
import six

@@ -132,7 +131,7 @@ class Loader(object):

            return False
        try:
            # Load single file plugin
            if isinstance(module.autoload, (six.string_types, six.text_type)):
            if isinstance(module.autoload, (str, unicode)):
                getattr(module, module.autoload)()
            # Load folder plugin
            else:

@@ -163,8 +162,6 @@ class Loader(object):

    def loadModule(self, name):
        try:
            return import_module(name)
        except NotSupported:
            log.error('Module "%s" is not supported in Python 3', name)
        except ImportError:
            log.debug('Skip loading module plugin %s: %s', (name, traceback.format_exc()))
            return None
@@ -1,6 +1,5 @@

import logging
import re
import traceback


class CPLog(object):

@@ -55,19 +54,20 @@ class CPLog(object):

    def safeMessage(self, msg, replace_tuple = ()):

        from couchpotato.core.helpers.encoding import ss, toUTF8
        from couchpotato.core.helpers.encoding import ss, toUnicode

        msg = toUTF8(msg)
        msg = ss(msg)

        try:
            if isinstance(replace_tuple, tuple):
                msg = msg % tuple([toUTF8(x) for x in list(replace_tuple)])
            elif isinstance(replace_tuple, dict):
                msg = msg % dict((k, toUTF8(v)) for k, v in replace_tuple.iteritems())
            else:
                msg = msg % toUTF8(replace_tuple)
            msg = msg % replace_tuple
        except:
            self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, traceback.format_exc()))
            try:
                if isinstance(replace_tuple, tuple):
                    msg = msg % tuple([ss(x) for x in list(replace_tuple)])
                else:
                    msg = msg % ss(replace_tuple)
            except Exception as e:
                self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, e))

        self.setup()
        if not self.is_develop:

@@ -84,4 +84,4 @@ class CPLog(object):

        except:
            pass

        return toUTF8(msg)
        return toUnicode(msg)
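The `safeMessage` hunk is about one thing: coercing the message and every `%`-interpolation argument to a single text type so logging itself can never raise `UnicodeDecodeError`. A standalone sketch of that idea under the same assumption (six installed; `safe_format` is an illustrative name):

```python
import six

def safe_format(msg, args=()):
    # Coerce message and every argument to text before interpolation so a
    # stray byte string can't blow up inside the logger.
    def as_text(v):
        if isinstance(v, six.binary_type):
            return v.decode('utf-8', 'replace')
        return v if isinstance(v, six.text_type) else six.text_type(v)

    msg = as_text(msg)
    if isinstance(args, tuple):
        return msg % tuple(as_text(x) for x in args)
    if isinstance(args, dict):
        return msg % {k: as_text(v) for k, v in args.items()}
    return msg % as_text(args)
```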
25
couchpotato/core/media/__init__.py
Executable file → Normal file

@@ -26,9 +26,9 @@ class MediaBase(Plugin):

        def onComplete():
            try:
                media = fireEvent('media.get', media_id, single = True)
                if media:
                    event_name = '%s.searcher.single' % media.get('type')
                    fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True)
                event_name = '%s.searcher.single' % media.get('type')

                fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True)
            except:
                log.error('Failed creating onComplete: %s', traceback.format_exc())

@@ -39,9 +39,9 @@ class MediaBase(Plugin):

        def notifyFront():
            try:
                media = fireEvent('media.get', media_id, single = True)
                if media:
                    event_name = '%s.update' % media.get('type')
                    fireEvent('notify.frontend', type = event_name, data = media)
                event_name = '%s.update' % media.get('type')

                fireEvent('notify.frontend', type = event_name, data = media)
            except:
                log.error('Failed creating onComplete: %s', traceback.format_exc())

@@ -65,13 +65,10 @@ class MediaBase(Plugin):

        return def_title or 'UNKNOWN'

    def getPoster(self, media, image_urls):
        if 'files' not in media:
            media['files'] = {}

        existing_files = media['files']

    def getPoster(self, image_urls, existing_files):
        image_type = 'poster'

        # Remove non-existing files
        file_type = 'image_%s' % image_type

        # Make existing unique

@@ -89,13 +86,13 @@ class MediaBase(Plugin):

        # Loop over type
        for image in image_urls.get(image_type, []):
            if not isinstance(image, six.string_types):
            if not isinstance(image, (str, unicode)):
                continue

            if file_type not in existing_files or len(existing_files.get(file_type, [])) == 0:
                file_path = fireEvent('file.download', url = image, single = True)
                if file_path:
                    existing_files[file_type] = [toUnicode(file_path)]
                    existing_files[file_type] = [file_path]
                    break
            else:
                break
110
couchpotato/core/media/_base/library/main.py
Executable file → Normal file

@@ -1,47 +1,10 @@

from couchpotato import get_db
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.library.base import LibraryBase

log = CPLog(__name__)


class Library(LibraryBase):

    def __init__(self):
        addEvent('library.title', self.title)
        addEvent('library.related', self.related)
        addEvent('library.tree', self.tree)

        addEvent('library.root', self.root)

        addApiView('library.query', self.queryView)
        addApiView('library.related', self.relatedView)
        addApiView('library.tree', self.treeView)

    def queryView(self, media_id, **kwargs):
        db = get_db()
        media = db.get('id', media_id)

        return {
            'result': fireEvent('library.query', media, single = True)
        }

    def relatedView(self, media_id, **kwargs):
        db = get_db()
        media = db.get('id', media_id)

        return {
            'result': fireEvent('library.related', media, single = True)
        }

    def treeView(self, media_id, **kwargs):
        db = get_db()
        media = db.get('id', media_id)

        return {
            'result': fireEvent('library.tree', media, single = True)
        }

    def title(self, library):
        return fireEvent(

@@ -53,76 +16,3 @@ class Library(LibraryBase):

            include_identifier = False,
            single = True
        )

    def related(self, media):
        result = {self.key(media['type']): media}

        db = get_db()
        cur = media

        while cur and cur.get('parent_id'):
            cur = db.get('id', cur['parent_id'])

            result[self.key(cur['type'])] = cur

        children = db.get_many('media_children', media['_id'], with_doc = True)

        for item in children:
            key = self.key(item['doc']['type']) + 's'

            if key not in result:
                result[key] = []

            result[key].append(item['doc'])

        return result

    def root(self, media):
        db = get_db()
        cur = media

        while cur and cur.get('parent_id'):
            cur = db.get('id', cur['parent_id'])

        return cur

    def tree(self, media = None, media_id = None):
        db = get_db()

        if media:
            result = media
        elif media_id:
            result = db.get('id', media_id, with_doc = True)
        else:
            return None

        # Find children
        items = db.get_many('media_children', result['_id'], with_doc = True)
        keys = []

        # Build children arrays
        for item in items:
            key = self.key(item['doc']['type']) + 's'

            if key not in result:
                result[key] = {}
            elif type(result[key]) is not dict:
                result[key] = {}

            if key not in keys:
                keys.append(key)

            result[key][item['_id']] = fireEvent('library.tree', item['doc'], single = True)

        # Unique children
        for key in keys:
            result[key] = result[key].values()

        # Include releases
        result['releases'] = fireEvent('release.for_media', result['_id'], single = True)

        return result

    def key(self, media_type):
        parts = media_type.split('.')
        return parts[-1]
@@ -40,7 +40,7 @@ class Matcher(MatcherBase):

        return False

    def correctTitle(self, chain, media):
        root = fireEvent('library.root', media, single = True)
        root_library = media['library']['root_library']

        if 'show_name' not in chain.info or not len(chain.info['show_name']):
            log.info('Wrong: missing show name in parsed result')

@@ -50,10 +50,10 @@ class Matcher(MatcherBase):

        chain_words = [x.lower() for x in chain.info['show_name']]

        # Build a list of possible titles of the media we are searching for
        titles = root['info']['titles']
        titles = root_library['info']['titles']

        # Add year suffix titles (will result in ['<name_one>', '<name_one> <suffix_one>', '<name_two>', ...])
        suffixes = [None, root['info']['year']]
        suffixes = [None, root_library['info']['year']]

        titles = [
            title + ((' %s' % suffix) if suffix else '')
@@ -1,14 +1,14 @@

from string import ascii_letters
from hashlib import md5

from couchpotato.core.helpers.database import MultiTreeBasedIndex, TreeBasedIndex
from CodernityDB.tree_index import MultiTreeBasedIndex, TreeBasedIndex
from couchpotato.core.helpers.encoding import toUnicode, simplifyString


class MediaIndex(MultiTreeBasedIndex):
    _version = 3

    custom_header = """from couchpotato.core.helpers.database import MultiTreeBasedIndex"""
    custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '32s'

@@ -62,11 +62,10 @@ class MediaTypeIndex(TreeBasedIndex):


class TitleSearchIndex(MultiTreeBasedIndex):
    _version = 2
    _version = 1

    custom_header = """from couchpotato.core.helpers.database import MultiTreeBasedIndex
try: from itertools import izip
except: izip = zip
    custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex
from itertools import izip
from couchpotato.core.helpers.encoding import simplifyString"""

    def __init__(self, *args, **kwargs):

@@ -102,7 +101,7 @@ from couchpotato.core.helpers.encoding import simplifyString"""

class TitleIndex(TreeBasedIndex):
    _version = 4

    custom_header = """from couchpotato.core.helpers.database import TreeBasedIndex
    custom_header = """from CodernityDB.tree_index import TreeBasedIndex
from string import ascii_letters
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""

@@ -135,7 +134,7 @@ from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""

class StartsWithIndex(TreeBasedIndex):
    _version = 3

    custom_header = """from couchpotato.core.helpers.database import TreeBasedIndex
    custom_header = """from CodernityDB.tree_index import TreeBasedIndex
from string import ascii_letters
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""

@@ -181,7 +180,7 @@ class MediaChildrenIndex(TreeBasedIndex):

class MediaTagIndex(MultiTreeBasedIndex):
    _version = 2

    custom_header = """from couchpotato.core.helpers.database import MultiTreeBasedIndex"""
    custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""

    def __init__(self, *args, **kwargs):
        kwargs['key_format'] = '32s'
113
couchpotato/core/media/_base/media/main.py
Executable file → Normal file

@@ -1,18 +1,18 @@

from datetime import timedelta
from operator import itemgetter
import time
import traceback
from string import ascii_lowercase

from CodernityDB.database import RecordNotFound
from couchpotato import tryInt, get_db
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
from couchpotato.core.helpers.database import RecordNotFound, RecordDeleted
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString, getImdb, getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.media import MediaBase
from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex, MediaTagIndex
import six


log = CPLog(__name__)

@@ -44,15 +44,15 @@ class MediaPlugin(MediaBase):

            'desc': 'List media',
            'params': {
                'type': {'type': 'string', 'desc': 'Media type to filter on.'},
                'status': {'type': 'array or csv', 'desc': 'Filter media by status. Example:"active,done"'},
                'release_status': {'type': 'array or csv', 'desc': 'Filter media by status of its releases. Example:"snatched,available"'},
                'limit_offset': {'desc': 'Limit and offset the media list. Examples: "50" or "50,30"'},
                'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all media starting with the letter "a"'},
                'search': {'desc': 'Search media title'},
                'status': {'type': 'array or csv', 'desc': 'Filter movie by status. Example:"active,done"'},
                'release_status': {'type': 'array or csv', 'desc': 'Filter movie by status of its releases. Example:"snatched,available"'},
                'limit_offset': {'desc': 'Limit and offset the movie list. Examples: "50" or "50,30"'},
                'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all movies starting with the letter "a"'},
                'search': {'desc': 'Search movie title'},
            },
            'return': {'type': 'object', 'example': """{
    'success': True,
    'empty': bool, any media returned or not,
    'empty': bool, any movies returned or not,
    'media': array, media found,
}"""}
        })

@@ -78,7 +78,6 @@ class MediaPlugin(MediaBase):

        addEvent('app.load', self.addSingleListView, priority = 100)
        addEvent('app.load', self.addSingleCharView, priority = 100)
        addEvent('app.load', self.addSingleDeleteView, priority = 100)
        addEvent('app.load', self.cleanupFaults)

        addEvent('media.get', self.get)
        addEvent('media.with_status', self.withStatus)

@@ -89,18 +88,6 @@ class MediaPlugin(MediaBase):

        addEvent('media.tag', self.tag)
        addEvent('media.untag', self.unTag)

    # Wrongly tagged media files
    def cleanupFaults(self):
        medias = fireEvent('media.with_status', 'ignored', single = True) or []

        db = get_db()
        for media in medias:
            try:
                media['status'] = 'done'
                db.update(media)
            except:
                pass

    def refresh(self, id = '', **kwargs):
        handlers = []
        ids = splitString(id)

@@ -122,7 +109,7 @@ class MediaPlugin(MediaBase):

        try:
            media = get_db().get('id', media_id)
            event = '%s.update' % media.get('type')
            event = '%s.update_info' % media.get('type')

            def handler():
                fireEvent(event, media_id = media_id, on_complete = self.createOnComplete(media_id))

@@ -159,7 +146,7 @@ class MediaPlugin(MediaBase):

            return media

        except (RecordNotFound, RecordDeleted):
        except RecordNotFound:
            log.error('Media with id "%s" not found', media_id)
        except:
            raise

@@ -173,13 +160,10 @@ class MediaPlugin(MediaBase):

            'media': media,
        }

    def withStatus(self, status, types = None, with_doc = True):
    def withStatus(self, status, with_doc = True):

        db = get_db()

        if types and not isinstance(types, (list, tuple)):
            types = [types]

        status = list(status if isinstance(status, (list, tuple)) else [status])

        for s in status:

@@ -187,29 +171,24 @@ class MediaPlugin(MediaBase):

            if with_doc:
                try:
                    doc = db.get('id', ms['_id'])

                    if types and doc.get('type') not in types:
                        continue

                    yield doc
                except (RecordDeleted, RecordNotFound):
                except RecordNotFound:
                    log.debug('Record not found, skipping: %s', ms['_id'])
                except (ValueError, EOFError):
                    fireEvent('database.delete_corrupted', ms.get('_id'), traceback_error = traceback.format_exc(0))
            else:
                yield ms

    def withIdentifiers(self, identifiers, with_doc = False):

        db = get_db()

        for x in identifiers:
            try:
                return db.get('media', '%s-%s' % (x, identifiers[x]), with_doc = with_doc)
                media = db.get('media', '%s-%s' % (x, identifiers[x]), with_doc = with_doc)
                return media
            except:
                pass

        log.debug('No media found with identifiers: %s', identifiers)
        return False

    def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, with_tags = None, starts_with = None, search = None):

@@ -281,7 +260,7 @@ class MediaPlugin(MediaBase):

        offset = 0
        limit = -1
        if limit_offset:
            splt = splitString(limit_offset) if isinstance(limit_offset, six.string_types) else limit_offset
            splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
            limit = tryInt(splt[0])
            offset = tryInt(0 if len(splt) is 1 else splt[1])

@@ -296,10 +275,6 @@ class MediaPlugin(MediaBase):

            media = fireEvent('media.get', media_id, single = True)

            # Skip if no media has been found
            if not media:
                continue

            # Merge releases with movie dict
            medias.append(media)

@@ -332,22 +307,9 @@ class MediaPlugin(MediaBase):

    def addSingleListView(self):

        for media_type in fireEvent('media.types', merge = True):
            tempList = lambda *args, **kwargs : self.listView(type = media_type, **kwargs)
            addApiView('%s.list' % media_type, tempList, docs = {
                'desc': 'List media',
                'params': {
                    'status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status. Example:"active,done"'},
                    'release_status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status of its releases. Example:"snatched,available"'},
                    'limit_offset': {'desc': 'Limit and offset the ' + media_type + ' list. Examples: "50" or "50,30"'},
                    'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all ' + media_type + 's starting with the letter "a"'},
                    'search': {'desc': 'Search ' + media_type + ' title'},
                },
                'return': {'type': 'object', 'example': """{
    'success': True,
    'empty': bool, any """ + media_type + """s returned or not,
    'media': array, media found,
}"""}
            })
            def tempList(*args, **kwargs):
                return self.listView(types = media_type, **kwargs)
            addApiView('%s.list' % media_type, tempList)

    def availableChars(self, types = None, status = None, release_status = None):

@@ -393,7 +355,7 @@ class MediaPlugin(MediaBase):

            if x['_id'] in media_ids:
                chars.add(x['key'])

            if len(chars) == 27:
            if len(chars) == 25:
                break

        return list(chars)

@@ -414,7 +376,8 @@ class MediaPlugin(MediaBase):

    def addSingleCharView(self):

        for media_type in fireEvent('media.types', merge = True):
            tempChar = lambda *args, **kwargs : self.charView(type = media_type, **kwargs)
            def tempChar(*args, **kwargs):
                return self.charView(types = media_type, **kwargs)
            addApiView('%s.available_chars' % media_type, tempChar)

    def delete(self, media_id, delete_from = None):

@@ -452,7 +415,7 @@ class MediaPlugin(MediaBase):

                        db.delete(release)
                        total_deleted += 1

                if (total_releases == total_deleted) or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'):
                if (total_releases == total_deleted and media['status'] != 'active') or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'):
                    db.delete(media)
                    deleted = True
                elif new_media_status:

@@ -483,16 +446,11 @@ class MediaPlugin(MediaBase):

    def addSingleDeleteView(self):

        for media_type in fireEvent('media.types', merge = True):
            tempDelete = lambda *args, **kwargs : self.deleteView(type = media_type, **kwargs)
            addApiView('%s.delete' % media_type, tempDelete, docs = {
                'desc': 'Delete a ' + media_type + ' from the wanted list',
                'params': {
                    'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
                    'delete_from': {'desc': 'Delete ' + media_type + ' from this page', 'type': 'string: all (default), wanted, manage'},
                }
            })
            def tempDelete(*args, **kwargs):
                return self.deleteView(types = media_type, *args, **kwargs)
            addApiView('%s.delete' % media_type, tempDelete)

    def restatus(self, media_id, tag_recent = True):
    def restatus(self, media_id):

        try:
            db = get_db()

@@ -512,13 +470,12 @@ class MediaPlugin(MediaBase):

            done_releases = [release for release in media_releases if release.get('status') == 'done']

            if done_releases:
                # Only look at latest added release
                release = sorted(done_releases, key = itemgetter('last_edit'), reverse = True)[0]

                # Check if we are finished with the media
                for release in done_releases:
                    if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True):
                        m['status'] = 'done'
                        break

                if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True):
                    m['status'] = 'done'
            elif previous_status == 'done':
                m['status'] = 'done'

@@ -531,22 +488,18 @@ class MediaPlugin(MediaBase):

            db.update(m)

            # Tag media as recent
            if tag_recent:
                self.tag(media_id, 'recent', update_edited = True)
            self.tag(media_id, 'recent')

            return m['status']
        except:
            log.error('Failed restatus: %s', traceback.format_exc())

    def tag(self, media_id, tag, update_edited = False):
    def tag(self, media_id, tag):

        try:
            db = get_db()
            m = db.get('id', media_id)

            if update_edited:
                m['last_edit'] = int(time.time())

            tags = m.get('tags') or []
            if tag not in tags:
                tags.append(tag)
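The `withStatus` change above is one of the more substantive hunks: the py3 side adds an optional `types` filter to the status generator. A minimal standalone sketch of that filtering pattern, with hypothetical names rather than the plugin's own database calls:

```python
def with_status(rows, status, types=None):
    # Normalize scalars to lists so callers can pass a single value.
    status = status if isinstance(status, (list, tuple)) else [status]
    if types and not isinstance(types, (list, tuple)):
        types = [types]

    for row in rows:
        if row.get('status') in status and (not types or row.get('type') in types):
            yield row

rows = [{'type': 'movie', 'status': 'done'}, {'type': 'show', 'status': 'done'}]
print(list(with_status(rows, 'done', types='movie')))  # only the movie row
```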
@@ -1,4 +1,4 @@

from six.moves import urllib
from urlparse import urlparse
import json
import re
import time

@@ -50,7 +50,7 @@ class Provider(Plugin):

        if Env.get('dev'): return True

        now = time.time()
        host = urllib.urlparse(test_url).hostname
        host = urlparse(test_url).hostname

        if self.last_available_check.get(host) < now - 900:
            self.last_available_check[host] = now

@@ -219,7 +219,7 @@ class YarrProvider(Provider):

        if provider and provider == self.getName():
            return self

        hostname = urllib.urlparse(url).hostname
        hostname = urlparse(url).hostname
        if host and hostname in host:
            return self
        else:
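A note on these provider hunks: with `from six.moves import urllib`, the portable spelling goes through the `parse` submodule (`urllib.parse.urlparse`), since as far as I know `six.moves.urllib` exposes no top-level `urlparse` attribute. A hedged sketch of the compatible form (`provider_host` is an illustrative name):

```python
from six.moves.urllib.parse import urlparse

def provider_host(url):
    # Hostname extraction that behaves the same on Python 2 and 3.
    return urlparse(url).hostname

print(provider_host('https://www.torrentleech.org/torrents/browse'))
# -> www.torrentleech.org
```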
@@ -1,4 +1,4 @@

from six.moves import urllib
from urlparse import urlparse
import time
import traceback
import re

@@ -45,7 +45,7 @@ class Base(NZBProvider, RSS):

    def _searchOnHost(self, host, media, quality, results):

        query = self.buildUrl(media, host)
        url = '%s%s' % (self.getUrl(host['host']), query)
        url = '%s&%s' % (self.getUrl(host['host']), query)
        nzbs = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})

        for nzb in nzbs:

@@ -83,7 +83,7 @@ class Base(NZBProvider, RSS):

            try:
                # Get details for extended description to retrieve passwords
                query = self.buildDetailsUrl(nzb_id, host['api_key'])
                url = '%s%s' % (self.getUrl(host['host']), query)
                url = '%s&%s' % (self.getUrl(host['host']), query)
                nzb_details = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})[0]

                description = self.getTextElement(nzb_details, 'description')

@@ -97,7 +97,7 @@ class Base(NZBProvider, RSS):

            results.append({
                'id': nzb_id,
                'provider_extra': urllib.urlparse(host['host']).hostname or host['host'],
                'provider_extra': urlparse(host['host']).hostname or host['host'],
                'name': toUnicode(name),
                'name_extra': name_extra,
                'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),

@@ -175,7 +175,7 @@ class Base(NZBProvider, RSS):

        return '&apikey=%s' % host['api_key']

    def download(self, url = '', nzb_id = ''):
        host = urllib.urlparse(url).hostname
        host = urlparse(url).hostname

        if self.limits_reached.get(host):
            # Try again in 3 hours

@@ -187,12 +187,11 @@ class Base(NZBProvider, RSS):

            self.limits_reached[host] = False
            return data
        except HTTPError as e:
            sc = e.response.status_code
            if sc in [503, 429]:
            if e.code == 503:
                response = e.read().lower()
                if sc == 429 or 'maximum api' in response or 'download limit' in response:
                if 'maximum api' in response or 'download limit' in response:
                    if not self.limits_reached.get(host):
                        log.error('Limit reached / to many requests for newznab provider: %s', host)
                        log.error('Limit reached for newznab provider: %s', host)
                    self.limits_reached[host] = time.time()
                    return 'try_next'

@@ -221,7 +220,7 @@ config = [{

            'description': 'Enable <a href="http://newznab.com/" target="_blank">NewzNab</a> such as <a href="https://nzb.su" target="_blank">NZB.su</a>, \
                <a href="https://nzbs.org" target="_blank">NZBs.org</a>, <a href="http://dognzb.cr/" target="_blank">DOGnzb.cr</a>, \
                <a href="https://github.com/spotweb/spotweb" target="_blank">Spotweb</a>, <a href="https://nzbgeek.info/" target="_blank">NZBGeek</a>, \
                <a href="https://www.nzbfinder.ws" target="_blank">NZBFinder</a>',
                <a href="https://smackdownonyou.com" target="_blank">SmackDown</a>, <a href="https://www.nzbfinder.ws" target="_blank">NZBFinder</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAgMAAABinRfyAAAACVBMVEVjhwD///86aRovd/sBAAAAMklEQVQI12NgAIPQUCCRmQkjssDEShiRuRIqwZqZGcDAGBrqANUhGgIkWAOABKMDxCAA24UK50b26SAAAAAASUVORK5CYII=',
            'options': [

@@ -232,30 +231,30 @@ config = [{

                },
                {
                    'name': 'use',
                    'default': '0,0,0,0,0'
                    'default': '0,0,0,0,0,0'
                },
                {
                    'name': 'host',
                    'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://api.nzbgeek.info,https://www.nzbfinder.ws',
                    'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://index.nzbgeek.info, https://smackdownonyou.com, https://www.nzbfinder.ws',
                    'description': 'The hostname of your newznab provider',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': '0,0,0,0,0',
                    'default': '0,0,0,0,0,0',
                    'description': 'Starting score for each release found via this provider.',
                },
                {
                    'name': 'custom_tag',
                    'advanced': True,
                    'label': 'Custom tag',
                    'default': ',,,,',
                    'default': ',,,,,',
                    'description': 'Add custom tags, for example add rls=1 to get only scene releases from nzbs.org',
                },
                {
                    'name': 'api_key',
                    'default': ',,,,',
                    'default': ',,,,,',
                    'label': 'Api Key',
                    'description': 'Can be found on your profile page',
                    'type': 'combined',
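The `download` hunk is where the two branches diverge on rate limiting: the py3 side reads a requests-style `e.response.status_code` and treats both 429 and limit-flavoured 503 responses as "provider exhausted", while 2.5 only inspects `e.code == 503`. A rough standalone sketch of that back-off idea, with illustrative names and an assumed three-hour retry window:

```python
import time

RETRY_AFTER = 3 * 60 * 60  # seconds to back off once a limit is hit
limits_reached = {}

def handle_http_error(host, status_code, body):
    # Treat explicit 429s, and 503s whose body mentions API or download
    # limits, as a soft "try the next provider" signal.
    body = body.lower()
    if status_code == 429 or (status_code == 503 and
            ('maximum api' in body or 'download limit' in body)):
        limits_reached[host] = time.time()
        return 'try_next'
    raise RuntimeError('unexpected HTTP %s from %s' % (status_code, host))

def limit_expired(host):
    hit = limits_reached.get(host)
    return hit is None or time.time() - hit > RETRY_AFTER
```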
126
couchpotato/core/media/_base/providers/nzb/nzbindex.py
Normal file

@@ -0,0 +1,126 @@

import re
import time

from bs4 import BeautifulSoup
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.rss import RSS
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.event import fireEvent
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
from dateutil.parser import parse


log = CPLog(__name__)


class Base(NZBProvider, RSS):

    urls = {
        'download': 'https://www.nzbindex.com/download/',
        'search': 'https://www.nzbindex.com/rss/?%s',
    }

    http_time_between_calls = 1 # Seconds

    def _search(self, media, quality, results):

        nzbs = self.getRSSData(self.urls['search'] % self.buildUrl(media, quality))

        for nzb in nzbs:

            enclosure = self.getElement(nzb, 'enclosure').attrib
            nzbindex_id = int(self.getTextElement(nzb, "link").split('/')[4])

            title = self.getTextElement(nzb, "title")

            match = fireEvent('matcher.parse', title, parser='usenet', single = True)
            if not match.chains:
                log.info('Unable to parse release with title "%s"', title)
                continue

            # TODO should we consider other lower-weight chains here?
            info = fireEvent('matcher.flatten_info', match.chains[0].info, single = True)

            release_name = fireEvent('matcher.construct_from_raw', info.get('release_name'), single = True)

            file_name = info.get('detail', {}).get('file_name')
            file_name = file_name[0] if file_name else None

            title = release_name or file_name

            # Strip extension from parsed title (if one exists)
            ext_pos = title.rfind('.')

            # Assume extension if smaller than 4 characters
            # TODO this should probably be done a better way
            if len(title[ext_pos + 1:]) <= 4:
                title = title[:ext_pos]

            if not title:
                log.info('Unable to find release name from match')
                continue

            try:
                description = self.getTextElement(nzb, "description")
            except:
                description = ''

            def extra_check(item):
                if '#c20000' in item['description'].lower():
                    log.info('Wrong: Seems to be passworded: %s', item['name'])
                    return False

                return True

            results.append({
                'id': nzbindex_id,
                'name': title,
                'age': self.calculateAge(int(time.mktime(parse(self.getTextElement(nzb, "pubDate")).timetuple()))),
                'size': tryInt(enclosure['length']) / 1024 / 1024,
                'url': enclosure['url'],
                'detail_url': enclosure['url'].replace('/download/', '/release/'),
                'description': description,
                'get_more_info': self.getMoreInfo,
                'extra_check': extra_check,
            })

    def getMoreInfo(self, item):
        try:
            if '/nfo/' in item['description'].lower():
                nfo_url = re.search('href=\"(?P<nfo>.+)\" ', item['description']).group('nfo')
                full_description = self.getCache('nzbindex.%s' % item['id'], url = nfo_url, cache_timeout = 25920000)
                html = BeautifulSoup(full_description)
                item['description'] = toUnicode(html.find('pre', attrs = {'id': 'nfo0'}).text)
        except:
            pass


config = [{
    'name': 'nzbindex',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'nzb_providers',
            'name': 'nzbindex',
            'description': 'Free provider, less accurate. See <a href="https://www.nzbindex.com/">NZBIndex</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAo0lEQVR42t2SQQ2AMBAEcUCwUAv94QMLfHliAQtYqIVawEItYAG6yZFMLkUANNlk79Kbbtp2P1j9uKxVV9VWFeStl+Wh3fWK9hNwEoADZkJtMD49AqS5AUjWGx6A+m+ARICGrM5W+wSTB0gETKzdHZwCEZAJ8PGZQN4AiQAmkR9s06EBAugJiBoAAPFfAQcBgZcIHzwA6TYP4JsXeSg3P9L31w3eksbH3zMb/wAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': True,
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
@@ -1,4 +1,4 @@

from six.moves import urllib
from urlparse import urlparse, parse_qs
import time

from couchpotato.core.event import fireEvent

@@ -52,7 +52,7 @@ class Base(NZBProvider, RSS):

        for nzb in nzbs:

            enclosure = self.getElement(nzb, 'enclosure').attrib
            nzb_id = urllib.parse_qs(urllib.urlparse(self.getTextElement(nzb, 'link')).query).get('id')[0]
            nzb_id = parse_qs(urlparse(self.getTextElement(nzb, 'link')).query).get('id')[0]

            results.append({
                'id': nzb_id,
@@ -61,7 +61,7 @@ class Base(TorrentProvider):

                'name': re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) %s' % (name, year, torrent_desc)),
                'url': self.urls['download'] % (torrent_id, authkey, self.conf('passkey')),
                'detail_url': self.urls['detail'] % torrent_id,
                'size': tryInt(entry.find('size').get_text()) / 1048576,
                'size': self.parseSize(entry.find('size').get_text()),
                'seeders': tryInt(entry.find('seeders').get_text()),
                'leechers': tryInt(entry.find('leechers').get_text()),
                'score': torrentscore
@@ -22,9 +22,6 @@ class Base(TorrentProvider):

    http_time_between_calls = 1 # Seconds
    only_tables_tags = SoupStrainer('table')

    torrent_name_cell = 1
    torrent_download_cell = 2

    def _searchOnTitle(self, title, movie, quality, results):

        url = self.urls['search'] % self.buildUrl(title, movie, quality)

@@ -43,8 +40,8 @@ class Base(TorrentProvider):

                    all_cells = result.find_all('td')

                    torrent = all_cells[self.torrent_name_cell].find('a')
                    download = all_cells[self.torrent_download_cell].find('a')
                    torrent = all_cells[1].find('a')
                    download = all_cells[3].find('a')

                    torrent_id = torrent['href']
                    torrent_id = torrent_id.replace('details.php?id=', '')

@@ -52,9 +49,9 @@ class Base(TorrentProvider):

                    torrent_name = torrent.getText()

                    torrent_size = self.parseSize(all_cells[8].getText())
                    torrent_seeders = tryInt(all_cells[10].getText())
                    torrent_leechers = tryInt(all_cells[11].getText())
                    torrent_size = self.parseSize(all_cells[7].getText())
                    torrent_seeders = tryInt(all_cells[9].getText())
                    torrent_leechers = tryInt(all_cells[10].getText())
                    torrent_url = self.urls['baseurl'] % download['href']
                    torrent_detail_url = self.urls['baseurl'] % torrent['href']
@@ -34,7 +34,8 @@ class Base(TorrentMagnetProvider):

        'http://kickass.pw',
        'http://kickassto.come.in',
        'http://katproxy.ws',
        'http://kickass.bitproxy.eu',
        'http://www.kickassunblock.info',
        'http://www.kickassproxy.info',
        'http://katph.eu',
        'http://kickassto.come.in',
    ]
@@ -1,4 +1,4 @@

from six.moves import html_entities
import htmlentitydefs
import json
import re
import time

@@ -64,10 +64,6 @@ class Base(TorrentProvider):

                torrentdesc += ' HQ'
                if self.conf('prefer_golden'):
                    torrentscore += 5000
            if 'FreeleechType' in torrent:
                torrentdesc += ' Freeleech'
                if self.conf('prefer_freeleech'):
                    torrentscore += 7000
            if 'Scene' in torrent and torrent['Scene']:
                torrentdesc += ' Scene'
                if self.conf('prefer_scene'):

@@ -145,15 +141,15 @@ class Base(TorrentProvider):

        # character reference
        try:
            if txt[:3] == "&#x":
                return six.unichr(int(txt[3:-1], 16))
                return unichr(int(txt[3:-1], 16))
            else:
                return six.unichr(int(txt[2:-1]))
                return unichr(int(txt[2:-1]))
        except ValueError:
            pass
    else:
        # named entity
        try:
            txt = six.unichr(html_entities.name2codepoint[txt[1:-1]])
            txt = unichr(htmlentitydefs.name2codepoint[txt[1:-1]])
        except KeyError:
            pass
        return txt # leave as is

@@ -227,14 +223,6 @@ config = [{

            'default': 1,
            'description': 'Favors Golden Popcorn-releases over all other releases.'
        },
        {
            'name': 'prefer_freeleech',
            'advanced': True,
            'type': 'bool',
            'label': 'Prefer Freeleech',
            'default': 1,
            'description': 'Favors torrents marked as freeleech over all other releases.'
        },
        {
            'name': 'prefer_scene',
            'advanced': True,
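The entity-unescaping hunk above swaps `unichr`/`htmlentitydefs` for their six equivalents. A self-contained sketch of the whole technique, handling numeric (`&#38;`, `&#x26;`) and named (`&amp;`) entities portably; the `unescape` name is illustrative:

```python
import re
import six
from six.moves import html_entities  # htmlentitydefs on py2, html.entities on py3

def unescape(text):
    def fixup(m):
        txt = m.group(0)
        if txt[:2] == '&#':
            # Numeric character reference, decimal or hex.
            try:
                if txt[:3] in ('&#x', '&#X'):
                    return six.unichr(int(txt[3:-1], 16))
                return six.unichr(int(txt[2:-1]))
            except ValueError:
                return txt
        # Named entity.
        try:
            return six.unichr(html_entities.name2codepoint[txt[1:-1]])
        except KeyError:
            return txt  # leave as is
    return re.sub('&#?\\w+;', fixup, text)

print(unescape('Fight &amp; Club &#x26; more'))  # Fight & Club & more
```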
@@ -24,16 +24,16 @@ class Base(TorrentMagnetProvider):

    http_time_between_calls = 0

    proxy_list = [
        'https://dieroschtibay.org',
        'https://nobay.net',
        'https://thebay.al',
        'https://thepiratebay.se',
        'http://thepiratebay.se.net',
        'http://thepiratebay.cd',
        'http://thebootlegbay.com',
        'http://tpb.ninja.so',
        'http://proxybay.fr',
        'http://www.tpb.gr',
        'http://tpbproxy.co.uk',
        'http://pirateproxy.in',
        'http://piratebay.skey.sk',
        'http://pirateproxy.be',
        'http://www.getpirate.com',
        'http://piratebay.io',
        'http://bayproxy.li',
        'http://proxybay.pw',
    ]
@@ -13,12 +13,12 @@ log = CPLog(__name__)

class Base(TorrentProvider):

    urls = {
        'test': 'https://www.torrentleech.org/',
        'login': 'https://www.torrentleech.org/user/account/login/',
        'login_check': 'https://torrentleech.org/user/messages',
        'detail': 'https://www.torrentleech.org/torrent/%s',
        'search': 'https://www.torrentleech.org/torrents/browse/index/query/%s/categories/%d',
        'download': 'https://www.torrentleech.org%s',
        'test': 'http://www.torrentleech.org/',
        'login': 'http://www.torrentleech.org/user/account/login/',
        'login_check': 'http://torrentleech.org/user/messages',
        'detail': 'http://www.torrentleech.org/torrent/%s',
        'search': 'http://www.torrentleech.org/torrents/browse/index/query/%s/categories/%d',
        'download': 'http://www.torrentleech.org%s',
    }

    http_time_between_calls = 1 # Seconds
@@ -1,4 +1,4 @@

from six.moves import urllib
from urlparse import urlparse
import re
import traceback

@@ -45,7 +45,7 @@ class Base(TorrentProvider):

                results.append({
                    'id': torrent.get('torrent_id'),
                    'protocol': 'torrent' if re.match('^(http|https|ftp)://.*$', torrent.get('download_url')) else 'torrent_magnet',
                    'provider_extra': urllib.urlparse(host['host']).hostname or host['host'],
                    'provider_extra': urlparse(host['host']).hostname or host['host'],
                    'name': toUnicode(torrent.get('release_name')),
                    'url': torrent.get('download_url'),
                    'detail_url': torrent.get('details_url'),
@@ -13,12 +13,12 @@ log = CPLog(__name__)

class Base(TorrentProvider):

    urls = {
        'test': 'http://torrentshack.eu/',
        'login': 'http://torrentshack.eu/login.php',
        'login_check': 'http://torrentshack.eu/inbox.php',
        'detail': 'http://torrentshack.eu/torrent/%s',
        'search': 'http://torrentshack.eu/torrents.php?action=advanced&searchstr=%s&scene=%s&filter_cat[%d]=1',
        'download': 'http://torrentshack.eu/%s',
        'test': 'https://torrentshack.net/',
        'login': 'https://torrentshack.net/login.php',
        'login_check': 'https://torrentshack.net/inbox.php',
        'detail': 'https://torrentshack.net/torrent/%s',
        'search': 'https://torrentshack.net/torrents.php?action=advanced&searchstr=%s&scene=%s&filter_cat[%d]=1',
        'download': 'https://torrentshack.net/%s',
    }

    http_time_between_calls = 1 # Seconds

@@ -42,7 +42,6 @@ class Base(TorrentProvider):

                link = result.find('span', attrs = {'class': 'torrent_name_link'}).parent
                url = result.find('td', attrs = {'class': 'torrent_td'}).find('a')
                tds = result.find_all('td')

                results.append({
                    'id': link['href'].replace('torrents.php?torrentid=', ''),

@@ -50,8 +49,8 @@ class Base(TorrentProvider):

                    'url': self.urls['download'] % url['href'],
                    'detail_url': self.urls['download'] % link['href'],
                    'size': self.parseSize(result.find_all('td')[5].string),
                    'seeders': tryInt(tds[len(tds)-2].string),
                    'leechers': tryInt(tds[len(tds)-1].string),
                    'seeders': tryInt(result.find_all('td')[7].string),
                    'leechers': tryInt(result.find_all('td')[8].string),
                })

            except:

@@ -81,7 +80,7 @@ config = [{

            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentShack',
            'description': '<a href="http://torrentshack.eu/">TorrentShack</a>',
            'description': '<a href="https://www.torrentshack.net/">TorrentShack</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABmElEQVQoFQXBzY2cVRiE0afqvd84CQiAnxWWtyxsS6ThINBYg2Dc7mZBMEjE4mzs6e9WcY5+ePNuVFJJodQAoLo+SaWCy9rcV8cmjah3CI6iYu7oRU30kE5xxELRfamklY3k1NL19sSm7vPzP/ZdNZzKVDaY2sPZJBh9fv5ITrmG2+Vp4e1sPchVqTCQZJnVXi+/L4uuAJGly1+Pw8CprLbi8Om7tbT19/XRqJUk11JP9uHj9ulxhXbvJbI9qJvr5YkGXFG2IBT8tXczt+sfzDZCp3765f3t9tHEHGEDACma77+8o4oATKk+/PfW9YmHruRFjWoVSFsVsGu1YSKq6Oc37+n98unPZSRlY7vsKDqN+92X3yR9+PdXee3iJNKMStqdcZqoTJbUSi5JOkpfRlhSI0mSpEmCFKoU7FqSNOLAk54uGwCStMUCgLrVic62g7oDoFmmdI+P3S0pDe1xvDqb6XrZqbtzShWNoh9fv/XQHaDdM9OqrZi2M7M3UrB2vlkPS1IbdEBk7UiSoD6VlZ6aKWer4aH4f/AvKoHUTjuyAAAAAElFTkSuQmCC',
            'options': [
@@ -1,4 +1,4 @@

from six.moves import urllib
from urlparse import urlparse

from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import simplifyString

@@ -34,7 +34,7 @@ class UserscriptBase(Plugin):

    def belongsTo(self, url):

        host = urllib.urlparse(url).hostname
        host = urlparse(url).hostname
        host_split = host.split('.')
        if len(host_split) > 2:
            host = host[len(host_split[0]):]
@@ -3,7 +3,6 @@ from couchpotato.core.event import fireEvent, addEvent

from couchpotato.core.helpers.variable import mergeDicts, getImdb
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
import six

log = CPLog(__name__)

@@ -31,7 +30,7 @@ class Search(Plugin):

    def search(self, q = '', types = None, **kwargs):

        # Make sure types is the correct instance
        if isinstance(types, six.string_types):
        if isinstance(types, (str, unicode)):
            types = [types]
        elif isinstance(types, (list, tuple, set)):
            types = list(types)
@@ -73,24 +73,4 @@ config = [{

            ],
        },
    ],
}, {
    'name': 'torrent',
    'groups': [
        {
            'tab': 'searcher',
            'name': 'searcher',
            'wizard': True,
            'options': [
                {
                    'name': 'minimum_seeders',
                    'advanced': True,
                    'label': 'Minimum seeders',
                    'description': 'Ignore torrents with seeders below this number',
                    'default': 1,
                    'type': 'int',
                    'unit': 'seeders'
                },
            ],
        },
    ],
}]
@@ -129,11 +129,7 @@ class Searcher(SearcherBase):

        # Try guessing via quality tags
        guess = fireEvent('quality.guess', [nzb.get('name')], single = True)

        if guess:
            return threed == guess.get('is_3d')
        # If no quality guess, assume not 3d
        else:
            return threed == False
        return threed == guess.get('is_3d')

    def correctYear(self, haystack, year, year_range):

@@ -178,25 +174,6 @@ class Searcher(SearcherBase):

        return False

    def containsWords(self, rel_name, rel_words, conf, media):

        # Make sure it has required words
        words = splitString(self.conf('%s_words' % conf, section = 'searcher').lower())
        try: words = removeDuplicate(words + splitString(media['category'][conf].lower()))
        except: pass

        req_match = 0
        for req_set in words:
            if len(req_set) >= 2 and (req_set[:1] + req_set[-1:]) == '//':
                if re.search(req_set[1:-1], rel_name):
                    log.debug('Regex match: %s', req_set[1:-1])
                    req_match += 1
            else:
                req = splitString(req_set, '&')
                req_match += len(list(set(rel_words) & set(req))) == len(req)

        return words, req_match > 0

    def correctWords(self, rel_name, media):
        media_title = fireEvent('searcher.get_search_title', media, single = True)
        media_words = re.split('\W+', simplifyString(media_title))

@@ -204,13 +181,31 @@ class Searcher(SearcherBase):

        rel_name = simplifyString(rel_name)
        rel_words = re.split('\W+', rel_name)

        required_words, contains_required = self.containsWords(rel_name, rel_words, 'required', media)
        if len(required_words) > 0 and not contains_required:
        # Make sure it has required words
        required_words = splitString(self.conf('required_words', section = 'searcher').lower())
        try: required_words = removeDuplicate(required_words + splitString(media['category']['required'].lower()))
        except: pass

        req_match = 0
        for req_set in required_words:
            req = splitString(req_set, '&')
            req_match += len(list(set(rel_words) & set(req))) == len(req)

        if len(required_words) > 0 and req_match == 0:
            log.info2('Wrong: Required word missing: %s', rel_name)
            return False

        ignored_words, contains_ignored = self.containsWords(rel_name, rel_words, 'ignored', media)
        if len(ignored_words) > 0 and contains_ignored:
        # Ignore releases
        ignored_words = splitString(self.conf('ignored_words', section = 'searcher').lower())
        try: ignored_words = removeDuplicate(ignored_words + splitString(media['category']['ignored'].lower()))
        except: pass

        ignored_match = 0
        for ignored_set in ignored_words:
            ignored = splitString(ignored_set, '&')
            ignored_match += len(list(set(rel_words) & set(ignored))) == len(ignored)

        if len(ignored_words) > 0 and ignored_match:
            log.info2("Wrong: '%s' contains 'ignored words'", rel_name)
            return False
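The factored-out `containsWords` above supports two entry forms: an `&`-joined group where every word must appear, and a `/regex/` set matched against the whole release name. A compact standalone sketch of that matching rule (simplified, hypothetical names):

```python
import re

def contains_words(release_name, release_words, word_sets):
    # Each entry is either '/pattern/' or 'word1&word2&...'.
    matches = 0
    for word_set in word_sets:
        if len(word_set) >= 2 and word_set[0] + word_set[-1] == '//':
            if re.search(word_set[1:-1], release_name):
                matches += 1
        else:
            group = word_set.split('&')
            matches += set(group) <= set(release_words)
    return matches > 0

name = 'fight club 1999 1080p bluray x264'
print(contains_words(name, name.split(), ['1080p&bluray']))  # True
print(contains_words(name, name.split(), ['/x26[45]/']))     # True
```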
59
couchpotato/core/media/movie/_base/main.py
Executable file → Normal file

@@ -1,10 +1,11 @@

import os
import traceback
import time

from CodernityDB.database import RecordNotFound
from couchpotato import get_db
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
from couchpotato.core.helpers.database import RecordNotFound
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString, getTitle, getImdb, getIdentifier
from couchpotato.core.logger import CPLog

@@ -27,10 +28,6 @@ class MovieBase(MovieTypeBase):

        addApiView('movie.add', self.addView, docs = {
            'desc': 'Add new movie to the wanted list',
            'return': {'type': 'object', 'example': """{
    'success': True,
    'movie': object
}"""},
            'params': {
                'identifier': {'desc': 'IMDB id of the movie your want to add.'},
                'profile_id': {'desc': 'ID of quality profile you want the add the movie in. If empty will use the default profile.'},

@@ -49,7 +46,7 @@ class MovieBase(MovieTypeBase):

        })

        addEvent('movie.add', self.add)
        addEvent('movie.update', self.update)
        addEvent('movie.update_info', self.updateInfo)
        addEvent('movie.update_release_dates', self.updateReleaseDate)

    def add(self, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):

@@ -154,7 +151,8 @@ class MovieBase(MovieTypeBase):

            for release in fireEvent('release.for_media', m['_id'], single = True):
                if release.get('status') in ['downloaded', 'snatched', 'seeding', 'done']:
                    if params.get('ignore_previous', False):
                        fireEvent('release.update_status', release['_id'], status = 'ignored')
                        release['status'] = 'ignored'
                        db.update(release)
                    else:
                        fireEvent('release.delete', release['_id'], single = True)

@@ -174,7 +172,7 @@ class MovieBase(MovieTypeBase):

            # Trigger update info
            if added and update_after:
                # Do full update to get images etc
                fireEventAsync('movie.update', m['_id'], default_title = params.get('title'), on_complete = onComplete)
                fireEventAsync('movie.update_info', m['_id'], default_title = params.get('title'), on_complete = onComplete)

            # Remove releases
            for rel in fireEvent('release.for_media', m['_id'], single = True):

@@ -182,9 +180,6 @@ class MovieBase(MovieTypeBase):

                    db.delete(rel)

            movie_dict = fireEvent('media.get', m['_id'], single = True)
            if not movie_dict:
                log.debug('Failed adding media, can\'t find it anymore')
                return False

            if do_search and search_after:
                onComplete = self.createOnComplete(m['_id'])

@@ -261,7 +256,7 @@ class MovieBase(MovieTypeBase):

            'success': False,
        }

    def update(self, media_id = None, identifier = None, default_title = None, extended = False):
    def updateInfo(self, media_id = None, identifier = None, default_title = None, extended = False):
        """
        Update movie information inside media['doc']['info']

@@ -274,10 +269,6 @@ class MovieBase(MovieTypeBase):

        if self.shuttingDown():
            return

        lock_key = 'media.get.%s' % media_id if media_id else identifier
        self.acquireLock(lock_key)

        media = {}
        try:
            db = get_db()

@@ -321,16 +312,42 @@ class MovieBase(MovieTypeBase):

                media['title'] = def_title

            # Files
            image_urls = info.get('images', [])
            images = info.get('images', [])
            media['files'] = media.get('files', {})
            for image_type in ['poster']:

            self.getPoster(media, image_urls)
                # Remove non-existing files
                file_type = 'image_%s' % image_type
                existing_files = list(set(media['files'].get(file_type, [])))
                for ef in media['files'].get(file_type, []):
                    if not os.path.isfile(ef):
                        existing_files.remove(ef)

                # Replace new files list
                media['files'][file_type] = existing_files
                if len(existing_files) == 0:
                    del media['files'][file_type]

                # Loop over type
                for image in images.get(image_type, []):
                    if not isinstance(image, (str, unicode)):
                        continue

                    if file_type not in media['files'] or len(media['files'].get(file_type, [])) == 0:
                        file_path = fireEvent('file.download', url = image, single = True)
                        if file_path:
                            media['files'][file_type] = [file_path]
                            break
                    else:
                        break

            db.update(media)

            return media
        except:
            log.error('Failed update media: %s', traceback.format_exc())

        self.releaseLock(lock_key)
        return media
        return {}

    def updateReleaseDate(self, media_id):
        """

@@ -346,7 +363,7 @@ class MovieBase(MovieTypeBase):

        media = db.get('id', media_id)

        if not media.get('info'):
            media = self.update(media_id)
            media = self.updateInfo(media_id)
            dates = media.get('info', {}).get('release_date')
        else:
            dates = media.get('info').get('release_date')
@@ -115,15 +115,8 @@ MA.Release = new Class({

        self.releases = null;
        if(self.options_container){
            // Releases are currently displayed
            if(self.options_container.isDisplayed()){
                self.options_container.destroy();
                self.createReleases();
            }
            else {
                self.options_container.destroy();
                self.options_container = null;
            }
            self.options_container.destroy();
            self.options_container = null;
        }
    });

@@ -138,10 +131,10 @@ MA.Release = new Class({

    },

    createReleases: function(refresh){
    createReleases: function(){
        var self = this;

        if(!self.options_container || refresh){
        if(!self.options_container){
            self.options_container = new Element('div.options').grab(
                self.release_container = new Element('div.releases.table')
            );

@@ -54,21 +54,13 @@ var Movie = new Class({

        // Reload when releases have updated
        self.global_events['release.update_status'] = function(notification){
            var data = notification.data;
            if(data && self.data._id == data.media_id){
            if(data && self.data._id == data.movie_id){

                if(!self.data.releases)
                    self.data.releases = [];

                var updated = false;
                self.data.releases.each(function(release){
                    if(release._id == data._id){
                        release['status'] = data.status;
                        updated = true;
                    }
                });

                if(updated)
                    self.updateReleases();
                self.data.releases.push({'quality': data.quality, 'status': data.status});
                self.updateReleases();
            }
        };

@@ -167,7 +159,7 @@ var Movie = new Class({

                }
            }
        }),
        self.thumbnail = (self.data.files && self.data.files.image_poster && self.data.files.image_poster.length > 0) ? new Element('img', {
        self.thumbnail = (self.data.files && self.data.files.image_poster) ? new Element('img', {
            'class': 'type_image poster',
            'src': Api.createUrl('file.cache') + self.data.files.image_poster[0].split(Api.getOption('path_sep')).pop()
        }): null,
@@ -21,6 +21,13 @@ config = [{
'type': 'int',
'description': 'Maximum number of items displayed from each chart.',
},
{
'name': 'update_interval',
'default': 12,
'type': 'int',
'advanced': True,
'description': '(hours)',
},
{
'name': 'hide_wanted',
'default': False,
@@ -1,5 +1,6 @@
import time

from couchpotato import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent,fireEvent
@@ -12,14 +13,13 @@ log = CPLog(__name__)

class Charts(Plugin):

update_in_progress = False
update_interval = 72 # hours

def __init__(self):
addApiView('charts.view', self.automationView)
addEvent('app.load', self.setCrons)

def setCrons(self):
fireEvent('schedule.interval', 'charts.update_cache', self.updateViewCache, hours = self.update_interval)
fireEvent('schedule.interval', 'charts.update_cache', self.updateViewCache, hours = self.conf('update_interval', default = 12))

def automationView(self, force_update = False, **kwargs):

@@ -52,7 +52,7 @@ class Charts(Plugin):
for chart in charts:
chart['hide_wanted'] = self.conf('hide_wanted')
chart['hide_library'] = self.conf('hide_library')
self.setCache('charts_cached', charts, timeout = self.update_interval * 3600)
self.setCache('charts_cached', charts, timeout = 7200 * tryInt(self.conf('update_interval', default = 12)))
except:
log.error('Failed refreshing charts')
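Aside: the new cache timeout above keeps entries for twice the refresh interval, since 7200 = 2 * 3600. A one-line check (the 12 is just the setting's default):

    update_interval = 12                    # hours, the 'update_interval' setting
    cache_timeout = 7200 * update_interval  # seconds; equals two refresh periods
    assert cache_timeout == 2 * update_interval * 3600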
@@ -2,8 +2,6 @@ var Charts = new Class({

Implements: [Options, Events],

shown_once: false,

initialize: function(options){
var self = this;
self.setOptions(options);
@@ -42,13 +40,17 @@ var Charts = new Class({
)
);

if( Cookie.read('suggestions_charts_menu_selected') === 'charts'){
self.show();
self.fireEvent.delay(0, self, 'created');
}
if( Cookie.read('suggestions_charts_menu_selected') === 'charts')
self.el.show();
else
self.el.hide();

self.api_request = Api.request('charts.view', {
'onComplete': self.fill.bind(self)
});

self.fireEvent.delay(0, self, 'created');

},

fill: function(json){
@@ -155,24 +157,6 @@ var Charts = new Class({

},

show: function(){
var self = this;

self.el.show();

if(!self.shown_once){
self.api_request = Api.request('charts.view', {
'onComplete': self.fill.bind(self)
});

self.shown_once = true;
}
},

hide: function(){
this.el.hide();
},

afterAdded: function(m){

$(m).getElement('div.chart_number')
@@ -1,5 +1,3 @@
import traceback

from bs4 import BeautifulSoup
from couchpotato import fireEvent
from couchpotato.core.helpers.rss import RSS
@@ -7,7 +5,6 @@ from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.automation.base import Automation


log = CPLog(__name__)

autoload = 'Bluray'
@@ -37,49 +34,27 @@ class Bluray(Automation, RSS):

try:
# Stop if the release year is before the minimal year
brk = False
h3s = soup.body.find_all('h3')
for h3 in h3s:
if h3.parent.name != 'a':

try:
page_year = tryInt(h3.get_text()[-4:])
if page_year > 0 and page_year < self.getMinimal('year'):
brk = True
except:
log.error('Failed determining page year: %s', traceback.format_exc())
brk = True
break

if brk:
page_year = soup.body.find_all('center')[3].table.tr.find_all('td', recursive = False)[3].h3.get_text().split(', ')[1]
if tryInt(page_year) < self.getMinimal('year'):
break

for h3 in h3s:
try:
if h3.parent.name == 'a':
name = h3.get_text().lower().split('blu-ray')[0].strip()
for table in soup.body.find_all('center')[3].table.tr.find_all('td', recursive = False)[3].find_all('table')[1:20]:
name = table.h3.get_text().lower().split('blu-ray')[0].strip()
year = table.small.get_text().split('|')[1].strip()

if not name.find('/') == -1: # make sure it is not a double movie release
continue
if not name.find('/') == -1: # make sure it is not a double movie release
continue

if not h3.parent.parent.small: # ignore non-movie tables
continue
if tryInt(year) < self.getMinimal('year'):
continue

year = h3.parent.parent.small.get_text().split('|')[1].strip()
imdb = self.search(name, year)

if tryInt(year) < self.getMinimal('year'):
continue

imdb = self.search(name, year)

if imdb:
if self.isMinimalMovie(imdb):
movies.append(imdb['imdb'])
except:
log.debug('Error parsing movie html: %s', traceback.format_exc())
break
if imdb:
if self.isMinimalMovie(imdb):
movies.append(imdb['imdb'])
except:
log.debug('Error loading page %s: %s', (page, traceback.format_exc()))
log.debug('Error loading page: %s', page)
break

self.conf('backlog', value = False)
@@ -159,7 +134,7 @@ config = [{
{
'name': 'backlog',
'advanced': True,
'description': ('Parses the history until the minimum movie year is reached. (Takes a while)', 'Will be disabled once it has completed'),
'description': 'Parses the history until the minimum movie year is reached. (Will be disabled once it has completed)',
'default': False,
'type': 'bool',
},
@@ -1,9 +1,9 @@
import copy
import traceback

from CodernityDB.database import RecordNotFound
from couchpotato import get_db
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.database import RecordNotFound
from couchpotato.core.helpers.variable import mergeDicts, randomString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
@@ -2,7 +2,7 @@ import base64
import time

from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import tryUrlencode, ss
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.base import MovieProvider
from couchpotato.environment import Env
@@ -66,7 +66,7 @@ class CouchPotatoApi(MovieProvider):
if not name:
return

name_enc = base64.b64encode(ss(name))
name_enc = base64.b64encode(name)
return self.getJsonData(self.urls['validate'] % name_enc, headers = self.getRequestHeaders())

def isMovie(self, identifier = None):
@@ -14,7 +14,7 @@ autoload = 'FanartTV'
class FanartTV(MovieProvider):

urls = {
'api': 'http://webservice.fanart.tv/v3/movies/%s?api_key=b28b14e9be662e027cfbc7c3dd600405'
'api': 'http://api.fanart.tv/webservice/movie/b28b14e9be662e027cfbc7c3dd600405/%s/JSON/all/1/2'
}

MAX_EXTRAFANART = 20
@@ -23,9 +23,10 @@ class FanartTV(MovieProvider):
def __init__(self):
addEvent('movie.info', self.getArt, priority = 1)

def getArt(self, identifier = None, extended = True, **kwargs):
def getArt(self, identifier = None, **kwargs):

if not identifier or not extended:
log.debug("Getting Extra Artwork from Fanart.tv...")
if not identifier:
return {}

images = {}
@@ -35,8 +36,9 @@ class FanartTV(MovieProvider):
fanart_data = self.getJsonData(url)

if fanart_data:
log.debug('Found images for %s', fanart_data.get('name'))
images = self._parseMovie(fanart_data)
name, resource = fanart_data.items()[0]
log.debug('Found images for %s', name)
images = self._parseMovie(resource)

except:
log.error('Failed getting extra art for %s: %s',
@@ -93,7 +95,7 @@ class FanartTV(MovieProvider):
for image in images:
if tryInt(image.get('likes')) > highscore:
highscore = tryInt(image.get('likes'))
image_url = image.get('url') or image.get('href')
image_url = image.get('url')

return image_url

@@ -116,9 +118,7 @@ class FanartTV(MovieProvider):
if tryInt(image.get('likes')) > highscore:
highscore = tryInt(image.get('likes'))
best = image
url = best.get('url') or best.get('href')
if url:
image_urls.append(url)
image_urls.append(best.get('url'))
pool.remove(best)

return image_urls
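Aside: both fanart hunks above implement a highest-"likes" pick over image records; the same selection as a standalone sketch (sample records are made up):

    images = [
        {'url': 'http://example.com/a.jpg', 'likes': '3'},
        {'url': 'http://example.com/b.jpg', 'likes': '7'},
    ]

    # Equivalent of the highscore loop: keep the image with the most likes
    best = max(images, key = lambda image: int(image.get('likes', 0)))
    assert best['url'] == 'http://example.com/b.jpg'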
@@ -7,7 +7,6 @@ from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.variable import tryInt, tryFloat, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.base import MovieProvider
import six


log = CPLog(__name__)
@@ -73,7 +72,7 @@ class OMDBAPI(MovieProvider):
try:

try:
if isinstance(movie, six.string_types):
if isinstance(movie, (str, unicode)):
movie = json.loads(movie)
except ValueError:
log.info('No proper json to decode')
@@ -1,11 +1,11 @@
import traceback
import time

from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import simplifyString, toUnicode, ss, tryUrlencode
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import simplifyString, toUnicode, ss
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.base import MovieProvider
import tmdb3

log = CPLog(__name__)

@@ -13,24 +13,15 @@ autoload = 'TheMovieDb'


class TheMovieDb(MovieProvider):

http_time_between_calls = .3

configuration = {
'images': {
'secure_base_url': 'https://image.tmdb.org/t/p/',
},
}
MAX_EXTRATHUMBS = 4

def __init__(self):
addEvent('movie.info', self.getInfo, priority = 3)
addEvent('movie.info_by_tmdb', self.getInfo)
addEvent('app.load', self.config)

def config(self):
configuration = self.request('configuration')
if configuration:
self.configuration = configuration
# Configure TMDB settings
tmdb3.set_key(self.conf('api_key'))
tmdb3.set_cache('null')

def search(self, q, limit = 12):
""" Find movie by name """
@@ -40,19 +31,14 @@ class TheMovieDb(MovieProvider):

search_string = simplifyString(q)
cache_key = 'tmdb.cache.%s.%s' % (search_string, limit)
results = None #self.getCache(cache_key)
results = self.getCache(cache_key)

if not results:
log.debug('Searching for movie: %s', q)

raw = None
try:

#name_year = fireEvent('scanner.name_year', q, single = True)

raw = self.request('search/movie', {
'query': q
}, return_key = 'results')
raw = tmdb3.searchMovie(search_string)
except:
log.error('Failed searching TMDB for "%s": %s', (search_string, traceback.format_exc()))
@@ -83,27 +69,39 @@ class TheMovieDb(MovieProvider):
if not identifier:
return {}

result = self.parseMovie({
'id': identifier
}, extended = extended)
cache_key = 'tmdb.cache.%s%s' % (identifier, '.ex' if extended else '')
result = self.getCache(cache_key)

if not result:
try:
log.debug('Getting info: %s', cache_key)
# noinspection PyArgumentList
movie = tmdb3.Movie(identifier)
try: exists = movie.title is not None
except: exists = False

if exists:
result = self.parseMovie(movie, extended = extended)
self.setCache(cache_key, result)
else:
result = {}
except:
log.error('Failed getting info for %s: %s', (identifier, traceback.format_exc()))

return result
def parseMovie(self, movie, extended = True):

cache_key = 'tmdb.cache.%s%s' % (movie.get('id'), '.ex' if extended else '')
movie_data = None #self.getCache(cache_key)
cache_key = 'tmdb.cache.%s%s' % (movie.id, '.ex' if extended else '')
movie_data = self.getCache(cache_key)

if not movie_data:

# Full data
movie = self.request('movie/%s' % movie.get('id'))

# Images
poster = self.getImage(movie, type = 'poster', size = 'w154')
poster_original = self.getImage(movie, type = 'poster', size = 'original')
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original')
extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original')
extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original', n = self.MAX_EXTRATHUMBS, skipfirst = True)

images = {
'poster': [poster] if poster else [],
@@ -116,43 +114,39 @@ class TheMovieDb(MovieProvider):

# Genres
try:
genres = [genre.get('name') for genre in movie.get('genres', [])]
genres = [genre.name for genre in movie.genres]
except:
genres = []

# 1900 is the same as None
year = str(movie.get('release_date') or '')[:4]
if not movie.get('release_date') or year == '1900' or year.lower() == 'none':
year = str(movie.releasedate or '')[:4]
if not movie.releasedate or year == '1900' or year.lower() == 'none':
year = None

# Gather actors data
actors = {}
if extended:

# Full data
cast = self.request('movie/%s/casts' % movie.get('id'), return_key = 'cast')

for cast_item in cast:
for cast_item in movie.cast:
try:
actors[toUnicode(cast_item.get('name'))] = toUnicode(cast_item.get('character'))
images['actors'][toUnicode(cast_item.get('name'))] = self.getImage(cast_item, type = 'profile', size = 'original')
actors[toUnicode(cast_item.name)] = toUnicode(cast_item.character)
images['actors'][toUnicode(cast_item.name)] = self.getImage(cast_item, type = 'profile', size = 'original')
except:
log.debug('Error getting cast info for %s: %s', (cast_item, traceback.format_exc()))

movie_data = {
'type': 'movie',
'via_tmdb': True,
'tmdb_id': movie.get('id'),
'titles': [toUnicode(movie.get('title'))],
'original_title': movie.get('original_title'),
'tmdb_id': movie.id,
'titles': [toUnicode(movie.title)],
'original_title': movie.originaltitle,
'images': images,
'imdb': movie.get('imdb_id'),
'runtime': movie.get('runtime'),
'released': str(movie.get('release_date')),
'imdb': movie.imdb,
'runtime': movie.runtime,
'released': str(movie.releasedate),
'year': tryInt(year, None),
'plot': movie.get('overview'),
'plot': movie.overview,
'genres': genres,
'collection': getattr(movie.get('belongs_to_collection'), 'name', None),
'collection': getattr(movie.collection, 'name', None),
'actor_roles': actors
}

@@ -163,12 +157,8 @@ class TheMovieDb(MovieProvider):
movie_data['titles'].append(movie_data['original_title'])

if extended:

# Full data
alternate_titles = self.request('movie/%s/alternative_titles' % movie.get('id'), return_key = 'titles')

for alt in alternate_titles:
alt_name = alt.get('title')
for alt in movie.alternate_titles:
alt_name = alt.title
if alt_name and alt_name not in movie_data['titles'] and alt_name.lower() != 'none' and alt_name is not None:
movie_data['titles'].append(alt_name)
@@ -181,38 +171,36 @@ class TheMovieDb(MovieProvider):

image_url = ''
try:
path = movie.get('%s_path' % type)
image_url = '%s%s%s' % (self.configuration['images']['secure_base_url'], size, path)
image_url = getattr(movie, type).geturl(size = size)
except:
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))

return image_url

def getMultImages(self, movie, type = 'backdrops', size = 'original'):
def getMultImages(self, movie, type = 'backdrops', size = 'original', n = -1, skipfirst = False):
"""
If n < 0, return all images. Otherwise return n images.
If n > len(getattr(movie, type)), then return all images.
If skipfirst is True, then it will skip getattr(movie, type)[0]. This
is because backdrops[0] is typically backdrop.
"""

image_urls = []
try:
images = getattr(movie, type)
if n < 0 or n > len(images):
num_images = len(images)
else:
num_images = n

# Full data
images = self.request('movie/%s/images' % movie.get('id'), return_key = type)
for image in images[1:5]:
image_urls.append(self.getImage(image, 'file', size))
for i in range(int(skipfirst), num_images + int(skipfirst)):
image_urls.append(images[i].geturl(size = size))

except:
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))
log.debug('Failed getting %i %s.%s for "%s"', (n, type, size, ss(str(movie))))

return image_urls
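Aside: the getMultImages docstring semantics (n < 0 means all images, skipfirst drops index 0) reduce to simple slicing; a sketch with a stand-in list, not part of the diff:

    def pick_images(images, n = -1, skipfirst = False):
        # n < 0 or n > len(images): take everything; otherwise take n items
        start = 1 if skipfirst else 0
        count = len(images) if (n < 0 or n > len(images)) else n
        return images[start:start + count]

    assert pick_images(['bd', 'x1', 'x2', 'x3'], n = 2, skipfirst = True) == ['x1', 'x2']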
def request(self, call = '', params = {}, return_key = None):
params = tryUrlencode(params)
url = 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.conf('api_key'), '&%s' % params if params else '')
data = self.getJsonData(url, cache_timeout = 0)

if data and return_key and data.get(return_key):
data = data.get(return_key)

return data

def isDisabled(self):
if self.conf('api_key') == '':
log.error('No API key provided.')
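Aside: the request() helper above only composes a TMDB v3 URL from the call path, API key and urlencoded params; the same composition standalone (the key is a placeholder):

    try:
        from urllib import urlencode          # Python 2
    except ImportError:
        from urllib.parse import urlencode    # Python 3

    def tmdb_url(call, api_key, params = None):
        qs = urlencode(params or {})
        return 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, api_key, '&%s' % qs if qs else '')

    assert tmdb_url('search/movie', 'KEY', {'query': 'fight club'}) == \
        'http://api.themoviedb.org/3/search/movie?api_key=KEY&query=fight+club'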
2
couchpotato/core/media/movie/providers/metadata/base.py
Executable file → Normal file
@@ -28,7 +28,7 @@ class MovieMetaData(MetaDataBase):

# Update library to get latest info
try:
group['media'] = fireEvent('movie.update', group['media'].get('_id'), identifier = getIdentifier(group['media']), extended = True, single = True)
group['media'] = fireEvent('movie.update_info', group['media'].get('_id'), identifier = getIdentifier(group['media']), extended = True, single = True)
except:
log.error('Failed to update movie, before creating metadata: %s', traceback.format_exc())
30
couchpotato/core/media/movie/providers/nzb/nzbindex.py
Normal file
@@ -0,0 +1,30 @@
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.logger import CPLog
from couchpotato.core.event import fireEvent
from couchpotato.core.media._base.providers.nzb.nzbindex import Base
from couchpotato.core.media.movie.providers.base import MovieProvider
from couchpotato.environment import Env

log = CPLog(__name__)

autoload = 'NzbIndex'


class NzbIndex(MovieProvider, Base):

def buildUrl(self, media, quality):
title = fireEvent('library.query', media, include_year = False, single = True)
year = media['info']['year']

query = tryUrlencode({
'q': '"%s %s" | "%s (%s)"' % (title, year, title, year),
'age': Env.setting('retention', 'nzb'),
'sort': 'agedesc',
'minsize': quality.get('size_min'),
'maxsize': quality.get('size_max'),
'rating': 1,
'max': 250,
'more': 1,
'complete': 1,
})
return query
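Aside: buildUrl returns only the urlencoded query string; under assumed values it expands roughly like this (all numbers illustrative, not part of the diff):

    try:
        from urllib import urlencode          # Python 2
    except ImportError:
        from urllib.parse import urlencode    # Python 3

    query = urlencode({
        'q': '"%s %s" | "%s (%s)"' % ('Movie Title', 2014, 'Movie Title', 2014),
        'age': 1500,       # retention in days, from the nzb settings
        'sort': 'agedesc',
        'minsize': 1000,   # MB bounds from the quality profile
        'maxsize': 4000,
    })
    # A URL-safe string ready to append to the provider's search endpoint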
@@ -13,7 +13,7 @@ class IPTorrents(MovieProvider, Base):
([87], ['3d']),
([48], ['720p', '1080p', 'bd50']),
([72], ['cam', 'ts', 'tc', 'r5', 'scr']),
([7,48], ['dvdrip', 'brrip']),
([7], ['dvdrip', 'brrip']),
([6], ['dvdr']),
]


@@ -13,7 +13,7 @@ class PassThePopcorn(MovieProvider, Base):
'bd50': {'media': 'Blu-ray', 'format': 'BD50'},
'1080p': {'resolution': '1080p'},
'720p': {'resolution': '720p'},
'brrip': {'resolution': 'anyhd'},
'brrip': {'media': 'Blu-ray'},
'dvdr': {'resolution': 'anysd'},
'dvdrip': {'media': 'DVD'},
'scr': {'media': 'DVD-Screener'},
@@ -27,7 +27,7 @@ class PassThePopcorn(MovieProvider, Base):
'bd50': {'Codec': ['BD50']},
'1080p': {'Resolution': ['1080p']},
'720p': {'Resolution': ['720p']},
'brrip': {'Quality': ['High Definition'], 'Container': ['!ISO']},
'brrip': {'Source': ['Blu-ray'], 'Quality': ['High Definition'], 'Container': ['!ISO']},
'dvdr': {'Codec': ['DVD5', 'DVD9']},
'dvdrip': {'Source': ['DVD'], 'Codec': ['!DVD5', '!DVD9']},
'scr': {'Source': ['DVD-Screener']},

@@ -11,7 +11,7 @@ autoload = 'TorrentLeech'
class TorrentLeech(MovieProvider, Base):

cat_ids = [
([13], ['720p', '1080p', 'bd50']),
([13], ['720p', '1080p']),
([8], ['cam']),
([9], ['ts', 'tc']),
([10], ['r5', 'scr']),
@@ -3,7 +3,7 @@ import re

from bs4 import SoupStrainer, BeautifulSoup
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.variable import mergeDicts, getTitle, getIdentifier
from couchpotato.core.helpers.variable import mergeDicts, getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.trailer.base import TrailerProvider
from requests import HTTPError
@@ -29,7 +29,7 @@ class HDTrailers(TrailerProvider):

url = self.urls['api'] % self.movieUrlName(movie_name)
try:
data = self.getCache('hdtrailers.%s' % getIdentifier(group), url, show_error = False)
data = self.getCache('hdtrailers.%s' % group['identifier'], url, show_error = False)
except HTTPError:
log.debug('No page found for: %s', movie_name)
data = None
@@ -59,7 +59,7 @@ class HDTrailers(TrailerProvider):

url = "%s?%s" % (self.urls['backup'], tryUrlencode({'s':movie_name}))
try:
data = self.getCache('hdtrailers.alt.%s' % getIdentifier(group), url, show_error = False)
data = self.getCache('hdtrailers.alt.%s' % group['identifier'], url, show_error = False)
except HTTPError:
log.debug('No alternative page found for: %s', movie_name)
data = None
@@ -68,7 +68,7 @@ class HDTrailers(TrailerProvider):
return results

try:
html = BeautifulSoup(data, parse_only = self.only_tables_tags)
html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)
result_table = html.find_all('h2', text = re.compile(movie_name))

for h2 in result_table:
@@ -90,7 +90,7 @@ class HDTrailers(TrailerProvider):

results = {'480p':[], '720p':[], '1080p':[]}
try:
html = BeautifulSoup(data, parse_only = self.only_tables_tags)
html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)
result_table = html.find('table', attrs = {'class':'bottomTable'})

for tr in result_table.find_all('tr'):

@@ -25,6 +25,6 @@ class Filmstarts(UserscriptBase):
name = html.find("meta", {"property":"og:title"})['content']

# Year of production is not available in the meta data, so get it from the table
year = table.find(text="Produktionsjahr").parent.parent.next_sibling.text
year = table.find("tr", text="Produktionsjahr").parent.parent.parent.td.text

return self.search(name, year)
return self.search(name, year)
36
couchpotato/core/media/movie/searcher.py
Executable file → Normal file
@@ -74,7 +74,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
self.in_progress = True
fireEvent('notify.frontend', type = 'movie.searcher.started', data = True, message = 'Full search started')

medias = [x['_id'] for x in fireEvent('media.with_status', 'active', types = 'movie', with_doc = False, single = True)]
medias = [x['_id'] for x in fireEvent('media.with_status', 'active', with_doc = False, single = True)]
random.shuffle(medias)

total = len(medias)
@@ -89,13 +89,12 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
for media_id in medias:

media = fireEvent('media.get', media_id, single = True)
if not media: continue

try:
self.single(media, search_protocols, manual = manual)
except IndexError:
log.error('Forcing library update for %s, if you see this often, please report: %s', (getIdentifier(media), traceback.format_exc()))
fireEvent('movie.update', media_id)
fireEvent('movie.update_info', media_id)
except:
log.error('Search failed for %s: %s', (getIdentifier(media), traceback.format_exc()))

@@ -141,17 +140,17 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
previous_releases = movie.get('releases', [])
too_early_to_search = []
outside_eta_results = 0
always_search = self.conf('always_search')
alway_search = self.conf('always_search')
ignore_eta = manual
total_result_count = 0

fireEvent('notify.frontend', type = 'movie.searcher.started', data = {'_id': movie['_id']}, message = 'Searching for "%s"' % default_title)

# Ignore eta once every 7 days
if not always_search:
if not alway_search:
prop_name = 'last_ignored_eta.%s' % movie['_id']
last_ignored_eta = float(Env.prop(prop_name, default = 0))
if last_ignored_eta < time.time() - 604800:
if last_ignored_eta > time.time() - 604800:
ignore_eta = True
Env.prop(prop_name, value = time.time())
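Aside: 604800 in the hunk above is seven days in seconds (7 * 24 * 3600); the "override the ETA once per week" check, as the first variant reads it (illustrative sketch, not part of the diff):

    import time

    WEEK = 7 * 24 * 3600      # 604800 seconds
    last_ignored_eta = 0.0    # persisted per-movie property; 0 when never ignored

    # More than a week since the last override: ignore the ETA again and record now
    if last_ignored_eta < time.time() - WEEK:
        ignore_eta = True
        last_ignored_eta = time.time()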
@@ -170,7 +169,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
}

could_not_be_released = not self.couldBeReleased(q_identifier in pre_releases, release_dates, movie['info']['year'])
if not always_search and could_not_be_released:
if not alway_search and could_not_be_released:
too_early_to_search.append(q_identifier)

# Skip release, if ETA isn't ignored
@@ -196,7 +195,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
break

quality = fireEvent('quality.single', identifier = q_identifier, single = True)
log.info('Search for %s in %s%s', (default_title, quality['label'], ' ignoring ETA' if always_search or ignore_eta else ''))
log.info('Search for %s in %s%s', (default_title, quality['label'], ' ignoring ETA' if alway_search or ignore_eta else ''))

# Extend quality with profile customs
quality['custom'] = quality_custom
@@ -223,7 +222,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title))

# Try find a valid result and download it
if (force_download or not could_not_be_released or always_search) and fireEvent('release.try_download_result', results, movie, quality_custom, single = True):
if (force_download or not could_not_be_released or alway_search) and fireEvent('release.try_download_result', results, movie, quality_custom, single = True):
ret = True

# Remove releases that aren't found anymore
@@ -241,7 +240,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
break

if total_result_count > 0:
fireEvent('media.tag', movie['_id'], 'recent', update_edited = True, single = True)
fireEvent('media.tag', movie['_id'], 'recent', single = True)

if len(too_early_to_search) > 0:
log.info2('Too early to search for %s, %s', (too_early_to_search, default_title))
@@ -278,7 +277,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):

# Contains lower quality string
contains_other = fireEvent('searcher.contains_other_quality', nzb, movie_year = media['info']['year'], preferred_quality = preferred_quality, single = True)
if contains_other and isinstance(contains_other, dict):
if contains_other != False:
log.info2('Wrong: %s, looking for %s, found %s', (nzb['name'], quality['label'], [x for x in contains_other] if contains_other else 'no quality'))
return False

@@ -382,17 +381,16 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
def tryNextRelease(self, media_id, manual = False, force_download = False):

try:

rels = fireEvent('release.for_media', media_id, single = True)
db = get_db()
rels = fireEvent('media.with_status', ['snatched', 'done'], single = True)

for rel in rels:
if rel.get('status') in ['snatched', 'done']:
fireEvent('release.update_status', rel.get('_id'), status = 'ignored')
rel['status'] = 'ignored'
db.update(rel)

media = fireEvent('media.get', media_id, single = True)
if media:
log.info('Trying next release for: %s', getTitle(media))
self.single(media, manual = manual, force_download = force_download)
movie_dict = fireEvent('media.get', media_id, single = True)
log.info('Trying next release for: %s', getTitle(movie_dict))
self.single(movie_dict, manual = manual, force_download = force_download)

return True
2
couchpotato/core/media/movie/suggestion/main.py
Executable file → Normal file
@@ -27,7 +27,7 @@ class Suggestion(Plugin):
else:

if not movies or len(movies) == 0:
active_movies = fireEvent('media.with_status', ['active', 'done'], types = 'movie', single = True)
active_movies = fireEvent('media.with_status', ['active', 'done'], single = True)
movies = [getIdentifier(x) for x in active_movies]

if not ignored or len(ignored) == 0:
@@ -2,8 +2,6 @@ var SuggestList = new Class({

Implements: [Options, Events],

shown_once: false,

initialize: function(options){
var self = this;
self.setOptions(options);
@@ -46,13 +44,12 @@ var SuggestList = new Class({
}
});

var cookie_menu_select = Cookie.read('suggestions_charts_menu_selected') || 'suggestions';
if( cookie_menu_select === 'suggestions')
self.show();
else
self.hide();
var cookie_menu_select = Cookie.read('suggestions_charts_menu_selected');
if( cookie_menu_select === 'suggestions' || cookie_menu_select === null ) self.el.show(); else self.el.hide();

self.fireEvent('created');
self.api_request = Api.request('suggestion.view', {
'onComplete': self.fill.bind(self)
});

},

@@ -148,24 +145,6 @@ var SuggestList = new Class({

},

show: function(){
var self = this;

self.el.show();

if(!self.shown_once){
self.api_request = Api.request('suggestion.view', {
'onComplete': self.fill.bind(self)
});

self.shown_once = true;
}
},

hide: function(){
this.el.hide();
},

toElement: function(){
return this.el;
}
@@ -1,10 +1,10 @@
from couchpotato.core.helpers.database import TreeBasedIndex
from CodernityDB.tree_index import TreeBasedIndex


class NotificationIndex(TreeBasedIndex):
_version = 1

custom_header = """from couchpotato.core.helpers.database import TreeBasedIndex
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
import time"""

def __init__(self, *args, **kwargs):
@@ -22,7 +22,7 @@ import time"""
class NotificationUnreadIndex(TreeBasedIndex):
_version = 1

custom_header = """from couchpotato.core.helpers.database import TreeBasedIndex
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
import time"""

def __init__(self, *args, **kwargs):
@@ -7,7 +7,6 @@ import uuid
from couchpotato import get_db
from couchpotato.api import addApiView, addNonBlockApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.database import RecordDeleted
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import tryInt, splitString
from couchpotato.core.logger import CPLog
@@ -67,9 +66,7 @@ class CoreNotifier(Notification):
fireEvent('schedule.interval', 'core.clean_messages', self.cleanMessages, seconds = 15, single = True)

addEvent('app.load', self.clean)

if not Env.get('dev'):
addEvent('app.load', self.checkMessages)
addEvent('app.load', self.checkMessages)

self.messages = []
self.listeners = []
@@ -156,14 +153,9 @@ class CoreNotifier(Notification):
n = {
'_t': 'notification',
'time': int(time.time()),
'message': toUnicode(message)
'message': toUnicode(message),
'data': data
}

if data.get('sticky'):
n['sticky'] = True
if data.get('important'):
n['important'] = True

db.insert(n)

self.frontend(type = listener, data = n)
@@ -271,16 +263,11 @@ class CoreNotifier(Notification):
if init:
db = get_db()

notifications = db.all('notification')
notifications = db.all('notification', with_doc = True)

for n in notifications:

try:
doc = db.get('id', n.get('_id'))
if doc.get('time') > (time.time() - 604800):
messages.append(doc)
except RecordDeleted:
pass
if n['doc'].get('time') > (time.time() - 604800):
messages.append(n['doc'])

return {
'success': True,
@@ -50,7 +50,7 @@ var NotificationBase = new Class({
, 'top');
self.notifications.include(result);

if((result.important !== undefined || result.sticky !== undefined) && !result.read){
if((result.data.important !== undefined || result.data.sticky !== undefined) && !result.read){
var sticky = true;
App.trigger('message', [result.message, sticky, result])
}
@@ -72,7 +72,7 @@ var NotificationBase = new Class({

if(!force_ids) {
var rn = self.notifications.filter(function(n){
return !n.read && n.important === undefined
return !n.read && n.data.important === undefined
});

var ids = [];
@@ -42,7 +42,7 @@ class Email(Notification):
# Open the SMTP connection, via SSL if requested
log.debug("Connecting to host %s on port %s" % (smtp_server, smtp_port))
log.debug("SMTP over SSL %s", ("enabled" if ssl == 1 else "disabled"))
mailserver = smtplib.SMTP_SSL(smtp_server, smtp_port) if ssl == 1 else smtplib.SMTP(smtp_server, smtp_port)
mailserver = smtplib.SMTP_SSL(smtp_server) if ssl == 1 else smtplib.SMTP(smtp_server)

if starttls:
log.debug("Using StartTLS to initiate the connection with the SMTP server")
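Aside: the connection logic in this hunk is a choice between implicit SSL and plain SMTP with an optional STARTTLS upgrade; a minimal standalone sketch (host and port are placeholders, not part of the diff):

    import smtplib

    smtp_server, smtp_port = 'mail.example.com', 587
    use_ssl, use_starttls = False, True

    # Implicit TLS uses SMTP_SSL; otherwise connect plain and upgrade if requested
    mailserver = smtplib.SMTP_SSL(smtp_server, smtp_port) if use_ssl else smtplib.SMTP(smtp_server, smtp_port)
    if not use_ssl and use_starttls:
        mailserver.starttls()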
@@ -34,9 +34,9 @@ class Growl(Notification):

self.growl = notifier.GrowlNotifier(
applicationName = Env.get('appname'),
notifications = ['Updates'],
defaultNotifications = ['Updates'],
applicationIcon = self.getNotificationImage('medium'),
notifications = ["Updates"],
defaultNotifications = ["Updates"],
applicationIcon = '%s/static/images/couch.png' % fireEvent('app.api_url', single = True),
hostname = hostname if hostname else 'localhost',
password = password if password else None,
port = port if port else 23053
@@ -56,7 +56,7 @@ class Growl(Notification):

try:
self.growl.notify(
noteType = 'Updates',
noteType = "Updates",
title = self.default_title,
description = message,
sticky = False,
@@ -1,5 +1,5 @@
from datetime import timedelta, datetime
from six.moves import urllib
from urlparse import urlparse
import traceback

from couchpotato.core.helpers.variable import cleanHost
@@ -106,7 +106,7 @@ class PlexServer(object):
def createHost(self, host, port = None):

h = cleanHost(host)
p = urllib.urlparse(h)
p = urlparse(h)
h = h.rstrip('/')

if port and not p.port:
@@ -84,8 +84,7 @@ config = [{
},
{
'name': 'api_key',
'label': 'Access Token',
'description': 'Can be found on <a href="https://www.pushbullet.com/account" target="_blank">Account Settings</a>',
'label': 'User API Key'
},
{
'name': 'devices',
@@ -1,7 +1,7 @@
from httplib import HTTPSConnection

from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
from couchpotato.core.helpers.variable import getTitle, getIdentifier
from couchpotato.core.helpers.variable import getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification

@@ -27,9 +27,9 @@ class Pushover(Notification):
'sound': self.conf('sound'),
}

if data and getIdentifier(data):
if data and data.get('identifier'):
api_data.update({
'url': toUnicode('http://www.imdb.com/title/%s/' % getIdentifier(data)),
'url': toUnicode('http://www.imdb.com/title/%s/' % data['identifier']),
'url_title': toUnicode('%s on IMDb' % getTitle(data)),
})
@@ -1,4 +1,4 @@
from couchpotato.core.helpers.variable import getTitle, getIdentifier
from couchpotato.core.helpers.variable import getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification

@@ -16,8 +16,7 @@ class Trakt(Notification):
'test': 'account/test/%s',
}

listen_to = ['movie.snatched']
enabled_option = 'notification_enabled'
listen_to = ['movie.downloaded']

def notify(self, message = '', data = None, listener = None):
if not data: data = {}
@@ -39,7 +38,7 @@ class Trakt(Notification):
'username': self.conf('automation_username'),
'password': self.conf('automation_password'),
'movies': [{
'imdb_id': getIdentifier(data),
'imdb_id': data['identifier'],
'title': getTitle(data),
'year': data['info']['year']
}] if data else []
@@ -1,16 +1,8 @@
from six import PY3
from .main import Twitter

try:
from .main import Twitter

def autoload():
return Twitter()
except:
if PY3:
from couchpotato.core.helpers.py3 import NotSupported
raise NotSupported
else:
raise
def autoload():
return Twitter()

config = [{
'name': 'twitter',
@@ -7,8 +7,8 @@ import urllib
from couchpotato.core.helpers.variable import splitString, getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from requests.exceptions import ConnectionError, Timeout
from requests.packages.urllib3.exceptions import MaxRetryError
import requests
from requests.packages.urllib3.exceptions import MaxRetryError, ConnectionError


log = CPLog(__name__)
@@ -172,7 +172,7 @@ class XBMC(Notification):
# manually fake expected response array
return [{'result': 'Error'}]

except (MaxRetryError, Timeout, ConnectionError):
except (MaxRetryError, requests.exceptions.Timeout, ConnectionError):
log.info2('Couldn\'t send request to XBMC, assuming it\'s turned off')
return [{'result': 'Error'}]
except:
@@ -208,7 +208,7 @@ class XBMC(Notification):
log.debug('Returned from request %s: %s', (host, response))

return response
except (MaxRetryError, Timeout, ConnectionError):
except (MaxRetryError, requests.exceptions.Timeout):
log.info2('Couldn\'t send request to XBMC, assuming it\'s turned off')
return []
except:
@@ -3,20 +3,12 @@ import traceback

from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from six import PY3
import xmpp


log = CPLog(__name__)

try:
import xmpp
autoload = 'Xmpp'
except:
if PY3:
from couchpotato.core.helpers.py3 import NotSupported
raise NotSupported
else:
raise
autoload = 'Xmpp'


class Xmpp(Notification):
@@ -46,8 +46,7 @@ class Automation(Plugin):
break

movie_dict = fireEvent('media.get', movie_id, single = True)
if movie_dict:
fireEvent('movie.searcher.single', movie_dict)
fireEvent('movie.searcher.single', movie_dict)

return True
@@ -1,5 +1,5 @@
import threading
from six.moves import urllib
from urllib import quote
from urlparse import urlparse
import glob
import inspect
import os.path
@@ -10,8 +10,7 @@ import traceback
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import ss, toSafeString, \
toUnicode, sp
from couchpotato.core.helpers.variable import getExt, md5, isLocalIP, scanForPassword, tryInt, getIdentifier, \
randomString
from couchpotato.core.helpers.variable import getExt, md5, isLocalIP, scanForPassword, tryInt, getIdentifier
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
import requests
@@ -36,8 +35,6 @@ class Plugin(object):
_needs_shutdown = False
_running = None

_locks = {}

user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20130519 Firefox/24.0'
http_last_use = {}
http_time_between_calls = 0
@@ -121,31 +118,15 @@ class Plugin(object):
if os.path.exists(path):
log.debug('%s already exists, overwriting file with new version', path)

write_type = 'w+' if not binary else 'w+b'

# Stream file using response object
if isinstance(content, requests.models.Response):

# Write file to temp
with open('%s.tmp' % path, write_type) as f:
for chunk in content.iter_content(chunk_size = 1048576):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()

# Rename to destination
os.rename('%s.tmp' % path, path)

else:
try:
f = open(path, write_type)
f.write(content)
f.close()
os.chmod(path, Env.getPermission('file'))
except:
log.error('Unable writing to file "%s": %s', (path, traceback.format_exc()))
if os.path.isfile(path):
os.remove(path)
try:
f = open(path, 'w+' if not binary else 'w+b')
f.write(content)
f.close()
os.chmod(path, Env.getPermission('file'))
except:
log.error('Unable writing to file "%s": %s', (path, traceback.format_exc()))
if os.path.isfile(path):
os.remove(path)

def makeDir(self, path):
path = sp(path)
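Aside: the streaming branch above is the usual "write to a temp file, then rename" pattern for requests Response objects; a standalone sketch assuming response came from requests.get(..., stream = True):

    import os

    def save_stream(response, path, chunk_size = 1048576):
        # Write 1 MB chunks to a temp file so a failed download never clobbers path
        tmp = '%s.tmp' % path
        with open(tmp, 'wb') as f:
            for chunk in response.iter_content(chunk_size = chunk_size):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
        os.rename(tmp, path)  # atomic when tmp and path share a filesystem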
@@ -162,17 +143,21 @@ class Plugin(object):
folder = sp(folder)

for item in os.listdir(folder):
full_folder = sp(os.path.join(folder, item))
full_folder = os.path.join(folder, item)

if not only_clean or (item in only_clean and os.path.isdir(full_folder)):

for subfolder, dirs, files in os.walk(full_folder, topdown = False):
for root, dirs, files in os.walk(full_folder):

try:
os.rmdir(subfolder)
except:
if show_error:
log.info2('Couldn\'t remove directory %s: %s', (subfolder, traceback.format_exc()))
for dir_name in dirs:
full_path = os.path.join(root, dir_name)

if len(os.listdir(full_path)) == 0:
try:
os.rmdir(full_path)
except:
if show_error:
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))

try:
os.rmdir(folder)
@@ -181,18 +166,18 @@ class Plugin(object):
log.error('Couldn\'t remove empty directory %s: %s', (folder, traceback.format_exc()))
# http request
def urlopen(self, url, timeout = 30, data = None, headers = None, files = None, show_error = True, stream = False):
url = urllib.parse.quote(ss(url), safe = "%/:=&?~#+!$,;'@()*[]")
def urlopen(self, url, timeout = 30, data = None, headers = None, files = None, show_error = True):
url = quote(ss(url), safe = "%/:=&?~#+!$,;'@()*[]")

if not headers: headers = {}
if not data: data = {}

# Fill in some headers
parsed_url = urllib.parse.urlparse(url)
parsed_url = urlparse(url)
host = '%s%s' % (parsed_url.hostname, (':' + str(parsed_url.port) if parsed_url.port else ''))

headers['Referer'] = headers.get('Referer', '%s://%s' % (parsed_url.scheme, host))
headers['Host'] = headers.get('Host', None)
headers['Host'] = headers.get('Host', host)
headers['User-Agent'] = headers.get('User-Agent', self.user_agent)
headers['Accept-encoding'] = headers.get('Accept-encoding', 'gzip')
headers['Connection'] = headers.get('Connection', 'keep-alive')
@@ -213,7 +198,6 @@ class Plugin(object):
del self.http_failed_disabled[host]

self.wait(host)
status_code = None
try:

kwargs = {
@@ -222,20 +206,14 @@ class Plugin(object):
'timeout': timeout,
'files': files,
'verify': False, #verify_ssl, Disable for now as to many wrongly implemented certificates..
'stream': stream,
}
method = 'post' if len(data) > 0 or files else 'get'

log.info('Opening url: %s %s, data: %s', (method, url, [x for x in data.keys()] if isinstance(data, dict) else 'with data'))
response = r.request(method, url, **kwargs)

status_code = response.status_code
if response.status_code == requests.codes.ok:
if stream:
data = response
else:
data = response.content
data = data.decode(response.encoding)
data = response.content
else:
response.raise_for_status()

@@ -246,12 +224,6 @@ class Plugin(object):

# Save failed requests by hosts
try:

# To many requests
if status_code in [429]:
self.http_failed_request[host] = 1
self.http_failed_disabled[host] = time.time()

if not self.http_failed_request.get(host):
self.http_failed_request[host] = 1
else:
@@ -283,7 +255,7 @@ class Plugin(object):

if wait > 0:
log.debug('Waiting for %s, %d seconds', (self.getName(), wait))
time.sleep(min(wait, 30))
time.sleep(wait)
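Aside: the wait() change caps the throttle sleep at 30 seconds; the surrounding logic is per-host rate limiting, sketched here with assumed bookkeeping names (not part of the diff):

    import time

    http_last_use = {}            # host -> timestamp of the previous request
    http_time_between_calls = 1   # seconds a provider wants between calls

    def wait(host):
        remaining = http_last_use.get(host, 0) - time.time() + http_time_between_calls
        if remaining > 0:
            time.sleep(min(remaining, 30))  # never stall a worker longer than 30s
        http_last_use[host] = time.time()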
def beforeCall(self, handler):
self.isRunning('%s.%s' % (self.getName(), handler.__name__))
@@ -350,9 +322,9 @@ class Plugin(object):
Env.get('cache').set(cache_key_md5, value, timeout)
return value

def createNzbName(self, data, media, unique_tag = False):
def createNzbName(self, data, media):
release_name = data.get('name')
tag = self.cpTag(media, unique_tag = unique_tag)
tag = self.cpTag(media)

# Check if password is filename
name_password = scanForPassword(data.get('name'))
@@ -365,26 +337,18 @@ class Plugin(object):
max_length = 127 - len(tag) # Some filesystems don't support 128+ long filenames
return '%s%s' % (toSafeString(toUnicode(release_name)[:max_length]), tag)

def createFileName(self, data, filedata, media, unique_tag = False):
name = self.createNzbName(data, media, unique_tag = unique_tag)
def createFileName(self, data, filedata, media):
name = self.createNzbName(data, media)
if data.get('protocol') == 'nzb' and 'DOCTYPE nzb' not in filedata and '</nzb>' not in filedata:
return '%s.%s' % (name, 'rar')
return '%s.%s' % (name, data.get('protocol'))

def cpTag(self, media, unique_tag = False):
def cpTag(self, media):
if Env.setting('enabled', 'renamer'):
identifier = getIdentifier(media)
return '.cp(' + identifier + ')' if identifier else ''

tag = ''
if Env.setting('enabled', 'renamer') or unique_tag:
identifier = getIdentifier(media) or ''
unique_tag = ', ' + randomString() if unique_tag else ''

tag = '.cp('
tag += identifier
tag += ', ' if unique_tag and identifier else ''
tag += randomString() if unique_tag else ''
tag += ')'

return tag if len(tag) > 7 else ''
return ''
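Aside: the reworked cpTag produces tags like '.cp(tt0137523)' or, with unique_tag, '.cp(tt0137523, abcdef)'; a sketch with a stand-in for the project's randomString() helper:

    import random
    import string

    def random_string(length = 6):
        # Stand-in for the randomString() helper
        return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))

    def cp_tag(identifier, unique_tag = False):
        tag = '.cp(' + identifier
        if unique_tag:
            tag += (', ' if identifier else '') + random_string()
        tag += ')'
        return tag if len(tag) > 7 else ''

    assert cp_tag('tt0137523') == '.cp(tt0137523)'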
def checkFilesChanged(self, files, unchanged_for = 60):
now = time.time()
@@ -429,19 +393,3 @@ class Plugin(object):

def isEnabled(self):
return self.conf(self.enabled_option) or self.conf(self.enabled_option) is None

def acquireLock(self, key):

lock = self._locks.get(key)
if not lock:
self._locks[key] = threading.RLock()

log.debug('Acquiring lock: %s', key)
self._locks.get(key).acquire()

def releaseLock(self, key):

lock = self._locks.get(key)
if lock:
log.debug('Releasing lock: %s', key)
self._locks.get(key).release()
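Aside: acquireLock/releaseLock above keep one reentrant lock per key; the usual call pattern, sketched with plain threading (not part of the diff):

    import threading

    _locks = {}

    def acquire_lock(key):
        # Create the lock lazily, then block until it is held
        _locks.setdefault(key, threading.RLock()).acquire()

    def release_lock(key):
        lock = _locks.get(key)
        if lock:
            lock.release()

    # Typical call site: acquire_lock('media.get.123'); try: ... finally: release_lock('media.get.123')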
@@ -1,18 +1,12 @@
import ctypes
import os
import string
import traceback
import time

from couchpotato import CPLog
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import sp, ss, toUnicode
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.helpers.variable import getUserDir
from couchpotato.core.plugins.base import Plugin


log = CPLog(__name__)
import six


if os.name == 'nt':
@@ -59,9 +53,9 @@ class FileBrowser(Plugin):
dirs = []
path = sp(path)
for f in os.listdir(path):
p = sp(os.path.join(path, f))
p = os.path.join(path, f)
if os.path.isdir(p) and ((self.is_hidden(p) and bool(int(show_hidden))) or not self.is_hidden(p)):
dirs.append(toUnicode('%s%s' % (p, os.path.sep)))
dirs.append(p + os.path.sep)

return sorted(dirs)

@@ -72,8 +66,8 @@ class FileBrowser(Plugin):

driveletters = []
for drive in string.ascii_uppercase:
if win32file.GetDriveType(drive + ':') in [win32file.DRIVE_FIXED, win32file.DRIVE_REMOTE, win32file.DRIVE_RAMDISK, win32file.DRIVE_REMOVABLE]:
driveletters.append(drive + ':\\')
if win32file.GetDriveType(drive + ":") in [win32file.DRIVE_FIXED, win32file.DRIVE_REMOTE, win32file.DRIVE_RAMDISK, win32file.DRIVE_REMOVABLE]:
driveletters.append(drive + ":\\")

return driveletters

@@ -106,19 +100,14 @@ class FileBrowser(Plugin):


def is_hidden(self, filepath):
name = ss(os.path.basename(os.path.abspath(filepath)))
name = os.path.basename(os.path.abspath(filepath))
return name.startswith('.') or self.has_hidden_attribute(filepath)

def has_hidden_attribute(self, filepath):

result = False
try:
attrs = ctypes.windll.kernel32.GetFileAttributesW(sp(filepath)) #@UndefinedVariable
attrs = ctypes.windll.kernel32.GetFileAttributesW(six.text_type(filepath)) #@UndefinedVariable
assert attrs != -1
result = bool(attrs & 2)
except (AttributeError, AssertionError):
pass
except:
log.error('Failed getting hidden attribute: %s', traceback.format_exc())

result = False
return result
@@ -1,4 +1,4 @@
from couchpotato.core.helpers.database import TreeBasedIndex
from CodernityDB.tree_index import TreeBasedIndex


class CategoryIndex(TreeBasedIndex):

@@ -27,7 +27,7 @@ class CategoryPlugin(Plugin):
'desc': 'List all available categories',
'return': {'type': 'object', 'example': """{
'success': True,
'categories': array, categories
'list': array, categories
}"""}
})
@@ -1,14 +1,13 @@

from datetime import date
import random as rndm
import time

from couchpotato import get_db
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.database import RecordDeleted
from couchpotato.core.helpers.variable import splitString, tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
import six

log = CPLog(__name__)

@@ -42,13 +41,14 @@ class Dashboard(Plugin):

# Add limit
limit = 12
if limit_offset:
splt = splitString(limit_offset) if isinstance(limit_offset, six.string_types) else limit_offset
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
limit = tryInt(splt[0])

# Get all active medias
active_ids = [x['_id'] for x in fireEvent('media.with_status', 'active', with_doc = False, single = True)]

medias = []
now_year = date.today().year

if len(active_ids) > 0:
@@ -60,13 +60,9 @@ class Dashboard(Plugin):

rndm.shuffle(active_ids)

for media_id in active_ids:
try:
media = db.get('id', media_id)
except RecordDeleted:
log.debug('Record already deleted: %s', media_id)
continue
media = db.get('id', media_id)

pp = profile_pre.get(media.get('profile_id'))
pp = profile_pre.get(media['profile_id'])
if not pp: continue

eta = media['info'].get('release_date', {}) or {}

@@ -74,25 +70,22 @@ class Dashboard(Plugin):

# Theater quality
if pp.get('theater') and fireEvent('movie.searcher.could_be_released', True, eta, media['info']['year'], single = True):
coming_soon = 'theater'
coming_soon = True
elif pp.get('dvd') and fireEvent('movie.searcher.could_be_released', False, eta, media['info']['year'], single = True):
coming_soon = 'dvd'
coming_soon = True

if coming_soon:

# Don't list older movies
eta_date = eta.get(coming_soon)
eta_3month_passed = eta_date < (now - 7862400) # Release was more than 3 months ago

if (not late and not eta_3month_passed) or \
(late and eta_3month_passed):
if ((not late and (media['info']['year'] >= now_year - 1) and (not eta.get('dvd') and not eta.get('theater') or eta.get('dvd') and eta.get('dvd') > (now - 2419200))) or
(late and (media['info']['year'] < now_year - 1 or (eta.get('dvd', 0) > 0 or eta.get('theater')) and eta.get('dvd') < (now - 2419200)))):

add = True

# Check if it doesn't have any releases
if late:
media['releases'] = fireEvent('release.for_media', media['_id'], single = True)

for release in media.get('releases'):
if release.get('status') in ['snatched', 'available', 'seeding', 'downloaded']:
add = False
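The magic numbers in the hunk above are plain second counts: 7862400 s is exactly 91 days (the "3 months" in the comment) and 2419200 s is exactly 28 days. A tiny sketch of the arithmetic behind such cutoffs:

```python
DAY = 24 * 60 * 60        # 86400 seconds

THREE_MONTHS = 91 * DAY   # 7862400, the eta_3month_passed cutoff
FOUR_WEEKS = 28 * DAY     # 2419200, the cutoff in the replacement branch

assert THREE_MONTHS == 7862400
assert FOUR_WEEKS == 2419200
```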
@@ -4,7 +4,7 @@ import traceback

from couchpotato import get_db
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode, ss, sp
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import md5, getExt, isSubFolder
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin

@@ -59,18 +59,13 @@ class FileManager(Plugin):

log.error('Failed removing unused file: %s', traceback.format_exc())

def showCacheFile(self, route, **kwargs):
Env.get('app').add_handlers(".*$", [('%s%s' % (Env.get('api_base'), route), StaticFileHandler, {'path': toUnicode(Env.get('cache_dir'))})])
Env.get('app').add_handlers(".*$", [('%s%s' % (Env.get('api_base'), route), StaticFileHandler, {'path': Env.get('cache_dir')})])

def download(self, url = '', dest = None, overwrite = False, urlopen_kwargs = None):
if not urlopen_kwargs: urlopen_kwargs = {}

# Return response object to stream download
urlopen_kwargs['stream'] = True

if not dest: # to Cache
dest = os.path.join(Env.get('cache_dir'), ss('%s.%s' % (md5(url), getExt(url))))

dest = sp(dest)
dest = os.path.join(Env.get('cache_dir'), '%s.%s' % (md5(url), getExt(url)))

if not overwrite and os.path.isfile(dest):
return dest
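The cache destination above hashes the URL and keeps its original extension, so a repeat download of the same URL resolves to the same file and can short-circuit on the `os.path.isfile` check. A standalone sketch of the idea, with `hashlib` standing in for the project's `md5`/`getExt` helpers:

```python
import hashlib
import os

def cache_path(url, cache_dir):
    """Build a stable cache filename: <md5 of url>.<original extension>."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    ext = os.path.splitext(url)[1].lstrip('.') or 'tmp'
    return os.path.join(cache_dir, '%s.%s' % (digest, ext))

# The same URL always maps to the same file, so an existence check
# before downloading avoids fetching it twice.
```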
@@ -112,4 +107,4 @@ class FileManager(Plugin):

else:
log.info('Subfolder test succeeded')

return failed == 0
return failed == 0

@@ -1,9 +1,9 @@

import codecs
import os
import re
import traceback

from couchpotato.api import addApiView
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import tryInt, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin

@@ -103,8 +103,9 @@ class Logging(Plugin):

if not os.path.isfile(path):
break

f = codecs.open(path, 'r', 'utf-8')
raw_lines = self.toList(f.read())
f = open(path, 'r')
log_content = toUnicode(f.read())
raw_lines = self.toList(log_content)
raw_lines.reverse()

brk = False

@@ -130,7 +131,7 @@ class Logging(Plugin):

def toList(self, log_content = ''):

logs_raw = log_content.split('[0m\n')
logs_raw = toUnicode(log_content).split('[0m\n')

logs = []
for log_line in logs_raw:
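The `'[0m\n'` separator in `toList` is the tail of an ANSI colour-reset escape: each written log record ends with a reset, so splitting on it recovers one entry per chunk even when an entry spans several physical lines. A hedged sketch of the same read-and-split:

```python
import codecs

ANSI_RESET_TAIL = '[0m\n'  # end of the \x1b[0m escape that closes each record

def read_log_entries(path):
    """Sketch: read a UTF-8 log file and split it into per-record chunks."""
    with codecs.open(path, 'r', 'utf-8') as f:
        content = f.read()
    entries = [entry for entry in content.split(ANSI_RESET_TAIL) if entry.strip()]
    entries.reverse()  # newest first, as the plugin does
    return entries
```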
@@ -241,7 +241,7 @@ Running on: ...\n\

'href': 'https://github.com/RuudBurger/CouchPotatoServer/blob/develop/contributing.md'
}),
new Element('span', {
'html': ' before posting, then copy the text below and <strong>FILL IN</strong> the dots.'
'text': ' before posting, then copy the text below'
})
),
textarea = new Element('textarea', {
8 changes: couchpotato/core/plugins/manage.py (Executable file → Normal file)
@@ -165,7 +165,7 @@ class Manage(Plugin):

already_used = used_files.get(release_file)

if already_used:
release_id = release['_id'] if already_used.get('last_edit', 0) > release.get('last_edit', 0) else already_used['_id']
release_id = release['_id'] if already_used.get('last_edit', 0) < release.get('last_edit', 0) else already_used['_id']
if release_id not in deleted_releases:
fireEvent('release.delete', release_id, single = True)
deleted_releases.append(release_id)

@@ -190,7 +190,6 @@ class Manage(Plugin):

delete_me = {}

# noinspection PyTypeChecker
for folder in self.in_progress:
if self.in_progress[folder]['to_go'] <= 0:
delete_me[folder] = True

@@ -220,7 +219,7 @@ class Manage(Plugin):

# Add it to release and update the info
fireEvent('release.add', group = group, update_info = False)
fireEvent('movie.update', identifier = group['identifier'], on_complete = self.createAfterUpdate(folder, group['identifier']))
fireEvent('movie.update_info', identifier = group['identifier'], on_complete = self.createAfterUpdate(folder, group['identifier']))

return addToLibrary

@@ -234,8 +233,7 @@ class Manage(Plugin):

total = self.in_progress[folder]['total']
movie_dict = fireEvent('media.get', identifier, single = True)

if movie_dict:
fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = None if total > 5 else 'Added "%s" to manage.' % getTitle(movie_dict))
fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = None if total > 5 else 'Added "%s" to manage.' % getTitle(movie_dict))

return afterUpdate
@@ -1,4 +1,4 @@

from couchpotato.core.helpers.database import TreeBasedIndex
from CodernityDB.tree_index import TreeBasedIndex

class ProfileIndex(TreeBasedIndex):

@@ -1,5 +1,6 @@

from hashlib import md5
from couchpotato.core.helpers.database import HashIndex

from CodernityDB.hash_index import HashIndex

class QualityIndex(HashIndex):
@@ -1,13 +1,12 @@

from math import fabs, ceil
import traceback
import re

from CodernityDB.database import RecordNotFound
from couchpotato import get_db
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.database import RecordNotFound
from couchpotato.core.helpers.encoding import toUnicode, ss
from couchpotato.core.helpers.variable import mergeDicts, getExt, tryInt, splitString, tryFloat
from couchpotato.core.helpers.variable import mergeDicts, getExt, tryInt, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.plugins.quality.index import QualityIndex

@@ -23,17 +22,17 @@ class QualityPlugin(Plugin):

}

qualities = [
{'identifier': 'bd50', 'hd': True, 'allow_3d': True, 'size': (20000, 60000), 'median_size': 40000, 'label': 'BR-Disk', 'alternative': ['bd25', ('br', 'disk')], 'allow': ['1080p'], 'ext':['iso', 'img'], 'tags': ['bdmv', 'certificate', ('complete', 'bluray'), 'avc', 'mvc']},
{'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'median_size': 10000, 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts', 'ts'], 'tags': ['m2ts', 'x264', 'h264']},
{'identifier': '720p', 'hd': True, 'allow_3d': True, 'size': (3000, 10000), 'median_size': 5500, 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts'], 'tags': ['x264', 'h264']},
{'identifier': 'brrip', 'hd': True, 'allow_3d': True, 'size': (700, 7000), 'median_size': 2000, 'label': 'BR-Rip', 'alternative': ['bdrip', ('br', 'rip'), 'hdtv', 'hdrip'], 'allow': ['720p', '1080p'], 'ext':['mp4', 'avi'], 'tags': ['webdl', ('web', 'dl')]},
{'identifier': 'dvdr', 'size': (3000, 10000), 'median_size': 4500, 'label': 'DVD-R', 'alternative': ['br2dvd', ('dvd', 'r')], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r'), 'dvd9']},
{'identifier': 'dvdrip', 'size': (600, 2400), 'median_size': 1500, 'label': 'DVD-Rip', 'width': 720, 'alternative': [('dvd', 'rip')], 'allow': [], 'ext':['avi'], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]},
{'identifier': 'scr', 'size': (600, 1600), 'median_size': 700, 'label': 'Screener', 'alternative': ['screener', 'dvdscr', 'ppvrip', 'dvdscreener', 'hdscr', 'webrip', ('web', 'rip')], 'allow': ['dvdr', 'dvdrip', '720p', '1080p'], 'ext':[], 'tags': []},
{'identifier': 'r5', 'size': (600, 1000), 'median_size': 700, 'label': 'R5', 'alternative': ['r6'], 'allow': ['dvdr', '720p'], 'ext':[]},
{'identifier': 'tc', 'size': (600, 1000), 'median_size': 700, 'label': 'TeleCine', 'alternative': ['telecine'], 'allow': ['720p'], 'ext':[]},
{'identifier': 'ts', 'size': (600, 1000), 'median_size': 700, 'label': 'TeleSync', 'alternative': ['telesync', 'hdts'], 'allow': ['720p'], 'ext':[]},
{'identifier': 'cam', 'size': (600, 1000), 'median_size': 700, 'label': 'Cam', 'alternative': ['camrip', 'hdcam'], 'allow': ['720p'], 'ext':[]}
{'identifier': 'bd50', 'hd': True, 'allow_3d': True, 'size': (20000, 60000), 'label': 'BR-Disk', 'alternative': ['bd25', ('br', 'disk')], 'allow': ['1080p'], 'ext':['iso', 'img'], 'tags': ['bdmv', 'certificate', ('complete', 'bluray'), 'avc', 'mvc']},
{'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts', 'ts'], 'tags': ['m2ts', 'x264', 'h264']},
{'identifier': '720p', 'hd': True, 'allow_3d': True, 'size': (3000, 10000), 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts'], 'tags': ['x264', 'h264']},
{'identifier': 'brrip', 'hd': True, 'allow_3d': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip', ('br', 'rip')], 'allow': ['720p', '1080p'], 'ext':['mp4', 'avi'], 'tags': ['hdtv', 'hdrip', 'webdl', ('web', 'dl')]},
{'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': ['br2dvd', ('dvd', 'r')], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r'), 'dvd9']},
{'identifier': 'dvdrip', 'size': (600, 2400), 'label': 'DVD-Rip', 'width': 720, 'alternative': [('dvd', 'rip')], 'allow': [], 'ext':['avi'], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]},
{'identifier': 'scr', 'size': (600, 1600), 'label': 'Screener', 'alternative': ['screener', 'dvdscr', 'ppvrip', 'dvdscreener', 'hdscr'], 'allow': ['dvdr', 'dvdrip', '720p', '1080p'], 'ext':[], 'tags': ['webrip', ('web', 'rip')]},
{'identifier': 'r5', 'size': (600, 1000), 'label': 'R5', 'alternative': ['r6'], 'allow': ['dvdr'], 'ext':[]},
{'identifier': 'tc', 'size': (600, 1000), 'label': 'TeleCine', 'alternative': ['telecine'], 'allow': [], 'ext':[]},
{'identifier': 'ts', 'size': (600, 1000), 'label': 'TeleSync', 'alternative': ['telesync', 'hdts'], 'allow': [], 'ext':[]},
{'identifier': 'cam', 'size': (600, 1000), 'label': 'Cam', 'alternative': ['camrip', 'hdcam'], 'allow': ['720p'], 'ext':[]}
]
pre_releases = ['cam', 'ts', 'tc', 'r5', 'scr']
threed_tags = {
@@ -188,15 +187,14 @@ class QualityPlugin(Plugin):

return False

def guess(self, files, extra = None, size = None, use_cache = True):
def guess(self, files, extra = None, size = None):
if not extra: extra = {}

# Create hash for cache
cache_key = str([f.replace('.' + getExt(f), '') if len(getExt(f)) < 4 else f for f in files])
if use_cache:
cached = self.getCache(cache_key)
if cached and len(extra) == 0:
return cached
cached = self.getCache(cache_key)
if cached and len(extra) == 0:
return cached

qualities = self.all()
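The cache key above drops extensions shorter than four characters, presumably so the same title in different containers ('Movie.mkv', 'Movie.avi') hits one cached guess, while unusual long "extensions" are kept as part of the name. A sketch of that memoisation (helper names are illustrative):

```python
import os

_cache = {}

def make_cache_key(files):
    """Sketch of the key built above: strip short extensions, keep odd ones."""
    parts = []
    for f in files:
        ext = os.path.splitext(f)[1].lstrip('.')
        parts.append(f.replace('.' + ext, '') if len(ext) < 4 else f)
    return str(parts)

def guess_cached(files, compute):
    key = make_cache_key(files)
    if key in _cache:
        return _cache[key]
    result = _cache[key] = compute(files)
    return result
```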
@@ -208,10 +206,6 @@ class QualityPlugin(Plugin):

'3d': {}
}

# Use metadata titles as extra check
if extra and extra.get('titles'):
files.extend(extra.get('titles'))

for cur_file in files:
words = re.split('\W+', cur_file.lower())
name_year = fireEvent('scanner.name_year', cur_file, file_name = cur_file, single = True)

@@ -224,7 +218,7 @@ class QualityPlugin(Plugin):

contains_score = self.containsTagScore(quality, words, cur_file)
threedscore = self.contains3D(quality, threed_words, cur_file) if quality.get('allow_3d') else (0, None)

self.calcScore(score, quality, contains_score, threedscore, penalty = contains_score)
self.calcScore(score, quality, contains_score, threedscore)

size_scores = []
for quality in qualities:

@@ -236,11 +230,11 @@ class QualityPlugin(Plugin):

if size_score > 0:
size_scores.append(quality)

self.calcScore(score, quality, size_score + loose_score)
self.calcScore(score, quality, size_score + loose_score, penalty = False)

# Add additional size score if only 1 size validated
if len(size_scores) == 1:
self.calcScore(score, size_scores[0], 8)
self.calcScore(score, size_scores[0], 10, penalty = False)
del size_scores

# Return nothing if all scores are <= 0
@@ -265,17 +259,17 @@ class QualityPlugin(Plugin):

def containsTagScore(self, quality, words, cur_file = ''):
cur_file = ss(cur_file)
score = 0.0
score = 0

extension = words[-1]
words = words[:-1]

points = {
'identifier': 20,
'label': 20,
'alternative': 20,
'tags': 11,
'ext': 5,
'identifier': 10,
'label': 10,
'alternative': 9,
'tags': 9,
'ext': 3,
}

# Check alt and tags

@@ -291,7 +285,11 @@ class QualityPlugin(Plugin):

if isinstance(alt, (str, unicode)) and ss(alt.lower()) in words:
log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
score += points.get(tag_type)
score += points.get(tag_type) / 2

if list(set(qualities) & set(words)):
log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
score += points.get(tag_type)

# Check extention
for ext in quality.get('ext', []):

@@ -327,7 +325,7 @@ class QualityPlugin(Plugin):

# Check width resolution, range 20
if quality.get('width') and (quality.get('width') - 20) <= extra.get('resolution_width', 0) <= (quality.get('width') + 20):
log.debug('Found %s via resolution_width: %s == %s', (quality['identifier'], quality.get('width'), extra.get('resolution_width', 0)))
score += 10
score += 5

# Check height resolution, range 20
if quality.get('height') and (quality.get('height') - 20) <= extra.get('resolution_height', 0) <= (quality.get('height') + 20):
@@ -347,28 +345,15 @@ class QualityPlugin(Plugin):

if size:

size = tryFloat(size)
size_min = tryFloat(quality['size_min'])
size_max = tryFloat(quality['size_max'])

if size_min <= size <= size_max:
log.debug('Found %s via release size: %s MB < %s MB < %s MB', (quality['identifier'], size_min, size, size_max))

proc_range = size_max - size_min
size_diff = size - size_min
size_proc = (size_diff / proc_range)

median_diff = quality['median_size'] - size_min
median_proc = (median_diff / proc_range)

max_points = 8
score += ceil(max_points - (fabs(size_proc - median_proc) * max_points))
if tryInt(quality['size_min']) <= tryInt(size) <= tryInt(quality['size_max']):
log.debug('Found %s via release size: %s MB < %s MB < %s MB', (quality['identifier'], quality['size_min'], size, quality['size_max']))
score += 5
else:
score -= 5

return score
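The removed branch above scores a release by how close its size sits to the quality's typical (median) size, normalised over the allowed range, instead of a flat ±5. Using the 720p row from the table (range 3000–10000 MB, median 5500 MB), the arithmetic works out like this sketch:

```python
from math import ceil, fabs

def size_points(size, size_min, size_max, median_size, max_points = 8):
    """Closer to the median size -> more points, linearly scaled."""
    proc_range = size_max - size_min
    size_proc = (size - size_min) / proc_range
    median_proc = (median_size - size_min) / proc_range
    return ceil(max_points - fabs(size_proc - median_proc) * max_points)

# 720p values taken from the qualities table above
print(size_points(5000.0, 3000.0, 10000.0, 5500.0))  # -> 8 points (near the median)
print(size_points(9800.0, 3000.0, 10000.0, 5500.0))  # -> 4 points (far above it)
```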
def calcScore(self, score, quality, add_score, threedscore = (0, None), penalty = 0):
def calcScore(self, score, quality, add_score, threedscore = (0, None), penalty = True):

score[quality['identifier']]['score'] += add_score

@@ -387,11 +372,11 @@ class QualityPlugin(Plugin):

if penalty and add_score != 0:
for allow in quality.get('allow', []):
score[allow]['score'] -= ((penalty * 2) if self.cached_order[allow] < self.cached_order[quality['identifier']] else penalty) * 2
score[allow]['score'] -= 40 if self.cached_order[allow] < self.cached_order[quality['identifier']] else 5

# Give panelty for all other qualities
for q in self.qualities:
if quality.get('identifier') != q.get('identifier') and score.get(q.get('identifier')):
# Give panelty for all lower qualities
for q in self.qualities[self.order.index(quality.get('identifier'))+1:]:
if score.get(q.get('identifier')):
score[q.get('identifier')]['score'] -= 1

def isFinish(self, quality, profile, release_age = 0):
@@ -459,37 +444,21 @@ class QualityPlugin(Plugin):

'Movie Monuments 2013 BrRip 1080p': {'size': 1800, 'quality': 'brrip'},
'Movie Monuments 2013 BrRip 720p': {'size': 1300, 'quality': 'brrip'},
'The.Movie.2014.3D.1080p.BluRay.AVC.DTS-HD.MA.5.1-GroupName': {'size': 30000, 'quality': 'bd50', 'is_3d': True},
'/home/namehou/Movie Monuments (2012)/Movie Monuments.mkv': {'size': 5500, 'quality': '720p', 'is_3d': False},
'/home/namehou/Movie Monuments (2012)/Movie Monuments Full-OU.mkv': {'size': 5500, 'quality': '720p', 'is_3d': True},
'/home/namehou/Movie Monuments (2013)/Movie Monuments.mkv': {'size': 10000, 'quality': '1080p', 'is_3d': False},
'/home/namehou/Movie Monuments (2013)/Movie Monuments Full-OU.mkv': {'size': 10000, 'quality': '1080p', 'is_3d': True},
'/home/namehou/Movie Monuments (2013)/Movie Monuments.mkv': {'size': 4500, 'quality': '1080p', 'is_3d': False},
'/home/namehou/Movie Monuments (2013)/Movie Monuments Full-OU.mkv': {'size': 4500, 'quality': '1080p', 'is_3d': True},
'/volume1/Public/3D/Moviename/Moviename (2009).3D.SBS.ts': {'size': 7500, 'quality': '1080p', 'is_3d': True},
'/volume1/Public/Moviename/Moviename (2009).ts': {'size': 7500, 'quality': '1080p'},
'/volume1/Public/Moviename/Moviename (2009).ts': {'size': 5500, 'quality': '1080p'},
'/movies/BluRay HDDVD H.264 MKV 720p EngSub/QuiQui le fou (criterion collection #123, 1915)/QuiQui le fou (1915) 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p'},
'C:\\movies\QuiQui le fou (collection #123, 1915)\QuiQui le fou (1915) 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p'},
'C:\\movies\QuiQui le fou (collection #123, 1915)\QuiQui le fou (1915) half-sbs 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p', 'is_3d': True},
'Moviename 2014 720p HDCAM XviD DualAudio': {'size': 4000, 'quality': 'cam'},
'Moviename (2014) - 720p CAM x264': {'size': 2250, 'quality': 'cam'},
'Movie Name (2014).mp4': {'size': 750, 'quality': 'brrip'},
'Moviename.2014.720p.R6.WEB-DL.x264.AC3-xyz': {'size': 750, 'quality': 'r5'},
'Movie name 2014 New Source 720p HDCAM x264 AC3 xyz': {'size': 750, 'quality': 'cam'},
'Movie.Name.2014.720p.HD.TS.AC3.x264': {'size': 750, 'quality': 'ts'},
'Movie.Name.2014.1080p.HDrip.x264.aac-ReleaseGroup': {'size': 7000, 'quality': 'brrip'},
'Movie.Name.2014.HDCam.Chinese.Subs-ReleaseGroup': {'size': 15000, 'quality': 'cam'},
'Movie Name 2014 HQ DVDRip X264 AC3 (bla)': {'size': 0, 'quality': 'dvdrip'},
'Movie Name1 (2012).mkv': {'size': 4500, 'quality': '720p'},
'Movie Name (2013).mkv': {'size': 8500, 'quality': '1080p'},
'Movie Name (2014).mkv': {'size': 4500, 'quality': '720p', 'extra': {'titles': ['Movie Name 2014 720p Bluray']}},
'Movie Name (2015).mkv': {'size': 500, 'quality': '1080p', 'extra': {'resolution_width': 1920}},
'Movie Name (2015).mp4': {'size': 6500, 'quality': 'brrip'},
'Movie Name (2015).mp4': {'size': 6500, 'quality': 'brrip'},
'Movie Name.2014.720p Web-Dl Aac2.0 h264-ReleaseGroup': {'size': 3800, 'quality': 'brrip'},
'Movie Name.2014.720p.WEBRip.x264.AC3-ReleaseGroup': {'size': 3000, 'quality': 'scr'},
}

correct = 0
for name in tests:
test_quality = self.guess(files = [name], extra = tests[name].get('extra', None), size = tests[name].get('size', None), use_cache = False) or {}
test_quality = self.guess(files = [name], extra = tests[name].get('extra', None), size = tests[name].get('size', None)) or {}
success = test_quality.get('identifier') == tests[name]['quality'] and test_quality.get('is_3d') == tests[name].get('is_3d', False)
if not success:
log.error('%s failed check, thinks it\'s "%s" expecting "%s"', (name,
@@ -1,5 +1,7 @@

from hashlib import md5
from couchpotato.core.helpers.database import TreeBasedIndex, HashIndex

from CodernityDB.hash_index import HashIndex
from CodernityDB.tree_index import TreeBasedIndex

class ReleaseIndex(TreeBasedIndex):
@@ -3,12 +3,12 @@ import os

import time
import traceback

from CodernityDB.database import RecordDeleted, RecordNotFound
from couchpotato import md5, get_db
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.database import RecordDeleted, RecordNotFound
from couchpotato.core.helpers.encoding import toUnicode, sp
from couchpotato.core.helpers.variable import getTitle, tryInt
from couchpotato.core.helpers.variable import getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from .index import ReleaseIndex, ReleaseStatusIndex, ReleaseIDIndex, ReleaseDownloadIndex
@@ -65,58 +65,43 @@ class Release(Plugin):

log.debug('Removing releases from dashboard')

now = time.time()
week = 604800
week = 262080

db = get_db()

# Get (and remove) parentless releases
releases = db.all('release', with_doc = False)
releases = db.all('release', with_doc = True)
media_exist = []
reindex = 0
for release in releases:
if release.get('key') in media_exist:
continue

try:

try:
doc = db.get('id', release.get('_id'))
except RecordDeleted:
reindex += 1
continue

db.get('id', release.get('key'))
media_exist.append(release.get('key'))

try:
if doc.get('status') == 'ignore':
doc['status'] = 'ignored'
db.update(doc)
if release['doc'].get('status') == 'ignore':
release['doc']['status'] = 'ignored'
db.update(release['doc'])
except:
log.error('Failed fixing mis-status tag: %s', traceback.format_exc())
except ValueError:
fireEvent('database.delete_corrupted', release.get('key'), traceback_error = traceback.format_exc(0))
reindex += 1
except RecordDeleted:
db.delete(doc)
log.debug('Deleted orphaned release: %s', doc)
reindex += 1
db.delete(release['doc'])
log.debug('Deleted orphaned release: %s', release['doc'])
except:
log.debug('Failed cleaning up orphaned releases: %s', traceback.format_exc())

if reindex > 0:
db.reindex()

del media_exist

# get movies last_edit more than a week ago
medias = fireEvent('media.with_status', ['done', 'active'], single = True)
medias = fireEvent('media.with_status', 'done', single = True)

for media in medias:
if media.get('last_edit', 0) > (now - week):
continue

for rel in self.forMedia(media['_id']):
for rel in fireEvent('release.for_media', media['_id'], single = True):

# Remove all available releases
if rel['status'] in ['available']:
@@ -126,8 +111,7 @@ class Release(Plugin):

elif rel['status'] in ['snatched', 'downloaded']:
self.updateStatus(rel['_id'], status = 'ignored')

if 'recent' in media.get('tags', []):
fireEvent('media.untag', media.get('_id'), 'recent', single = True)
fireEvent('media.untag', media.get('_id'), 'recent', single = True)

def add(self, group, update_info = True, update_id = None):

@@ -250,9 +234,8 @@ class Release(Plugin):

db = get_db()

try:
if id:
rel = db.get('id', id, with_doc = True)
self.updateStatus(id, 'available' if rel['status'] in ['ignored', 'failed'] else 'ignored')
rel = db.get('id', id, with_doc = True)
self.updateStatus(id, 'available' if rel['status'] in ['ignored', 'failed'] else 'ignored')

return {
'success': True
@@ -341,10 +324,10 @@ class Release(Plugin):

rls['download_info'] = download_result
db.update(rls)

log_movie = '%s (%s) in %s' % (getTitle(media), media['info'].get('year'), rls['quality'])
log_movie = '%s (%s) in %s' % (getTitle(media), media['info']['year'], rls['quality'])
snatch_message = 'Snatched "%s": %s' % (data.get('name'), log_movie)
log.info(snatch_message)
fireEvent('%s.snatched' % data['type'], message = snatch_message, data = media)
fireEvent('%s.snatched' % data['type'], message = snatch_message, data = rls)

# Mark release as snatched
if renamer_enabled:
@@ -380,9 +363,8 @@ class Release(Plugin):

wait_for = False
let_through = False
filtered_results = []
minimum_seeders = tryInt(Env.setting('minimum_seeders', section = 'torrent', default = 1))

# Filter out ignored and other releases we don't want
# If a single release comes through the "wait for", let through all
for rel in results:

if rel['status'] in ['ignored', 'failed']:

@@ -397,11 +379,6 @@ class Release(Plugin):

log.info('Ignored, size "%sMB" to low: %s', (rel['size'], rel['name']))
continue

if 'seeders' in rel and rel.get('seeders') < minimum_seeders:
log.info('Ignored, not enough seeders, has %s needs %s: %s', (rel.get('seeders'), minimum_seeders, rel['name']))
continue

# If a single release comes through the "wait for", let through all
rel['wait_for'] = False
if quality_custom.get('index') != 0 and quality_custom.get('wait_for', 0) > 0 and rel.get('age') <= quality_custom.get('wait_for', 0):
rel['wait_for'] = True
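The removed lines above drop torrent results whose seeder count falls below a configurable floor before any scoring happens; NZB results carry no `seeders` key and pass through untouched. A standalone sketch of that filter (names are illustrative):

```python
def filter_by_seeders(results, minimum_seeders = 1):
    """Keep NZB results (no 'seeders' key) and torrents with enough seeders."""
    kept = []
    for rel in results:
        if 'seeders' in rel and rel.get('seeders', 0) < minimum_seeders:
            continue  # skip under-seeded torrents
        kept.append(rel)
    return kept

releases = [{'name': 'a', 'seeders': 0}, {'name': 'b', 'seeders': 4}, {'name': 'c'}]
print([r['name'] for r in filter_by_seeders(releases, minimum_seeders = 1)])  # ['b', 'c']
```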
@@ -544,15 +521,11 @@ class Release(Plugin):

def forMedia(self, media_id):

db = get_db()
raw_releases = db.get_many('release', media_id)
raw_releases = list(db.get_many('release', media_id, with_doc = True))

releases = []
for r in raw_releases:
try:
doc = db.get('id', r.get('_id'))
releases.append(doc)
except RecordDeleted:
pass
releases.append(r['doc'])

releases = sorted(releases, key = lambda k: k.get('info', {}).get('score', 0), reverse = True)
157 changes: couchpotato/core/plugins/renamer.py (Executable file → Normal file)
@@ -10,8 +10,7 @@ from couchpotato.api import addApiView

from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
from couchpotato.core.helpers.encoding import toUnicode, ss, sp
from couchpotato.core.helpers.variable import getExt, mergeDicts, getTitle, \
getImdb, link, symlink, tryInt, splitString, fnEscape, isSubFolder, \
getIdentifier, randomString, getFreeSpace, getSize
getImdb, link, symlink, tryInt, splitString, fnEscape, isSubFolder, getIdentifier
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env

@@ -124,6 +123,11 @@ class Renamer(Plugin):

no_process = [to_folder]
cat_list = fireEvent('category.all', single = True) or []
no_process.extend([item['destination'] for item in cat_list])
try:
if Env.setting('library', section = 'manage').strip():
no_process.extend([sp(manage_folder) for manage_folder in splitString(Env.setting('library', section = 'manage'), '::')])
except:
pass

# Check to see if the no_process folders are inside the "from" folder.
if not os.path.isdir(base_folder) or not os.path.isdir(to_folder):
@@ -198,18 +202,14 @@ class Renamer(Plugin):

db = get_db()

# Extend the download info with info stored in the downloaded release
keep_original = self.moveTypeIsLinked()
is_torrent = False
if release_download:
release_download = self.extendReleaseDownload(release_download)
is_torrent = self.downloadIsTorrent(release_download)
keep_original = True if is_torrent and self.conf('file_action') not in ['move'] else keep_original

# Unpack any archives
extr_files = None
if self.conf('unrar'):
folder, media_folder, files, extr_files = self.extractFiles(folder = folder, media_folder = media_folder, files = files,
cleanup = self.conf('cleanup') and not keep_original)
cleanup = self.conf('cleanup') and not self.downloadIsTorrent(release_download))

groups = fireEvent('scanner.scan', folder = folder if folder else base_folder,
files = files, release_download = release_download, return_ignored = False, single = True) or []
@@ -220,12 +220,6 @@ class Renamer(Plugin):

nfo_name = self.conf('nfo_name')
separator = self.conf('separator')

cd_keys = ['<cd>','<cd_nr>', '<original>']
if not any(x in folder_name for x in cd_keys) and not any(x in file_name for x in cd_keys):
log.error('Missing `cd` or `cd_nr` in the renamer. This will cause multi-file releases of being renamed to the same file.'
'Force adding it')
file_name = '%s %s' % ('<cd>', file_name)

# Tag release folder as failed_rename in case no groups were found. This prevents check_snatched from removing the release from the downloader.
if not groups and self.statusInfoComplete(release_download):
self.tagRelease(release_download = release_download, tag = 'failed_rename')
@@ -254,7 +248,7 @@ class Renamer(Plugin):

'profile_id': None
}, search_after = False, status = 'done', single = True)
else:
group['media'] = fireEvent('movie.update', media_id = group['media'].get('_id'), single = True)
group['media'] = fireEvent('movie.update_info', media_id = group['media'].get('_id'), single = True)

if not group['media'] or not group['media'].get('_id'):
log.error('Could not rename, no library item to work with: %s', group_identifier)

@@ -273,14 +267,13 @@ class Renamer(Plugin):

category_label = category['label']

if category['destination'] and len(category['destination']) > 0 and category['destination'] != 'None':
destination = sp(category['destination'])
destination = category['destination']
log.debug('Setting category destination for "%s": %s' % (media_title, destination))
else:
log.debug('No category destination found for "%s"' % media_title)
except:
log.error('Failed getting category label: %s', traceback.format_exc())

# Find subtitle for renaming
group['before_rename'] = []
fireEvent('renamer.before', group)
@@ -333,7 +326,7 @@ class Renamer(Plugin):

if file_type is 'nfo' and not self.conf('rename_nfo'):
log.debug('Skipping, renaming of %s disabled', file_type)
for current_file in group['files'][file_type]:
if self.conf('cleanup') and (not keep_original or self.fileIsAdded(current_file, group)):
if self.conf('cleanup') and (not self.downloadIsTorrent(release_download) or self.fileIsAdded(current_file, group)):
remove_files.append(current_file)
continue

@@ -352,9 +345,6 @@ class Renamer(Plugin):

replacements['original'] = os.path.splitext(os.path.basename(current_file))[0]
replacements['original_folder'] = fireEvent('scanner.remove_cptag', group['dirname'], single = True)

if not replacements['original_folder'] or len(replacements['original_folder']) == 0:
replacements['original_folder'] = replacements['original']

# Extension
replacements['ext'] = getExt(current_file)

@@ -373,6 +363,10 @@ class Renamer(Plugin):

elif file_type is 'nfo':
final_file_name = self.doReplace(nfo_name, replacements, remove_multiple = True)

# Seperator replace
if separator:
final_file_name = final_file_name.replace(' ', separator)

# Move DVD files (no structure renaming)
if group['is_dvd'] and file_type is 'movie':
found = False
@@ -529,26 +523,18 @@ class Renamer(Plugin):

# Mark media for dashboard
if mark_as_recent:
fireEvent('media.tag', group['media'].get('_id'), 'recent', update_edited = True, single = True)
fireEvent('media.tag', group['media'].get('_id'), 'recent', single = True)

# Remove leftover files
if not remove_leftovers: # Don't remove anything
continue
break

log.debug('Removing leftover files')
for current_file in group['files']['leftover']:
if self.conf('cleanup') and not self.conf('move_leftover') and \
(not keep_original or self.fileIsAdded(current_file, group)):
(not self.downloadIsTorrent(release_download) or self.fileIsAdded(current_file, group)):
remove_files.append(current_file)

if self.conf('check_space'):
total_space, available_space = getFreeSpace(destination)
renaming_size = getSize(rename_files.keys())
if renaming_size > available_space:
log.error('Not enough space left, need %s MB but only %s MB available', (renaming_size, available_space))
self.tagRelease(group = group, tag = 'not_enough_space')
continue

# Remove files
delete_folders = []
for src in remove_files:
@@ -564,9 +550,9 @@ class Renamer(Plugin):

os.remove(src)

parent_dir = os.path.dirname(src)
if parent_dir not in delete_folders and os.path.isdir(parent_dir) and \
if delete_folders.count(parent_dir) == 0 and os.path.isdir(parent_dir) and \
not isSubFolder(destination, parent_dir) and not isSubFolder(media_folder, parent_dir) and \
isSubFolder(parent_dir, base_folder):
not isSubFolder(parent_dir, base_folder):

delete_folders.append(parent_dir)

@@ -575,7 +561,6 @@ class Renamer(Plugin):

self.tagRelease(group = group, tag = 'failed_remove')

# Delete leftover folder from older releases
delete_folders = sorted(delete_folders, key = len, reverse = True)
for delete_folder in delete_folders:
try:
self.deleteEmptyFolder(delete_folder, show_error = False)
@@ -588,16 +573,13 @@ class Renamer(Plugin):

for src in rename_files:
if rename_files[src]:
dst = rename_files[src]

if dst in group['renamed_files']:
log.error('File "%s" already renamed once, adding random string at the end to prevent data loss', dst)
dst = '%s.random-%s' % (dst, randomString())
log.info('Renaming "%s" to "%s"', (src, dst))

# Create dir
self.makeDir(os.path.dirname(dst))

try:
self.moveFile(src, dst, use_default = not is_torrent or self.fileIsAdded(src, group))
self.moveFile(src, dst, forcemove = not self.downloadIsTorrent(release_download) or self.fileIsAdded(src, group))
group['renamed_files'].append(dst)
except:
log.error('Failed renaming the file "%s" : %s', (os.path.basename(src), traceback.format_exc()))

@@ -613,7 +595,7 @@ class Renamer(Plugin):

self.untagRelease(group = group, tag = 'failed_rename')

# Tag folder if it is in the 'from' folder and it will not be removed because it is a torrent
if self.movieInFromFolder(media_folder) and keep_original:
if self.movieInFromFolder(media_folder) and self.downloadIsTorrent(release_download):
self.tagRelease(group = group, tag = 'renamed_already')

# Remove matching releases

@@ -624,7 +606,7 @@ class Renamer(Plugin):

except:
log.error('Failed removing %s: %s', (release, traceback.format_exc()))

if group['dirname'] and group['parentdir'] and not keep_original:
if group['dirname'] and group['parentdir'] and not self.downloadIsTorrent(release_download):
if media_folder:
# Delete the movie folder
group_folder = media_folder
@@ -633,9 +615,8 @@ class Renamer(Plugin):

group_folder = sp(os.path.join(base_folder, os.path.relpath(group['parentdir'], base_folder).split(os.path.sep)[0]))

try:
if self.conf('cleanup') or self.conf('move_leftover'):
log.info('Deleting folder: %s', group_folder)
self.deleteEmptyFolder(group_folder)
log.info('Deleting folder: %s', group_folder)
self.deleteEmptyFolder(group_folder)
except:
log.error('Failed removing %s: %s', (group_folder, traceback.format_exc()))
@@ -787,49 +768,33 @@ Remove it if you want it to be renamed (again, or at least let it try again)

return False

def moveFile(self, old, dest, use_default = False):
def moveFile(self, old, dest, forcemove = False):
dest = sp(dest)
try:

if os.path.exists(dest):
raise Exception('Destination "%s" already exists' % dest)

move_type = self.conf('file_action')
if use_default:
move_type = self.conf('default_file_action')

if move_type not in ['copy', 'link']:
if forcemove or self.conf('file_action') not in ['copy', 'link']:
try:
log.info('Moving "%s" to "%s"', (old, dest))
shutil.move(old, dest)
except:
exists = os.path.exists(dest)
if exists and os.path.getsize(old) == os.path.getsize(dest):
if os.path.exists(dest):
log.error('Successfully moved file "%s", but something went wrong: %s', (dest, traceback.format_exc()))
os.unlink(old)
else:
# remove faultly copied file
if exists:
os.unlink(dest)
raise
elif move_type == 'copy':
log.info('Copying "%s" to "%s"', (old, dest))
elif self.conf('file_action') == 'copy':
shutil.copy(old, dest)
else:
log.info('Linking "%s" to "%s"', (old, dest))
elif self.conf('file_action') == 'link':
# First try to hardlink
try:
log.debug('Hardlinking file "%s" to "%s"...', (old, dest))
link(old, dest)
except:
# Try to simlink next
log.debug('Couldn\'t hardlink file "%s" to "%s". Symlinking instead. Error: %s.', (old, dest, traceback.format_exc()))
log.debug('Couldn\'t hardlink file "%s" to "%s". Simlinking instead. Error: %s.', (old, dest, traceback.format_exc()))
shutil.copy(old, dest)
try:
old_link = '%s.link' % sp(old)
symlink(dest, old_link)
symlink(dest, old + '.link')
os.unlink(old)
os.rename(old_link, old)
os.rename(old + '.link', old)
except:
log.error('Couldn\'t symlink file "%s" to "%s". Copied instead. Error: %s. ', (old, dest, traceback.format_exc()))
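The `link` branch above degrades gracefully: try the cheapest option (a hard link), and if that fails, copy the file and swap a symlink into the original's place so a seeding torrent still sees a valid path. A simplified sketch of that cascade, assuming plain POSIX `os.link`/`os.symlink` rather than the project's helpers:

```python
import os
import shutil

def link_file(old, dest):
    """Hard link if possible; otherwise copy, then point `old` at the copy."""
    try:
        os.link(old, dest)              # cheapest: same-filesystem hard link
    except OSError:
        shutil.copy(old, dest)          # fall back to a real copy
        try:
            old_link = '%s.link' % old
            os.symlink(dest, old_link)  # build the symlink beside the original
            os.unlink(old)              # then swap it into the original's place
            os.rename(old_link, old)
        except OSError:
            pass                        # copy succeeded; original left as a file
```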
@@ -838,7 +803,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)

if os.name == 'nt' and self.conf('ntfs_permission'):
os.popen('icacls "' + dest + '"* /reset /T')
except:
log.debug('Failed setting permissions for file: %s, %s', (dest, traceback.format_exc(1)))
log.error('Failed setting permissions for file: %s, %s', (dest, traceback.format_exc(1)))
except:
log.error('Couldn\'t move file "%s" to "%s": %s', (old, dest, traceback.format_exc()))
raise

@@ -872,7 +837,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)

replaced = re.sub(r"[\x00:\*\?\"<>\|]", '', replaced)

sep = self.conf('foldersep') if folder else self.conf('separator')
return ss(replaced.replace(' ', ' ' if not sep else sep))
return replaced.replace(' ', ' ' if not sep else sep)

def replaceDoubles(self, string):

@@ -885,8 +850,6 @@ Remove it if you want it to be renamed (again, or at least let it try again)

reg, replace_with = r
string = re.sub(reg, replace_with, string)

string = string.rstrip(',_-/\\ ')

return string

def checkSnatched(self, fire_scan = True):
@@ -1126,9 +1089,6 @@ Remove it if you want it to be renamed (again, or at least let it try again)

return False
return src in group['before_rename']

def moveTypeIsLinked(self):
return self.conf('default_file_action') in ['copy', 'link']

def statusInfoComplete(self, release_download):
return release_download.get('id') and release_download.get('downloader') and release_download.get('folder')
@@ -1180,20 +1140,14 @@ Remove it if you want it to be renamed (again, or at least let it try again)

log.info('Archive %s found. Extracting...', os.path.basename(archive['file']))
try:
rar_handle = RarFile(archive['file'], custom_path = self.conf('unrar_path'))
rar_handle = RarFile(archive['file'])
extr_path = os.path.join(from_folder, os.path.relpath(os.path.dirname(archive['file']), folder))
self.makeDir(extr_path)
for packedinfo in rar_handle.infolist():
extr_file_path = sp(os.path.join(extr_path, os.path.basename(packedinfo.filename)))
if not packedinfo.isdir and not os.path.isfile(extr_file_path):
if not packedinfo.isdir and not os.path.isfile(sp(os.path.join(extr_path, os.path.basename(packedinfo.filename)))):
log.debug('Extracting %s...', packedinfo.filename)
rar_handle.extract(condition = [packedinfo.index], path = extr_path, withSubpath = False, overwrite = False)
if self.conf('unrar_modify_date'):
try:
os.utime(extr_file_path, (os.path.getatime(archive['file']), os.path.getmtime(archive['file'])))
except:
log.error('Rar modify date enabled, but failed: %s', traceback.format_exc())
extr_files.append(extr_file_path)
extr_files.append(sp(os.path.join(extr_path, os.path.basename(packedinfo.filename))))
del rar_handle
except Exception as e:
log.error('Failed to extract %s: %s %s', (archive['file'], e, traceback.format_exc()))
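Per the config description further down, `unrar_modify_date` copies the archive's timestamps onto each extracted file so a media centre treats them as added when the rar arrived rather than when extraction ran. The timestamp copy itself is plain standard library (the `RarFile` class above is the project's bundled unrar wrapper):

```python
import os

def copy_times(source_archive, extracted_file):
    """Give the extracted file the same atime/mtime as the source archive."""
    atime = os.path.getatime(source_archive)
    mtime = os.path.getmtime(source_archive)
    os.utime(extracted_file, (atime, mtime))
```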
@@ -1220,7 +1174,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)

except Exception as e:
log.error('Failed moving left over file %s to %s: %s %s', (leftoverfile, move_to, e, traceback.format_exc()))
# As we probably tried to overwrite the nfo file, check if it exists and then remove the original
if os.path.isfile(move_to) and os.path.getsize(leftoverfile) == os.path.getsize(move_to):
if os.path.isfile(move_to):
if cleanup:
log.info('Deleting left over file %s instead...', leftoverfile)
os.unlink(leftoverfile)
@@ -1328,18 +1282,6 @@ config = [{

'description': 'Extract rar files if found.',
'default': False,
},
{
'advanced': True,
'name': 'unrar_path',
'description': 'Custom path to unrar bin',
},
{
'advanced': True,
'name': 'unrar_modify_date',
'type': 'bool',
'description': ('Set modify date of unrar-ed files to the rar-file\'s date.', 'This will allow XBMC to recognize extracted files as recently added even if the movie was released some time ago.'),
'default': False,
},
{
'name': 'cleanup',
'type': 'bool',

@@ -1390,31 +1332,14 @@ config = [{

'label': 'Folder-Separator',
'description': ('Replace all the spaces with a character.', 'Example: ".", "-" (without quotes). Leave empty to use spaces.'),
},
{
'name': 'check_space',
'label': 'Check space',
'default': True,
'type': 'bool',
'description': ('Check if there\'s enough available space to rename the files', 'Disable when the filesystem doesn\'t return the proper value'),
'advanced': True,
},
{
'name': 'default_file_action',
'label': 'Default File Action',
'default': 'move',
'type': 'dropdown',
'values': [('Link', 'link'), ('Copy', 'copy'), ('Move', 'move')],
'description': ('<strong>Link</strong>, <strong>Copy</strong> or <strong>Move</strong> after download completed.',
'Link first tries <a href="http://en.wikipedia.org/wiki/Hard_link">hard link</a>, then <a href="http://en.wikipedia.org/wiki/Sym_link">sym link</a> and falls back to Copy.'),
'advanced': True,
},
{
'name': 'file_action',
'label': 'Torrent File Action',
'default': 'link',
'type': 'dropdown',
'values': [('Link', 'link'), ('Copy', 'copy'), ('Move', 'move')],
'description': 'See above. It is prefered to use link when downloading torrents as it will save you space, while still beeing able to seed.',
'description': ('<strong>Link</strong>, <strong>Copy</strong> or <strong>Move</strong> after download completed.',
'Link first tries <a href="http://en.wikipedia.org/wiki/Hard_link">hard link</a>, then <a href="http://en.wikipedia.org/wiki/Sym_link">sym link</a> and falls back to Copy. It is perfered to use link when downloading torrents as it will save you space, while still beeing able to seed.'),
'advanced': True,
},
{
@@ -11,6 +11,7 @@ from couchpotato.core.helpers.variable import getExt, getImdb, tryInt, \

splitString, getIdentifier
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from enzyme.exceptions import NoParserError, ParseError
from guessit import guess_movie_info
from subliminal.videos import Video
import enzyme

@@ -120,7 +121,7 @@ class Scanner(Plugin):

'()([ab])(\.....?)$' #*a.mkv
]

cp_imdb = '\.cp\((?P<id>tt[0-9]+),?\s?(?P<random>[A-Za-z0-9]+)?\)'
cp_imdb = '(.cp.(?P<id>tt[0-9{7}]+).)'

def __init__(self):
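The richer pattern on the removed side matches the `.cp(tt…)` marker CouchPotato appends to folder names, capturing the IMDB id plus an optional random suffix; the replacement only handles the bare form. A quick demonstration of what the fuller regex accepts (the sample strings are illustrative):

```python
import re

cp_imdb = r'\.cp\((?P<id>tt[0-9]+),?\s?(?P<random>[A-Za-z0-9]+)?\)'

for name in ('Movie (2013).cp(tt0123456)', 'Movie (2013).cp(tt0123456, a1B2c3)'):
    m = re.search(cp_imdb, name)
    print(m.group('id'), m.group('random'))
# -> tt0123456 None
# -> tt0123456 a1B2c3
```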
@@ -456,7 +457,6 @@ class Scanner(Plugin):

meta = self.getMeta(cur_file)

try:
data['titles'] = meta.get('titles', [])
data['video'] = meta.get('video', self.getCodec(cur_file, self.codecs['video']))
data['audio'] = meta.get('audio', self.getCodec(cur_file, self.codecs['audio']))
data['audio_channels'] = meta.get('audio_channels', 2.0)

@@ -492,7 +492,7 @@ class Scanner(Plugin):

data['quality_type'] = 'HD' if data.get('resolution_width', 0) >= 1280 or data['quality'].get('hd') else 'SD'

filename = re.sub(self.cp_imdb, '', files[0])
filename = re.sub('(.cp\(tt[0-9{7}]+\))', '', files[0])
data['group'] = self.getGroup(filename[len(folder):])
data['source'] = self.getSourceMedia(filename)
if data['quality'].get('is_3d', 0):
@@ -527,33 +527,16 @@ class Scanner(Plugin):

try: ac = self.audio_codec_map.get(p.audio[0].codec)
except: pass

# Find title in video headers
titles = []

try:
if p.title and self.findYear(p.title):
titles.append(ss(p.title))
except:
log.error('Failed getting title from meta: %s', traceback.format_exc())

for video in p.video:
try:
if video.title and self.findYear(video.title):
titles.append(ss(video.title))
except:
log.error('Failed getting title from meta: %s', traceback.format_exc())

return {
'titles': list(set(titles)),
'video': vc,
'audio': ac,
'resolution_width': tryInt(p.video[0].width),
'resolution_height': tryInt(p.video[0].height),
'audio_channels': p.audio[0].channels,
}
except enzyme.exceptions.ParseError:
except ParseError:
log.debug('Failed to parse meta for %s', filename)
except enzyme.exceptions.NoParserError:
except NoParserError:
log.debug('No parser found for %s', filename)
except:
log.debug('Failed parsing %s', filename)

@@ -570,7 +553,7 @@ class Scanner(Plugin):

scan_result = []
for p in paths:
if not group['is_dvd']:
video = Video.from_path(sp(p))
video = Video.from_path(toUnicode(p))
video_result = [(video, video.scan())]
scan_result.extend(video_result)
@@ -694,7 +677,7 @@ class Scanner(Plugin):

def removeCPTag(self, name):
try:
return re.sub(self.cp_imdb, '', name).strip()
return re.sub(self.cp_imdb, '', name)
except:
pass
return name
@@ -33,43 +33,33 @@ name_scores = [

def nameScore(name, year, preferred_words):
""" Calculate score for words in the NZB name """

try:
score = 0
name = name.lower()
score = 0
name = name.lower()

# give points for the cool stuff
for value in name_scores:
v = value.split(':')
add = int(v.pop())
if v.pop() in name:
score += add
# give points for the cool stuff
for value in name_scores:
v = value.split(':')
add = int(v.pop())
if v.pop() in name:
score += add

# points if the year is correct
if str(year) in name:
score += 5
# points if the year is correct
if str(year) in name:
score += 5

# Contains preferred word
nzb_words = re.split('\W+', simplifyString(name))
score += 100 * len(list(set(nzb_words) & set(preferred_words)))
# Contains preferred word
nzb_words = re.split('\W+', simplifyString(name))
score += 100 * len(list(set(nzb_words) & set(preferred_words)))

return score
except:
log.error('Failed doing nameScore: %s', traceback.format_exc())

return 0
return score

def nameRatioScore(nzb_name, movie_name):
try:
nzb_words = re.split('\W+', fireEvent('scanner.create_file_identifier', nzb_name, single = True))
movie_words = re.split('\W+', simplifyString(movie_name))
nzb_words = re.split('\W+', fireEvent('scanner.create_file_identifier', nzb_name, single = True))
movie_words = re.split('\W+', simplifyString(movie_name))

left_over = set(nzb_words) - set(movie_words)
return 10 - len(left_over)
except:
log.error('Failed doing nameRatioScore: %s', traceback.format_exc())

return 0
left_over = set(nzb_words) - set(movie_words)
return 10 - len(left_over)

def namePositionScore(nzb_name, movie_name):
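nameScore is purely additive: every `word:points` rule from name_scores found in the release name adds its points, a matching year adds 5, and each preferred word is worth 100. A worked sketch with illustrative rules (the real name_scores list lives at the top of this module):

```python
import re

name_scores = ['proper:2', 'repack:2', 'x264:1']  # illustrative 'word:points' rules

def name_score(name, year, preferred_words):
    score = 0
    name = name.lower()
    for value in name_scores:
        word, add = value.split(':')
        if word in name:
            score += int(add)
    if str(year) in name:
        score += 5
    nzb_words = re.split(r'\W+', name)
    score += 100 * len(set(nzb_words) & set(preferred_words))
    return score

print(name_score('Movie.Name.2013.PROPER.x264', 2013, ['proper']))  # 2 + 1 + 5 + 100 = 108
```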
@@ -144,53 +134,38 @@ def providerScore(provider):
|
||||
|
||||
def duplicateScore(nzb_name, movie_name):
|
||||
|
||||
try:
|
||||
nzb_words = re.split('\W+', simplifyString(nzb_name))
|
||||
movie_words = re.split('\W+', simplifyString(movie_name))
|
||||
nzb_words = re.split('\W+', simplifyString(nzb_name))
|
||||
movie_words = re.split('\W+', simplifyString(movie_name))
|
||||
|
||||
# minus for duplicates
|
||||
duplicates = [x for i, x in enumerate(nzb_words) if nzb_words[i:].count(x) > 1]
|
||||
# minus for duplicates
|
||||
duplicates = [x for i, x in enumerate(nzb_words) if nzb_words[i:].count(x) > 1]
|
||||
|
||||
return len(list(set(duplicates) - set(movie_words))) * -4
|
||||
except:
|
||||
log.error('Failed doing duplicateScore: %s', traceback.format_exc())
|
||||
|
||||
return 0
|
||||
return len(list(set(duplicates) - set(movie_words))) * -4
|
||||
|
||||
|
||||
def partialIgnoredScore(nzb_name, movie_name, ignored_words):
|
||||
|
||||
try:
|
||||
nzb_name = nzb_name.lower()
|
||||
movie_name = movie_name.lower()
|
||||
nzb_name = nzb_name.lower()
|
||||
movie_name = movie_name.lower()
|
||||
|
||||
score = 0
|
||||
for ignored_word in ignored_words:
|
||||
if ignored_word in nzb_name and ignored_word not in movie_name:
|
||||
score -= 5
|
||||
score = 0
|
||||
for ignored_word in ignored_words:
|
||||
if ignored_word in nzb_name and ignored_word not in movie_name:
|
||||
score -= 5
|
||||
|
||||
return score
|
||||
except:
|
||||
log.error('Failed doing partialIgnoredScore: %s', traceback.format_exc())
|
||||
|
||||
return 0
|
||||
return score
|
||||
|
||||
|
||||
def halfMultipartScore(nzb_name):

try:
wrong_found = 0
for nr in [1, 2, 3, 4, 5, 'i', 'ii', 'iii', 'iv', 'v', 'a', 'b', 'c', 'd', 'e']:
for wrong in ['cd', 'part', 'dis', 'disc', 'dvd']:
if '%s%s' % (wrong, nr) in nzb_name.lower():
wrong_found += 1
wrong_found = 0
for nr in [1, 2, 3, 4, 5, 'i', 'ii', 'iii', 'iv', 'v', 'a', 'b', 'c', 'd', 'e']:
for wrong in ['cd', 'part', 'dis', 'disc', 'dvd']:
if '%s%s' % (wrong, nr) in nzb_name.lower():
wrong_found += 1

if wrong_found == 1:
return -30

return 0
except:
log.error('Failed doing halfMultipartScore: %s', traceback.format_exc())
if wrong_found == 1:
return -30

return 0

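halfMultipartScore guards against grabbing one half of a multi-part release: it scans for markers like cd1 or partii, and exactly one hit is suspicious, since a name mentioning several parts is more likely a complete set. A runnable sketch of the hunk's logic:

def half_multipart_score(nzb_name):
    # Count markers such as 'cd1', 'part2', 'dvda' in the lowercased name
    wrong_found = 0
    for nr in [1, 2, 3, 4, 5, 'i', 'ii', 'iii', 'iv', 'v', 'a', 'b', 'c', 'd', 'e']:
        for wrong in ['cd', 'part', 'dis', 'disc', 'dvd']:
            if '%s%s' % (wrong, nr) in nzb_name.lower():
                wrong_found += 1

    # A single marker suggests only half of a multi-part release
    if wrong_found == 1:
        return -30
    return 0

print(half_multipart_score('Movie.2014.cd1.avi'))  # -30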
@@ -1,14 +1,12 @@
from __future__ import with_statement

import ConfigParser
from hashlib import md5

from CodernityDB.hash_index import HashIndex
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.database import HashIndex
from couchpotato.core.helpers.encoding import toUnicode
from six.moves import configparser
from couchpotato.core.helpers.variable import mergeDicts, tryInt, tryFloat
import six


class Settings(object):
@@ -64,7 +62,7 @@ class Settings(object):
def setFile(self, config_file):
self.file = config_file

self.p = configparser.RawConfigParser()
self.p = ConfigParser.RawConfigParser()
self.p.read(config_file)

from couchpotato.core.logger import CPLog
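The import swap above is the py3-compatibility shim: six.moves.configparser resolves to ConfigParser on Python 2 and configparser on Python 3, so setFile can stay identical on both interpreters. A minimal sketch of the same pattern (the file name here is hypothetical):

# Resolves to ConfigParser on Python 2, configparser on Python 3
from six.moves import configparser

p = configparser.RawConfigParser()
p.read('settings.conf')  # a missing file is silently skipped

if p.has_section('core'):
    print(p.get('core', 'api_key'))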
@@ -150,10 +148,7 @@ class Settings(object):
return tryFloat(self.p.get(section, option))

def getUnicode(self, section, option):
value = self.p.get(section, option)
if six.PY2:
value = value.decode('unicode_escape')

value = self.p.get(section, option).decode('unicode_escape')
return toUnicode(value).strip()

def getValues(self):
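The getUnicode change makes the unicode_escape decode conditional: Python 2's RawConfigParser returns byte strings that need decoding, while Python 3's already returns str. A standalone sketch, with a plain strip standing in for the toUnicode helper:

import six

def get_unicode(parser, section, option):
    value = parser.get(section, option)
    # Only Python 2 hands back bytes that need the escape decode
    if six.PY2:
        value = value.decode('unicode_escape')
    return value.strip()  # the real helper wraps this in toUnicode()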
@@ -166,7 +161,7 @@ class Settings(object):
return values

def save(self):
with open(self.file, 'w') as configfile:
with open(self.file, 'wb') as configfile:
self.p.write(configfile)

self.log.debug('Saved settings')

@@ -14,7 +14,6 @@ class Env(object):
''' Environment variables '''
_app = None
_encoding = 'UTF-8'
_fs_encoding = 'UTF-8'
_debug = False
_dev = False
_settings = Settings()

@@ -9,18 +9,16 @@ import traceback
import warnings
import re
import tarfile
import shutil

from CodernityDB.database_super_thread_safe import SuperThreadSafeDatabase
from argparse import ArgumentParser
from cache import FileSystemCache
from couchpotato import KeyHandler, LoginHandler, LogoutHandler
from couchpotato.api import NonBlockHandler, ApiHandler
from couchpotato.core.event import fireEventAsync, fireEvent
from couchpotato.core.helpers.database import SuperThreadSafeDatabase
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.helpers.variable import getDataDir, tryInt, getFreeSpace
import requests
from requests.packages.urllib3 import disable_warnings
from tornado.httpserver import HTTPServer
from tornado.web import Application, StaticFileHandler, RedirectHandler

@@ -86,17 +84,9 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
encoding = 'UTF-8'

Env.set('encoding', encoding)
Env.set('fs_encoding', sys.getfilesystemencoding())

# Do db stuff
db_path = sp(os.path.join(data_dir, 'database'))
old_db_path = os.path.join(data_dir, 'couchpotato.db')

# Remove database folder if both exists
if os.path.isdir(db_path) and os.path.isfile(old_db_path):
db = SuperThreadSafeDatabase(db_path)
db.open()
db.destroy()

# Check if database exists
db = SuperThreadSafeDatabase(db_path)
@@ -110,19 +100,14 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
if not os.path.isdir(backup_path): os.makedirs(backup_path)

for root, dirs, files in os.walk(backup_path):
# Only consider files being a direct child of the backup_path
if root == backup_path:
for backup_file in sorted(files):
ints = re.findall('\d+', backup_file)
for backup_file in sorted(files):
ints = re.findall('\d+', backup_file)

# Delete non zip files
if len(ints) != 1:
os.remove(os.path.join(root, backup_file))
else:
existing_backups.append((int(ints[0]), backup_file))
else:
# Delete stray directories.
shutil.rmtree(root)
# Delete non zip files
if len(ints) != 1:
os.remove(os.path.join(backup_path, backup_file))
else:
existing_backups.append((int(ints[0]), backup_file))

# Remove all but the last 5
for eb in existing_backups[:-backup_count]:
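Both sides of the backup hunk end the same way: collect (number, filename) pairs for the timestamped zip files, delete anything that does not match, and drop all but the newest backup_count entries. A standalone sketch of that retention step (the path layout and names are illustrative):

import os
import re

def prune_backups(backup_path, backup_count = 5):
    # Keep (timestamp, filename) pairs for names containing exactly one number
    existing_backups = []
    for backup_file in sorted(os.listdir(backup_path)):
        ints = re.findall(r'\d+', backup_file)
        if len(ints) == 1:
            existing_backups.append((int(ints[0]), backup_file))

    # Remove all but the newest `backup_count` backups
    for nr, backup_file in sorted(existing_backups)[:-backup_count]:
        os.remove(os.path.join(backup_path, backup_file))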
@@ -152,15 +137,12 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
if not os.path.exists(python_cache):
os.mkdir(python_cache)

session = requests.Session()
session.max_redirects = 5

# Register environment settings
Env.set('app_dir', sp(base_path))
Env.set('data_dir', sp(data_dir))
Env.set('log_path', sp(os.path.join(log_dir, 'CouchPotato.log')))
Env.set('db', db)
Env.set('http_opener', session)
Env.set('http_opener', requests.Session())
Env.set('cache_dir', cache_dir)
Env.set('cache', FileSystemCache(python_cache))
Env.set('console_log', options.console_log)
@@ -185,9 +167,6 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
for logger_name in ['gntp']:
logging.getLogger(logger_name).setLevel(logging.WARNING)

# Disable SSL warning
disable_warnings()

# Use reloader
reloader = debug is True and development and not Env.get('desktop') and not options.daemon

@@ -205,7 +184,7 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
logger.addHandler(hdlr)

# To file
hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'), 'a', 500000, 10, encoding = 'utf-8')
hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'), 'a', 500000, 10, encoding = Env.get('encoding'))
hdlr2.setFormatter(formatter)
logger.addHandler(hdlr2)

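The logging hunk only disagrees about the encoding argument; the rotation parameters are shared: append mode, rotate at roughly 500 kB, keep ten old files. A minimal sketch of the same handler setup (log path and logger name are illustrative):

import logging
from logging import handlers

logger = logging.getLogger('couchpotato')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%H:%M:%S')

# Append mode, rotate at ~500 kB, keep 10 rotated files
hdlr2 = handlers.RotatingFileHandler('CouchPotato.log', 'a', 500000, 10, encoding = 'utf-8')
hdlr2.setFormatter(formatter)
logger.addHandler(hdlr2)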
@@ -117,7 +117,7 @@ var AboutSettingTab = new Class({
var self = this;
var date = new Date(json.version.date * 1000);
self.version_text.set('text', json.version.hash + (json.version.date ? ' ('+date.toLocaleString()+')' : ''));
self.updater_type.set('text', json.version.type + ', ' + json.branch);
self.updater_type.set('text', (json.version.type != json.branch) ? (json.version.type + ', ' + json.branch) : json.branch);
}

});

@@ -146,13 +146,13 @@ Page.Home = new Class({
var self = this;

// Suggest
self.suggestions_list = new SuggestList({
'onCreated': function(){
self.suggestion_list = new SuggestList({
'onLoaded': function(){
self.chain.callChain();
}
});

$(self.suggestions_list).inject(self.el);
$(self.suggestion_list).inject(self.el);

},

@@ -160,38 +160,46 @@ Page.Home = new Class({
var self = this;

// Charts
self.charts_list = new Charts({
self.charts = new Charts({
'onCreated': function(){
self.chain.callChain();
}
});

$(self.charts_list).inject(self.el);
$(self.charts).inject(self.el);

},

createSuggestionsChartsMenu: function(){
var self = this,
suggestion_tab, charts_tab;
var self = this;

self.el_toggle_menu = new Element('div.toggle_menu', {
'events': {
'click:relay(a)': function(e, el) {
e.preventDefault();
self.toggleSuggestionsCharts(el.get('data-container'), el);
}
}
}).adopt(
suggestion_tab = new Element('a.toggle_suggestions', {
'data-container': 'suggestions'
}).grab(new Element('h2', {'text': 'Suggestions'})),
charts_tab = new Element('a.toggle_charts', {
'data-container': 'charts'
}).grab( new Element('h2', {'text': 'Charts'}))
);
self.el_toggle_menu_suggestions = new Element('a.toggle_suggestions.active', {
'href': '#',
'events': { 'click': function(e) {
e.preventDefault();
self.toggleSuggestionsCharts('suggestions');
}
}
}).grab( new Element('h2', {'text': 'Suggestions'}));

var menu_selected = Cookie.read('suggestions_charts_menu_selected') || 'suggestions';
self.toggleSuggestionsCharts(menu_selected, menu_selected == 'suggestions' ? suggestion_tab : charts_tab);
self.el_toggle_menu_charts = new Element('a.toggle_charts', {
'href': '#',
'events': { 'click': function(e) {
e.preventDefault();
self.toggleSuggestionsCharts('charts');
}
}
}).grab( new Element('h2', {'text': 'Charts'}));

self.el_toggle_menu = new Element('div.toggle_menu').grab(
self.el_toggle_menu_suggestions
).grab(
self.el_toggle_menu_charts
);

var menu_selected = Cookie.read('suggestions_charts_menu_selected');
if( menu_selected === null ) menu_selected = 'suggestions';
self.toggleSuggestionsCharts( menu_selected );

self.el_toggle_menu.inject(self.el);

@@ -199,19 +207,23 @@ Page.Home = new Class({

},

toggleSuggestionsCharts: function(menu_id, el){
toggleSuggestionsCharts: function(menu_id){
var self = this;

// Toggle tab
self.el_toggle_menu.getElements('.active').removeClass('active');
if(el) el.addClass('active');

// Hide both
if(self.suggestions_list) self.suggestions_list.hide();
if(self.charts_list) self.charts_list.hide();

var toggle_to = self[menu_id + '_list'];
if(toggle_to) toggle_to.show();
switch(menu_id) {
case 'suggestions':
if($(self.suggestion_list)) $(self.suggestion_list).show();
self.el_toggle_menu_suggestions.addClass('active');
if($(self.charts)) $(self.charts).hide();
self.el_toggle_menu_charts.removeClass('active');
break;
case 'charts':
if($(self.charts)) $(self.charts).show();
self.el_toggle_menu_charts.addClass('active');
if($(self.suggestion_list)) $(self.suggestion_list).hide();
self.el_toggle_menu_suggestions.removeClass('active');
break;
}

Cookie.write('suggestions_charts_menu_selected', menu_id, {'duration': 365});
},

@@ -560,19 +560,11 @@ Option.Password = new Class({
create: function(){
var self = this;

self.el.adopt(
self.createLabel(),
self.input = new Element('input.inlay', {
'type': 'text',
'name': self.postName(),
'value': self.getSettingValue() ? '********' : '',
'placeholder': self.getPlaceholder()
})
);
self.parent();
self.input.set('type', 'password');

self.input.addEvent('focus', function(){
self.input.set('value', '');
self.input.set('type', 'password');
self.input.set('value', '')
})

}
@@ -642,7 +634,6 @@ Option.Directory = new Class({
browser: null,
save_on_change: false,
use_cache: false,
current_dir: '',

create: function(){
var self = this;
@@ -654,17 +645,8 @@ Option.Directory = new Class({
'click': self.showBrowser.bind(self)
}
}).adopt(
self.input = new Element('input', {
'value': self.getSettingValue(),
'events': {
'change': self.filterDirectory.bind(self),
'keydown': function(e){
if(e.key == 'enter' || e.key == 'tab')
(e).stop();
},
'keyup': self.filterDirectory.bind(self),
'paste': self.filterDirectory.bind(self)
}
self.input = new Element('span', {
'text': self.getSettingValue()
})
)
);
@@ -672,55 +654,10 @@ Option.Directory = new Class({
self.cached = {};
},

filterDirectory: function(e){
var self = this,
value = self.getValue(),
path_sep = Api.getOption('path_sep'),
active_selector = 'li:not(.blur):not(.empty)';

if(e.key == 'enter' || e.key == 'tab'){
(e).stop();

var first = self.dir_list.getElement(active_selector);
if(first){
self.selectDirectory(first.get('data-value'));
}
}
else {

// New folder
if(value.substr(-1) == path_sep){
if(self.current_dir != value)
self.selectDirectory(value)
}
else {
var pd = self.getParentDir(value);
if(self.current_dir != pd)
self.getDirs(pd);

var folder_filter = value.split(path_sep).getLast()
self.dir_list.getElements('li').each(function(li){
var valid = li.get('text').substr(0, folder_filter.length).toLowerCase() != folder_filter.toLowerCase()
li[valid ? 'addClass' : 'removeClass']('blur')
});

var first = self.dir_list.getElement(active_selector);
if(first){
if(!self.dir_list_scroll)
self.dir_list_scroll = new Fx.Scroll(self.dir_list, {
'transition': 'quint:in:out'
});

self.dir_list_scroll.toElement(first);
}
}
}
},

selectDirectory: function(dir){
var self = this;

self.input.set('value', dir);
self.input.set('text', dir);

self.getDirs()
},
@@ -731,28 +668,9 @@ Option.Directory = new Class({
self.selectDirectory(self.getParentDir())
},

caretAtEnd: function(){
var self = this;

self.input.focus();

if (typeof self.input.selectionStart == "number") {
self.input.selectionStart = self.input.selectionEnd = self.input.get('value').length;
} else if (typeof el.createTextRange != "undefined") {
self.input.focus();
var range = self.input.createTextRange();
range.collapse(false);
range.select();
}
},

showBrowser: function(){
var self = this;

// Move caret to back of the input
if(!self.browser || self.browser && !self.browser.isVisible())
self.caretAtEnd()

if(!self.browser){
self.browser = new Element('div.directory_list').adopt(
new Element('div.pointer'),
@@ -768,9 +686,7 @@ Option.Directory = new Class({
}).adopt(
self.show_hidden = new Element('input[type=checkbox].inlay', {
'events': {
'change': function(){
self.getDirs()
}
'change': self.getDirs.bind(self)
}
})
)
@@ -791,7 +707,7 @@ Option.Directory = new Class({
'text': 'Clear',
'events': {
'click': function(e){
self.input.set('value', '');
self.input.set('text', '');
self.hideBrowser(e, true);
}
}
@@ -819,7 +735,7 @@ Option.Directory = new Class({
new Form.Check(self.show_hidden);
}

self.initial_directory = self.input.get('value');
self.initial_directory = self.input.get('text');

self.getDirs();
self.browser.show();
@@ -833,7 +749,7 @@ Option.Directory = new Class({
if(save)
self.save();
else
self.input.set('value', self.initial_directory);
self.input.set('text', self.initial_directory);

self.browser.hide();
self.el.removeEvents('outerClick')
@@ -841,21 +757,21 @@
},

fillBrowser: function(json){
var self = this,
v = self.getValue();
var self = this;

self.data = json;

var previous_dir = json.parent;
var v = self.getValue();
var previous_dir = self.getParentDir();

if(v == '')
self.input.set('value', json.home);
self.input.set('text', json.home);

if(previous_dir.length >= 1 && !json.is_root){
if(previous_dir != v && previous_dir.length >= 1 && !json.is_root){

var prev_dirname = self.getCurrentDirname(previous_dir);
if(previous_dir == json.home)
prev_dirname = 'Home Folder';
prev_dirname = 'Home';
else if(previous_dir == '/' && json.platform == 'nt')
prev_dirname = 'Computer';

@@ -885,13 +801,12 @@ Option.Directory = new Class({
new Element('li.empty', {
'text': 'Selected folder is empty'
}).inject(self.dir_list)

self.caretAtEnd();
},

getDirs: function(dir){
var self = this,
c = dir || self.getValue();
getDirs: function(){
var self = this;

var c = self.getValue();

if(self.cached[c] && self.use_cache){
self.fillBrowser()
@@ -902,10 +817,7 @@ Option.Directory = new Class({
'path': c,
'show_hidden': +self.show_hidden.checked
},
'onComplete': function(json){
self.current_dir = c;
self.fillBrowser(json);
}
'onComplete': self.fillBrowser.bind(self)
})
}
},
@@ -919,8 +831,8 @@ Option.Directory = new Class({
var v = dir || self.getValue();
var sep = Api.getOption('path_sep');
var dirs = v.split(sep);
if(dirs.pop() == '')
dirs.pop();
if(dirs.pop() == '')
dirs.pop();

return dirs.join(sep) + sep
},
@@ -933,7 +845,7 @@ Option.Directory = new Class({

getValue: function(){
var self = this;
return self.input.get('value');
return self.input.get('text');
}
});

@@ -302,19 +302,15 @@
font-family: 'Elusive-Icons';
color: #f5e39c;
}
.page form .directory > input {
.page form .directory > span {
height: 25px;
display: inline-block;
float: right;
text-align: right;
white-space: nowrap;
cursor: pointer;
background: none;
border: 0;
color: #FFF;
width: 100%;
}
.page form .directory input:empty:before {
.page form .directory span:empty:before {
content: 'No folder selected';
font-style: italic;
opacity: .3;
@@ -357,11 +353,6 @@
white-space: nowrap;
text-overflow: ellipsis;
}

.page .directory_list li.blur {
opacity: .3;
}

.page .directory_list li:last-child {
border-bottom: 1px solid rgba(255,255,255,0.1);
}

@@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no"/>
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="mobile-web-app-capable" content="yes">
<meta http-equiv="X-UA-Compatible" content="IE=edge" />


{% for url in fireEvent('clientscript.get_styles', as_html = True, location = 'front', single = True) %}
<link rel="stylesheet" href="{{ Env.get('web_base') }}{{ url }}" type="text/css">{% end %}