Compare commits
107 Commits
py3
...
build/2.3.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5d913e87c3 | ||
|
|
16f02bda27 | ||
|
|
8d108b92bf | ||
|
|
46783028b1 | ||
|
|
d08c7c57a8 | ||
|
|
eeeb845ef3 | ||
|
|
651a063f94 | ||
|
|
f20aaa2d9d | ||
|
|
ba925ec191 | ||
|
|
3b7376fd18 | ||
|
|
c31b10c798 | ||
|
|
acda664686 | ||
|
|
e2852407ea | ||
|
|
88e738c6cd | ||
|
|
eaae8bdb0b | ||
|
|
821f68909d | ||
|
|
2b8dfed475 | ||
|
|
607b5ea766 | ||
|
|
88579cd71a | ||
|
|
6c57316ce6 | ||
|
|
6702683da3 | ||
|
|
1ed58586a1 | ||
|
|
f08ccd4fd8 | ||
|
|
312562a9f5 | ||
|
|
9e260a89af | ||
|
|
d233e4d22e | ||
|
|
23893dbcb9 | ||
|
|
506871b506 | ||
|
|
6115917660 | ||
|
|
21df8819d3 | ||
|
|
fb3f3e11f6 | ||
|
|
178c8942c3 | ||
|
|
51e747049d | ||
|
|
0582f7d694 | ||
|
|
fa7cac7538 | ||
|
|
9a314cfbc4 | ||
|
|
5941d0bf77 | ||
|
|
d326c1c25c | ||
|
|
96472a9a8f | ||
|
|
27252561e2 | ||
|
|
c9e732651f | ||
|
|
7849e7170d | ||
|
|
087894eb4e | ||
|
|
25f1b8c7a7 | ||
|
|
e71da1f14d | ||
|
|
938b14ba18 | ||
|
|
d6522d8f38 | ||
|
|
78eab890e7 | ||
|
|
1a56191f83 | ||
|
|
41c0f34d95 | ||
|
|
37bf205d7a | ||
|
|
aa1fa3eb9a | ||
|
|
0e2f8a612c | ||
|
|
465e7b2abc | ||
|
|
578fb45785 | ||
|
|
96995bbbe5 | ||
|
|
4cfdafebbc | ||
|
|
b97acb8ef5 | ||
|
|
d68d2dfdb6 | ||
|
|
39b269a454 | ||
|
|
ac081d3e10 | ||
|
|
5d4efb60cf | ||
|
|
cc408b980c | ||
|
|
59590b3ac9 | ||
|
|
ff759dacf3 | ||
|
|
a328e44130 | ||
|
|
7924cac5f9 | ||
|
|
1cef3b0c93 | ||
|
|
3cd59edc8b | ||
|
|
0d624af01d | ||
|
|
a09132570c | ||
|
|
ee3fc38432 | ||
|
|
dbf0192c8e | ||
|
|
6962cfc3f5 | ||
|
|
e096ec3b5b | ||
|
|
b30a74ae0c | ||
|
|
978eeb16c9 | ||
|
|
e5c9d91657 | ||
|
|
fa81c3a07a | ||
|
|
9cdd520d41 | ||
|
|
55d7898771 | ||
|
|
b8256bef97 | ||
|
|
5be9dc0b4a | ||
|
|
7d0be0cefb | ||
|
|
f7ce1edb13 | ||
|
|
5ad9280b60 | ||
|
|
2b353f1b20 | ||
|
|
75ab90b87b | ||
|
|
0219296120 | ||
|
|
20032b3a31 | ||
|
|
ea9e9a8c90 | ||
|
|
f7b0ee145b | ||
|
|
cc866738ee | ||
|
|
eadccf6e33 | ||
|
|
b70b66e567 | ||
|
|
5b6792dc20 | ||
|
|
f498e7343a | ||
|
|
6962f441e6 | ||
|
|
1def62b1b1 | ||
|
|
a4a4a6a185 | ||
|
|
d4c9469c1a | ||
|
|
3e2d4c5d7b | ||
|
|
d03f711d69 | ||
|
|
44dd8d9b96 | ||
|
|
549a3be0d8 | ||
|
|
1bb2edf8ec | ||
|
|
84c6f36315 |
@@ -1,5 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import print_function
|
||||
from logging import handlers
|
||||
from os.path import dirname
|
||||
import logging
|
||||
@@ -10,6 +9,7 @@ import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
import time
|
||||
|
||||
# Root path
|
||||
base_path = dirname(os.path.abspath(__file__))
|
||||
@@ -18,12 +18,7 @@ base_path = dirname(os.path.abspath(__file__))
|
||||
sys.path.insert(0, os.path.join(base_path, 'libs'))
|
||||
|
||||
from couchpotato.environment import Env
|
||||
from couchpotato.core.helpers.variable import getDataDir, removePyc
|
||||
|
||||
|
||||
# Remove pyc files before dynamic load (sees .pyc files regular .py modules)
|
||||
removePyc(base_path)
|
||||
|
||||
from couchpotato.core.helpers.variable import getDataDir
|
||||
|
||||
class Loader(object):
|
||||
|
||||
@@ -33,7 +28,7 @@ class Loader(object):
|
||||
|
||||
# Get options via arg
|
||||
from couchpotato.runner import getOptions
|
||||
self.options = getOptions(sys.argv[1:])
|
||||
self.options = getOptions(base_path, sys.argv[1:])
|
||||
|
||||
# Load settings
|
||||
settings = Env.get('settings')
|
||||
@@ -54,14 +49,14 @@ class Loader(object):
|
||||
# Create logging dir
|
||||
self.log_dir = os.path.join(self.data_dir, 'logs');
|
||||
if not os.path.isdir(self.log_dir):
|
||||
os.makedirs(self.log_dir)
|
||||
os.mkdir(self.log_dir)
|
||||
|
||||
# Logging
|
||||
from couchpotato.core.logger import CPLog
|
||||
self.log = CPLog(__name__)
|
||||
|
||||
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%H:%M:%S')
|
||||
hdlr = handlers.RotatingFileHandler(os.path.join(self.log_dir, 'error.log'), 'a', 500000, 10, encoding = 'utf-8')
|
||||
hdlr = handlers.RotatingFileHandler(os.path.join(self.log_dir, 'error.log'), 'a', 500000, 10)
|
||||
hdlr.setLevel(logging.CRITICAL)
|
||||
hdlr.setFormatter(formatter)
|
||||
self.log.logger.addHandler(hdlr)
|
||||
@@ -71,11 +66,10 @@ class Loader(object):
|
||||
signal.signal(signal.SIGTERM, lambda signum, stack_frame: sys.exit(1))
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
addEvent('app.do_shutdown', self.setRestart)
|
||||
addEvent('app.after_shutdown', self.afterShutdown)
|
||||
|
||||
def setRestart(self, restart):
|
||||
def afterShutdown(self, restart):
|
||||
self.do_restart = restart
|
||||
return True
|
||||
|
||||
def onExit(self, signal, frame):
|
||||
from couchpotato.core.event import fireEvent
|
||||
@@ -103,6 +97,7 @@ class Loader(object):
|
||||
|
||||
# Release log files and shutdown logger
|
||||
logging.shutdown()
|
||||
time.sleep(3)
|
||||
|
||||
args = [sys.executable] + [os.path.join(base_path, os.path.basename(__file__))] + sys.argv[1:]
|
||||
subprocess.Popen(args)
|
||||
@@ -137,15 +132,14 @@ if __name__ == '__main__':
|
||||
pass
|
||||
except SystemExit:
|
||||
raise
|
||||
except socket.error as e:
|
||||
except socket.error as (nr, msg):
|
||||
# log when socket receives SIGINT, but continue.
|
||||
# previous code would have skipped over other types of IO errors too.
|
||||
nr, msg = e
|
||||
if nr != 4:
|
||||
try:
|
||||
l.log.critical(traceback.format_exc())
|
||||
except:
|
||||
print(traceback.format_exc())
|
||||
print traceback.format_exc()
|
||||
raise
|
||||
except:
|
||||
try:
|
||||
@@ -154,7 +148,7 @@ if __name__ == '__main__':
|
||||
if l:
|
||||
l.log.critical(traceback.format_exc())
|
||||
else:
|
||||
print(traceback.format_exc())
|
||||
print traceback.format_exc()
|
||||
except:
|
||||
print(traceback.format_exc())
|
||||
print traceback.format_exc()
|
||||
raise
|
||||
|
||||
231
Desktop.py
Normal file
231
Desktop.py
Normal file
@@ -0,0 +1,231 @@
|
||||
from esky.util import appdir_from_executable #@UnresolvedImport
|
||||
from threading import Thread
|
||||
from version import VERSION
|
||||
from wx.lib.softwareupdate import SoftwareUpdate
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import webbrowser
|
||||
import wx
|
||||
|
||||
# Include proper dirs
|
||||
if hasattr(sys, 'frozen'):
|
||||
import libs
|
||||
base_path = os.path.dirname(os.path.dirname(os.path.abspath(libs.__file__)))
|
||||
else:
|
||||
base_path = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
lib_dir = os.path.join(base_path, 'libs')
|
||||
|
||||
sys.path.insert(0, base_path)
|
||||
sys.path.insert(0, lib_dir)
|
||||
|
||||
from couchpotato.environment import Env
|
||||
|
||||
class TaskBarIcon(wx.TaskBarIcon):
|
||||
|
||||
TBMENU_OPEN = wx.NewId()
|
||||
TBMENU_SETTINGS = wx.NewId()
|
||||
TBMENU_EXIT = wx.ID_EXIT
|
||||
|
||||
closed = False
|
||||
menu = False
|
||||
enabled = False
|
||||
|
||||
def __init__(self, frame):
|
||||
wx.TaskBarIcon.__init__(self)
|
||||
self.frame = frame
|
||||
|
||||
icon = wx.Icon('icon.png', wx.BITMAP_TYPE_PNG)
|
||||
self.SetIcon(icon)
|
||||
|
||||
self.Bind(wx.EVT_TASKBAR_LEFT_UP, self.OnTaskBarClick)
|
||||
self.Bind(wx.EVT_TASKBAR_RIGHT_UP, self.OnTaskBarClick)
|
||||
|
||||
self.Bind(wx.EVT_MENU, self.onOpen, id = self.TBMENU_OPEN)
|
||||
self.Bind(wx.EVT_MENU, self.onSettings, id = self.TBMENU_SETTINGS)
|
||||
self.Bind(wx.EVT_MENU, self.onTaskBarClose, id = self.TBMENU_EXIT)
|
||||
|
||||
def OnTaskBarClick(self, evt):
|
||||
menu = self.CreatePopupMenu()
|
||||
self.PopupMenu(menu)
|
||||
menu.Destroy()
|
||||
|
||||
def enable(self):
|
||||
self.enabled = True
|
||||
|
||||
if self.menu:
|
||||
self.open_menu.Enable(True)
|
||||
self.setting_menu.Enable(True)
|
||||
|
||||
self.open_menu.SetText('Open')
|
||||
|
||||
def CreatePopupMenu(self):
|
||||
|
||||
if not self.menu:
|
||||
self.menu = wx.Menu()
|
||||
self.open_menu = self.menu.Append(self.TBMENU_OPEN, 'Open')
|
||||
self.setting_menu = self.menu.Append(self.TBMENU_SETTINGS, 'About')
|
||||
self.exit_menu = self.menu.Append(self.TBMENU_EXIT, 'Quit')
|
||||
|
||||
if not self.enabled:
|
||||
self.open_menu.Enable(False)
|
||||
self.setting_menu.Enable(False)
|
||||
|
||||
self.open_menu.SetText('Loading...')
|
||||
|
||||
return self.menu
|
||||
|
||||
def onOpen(self, event):
|
||||
url = self.frame.parent.getSetting('base_url')
|
||||
webbrowser.open(url)
|
||||
|
||||
def onSettings(self, event):
|
||||
url = self.frame.parent.getSetting('base_url') + 'settings/about/'
|
||||
webbrowser.open(url)
|
||||
|
||||
def onTaskBarClose(self, evt):
|
||||
if self.closed:
|
||||
return
|
||||
|
||||
self.closed = True
|
||||
|
||||
self.RemoveIcon()
|
||||
wx.CallAfter(self.frame.Close)
|
||||
|
||||
|
||||
def makeIcon(self, img):
|
||||
if "wxMSW" in wx.PlatformInfo:
|
||||
img = img.Scale(16, 16)
|
||||
elif "wxGTK" in wx.PlatformInfo:
|
||||
img = img.Scale(22, 22)
|
||||
|
||||
icon = wx.IconFromBitmap(img.CopyFromBitmap())
|
||||
return icon
|
||||
|
||||
|
||||
class MainFrame(wx.Frame):
|
||||
|
||||
def __init__(self, parent):
|
||||
wx.Frame.__init__(self, None, style = wx.FRAME_NO_TASKBAR)
|
||||
|
||||
self.parent = parent
|
||||
self.tbicon = TaskBarIcon(self)
|
||||
|
||||
|
||||
class WorkerThread(Thread):
|
||||
|
||||
def __init__(self, desktop):
|
||||
Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self._desktop = desktop
|
||||
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
|
||||
# Get options via arg
|
||||
from couchpotato.runner import getOptions
|
||||
args = ['--quiet']
|
||||
self.options = getOptions(base_path, args)
|
||||
|
||||
# Load settings
|
||||
settings = Env.get('settings')
|
||||
settings.setFile(self.options.config_file)
|
||||
|
||||
# Create data dir if needed
|
||||
self.data_dir = os.path.expanduser(Env.setting('data_dir'))
|
||||
if self.data_dir == '':
|
||||
from couchpotato.core.helpers.variable import getDataDir
|
||||
self.data_dir = getDataDir()
|
||||
|
||||
if not os.path.isdir(self.data_dir):
|
||||
os.makedirs(self.data_dir)
|
||||
|
||||
# Create logging dir
|
||||
self.log_dir = os.path.join(self.data_dir, 'logs');
|
||||
if not os.path.isdir(self.log_dir):
|
||||
os.mkdir(self.log_dir)
|
||||
|
||||
try:
|
||||
from couchpotato.runner import runCouchPotato
|
||||
runCouchPotato(self.options, base_path, args, data_dir = self.data_dir, log_dir = self.log_dir, Env = Env, desktop = self._desktop)
|
||||
except:
|
||||
pass
|
||||
|
||||
self._desktop.frame.Close()
|
||||
|
||||
|
||||
class CouchPotatoApp(wx.App, SoftwareUpdate):
|
||||
|
||||
settings = {}
|
||||
events = {}
|
||||
restart = False
|
||||
closing = False
|
||||
|
||||
def OnInit(self):
|
||||
|
||||
# Updater
|
||||
base_url = 'https://couchpota.to/updates/%s'
|
||||
self.InitUpdates(base_url % VERSION + '/', base_url % 'changelog.html',
|
||||
icon = wx.Icon('icon.png'))
|
||||
|
||||
self.frame = MainFrame(self)
|
||||
self.frame.Bind(wx.EVT_CLOSE, self.onClose)
|
||||
|
||||
# CouchPotato thread
|
||||
self.worker = WorkerThread(self)
|
||||
|
||||
return True
|
||||
|
||||
def onAppLoad(self):
|
||||
self.frame.tbicon.enable()
|
||||
|
||||
def setSettings(self, settings = {}):
|
||||
self.settings = settings
|
||||
|
||||
def getSetting(self, name):
|
||||
return self.settings.get(name)
|
||||
|
||||
def addEvents(self, events = {}):
|
||||
for name in events.iterkeys():
|
||||
self.events[name] = events[name]
|
||||
|
||||
def onClose(self, event):
|
||||
|
||||
if not self.closing:
|
||||
self.closing = True
|
||||
self.frame.tbicon.onTaskBarClose(event)
|
||||
|
||||
onClose = self.events.get('onClose')
|
||||
onClose(event)
|
||||
|
||||
def afterShutdown(self, restart = False):
|
||||
self.frame.Destroy()
|
||||
self.restart = restart
|
||||
self.ExitMainLoop()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
app = CouchPotatoApp(redirect = False)
|
||||
app.MainLoop()
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
if app.restart:
|
||||
|
||||
def appexe_from_executable(exepath):
|
||||
appdir = appdir_from_executable(exepath)
|
||||
exename = os.path.basename(exepath)
|
||||
|
||||
if sys.platform == "darwin":
|
||||
if os.path.isdir(os.path.join(appdir, "Contents", "MacOS")):
|
||||
return os.path.join(appdir, "Contents", "MacOS", exename)
|
||||
|
||||
return os.path.join(appdir, exename)
|
||||
|
||||
exe = appexe_from_executable(sys.executable)
|
||||
os.chdir(os.path.dirname(exe))
|
||||
|
||||
os.execv(exe, [exe] + sys.argv[1:])
|
||||
35
README.md
35
README.md
@@ -1,4 +1,4 @@
|
||||
CouchPotato
|
||||
CouchPotato Server
|
||||
=====
|
||||
|
||||
CouchPotato (CP) is an automatic NZB and torrent downloader. You can keep a "movies I want"-list and it will search for NZBs/torrents of these movies every X hours.
|
||||
@@ -7,7 +7,7 @@ Once a movie is found, it will send it to SABnzbd or download the torrent to a s
|
||||
|
||||
## Running from Source
|
||||
|
||||
CouchPotatoServer can be run from source. This will use *git* as updater, so make sure that is installed.
|
||||
CouchPotatoServer can be run from source. This will use *git* as updater, so make sure that is installed also.
|
||||
|
||||
Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for more details:
|
||||
|
||||
@@ -17,9 +17,9 @@ Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for
|
||||
* Open up `Git Bash` (or CMD) and go to the folder you want to install CP. Something like Program Files.
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`.
|
||||
* You can now start CP via `CouchPotatoServer\CouchPotato.py` to start
|
||||
* Your browser should open up, but if it doesn't go to `http://localhost:5050/`
|
||||
* Your browser should open up, but if it doesn't go to: `http://localhost:5050/`
|
||||
|
||||
OS X:
|
||||
OSx:
|
||||
|
||||
* If you're on Leopard (10.5) install Python 2.6+: [Python 2.6.5](http://www.python.org/download/releases/2.6.5/)
|
||||
* Install [GIT](http://git-scm.com/)
|
||||
@@ -27,27 +27,20 @@ OS X:
|
||||
* Go to your App folder `cd /Applications`
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||
* Then do `python CouchPotatoServer/CouchPotato.py`
|
||||
* Your browser should open up, but if it doesn't go to `http://localhost:5050/`
|
||||
* Your browser should open up, but if it doesn't go to: `http://localhost:5050/`
|
||||
|
||||
Linux:
|
||||
Linux (ubuntu / debian):
|
||||
|
||||
* (Ubuntu / Debian) Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
||||
* (Fedora / CentOS) Install [GIT](http://git-scm.com/) with `yum install git`
|
||||
* Install [GIT](http://git-scm.com/) with `apt-get install git-core`
|
||||
* 'cd' to the folder of your choosing.
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||
* Then do `python CouchPotatoServer/CouchPotato.py` to start
|
||||
* (Ubuntu / Debian) To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
||||
* (Ubuntu / Debian) Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato`
|
||||
* (Ubuntu / Debian) Change the paths inside the default file `sudo nano /etc/default/couchpotato`
|
||||
* (Ubuntu / Debian) Make it executable `sudo chmod +x /etc/init.d/couchpotato`
|
||||
* (Ubuntu / Debian) Add it to defaults `sudo update-rc.d couchpotato defaults`
|
||||
* (systemd) To run on boot copy the systemd config `sudo cp CouchPotatoServer/init/couchpotato.fedora.service /etc/systemd/system/couchpotato.service`
|
||||
* (systemd) Update the systemd config file with your user and path to CouchPotato.py
|
||||
* (systemd) Enable it at boot with `sudo systemctl enable couchpotato`
|
||||
* Open your browser and go to `http://localhost:5050/`
|
||||
|
||||
Docker:
|
||||
* You can use [razorgirl's Dockerfile](https://github.com/razorgirl/docker-couchpotato) to quickly build your own isolated app container. It's based on the Linux instructions above. For more info about Docker check out the [official website](https://www.docker.com).
|
||||
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato`
|
||||
* Change the paths inside the init script. `sudo nano /etc/init.d/couchpotato`
|
||||
* Make it executable. `sudo chmod +x /etc/init.d/couchpotato`
|
||||
* Add it to defaults. `sudo update-rc.d couchpotato defaults`
|
||||
* Open your browser and go to: `http://localhost:5050/`
|
||||
|
||||
|
||||
FreeBSD :
|
||||
|
||||
@@ -63,7 +56,7 @@ FreeBSD :
|
||||
* Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`
|
||||
* Then run `sudo python CouchPotatoServer/CouchPotato.py` to start for the first time
|
||||
* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/freebsd /etc/rc.d/couchpotato`
|
||||
* Change the paths inside the init script. `sudo vim /etc/rc.d/couchpotato`
|
||||
* Change the paths inside the init script. `sudo vim /etc/init.d/couchpotato`
|
||||
* Make init script executable. `sudo chmod +x /etc/rc.d/couchpotato`
|
||||
* Add init to startup. `sudo echo 'couchpotato_enable="YES"' >> /etc/rc.conf`
|
||||
* Open your browser and go to: `http://server:5050/`
|
||||
|
||||
@@ -1,40 +1,15 @@
|
||||
# Contributing to CouchPotatoServer
|
||||
#So you feel like posting a bug, sending me a pull request or just telling me how awesome I am. No problem!
|
||||
|
||||
1. [Contributing](#contributing)
|
||||
2. [Submitting an Issue](#issues)
|
||||
3. [Submitting a Pull Request](#pull-requests)
|
||||
##Just make sure you think of the following things:
|
||||
|
||||
## Contributing
|
||||
Thank you for your interest in contributing to CouchPotato. There are several ways to help out, even if you've never worked on an open source project before.
|
||||
If you've found a bug or want to request a feature, you can report it by [posting an issue](https://github.com/RuudBurger/CouchPotatoServer/issues/new) - be sure to read the [guidelines](#issues) first!
|
||||
If you want to contribute your own work, please read the [guidelines](#pull-requests) for submitting a pull request.
|
||||
Lastly, for anything related to CouchPotato, feel free to stop by the [forum](http://couchpota.to/forum/) or the [#couchpotato](http://webchat.freenode.net/?channels=couchpotato) IRC channel at irc.freenode.net.
|
||||
* Search through the existing (and closed) issues first. See if you can get your answer there.
|
||||
* Double check the result manually, because it could be an external issue.
|
||||
* Post logs! Without seeing what is going on, I can't reproduce the error.
|
||||
* What is the movie + quality you are searching for.
|
||||
* What are you settings for the specific problem.
|
||||
* What providers are you using. (While your logs include these, scanning through hundred of lines of log isn't my hobby).
|
||||
* Give me a short step by step of how to reproduce.
|
||||
* What hardware / OS are you using and what are the limits? NAS can be slow and maybe have a different python installed then when you use CP on OSX or Windows for example.
|
||||
* I will mark issues with the "can't reproduce" tag. Don't go asking me "why closed" if it clearly says the issue in the tag ;)
|
||||
|
||||
## Issues
|
||||
Issues are intended for reporting bugs and weird behaviour or suggesting improvements to CouchPotatoServer.
|
||||
Before you submit an issue, please go through the following checklist:
|
||||
* **FILL IN ALL THE FIELDS ASKED FOR**
|
||||
* **POST MORE THAN A SINGLE LINE LOG**, if you do, you'd better have a easy reproducable bug
|
||||
* Search through existing issues (*including closed issues!*) first: you might be able to get your answer there.
|
||||
* Double check your issue manually, because it could be an external issue.
|
||||
* Post logs with your issue: Without seeing what is going on, the developers can't reproduce the error.
|
||||
* Check the logs yourself before submitting them. Obvious errors like permission or HTTP errors are often not related to CouchPotato.
|
||||
* What movie and quality are you searching for?
|
||||
* What are your settings for the specific problem?
|
||||
* What providers are you using? (While your logs include these, scanning through hundreds of lines of logs isn't our hobby)
|
||||
* Post the logs from the *config* directory, please do not copy paste the UI. Use pastebin to store these logs!
|
||||
* Give a short step by step of how to reproduce the error.
|
||||
* What hardware / OS are you using and what are its limitations? For example: NAS can be slow and maybe have a different version of python installed than when you use CP on OS X or Windows.
|
||||
* Your issue might be marked with the "can't reproduce" tag. Don't ask why your issue was closed if it says so in the tag.
|
||||
* If you're running on a NAS (QNAP, Austor, Synology etc.) with pre-made packages, make sure these are set up to use our source repository (RuudBurger/CouchPotatoServer) and nothing else!
|
||||
* Do not "bump" issues with "Any updates on this" or whatever. Yes I've seen it, you don't have to remind me of it. There will be an update when the code is done or I need information. If you feel the need to do so, you'd better have more info on the issue.
|
||||
|
||||
The more relevant information you provide, the more likely that your issue will be resolved.
|
||||
If you don't follow any of the checks above, I'll close the issue. If you are wondering why (and ask) I'll block you from posting new issues and the repo.
|
||||
|
||||
## Pull Requests
|
||||
Pull requests are intended for contributing code or documentation to the project. Before you submit a pull request, consider the following:
|
||||
* Make sure your pull request is made for the *develop* branch (or relevant feature branch).
|
||||
* Have you tested your PR? If not, why?
|
||||
* Does your PR have any limitations I should know of?
|
||||
* Is your PR up-to-date with the branch you're trying to push into?
|
||||
**If I don't get enough info, the chance of the issue getting closed is a lot bigger ;)**
|
||||
|
||||
@@ -1,7 +1,3 @@
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from couchpotato.api import api_docs, api_docs_missing, api
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.variable import md5, tryInt
|
||||
@@ -9,10 +5,13 @@ from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
from tornado import template
|
||||
from tornado.web import RequestHandler, authenticated
|
||||
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
views = {}
|
||||
template_loader = template.Loader(os.path.join(os.path.dirname(__file__), 'templates'))
|
||||
|
||||
@@ -25,10 +24,9 @@ class BaseHandler(RequestHandler):
|
||||
|
||||
if username and password:
|
||||
return self.get_secure_cookie('user')
|
||||
else: # Login when no username or password are set
|
||||
else: # Login when no username or password are set
|
||||
return True
|
||||
|
||||
|
||||
# Main web handler
|
||||
class WebHandler(BaseHandler):
|
||||
|
||||
@@ -45,13 +43,11 @@ class WebHandler(BaseHandler):
|
||||
log.error("Failed doing web request '%s': %s", (route, traceback.format_exc()))
|
||||
self.write({'success': False, 'error': 'Failed returning results'})
|
||||
|
||||
|
||||
def addView(route, func):
|
||||
def addView(route, func, static = False):
|
||||
views[route] = func
|
||||
|
||||
|
||||
def get_db():
|
||||
return Env.get('db')
|
||||
def get_session(engine = None):
|
||||
return Env.getSession(engine)
|
||||
|
||||
|
||||
# Web view
|
||||
@@ -59,10 +55,12 @@ def index():
|
||||
return template_loader.load('index.html').generate(sep = os.sep, fireEvent = fireEvent, Env = Env)
|
||||
addView('', index)
|
||||
|
||||
|
||||
# API docs
|
||||
def apiDocs():
|
||||
routes = list(api.keys())
|
||||
routes = []
|
||||
|
||||
for route in api.iterkeys():
|
||||
routes.append(route)
|
||||
|
||||
if api_docs.get(''):
|
||||
del api_docs['']
|
||||
@@ -72,30 +70,21 @@ def apiDocs():
|
||||
|
||||
addView('docs', apiDocs)
|
||||
|
||||
|
||||
# Database debug manager
|
||||
def databaseManage():
|
||||
return template_loader.load('database.html').generate(fireEvent = fireEvent, Env = Env)
|
||||
|
||||
addView('database', databaseManage)
|
||||
|
||||
|
||||
# Make non basic auth option to get api key
|
||||
class KeyHandler(RequestHandler):
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
api_key = None
|
||||
api = None
|
||||
|
||||
try:
|
||||
username = Env.setting('username')
|
||||
password = Env.setting('password')
|
||||
|
||||
if (self.get_argument('u') == md5(username) or not username) and (self.get_argument('p') == password or not password):
|
||||
api_key = Env.setting('api_key')
|
||||
api = Env.setting('api_key')
|
||||
|
||||
self.write({
|
||||
'success': api_key is not None,
|
||||
'api_key': api_key
|
||||
'success': api is not None,
|
||||
'api_key': api
|
||||
})
|
||||
except:
|
||||
log.error('Failed doing key request: %s', (traceback.format_exc()))
|
||||
@@ -113,21 +102,20 @@ class LoginHandler(BaseHandler):
|
||||
|
||||
def post(self, *args, **kwargs):
|
||||
|
||||
api_key = None
|
||||
api = None
|
||||
|
||||
username = Env.setting('username')
|
||||
password = Env.setting('password')
|
||||
|
||||
if (self.get_argument('username') == username or not username) and (md5(self.get_argument('password')) == password or not password):
|
||||
api_key = Env.setting('api_key')
|
||||
api = Env.setting('api_key')
|
||||
|
||||
if api_key:
|
||||
if api:
|
||||
remember_me = tryInt(self.get_argument('remember_me', default = 0))
|
||||
self.set_secure_cookie('user', api_key, expires_days = 30 if remember_me > 0 else None)
|
||||
self.set_secure_cookie('user', api, expires_days = 30 if remember_me > 0 else None)
|
||||
|
||||
self.redirect(Env.get('web_base'))
|
||||
|
||||
|
||||
class LogoutHandler(BaseHandler):
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
@@ -148,3 +136,4 @@ def page_not_found(rh):
|
||||
|
||||
rh.set_status(404)
|
||||
rh.write('Wrong API key used')
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
from functools import wraps
|
||||
from threading import Thread
|
||||
import json
|
||||
import threading
|
||||
import traceback
|
||||
from six.moves import urllib
|
||||
|
||||
from couchpotato.core.helpers.request import getParams
|
||||
from couchpotato.core.logger import CPLog
|
||||
from functools import wraps
|
||||
from threading import Thread
|
||||
from tornado.gen import coroutine
|
||||
from tornado.web import RequestHandler, asynchronous
|
||||
|
||||
import json
|
||||
import threading
|
||||
import tornado
|
||||
import traceback
|
||||
import urllib
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
@@ -20,25 +20,15 @@ api_nonblock = {}
|
||||
api_docs = {}
|
||||
api_docs_missing = []
|
||||
|
||||
|
||||
def run_async(func):
|
||||
@wraps(func)
|
||||
def async_func(*args, **kwargs):
|
||||
func_hl = Thread(target = func, args = args, kwargs = kwargs)
|
||||
func_hl.start()
|
||||
return func_hl
|
||||
|
||||
return async_func
|
||||
|
||||
@run_async
|
||||
def run_handler(route, kwargs, callback = None):
|
||||
try:
|
||||
res = api[route](**kwargs)
|
||||
callback(res, route)
|
||||
except:
|
||||
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
||||
callback({'success': False, 'error': 'Failed returning results'}, route)
|
||||
|
||||
|
||||
# NonBlock API handler
|
||||
class NonBlockHandler(RequestHandler):
|
||||
|
||||
@@ -71,7 +61,6 @@ class NonBlockHandler(RequestHandler):
|
||||
|
||||
self.stopper = None
|
||||
|
||||
|
||||
def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
|
||||
api_nonblock[route] = func_tuple
|
||||
|
||||
@@ -80,79 +69,59 @@ def addNonBlockApiView(route, func_tuple, docs = None, **kwargs):
|
||||
else:
|
||||
api_docs_missing.append(route)
|
||||
|
||||
|
||||
# Blocking API handler
|
||||
class ApiHandler(RequestHandler):
|
||||
|
||||
@asynchronous
|
||||
@coroutine
|
||||
def get(self, route, *args, **kwargs):
|
||||
route = route.strip('/')
|
||||
if not api.get(route):
|
||||
self.write('API call doesn\'t seem to exist')
|
||||
self.finish()
|
||||
return
|
||||
|
||||
# Create lock if it doesn't exist
|
||||
if route in api_locks and not api_locks.get(route):
|
||||
api_locks[route] = threading.Lock()
|
||||
|
||||
api_locks[route].acquire()
|
||||
|
||||
try:
|
||||
|
||||
kwargs = {}
|
||||
for x in self.request.arguments:
|
||||
kwargs[x] = urllib.parse.unquote(self.get_argument(x))
|
||||
kwargs[x] = urllib.unquote(self.get_argument(x))
|
||||
|
||||
# Split array arguments
|
||||
kwargs = getParams(kwargs)
|
||||
kwargs['_request'] = self
|
||||
|
||||
# Remove t random string
|
||||
try: del kwargs['t']
|
||||
except: pass
|
||||
|
||||
# Add async callback handler
|
||||
run_handler(route, kwargs, callback = self.taskFinished)
|
||||
@run_async
|
||||
def run_handler(callback):
|
||||
try:
|
||||
result = api[route](**kwargs)
|
||||
callback(result)
|
||||
except:
|
||||
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
||||
callback({'success': False, 'error': 'Failed returning results'})
|
||||
result = yield tornado.gen.Task(run_handler)
|
||||
|
||||
# Check JSONP callback
|
||||
jsonp_callback = self.get_argument('callback_func', default = None)
|
||||
|
||||
if jsonp_callback:
|
||||
self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')')
|
||||
self.set_header("Content-Type", "text/javascript")
|
||||
elif isinstance(result, tuple) and result[0] == 'redirect':
|
||||
self.redirect(result[1])
|
||||
else:
|
||||
self.write(result)
|
||||
|
||||
except:
|
||||
log.error('Failed doing api request "%s": %s', (route, traceback.format_exc()))
|
||||
try:
|
||||
self.write({'success': False, 'error': 'Failed returning results'})
|
||||
self.finish()
|
||||
except:
|
||||
log.error('Failed write error "%s": %s', (route, traceback.format_exc()))
|
||||
|
||||
api_locks[route].release()
|
||||
|
||||
post = get
|
||||
|
||||
def taskFinished(self, result, route):
|
||||
|
||||
if not self.request.connection.stream.closed():
|
||||
try:
|
||||
# Check JSONP callback
|
||||
jsonp_callback = self.get_argument('callback_func', default = None)
|
||||
|
||||
if jsonp_callback:
|
||||
self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')')
|
||||
self.set_header("Content-Type", "text/javascript")
|
||||
self.finish()
|
||||
elif isinstance(result, tuple) and result[0] == 'redirect':
|
||||
self.redirect(result[1])
|
||||
else:
|
||||
self.write(result)
|
||||
self.finish()
|
||||
except UnicodeDecodeError:
|
||||
log.error('Failed proper encode: %s', traceback.format_exc())
|
||||
except:
|
||||
log.debug('Failed doing request, probably already closed: %s', (traceback.format_exc()))
|
||||
try: self.finish({'success': False, 'error': 'Failed returning results'})
|
||||
except: pass
|
||||
self.write({'success': False, 'error': 'Failed returning results'})
|
||||
|
||||
api_locks[route].release()
|
||||
|
||||
|
||||
def addApiView(route, func, static = False, docs = None, **kwargs):
|
||||
|
||||
if static: func(route)
|
||||
|
||||
100
couchpotato/core/_base/_core/__init__.py
Normal file
100
couchpotato/core/_base/_core/__init__.py
Normal file
@@ -0,0 +1,100 @@
|
||||
from .main import Core
|
||||
from uuid import uuid4
|
||||
|
||||
def start():
|
||||
return Core()
|
||||
|
||||
config = [{
|
||||
'name': 'core',
|
||||
'order': 1,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'basics',
|
||||
'description': 'Needs restart before changes take effect.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'port',
|
||||
'default': 5050,
|
||||
'type': 'int',
|
||||
'description': 'The port I should listen to.',
|
||||
},
|
||||
{
|
||||
'name': 'ssl_cert',
|
||||
'description': 'Path to SSL server.crt',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'ssl_key',
|
||||
'description': 'Path to SSL server.key',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'launch_browser',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'description': 'Launch the browser when I start.',
|
||||
'wizard': True,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'advanced',
|
||||
'description': "For those who know what they're doing",
|
||||
'advanced': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'api_key',
|
||||
'default': uuid4().hex,
|
||||
'readonly': 1,
|
||||
'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
|
||||
},
|
||||
{
|
||||
'name': 'debug',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Enable debugging.',
|
||||
},
|
||||
{
|
||||
'name': 'development',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
|
||||
},
|
||||
{
|
||||
'name': 'data_dir',
|
||||
'type': 'directory',
|
||||
'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
|
||||
},
|
||||
{
|
||||
'name': 'url_base',
|
||||
'default': '',
|
||||
'description': 'When using mod_proxy use this to append the url with this.',
|
||||
},
|
||||
{
|
||||
'name': 'permission_folder',
|
||||
'default': '0755',
|
||||
'label': 'Folder CHMOD',
|
||||
'description': 'Can be either decimal (493) or octal (leading zero: 0755)',
|
||||
},
|
||||
{
|
||||
'name': 'permission_file',
|
||||
'default': '0755',
|
||||
'label': 'File CHMOD',
|
||||
'description': 'Same as Folder CHMOD but for files',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,3 +1,10 @@
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, addEvent
|
||||
from couchpotato.core.helpers.variable import cleanHost, md5
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from tornado.ioloop import IOLoop
|
||||
from uuid import uuid4
|
||||
import os
|
||||
import platform
|
||||
@@ -6,19 +13,8 @@ import time
|
||||
import traceback
|
||||
import webbrowser
|
||||
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, addEvent
|
||||
from couchpotato.core.helpers.variable import cleanHost, md5, isSubFolder
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from tornado.ioloop import IOLoop
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Core'
|
||||
|
||||
|
||||
class Core(Plugin):
|
||||
|
||||
@@ -51,7 +47,6 @@ class Core(Plugin):
|
||||
addEvent('app.api_url', self.createApiUrl)
|
||||
addEvent('app.version', self.version)
|
||||
addEvent('app.load', self.checkDataDir)
|
||||
addEvent('app.load', self.cleanUpFolders)
|
||||
|
||||
addEvent('setting.save.core.password', self.md5Password)
|
||||
addEvent('setting.save.core.api_key', self.checkApikey)
|
||||
@@ -60,10 +55,6 @@ class Core(Plugin):
|
||||
if not Env.get('desktop'):
|
||||
self.signalHandler()
|
||||
|
||||
# Set default urlopen timeout
|
||||
import socket
|
||||
socket.setdefaulttimeout(30)
|
||||
|
||||
def md5Password(self, value):
|
||||
return md5(value) if value else ''
|
||||
|
||||
@@ -71,15 +62,11 @@ class Core(Plugin):
|
||||
return value if value and len(value) > 3 else uuid4().hex
|
||||
|
||||
def checkDataDir(self):
|
||||
if isSubFolder(Env.get('data_dir'), Env.get('app_dir')):
|
||||
if Env.get('app_dir') in Env.get('data_dir'):
|
||||
log.error('You should NOT use your CouchPotato directory to save your settings in. Files will get overwritten or be deleted.')
|
||||
|
||||
return True
|
||||
|
||||
def cleanUpFolders(self):
|
||||
only_clean = ['couchpotato', 'libs', 'init']
|
||||
self.deleteEmptyFolder(Env.get('app_dir'), show_error = False, only_clean = only_clean)
|
||||
|
||||
def available(self, **kwargs):
|
||||
return {
|
||||
'success': True
|
||||
@@ -91,11 +78,7 @@ class Core(Plugin):
|
||||
|
||||
def shutdown():
|
||||
self.initShutdown()
|
||||
|
||||
if IOLoop.current()._closing:
|
||||
shutdown()
|
||||
else:
|
||||
IOLoop.current().add_callback(shutdown)
|
||||
IOLoop.current().add_callback(shutdown)
|
||||
|
||||
return 'shutdown'
|
||||
|
||||
@@ -118,7 +101,7 @@ class Core(Plugin):
|
||||
|
||||
self.shutdown_started = True
|
||||
|
||||
fireEvent('app.do_shutdown', restart = restart)
|
||||
fireEvent('app.do_shutdown')
|
||||
log.debug('Every plugin got shutdown event')
|
||||
|
||||
loop = True
|
||||
@@ -130,7 +113,7 @@ class Core(Plugin):
|
||||
|
||||
if len(still_running) == 0:
|
||||
break
|
||||
elif starttime < time.time() - 30: # Always force break after 30s wait
|
||||
elif starttime < time.time() - 30: # Always force break after 30s wait
|
||||
break
|
||||
|
||||
running = list(set(still_running) - set(self.ignore_restart))
|
||||
@@ -143,11 +126,8 @@ class Core(Plugin):
|
||||
|
||||
log.debug('Safe to shutdown/restart')
|
||||
|
||||
loop = IOLoop.current()
|
||||
|
||||
try:
|
||||
if not loop._closing:
|
||||
loop.stop()
|
||||
IOLoop.current().stop()
|
||||
except RuntimeError:
|
||||
pass
|
||||
except:
|
||||
@@ -197,104 +177,8 @@ class Core(Plugin):
|
||||
def signalHandler(self):
|
||||
if Env.get('daemonized'): return
|
||||
|
||||
def signal_handler(*args, **kwargs):
|
||||
def signal_handler(signal, frame):
|
||||
fireEvent('app.shutdown', single = True)
|
||||
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'core',
|
||||
'order': 1,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'basics',
|
||||
'description': 'Needs restart before changes take effect.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'port',
|
||||
'default': 5050,
|
||||
'type': 'int',
|
||||
'description': 'The port I should listen to.',
|
||||
},
|
||||
{
|
||||
'name': 'ssl_cert',
|
||||
'description': 'Path to SSL server.crt',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'ssl_key',
|
||||
'description': 'Path to SSL server.key',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'launch_browser',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'description': 'Launch the browser when I start.',
|
||||
'wizard': True,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'tab': 'general',
|
||||
'name': 'advanced',
|
||||
'description': "For those who know what they're doing",
|
||||
'advanced': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'api_key',
|
||||
'default': uuid4().hex,
|
||||
'readonly': 1,
|
||||
'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
|
||||
},
|
||||
{
|
||||
'name': 'debug',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Enable debugging.',
|
||||
},
|
||||
{
|
||||
'name': 'development',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
|
||||
},
|
||||
{
|
||||
'name': 'data_dir',
|
||||
'type': 'directory',
|
||||
'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
|
||||
},
|
||||
{
|
||||
'name': 'url_base',
|
||||
'default': '',
|
||||
'description': 'When using mod_proxy use this to append the url with this.',
|
||||
},
|
||||
{
|
||||
'name': 'permission_folder',
|
||||
'default': '0755',
|
||||
'label': 'Folder CHMOD',
|
||||
'description': 'Can be either decimal (493) or octal (leading zero: 0755). <a target="_blank" href="http://permissions-calculator.org/">Calculate the correct value</a>',
|
||||
},
|
||||
{
|
||||
'name': 'permission_file',
|
||||
'default': '0755',
|
||||
'label': 'File CHMOD',
|
||||
'description': 'See Folder CHMOD description, but for files',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
6
couchpotato/core/_base/clientscript/__init__.py
Normal file
6
couchpotato/core/_base/clientscript/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from .main import ClientScript
|
||||
|
||||
def start():
|
||||
return ClientScript()
|
||||
|
||||
config = []
|
||||
@@ -1,7 +1,3 @@
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import ss
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
@@ -11,12 +7,12 @@ from couchpotato.environment import Env
|
||||
from minify.cssmin import cssmin
|
||||
from minify.jsmin import jsmin
|
||||
from tornado.web import StaticFileHandler
|
||||
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'ClientScript'
|
||||
|
||||
|
||||
class ClientScript(Plugin):
|
||||
|
||||
@@ -38,8 +34,6 @@ class ClientScript(Plugin):
|
||||
'scripts/library/question.js',
|
||||
'scripts/library/scrollspy.js',
|
||||
'scripts/library/spin.js',
|
||||
'scripts/library/Array.stableSort.js',
|
||||
'scripts/library/async.js',
|
||||
'scripts/couchpotato.js',
|
||||
'scripts/api.js',
|
||||
'scripts/library/history.js',
|
||||
@@ -49,17 +43,20 @@ class ClientScript(Plugin):
|
||||
'scripts/block/footer.js',
|
||||
'scripts/block/menu.js',
|
||||
'scripts/page/home.js',
|
||||
'scripts/page/wanted.js',
|
||||
'scripts/page/settings.js',
|
||||
'scripts/page/about.js',
|
||||
'scripts/page/manage.js',
|
||||
],
|
||||
}
|
||||
|
||||
urls = {'style': {}, 'script': {}}
|
||||
minified = {'style': {}, 'script': {}}
|
||||
paths = {'style': {}, 'script': {}}
|
||||
|
||||
urls = {'style': {}, 'script': {}, }
|
||||
minified = {'style': {}, 'script': {}, }
|
||||
paths = {'style': {}, 'script': {}, }
|
||||
comment = {
|
||||
'style': '/*** %s:%d ***/\n',
|
||||
'script': '// %s:%d\n'
|
||||
'style': '/*** %s:%d ***/\n',
|
||||
'script': '// %s:%d\n'
|
||||
}
|
||||
|
||||
html = {
|
||||
@@ -91,6 +88,7 @@ class ClientScript(Plugin):
|
||||
else:
|
||||
self.registerStyle(core_url, file_path, position = 'front')
|
||||
|
||||
|
||||
def minify(self):
|
||||
|
||||
# Create cache dir
|
||||
@@ -124,7 +122,7 @@ class ClientScript(Plugin):
|
||||
data = cssmin(data)
|
||||
data = data.replace('../images/', '../static/images/')
|
||||
data = data.replace('../fonts/', '../static/fonts/')
|
||||
data = data.replace('../../static/', '../static/') # Replace inside plugins
|
||||
data = data.replace('../../static/', '../static/') # Replace inside plugins
|
||||
|
||||
raw.append({'file': file_path, 'date': int(os.path.getmtime(file_path)), 'data': data})
|
||||
|
||||
@@ -187,7 +185,6 @@ class ClientScript(Plugin):
|
||||
|
||||
prefix_properties = ['border-radius', 'transform', 'transition', 'box-shadow']
|
||||
prefix_tags = ['ms', 'moz', 'webkit']
|
||||
|
||||
def prefix(self, data):
|
||||
|
||||
trimmed_data = re.sub('(\t|\n|\r)+', '', data)
|
||||
6
couchpotato/core/_base/desktop/__init__.py
Normal file
6
couchpotato/core/_base/desktop/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from .main import Desktop
|
||||
|
||||
def start():
|
||||
return Desktop()
|
||||
|
||||
config = []
|
||||
@@ -5,9 +5,6 @@ from couchpotato.environment import Env
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Desktop'
|
||||
|
||||
|
||||
if Env.get('desktop'):
|
||||
|
||||
class Desktop(Plugin):
|
||||
@@ -1,20 +0,0 @@
|
||||
from .main import Downloader
|
||||
|
||||
|
||||
def autoload():
|
||||
return Downloader()
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'download_providers',
|
||||
'groups': [
|
||||
{
|
||||
'label': 'Downloaders',
|
||||
'description': 'You can select different downloaders for each type (usenet / torrent)',
|
||||
'type': 'list',
|
||||
'name': 'download_providers',
|
||||
'tab': 'downloaders',
|
||||
'options': [],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,76 +0,0 @@
|
||||
var DownloadersBase = new Class({
|
||||
|
||||
Implements: [Events],
|
||||
|
||||
initialize: function(){
|
||||
var self = this;
|
||||
|
||||
// Add test buttons to settings page
|
||||
App.addEvent('loadSettings', self.addTestButtons.bind(self));
|
||||
|
||||
},
|
||||
|
||||
// Downloaders setting tests
|
||||
addTestButtons: function(){
|
||||
var self = this;
|
||||
|
||||
var setting_page = App.getPage('Settings');
|
||||
setting_page.addEvent('create', function(){
|
||||
Object.each(setting_page.tabs.downloaders.groups, self.addTestButton.bind(self))
|
||||
})
|
||||
|
||||
},
|
||||
|
||||
addTestButton: function(fieldset, plugin_name){
|
||||
var self = this,
|
||||
button_name = self.testButtonName(fieldset);
|
||||
|
||||
if(button_name.contains('Downloaders')) return;
|
||||
|
||||
new Element('.ctrlHolder.test_button').adopt(
|
||||
new Element('a.button', {
|
||||
'text': button_name,
|
||||
'events': {
|
||||
'click': function(){
|
||||
var button = fieldset.getElement('.test_button .button');
|
||||
button.set('text', 'Connecting...');
|
||||
|
||||
Api.request('download.'+plugin_name+'.test', {
|
||||
'onComplete': function(json){
|
||||
|
||||
button.set('text', button_name);
|
||||
|
||||
var message;
|
||||
if(json.success){
|
||||
message = new Element('span.success', {
|
||||
'text': 'Connection successful'
|
||||
}).inject(button, 'after')
|
||||
}
|
||||
else {
|
||||
var msg_text = 'Connection failed. Check logs for details.';
|
||||
if(json.hasOwnProperty('msg')) msg_text = json.msg;
|
||||
message = new Element('span.failed', {
|
||||
'text': msg_text
|
||||
}).inject(button, 'after')
|
||||
}
|
||||
|
||||
(function(){
|
||||
message.destroy();
|
||||
}).delay(3000)
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
).inject(fieldset);
|
||||
|
||||
},
|
||||
|
||||
testButtonName: function(fieldset){
|
||||
var name = String(fieldset.getElement('h2').innerHTML).substring(0,String(fieldset.getElement('h2').innerHTML).indexOf("<span"));
|
||||
return 'Test '+name;
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
var Downloaders = new DownloadersBase();
|
||||
6
couchpotato/core/_base/scheduler/__init__.py
Normal file
6
couchpotato/core/_base/scheduler/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from .main import Scheduler
|
||||
|
||||
def start():
|
||||
return Scheduler()
|
||||
|
||||
config = []
|
||||
@@ -5,8 +5,6 @@ from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Scheduler'
|
||||
|
||||
|
||||
class Scheduler(Plugin):
|
||||
|
||||
@@ -19,7 +17,6 @@ class Scheduler(Plugin):
|
||||
addEvent('schedule.cron', self.cron)
|
||||
addEvent('schedule.interval', self.interval)
|
||||
addEvent('schedule.remove', self.remove)
|
||||
addEvent('schedule.queue', self.queue)
|
||||
|
||||
self.sched = Sched(misfire_grace_time = 60)
|
||||
self.sched.start()
|
||||
@@ -33,14 +30,14 @@ class Scheduler(Plugin):
|
||||
except:
|
||||
pass
|
||||
|
||||
def doShutdown(self, *args, **kwargs):
|
||||
def doShutdown(self):
|
||||
super(Scheduler, self).doShutdown()
|
||||
self.stop()
|
||||
return super(Scheduler, self).doShutdown(*args, **kwargs)
|
||||
|
||||
def stop(self):
|
||||
if self.started:
|
||||
log.debug('Stopping scheduler')
|
||||
self.sched.shutdown(wait = False)
|
||||
self.sched.shutdown()
|
||||
log.debug('Scheduler stopped')
|
||||
self.started = False
|
||||
|
||||
@@ -67,16 +64,3 @@ class Scheduler(Plugin):
|
||||
'seconds': seconds,
|
||||
'job': self.sched.add_interval_job(handle, hours = hours, minutes = minutes, seconds = seconds)
|
||||
}
|
||||
|
||||
return True
|
||||
|
||||
def queue(self, handlers = None):
|
||||
if not handlers: handlers = []
|
||||
|
||||
for h in handlers:
|
||||
h()
|
||||
|
||||
if self.shuttingDown():
|
||||
break
|
||||
|
||||
return True
|
||||
@@ -1,10 +1,8 @@
|
||||
import os
|
||||
|
||||
from .main import Updater
|
||||
from couchpotato.environment import Env
|
||||
import os
|
||||
|
||||
|
||||
def autoload():
|
||||
def start():
|
||||
return Updater()
|
||||
|
||||
config = [{
|
||||
|
||||
@@ -1,25 +1,20 @@
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.encoding import ss
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from datetime import datetime
|
||||
from dateutil.parser import parse
|
||||
from git.repository import LocalRepository
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import time
|
||||
import traceback
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from threading import RLock
|
||||
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import removePyc
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
from couchpotato.environment import Env
|
||||
from dateutil.parser import parse
|
||||
from git.repository import LocalRepository
|
||||
import version
|
||||
from six.moves import filter
|
||||
|
||||
import zipfile
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
@@ -27,7 +22,6 @@ log = CPLog(__name__)
|
||||
class Updater(Plugin):
|
||||
|
||||
available_notified = False
|
||||
_lock = RLock()
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -38,7 +32,6 @@ class Updater(Plugin):
|
||||
else:
|
||||
self.updater = SourceUpdater()
|
||||
|
||||
addEvent('app.load', self.logVersion, priority = 10000)
|
||||
addEvent('app.load', self.setCrons)
|
||||
addEvent('updater.info', self.info)
|
||||
|
||||
@@ -60,16 +53,12 @@ class Updater(Plugin):
|
||||
|
||||
addEvent('setting.save.updater.enabled.after', self.setCrons)
|
||||
|
||||
def logVersion(self):
|
||||
info = self.info()
|
||||
log.info('=== VERSION %s, using %s ===', (info.get('version', {}).get('repr', 'UNKNOWN'), self.updater.getName()))
|
||||
|
||||
def setCrons(self):
|
||||
|
||||
fireEvent('schedule.remove', 'updater.check', single = True)
|
||||
if self.isEnabled():
|
||||
fireEvent('schedule.interval', 'updater.check', self.autoUpdate, hours = 6)
|
||||
self.autoUpdate() # Check after enabling
|
||||
self.autoUpdate() # Check after enabling
|
||||
|
||||
def autoUpdate(self):
|
||||
if self.isEnabled() and self.check() and self.conf('automatic') and not self.updater.update_failed:
|
||||
@@ -105,17 +94,7 @@ class Updater(Plugin):
|
||||
return False
|
||||
|
||||
def info(self, **kwargs):
|
||||
self._lock.acquire()
|
||||
|
||||
info = {}
|
||||
try:
|
||||
info = self.updater.info()
|
||||
except:
|
||||
log.error('Failed getting updater info: %s', traceback.format_exc())
|
||||
|
||||
self._lock.release()
|
||||
|
||||
return info
|
||||
return self.updater.info()
|
||||
|
||||
def checkView(self, **kwargs):
|
||||
return {
|
||||
@@ -142,12 +121,6 @@ class Updater(Plugin):
|
||||
'success': success
|
||||
}
|
||||
|
||||
def doShutdown(self, *args, **kwargs):
|
||||
if not Env.get('dev') and not Env.get('desktop'):
|
||||
removePyc(Env.get('app_dir'), show_logs = False)
|
||||
|
||||
return super(Updater, self).doShutdown(*args, **kwargs)
|
||||
|
||||
|
||||
class BaseUpdater(Plugin):
|
||||
|
||||
@@ -165,23 +138,42 @@ class BaseUpdater(Plugin):
|
||||
pass
|
||||
|
||||
def info(self):
|
||||
|
||||
current_version = self.getVersion()
|
||||
|
||||
return {
|
||||
'last_check': self.last_check,
|
||||
'update_version': self.update_version,
|
||||
'version': current_version,
|
||||
'version': self.getVersion(),
|
||||
'repo_name': '%s/%s' % (self.repo_user, self.repo_name),
|
||||
'branch': current_version.get('branch', self.branch),
|
||||
'branch': self.branch,
|
||||
}
|
||||
|
||||
def getVersion(self):
|
||||
pass
|
||||
|
||||
def check(self):
|
||||
pass
|
||||
|
||||
def deletePyc(self, only_excess = True):
|
||||
|
||||
for root, dirs, files in os.walk(ss(Env.get('app_dir'))):
|
||||
|
||||
pyc_files = filter(lambda filename: filename.endswith('.pyc'), files)
|
||||
py_files = set(filter(lambda filename: filename.endswith('.py'), files))
|
||||
excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
|
||||
|
||||
for excess_pyc_file in excess_pyc_files:
|
||||
full_path = os.path.join(root, excess_pyc_file)
|
||||
log.debug('Removing old PYC file: %s', full_path)
|
||||
try:
|
||||
os.remove(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
for dir_name in dirs:
|
||||
full_path = os.path.join(root, dir_name)
|
||||
if len(os.listdir(full_path)) == 0:
|
||||
try:
|
||||
os.rmdir(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
|
||||
|
||||
class GitUpdater(BaseUpdater):
|
||||
|
||||
@@ -191,9 +183,15 @@ class GitUpdater(BaseUpdater):
|
||||
def doUpdate(self):
|
||||
|
||||
try:
|
||||
log.debug('Stashing local changes')
|
||||
self.repo.saveStash()
|
||||
|
||||
log.info('Updating to latest version')
|
||||
self.repo.pull()
|
||||
|
||||
# Delete leftover .pyc files
|
||||
self.deletePyc()
|
||||
|
||||
return True
|
||||
except:
|
||||
log.error('Failed updating via GIT: %s', traceback.format_exc())
|
||||
@@ -206,16 +204,14 @@ class GitUpdater(BaseUpdater):
|
||||
|
||||
if not self.version:
|
||||
try:
|
||||
output = self.repo.getHead() # Yes, please
|
||||
output = self.repo.getHead() # Yes, please
|
||||
log.debug('Git version output: %s', output.hash)
|
||||
self.version = {
|
||||
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.repo.getCurrentBranch().name or self.branch, output.hash[:8], datetime.fromtimestamp(output.getDate())),
|
||||
'hash': output.hash[:8],
|
||||
'date': output.getDate(),
|
||||
'type': 'git',
|
||||
'branch': self.repo.getCurrentBranch().name
|
||||
}
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
|
||||
return 'No GIT'
|
||||
|
||||
@@ -238,7 +234,7 @@ class GitUpdater(BaseUpdater):
|
||||
local = self.repo.getHead()
|
||||
remote = branch.getHead()
|
||||
|
||||
log.debug('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
|
||||
log.info('Versions, local:%s, remote:%s', (local.hash[:8], remote.hash[:8]))
|
||||
|
||||
if local.getDate() < remote.getDate():
|
||||
self.update_version = {
|
||||
@@ -251,6 +247,7 @@ class GitUpdater(BaseUpdater):
|
||||
return False
|
||||
|
||||
|
||||
|
||||
class SourceUpdater(BaseUpdater):
|
||||
|
||||
def __init__(self):
|
||||
@@ -276,9 +273,9 @@ class SourceUpdater(BaseUpdater):
|
||||
|
||||
# Extract
|
||||
if download_data.get('type') == 'zip':
|
||||
zip_file = zipfile.ZipFile(destination)
|
||||
zip_file.extractall(extracted_path)
|
||||
zip_file.close()
|
||||
zip = zipfile.ZipFile(destination)
|
||||
zip.extractall(extracted_path)
|
||||
zip.close()
|
||||
else:
|
||||
tar = tarfile.open(destination)
|
||||
tar.extractall(path = extracted_path)
|
||||
@@ -300,12 +297,11 @@ class SourceUpdater(BaseUpdater):
|
||||
return False
|
||||
|
||||
def replaceWith(self, path):
|
||||
path = sp(path)
|
||||
app_dir = Env.get('app_dir')
|
||||
data_dir = Env.get('data_dir')
|
||||
app_dir = ss(Env.get('app_dir'))
|
||||
data_dir = ss(Env.get('data_dir'))
|
||||
|
||||
# Get list of files we want to overwrite
|
||||
removePyc(app_dir)
|
||||
self.deletePyc()
|
||||
existing_files = []
|
||||
for root, subfiles, filenames in os.walk(app_dir):
|
||||
for filename in filenames:
|
||||
@@ -346,12 +342,13 @@ class SourceUpdater(BaseUpdater):
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def removeDir(self, path):
|
||||
try:
|
||||
if os.path.isdir(path):
|
||||
shutil.rmtree(path)
|
||||
except OSError as inst:
|
||||
os.chmod(inst.filename, 0o777)
|
||||
except OSError, inst:
|
||||
os.chmod(inst.filename, 0777)
|
||||
self.removeDir(path)
|
||||
|
||||
def getVersion(self):
|
||||
@@ -365,8 +362,7 @@ class SourceUpdater(BaseUpdater):
|
||||
log.debug('Source version output: %s', output)
|
||||
self.version = output
|
||||
self.version['type'] = 'source'
|
||||
self.version['repr'] = 'source:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.branch, output.get('hash', '')[:8], datetime.fromtimestamp(output.get('date', 0)))
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
log.error('Failed using source updater. %s', e)
|
||||
return {}
|
||||
|
||||
@@ -396,7 +392,7 @@ class SourceUpdater(BaseUpdater):
|
||||
|
||||
return {
|
||||
'hash': commit['sha'],
|
||||
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
|
||||
'date': int(time.mktime(parse(commit['commit']['committer']['date']).timetuple())),
|
||||
}
|
||||
except:
|
||||
log.error('Failed getting latest request from github: %s', traceback.format_exc())
|
||||
@@ -441,7 +437,7 @@ class DesktopUpdater(BaseUpdater):
|
||||
if latest and latest != current_version.get('hash'):
|
||||
self.update_version = {
|
||||
'hash': latest,
|
||||
'date': None,
|
||||
'date': None,
|
||||
'changelog': self.desktop._changelogURL,
|
||||
}
|
||||
|
||||
@@ -453,7 +449,6 @@ class DesktopUpdater(BaseUpdater):
|
||||
|
||||
def getVersion(self):
|
||||
return {
|
||||
'repr': 'desktop: %s' % self.desktop._esky.active_version,
|
||||
'hash': self.desktop._esky.active_version,
|
||||
'date': None,
|
||||
'type': 'desktop',
|
||||
|
||||
@@ -5,7 +5,7 @@ var UpdaterBase = new Class({
|
||||
initialize: function(){
|
||||
var self = this;
|
||||
|
||||
App.addEvent('load', self.info.bind(self, 2000));
|
||||
App.addEvent('load', self.info.bind(self, 2000))
|
||||
App.addEvent('unload', function(){
|
||||
if(self.timer)
|
||||
clearTimeout(self.timer);
|
||||
@@ -24,7 +24,7 @@ var UpdaterBase = new Class({
|
||||
self.doUpdate();
|
||||
else {
|
||||
App.unBlockPage();
|
||||
App.trigger('message', ['No updates available']);
|
||||
App.fireEvent('message', 'No updates available');
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -66,7 +66,7 @@ var UpdaterBase = new Class({
|
||||
|
||||
var changelog = 'https://github.com/'+data.repo_name+'/compare/'+data.version.hash+'...'+data.branch;
|
||||
if(data.update_version.changelog)
|
||||
changelog = data.update_version.changelog + '#' + data.version.hash+'...'+data.update_version.hash;
|
||||
changelog = data.update_version.changelog + '#' + data.version.hash+'...'+data.update_version.hash
|
||||
|
||||
self.message = new Element('div.message.update').adopt(
|
||||
new Element('span', {
|
||||
|
||||
@@ -1,637 +0,0 @@
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
from sqlite3 import OperationalError
|
||||
from CodernityDB3.index import Index
|
||||
|
||||
from couchpotato import CPLog
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.database import IndexException, IndexNotFoundException, IndexConflict, RecordNotFound
|
||||
from couchpotato.core.helpers.encoding import toUnicode, sp
|
||||
from couchpotato.core.helpers.variable import getImdb, tryInt, randomString
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Database(object):
|
||||
|
||||
indexes = None
|
||||
db = None
|
||||
|
||||
def __init__(self):
|
||||
|
||||
self.indexes = {}
|
||||
|
||||
addApiView('database.list_documents', self.listDocuments)
|
||||
addApiView('database.reindex', self.reindex)
|
||||
addApiView('database.compact', self.compact)
|
||||
addApiView('database.document.update', self.updateDocument)
|
||||
addApiView('database.document.delete', self.deleteDocument)
|
||||
|
||||
addEvent('database.setup.after', self.startup_compact)
|
||||
addEvent('database.setup_index', self.setupIndex)
|
||||
addEvent('database.delete_corrupted', self.deleteCorrupted)
|
||||
|
||||
addEvent('app.migrate', self.migrate)
|
||||
addEvent('app.after_shutdown', self.close)
|
||||
|
||||
def getDB(self):
|
||||
|
||||
if not self.db:
|
||||
from couchpotato import get_db
|
||||
self.db = get_db()
|
||||
|
||||
return self.db
|
||||
|
||||
def close(self, **kwargs):
|
||||
self.getDB().close()
|
||||
|
||||
def setupIndex(self, index_name, klass):
|
||||
|
||||
self.indexes[index_name] = klass
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
# Category index
|
||||
index_instance = klass(db.path, index_name)
|
||||
try:
|
||||
|
||||
# Make sure store and bucket don't exist
|
||||
exists = []
|
||||
for x in ['buck', 'stor']:
|
||||
full_path = os.path.join(db.path, '%s_%s' % (index_name, x))
|
||||
if os.path.exists(full_path):
|
||||
exists.append(full_path)
|
||||
|
||||
if index_name not in db.indexes_names:
|
||||
|
||||
# Remove existing buckets if index isn't there
|
||||
for x in exists:
|
||||
os.unlink(x)
|
||||
|
||||
# Add index (will restore buckets)
|
||||
db.add_index(index_instance)
|
||||
db.reindex_index(index_name)
|
||||
else:
|
||||
# Previous info
|
||||
previous = db.indexes_names[index_name]
|
||||
previous_version = previous._version
|
||||
current_version = klass._version
|
||||
|
||||
# Only edit index if versions are different
|
||||
if previous_version < current_version:
|
||||
log.debug('Index "%s" already exists, updating and reindexing', index_name)
|
||||
db.destroy_index(previous)
|
||||
db.add_index(index_instance)
|
||||
db.reindex_index(index_name)
|
||||
|
||||
except:
|
||||
log.error('Failed adding index %s: %s', (index_name, traceback.format_exc()))
|
||||
|
||||
def deleteDocument(self, **kwargs):
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
try:
|
||||
|
||||
document_id = kwargs.get('_request').get_argument('id')
|
||||
document = db.get('id', document_id)
|
||||
db.delete(document)
|
||||
|
||||
return {
|
||||
'success': True
|
||||
}
|
||||
except:
|
||||
return {
|
||||
'success': False,
|
||||
'error': traceback.format_exc()
|
||||
}
|
||||
|
||||
def updateDocument(self, **kwargs):
|
||||
|
||||
db = self.getDB()
|
||||
|
||||
try:
|
||||
|
||||
document = json.loads(kwargs.get('_request').get_argument('document'))
|
||||
d = db.update(document)
|
||||
document.update(d)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'document': document
|
||||
}
|
||||
except:
|
||||
return {
|
||||
'success': False,
|
||||
'error': traceback.format_exc()
|
||||
}
|
||||
|
||||
def listDocuments(self, **kwargs):
|
||||
db = self.getDB()
|
||||
|
||||
results = {
|
||||
'unknown': []
|
||||
}
|
||||
|
||||
for document in db.all('id'):
|
||||
key = document.get('_t', 'unknown')
|
||||
|
||||
if kwargs.get('show') and key != kwargs.get('show'):
|
||||
continue
|
||||
|
||||
if not results.get(key):
|
||||
results[key] = []
|
||||
results[key].append(document)
|
||||
|
||||
return results
|
||||
|
||||
def deleteCorrupted(self, _id, traceback_error = ''):
    """Best-effort removal of a corrupted document's id-index entry.

    :param _id: id of the corrupted document.
    :param traceback_error: optional traceback text from the failure that
        triggered this cleanup; only used for logging.
    """

    db = self.getDB()

    try:
        log.debug('Deleted corrupted document "%s": %s', (_id, traceback_error))
        # with_storage=False: the stored payload is corrupt, only fetch
        # the index metadata (_id/_rev) needed to drop the index entry.
        corrupted = db.get('id', _id, with_storage = False)
        # NOTE(review): reaches into the DB's private API to delete the
        # id-index entry directly instead of db.delete() — presumably
        # because a normal delete would try to read the broken storage.
        db._delete_id_index(corrupted.get('_id'), corrupted.get('_rev'), None)
    except:
        # Cleanup is best-effort; never let it propagate.
        log.debug('Failed deleting corrupted: %s', traceback.format_exc())
|
||||
|
||||
def reindex(self, **kwargs):
    """Rebuild all database indexes.

    Returns {'success': bool}; failures are logged, never raised.
    """
    success = True
    try:
        self.getDB().reindex()
    except:
        log.error('Failed index: %s', traceback.format_exc())
        success = False

    return {'success': success}
|
||||
|
||||
def compact(self, try_repair = True, **kwargs):
    """Compact the database to reclaim disk space.

    On index errors the method optionally destroys and recreates all
    indexes, then retries the compaction exactly once.

    :param try_repair: when True, attempt an index repair + single retry
        on IndexException/AttributeError.
    :return: {'success': bool}
    """

    success = False
    db = self.getDB()

    # Removing left over compact files
    # (a previously interrupted compact leaves *_compact_buck /
    # *_compact_stor temp files behind that would block a new run)
    db_path = sp(db.path)
    for f in os.listdir(sp(db.path)):
        for x in ['_compact_buck', '_compact_stor']:
            if f[-len(x):] == x:
                os.unlink(os.path.join(db_path, f))

    try:
        start = time.time()
        size = float(db.get_db_details().get('size', 0))
        log.debug('Compacting database, current size: %sMB', round(size/1048576, 2))

        db.compact()
        new_size = float(db.get_db_details().get('size', 0))
        log.debug('Done compacting database in %ss, new size: %sMB, saved: %sMB', (round(time.time()-start, 2), round(new_size/1048576, 2), round((size-new_size)/1048576, 2)))
        success = True
    except (IndexException, AttributeError):
        if try_repair:
            log.error('Something wrong with indexes, trying repair')

            # Remove all indexes
            old_indexes = self.indexes.keys()
            for index_name in old_indexes:
                try:
                    db.destroy_index(index_name)
                except IndexNotFoundException:
                    # Already gone — nothing to destroy.
                    pass
                except:
                    log.error('Failed removing old index %s', index_name)

            # Add them again
            for index_name in self.indexes:
                klass = self.indexes[index_name]

                # Category index
                index_instance = klass(db.path, index_name)
                try:
                    db.add_index(index_instance)
                    db.reindex_index(index_name)
                except IndexConflict:
                    # Index already registered — safe to continue.
                    pass
                except:
                    log.error('Failed adding index %s', index_name)
                    raise

            # Retry once with repair disabled to avoid infinite recursion.
            # NOTE(review): the retry's return value is discarded, so this
            # outer call still reports success=False even when the retry
            # compacts cleanly — confirm whether that is intended.
            self.compact(try_repair = False)
        else:
            log.error('Failed compact: %s', traceback.format_exc())

    except:
        log.error('Failed compact: %s', traceback.format_exc())

    return {
        'success': success
    }
|
||||
|
||||
# Compact on start
|
||||
def startup_compact(self):
    """Startup maintenance: recover from a failed desktop migration if
    detected, otherwise compact the database when it is large enough and
    the last compaction is old enough.
    """
    from couchpotato import Env

    db = self.getDB()

    # Try fix for migration failures on desktop
    if Env.get('desktop'):
        try:
            # Probe the 'profile' index; a RecordNotFound here is used as
            # the signal that a previous migration left the DB corrupt.
            list(db.all('profile', with_doc = True))
        except RecordNotFound:

            failed_location = '%s_failed' % db.path
            old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db.old')

            # Only retry if we haven't already failed once (no *_failed
            # folder) and the pre-migration .old database still exists.
            if not os.path.isdir(failed_location) and os.path.isfile(old_db):
                log.error('Corrupt database, trying migrate again')
                db.close()

                # Rename database folder
                os.rename(db.path, '%s_failed' % db.path)

                # Rename .old database to try another migrate
                # (strip the '.old' suffix so it is picked up on restart)
                os.rename(old_db, old_db[:-4])

                fireEventAsync('app.restart')
            else:
                log.error('Migration failed and couldn\'t recover database. Please report on GitHub, with this message.')
                db.reindex()

            # Recovery path taken — skip the size-based compaction below.
            return

    # Check size and compact if needed
    size = db.get_db_details().get('size')
    prop_name = 'last_db_compact'
    last_check = int(Env.prop(prop_name, default = 0))

    if size > 26214400 and last_check < time.time()-604800: # 25MB / 7 days
        self.compact()
        # Remember when we last compacted so it is rate-limited to weekly.
        Env.prop(prop_name, value = int(time.time()))
|
||||
|
||||
def migrate(self):
    """One-shot migration of the legacy SQLite database
    (data_dir/couchpotato.db) into the current document database.

    Reads every legacy table listed in migrate_list, re-creates
    properties, categories, profiles, quality sizes, media and releases
    through the new DB / event APIs, and finally renames the old SQLite
    files out of the way so the migration never runs twice. No-op when
    the old database file does not exist.
    """

    from couchpotato import Env
    old_db = os.path.join(Env.get('data_dir'), 'couchpotato.db')
    if not os.path.isfile(old_db): return

    log.info('=' * 30)
    log.info('Migrating database, hold on..')
    time.sleep(1)

    if os.path.isfile(old_db):

        migrate_start = time.time()

        import sqlite3
        conn = sqlite3.connect(old_db)

        # Legacy table name -> ordered list of columns to pull. The first
        # column of each list (usually 'id') becomes the row key below.
        migrate_list = {
            'category': ['id', 'label', 'order', 'required', 'preferred', 'ignored', 'destination'],
            'profile': ['id', 'label', 'order', 'core', 'hide'],
            'profiletype': ['id', 'order', 'finish', 'wait_for', 'quality_id', 'profile_id'],
            'quality': ['id', 'identifier', 'order', 'size_min', 'size_max'],
            'movie': ['id', 'last_edit', 'library_id', 'status_id', 'profile_id', 'category_id'],
            'library': ['id', 'identifier', 'info'],
            'librarytitle': ['id', 'title', 'default', 'libraries_id'],
            'library_files__file_library': ['library_id', 'file_id'],
            'release': ['id', 'identifier', 'movie_id', 'status_id', 'quality_id', 'last_edit'],
            'releaseinfo': ['id', 'identifier', 'value', 'release_id'],
            'release_files__file_release': ['release_id', 'file_id'],
            'status': ['id', 'identifier'],
            'properties': ['id', 'identifier', 'value'],
            'file': ['id', 'path', 'type_id'],
            'filetype': ['identifier', 'id']
        }

        migrate_data = {}
        rename_old = False

        try:

            c = conn.cursor()

            # Pull every table into migrate_data[table][first_column].
            for ml in migrate_list:
                migrate_data[ml] = {}
                rows = migrate_list[ml]

                try:
                    c.execute('SELECT %s FROM `%s`' % ('`' + '`,`'.join(rows) + '`', ml))
                except:
                    # ignore faulty destination_id database
                    if ml == 'category':
                        migrate_data[ml] = {}
                    else:
                        # Unreadable table: abort, but still rename the
                        # old DB so we don't retry forever.
                        rename_old = True
                        raise

                for p in c.fetchall():
                    columns = {}
                    for row in migrate_list[ml]:
                        columns[row] = p[rows.index(row)]

                    # Duplicate keys (e.g. link tables keyed on a non-
                    # unique column) are collected into a list.
                    if not migrate_data[ml].get(p[0]):
                        migrate_data[ml][p[0]] = columns
                    else:
                        if not isinstance(migrate_data[ml][p[0]], list):
                            migrate_data[ml][p[0]] = [migrate_data[ml][p[0]]]
                        migrate_data[ml][p[0]].append(columns)

            conn.close()

            log.info('Getting data took %s', time.time() - migrate_start)

            db = self.getDB()
            if not db.opened:
                return

            # Use properties
            properties = migrate_data['properties']
            log.info('Importing %s properties', len(properties))
            for x in properties:
                property = properties[x]
                Env.prop(property.get('identifier'), property.get('value'))

            # Categories: insert fresh docs, remember old id -> new _id.
            categories = migrate_data.get('category', [])
            log.info('Importing %s categories', len(categories))
            category_link = {}
            for x in categories:
                c = categories[x]

                new_c = db.insert({
                    '_t': 'category',
                    'order': c.get('order', 999),
                    'label': toUnicode(c.get('label', '')),
                    'ignored': toUnicode(c.get('ignored', '')),
                    'preferred': toUnicode(c.get('preferred', '')),
                    'required': toUnicode(c.get('required', '')),
                    'destination': toUnicode(c.get('destination', '')),
                })

                category_link[x] = new_c.get('_id')

            # Profiles
            log.info('Importing profiles')
            new_profiles = db.all('profile', with_doc = True)
            new_profiles_by_label = {}
            for x in new_profiles:

                # Remove default non core profiles
                if not x['doc'].get('core'):
                    db.delete(x['doc'])
                else:
                    new_profiles_by_label[x['doc']['label']] = x['_id']

            profiles = migrate_data['profile']
            profile_link = {}
            for x in profiles:
                p = profiles[x]

                exists = new_profiles_by_label.get(p.get('label'))

                # Update existing with order only
                if exists and p.get('core'):
                    profile = db.get('id', exists)
                    profile['order'] = tryInt(p.get('order'))
                    profile['hide'] = p.get('hide') in [1, True, 'true', 'True']
                    db.update(profile)

                    profile_link[x] = profile.get('_id')
                else:

                    new_profile = {
                        '_t': 'profile',
                        'label': p.get('label'),
                        'order': int(p.get('order', 999)),
                        'core': p.get('core', False),
                        'qualities': [],
                        'wait_for': [],
                        'finish': []
                    }

                    # Rebuild the quality list from the old profiletype
                    # join table.
                    types = migrate_data['profiletype']
                    for profile_type in types:
                        p_type = types[profile_type]
                        if types[profile_type]['profile_id'] == p['id']:
                            if p_type['quality_id']:
                                new_profile['finish'].append(p_type['finish'])
                                new_profile['wait_for'].append(p_type['wait_for'])
                                new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier'])

                    if len(new_profile['qualities']) > 0:
                        new_profile.update(db.insert(new_profile))
                        profile_link[x] = new_profile.get('_id')
                    else:
                        log.error('Corrupt profile list for "%s", using default.', p.get('label'))

            # Qualities: only sizes/order are copied onto existing docs.
            log.info('Importing quality sizes')
            new_qualities = db.all('quality', with_doc = True)
            new_qualities_by_identifier = {}
            for x in new_qualities:
                new_qualities_by_identifier[x['doc']['identifier']] = x['_id']

            qualities = migrate_data['quality']
            quality_link = {}
            for x in qualities:
                q = qualities[x]
                q_id = new_qualities_by_identifier[q.get('identifier')]

                quality = db.get('id', q_id)
                quality['order'] = q.get('order')
                quality['size_min'] = tryInt(q.get('size_min'))
                quality['size_max'] = tryInt(q.get('size_max'))
                db.update(quality)

                quality_link[x] = quality

            # Titles: keep only the default title per library.
            titles = migrate_data['librarytitle']
            titles_by_library = {}
            for x in titles:
                title = titles[x]
                if title.get('default'):
                    titles_by_library[title.get('libraries_id')] = title.get('title')

            # Releases: fold releaseinfo key/values into each release dict.
            releaseinfos = migrate_data['releaseinfo']
            for x in releaseinfos:
                info = releaseinfos[x]

                # Skip if release doesn't exist for this info
                if not migrate_data['release'].get(info.get('release_id')):
                    continue

                if not migrate_data['release'][info.get('release_id')].get('info'):
                    migrate_data['release'][info.get('release_id')]['info'] = {}

                migrate_data['release'][info.get('release_id')]['info'][info.get('identifier')] = info.get('value')

            releases = migrate_data['release']
            releases_by_media = {}
            for x in releases:
                release = releases[x]
                if not releases_by_media.get(release.get('movie_id')):
                    releases_by_media[release.get('movie_id')] = []

                releases_by_media[release.get('movie_id')].append(release)

            # Type ids
            types = migrate_data['filetype']
            type_by_id = {}
            for t in types:
                type = types[t]
                type_by_id[type.get('id')] = type

            # Media
            log.info('Importing %s media items', len(migrate_data['movie']))
            statuses = migrate_data['status']
            libraries = migrate_data['library']
            library_files = migrate_data['library_files__file_library']
            releases_files = migrate_data['release_files__file_release']
            all_files = migrate_data['file']
            poster_type = migrate_data['filetype']['poster']
            medias = migrate_data['movie']
            for x in medias:
                m = medias[x]

                status = statuses.get(m['status_id']).get('identifier')
                l = libraries.get(m['library_id'])

                # Only migrate wanted movies, Skip if no identifier present
                if not l or not getImdb(l.get('identifier')): continue

                profile_id = profile_link.get(m['profile_id'])
                category_id = category_link.get(m['category_id'])
                title = titles_by_library.get(m['library_id'])
                releases = releases_by_media.get(x, [])
                info = json.loads(l.get('info', ''))

                files = library_files.get(m['library_id'], [])
                if not isinstance(files, list):
                    files = [files]

                # Re-add the movie through the normal add pipeline, but
                # without triggering searches/updates/notifications.
                added_media = fireEvent('movie.add', {
                    'info': info,
                    'identifier': l.get('identifier'),
                    'profile_id': profile_id,
                    'category_id': category_id,
                    'title': title
                }, force_readd = False, search_after = False, update_after = False, notify_after = False, status = status, single = True)

                if not added_media:
                    log.error('Failed adding media %s: %s', (l.get('identifier'), info))
                    continue

                added_media['files'] = added_media.get('files', {})
                for f in files:
                    ffile = all_files[f.get('file_id')]

                    # Only migrate posters
                    if ffile.get('type_id') == poster_type.get('id'):
                        if ffile.get('path') not in added_media['files'].get('image_poster', []) and os.path.isfile(ffile.get('path')):
                            added_media['files']['image_poster'] = [ffile.get('path')]
                            break

                if 'image_poster' in added_media['files']:
                    db.update(added_media)

                for rel in releases:

                    empty_info = False
                    if not rel.get('info'):
                        empty_info = True
                        rel['info'] = {}

                    quality = quality_link.get(rel.get('quality_id'))
                    if not quality:
                        continue

                    release_status = statuses.get(rel.get('status_id')).get('identifier')

                    if rel['info'].get('download_id'):
                        status_support = rel['info'].get('download_status_support', False) in [True, 'true', 'True']
                        rel['info']['download_info'] = {
                            'id': rel['info'].get('download_id'),
                            'downloader': rel['info'].get('download_downloader'),
                            'status_support': status_support,
                        }

                    # Add status to keys
                    rel['info']['status'] = release_status
                    if not empty_info:
                        # Release had scraped info: rebuild it via the
                        # normal search-result path.
                        fireEvent('release.create_from_search', [rel['info']], added_media, quality, single = True)
                    else:
                        # No info: construct a bare release document.
                        release = {
                            '_t': 'release',
                            'identifier': rel.get('identifier'),
                            'media_id': added_media.get('_id'),
                            'quality': quality.get('identifier'),
                            'status': release_status,
                            'last_edit': int(time.time()),
                            'files': {}
                        }

                        # Add downloader info if provided
                        try:
                            release['download_info'] = rel['info']['download_info']
                            # NOTE(review): 'download_info' lives under
                            # rel['info'], not rel — this del raises
                            # KeyError after the copy and is swallowed by
                            # the bare except below. Confirm intent.
                            del rel['download_info']
                        except:
                            pass

                        # Add files
                        release_files = releases_files.get(rel.get('id'), [])
                        if not isinstance(release_files, list):
                            release_files = [release_files]

                        if len(release_files) == 0:
                            continue

                        for f in release_files:
                            rfile = all_files.get(f.get('file_id'))
                            if not rfile:
                                continue

                            file_type = type_by_id.get(rfile.get('type_id')).get('identifier')

                            if not release['files'].get(file_type):
                                release['files'][file_type] = []

                            release['files'][file_type].append(rfile.get('path'))

                        # Update an existing release doc if one matches
                        # the identifier, otherwise insert a new one.
                        try:
                            rls = db.get('release_identifier', rel.get('identifier'), with_doc = True)['doc']
                            rls.update(release)
                            db.update(rls)
                        except:
                            db.insert(release)

            log.info('Total migration took %s', time.time() - migrate_start)
            log.info('=' * 30)

            rename_old = True

        except OperationalError:
            log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
        except:
            log.error('Migration failed: %s', traceback.format_exc())


        # rename old database
        # (a random suffix avoids clobbering earlier *_old backups)
        if rename_old:
            random = randomString()
            log.info('Renaming old database to %s ', '%s.%s_old' % (old_db, random))
            os.rename(old_db, '%s.%s_old' % (old_db, random))

            # Also move the SQLite WAL/SHM sidecar files if present.
            if os.path.isfile(old_db + '-wal'):
                os.rename(old_db + '-wal', '%s-wal.%s_old' % (old_db, random))
            if os.path.isfile(old_db + '-shm'):
                os.rename(old_db + '-shm', '%s-shm.%s_old' % (old_db, random))
|
||||
@@ -0,0 +1,13 @@
|
||||
config = [{
|
||||
'name': 'download_providers',
|
||||
'groups': [
|
||||
{
|
||||
'label': 'Downloaders',
|
||||
'description': 'You can select different downloaders for each type (usenet / torrent)',
|
||||
'type': 'list',
|
||||
'name': 'download_providers',
|
||||
'tab': 'downloaders',
|
||||
'options': [],
|
||||
},
|
||||
],
|
||||
}]
|
||||
|
||||
@@ -1,51 +1,36 @@
|
||||
from base64 import b32decode, b16encode
|
||||
import random
|
||||
import re
|
||||
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.variable import mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import Provider
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
from couchpotato.core.providers.base import Provider
|
||||
import random
|
||||
import re
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
## This is here to load the static files
|
||||
class Downloader(Plugin):
|
||||
pass
|
||||
|
||||
|
||||
class DownloaderBase(Provider):
|
||||
class Downloader(Provider):
|
||||
|
||||
protocol = []
|
||||
http_time_between_calls = 0
|
||||
status_support = True
|
||||
|
||||
torrent_sources = [
|
||||
'https://zoink.it/torrent/%s.torrent',
|
||||
'http://torrage.com/torrent/%s.torrent',
|
||||
'https://torcache.net/torrent/%s.torrent',
|
||||
]
|
||||
|
||||
torrent_trackers = [
|
||||
'udp://tracker.istole.it:80/announce',
|
||||
'http://tracker.istole.it/announce',
|
||||
'udp://fr33domtracker.h33t.com:3310/announce',
|
||||
'http://tracker.publicbt.com/announce',
|
||||
'udp://tracker.publicbt.com:80/announce',
|
||||
'udp://tracker.istole.it:80/announce',
|
||||
'udp://fr33domtracker.h33t.com:3310/announce',
|
||||
'http://tracker.istole.it/announce',
|
||||
'http://tracker.ccc.de/announce',
|
||||
'udp://tracker.publicbt.com:80/announce',
|
||||
'udp://tracker.ccc.de:80/announce',
|
||||
'http://exodus.desync.com/announce',
|
||||
'http://exodus.desync.com:6969/announce',
|
||||
'http://tracker.publichd.eu/announce',
|
||||
'udp://tracker.publichd.eu:80/announce',
|
||||
'http://tracker.openbittorrent.com/announce',
|
||||
'udp://tracker.openbittorrent.com/announce',
|
||||
'udp://tracker.openbittorrent.com:80/announce',
|
||||
'udp://open.demonii.com:1337/announce',
|
||||
]
|
||||
|
||||
def __init__(self):
|
||||
@@ -56,7 +41,6 @@ class DownloaderBase(Provider):
|
||||
addEvent('download.remove_failed', self._removeFailed)
|
||||
addEvent('download.pause', self._pause)
|
||||
addEvent('download.process_complete', self._processComplete)
|
||||
addApiView('download.%s.test' % self.getName().lower(), self._test)
|
||||
|
||||
def getEnabledProtocol(self):
|
||||
for download_protocol in self.protocol:
|
||||
@@ -65,30 +49,22 @@ class DownloaderBase(Provider):
|
||||
|
||||
return []
|
||||
|
||||
def _download(self, data = None, media = None, manual = False, filedata = None):
|
||||
if not media: media = {}
|
||||
def _download(self, data = None, movie = None, manual = False, filedata = None):
|
||||
if not movie: movie = {}
|
||||
if not data: data = {}
|
||||
|
||||
if self.isDisabled(manual, data):
|
||||
return
|
||||
return self.download(data = data, media = media, filedata = filedata)
|
||||
return self.download(data = data, movie = movie, filedata = filedata)
|
||||
|
||||
def download(self, *args, **kwargs):
|
||||
return False
|
||||
|
||||
def _getAllDownloadStatus(self, download_ids):
|
||||
def _getAllDownloadStatus(self):
|
||||
if self.isDisabled(manual = True, data = {}):
|
||||
return
|
||||
|
||||
ids = [download_id['id'] for download_id in download_ids if download_id['downloader'] == self.getName()]
|
||||
return self.getAllDownloadStatus()
|
||||
|
||||
if ids:
|
||||
return self.getAllDownloadStatus(ids)
|
||||
else:
|
||||
return
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
return []
|
||||
def getAllDownloadStatus(self):
|
||||
return
|
||||
|
||||
def _removeFailed(self, release_download):
|
||||
if self.isDisabled(manual = True, data = {}):
|
||||
@@ -152,7 +128,6 @@ class DownloaderBase(Provider):
|
||||
def downloadReturnId(self, download_id):
|
||||
return {
|
||||
'downloader': self.getName(),
|
||||
'status_support': self.status_support,
|
||||
'id': download_id
|
||||
}
|
||||
|
||||
@@ -172,19 +147,10 @@ class DownloaderBase(Provider):
|
||||
if not data: data = {}
|
||||
|
||||
d_manual = self.conf('manual', default = False)
|
||||
return super(DownloaderBase, self).isEnabled() and \
|
||||
return super(Downloader, self).isEnabled() and \
|
||||
(d_manual and manual or d_manual is False) and \
|
||||
(not data or self.isCorrectProtocol(data.get('protocol')))
|
||||
|
||||
def _test(self, **kwargs):
|
||||
t = self.test()
|
||||
if isinstance(t, tuple):
|
||||
return {'success': t[0], 'msg': t[1]}
|
||||
return {'success': t}
|
||||
|
||||
def test(self):
|
||||
return False
|
||||
|
||||
def _pause(self, release_download, pause = True):
|
||||
if self.isDisabled(manual = True, data = {}):
|
||||
return
|
||||
@@ -198,7 +164,6 @@ class DownloaderBase(Provider):
|
||||
def pause(self, release_download, pause):
|
||||
return
|
||||
|
||||
|
||||
class ReleaseDownloadList(list):
|
||||
|
||||
provider = None
|
||||
@@ -225,7 +190,7 @@ class ReleaseDownloadList(list):
|
||||
'status': 'busy',
|
||||
'downloader': self.provider.getName(),
|
||||
'folder': '',
|
||||
'files': [],
|
||||
'files': '',
|
||||
}
|
||||
|
||||
return mergeDicts(defaults, result)
|
||||
55
couchpotato/core/downloaders/blackhole/__init__.py
Normal file
55
couchpotato/core/downloaders/blackhole/__init__.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from .main import Blackhole
|
||||
from couchpotato.core.helpers.variable import getDownloadDir
|
||||
|
||||
def start():
|
||||
return Blackhole()
|
||||
|
||||
config = [{
|
||||
'name': 'blackhole',
|
||||
'order': 30,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'blackhole',
|
||||
'label': 'Black hole',
|
||||
'description': 'Download the NZB/Torrent to a specific folder.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': True,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb,torrent',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Directory where the .nzb (or .torrent) file is saved to.',
|
||||
'default': getDownloadDir()
|
||||
},
|
||||
{
|
||||
'name': 'use_for',
|
||||
'label': 'Use for',
|
||||
'default': 'both',
|
||||
'type': 'dropdown',
|
||||
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
||||
},
|
||||
{
|
||||
'name': 'create_subdir',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Create a sub directory when saving the .nzb (or .torrent).',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,26 +1,19 @@
|
||||
from __future__ import with_statement
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import getDownloadDir
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Blackhole'
|
||||
|
||||
|
||||
class Blackhole(DownloaderBase):
|
||||
class Blackhole(Downloader):
|
||||
|
||||
protocol = ['nzb', 'torrent', 'torrent_magnet']
|
||||
status_support = False
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
def download(self, data = None, movie = None, filedata = None):
|
||||
if not movie: movie = {}
|
||||
if not data: data = {}
|
||||
|
||||
directory = self.conf('directory')
|
||||
@@ -40,7 +33,7 @@ class Blackhole(DownloaderBase):
|
||||
log.error('No nzb/torrent available: %s', data.get('url'))
|
||||
return False
|
||||
|
||||
file_name = self.createFileName(data, filedata, media)
|
||||
file_name = self.createFileName(data, filedata, movie)
|
||||
full_path = os.path.join(directory, file_name)
|
||||
|
||||
if self.conf('create_subdir'):
|
||||
@@ -58,10 +51,10 @@ class Blackhole(DownloaderBase):
|
||||
with open(full_path, 'wb') as f:
|
||||
f.write(filedata)
|
||||
os.chmod(full_path, Env.getPermission('file'))
|
||||
return self.downloadReturnId('')
|
||||
return True
|
||||
else:
|
||||
log.info('File %s already exists.', full_path)
|
||||
return self.downloadReturnId('')
|
||||
return True
|
||||
|
||||
except:
|
||||
log.error('Failed to download to blackhole %s', traceback.format_exc())
|
||||
@@ -73,20 +66,6 @@ class Blackhole(DownloaderBase):
|
||||
|
||||
return False
|
||||
|
||||
def test(self):
|
||||
directory = self.conf('directory')
|
||||
if directory and os.path.isdir(directory):
|
||||
|
||||
test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))
|
||||
|
||||
# Check if folder is writable
|
||||
self.createFile(test_file, 'This is a test file')
|
||||
if os.path.isfile(test_file):
|
||||
os.remove(test_file)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def getEnabledProtocol(self):
|
||||
if self.conf('use_for') == 'both':
|
||||
return super(Blackhole, self).getEnabledProtocol()
|
||||
@@ -105,54 +84,3 @@ class Blackhole(DownloaderBase):
|
||||
|
||||
return super(Blackhole, self).isEnabled(manual, data) and \
|
||||
((self.conf('use_for') in for_protocol))
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'blackhole',
|
||||
'order': 30,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'blackhole',
|
||||
'label': 'Black hole',
|
||||
'description': 'Download the NZB/Torrent to a specific folder. <em>Note: Seeding and copying/linking features do <strong>not</strong> work with Black hole</em>.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': True,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb,torrent',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Directory where the .nzb (or .torrent) file is saved to.',
|
||||
'default': getDownloadDir()
|
||||
},
|
||||
{
|
||||
'name': 'use_for',
|
||||
'label': 'Use for',
|
||||
'default': 'both',
|
||||
'type': 'dropdown',
|
||||
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
||||
},
|
||||
{
|
||||
'name': 'create_subdir',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Create a sub directory when saving the .nzb (or .torrent).',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
90
couchpotato/core/downloaders/deluge/__init__.py
Normal file
90
couchpotato/core/downloaders/deluge/__init__.py
Normal file
@@ -0,0 +1,90 @@
|
||||
from .main import Deluge
|
||||
|
||||
def start():
|
||||
return Deluge()
|
||||
|
||||
config = [{
|
||||
'name': 'deluge',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'deluge',
|
||||
'label': 'Deluge',
|
||||
'description': 'Use <a href="http://www.deluge-torrent.org/" target="_blank">Deluge</a> to download torrents.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:58846',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:58846</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Download to this directory. Keep empty for default Deluge download directory.',
|
||||
},
|
||||
{
|
||||
'name': 'completed_directory',
|
||||
'type': 'directory',
|
||||
'description': 'Move completed torrent to this directory. Keep empty for default Deluge options.',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'label',
|
||||
'description': 'Label to add to torrents in the Deluge UI.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'description': 'Remove the torrent from Deluge after it has finished seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,45 +1,38 @@
|
||||
from base64 import b64encode, b16encode, b32decode
|
||||
from bencode import bencode as benc, bdecode
|
||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, sp
|
||||
from couchpotato.core.helpers.variable import tryFloat
|
||||
from couchpotato.core.logger import CPLog
|
||||
from datetime import timedelta
|
||||
from hashlib import sha1
|
||||
from synchronousdeluge import DelugeClient
|
||||
import os.path
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bencode import bencode as benc, bdecode
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, sp
|
||||
from couchpotato.core.helpers.variable import tryFloat, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from synchronousdeluge import DelugeClient
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Deluge'
|
||||
|
||||
|
||||
class Deluge(DownloaderBase):
|
||||
class Deluge(Downloader):
|
||||
|
||||
protocol = ['torrent', 'torrent_magnet']
|
||||
log = CPLog(__name__)
|
||||
drpc = None
|
||||
|
||||
def connect(self, reconnect = False):
|
||||
def connect(self):
|
||||
# Load host from config and split out port.
|
||||
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||
host = self.conf('host').split(':')
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
|
||||
if not self.drpc or reconnect:
|
||||
if not self.drpc:
|
||||
self.drpc = DelugeRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
|
||||
|
||||
return self.drpc
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
if not data: data = {}
|
||||
|
||||
def download(self, data, movie, filedata = None):
|
||||
log.info('Sending "%s" (%s) to Deluge.', (data.get('name'), data.get('protocol')))
|
||||
|
||||
if not self.connect():
|
||||
@@ -80,7 +73,7 @@ class Deluge(DownloaderBase):
|
||||
if data.get('protocol') == 'torrent_magnet':
|
||||
remote_torrent = self.drpc.add_torrent_magnet(data.get('url'), options)
|
||||
else:
|
||||
filename = self.createFileName(data, filedata, media)
|
||||
filename = self.createFileName(data, filedata, movie)
|
||||
remote_torrent = self.drpc.add_torrent_file(filename, filedata, options)
|
||||
|
||||
if not remote_torrent:
|
||||
@@ -90,34 +83,24 @@ class Deluge(DownloaderBase):
|
||||
log.info('Torrent sent to Deluge successfully.')
|
||||
return self.downloadReturnId(remote_torrent)
|
||||
|
||||
def test(self):
|
||||
if self.connect(True) and self.drpc.test():
|
||||
return True
|
||||
return False
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
def getAllDownloadStatus(self):
|
||||
|
||||
log.debug('Checking Deluge download status.')
|
||||
|
||||
if not self.connect():
|
||||
return []
|
||||
return False
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
queue = self.drpc.get_alltorrents(ids)
|
||||
queue = self.drpc.get_alltorrents()
|
||||
|
||||
if not queue:
|
||||
log.debug('Nothing in queue or error')
|
||||
return []
|
||||
return False
|
||||
|
||||
for torrent_id in queue:
|
||||
torrent = queue[torrent_id]
|
||||
|
||||
if not 'hash' in torrent:
|
||||
# When given a list of ids, deluge will return an empty item for a non-existant torrent.
|
||||
continue
|
||||
|
||||
log.debug('name=%s / id=%s / save_path=%s / move_on_completed=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_on_completed'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
|
||||
log.debug('name=%s / id=%s / save_path=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
|
||||
|
||||
# Deluge has no easy way to work out if a torrent is stalled or failing.
|
||||
#status = 'failed'
|
||||
@@ -147,7 +130,7 @@ class Deluge(DownloaderBase):
|
||||
'seed_ratio': torrent['ratio'],
|
||||
'timeleft': str(timedelta(seconds = torrent['eta'])),
|
||||
'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
|
||||
'files': torrent_files,
|
||||
'files': '|'.join(torrent_files),
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
@@ -166,7 +149,6 @@ class Deluge(DownloaderBase):
|
||||
log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
||||
return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
|
||||
|
||||
|
||||
class DelugeRPC(object):
|
||||
|
||||
host = 'localhost'
|
||||
@@ -187,13 +169,6 @@ class DelugeRPC(object):
|
||||
self.client = DelugeClient()
|
||||
self.client.connect(self.host, int(self.port), self.username, self.password)
|
||||
|
||||
def test(self):
|
||||
try:
|
||||
self.connect()
|
||||
except:
|
||||
return False
|
||||
return True
|
||||
|
||||
def add_torrent_magnet(self, torrent, options):
|
||||
torrent_id = False
|
||||
try:
|
||||
@@ -204,7 +179,7 @@ class DelugeRPC(object):
|
||||
|
||||
if torrent_id and options['label']:
|
||||
self.client.label.set_torrent(torrent_id, options['label']).get()
|
||||
except Exception as err:
|
||||
except Exception, err:
|
||||
log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
|
||||
finally:
|
||||
if self.client:
|
||||
@@ -222,7 +197,7 @@ class DelugeRPC(object):
|
||||
|
||||
if torrent_id and options['label']:
|
||||
self.client.label.set_torrent(torrent_id, options['label']).get()
|
||||
except Exception as err:
|
||||
except Exception, err:
|
||||
log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
|
||||
finally:
|
||||
if self.client:
|
||||
@@ -230,12 +205,12 @@ class DelugeRPC(object):
|
||||
|
||||
return torrent_id
|
||||
|
||||
def get_alltorrents(self, ids):
|
||||
def get_alltorrents(self):
|
||||
ret = False
|
||||
try:
|
||||
self.connect()
|
||||
ret = self.client.core.get_torrents_status({'id': ids}, ('name', 'hash', 'save_path', 'move_completed_path', 'progress', 'state', 'eta', 'ratio', 'stop_ratio', 'is_seed', 'is_finished', 'paused', 'move_on_completed', 'files')).get()
|
||||
except Exception as err:
|
||||
ret = self.client.core.get_torrents_status({}, {}).get()
|
||||
except Exception, err:
|
||||
log.error('Failed to get all torrents: %s %s', (err, traceback.format_exc()))
|
||||
finally:
|
||||
if self.client:
|
||||
@@ -246,7 +221,7 @@ class DelugeRPC(object):
|
||||
try:
|
||||
self.connect()
|
||||
self.client.core.pause_torrent(torrent_ids).get()
|
||||
except Exception as err:
|
||||
except Exception, err:
|
||||
log.error('Failed to pause torrent: %s %s', (err, traceback.format_exc()))
|
||||
finally:
|
||||
if self.client:
|
||||
@@ -256,7 +231,7 @@ class DelugeRPC(object):
|
||||
try:
|
||||
self.connect()
|
||||
self.client.core.resume_torrent(torrent_ids).get()
|
||||
except Exception as err:
|
||||
except Exception, err:
|
||||
log.error('Failed to resume torrent: %s %s', (err, traceback.format_exc()))
|
||||
finally:
|
||||
if self.client:
|
||||
@@ -267,7 +242,7 @@ class DelugeRPC(object):
|
||||
try:
|
||||
self.connect()
|
||||
ret = self.client.core.remove_torrent(torrent_id, remove_local_data).get()
|
||||
except Exception as err:
|
||||
except Exception, err:
|
||||
log.error('Failed to remove torrent: %s %s', (err, traceback.format_exc()))
|
||||
finally:
|
||||
if self.client:
|
||||
@@ -295,90 +270,3 @@ class DelugeRPC(object):
|
||||
return torrent_hash
|
||||
|
||||
return False
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'deluge',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'deluge',
|
||||
'label': 'Deluge',
|
||||
'description': 'Use <a href="http://www.deluge-torrent.org/" target="_blank">Deluge</a> to download torrents.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:58846',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:58846</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Download to this directory. Keep empty for default Deluge download directory.',
|
||||
},
|
||||
{
|
||||
'name': 'completed_directory',
|
||||
'type': 'directory',
|
||||
'description': 'Move completed torrent to this directory. Keep empty for default Deluge options.',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'label',
|
||||
'description': 'Label to add to torrents in the Deluge UI.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'description': 'Remove the torrent from Deluge after it has finished seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,293 +0,0 @@
|
||||
from base64 import standard_b64encode
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import traceback
|
||||
import xmlrpclib
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import ss, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, md5, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NZBGet'
|
||||
|
||||
|
||||
class NZBGet(DownloaderBase):
|
||||
|
||||
protocol = ['nzb']
|
||||
rpc = 'xmlrpc'
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
if not data: data = {}
|
||||
|
||||
if not filedata:
|
||||
log.error('Unable to get NZB file: %s', traceback.format_exc())
|
||||
return False
|
||||
|
||||
log.info('Sending "%s" to NZBGet.', data.get('name'))
|
||||
|
||||
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
|
||||
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
if re.search(r"^0", rpc.version()):
|
||||
xml_response = rpc.append(nzb_name, self.conf('category'), False, standard_b64encode(filedata.strip()))
|
||||
else:
|
||||
xml_response = rpc.append(nzb_name, self.conf('category'), tryInt(self.conf('priority')), False, standard_b64encode(filedata.strip()))
|
||||
|
||||
if xml_response:
|
||||
log.info('NZB sent successfully to NZBGet')
|
||||
nzb_id = md5(data['url']) # about as unique as they come ;)
|
||||
couchpotato_id = "couchpotato=" + nzb_id
|
||||
groups = rpc.listgroups()
|
||||
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
|
||||
confirmed = rpc.editqueue("GroupSetParameter", 0, couchpotato_id, file_id)
|
||||
if confirmed:
|
||||
log.debug('couchpotato parameter set in nzbget download')
|
||||
return self.downloadReturnId(nzb_id)
|
||||
else:
|
||||
log.error('NZBGet could not add %s to the queue.', nzb_name)
|
||||
return False
|
||||
|
||||
def test(self):
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to test connection'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
|
||||
log.debug('Checking NZBGet download status.')
|
||||
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return []
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return []
|
||||
|
||||
# Get NZBGet data
|
||||
try:
|
||||
status = rpc.status()
|
||||
groups = rpc.listgroups()
|
||||
queue = rpc.postqueue(0)
|
||||
history = rpc.history()
|
||||
except:
|
||||
log.error('Failed getting data: %s', traceback.format_exc(1))
|
||||
return []
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
for nzb in groups:
|
||||
try:
|
||||
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
|
||||
except:
|
||||
nzb_id = nzb['NZBID']
|
||||
|
||||
if nzb_id in ids:
|
||||
log.debug('Found %s in NZBGet download queue', nzb['NZBFilename'])
|
||||
timeleft = -1
|
||||
try:
|
||||
if nzb['ActiveDownloads'] > 0 and nzb['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
|
||||
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
|
||||
except:
|
||||
pass
|
||||
|
||||
release_downloads.append({
|
||||
'id': nzb_id,
|
||||
'name': nzb['NZBFilename'],
|
||||
'original_status': 'DOWNLOADING' if nzb['ActiveDownloads'] > 0 else 'QUEUED',
|
||||
# Seems to have no native API function for time left. This will return the time left after NZBGet started downloading this item
|
||||
'timeleft': timeleft,
|
||||
})
|
||||
|
||||
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
|
||||
if nzb['NZBID'] in ids:
|
||||
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
|
||||
release_downloads.append({
|
||||
'id': nzb['NZBID'],
|
||||
'name': nzb['NZBFilename'],
|
||||
'original_status': nzb['Stage'],
|
||||
'timeleft': str(timedelta(seconds = 0)) if not status['PostPaused'] else -1,
|
||||
})
|
||||
|
||||
for nzb in history:
|
||||
try:
|
||||
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
|
||||
except:
|
||||
nzb_id = nzb['NZBID']
|
||||
|
||||
if nzb_id in ids:
|
||||
log.debug('Found %s in NZBGet history. ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
|
||||
release_downloads.append({
|
||||
'id': nzb_id,
|
||||
'name': nzb['NZBFilename'],
|
||||
'status': 'completed' if nzb['ParStatus'] in ['SUCCESS', 'NONE'] and nzb['ScriptStatus'] in ['SUCCESS', 'NONE'] else 'failed',
|
||||
'original_status': nzb['ParStatus'] + ', ' + nzb['ScriptStatus'],
|
||||
'timeleft': str(timedelta(seconds = 0)),
|
||||
'folder': sp(nzb['DestDir'])
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
|
||||
rpc = self.getRPC()
|
||||
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError as e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
try:
|
||||
history = rpc.history()
|
||||
nzb_id = None
|
||||
path = None
|
||||
|
||||
for hist in history:
|
||||
for param in hist['Parameters']:
|
||||
if param['Name'] == 'couchpotato' and param['Value'] == release_download['id']:
|
||||
nzb_id = hist['ID']
|
||||
path = hist['DestDir']
|
||||
|
||||
if nzb_id and path and rpc.editqueue('HistoryDelete', 0, "", [tryInt(nzb_id)]):
|
||||
shutil.rmtree(path, True)
|
||||
except:
|
||||
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def getRPC(self):
|
||||
url = cleanHost(host = self.conf('host'), ssl = self.conf('ssl'), username = self.conf('username'), password = self.conf('password')) + self.rpc
|
||||
return xmlrpclib.ServerProxy(url)
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'nzbget',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'nzbget',
|
||||
'label': 'NZBGet',
|
||||
'description': 'Use <a href="http://nzbget.sourceforge.net/Main_Page" target="_blank">NZBGet</a> to download NZBs.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:6789',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'ssl',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': 'nzbget',
|
||||
'advanced': True,
|
||||
'description': 'Set a different username to connect. Default: nzbget',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
'description': 'Default NZBGet password is <i>tegbzn6789</i>',
|
||||
},
|
||||
{
|
||||
'name': 'category',
|
||||
'default': 'Movies',
|
||||
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'priority',
|
||||
'advanced': True,
|
||||
'default': '0',
|
||||
'type': 'dropdown',
|
||||
'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100)],
|
||||
'description': 'Only change this if you are using NZBget 9.0 or higher',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
69
couchpotato/core/downloaders/nzbget/__init__.py
Normal file
69
couchpotato/core/downloaders/nzbget/__init__.py
Normal file
@@ -0,0 +1,69 @@
|
||||
from .main import NZBGet
|
||||
|
||||
def start():
|
||||
return NZBGet()
|
||||
|
||||
config = [{
|
||||
'name': 'nzbget',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'nzbget',
|
||||
'label': 'NZBGet',
|
||||
'description': 'Use <a href="http://nzbget.sourceforge.net/Main_Page" target="_blank">NZBGet</a> to download NZBs.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:6789',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': 'nzbget',
|
||||
'advanced': True,
|
||||
'description': 'Set a different username to connect. Default: nzbget',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
'description': 'Default NZBGet password is <i>tegbzn6789</i>',
|
||||
},
|
||||
{
|
||||
'name': 'category',
|
||||
'default': 'Movies',
|
||||
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'priority',
|
||||
'advanced': True,
|
||||
'default': '0',
|
||||
'type': 'dropdown',
|
||||
'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100)],
|
||||
'description': 'Only change this if you are using NZBget 9.0 or higher',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
192
couchpotato/core/downloaders/nzbget/main.py
Normal file
192
couchpotato/core/downloaders/nzbget/main.py
Normal file
@@ -0,0 +1,192 @@
|
||||
from base64 import standard_b64encode
|
||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import ss, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, md5
|
||||
from couchpotato.core.logger import CPLog
|
||||
from datetime import timedelta
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import traceback
|
||||
import xmlrpclib
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class NZBGet(Downloader):
|
||||
|
||||
protocol = ['nzb']
|
||||
|
||||
url = 'http://%(username)s:%(password)s@%(host)s/xmlrpc'
|
||||
|
||||
def download(self, data = None, movie = None, filedata = None):
|
||||
if not movie: movie = {}
|
||||
if not data: data = {}
|
||||
|
||||
if not filedata:
|
||||
log.error('Unable to get NZB file: %s', traceback.format_exc())
|
||||
return False
|
||||
|
||||
log.info('Sending "%s" to NZBGet.', data.get('name'))
|
||||
|
||||
url = self.url % {'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
|
||||
nzb_name = ss('%s.nzb' % self.createNzbName(data, movie))
|
||||
|
||||
rpc = xmlrpclib.ServerProxy(url)
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError, e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
if re.search(r"^0", rpc.version()):
|
||||
xml_response = rpc.append(nzb_name, self.conf('category'), False, standard_b64encode(filedata.strip()))
|
||||
else:
|
||||
xml_response = rpc.append(nzb_name, self.conf('category'), tryInt(self.conf('priority')), False, standard_b64encode(filedata.strip()))
|
||||
|
||||
if xml_response:
|
||||
log.info('NZB sent successfully to NZBGet')
|
||||
nzb_id = md5(data['url']) # about as unique as they come ;)
|
||||
couchpotato_id = "couchpotato=" + nzb_id
|
||||
groups = rpc.listgroups()
|
||||
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
|
||||
confirmed = rpc.editqueue("GroupSetParameter", 0, couchpotato_id, file_id)
|
||||
if confirmed:
|
||||
log.debug('couchpotato parameter set in nzbget download')
|
||||
return self.downloadReturnId(nzb_id)
|
||||
else:
|
||||
log.error('NZBGet could not add %s to the queue.', nzb_name)
|
||||
return False
|
||||
|
||||
def getAllDownloadStatus(self):
|
||||
|
||||
log.debug('Checking NZBGet download status.')
|
||||
|
||||
url = self.url % {'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
|
||||
|
||||
rpc = xmlrpclib.ServerProxy(url)
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError, e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
# Get NZBGet data
|
||||
try:
|
||||
status = rpc.status()
|
||||
groups = rpc.listgroups()
|
||||
queue = rpc.postqueue(0)
|
||||
history = rpc.history()
|
||||
except:
|
||||
log.error('Failed getting data: %s', traceback.format_exc(1))
|
||||
return False
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
for nzb in groups:
|
||||
log.debug('Found %s in NZBGet download queue', nzb['NZBFilename'])
|
||||
try:
|
||||
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
|
||||
except:
|
||||
nzb_id = nzb['NZBID']
|
||||
|
||||
|
||||
timeleft = -1
|
||||
try:
|
||||
if nzb['ActiveDownloads'] > 0 and nzb['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
|
||||
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
|
||||
except:
|
||||
pass
|
||||
|
||||
release_downloads.append({
|
||||
'id': nzb_id,
|
||||
'name': nzb['NZBFilename'],
|
||||
'original_status': 'DOWNLOADING' if nzb['ActiveDownloads'] > 0 else 'QUEUED',
|
||||
# Seems to have no native API function for time left. This will return the time left after NZBGet started downloading this item
|
||||
'timeleft': timeleft,
|
||||
})
|
||||
|
||||
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
|
||||
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
|
||||
release_downloads.append({
|
||||
'id': nzb['NZBID'],
|
||||
'name': nzb['NZBFilename'],
|
||||
'original_status': nzb['Stage'],
|
||||
'timeleft': str(timedelta(seconds = 0)) if not status['PostPaused'] else -1,
|
||||
})
|
||||
|
||||
for nzb in history:
|
||||
log.debug('Found %s in NZBGet history. ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
|
||||
try:
|
||||
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
|
||||
except:
|
||||
nzb_id = nzb['NZBID']
|
||||
release_downloads.append({
|
||||
'id': nzb_id,
|
||||
'name': nzb['NZBFilename'],
|
||||
'status': 'completed' if nzb['ParStatus'] in ['SUCCESS', 'NONE'] and nzb['ScriptStatus'] in ['SUCCESS', 'NONE'] else 'failed',
|
||||
'original_status': nzb['ParStatus'] + ', ' + nzb['ScriptStatus'],
|
||||
'timeleft': str(timedelta(seconds = 0)),
|
||||
'folder': sp(nzb['DestDir'])
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
|
||||
url = self.url % {'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
|
||||
|
||||
rpc = xmlrpclib.ServerProxy(url)
|
||||
try:
|
||||
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
|
||||
log.debug('Successfully connected to NZBGet')
|
||||
else:
|
||||
log.info('Successfully connected to NZBGet, but unable to send a message')
|
||||
except socket.error:
|
||||
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
|
||||
return False
|
||||
except xmlrpclib.ProtocolError, e:
|
||||
if e.errcode == 401:
|
||||
log.error('Password is incorrect.')
|
||||
else:
|
||||
log.error('Protocol Error: %s', e)
|
||||
return False
|
||||
|
||||
try:
|
||||
history = rpc.history()
|
||||
nzb_id = None
|
||||
path = None
|
||||
|
||||
for hist in history:
|
||||
for param in hist['Parameters']:
|
||||
if param['Name'] == 'couchpotato' and param['Value'] == release_download['id']:
|
||||
nzb_id = hist['ID']
|
||||
path = hist['DestDir']
|
||||
|
||||
if nzb_id and path and rpc.editqueue('HistoryDelete', 0, "", [tryInt(nzb_id)]):
|
||||
shutil.rmtree(path, True)
|
||||
except:
|
||||
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||
return False
|
||||
|
||||
return True
|
||||
@@ -1,220 +0,0 @@
|
||||
from base64 import b64encode
|
||||
import os
|
||||
from uuid import uuid4
|
||||
import hashlib
|
||||
import traceback
|
||||
|
||||
from requests import HTTPError
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, sp
|
||||
from couchpotato.core.helpers.variable import cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'NZBVortex'
|
||||
|
||||
|
||||
class NZBVortex(DownloaderBase):
|
||||
|
||||
protocol = ['nzb']
|
||||
api_level = None
|
||||
session_id = None
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
if not data: data = {}
|
||||
|
||||
# Send the nzb
|
||||
try:
|
||||
nzb_filename = self.createFileName(data, filedata, media, unique_tag = True)
|
||||
response = self.call('nzb/add', files = {'file': (nzb_filename, filedata, 'application/octet-stream')}, parameters = {
|
||||
'name': nzb_filename,
|
||||
'groupname': self.conf('group')
|
||||
})
|
||||
|
||||
if response and response.get('result', '').lower() == 'ok':
|
||||
return self.downloadReturnId(nzb_filename)
|
||||
|
||||
log.error('Something went wrong sending the NZB file. Response: %s', response)
|
||||
return False
|
||||
except:
|
||||
log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
|
||||
return False
|
||||
|
||||
def test(self):
|
||||
try:
|
||||
login_result = self.login()
|
||||
except:
|
||||
return False
|
||||
|
||||
return login_result
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
|
||||
raw_statuses = self.call('nzb')
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
for nzb in raw_statuses.get('nzbs', []):
|
||||
nzb_id = os.path.basename(nzb['nzbFileName'])
|
||||
if nzb_id in ids:
|
||||
|
||||
# Check status
|
||||
status = 'busy'
|
||||
if nzb['state'] == 20:
|
||||
status = 'completed'
|
||||
elif nzb['state'] in [21, 22, 24]:
|
||||
status = 'failed'
|
||||
|
||||
release_downloads.append({
|
||||
'temp_id': nzb['id'],
|
||||
'id': nzb_id,
|
||||
'name': nzb['uiTitle'],
|
||||
'status': status,
|
||||
'original_status': nzb['state'],
|
||||
'timeleft': -1,
|
||||
'folder': sp(nzb['destinationPath']),
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
|
||||
try:
|
||||
self.call('nzb/%s/cancel' % release_download['temp_id'])
|
||||
except:
|
||||
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def login(self):
    """Authenticate against the NZBVortex API.

    Uses the nonce/cnonce challenge scheme: fetch a server nonce, generate
    a client nonce, and send base64(sha256("nonce:cnonce:apikey")) as proof
    of the api key. On success the session id is stored on the instance for
    subsequent `call`s.

    Returns:
        bool: True when the login succeeded, False otherwise.
    """

    nonce = self.call('auth/nonce', auth = False).get('authNonce')
    cnonce = uuid4().hex
    hashed = b64encode(hashlib.sha256('%s:%s:%s' % (nonce, cnonce, self.conf('api_key'))).digest())

    params = {
        'nonce': nonce,
        'cnonce': cnonce,
        'hash': hashed
    }

    login_data = self.call('auth/login', parameters = params, auth = False)

    # Save for later
    if login_data.get('loginResult') == 'successful':
        self.session_id = login_data.get('sessionID')
        return True

    # Fixed typo in user-facing message: "check you" -> "check your"
    log.error('Login failed, please check your api-key')
    return False
|
||||
|
||||
def call(self, call, parameters = None, is_repeat = False, auth = True, *args, **kwargs):
    """Do a request against the NZBVortex API and return the decoded JSON.

    Args:
        call (str): API endpoint path, e.g. 'nzb' or 'auth/nonce'.
        parameters (dict): Query parameters; the session id is added automatically.
        is_repeat (bool): Internal guard so a 403-triggered re-login only retries once.
        auth (bool): When True, log in first if there is no session yet.

    Returns:
        dict: Parsed response data, or {} when the request failed.
    """

    # Login first
    if not parameters: parameters = {}
    if not self.session_id and auth:
        self.login()

    # Always add session id to request
    if self.session_id:
        parameters['sessionid'] = self.session_id

    params = tryUrlencode(parameters)

    url = cleanHost(self.conf('host')) + 'api/' + call

    try:
        data = self.getJsonData('%s%s' % (url, '?' + params if params else ''), *args, cache_timeout = 0, show_error = False, **kwargs)

        if data:
            return data
    except HTTPError as e:
        sc = e.response.status_code
        if sc == 403:
            # Session expired: login again and retry once.
            # Bug fix: propagate `auth` on the retry; previously an
            # auth=False call that hit a 403 would retry with auth=True.
            if not is_repeat:
                self.login()
                return self.call(call, parameters = parameters, is_repeat = True, auth = auth, **kwargs)

        log.error('Failed to parse %s: %s', (self.getName(), traceback.format_exc()))
    except:
        log.error('Failed to parse %s: %s', (self.getName(), traceback.format_exc()))

    return {}
|
||||
|
||||
def getApiLevel(self):
    """Fetch and cache the NZBVortex API level.

    Returns:
        float or None: The api level, or None when it could not be determined
        (used as a falsy "not available" signal by `isEnabled`).
    """

    if not self.api_level:

        try:
            data = self.call('app/apilevel', auth = False)

            # Bug fix: `call` returns {} on failure, so data.get('apilevel')
            # can be None; float(None) would raise an uncaught TypeError.
            api_level = data.get('apilevel')
            if api_level is not None:
                self.api_level = float(api_level)
        except HTTPError as e:
            sc = e.response.status_code
            if sc == 403:
                log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
            else:
                log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))

    return self.api_level
|
||||
|
||||
def isEnabled(self, manual = False, data = None):
    """Enabled only when the base checks pass and the API level is known."""
    data = data or {}
    return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()
|
||||
|
||||
|
||||
# Settings definition for the NZBVortex downloader, rendered on the
# "Downloaders" tab of the CouchPotato web UI.
config = [{
    'name': 'nzbvortex',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'nzbvortex',
            'label': 'NZBVortex',
            'description': 'Use <a href="http://www.nzbvortex.com/landing/" target="_blank">NZBVortex</a> to download NZBs.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'https://localhost:4321',
                    'description': 'Hostname with port. Usually <strong>https://localhost:4321</strong>',
                },
                {
                    'name': 'api_key',
                    'label': 'Api Key',
                },
                {
                    'name': 'group',
                    'label': 'Group',
                    'description': 'The group CP places the nzb in. Make sure to create it in NZBVortex.',
                },
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
|
||||
48
couchpotato/core/downloaders/nzbvortex/__init__.py
Normal file
48
couchpotato/core/downloaders/nzbvortex/__init__.py
Normal file
@@ -0,0 +1,48 @@
|
||||
from .main import NZBVortex
|
||||
|
||||
def start():
|
||||
return NZBVortex()
|
||||
|
||||
config = [{
|
||||
'name': 'nzbvortex',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'nzbvortex',
|
||||
'label': 'NZBVortex',
|
||||
'description': 'Use <a href="http://www.nzbvortex.com/landing/" target="_blank">NZBVortex</a> to download NZBs.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'https://localhost:4321',
|
||||
},
|
||||
{
|
||||
'name': 'api_key',
|
||||
'label': 'Api Key',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': False,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
178
couchpotato/core/downloaders/nzbvortex/main.py
Normal file
178
couchpotato/core/downloaders/nzbvortex/main.py
Normal file
@@ -0,0 +1,178 @@
|
||||
from base64 import b64encode
|
||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, sp
|
||||
from couchpotato.core.helpers.variable import cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from urllib2 import URLError
|
||||
from uuid import uuid4
|
||||
import hashlib
|
||||
import httplib
|
||||
import json
|
||||
import socket
|
||||
import ssl
|
||||
import sys
|
||||
import traceback
|
||||
import urllib2
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class NZBVortex(Downloader):
|
||||
|
||||
protocol = ['nzb']
|
||||
api_level = None
|
||||
session_id = None
|
||||
|
||||
def download(self, data = None, movie = None, filedata = None):
|
||||
if not movie: movie = {}
|
||||
if not data: data = {}
|
||||
|
||||
# Send the nzb
|
||||
try:
|
||||
nzb_filename = self.createFileName(data, filedata, movie)
|
||||
self.call('nzb/add', params = {'file': (nzb_filename, filedata)}, multipart = True)
|
||||
|
||||
raw_statuses = self.call('nzb')
|
||||
nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if nzb['name'] == nzb_filename][0]
|
||||
return self.downloadReturnId(nzb_id)
|
||||
except:
|
||||
log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
|
||||
return False
|
||||
|
||||
def getAllDownloadStatus(self):
|
||||
|
||||
raw_statuses = self.call('nzb')
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
for nzb in raw_statuses.get('nzbs', []):
|
||||
|
||||
# Check status
|
||||
status = 'busy'
|
||||
if nzb['state'] == 20:
|
||||
status = 'completed'
|
||||
elif nzb['state'] in [21, 22, 24]:
|
||||
status = 'failed'
|
||||
|
||||
release_downloads.append({
|
||||
'id': nzb['id'],
|
||||
'name': nzb['uiTitle'],
|
||||
'status': status,
|
||||
'original_status': nzb['state'],
|
||||
'timeleft':-1,
|
||||
'folder': sp(nzb['destinationPath']),
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
|
||||
try:
|
||||
self.call('nzb/%s/cancel' % release_download['id'])
|
||||
except:
|
||||
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def login(self):
|
||||
|
||||
nonce = self.call('auth/nonce', auth = False).get('authNonce')
|
||||
cnonce = uuid4().hex
|
||||
hashed = b64encode(hashlib.sha256('%s:%s:%s' % (nonce, cnonce, self.conf('api_key'))).digest())
|
||||
|
||||
params = {
|
||||
'nonce': nonce,
|
||||
'cnonce': cnonce,
|
||||
'hash': hashed
|
||||
}
|
||||
|
||||
login_data = self.call('auth/login', parameters = params, auth = False)
|
||||
|
||||
# Save for later
|
||||
if login_data.get('loginResult') == 'successful':
|
||||
self.session_id = login_data.get('sessionID')
|
||||
return True
|
||||
|
||||
log.error('Login failed, please check you api-key')
|
||||
return False
|
||||
|
||||
|
||||
def call(self, call, parameters = None, repeat = False, auth = True, *args, **kwargs):
|
||||
|
||||
# Login first
|
||||
if not parameters: parameters = {}
|
||||
if not self.session_id and auth:
|
||||
self.login()
|
||||
|
||||
# Always add session id to request
|
||||
if self.session_id:
|
||||
parameters['sessionid'] = self.session_id
|
||||
|
||||
params = tryUrlencode(parameters)
|
||||
|
||||
url = cleanHost(self.conf('host')) + 'api/' + call
|
||||
url_opener = urllib2.build_opener(HTTPSHandler())
|
||||
|
||||
try:
|
||||
data = self.urlopen('%s?%s' % (url, params), opener = url_opener, *args, **kwargs)
|
||||
|
||||
if data:
|
||||
return json.loads(data)
|
||||
except URLError, e:
|
||||
if hasattr(e, 'code') and e.code == 403:
|
||||
# Try login and do again
|
||||
if not repeat:
|
||||
self.login()
|
||||
return self.call(call, parameters = parameters, repeat = True, **kwargs)
|
||||
|
||||
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||
except:
|
||||
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
return {}
|
||||
|
||||
def getApiLevel(self):
|
||||
|
||||
if not self.api_level:
|
||||
|
||||
url = cleanHost(self.conf('host')) + 'api/app/apilevel'
|
||||
url_opener = urllib2.build_opener(HTTPSHandler())
|
||||
|
||||
try:
|
||||
data = self.urlopen(url, opener = url_opener, show_error = False)
|
||||
self.api_level = float(json.loads(data).get('apilevel'))
|
||||
except URLError, e:
|
||||
if hasattr(e, 'code') and e.code == 403:
|
||||
log.error('This version of NZBVortex isn\'t supported. Please update to 2.8.6 or higher')
|
||||
else:
|
||||
log.error('NZBVortex doesn\'t seem to be running or maybe the remote option isn\'t enabled yet: %s', traceback.format_exc(1))
|
||||
|
||||
return self.api_level
|
||||
|
||||
def isEnabled(self, manual = False, data = None):
|
||||
if not data: data = {}
|
||||
return super(NZBVortex, self).isEnabled(manual, data) and self.getApiLevel()
|
||||
|
||||
|
||||
class HTTPSConnection(httplib.HTTPSConnection):
|
||||
def __init__(self, *args, **kwargs):
|
||||
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
|
||||
|
||||
def connect(self):
|
||||
sock = socket.create_connection((self.host, self.port), self.timeout)
|
||||
if sys.version_info < (2, 6, 7):
|
||||
if hasattr(self, '_tunnel_host'):
|
||||
self.sock = sock
|
||||
self._tunnel()
|
||||
else:
|
||||
if self._tunnel_host:
|
||||
self.sock = sock
|
||||
self._tunnel()
|
||||
|
||||
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version = ssl.PROTOCOL_TLSv1)
|
||||
|
||||
class HTTPSHandler(urllib2.HTTPSHandler):
|
||||
def https_open(self, req):
|
||||
return self.do_open(HTTPSConnection, req)
|
||||
@@ -1,111 +0,0 @@
|
||||
from __future__ import with_statement
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Pneumatic'
|
||||
|
||||
|
||||
class Pneumatic(DownloaderBase):
    """Downloader that writes .strm files for the XBMC Pneumatic plugin
    instead of sending the NZB to a download client directly."""

    # Only plain NZBs are supported.
    protocol = ['nzb']
    # Template for the .strm file contents: (nzb path, nzb name).
    strm_syntax = 'plugin://plugin.program.pneumatic/?mode=strm&type=add_file&nzb=%s&nzbname=%s'
    # Status polling is not possible for this downloader.
    status_support = False

    def download(self, data = None, media = None, filedata = None):
        """Save the nzb to the configured directory and write a matching
        .strm file next to it.

        Returns a downloadReturnId('') marker on success, False otherwise.
        """
        if not media: media = {}
        if not data: data = {}

        directory = self.conf('directory')
        if not directory or not os.path.isdir(directory):
            log.error('No directory set for .strm downloads.')
        else:
            try:
                # Anything under 50 bytes can't be a real nzb payload.
                if not filedata or len(filedata) < 50:
                    log.error('No nzb available!')
                    return False

                full_path = os.path.join(directory, self.createFileName(data, filedata, media))

                try:
                    if not os.path.isfile(full_path):
                        log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
                        with open(full_path, 'wb') as f:
                            f.write(filedata)

                        nzb_name = self.createNzbName(data, media)
                        strm_path = os.path.join(directory, nzb_name)

                        # Write the .strm companion file pointing at the nzb.
                        strm_file = open(strm_path + '.strm', 'wb')
                        strmContent = self.strm_syntax % (full_path, nzb_name)
                        strm_file.write(strmContent)
                        strm_file.close()

                        return self.downloadReturnId('')

                    else:
                        # Already downloaded earlier; treat as success.
                        log.info('File %s already exists.', full_path)
                        return self.downloadReturnId('')

                except:
                    # Best-effort: log and fall through to the outer failure path.
                    log.error('Failed to download .strm: %s', traceback.format_exc())
                    pass

            except:
                log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
                return False
        return False

    def test(self):
        """Verify the configured directory exists and is writable by
        creating and removing a small test file."""
        directory = self.conf('directory')
        if directory and os.path.isdir(directory):

            test_file = sp(os.path.join(directory, 'couchpotato_test.txt'))

            # Check if folder is writable
            self.createFile(test_file, 'This is a test file')
            if os.path.isfile(test_file):
                os.remove(test_file)
                return True

        return False
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'pneumatic',
|
||||
'order': 30,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'pneumatic',
|
||||
'label': 'Pneumatic',
|
||||
'description': 'Use <a href="http://forum.xbmc.org/showthread.php?tid=97657" target="_blank">Pneumatic</a> to download .strm files.',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Directory where the .strm file is saved to.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
37
couchpotato/core/downloaders/pneumatic/__init__.py
Normal file
37
couchpotato/core/downloaders/pneumatic/__init__.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from .main import Pneumatic
|
||||
|
||||
def start():
|
||||
return Pneumatic()
|
||||
|
||||
config = [{
|
||||
'name': 'pneumatic',
|
||||
'order': 30,
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'pneumatic',
|
||||
'label': 'Pneumatic',
|
||||
'description': 'Use <a href="http://forum.xbmc.org/showthread.php?tid=97657" target="_blank">Pneumatic</a> to download .strm files.',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Directory where the .strm file is saved to.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
57
couchpotato/core/downloaders/pneumatic/main.py
Normal file
57
couchpotato/core/downloaders/pneumatic/main.py
Normal file
@@ -0,0 +1,57 @@
|
||||
from __future__ import with_statement
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.logger import CPLog
|
||||
import os
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Pneumatic(Downloader):
|
||||
|
||||
protocol = ['nzb']
|
||||
strm_syntax = 'plugin://plugin.program.pneumatic/?mode=strm&type=add_file&nzb=%s&nzbname=%s'
|
||||
|
||||
def download(self, data = None, movie = None, filedata = None):
|
||||
if not movie: movie = {}
|
||||
if not data: data = {}
|
||||
|
||||
directory = self.conf('directory')
|
||||
if not directory or not os.path.isdir(directory):
|
||||
log.error('No directory set for .strm downloads.')
|
||||
else:
|
||||
try:
|
||||
if not filedata or len(filedata) < 50:
|
||||
log.error('No nzb available!')
|
||||
return False
|
||||
|
||||
fullPath = os.path.join(directory, self.createFileName(data, filedata, movie))
|
||||
|
||||
try:
|
||||
if not os.path.isfile(fullPath):
|
||||
log.info('Downloading %s to %s.', (data.get('protocol'), fullPath))
|
||||
with open(fullPath, 'wb') as f:
|
||||
f.write(filedata)
|
||||
|
||||
nzb_name = self.createNzbName(data, movie)
|
||||
strm_path = os.path.join(directory, nzb_name)
|
||||
|
||||
strm_file = open(strm_path + '.strm', 'wb')
|
||||
strmContent = self.strm_syntax % (fullPath, nzb_name)
|
||||
strm_file.write(strmContent)
|
||||
strm_file.close()
|
||||
|
||||
return True
|
||||
|
||||
else:
|
||||
log.info('File %s already exists.', fullPath)
|
||||
return True
|
||||
|
||||
except:
|
||||
log.error('Failed to download .strm: %s', traceback.format_exc())
|
||||
pass
|
||||
|
||||
except:
|
||||
log.info('Failed to download file %s: %s', (data.get('name'), traceback.format_exc()))
|
||||
return False
|
||||
return False
|
||||
@@ -1,245 +0,0 @@
|
||||
from base64 import b16encode, b32decode
|
||||
from hashlib import sha1
|
||||
import os
|
||||
|
||||
from bencode import bencode, bdecode
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from qbittorrent.client import QBittorrentClient
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'qBittorrent'
|
||||
|
||||
|
||||
class qBittorrent(DownloaderBase):
|
||||
|
||||
protocol = ['torrent', 'torrent_magnet']
|
||||
qb = None
|
||||
|
||||
def __init__(self):
|
||||
super(qBittorrent, self).__init__()
|
||||
|
||||
def connect(self):
|
||||
if self.qb is not None:
|
||||
return self.qb
|
||||
|
||||
url = cleanHost(self.conf('host'), protocol = True, ssl = False)
|
||||
|
||||
if self.conf('username') and self.conf('password'):
|
||||
self.qb = QBittorrentClient(
|
||||
url,
|
||||
username = self.conf('username'),
|
||||
password = self.conf('password')
|
||||
)
|
||||
else:
|
||||
self.qb = QBittorrentClient(url)
|
||||
|
||||
return self.qb
|
||||
|
||||
def test(self):
|
||||
if self.connect():
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
if not data: data = {}
|
||||
|
||||
log.debug('Sending "%s" to qBittorrent.', (data.get('name')))
|
||||
|
||||
if not self.connect():
|
||||
return False
|
||||
|
||||
if not filedata and data.get('protocol') == 'torrent':
|
||||
log.error('Failed sending torrent, no data')
|
||||
return False
|
||||
|
||||
|
||||
if data.get('protocol') == 'torrent_magnet':
|
||||
filedata = self.magnetToTorrent(data.get('url'))
|
||||
|
||||
if filedata is False:
|
||||
return False
|
||||
|
||||
data['protocol'] = 'torrent'
|
||||
|
||||
info = bdecode(filedata)["info"]
|
||||
torrent_hash = sha1(bencode(info)).hexdigest()
|
||||
|
||||
# Convert base 32 to hex
|
||||
if len(torrent_hash) == 32:
|
||||
torrent_hash = b16encode(b32decode(torrent_hash))
|
||||
|
||||
# Send request to qBittorrent
|
||||
try:
|
||||
self.qb.add_file(filedata)
|
||||
|
||||
return self.downloadReturnId(torrent_hash)
|
||||
except Exception as e:
|
||||
log.error('Failed to send torrent to qBittorrent: %s', e)
|
||||
return False
|
||||
|
||||
def getTorrentStatus(self, torrent):
|
||||
|
||||
if torrent.state in ('uploading', 'queuedUP', 'stalledUP'):
|
||||
return 'seeding'
|
||||
|
||||
if torrent.progress == 1:
|
||||
return 'completed'
|
||||
|
||||
return 'busy'
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
log.debug('Checking qBittorrent download status.')
|
||||
|
||||
if not self.connect():
|
||||
return []
|
||||
|
||||
try:
|
||||
torrents = self.qb.get_torrents()
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
for torrent in torrents:
|
||||
if torrent.hash in ids:
|
||||
torrent.update_general() # get extra info
|
||||
torrent_filelist = torrent.get_files()
|
||||
|
||||
torrent_files = []
|
||||
torrent_dir = os.path.join(torrent.save_path, torrent.name)
|
||||
|
||||
if os.path.isdir(torrent_dir):
|
||||
torrent.save_path = torrent_dir
|
||||
|
||||
if len(torrent_filelist) > 1 and os.path.isdir(torrent_dir): # multi file torrent, path.isdir check makes sure we're not in the root download folder
|
||||
for root, _, files in os.walk(torrent.save_path):
|
||||
for f in files:
|
||||
torrent_files.append(sp(os.path.join(root, f)))
|
||||
|
||||
else: # multi or single file placed directly in torrent.save_path
|
||||
for f in torrent_filelist:
|
||||
file_path = os.path.join(torrent.save_path, f.name)
|
||||
if os.path.isfile(file_path):
|
||||
torrent_files.append(sp(file_path))
|
||||
|
||||
release_downloads.append({
|
||||
'id': torrent.hash,
|
||||
'name': torrent.name,
|
||||
'status': self.getTorrentStatus(torrent),
|
||||
'seed_ratio': torrent.ratio,
|
||||
'original_status': torrent.state,
|
||||
'timeleft': torrent.progress * 100 if torrent.progress else -1, # percentage
|
||||
'folder': sp(torrent.save_path),
|
||||
'files': torrent_files
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
except Exception as e:
|
||||
log.error('Failed to get status from qBittorrent: %s', e)
|
||||
return []
|
||||
|
||||
def pause(self, release_download, pause = True):
|
||||
if not self.connect():
|
||||
return False
|
||||
|
||||
torrent = self.qb.get_torrent(release_download['id'])
|
||||
if torrent is None:
|
||||
return False
|
||||
|
||||
if pause:
|
||||
return torrent.pause()
|
||||
return torrent.resume()
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
return self.processComplete(release_download, delete_files = True)
|
||||
|
||||
def processComplete(self, release_download, delete_files):
|
||||
log.debug('Requesting qBittorrent to remove the torrent %s%s.',
|
||||
(release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
||||
|
||||
if not self.connect():
|
||||
return False
|
||||
|
||||
torrent = self.qb.find_torrent(release_download['id'])
|
||||
|
||||
if torrent is None:
|
||||
return False
|
||||
|
||||
if delete_files:
|
||||
torrent.delete() # deletes torrent with data
|
||||
else:
|
||||
torrent.remove() # just removes the torrent, doesn't delete data
|
||||
|
||||
return True
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'qbittorrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'qbittorrent',
|
||||
'label': 'qbittorrent',
|
||||
'description': '',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'http://localhost:8080/',
|
||||
'description': 'RPC Communication URI. Usually <strong>http://localhost:8080/</strong>'
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'default': False,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the torrent after it finishes seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
86
couchpotato/core/downloaders/rtorrent/__init__.py
Executable file
86
couchpotato/core/downloaders/rtorrent/__init__.py
Executable file
@@ -0,0 +1,86 @@
|
||||
from .main import rTorrent
|
||||
|
||||
def start():
|
||||
return rTorrent()
|
||||
|
||||
config = [{
|
||||
'name': 'rtorrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'rtorrent',
|
||||
'label': 'rTorrent',
|
||||
'description': '',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'url',
|
||||
'default': 'http://localhost:80/RPC2',
|
||||
'description': 'XML-RPC Endpoint URI. Usually <strong>scgi://localhost:5000</strong> '
|
||||
'or <strong>http://localhost:80/RPC2</strong>'
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'label',
|
||||
'description': 'Label to apply on added torrents.',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Download to this directory. Keep empty for default rTorrent download directory.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'default': False,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the torrent after it finishes seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'append_label',
|
||||
'label': 'Append Label',
|
||||
'default': False,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Append label to download location. Requires you to set the download location above.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
227
couchpotato/core/downloaders/rtorrent/main.py
Executable file
227
couchpotato/core/downloaders/rtorrent/main.py
Executable file
@@ -0,0 +1,227 @@
|
||||
from base64 import b16encode, b32decode
|
||||
from bencode import bencode, bdecode
|
||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.logger import CPLog
|
||||
from datetime import timedelta
|
||||
from hashlib import sha1
|
||||
from rtorrent import RTorrent
|
||||
from rtorrent.err import MethodError
|
||||
import os
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class rTorrent(Downloader):
|
||||
|
||||
protocol = ['torrent', 'torrent_magnet']
|
||||
rt = None
|
||||
|
||||
def connect(self):
|
||||
# Already connected?
|
||||
if self.rt is not None:
|
||||
return self.rt
|
||||
|
||||
# Ensure url is set
|
||||
if not self.conf('url'):
|
||||
log.error('Config properties are not filled in correctly, url is missing.')
|
||||
return False
|
||||
|
||||
if self.conf('username') and self.conf('password'):
|
||||
self.rt = RTorrent(
|
||||
self.conf('url'),
|
||||
self.conf('username'),
|
||||
self.conf('password')
|
||||
)
|
||||
else:
|
||||
self.rt = RTorrent(self.conf('url'))
|
||||
|
||||
return self.rt
|
||||
|
||||
def _update_provider_group(self, name, data):
|
||||
if data.get('seed_time'):
|
||||
log.info('seeding time ignored, not supported')
|
||||
|
||||
if not name:
|
||||
return False
|
||||
|
||||
if not self.connect():
|
||||
return False
|
||||
|
||||
views = self.rt.get_views()
|
||||
|
||||
if name not in views:
|
||||
self.rt.create_group(name)
|
||||
|
||||
group = self.rt.get_group(name)
|
||||
|
||||
try:
|
||||
if data.get('seed_ratio'):
|
||||
ratio = int(float(data.get('seed_ratio')) * 100)
|
||||
log.debug('Updating provider ratio to %s, group name: %s', (ratio, name))
|
||||
|
||||
# Explicitly set all group options to ensure it is setup correctly
|
||||
group.set_upload('1M')
|
||||
group.set_min(ratio)
|
||||
group.set_max(ratio)
|
||||
group.set_command('d.stop')
|
||||
group.enable()
|
||||
else:
|
||||
# Reset group action and disable it
|
||||
group.set_command()
|
||||
group.disable()
|
||||
except MethodError, err:
|
||||
log.error('Unable to set group options: %s', err.msg)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def download(self, data, movie, filedata = None):
|
||||
log.debug('Sending "%s" to rTorrent.', (data.get('name')))
|
||||
|
||||
if not self.connect():
|
||||
return False
|
||||
|
||||
group_name = 'cp_' + data.get('provider').lower()
|
||||
if not self._update_provider_group(group_name, data):
|
||||
return False
|
||||
|
||||
torrent_params = {}
|
||||
if self.conf('label'):
|
||||
torrent_params['label'] = self.conf('label')
|
||||
|
||||
|
||||
if not filedata and data.get('protocol') == 'torrent':
|
||||
log.error('Failed sending torrent, no data')
|
||||
return False
|
||||
|
||||
# Try download magnet torrents
|
||||
if data.get('protocol') == 'torrent_magnet':
|
||||
filedata = self.magnetToTorrent(data.get('url'))
|
||||
|
||||
if filedata is False:
|
||||
return False
|
||||
|
||||
data['protocol'] = 'torrent'
|
||||
|
||||
info = bdecode(filedata)["info"]
|
||||
torrent_hash = sha1(bencode(info)).hexdigest().upper()
|
||||
|
||||
# Convert base 32 to hex
|
||||
if len(torrent_hash) == 32:
|
||||
torrent_hash = b16encode(b32decode(torrent_hash))
|
||||
|
||||
# Send request to rTorrent
|
||||
try:
|
||||
# Send torrent to rTorrent
|
||||
torrent = self.rt.load_torrent(filedata)
|
||||
|
||||
if not torrent:
|
||||
log.error('Unable to find the torrent, did it fail to load?')
|
||||
return False
|
||||
|
||||
# Set label
|
||||
if self.conf('label'):
|
||||
torrent.set_custom(1, self.conf('label'))
|
||||
|
||||
if self.conf('directory') and self.conf('append_label'):
|
||||
torrent.set_directory(os.path.join(self.conf('directory'), self.conf('label')))
|
||||
elif self.conf('directory'):
|
||||
torrent.set_directory(self.conf('directory'))
|
||||
|
||||
# Set Ratio Group
|
||||
torrent.set_visible(group_name)
|
||||
|
||||
# Start torrent
|
||||
if not self.conf('paused', default = 0):
|
||||
torrent.start()
|
||||
|
||||
return self.downloadReturnId(torrent_hash)
|
||||
except Exception, err:
|
||||
log.error('Failed to send torrent to rTorrent: %s', err)
|
||||
return False
|
||||
|
||||
def getAllDownloadStatus(self):
    """Return a ReleaseDownloadList describing every torrent in rTorrent.

    :return: ReleaseDownloadList on success, False when rTorrent is unreachable
             or the status query fails
    """
    log.debug('Checking rTorrent download status.')

    if not self.connect():
        return False

    try:
        torrents = self.rt.get_torrents()

        release_downloads = ReleaseDownloadList(self)

        for torrent in torrents:
            torrent_files = []
            for file_item in torrent.get_files():
                torrent_files.append(sp(os.path.join(torrent.directory, file_item.path)))

            # Map rTorrent flags to CouchPotato status values
            status = 'busy'
            if torrent.complete:
                if torrent.active:
                    status = 'seeding'
                else:
                    status = 'completed'

            release_downloads.append({
                'id': torrent.info_hash,
                'name': torrent.name,
                'status': status,
                'seed_ratio': torrent.ratio,
                'original_status': torrent.state,
                # -1 signals "unknown" when nothing is being downloaded
                'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
                'folder': sp(torrent.directory),
                'files': '|'.join(torrent_files)
            })

        return release_downloads

    # `except Exception, err:` is Python-2-only syntax; `as` matches the rest of the file
    except Exception as err:
        log.error('Failed to get status from rTorrent: %s', err)
        return False
|
||||
|
||||
def pause(self, release_download, pause = True):
    """Pause or resume the torrent identified by the release download id."""
    if not self.connect():
        return False

    torrent = self.rt.find_torrent(release_download['id'])
    if torrent is None:
        return False

    # Forward the requested action; the rTorrent call result is returned as-is
    return torrent.pause() if pause else torrent.resume()
|
||||
|
||||
def removeFailed(self, release_download):
    """Delete a failed download, files included, via processComplete()."""
    log.info('%s failed downloading, deleting...', release_download['name'])
    return self.processComplete(release_download, delete_files = True)
|
||||
|
||||
def processComplete(self, release_download, delete_files):
    """Remove the torrent from rTorrent, optionally deleting its files.

    :param release_download: dict with at least 'id' (info hash) and 'name'
    :param delete_files: when True, unlink each payload file and try to prune
        the (now empty) download directory bottom-up
    :return: True on success, False when not connected or torrent not found
    """
    log.debug('Requesting rTorrent to remove the torrent %s%s.',
        (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

    if not self.connect():
        return False

    torrent = self.rt.find_torrent(release_download['id'])

    if torrent is None:
        return False

    if delete_files:
        for file_item in torrent.get_files(): # will only delete files, not dir/sub-dir
            os.unlink(os.path.join(torrent.directory, file_item.path))

        if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
            # Remove empty directories bottom up
            try:
                # sp() normalizes the path -- consistent with getAllDownloadStatus
                # and the newer copy of this method elsewhere in the file
                for path, _, _ in os.walk(sp(torrent.directory), topdown = False):
                    os.rmdir(path)
            except OSError:
                log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)

    torrent.erase() # just removes the torrent, doesn't delete data

    return True
|
||||
@@ -1,335 +0,0 @@
|
||||
from base64 import b16encode, b32decode
|
||||
from datetime import timedelta
|
||||
from hashlib import sha1
|
||||
from six.moves import urllib
|
||||
import os
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import sp
|
||||
from couchpotato.core.helpers.variable import cleanHost, splitString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from bencode import bencode, bdecode
|
||||
from rtorrent import RTorrent
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'rTorrent'
|
||||
|
||||
|
||||
class rTorrent(DownloaderBase):
|
||||
|
||||
protocol = ['torrent', 'torrent_magnet']
|
||||
rt = None
|
||||
error_msg = ''
|
||||
|
||||
# Migration url to host options
|
||||
def __init__(self):
    """Set up the downloader and register its application hooks."""
    super(rTorrent, self).__init__()

    # Convert the legacy single 'url' option into host/ssl/rpc_url on startup
    addEvent('app.load', self.migrate)
    # Drop the cached connection whenever any rtorrent setting is saved
    addEvent('setting.save.rtorrent.*.after', self.settingsChanged)
|
||||
|
||||
def migrate(self):
    """One-time migration: split the legacy 'url' setting into host/ssl/rpc_url."""
    url = self.conf('url')
    if not url:
        return

    # Strip the scheme, then separate the host from the RPC mount path
    host_split = splitString(url.split('://')[-1], split_on = '/')

    self.conf('ssl', value = url.startswith('https'))
    self.conf('host', value = host_split[0].strip())
    self.conf('rpc_url', value = '/'.join(host_split[1:]))

    self.deleteConf('url')
|
||||
|
||||
def settingsChanged(self):
    """Invalidate the cached rTorrent connection after a settings save."""
    # A live connection would keep using stale credentials/host settings
    if self.rt:
        log.debug('Settings have changed, closing active connection')

    self.rt = None
    return True
|
||||
|
||||
def connect(self, reconnect = False):
    """Return a connected RTorrent client, creating one when needed.

    :param reconnect: force a fresh connection even if one is cached
    :return: the RTorrent instance, or None when the connection check fails
        (the failure text is kept in self.error_msg for test())
    """
    # Already connected?
    if not reconnect and self.rt is not None:
        return self.rt

    url = cleanHost(self.conf('host'), protocol = True, ssl = self.conf('ssl'))

    # Automatically add '+https' to 'httprpc' protocol if SSL is enabled
    if self.conf('ssl') and url.startswith('httprpc://'):
        url = url.replace('httprpc://', 'httprpc+https://')

    # BUGFIX: six.moves.urllib exposes urlparse under its `parse` submodule;
    # `urllib.urlparse` raises AttributeError on six.moves.urllib
    parsed = urllib.parse.urlparse(url)

    # rpc_url is only used on http/https scgi pass-through
    if parsed.scheme in ['http', 'https']:
        url += self.conf('rpc_url')

    self.rt = RTorrent(
        url,
        self.conf('username'),
        self.conf('password')
    )

    self.error_msg = ''
    try:
        self.rt._verify_conn()
    except AssertionError as e:
        # str(e) instead of e.message: BaseException.message does not exist on Python 3
        self.error_msg = str(e)
        self.rt = None

    return self.rt
|
||||
|
||||
def test(self):
    """Force a fresh connection; report the failure reason when one is known."""
    if self.connect(True):
        return True

    # connect() stashes the AssertionError text for nicer UI feedback
    if self.error_msg:
        return False, 'Connection failed: ' + self.error_msg

    return False
|
||||
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
    """Send a torrent (or magnet link) release to rTorrent.

    :param data: release info dict (name, url, protocol, ...)
    :param media: media dict the release belongs to
    :param filedata: raw .torrent file contents; required for 'torrent' protocol
    :return: result of downloadReturnId() on success, False on any failure
    """
    if not media: media = {}
    if not data: data = {}

    log.debug('Sending "%s" to rTorrent.', (data.get('name')))

    if not self.connect():
        return False

    # NOTE: removed the dead `torrent_params` dict previously built here; it was
    # never passed anywhere -- the label is applied via torrent.set_custom() below.

    if not filedata and data.get('protocol') == 'torrent':
        log.error('Failed sending torrent, no data')
        return False

    # Try download magnet torrents
    if data.get('protocol') == 'torrent_magnet':
        filedata = self.magnetToTorrent(data.get('url'))

        if filedata is False:
            return False

        data['protocol'] = 'torrent'

    # Info-hash identifies the torrent for later status lookups
    info = bdecode(filedata)["info"]
    torrent_hash = sha1(bencode(info)).hexdigest().upper()

    # Convert base 32 to hex
    if len(torrent_hash) == 32:
        torrent_hash = b16encode(b32decode(torrent_hash))

    # Send request to rTorrent
    try:
        # Send torrent to rTorrent; retry verification while the client registers it
        torrent = self.rt.load_torrent(filedata, verify_retries=10)

        if not torrent:
            log.error('Unable to find the torrent, did it fail to load?')
            return False

        # Set label
        if self.conf('label'):
            torrent.set_custom(1, self.conf('label'))

        if self.conf('directory'):
            torrent.set_directory(self.conf('directory'))

        # Start torrent unless configured to add it paused
        if not self.conf('paused', default = 0):
            torrent.start()

        return self.downloadReturnId(torrent_hash)
    except Exception as err:
        log.error('Failed to send torrent to rTorrent: %s', err)
        return False
|
||||
|
||||
def getTorrentStatus(self, torrent):
    """Map an rTorrent torrent object onto a CouchPotato status string."""
    if torrent.complete:
        # Finished torrents are either still open (seeding) or fully done
        return 'seeding' if torrent.open else 'completed'

    # Anything still transferring counts as busy
    return 'busy'
|
||||
|
||||
def getAllDownloadStatus(self, ids):
    """Return download status for the torrents whose info-hash is in `ids`.

    :param ids: iterable of torrent info hashes CouchPotato is tracking
    :return: ReleaseDownloadList (possibly empty); empty list on error
    """
    log.debug('Checking rTorrent download status.')

    if not self.connect():
        return []

    try:
        torrents = self.rt.get_torrents()

        release_downloads = ReleaseDownloadList(self)

        for torrent in torrents:
            if torrent.info_hash in ids:
                torrent_directory = os.path.normpath(torrent.directory)
                torrent_files = []

                # Renamed loop variable from `file` -- it shadowed the builtin
                for file_item in torrent.get_files():
                    # Some torrents report file paths relative to the download dir;
                    # join them so every entry is absolute
                    if not os.path.normpath(file_item.path).startswith(torrent_directory):
                        file_path = os.path.join(torrent_directory, file_item.path.lstrip('/'))
                    else:
                        file_path = file_item.path

                    torrent_files.append(sp(file_path))

                release_downloads.append({
                    'id': torrent.info_hash,
                    'name': torrent.name,
                    'status': self.getTorrentStatus(torrent),
                    'seed_ratio': torrent.ratio,
                    'original_status': torrent.state,
                    # -1 signals "unknown" when nothing is being downloaded
                    'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
                    'folder': sp(torrent.directory),
                    'files': torrent_files
                })

        return release_downloads

    except Exception as err:
        log.error('Failed to get status from rTorrent: %s', err)
        return []
|
||||
|
||||
def pause(self, release_download, pause = True):
    """Pause or resume the torrent identified by the release download id."""
    if not self.connect():
        return False

    torrent = self.rt.find_torrent(release_download['id'])
    if torrent is None:
        return False

    # Forward the requested action; the rTorrent call result is returned as-is
    return torrent.pause() if pause else torrent.resume()
|
||||
|
||||
def removeFailed(self, release_download):
    """Delete a failed download, files included, via processComplete()."""
    log.info('%s failed downloading, deleting...', release_download['name'])
    return self.processComplete(release_download, delete_files = True)
|
||||
|
||||
def processComplete(self, release_download, delete_files):
    """Remove the torrent from rTorrent, optionally deleting its payload files."""
    log.debug('Requesting rTorrent to remove the torrent %s%s.',
        (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))

    if not self.connect():
        return False

    torrent = self.rt.find_torrent(release_download['id'])

    if torrent is None:
        return False

    if delete_files:
        # Only files are unlinked here; directories are handled below
        for torrent_file in torrent.get_files():
            os.unlink(os.path.join(torrent.directory, torrent_file.path))

        if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
            # Prune the now-empty directory tree from the leaves upward
            try:
                for dir_path, _, _ in os.walk(sp(torrent.directory), topdown = False):
                    os.rmdir(dir_path)
            except OSError:
                log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)

    # Only drops the torrent from the client; payload data is untouched
    torrent.erase()

    return True
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'rtorrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'rtorrent',
|
||||
'label': 'rTorrent',
|
||||
'description': '',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:80',
|
||||
'description': 'RPC Communication URI. Usually <strong>scgi://localhost:5000</strong>, '
|
||||
'<strong>httprpc://localhost/rutorrent</strong> or <strong>localhost:80</strong>'
|
||||
},
|
||||
{
|
||||
'name': 'ssl',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'rpc_url',
|
||||
'type': 'string',
|
||||
'default': 'RPC2',
|
||||
'advanced': True,
|
||||
'description': 'Change if your RPC mount is at a different path.',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'label',
|
||||
'description': 'Label to apply on added torrents.',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Download to this directory. Keep empty for default rTorrent download directory.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'default': False,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the torrent after it finishes seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,281 +0,0 @@
|
||||
from datetime import timedelta
|
||||
from urllib2 import URLError
|
||||
import json
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
|
||||
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Sabnzbd'
|
||||
|
||||
|
||||
class Sabnzbd(DownloaderBase):
|
||||
|
||||
protocol = ['nzb']
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
    """Send an NZB (by url or raw file contents) to SABnzbd.

    :param data: release info dict (name, url, ...)
    :param media: media dict the release belongs to
    :param filedata: raw .nzb contents, or None to let SAB fetch data['url']
    :return: downloadReturnId() result when a file was uploaded, True when a
        url was queued, False on failure
    """
    if not media: media = {}
    if not data: data = {}

    log.info('Sending "%s" to SABnzbd.', data.get('name'))

    req_params = {
        'cat': self.conf('category'),
        'mode': 'addurl',
        'nzbname': self.createNzbName(data, media),
        'priority': self.conf('priority'),
    }

    nzb_filename = None
    if filedata:
        # Anything shorter than 50 bytes cannot be a real nzb document
        if len(filedata) < 50:
            log.error('No proper nzb available: %s', filedata)
            return False

        # If it's a .rar, it adds the .rar extension, otherwise it stays .nzb
        nzb_filename = self.createFileName(data, filedata, media)
        req_params['mode'] = 'addfile'
    else:
        req_params['name'] = data.get('url')

    try:
        # BUGFIX: was `is 'addfile'` -- identity comparison on string literals
        # relies on CPython interning and is not guaranteed; use equality
        if nzb_filename and req_params.get('mode') == 'addfile':
            sab_data = self.call(req_params, files = {'nzbfile': (ss(nzb_filename), filedata)})
        else:
            sab_data = self.call(req_params)
    except URLError:
        log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0))
        return False
    except:
        log.error('Failed sending release, use API key, NOT the NZB key: %s', traceback.format_exc(0))
        return False

    log.debug('Result from SAB: %s', sab_data)
    if sab_data.get('status') and not sab_data.get('error'):
        log.info('NZB sent to SAB successfully.')
        if filedata:
            return self.downloadReturnId(sab_data.get('nzo_ids')[0])
        else:
            return True
    else:
        log.error('Error getting data from SABNZBd: %s', sab_data)
        return False
|
||||
|
||||
def test(self):
    """Verify SABnzbd is reachable, recent enough (>= 0.7), and the API key works."""
    try:
        version = self.call({
            'mode': 'version',
        })
        parts = version.split('.')
        if int(parts[0]) == 0 and int(parts[1]) < 7:
            return False, 'Your Sabnzbd client is too old, please update to newest version.'

        # The version call succeeds even with a wrong api key, so also
        # poke an authenticated endpoint to validate the key
        queue_status = self.call({
            'mode': 'qstatus',
        })
        if not queue_status:
            return False
    except:
        return False

    return True
|
||||
|
||||
def getAllDownloadStatus(self, ids):
    """Collect status for tracked nzo ids from both the SAB queue and history."""
    log.debug('Checking SABnzbd download status.')

    # Fetch the active queue
    try:
        queue = self.call({
            'mode': 'queue',
        })
    except:
        log.error('Failed getting queue: %s', traceback.format_exc(1))
        return []

    # Fetch recently finished items
    try:
        history = self.call({
            'mode': 'history',
            'limit': 15,
        })
    except:
        log.error('Failed getting history json: %s', traceback.format_exc(1))
        return []

    release_downloads = ReleaseDownloadList(self)

    # Active/queued downloads
    for nzb in queue.get('slots', []):
        if nzb['nzo_id'] not in ids:
            continue

        # SAB flags password-protected archives by prefixing the name
        status = 'failed' if 'ENCRYPTED / ' in nzb['filename'] else 'busy'

        release_downloads.append({
            'id': nzb['nzo_id'],
            'name': nzb['filename'],
            'status': status,
            'original_status': nzb['status'],
            'timeleft': nzb['timeleft'] if not queue['paused'] else -1,
        })

    # Finished or failed downloads from history
    for nzb in history.get('slots', []):
        if nzb['nzo_id'] not in ids:
            continue

        status = 'busy'
        if nzb['status'] == 'Failed' or (nzb['status'] == 'Completed' and nzb['fail_message'].strip()):
            status = 'failed'
        elif nzb['status'] == 'Completed':
            status = 'completed'

        release_downloads.append({
            'id': nzb['nzo_id'],
            'name': nzb['name'],
            'status': status,
            'original_status': nzb['status'],
            'timeleft': str(timedelta(seconds = 0)),
            'folder': sp(os.path.dirname(nzb['storage']) if os.path.isfile(nzb['storage']) else nzb['storage']),
        })

    return release_downloads
|
||||
|
||||
def removeFailed(self, release_download):
    """Drop a failed download from both the SAB queue and history, deleting files."""
    log.info('%s failed downloading, deleting...', release_download['name'])

    # The item may live in either the queue or the history, so hit both endpoints
    delete_params = {
        'name': 'delete',
        'del_files': '1',
        'value': release_download['id'],
    }
    try:
        self.call(dict(delete_params, mode = 'queue'), use_json = False)
        self.call(dict(delete_params, mode = 'history'), use_json = False)
    except:
        log.error('Failed deleting: %s', traceback.format_exc(0))
        return False

    return True
|
||||
|
||||
def processComplete(self, release_download, delete_files = False):
    """Remove a finished NZB from SAB history; downloaded files are kept."""
    log.debug('Requesting SabNZBd to remove the NZB %s.', release_download['name'])

    try:
        # del_files='0': only the history entry goes, never the payload
        self.call({
            'mode': 'history',
            'name': 'delete',
            'del_files': '0',
            'value': release_download['id']
        }, use_json = False)
    except:
        log.error('Failed removing: %s', traceback.format_exc(0))
        return False

    return True
|
||||
|
||||
def call(self, request_params, use_json = True, **kwargs):
    """Perform a SABnzbd API request and optionally decode the JSON reply."""
    # Every request carries the api key and asks for json output
    params = mergeDicts(request_params, {
        'apikey': self.conf('api_key'),
        'output': 'json'
    })
    url = cleanHost(self.conf('host'), ssl = self.conf('ssl')) + 'api?' + tryUrlencode(params)

    data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
    if not use_json:
        return data

    decoded = json.loads(data)
    if decoded.get('error'):
        log.error('Error getting data from SABNZBd: %s', decoded.get('error'))
        return {}

    # SAB nests the payload under the mode name for most calls
    return decoded.get(request_params['mode']) or decoded
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'sabnzbd',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'sabnzbd',
|
||||
'label': 'Sabnzbd',
|
||||
'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> (0.7+) to download NZBs.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:8080',
|
||||
},
|
||||
{
|
||||
'name': 'ssl',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'api_key',
|
||||
'label': 'Api Key',
|
||||
'description': 'Used for all calls to Sabnzbd.',
|
||||
},
|
||||
{
|
||||
'name': 'category',
|
||||
'label': 'Category',
|
||||
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'priority',
|
||||
'label': 'Priority',
|
||||
'type': 'dropdown',
|
||||
'default': '0',
|
||||
'advanced': True,
|
||||
'values': [('Paused', -2), ('Low', -1), ('Normal', 0), ('High', 1), ('Forced', 2)],
|
||||
'description': 'Add to the queue with this priority.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': False,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'advanced': True,
|
||||
'label': 'Remove NZB',
|
||||
'default': False,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the NZB from history after it completed.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
71
couchpotato/core/downloaders/sabnzbd/__init__.py
Normal file
71
couchpotato/core/downloaders/sabnzbd/__init__.py
Normal file
@@ -0,0 +1,71 @@
|
||||
from .main import Sabnzbd
|
||||
|
||||
def start():
|
||||
return Sabnzbd()
|
||||
|
||||
config = [{
|
||||
'name': 'sabnzbd',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'sabnzbd',
|
||||
'label': 'Sabnzbd',
|
||||
'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> (0.7+) to download NZBs.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:8080',
|
||||
},
|
||||
{
|
||||
'name': 'api_key',
|
||||
'label': 'Api Key',
|
||||
'description': 'Used for all calls to Sabnzbd.',
|
||||
},
|
||||
{
|
||||
'name': 'category',
|
||||
'label': 'Category',
|
||||
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'priority',
|
||||
'label': 'Priority',
|
||||
'type': 'dropdown',
|
||||
'default': '0',
|
||||
'advanced': True,
|
||||
'values': [('Paused', -2), ('Low', -1), ('Normal', 0), ('High', 1), ('Forced', 2)],
|
||||
'description': 'Add to the queue with this priority.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': False,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'advanced': True,
|
||||
'label': 'Remove NZB',
|
||||
'default': False,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the NZB from history after it completed.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
182
couchpotato/core/downloaders/sabnzbd/main.py
Normal file
182
couchpotato/core/downloaders/sabnzbd/main.py
Normal file
@@ -0,0 +1,182 @@
|
||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
|
||||
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.environment import Env
|
||||
from datetime import timedelta
|
||||
from urllib2 import URLError
|
||||
import json
|
||||
import os
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Sabnzbd(Downloader):
|
||||
|
||||
protocol = ['nzb']
|
||||
|
||||
def download(self, data = None, movie = None, filedata = None):
    """Send an NZB (by url or raw file contents) to SABnzbd.

    :param data: release info dict (name, url, ...)
    :param movie: media dict the release belongs to
    :param filedata: raw .nzb contents, or None to let SAB fetch data['url']
    :return: downloadReturnId() result when a file was uploaded, True when a
        url was queued, False on failure
    """
    if not movie: movie = {}
    if not data: data = {}

    log.info('Sending "%s" to SABnzbd.', data.get('name'))

    req_params = {
        'cat': self.conf('category'),
        'mode': 'addurl',
        'nzbname': self.createNzbName(data, movie),
        'priority': self.conf('priority'),
    }

    nzb_filename = None
    if filedata:
        # Anything shorter than 50 bytes cannot be a real nzb document
        if len(filedata) < 50:
            log.error('No proper nzb available: %s', filedata)
            return False

        # If it's a .rar, it adds the .rar extension, otherwise it stays .nzb
        nzb_filename = self.createFileName(data, filedata, movie)
        req_params['mode'] = 'addfile'
    else:
        req_params['name'] = data.get('url')

    try:
        # BUGFIX: was `is 'addfile'` -- identity comparison on string literals
        # relies on CPython interning and is not guaranteed; use equality
        if nzb_filename and req_params.get('mode') == 'addfile':
            sab_data = self.call(req_params, params = {'nzbfile': (ss(nzb_filename), filedata)}, multipart = True)
        else:
            sab_data = self.call(req_params)
    except URLError:
        log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0))
        return False
    except:
        log.error('Failed sending release, use API key, NOT the NZB key: %s', traceback.format_exc(0))
        return False

    log.debug('Result from SAB: %s', sab_data)
    if sab_data.get('status') and not sab_data.get('error'):
        log.info('NZB sent to SAB successfully.')
        if filedata:
            return self.downloadReturnId(sab_data.get('nzo_ids')[0])
        else:
            return True
    else:
        log.error('Error getting data from SABNZBd: %s', sab_data)
        return False
|
||||
|
||||
def getAllDownloadStatus(self):
|
||||
|
||||
log.debug('Checking SABnzbd download status.')
|
||||
|
||||
# Go through Queue
|
||||
try:
|
||||
queue = self.call({
|
||||
'mode': 'queue',
|
||||
})
|
||||
except:
|
||||
log.error('Failed getting queue: %s', traceback.format_exc(1))
|
||||
return False
|
||||
|
||||
# Go through history items
|
||||
try:
|
||||
history = self.call({
|
||||
'mode': 'history',
|
||||
'limit': 15,
|
||||
})
|
||||
except:
|
||||
log.error('Failed getting history json: %s', traceback.format_exc(1))
|
||||
return False
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
# Get busy releases
|
||||
for nzb in queue.get('slots', []):
|
||||
status = 'busy'
|
||||
if 'ENCRYPTED / ' in nzb['filename']:
|
||||
status = 'failed'
|
||||
|
||||
release_downloads.append({
|
||||
'id': nzb['nzo_id'],
|
||||
'name': nzb['filename'],
|
||||
'status': status,
|
||||
'original_status': nzb['status'],
|
||||
'timeleft': nzb['timeleft'] if not queue['paused'] else -1,
|
||||
})
|
||||
|
||||
# Get old releases
|
||||
for nzb in history.get('slots', []):
|
||||
|
||||
status = 'busy'
|
||||
if nzb['status'] == 'Failed' or (nzb['status'] == 'Completed' and nzb['fail_message'].strip()):
|
||||
status = 'failed'
|
||||
elif nzb['status'] == 'Completed':
|
||||
status = 'completed'
|
||||
|
||||
release_downloads.append({
|
||||
'id': nzb['nzo_id'],
|
||||
'name': nzb['name'],
|
||||
'status': status,
|
||||
'original_status': nzb['status'],
|
||||
'timeleft': str(timedelta(seconds = 0)),
|
||||
'folder': sp(os.path.dirname(nzb['storage']) if os.path.isfile(nzb['storage']) else nzb['storage']),
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
def removeFailed(self, release_download):
|
||||
|
||||
log.info('%s failed downloading, deleting...', release_download['name'])
|
||||
|
||||
try:
|
||||
self.call({
|
||||
'mode': 'queue',
|
||||
'name': 'delete',
|
||||
'del_files': '1',
|
||||
'value': release_download['id']
|
||||
}, use_json = False)
|
||||
self.call({
|
||||
'mode': 'history',
|
||||
'name': 'delete',
|
||||
'del_files': '1',
|
||||
'value': release_download['id']
|
||||
}, use_json = False)
|
||||
except:
|
||||
log.error('Failed deleting: %s', traceback.format_exc(0))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def processComplete(self, release_download, delete_files = False):
|
||||
log.debug('Requesting SabNZBd to remove the NZB %s.', release_download['name'])
|
||||
|
||||
try:
|
||||
self.call({
|
||||
'mode': 'history',
|
||||
'name': 'delete',
|
||||
'del_files': '0',
|
||||
'value': release_download['id']
|
||||
}, use_json = False)
|
||||
except:
|
||||
log.error('Failed removing: %s', traceback.format_exc(0))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def call(self, request_params, use_json = True, **kwargs):
|
||||
|
||||
url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(request_params, {
|
||||
'apikey': self.conf('api_key'),
|
||||
'output': 'json'
|
||||
}))
|
||||
|
||||
data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs)
|
||||
if use_json:
|
||||
d = json.loads(data)
|
||||
if d.get('error'):
|
||||
log.error('Error getting data from SABNZBd: %s', d.get('error'))
|
||||
return {}
|
||||
|
||||
return d.get(request_params['mode']) or d
|
||||
else:
|
||||
return data
|
||||
|
||||
52
couchpotato/core/downloaders/synology/__init__.py
Normal file
52
couchpotato/core/downloaders/synology/__init__.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from .main import Synology
|
||||
|
||||
def start():
|
||||
return Synology()
|
||||
|
||||
config = [{
|
||||
'name': 'synology',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'synology',
|
||||
'label': 'Synology',
|
||||
'description': 'Use <a href="http://www.synology.com/dsm/home_home_applications_download_station.php" target="_blank">Synology Download Station</a> to download.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb,torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:5000',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:5000</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'use_for',
|
||||
'label': 'Use for',
|
||||
'default': 'both',
|
||||
'type': 'dropdown',
|
||||
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,39 +1,34 @@
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase
|
||||
from couchpotato.core.downloaders.base import Downloader
|
||||
from couchpotato.core.helpers.encoding import isInt
|
||||
from couchpotato.core.helpers.variable import cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
import json
|
||||
import requests
|
||||
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Synology'
|
||||
|
||||
|
||||
class Synology(DownloaderBase):
|
||||
class Synology(Downloader):
|
||||
|
||||
protocol = ['nzb', 'torrent', 'torrent_magnet']
|
||||
status_support = False
|
||||
log = CPLog(__name__)
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
def download(self, data = None, movie = None, filedata = None):
|
||||
if not movie: movie = {}
|
||||
if not data: data = {}
|
||||
|
||||
response = False
|
||||
log.error('Sending "%s" (%s) to Synology.', (data['name'], data['protocol']))
|
||||
|
||||
# Load host from config and split out port.
|
||||
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||
host = self.conf('host').split(':')
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
|
||||
try:
|
||||
# Send request to Synology
|
||||
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'), self.conf('destination'))
|
||||
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
|
||||
if data['protocol'] == 'torrent_magnet':
|
||||
log.info('Adding torrent URL %s', data['url'])
|
||||
response = srpc.create_task(url = data['url'])
|
||||
@@ -47,17 +42,7 @@ class Synology(DownloaderBase):
|
||||
except:
|
||||
log.error('Exception while adding torrent: %s', traceback.format_exc())
|
||||
finally:
|
||||
return self.downloadReturnId('') if response else False
|
||||
|
||||
def test(self):
|
||||
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||
try:
|
||||
srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'))
|
||||
test_result = srpc.test()
|
||||
except:
|
||||
return False
|
||||
|
||||
return test_result
|
||||
return response
|
||||
|
||||
def getEnabledProtocol(self):
|
||||
if self.conf('use_for') == 'both':
|
||||
@@ -79,21 +64,18 @@ class Synology(DownloaderBase):
|
||||
return super(Synology, self).isEnabled(manual, data) and\
|
||||
((self.conf('use_for') in for_protocol))
|
||||
|
||||
|
||||
class SynologyRPC(object):
|
||||
|
||||
"""SynologyRPC lite library"""
|
||||
|
||||
def __init__(self, host = 'localhost', port = 5000, username = None, password = None, destination = None):
|
||||
def __init__(self, host = 'localhost', port = 5000, username = None, password = None):
|
||||
|
||||
super(SynologyRPC, self).__init__()
|
||||
|
||||
self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port)
|
||||
self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port)
|
||||
self.sid = None
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.destination = destination
|
||||
self.session_name = 'DownloadStation'
|
||||
|
||||
def _login(self):
|
||||
@@ -124,11 +106,11 @@ class SynologyRPC(object):
|
||||
if response['success']:
|
||||
log.info('Synology action successfull')
|
||||
return response
|
||||
except requests.ConnectionError as err:
|
||||
except requests.ConnectionError, err:
|
||||
log.error('Synology connection error, check your config %s', err)
|
||||
except requests.HTTPError as err:
|
||||
except requests.HTTPError, err:
|
||||
log.error('SynologyRPC HTTPError: %s', err)
|
||||
except Exception as err:
|
||||
except Exception, err:
|
||||
log.error('Exception: %s', err)
|
||||
finally:
|
||||
return response
|
||||
@@ -146,10 +128,6 @@ class SynologyRPC(object):
|
||||
'version': '1',
|
||||
'method': 'create',
|
||||
'_sid': self.sid}
|
||||
|
||||
if self.destination and len(self.destination) > 0:
|
||||
args['destination'] = self.destination
|
||||
|
||||
if url:
|
||||
log.info('Login success, adding torrent URI')
|
||||
args['uri'] = url
|
||||
@@ -167,60 +145,3 @@ class SynologyRPC(object):
|
||||
self._logout()
|
||||
|
||||
return result
|
||||
|
||||
def test(self):
|
||||
return bool(self._login())
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'synology',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'synology',
|
||||
'label': 'Synology',
|
||||
'description': 'Use <a href="http://www.synology.com/dsm/home_home_applications_download_station.php" target="_blank">Synology Download Station</a> to download.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'nzb,torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:5000',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:5000</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'destination',
|
||||
'description': 'Specify <strong>existing</strong> destination share to where your files will be downloaded, usually <strong>Downloads</strong>',
|
||||
'advanced': True,
|
||||
},
|
||||
{
|
||||
'name': 'use_for',
|
||||
'label': 'Use for',
|
||||
'default': 'both',
|
||||
'type': 'dropdown',
|
||||
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
94
couchpotato/core/downloaders/transmission/__init__.py
Normal file
94
couchpotato/core/downloaders/transmission/__init__.py
Normal file
@@ -0,0 +1,94 @@
|
||||
from .main import Transmission
|
||||
|
||||
def start():
|
||||
return Transmission()
|
||||
|
||||
config = [{
|
||||
'name': 'transmission',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'transmission',
|
||||
'label': 'Transmission',
|
||||
'description': 'Use <a href="http://www.transmissionbt.com/" target="_blank">Transmission</a> to download torrents.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:9091',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:9091</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'rpc_url',
|
||||
'type': 'string',
|
||||
'default': 'transmission',
|
||||
'advanced': True,
|
||||
'description': 'Change if you don\'t run Transmission RPC at the default url.',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Download to this directory. Keep empty for default Transmission download directory.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the torrent from Transmission after it finished seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'stalled_as_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Consider a stalled torrent as failed',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,4 +1,8 @@
|
||||
from base64 import b64encode
|
||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, tryFloat
|
||||
from couchpotato.core.logger import CPLog
|
||||
from datetime import timedelta
|
||||
import httplib
|
||||
import json
|
||||
@@ -6,18 +10,10 @@ import os.path
|
||||
import re
|
||||
import urllib2
|
||||
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'Transmission'
|
||||
|
||||
|
||||
class Transmission(DownloaderBase):
|
||||
class Transmission(Downloader):
|
||||
|
||||
protocol = ['torrent', 'torrent_magnet']
|
||||
log = CPLog(__name__)
|
||||
@@ -25,17 +21,17 @@ class Transmission(DownloaderBase):
|
||||
|
||||
def connect(self):
|
||||
# Load host from config and split out port.
|
||||
host = cleanHost(self.conf('host')).rstrip('/').rsplit(':', 1)
|
||||
host = self.conf('host').split(':')
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
|
||||
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url').strip('/ '), username = self.conf('username'), password = self.conf('password'))
|
||||
if not self.trpc:
|
||||
self.trpc = TransmissionRPC(host[0], port = host[1], rpc_url = self.conf('rpc_url'), username = self.conf('username'), password = self.conf('password'))
|
||||
|
||||
return self.trpc
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
if not data: data = {}
|
||||
def download(self, data, movie, filedata = None):
|
||||
|
||||
log.info('Sending "%s" (%s) to Transmission.', (data.get('name'), data.get('protocol')))
|
||||
|
||||
@@ -85,17 +81,12 @@ class Transmission(DownloaderBase):
|
||||
log.info('Torrent sent to Transmission successfully.')
|
||||
return self.downloadReturnId(remote_torrent['torrent-added']['hashString'])
|
||||
|
||||
def test(self):
|
||||
if self.connect() and self.trpc.get_session():
|
||||
return True
|
||||
return False
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
def getAllDownloadStatus(self):
|
||||
|
||||
log.debug('Checking Transmission download status.')
|
||||
|
||||
if not self.connect():
|
||||
return []
|
||||
return False
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
@@ -103,44 +94,37 @@ class Transmission(DownloaderBase):
|
||||
'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
|
||||
}
|
||||
|
||||
session = self.trpc.get_session()
|
||||
queue = self.trpc.get_alltorrents(return_params)
|
||||
if not (queue and queue.get('torrents')):
|
||||
log.debug('Nothing in queue or error')
|
||||
return []
|
||||
return False
|
||||
|
||||
for torrent in queue['torrents']:
|
||||
if torrent['hashString'] in ids:
|
||||
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s',
|
||||
(torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished'], session['incomplete-dir-enabled'], session['incomplete-dir']))
|
||||
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / eta=%s / uploadRatio=%s / isFinished=%s',
|
||||
(torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent['eta'], torrent['uploadRatio'], torrent['isFinished']))
|
||||
|
||||
status = 'busy'
|
||||
if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'):
|
||||
status = 'failed'
|
||||
elif torrent['status'] == 0 and torrent['percentDone'] == 1:
|
||||
status = 'completed'
|
||||
elif torrent['status'] in [5, 6]:
|
||||
status = 'seeding'
|
||||
torrent_files = []
|
||||
for file_item in torrent['files']:
|
||||
torrent_files.append(sp(os.path.join(torrent['downloadDir'], file_item['name'])))
|
||||
|
||||
if session['incomplete-dir-enabled'] and status == 'busy':
|
||||
torrent_folder = session['incomplete-dir']
|
||||
else:
|
||||
torrent_folder = torrent['downloadDir']
|
||||
status = 'busy'
|
||||
if torrent.get('isStalled') and self.conf('stalled_as_failed'):
|
||||
status = 'failed'
|
||||
elif torrent['status'] == 0 and torrent['percentDone'] == 1:
|
||||
status = 'completed'
|
||||
elif torrent['status'] in [5, 6]:
|
||||
status = 'seeding'
|
||||
|
||||
torrent_files = []
|
||||
for file_item in torrent['files']:
|
||||
torrent_files.append(sp(os.path.join(torrent_folder, file_item['name'])))
|
||||
|
||||
release_downloads.append({
|
||||
'id': torrent['hashString'],
|
||||
'name': torrent['name'],
|
||||
'status': status,
|
||||
'original_status': torrent['status'],
|
||||
'seed_ratio': torrent['uploadRatio'],
|
||||
'timeleft': str(timedelta(seconds = torrent['eta'])),
|
||||
'folder': sp(torrent_folder if len(torrent_files) == 1 else os.path.join(torrent_folder, torrent['name'])),
|
||||
'files': torrent_files
|
||||
})
|
||||
release_downloads.append({
|
||||
'id': torrent['hashString'],
|
||||
'name': torrent['name'],
|
||||
'status': status,
|
||||
'original_status': torrent['status'],
|
||||
'seed_ratio': torrent['uploadRatio'],
|
||||
'timeleft': str(timedelta(seconds = torrent['eta'])),
|
||||
'folder': sp(torrent['downloadDir'] if len(torrent_files) == 1 else os.path.join(torrent['downloadDir'], torrent['name'])),
|
||||
'files': '|'.join(torrent_files)
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
@@ -158,22 +142,21 @@ class Transmission(DownloaderBase):
|
||||
log.debug('Requesting Transmission to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
|
||||
return self.trpc.remove_torrent(release_download['id'], delete_files)
|
||||
|
||||
|
||||
class TransmissionRPC(object):
|
||||
|
||||
"""TransmissionRPC lite library"""
|
||||
def __init__(self, host = 'http://localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
|
||||
def __init__(self, host = 'localhost', port = 9091, rpc_url = 'transmission', username = None, password = None):
|
||||
|
||||
super(TransmissionRPC, self).__init__()
|
||||
|
||||
self.url = host + ':' + str(port) + '/' + rpc_url + '/rpc'
|
||||
self.url = 'http://' + host + ':' + str(port) + '/' + rpc_url + '/rpc'
|
||||
self.tag = 0
|
||||
self.session_id = 0
|
||||
self.session = {}
|
||||
if username and password:
|
||||
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
password_manager.add_password(realm = 'Transmission', uri = self.url, user = username, passwd = password)
|
||||
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager))
|
||||
password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
|
||||
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager), urllib2.HTTPDigestAuthHandler(password_manager))
|
||||
opener.addheaders = [('User-agent', 'couchpotato-transmission-client/1.0')]
|
||||
urllib2.install_opener(opener)
|
||||
elif username or password:
|
||||
@@ -195,10 +178,10 @@ class TransmissionRPC(object):
|
||||
else:
|
||||
log.debug('Unknown failure sending command to Transmission. Return text is: %s', response['result'])
|
||||
return False
|
||||
except httplib.InvalidURL as err:
|
||||
except httplib.InvalidURL, err:
|
||||
log.error('Invalid Transmission host, check your config %s', err)
|
||||
return False
|
||||
except urllib2.HTTPError as err:
|
||||
except urllib2.HTTPError, err:
|
||||
if err.code == 401:
|
||||
log.error('Invalid Transmission Username or Password, check your config')
|
||||
return False
|
||||
@@ -216,7 +199,7 @@ class TransmissionRPC(object):
|
||||
log.error('Unable to get Transmission Session-Id %s', err)
|
||||
else:
|
||||
log.error('TransmissionRPC HTTPError: %s', err)
|
||||
except urllib2.URLError as err:
|
||||
except urllib2.URLError, err:
|
||||
log.error('Unable to connect to Transmission %s', err)
|
||||
|
||||
def get_session(self):
|
||||
@@ -254,93 +237,3 @@ class TransmissionRPC(object):
|
||||
post_data = {'arguments': {'ids': torrent_id, 'delete-local-data': delete_local_data}, 'method': 'torrent-remove', 'tag': self.tag}
|
||||
return self._request(post_data)
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'transmission',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'transmission',
|
||||
'label': 'Transmission',
|
||||
'description': 'Use <a href="http://www.transmissionbt.com/" target="_blank">Transmission</a> to download torrents.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'http://localhost:9091',
|
||||
'description': 'Hostname with port. Usually <strong>http://localhost:9091</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'rpc_url',
|
||||
'type': 'string',
|
||||
'default': 'transmission',
|
||||
'advanced': True,
|
||||
'description': 'Change if you don\'t run Transmission RPC at the default url.',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'directory',
|
||||
'type': 'directory',
|
||||
'description': 'Download to this directory. Keep empty for default Transmission download directory.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the torrent from Transmission after it finished seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'stalled_as_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Consider a stalled torrent as failed',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
79
couchpotato/core/downloaders/utorrent/__init__.py
Normal file
79
couchpotato/core/downloaders/utorrent/__init__.py
Normal file
@@ -0,0 +1,79 @@
|
||||
from .main import uTorrent
|
||||
|
||||
def start():
|
||||
return uTorrent()
|
||||
|
||||
config = [{
|
||||
'name': 'utorrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'utorrent',
|
||||
'label': 'uTorrent',
|
||||
'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> (3.0+) to download torrents.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:8000',
|
||||
'description': 'Hostname with port. Usually <strong>localhost:8000</strong>',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'label',
|
||||
'description': 'Label to add torrent as.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the torrent from uTorrent after it finished seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,6 +1,12 @@
|
||||
from base64 import b16encode, b32decode
|
||||
from bencode import bencode as benc, bdecode
|
||||
from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, ss, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, tryFloat
|
||||
from couchpotato.core.logger import CPLog
|
||||
from datetime import timedelta
|
||||
from hashlib import sha1
|
||||
from multipartpost import MultipartPostHandler
|
||||
import cookielib
|
||||
import httplib
|
||||
import json
|
||||
@@ -11,37 +17,17 @@ import time
|
||||
import urllib
|
||||
import urllib2
|
||||
|
||||
from bencode import bencode as benc, bdecode
|
||||
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
|
||||
from couchpotato.core.helpers.encoding import isInt, ss, sp
|
||||
from couchpotato.core.helpers.variable import tryInt, tryFloat, cleanHost
|
||||
from couchpotato.core.logger import CPLog
|
||||
from multipartpost import MultipartPostHandler
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
autoload = 'uTorrent'
|
||||
|
||||
|
||||
class uTorrent(DownloaderBase):
|
||||
class uTorrent(Downloader):
|
||||
|
||||
protocol = ['torrent', 'torrent_magnet']
|
||||
utorrent_api = None
|
||||
status_flags = {
|
||||
'STARTED': 1,
|
||||
'CHECKING': 2,
|
||||
'CHECK-START': 4,
|
||||
'CHECKED': 8,
|
||||
'ERROR': 16,
|
||||
'PAUSED': 32,
|
||||
'QUEUED': 64,
|
||||
'LOADED': 128
|
||||
}
|
||||
|
||||
def connect(self):
|
||||
# Load host from config and split out port.
|
||||
host = cleanHost(self.conf('host'), protocol = False).split(':')
|
||||
host = self.conf('host').split(':')
|
||||
if not isInt(host[1]):
|
||||
log.error('Config properties are not filled in correctly, port is missing.')
|
||||
return False
|
||||
@@ -50,11 +36,11 @@ class uTorrent(DownloaderBase):
|
||||
|
||||
return self.utorrent_api
|
||||
|
||||
def download(self, data = None, media = None, filedata = None):
|
||||
if not media: media = {}
|
||||
def download(self, data = None, movie = None, filedata = None):
|
||||
if not movie: movie = {}
|
||||
if not data: data = {}
|
||||
|
||||
log.debug("Sending '%s' (%s) to uTorrent.", (data.get('name'), data.get('protocol')))
|
||||
log.debug('Sending "%s" (%s) to uTorrent.', (data.get('name'), data.get('protocol')))
|
||||
|
||||
if not self.connect():
|
||||
return False
|
||||
@@ -70,7 +56,7 @@ class uTorrent(DownloaderBase):
|
||||
new_settings['seed_prio_limitul_flag'] = True
|
||||
log.info('Updated uTorrent settings to set a torrent to complete after it the seeding requirements are met.')
|
||||
|
||||
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
|
||||
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
|
||||
new_settings['bt.read_only_on_complete'] = False
|
||||
log.info('Updated uTorrent settings to not set the files to read only after completing.')
|
||||
|
||||
@@ -89,10 +75,9 @@ class uTorrent(DownloaderBase):
|
||||
torrent_hash = re.findall('urn:btih:([\w]{32,40})', data.get('url'))[0].upper()
|
||||
torrent_params['trackers'] = '%0D%0A%0D%0A'.join(self.torrent_trackers)
|
||||
else:
|
||||
info = bdecode(filedata)['info']
|
||||
info = bdecode(filedata)["info"]
|
||||
torrent_hash = sha1(benc(info)).hexdigest().upper()
|
||||
|
||||
torrent_filename = self.createFileName(data, filedata, media)
|
||||
torrent_filename = self.createFileName(data, filedata, movie)
|
||||
|
||||
if data.get('seed_ratio'):
|
||||
torrent_params['seed_override'] = 1
|
||||
@@ -119,73 +104,72 @@ class uTorrent(DownloaderBase):
|
||||
|
||||
return self.downloadReturnId(torrent_hash)
|
||||
|
||||
def test(self):
|
||||
if self.connect():
|
||||
build_version = self.utorrent_api.get_build()
|
||||
if not build_version:
|
||||
return False
|
||||
if build_version < 25406: # This build corresponds to version 3.0.0 stable
|
||||
return False, 'Your uTorrent client is too old, please update to newest version.'
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def getAllDownloadStatus(self, ids):
|
||||
def getAllDownloadStatus(self):
|
||||
|
||||
log.debug('Checking uTorrent download status.')
|
||||
|
||||
if not self.connect():
|
||||
return []
|
||||
return False
|
||||
|
||||
release_downloads = ReleaseDownloadList(self)
|
||||
|
||||
data = self.utorrent_api.get_status()
|
||||
if not data:
|
||||
log.error('Error getting data from uTorrent')
|
||||
return []
|
||||
return False
|
||||
|
||||
queue = json.loads(data)
|
||||
if queue.get('error'):
|
||||
log.error('Error getting data from uTorrent: %s', queue.get('error'))
|
||||
return []
|
||||
return False
|
||||
|
||||
if not queue.get('torrents'):
|
||||
log.debug('Nothing in queue')
|
||||
return []
|
||||
return False
|
||||
|
||||
# Get torrents
|
||||
for torrent in queue['torrents']:
|
||||
if torrent[0] in ids:
|
||||
|
||||
#Get files of the torrent
|
||||
torrent_files = []
|
||||
try:
|
||||
torrent_files = json.loads(self.utorrent_api.get_files(torrent[0]))
|
||||
torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
|
||||
except:
|
||||
log.debug('Failed getting files from torrent: %s', torrent[2])
|
||||
#Get files of the torrent
|
||||
torrent_files = []
|
||||
try:
|
||||
torrent_files = json.loads(self.utorrent_api.get_files(torrent[0]))
|
||||
torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
|
||||
except:
|
||||
log.debug('Failed getting files from torrent: %s', torrent[2])
|
||||
|
||||
status = 'busy'
|
||||
if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000:
|
||||
status = 'seeding'
|
||||
elif torrent[1] & self.status_flags['ERROR']:
|
||||
status = 'failed'
|
||||
elif torrent[4] == 1000:
|
||||
status = 'completed'
|
||||
status_flags = {
|
||||
"STARTED" : 1,
|
||||
"CHECKING" : 2,
|
||||
"CHECK-START" : 4,
|
||||
"CHECKED" : 8,
|
||||
"ERROR" : 16,
|
||||
"PAUSED" : 32,
|
||||
"QUEUED" : 64,
|
||||
"LOADED" : 128
|
||||
}
|
||||
|
||||
if not status == 'busy':
|
||||
self.removeReadOnly(torrent_files)
|
||||
status = 'busy'
|
||||
if (torrent[1] & status_flags["STARTED"] or torrent[1] & status_flags["QUEUED"]) and torrent[4] == 1000:
|
||||
status = 'seeding'
|
||||
elif (torrent[1] & status_flags["ERROR"]):
|
||||
status = 'failed'
|
||||
elif torrent[4] == 1000:
|
||||
status = 'completed'
|
||||
|
||||
release_downloads.append({
|
||||
'id': torrent[0],
|
||||
'name': torrent[2],
|
||||
'status': status,
|
||||
'seed_ratio': float(torrent[7]) / 1000,
|
||||
'original_status': torrent[1],
|
||||
'timeleft': str(timedelta(seconds = torrent[10])),
|
||||
'folder': sp(torrent[26]),
|
||||
'files': torrent_files
|
||||
})
|
||||
if not status == 'busy':
|
||||
self.removeReadOnly(torrent_files)
|
||||
|
||||
release_downloads.append({
|
||||
'id': torrent[0],
|
||||
'name': torrent[2],
|
||||
'status': status,
|
||||
'seed_ratio': float(torrent[7]) / 1000,
|
||||
'original_status': torrent[1],
|
||||
'timeleft': str(timedelta(seconds = torrent[10])),
|
||||
'folder': sp(torrent[26]),
|
||||
'files': '|'.join(torrent_files)
|
||||
})
|
||||
|
||||
return release_downloads
|
||||
|
||||
@@ -229,6 +213,7 @@ class uTorrentAPI(object):
|
||||
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
|
||||
password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password)
|
||||
self.opener.add_handler(urllib2.HTTPBasicAuthHandler(password_manager))
|
||||
self.opener.add_handler(urllib2.HTTPDigestAuthHandler(password_manager))
|
||||
elif username or password:
|
||||
log.debug('User or password missing, not using authentication.')
|
||||
self.token = self.get_token()
|
||||
@@ -237,7 +222,7 @@ class uTorrentAPI(object):
|
||||
if time.time() > self.last_time + 1800:
|
||||
self.last_time = time.time()
|
||||
self.token = self.get_token()
|
||||
request = urllib2.Request(self.url + '?token=' + self.token + '&' + action, data)
|
||||
request = urllib2.Request(self.url + "?token=" + self.token + "&" + action, data)
|
||||
try:
|
||||
open_request = self.opener.open(request)
|
||||
response = open_request.read()
|
||||
@@ -245,64 +230,64 @@ class uTorrentAPI(object):
|
||||
return response
|
||||
else:
|
||||
log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
|
||||
except httplib.InvalidURL as err:
|
||||
except httplib.InvalidURL, err:
|
||||
log.error('Invalid uTorrent host, check your config %s', err)
|
||||
except urllib2.HTTPError as err:
|
||||
except urllib2.HTTPError, err:
|
||||
if err.code == 401:
|
||||
log.error('Invalid uTorrent Username or Password, check your config')
|
||||
else:
|
||||
log.error('uTorrent HTTPError: %s', err)
|
||||
except urllib2.URLError as err:
|
||||
except urllib2.URLError, err:
|
||||
log.error('Unable to connect to uTorrent %s', err)
|
||||
return False
|
||||
|
||||
def get_token(self):
|
||||
request = self.opener.open(self.url + 'token.html')
|
||||
token = re.findall('<div.*?>(.*?)</', request.read())[0]
|
||||
request = self.opener.open(self.url + "token.html")
|
||||
token = re.findall("<div.*?>(.*?)</", request.read())[0]
|
||||
return token
|
||||
|
||||
def add_torrent_uri(self, filename, torrent, add_folder = False):
|
||||
action = 'action=add-url&s=%s' % urllib.quote(torrent)
|
||||
action = "action=add-url&s=%s" % urllib.quote(torrent)
|
||||
if add_folder:
|
||||
action += '&path=%s' % urllib.quote(filename)
|
||||
action += "&path=%s" % urllib.quote(filename)
|
||||
return self._request(action)
|
||||
|
||||
def add_torrent_file(self, filename, filedata, add_folder = False):
|
||||
action = 'action=add-file'
|
||||
action = "action=add-file"
|
||||
if add_folder:
|
||||
action += '&path=%s' % urllib.quote(filename)
|
||||
return self._request(action, {'torrent_file': (ss(filename), filedata)})
|
||||
action += "&path=%s" % urllib.quote(filename)
|
||||
return self._request(action, {"torrent_file": (ss(filename), filedata)})
|
||||
|
||||
def set_torrent(self, hash, params):
|
||||
action = 'action=setprops&hash=%s' % hash
|
||||
for k, v in params.items():
|
||||
action += '&s=%s&v=%s' % (k, v)
|
||||
action = "action=setprops&hash=%s" % hash
|
||||
for k, v in params.iteritems():
|
||||
action += "&s=%s&v=%s" % (k, v)
|
||||
return self._request(action)
|
||||
|
||||
def pause_torrent(self, hash, pause = True):
|
||||
if pause:
|
||||
action = 'action=pause&hash=%s' % hash
|
||||
action = "action=pause&hash=%s" % hash
|
||||
else:
|
||||
action = 'action=unpause&hash=%s' % hash
|
||||
action = "action=unpause&hash=%s" % hash
|
||||
return self._request(action)
|
||||
|
||||
def stop_torrent(self, hash):
|
||||
action = 'action=stop&hash=%s' % hash
|
||||
action = "action=stop&hash=%s" % hash
|
||||
return self._request(action)
|
||||
|
||||
def remove_torrent(self, hash, remove_data = False):
|
||||
if remove_data:
|
||||
action = 'action=removedata&hash=%s' % hash
|
||||
action = "action=removedata&hash=%s" % hash
|
||||
else:
|
||||
action = 'action=remove&hash=%s' % hash
|
||||
action = "action=remove&hash=%s" % hash
|
||||
return self._request(action)
|
||||
|
||||
def get_status(self):
|
||||
action = 'list=1'
|
||||
action = "list=1"
|
||||
return self._request(action)
|
||||
|
||||
def get_settings(self):
|
||||
action = 'action=getsettings'
|
||||
action = "action=getsettings"
|
||||
settings_dict = {}
|
||||
try:
|
||||
utorrent_settings = json.loads(self._request(action))
|
||||
@@ -318,7 +303,7 @@ class uTorrentAPI(object):
|
||||
|
||||
#log.debug('uTorrent settings: %s', settings_dict)
|
||||
|
||||
except Exception as err:
|
||||
except Exception, err:
|
||||
log.error('Failed to get settings from uTorrent: %s', err)
|
||||
|
||||
return settings_dict
|
||||
@@ -334,88 +319,5 @@ class uTorrentAPI(object):
|
||||
return self._request(action)
|
||||
|
||||
def get_files(self, hash):
|
||||
action = 'action=getfiles&hash=%s' % hash
|
||||
action = "action=getfiles&hash=%s" % hash
|
||||
return self._request(action)
|
||||
|
||||
def get_build(self):
|
||||
data = self._request('')
|
||||
if not data:
|
||||
return False
|
||||
response = json.loads(data)
|
||||
return int(response.get('build'))
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'utorrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'downloaders',
|
||||
'list': 'download_providers',
|
||||
'name': 'utorrent',
|
||||
'label': 'uTorrent',
|
||||
'description': 'Use <a href="http://www.utorrent.com/" target="_blank">uTorrent</a> (3.0+) to download torrents.',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'default': 0,
|
||||
'type': 'enabler',
|
||||
'radio_group': 'torrent',
|
||||
},
|
||||
{
|
||||
'name': 'host',
|
||||
'default': 'localhost:8000',
|
||||
'description': 'Port can be found in settings when enabling WebUI.',
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'label',
|
||||
'description': 'Label to add torrent as.',
|
||||
},
|
||||
{
|
||||
'name': 'remove_complete',
|
||||
'label': 'Remove torrent',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Remove the torrent from uTorrent after it finished seeding.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_files',
|
||||
'label': 'Remove files',
|
||||
'default': True,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Also remove the leftover files.',
|
||||
},
|
||||
{
|
||||
'name': 'paused',
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'default': False,
|
||||
'description': 'Add the torrent paused.',
|
||||
},
|
||||
{
|
||||
'name': 'manual',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'advanced': True,
|
||||
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
|
||||
},
|
||||
{
|
||||
'name': 'delete_failed',
|
||||
'default': True,
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'description': 'Delete a release after the download has failed.',
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
}]
|
||||
@@ -1,15 +1,12 @@
|
||||
from axl.axel import Event
|
||||
from couchpotato.core.helpers.variable import mergeDicts, natcmp
|
||||
from couchpotato.core.logger import CPLog
|
||||
import threading
|
||||
import traceback
|
||||
|
||||
from axl.axel import Event
|
||||
from couchpotato.core.helpers.variable import mergeDicts, natsortKey
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
events = {}
|
||||
|
||||
|
||||
def runHandler(name, handler, *args, **kwargs):
|
||||
try:
|
||||
return handler(*args, **kwargs)
|
||||
@@ -17,7 +14,6 @@ def runHandler(name, handler, *args, **kwargs):
|
||||
from couchpotato.environment import Env
|
||||
log.error('Error in event "%s", that wasn\'t caught: %s%s', (name, traceback.format_exc(), Env.all() if not Env.get('dev') else ''))
|
||||
|
||||
|
||||
def addEvent(name, handler, priority = 100):
|
||||
|
||||
if not events.get(name):
|
||||
@@ -31,7 +27,7 @@ def addEvent(name, handler, priority = 100):
|
||||
has_parent = hasattr(handler, 'im_self')
|
||||
parent = None
|
||||
if has_parent:
|
||||
parent = handler.__self__
|
||||
parent = handler.im_self
|
||||
bc = hasattr(parent, 'beforeCall')
|
||||
if bc: parent.beforeCall(handler)
|
||||
|
||||
@@ -52,19 +48,22 @@ def addEvent(name, handler, priority = 100):
|
||||
'priority': priority,
|
||||
})
|
||||
|
||||
def removeEvent(name, handler):
|
||||
e = events[name]
|
||||
e -= handler
|
||||
|
||||
def fireEvent(name, *args, **kwargs):
|
||||
if name not in events: return
|
||||
if not events.has_key(name): return
|
||||
|
||||
#log.debug('Firing event %s', name)
|
||||
try:
|
||||
|
||||
options = {
|
||||
'is_after_event': False, # Fire after event
|
||||
'on_complete': False, # onComplete event
|
||||
'single': False, # Return single handler
|
||||
'merge': False, # Merge items
|
||||
'in_order': False, # Fire them in specific order, waits for the other to finish
|
||||
'is_after_event': False, # Fire after event
|
||||
'on_complete': False, # onComplete event
|
||||
'single': False, # Return single handler
|
||||
'merge': False, # Merge items
|
||||
'in_order': False, # Fire them in specific order, waits for the other to finish
|
||||
}
|
||||
|
||||
# Do options
|
||||
@@ -90,7 +89,7 @@ def fireEvent(name, *args, **kwargs):
|
||||
|
||||
else:
|
||||
|
||||
e = Event(name = name, threads = 10, exc_info = True, traceback = True)
|
||||
e = Event(name = name, threads = 10, exc_info = True, traceback = True, lock = threading.RLock())
|
||||
|
||||
for event in events[name]:
|
||||
e.handle(event['handler'], priority = event['priority'])
|
||||
@@ -102,14 +101,11 @@ def fireEvent(name, *args, **kwargs):
|
||||
# Fire
|
||||
result = e(*args, **kwargs)
|
||||
|
||||
result_keys = list(result.keys())
|
||||
result_keys.sort(key = natsortKey)
|
||||
|
||||
if options['single'] and not options['merge']:
|
||||
results = None
|
||||
|
||||
# Loop over results, stop when first not None result is found.
|
||||
for r_key in result_keys:
|
||||
for r_key in sorted(result.iterkeys(), cmp = natcmp):
|
||||
r = result[r_key]
|
||||
if r[0] is True and r[1] is not None:
|
||||
results = r[1]
|
||||
@@ -121,7 +117,7 @@ def fireEvent(name, *args, **kwargs):
|
||||
|
||||
else:
|
||||
results = []
|
||||
for r_key in result_keys:
|
||||
for r_key in sorted(result.iterkeys(), cmp = natcmp):
|
||||
r = result[r_key]
|
||||
if r[0] == True and r[1]:
|
||||
results.append(r[1])
|
||||
@@ -164,21 +160,18 @@ def fireEvent(name, *args, **kwargs):
|
||||
except Exception:
|
||||
log.error('%s: %s', (name, traceback.format_exc()))
|
||||
|
||||
|
||||
def fireEventAsync(*args, **kwargs):
|
||||
try:
|
||||
t = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
|
||||
t.setDaemon(True)
|
||||
t.start()
|
||||
return True
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
log.error('%s: %s', (args[0], e))
|
||||
|
||||
|
||||
def errorHandler(error):
|
||||
etype, value, tb = error
|
||||
log.error(''.join(traceback.format_exception(etype, value, tb)))
|
||||
|
||||
|
||||
def getEvent(name):
|
||||
return events[name]
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
|
||||
from six import PY2
|
||||
|
||||
|
||||
if PY2:
|
||||
from CodernityDB.database_super_thread_safe import SuperThreadSafeDatabase
|
||||
from CodernityDB.index import IndexException, IndexConflict, IndexNotFoundException
|
||||
from CodernityDB.database import RecordNotFound, RecordDeleted
|
||||
from CodernityDB.hash_index import HashIndex
|
||||
from CodernityDB.tree_index import MultiTreeBasedIndex, TreeBasedIndex
|
||||
else:
|
||||
from CodernityDB3.database_super_thread_safe import SuperThreadSafeDatabase
|
||||
from CodernityDB3.index import IndexException, IndexConflict, IndexNotFoundException
|
||||
from CodernityDB3.database import RecordNotFound, RecordDeleted
|
||||
from CodernityDB3.hash_index import HashIndex
|
||||
from CodernityDB3.tree_index import MultiTreeBasedIndex, TreeBasedIndex
|
||||
|
||||
SuperThreadSafeDatabase = SuperThreadSafeDatabase
|
||||
IndexException = IndexException
|
||||
IndexNotFoundException = IndexNotFoundException
|
||||
IndexConflict = IndexConflict
|
||||
RecordNotFound = RecordNotFound
|
||||
HashIndex = HashIndex
|
||||
MultiTreeBasedIndex = MultiTreeBasedIndex
|
||||
TreeBasedIndex = TreeBasedIndex
|
||||
RecordDeleted = RecordDeleted
|
||||
@@ -1,44 +1,35 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from string import ascii_letters, digits
|
||||
from urllib import quote_plus
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
import unicodedata
|
||||
|
||||
from chardet import detect
|
||||
from six.moves import urllib
|
||||
from couchpotato.core.logger import CPLog
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
def toSafeString(original):
|
||||
valid_chars = "-_.() %s%s" % (ascii_letters, digits)
|
||||
cleaned_filename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
|
||||
valid_string = ''.join(list(six.unichr(c) for c in cleaned_filename if six.unichr(c) in valid_chars))
|
||||
cleanedFilename = unicodedata.normalize('NFKD', toUnicode(original)).encode('ASCII', 'ignore')
|
||||
valid_string = ''.join(c for c in cleanedFilename if c in valid_chars)
|
||||
return ' '.join(valid_string.split())
|
||||
|
||||
|
||||
def simplifyString(original):
|
||||
string = stripAccents(original.lower())
|
||||
string = toSafeString(' '.join(re.split('\W+', string)))
|
||||
split = re.split('\W+|_', string.lower())
|
||||
return toUnicode(' '.join(split))
|
||||
|
||||
|
||||
def toUnicode(original, *args):
|
||||
try:
|
||||
if isinstance(original, six.text_type):
|
||||
if isinstance(original, unicode):
|
||||
return original
|
||||
else:
|
||||
try:
|
||||
return six.text_type(original, *args)
|
||||
return unicode(original, *args)
|
||||
except:
|
||||
try:
|
||||
detected = detect(original)
|
||||
if detected.get('encoding') == 'utf-8':
|
||||
return original.decode('utf-8')
|
||||
return ek(original, *args)
|
||||
except:
|
||||
raise
|
||||
@@ -47,62 +38,19 @@ def toUnicode(original, *args):
|
||||
ascii_text = str(original).encode('string_escape')
|
||||
return toUnicode(ascii_text)
|
||||
|
||||
def toUTF8(original):
|
||||
try:
|
||||
if isinstance(original, six.binary_type) and len(original) > 0:
|
||||
# Try to detect
|
||||
detected = detect(original)
|
||||
return original.decode(detected.get('encoding')).encode('utf-8')
|
||||
else:
|
||||
return original
|
||||
except:
|
||||
#log.error('Failed encoding to UTF8: %s', traceback.format_exc())
|
||||
raise
|
||||
|
||||
def ss(original, *args):
|
||||
|
||||
u_original = toUnicode(original, *args)
|
||||
try:
|
||||
if isinstance(u_original, six.text_type):
|
||||
u_original = u_original.encode('unicode_escape')
|
||||
else:
|
||||
u_original = u_original
|
||||
|
||||
return six.u(u_original)
|
||||
except Exception as e:
|
||||
from couchpotato.environment import Env
|
||||
return u_original.encode(Env.get('encoding'))
|
||||
except Exception, e:
|
||||
log.debug('Failed ss encoding char, force UTF8: %s', e)
|
||||
try:
|
||||
from couchpotato.environment import Env
|
||||
return u_original.encode(Env.get('encoding'), 'replace')
|
||||
except:
|
||||
return u_original.encode('utf-8', 'replace')
|
||||
|
||||
return u_original.encode('UTF-8')
|
||||
|
||||
def sp(path, *args):
|
||||
|
||||
# Standardise encoding, normalise case, path and strip trailing '/' or '\'
|
||||
if not path or len(path) == 0:
|
||||
return path
|
||||
|
||||
# convert windows path (from remote box) to *nix path
|
||||
if os.path.sep == '/' and '\\' in path:
|
||||
path = '/' + path.replace(':', '').replace('\\', '/')
|
||||
|
||||
path = os.path.normpath(path)
|
||||
|
||||
# Remove any trailing path separators
|
||||
if path != os.path.sep:
|
||||
path = path.rstrip(os.path.sep)
|
||||
|
||||
# Add a trailing separator in case it is a root folder on windows (crashes guessit)
|
||||
if len(path) == 2 and path[1] == ':':
|
||||
path = path + os.path.sep
|
||||
|
||||
# Replace *NIX ambiguous '//' at the beginning of a path with '/' (crashes guessit)
|
||||
path = re.sub('^//', '/', path)
|
||||
|
||||
return path
|
||||
|
||||
return os.path.normcase(os.path.normpath(ss(path, *args))).rstrip(os.path.sep)
|
||||
|
||||
def ek(original, *args):
|
||||
if isinstance(original, (str, unicode)):
|
||||
@@ -114,7 +62,6 @@ def ek(original, *args):
|
||||
|
||||
return original
|
||||
|
||||
|
||||
def isInt(value):
|
||||
try:
|
||||
int(value)
|
||||
@@ -122,23 +69,20 @@ def isInt(value):
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
def stripAccents(s):
|
||||
return ''.join((c for c in unicodedata.normalize('NFD', toUnicode(s)) if unicodedata.category(c) != 'Mn'))
|
||||
|
||||
|
||||
def tryUrlencode(s):
|
||||
new = six.u('')
|
||||
new = u''
|
||||
if isinstance(s, dict):
|
||||
for key, value in list(s.items()):
|
||||
new += six.u('&%s=%s') % (key, tryUrlencode(value))
|
||||
for key, value in s.iteritems():
|
||||
new += u'&%s=%s' % (key, tryUrlencode(value))
|
||||
|
||||
return new[1:]
|
||||
else:
|
||||
for letter in ss(s):
|
||||
letter = six.unichr(letter)
|
||||
try:
|
||||
new += urllib.parse.quote_plus(letter)
|
||||
new += quote_plus(letter)
|
||||
except:
|
||||
new += letter
|
||||
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
import os
|
||||
from chardet import detect
|
||||
from couchpotato import Env
|
||||
|
||||
fs_enc = Env.get('fs_encoding')
|
||||
|
||||
|
||||
def list_dir(path, full_path = True):
|
||||
"""
|
||||
List directory don't error when it doesn't exist
|
||||
"""
|
||||
|
||||
path = unicode_path(path)
|
||||
|
||||
if os.path.isdir(path):
|
||||
for f in os.listdir(path):
|
||||
if full_path:
|
||||
yield join(path, f)
|
||||
else:
|
||||
yield f
|
||||
|
||||
|
||||
def join(*args):
|
||||
"""
|
||||
Join path, encode properly before joining
|
||||
"""
|
||||
|
||||
return os.path.join(*[safe(x) for x in args])
|
||||
|
||||
|
||||
def unicode_path(path):
|
||||
"""
|
||||
Convert back to unicode
|
||||
:param path: path string
|
||||
"""
|
||||
|
||||
if isinstance(path, str):
|
||||
detected = detect(path)
|
||||
print detected
|
||||
path = path.decode(detected.get('encoding'))
|
||||
path = path.decode('unicode_escape')
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def safe(path):
|
||||
|
||||
if isinstance(path, unicode):
|
||||
return path.encode('unicode_escape')
|
||||
|
||||
return path
|
||||
@@ -1,2 +0,0 @@
|
||||
class NotSupported(Exception):
|
||||
pass
|
||||
@@ -1,21 +1,15 @@
|
||||
import re
|
||||
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from six.moves import urllib
|
||||
from couchpotato.core.helpers.variable import natsortKey
|
||||
from couchpotato.core.helpers.variable import natcmp
|
||||
from urllib import unquote
|
||||
import re
|
||||
|
||||
|
||||
def getParams(params):
|
||||
|
||||
reg = re.compile('^[a-z0-9_\.]+$')
|
||||
|
||||
# Sort keys
|
||||
param_keys = list(params.keys())
|
||||
param_keys.sort(key = natsortKey)
|
||||
|
||||
temp = {}
|
||||
for param in param_keys:
|
||||
value = params[param]
|
||||
for param, value in sorted(params.iteritems()):
|
||||
|
||||
nest = re.split("([\[\]]+)", param)
|
||||
if len(nest) > 1:
|
||||
@@ -28,7 +22,7 @@ def getParams(params):
|
||||
|
||||
for item in nested:
|
||||
if item is nested[-1]:
|
||||
current[item] = toUnicode(urllib.parse.unquote(value))
|
||||
current[item] = toUnicode(unquote(value))
|
||||
else:
|
||||
try:
|
||||
current[item]
|
||||
@@ -37,32 +31,19 @@ def getParams(params):
|
||||
|
||||
current = current[item]
|
||||
else:
|
||||
temp[param] = toUnicode(urllib.parse.unquote(value))
|
||||
temp[param] = toUnicode(unquote(value))
|
||||
if temp[param].lower() in ['true', 'false']:
|
||||
temp[param] = temp[param].lower() != 'false'
|
||||
|
||||
return dictToList(temp)
|
||||
|
||||
non_decimal = re.compile(r'[^\d.]+')
|
||||
|
||||
def dictToList(params):
|
||||
|
||||
if type(params) is dict:
|
||||
new = {}
|
||||
for x, value in params.items():
|
||||
for x, value in params.iteritems():
|
||||
try:
|
||||
convert = lambda text: int(text) if text.isdigit() else text.lower()
|
||||
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
|
||||
sorted_keys = sorted(value.keys(), key = alphanum_key)
|
||||
|
||||
all_ints = 0
|
||||
for pnr in sorted_keys:
|
||||
all_ints += 1 if non_decimal.sub('', pnr) == pnr else 0
|
||||
|
||||
if all_ints == len(sorted_keys):
|
||||
new_value = [dictToList(value[k]) for k in sorted_keys]
|
||||
else:
|
||||
new_value = value
|
||||
new_value = [dictToList(value[k]) for k in sorted(value.iterkeys(), cmp = natcmp)]
|
||||
except:
|
||||
new_value = value
|
||||
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
import xml.etree.ElementTree as XMLTree
|
||||
|
||||
from couchpotato.core.logger import CPLog
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class RSS(object):
|
||||
|
||||
def getTextElements(self, xml, path):
|
||||
@@ -49,6 +46,6 @@ class RSS(object):
|
||||
def getItems(self, data, path = 'channel/item'):
|
||||
try:
|
||||
return XMLTree.parse(data).findall(path)
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
log.error('Error parsing RSS. %s', e)
|
||||
return []
|
||||
|
||||
264
couchpotato/core/helpers/variable.py
Executable file → Normal file
264
couchpotato/core/helpers/variable.py
Executable file → Normal file
@@ -1,52 +1,38 @@
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss
|
||||
from couchpotato.core.logger import CPLog
|
||||
import collections
|
||||
import ctypes
|
||||
import hashlib
|
||||
import os
|
||||
import os.path
|
||||
import platform
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss, sp
|
||||
from couchpotato.core.logger import CPLog
|
||||
import six
|
||||
from six.moves import map, zip, filter
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
def fnEscape(pattern):
|
||||
return pattern.replace('[', '[[').replace(']', '[]]').replace('[[', '[[]')
|
||||
|
||||
|
||||
def link(src, dst):
|
||||
if os.name == 'nt':
|
||||
import ctypes
|
||||
if ctypes.windll.kernel32.CreateHardLinkW(six.text_type(dst), six.text_type(src), 0) == 0: raise ctypes.WinError()
|
||||
if ctypes.windll.kernel32.CreateHardLinkW(unicode(dst), unicode(src), 0) == 0: raise ctypes.WinError()
|
||||
else:
|
||||
os.link(src, dst)
|
||||
|
||||
|
||||
def symlink(src, dst):
|
||||
if os.name == 'nt':
|
||||
import ctypes
|
||||
if ctypes.windll.kernel32.CreateSymbolicLinkW(six.text_type(dst), six.text_type(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
|
||||
if ctypes.windll.kernel32.CreateSymbolicLinkW(unicode(dst), unicode(src), 1 if os.path.isdir(src) else 0) in [0, 1280]: raise ctypes.WinError()
|
||||
else:
|
||||
os.symlink(src, dst)
|
||||
|
||||
|
||||
def getUserDir():
|
||||
try:
|
||||
import pwd
|
||||
os.environ['HOME'] = sp(pwd.getpwuid(os.geteuid()).pw_dir)
|
||||
os.environ['HOME'] = pwd.getpwuid(os.geteuid()).pw_dir
|
||||
except:
|
||||
pass
|
||||
|
||||
return sp(os.path.expanduser('~'))
|
||||
|
||||
return os.path.expanduser('~')
|
||||
|
||||
def getDownloadDir():
|
||||
user_dir = getUserDir()
|
||||
@@ -60,7 +46,6 @@ def getDownloadDir():
|
||||
|
||||
return user_dir
|
||||
|
||||
|
||||
def getDataDir():
|
||||
|
||||
# Windows
|
||||
@@ -80,10 +65,8 @@ def getDataDir():
|
||||
# Linux
|
||||
return os.path.join(user_dir, '.couchpotato')
|
||||
|
||||
|
||||
def isDict(obj):
|
||||
return isinstance(obj, dict)
|
||||
|
||||
def isDict(object):
|
||||
return isinstance(object, dict)
|
||||
|
||||
def mergeDicts(a, b, prepend_list = False):
|
||||
assert isDict(a), isDict(b)
|
||||
@@ -105,7 +88,6 @@ def mergeDicts(a, b, prepend_list = False):
|
||||
current_dst[key] = current_src[key]
|
||||
return dst
|
||||
|
||||
|
||||
def removeListDuplicates(seq):
|
||||
checked = []
|
||||
for e in seq:
|
||||
@@ -113,73 +95,35 @@ def removeListDuplicates(seq):
|
||||
checked.append(e)
|
||||
return checked
|
||||
|
||||
|
||||
def flattenList(l):
|
||||
if isinstance(l, list):
|
||||
return sum(map(flattenList, l))
|
||||
else:
|
||||
return l
|
||||
|
||||
|
||||
def md5(text):
|
||||
return hashlib.md5(ss(text)).hexdigest()
|
||||
|
||||
|
||||
def sha1(text):
|
||||
return hashlib.sha1(text).hexdigest()
|
||||
|
||||
|
||||
def isLocalIP(ip):
|
||||
ip = ip.lstrip('htps:/')
|
||||
regex = '/(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1)$/'
|
||||
return re.search(regex, ip) is not None or 'localhost' in ip or ip[:4] == '127.'
|
||||
|
||||
|
||||
def getExt(filename):
|
||||
return os.path.splitext(filename)[1][1:]
|
||||
|
||||
def cleanHost(host):
|
||||
if not host.startswith(('http://', 'https://')):
|
||||
host = 'http://' + host
|
||||
|
||||
def cleanHost(host, protocol = True, ssl = False, username = None, password = None):
|
||||
"""Return a cleaned up host with given url options set
|
||||
|
||||
Changes protocol to https if ssl is set to True and http if ssl is set to false.
|
||||
>>> cleanHost("localhost:80", ssl=True)
|
||||
'https://localhost:80/'
|
||||
>>> cleanHost("localhost:80", ssl=False)
|
||||
'http://localhost:80/'
|
||||
|
||||
Username and password is managed with the username and password variables
|
||||
>>> cleanHost("localhost:80", username="user", password="passwd")
|
||||
'http://user:passwd@localhost:80/'
|
||||
|
||||
Output without scheme (protocol) can be forced with protocol=False
|
||||
>>> cleanHost("localhost:80", protocol=False)
|
||||
'localhost:80'
|
||||
"""
|
||||
|
||||
if not '://' in host and protocol:
|
||||
host = ('https://' if ssl else 'http://') + host
|
||||
|
||||
if not protocol:
|
||||
host = host.split('://', 1)[-1]
|
||||
|
||||
if protocol and username and password:
|
||||
try:
|
||||
auth = re.findall('^(?:.+?//)(.+?):(.+?)@(?:.+)$', host)
|
||||
if auth:
|
||||
log.error('Cleanhost error: auth already defined in url: %s, please remove BasicAuth from url.', host)
|
||||
else:
|
||||
host = host.replace('://', '://%s:%s@' % (username, password), 1)
|
||||
except:
|
||||
pass
|
||||
|
||||
host = host.rstrip('/ ')
|
||||
if protocol:
|
||||
host += '/'
|
||||
host = host.rstrip('/')
|
||||
host += '/'
|
||||
|
||||
return host
|
||||
|
||||
|
||||
def getImdb(txt, check_inside = False, multiple = False):
|
||||
|
||||
if not check_inside:
|
||||
@@ -196,7 +140,7 @@ def getImdb(txt, check_inside = False, multiple = False):
|
||||
ids = re.findall('(tt\d{4,7})', txt)
|
||||
|
||||
if multiple:
|
||||
return removeDuplicate(['tt%07d' % tryInt(x[2:]) for x in ids]) if len(ids) > 0 else []
|
||||
return list(set(['tt%07d' % tryInt(x[2:]) for x in ids])) if len(ids) > 0 else []
|
||||
|
||||
return 'tt%07d' % tryInt(ids[0][2:])
|
||||
except IndexError:
|
||||
@@ -204,12 +148,10 @@ def getImdb(txt, check_inside = False, multiple = False):
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def tryInt(s, default = 0):
|
||||
try: return int(s)
|
||||
except: return default
|
||||
|
||||
|
||||
def tryFloat(s):
|
||||
try:
|
||||
if isinstance(s, str):
|
||||
@@ -218,42 +160,38 @@ def tryFloat(s):
|
||||
return float(s)
|
||||
except: return 0
|
||||
|
||||
def natsortKey(s):
|
||||
return map(tryInt, re.findall(r'(\d+|\D+)', s))
|
||||
|
||||
def natsortKey(string_):
|
||||
"""See http://www.codinghorror.com/blog/archives/001018.html"""
|
||||
return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_)]
|
||||
|
||||
def natcmp(a, b):
|
||||
return cmp(natsortKey(a), natsortKey(b))
|
||||
|
||||
def toIterable(value):
|
||||
if isinstance(value, collections.Iterable):
|
||||
return value
|
||||
return [value]
|
||||
|
||||
|
||||
def getIdentifier(media):
|
||||
return media.get('identifier') or media.get('identifiers', {}).get('imdb')
|
||||
|
||||
|
||||
def getTitle(media_dict):
|
||||
def getTitle(library_dict):
|
||||
try:
|
||||
try:
|
||||
return media_dict['title']
|
||||
return library_dict['titles'][0]['title']
|
||||
except:
|
||||
try:
|
||||
return media_dict['titles'][0]
|
||||
for title in library_dict.titles:
|
||||
if title.default:
|
||||
return title.title
|
||||
except:
|
||||
try:
|
||||
return media_dict['info']['titles'][0]
|
||||
return library_dict['info']['titles'][0]
|
||||
except:
|
||||
try:
|
||||
return media_dict['media']['info']['titles'][0]
|
||||
except:
|
||||
log.error('Could not get title for %s', getIdentifier(media_dict))
|
||||
return None
|
||||
except:
|
||||
log.error('Could not get title for library item: %s', media_dict)
|
||||
return None
|
||||
log.error('Could not get title for %s', library_dict.identifier)
|
||||
return None
|
||||
|
||||
log.error('Could not get title for %s', library_dict['identifier'])
|
||||
return None
|
||||
except:
|
||||
log.error('Could not get title for library item: %s', library_dict)
|
||||
return None
|
||||
|
||||
def possibleTitles(raw_title):
|
||||
|
||||
@@ -267,146 +205,14 @@ def possibleTitles(raw_title):
|
||||
new_title = raw_title.replace('&', 'and')
|
||||
titles.append(simplifyString(new_title))
|
||||
|
||||
return removeDuplicate(titles)
|
||||
|
||||
return list(set(titles))
|
||||
|
||||
def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
|
||||
return ''.join(random.choice(chars) for x in range(size))
|
||||
|
||||
|
||||
def splitString(str, split_on = ',', clean = True):
|
||||
l = [x.strip() for x in str.split(split_on)] if str else []
|
||||
return removeEmpty(l) if clean else l
|
||||
|
||||
|
||||
def removeEmpty(l):
|
||||
return list(filter(None, l))
|
||||
|
||||
|
||||
def removeDuplicate(l):
|
||||
seen = set()
|
||||
return [x for x in l if x not in seen and not seen.add(x)]
|
||||
|
||||
list = [x.strip() for x in str.split(split_on)] if str else []
|
||||
return filter(None, list) if clean else list
|
||||
|
||||
def dictIsSubset(a, b):
|
||||
return all([k in b and b[k] == v for k, v in a.items()])
|
||||
|
||||
|
||||
# Returns True if sub_folder is the same as or inside base_folder
|
||||
def isSubFolder(sub_folder, base_folder):
|
||||
if base_folder and sub_folder:
|
||||
base = sp(os.path.realpath(base_folder)) + os.path.sep
|
||||
subfolder = sp(os.path.realpath(sub_folder)) + os.path.sep
|
||||
return os.path.commonprefix([subfolder, base]) == base
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# From SABNZBD
|
||||
re_password = [re.compile(r'(.+){{([^{}]+)}}$'), re.compile(r'(.+)\s+password\s*=\s*(.+)$', re.I)]
|
||||
|
||||
|
||||
def scanForPassword(name):
|
||||
m = None
|
||||
for reg in re_password:
|
||||
m = reg.search(name)
|
||||
if m: break
|
||||
|
||||
if m:
|
||||
return m.group(1).strip('. '), m.group(2).strip()
|
||||
|
||||
|
||||
under_pat = re.compile(r'_([a-z])')
|
||||
|
||||
def underscoreToCamel(name):
|
||||
return under_pat.sub(lambda x: x.group(1).upper(), name)
|
||||
|
||||
|
||||
def removePyc(folder, only_excess = True, show_logs = True):
|
||||
|
||||
folder = sp(folder)
|
||||
|
||||
for root, dirs, files in os.walk(folder):
|
||||
|
||||
pyc_files = filter(lambda filename: filename.endswith('.pyc'), files)
|
||||
py_files = set(filter(lambda filename: filename.endswith('.py'), files))
|
||||
excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files
|
||||
|
||||
for excess_pyc_file in excess_pyc_files:
|
||||
full_path = os.path.join(root, excess_pyc_file)
|
||||
if show_logs: log.debug('Removing old PYC file: %s', full_path)
|
||||
try:
|
||||
os.remove(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
for dir_name in dirs:
|
||||
full_path = os.path.join(root, dir_name)
|
||||
if len(os.listdir(full_path)) == 0:
|
||||
try:
|
||||
os.rmdir(full_path)
|
||||
except:
|
||||
log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc()))
|
||||
|
||||
|
||||
def getFreeSpace(directories):
|
||||
|
||||
single = not isinstance(directories, (tuple, list))
|
||||
if single:
|
||||
directories = [directories]
|
||||
|
||||
free_space = {}
|
||||
for folder in directories:
|
||||
|
||||
size = None
|
||||
if os.path.isdir(folder):
|
||||
if os.name == 'nt':
|
||||
_, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
|
||||
ctypes.c_ulonglong()
|
||||
if sys.version_info >= (3,) or isinstance(folder, unicode):
|
||||
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW #@UndefinedVariable
|
||||
else:
|
||||
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA #@UndefinedVariable
|
||||
ret = fun(folder, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
|
||||
if ret == 0:
|
||||
raise ctypes.WinError()
|
||||
return [total.value, free.value]
|
||||
else:
|
||||
s = os.statvfs(folder)
|
||||
size = [s.f_blocks * s.f_frsize / (1024 * 1024), (s.f_bavail * s.f_frsize) / (1024 * 1024)]
|
||||
|
||||
if single: return size
|
||||
|
||||
free_space[folder] = size
|
||||
|
||||
return free_space
|
||||
|
||||
|
||||
def getSize(paths):
|
||||
|
||||
single = not isinstance(paths, (tuple, list))
|
||||
if single:
|
||||
paths = [paths]
|
||||
|
||||
total_size = 0
|
||||
for path in paths:
|
||||
path = sp(path)
|
||||
|
||||
if os.path.isdir(path):
|
||||
total_size = 0
|
||||
for dirpath, _, filenames in os.walk(path):
|
||||
for f in filenames:
|
||||
total_size += os.path.getsize(sp(os.path.join(dirpath, f)))
|
||||
|
||||
elif os.path.isfile(path):
|
||||
total_size += os.path.getsize(path)
|
||||
|
||||
return total_size / 1048576 # MB
|
||||
|
||||
|
||||
def find(func, iterable):
|
||||
for item in iterable:
|
||||
if func(item):
|
||||
return item
|
||||
|
||||
return None
|
||||
|
||||
@@ -1,34 +1,30 @@
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from importlib import import_module
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.py3 import NotSupported
|
||||
from couchpotato.core.logger import CPLog
|
||||
from importhelper import import_module
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Loader(object):
|
||||
|
||||
def __init__(self):
|
||||
self.plugins = {}
|
||||
self.providers = {}
|
||||
self.modules = {}
|
||||
self.paths = {}
|
||||
plugins = {}
|
||||
providers = {}
|
||||
modules = {}
|
||||
|
||||
def preload(self, root = ''):
|
||||
core = os.path.join(root, 'couchpotato', 'core')
|
||||
|
||||
self.paths.update({
|
||||
self.paths = {
|
||||
'core': (0, 'couchpotato.core._base', os.path.join(core, '_base')),
|
||||
'plugin': (1, 'couchpotato.core.plugins', os.path.join(core, 'plugins')),
|
||||
'notifications': (20, 'couchpotato.core.notifications', os.path.join(core, 'notifications')),
|
||||
'downloaders': (20, 'couchpotato.core.downloaders', os.path.join(core, 'downloaders')),
|
||||
})
|
||||
}
|
||||
|
||||
# Add providers to loader
|
||||
self.addPath(root, ['couchpotato', 'core', 'providers'], 25, recursive = False)
|
||||
|
||||
# Add media to loader
|
||||
self.addPath(root, ['couchpotato', 'core', 'media'], 25, recursive = True)
|
||||
@@ -41,7 +37,7 @@ class Loader(object):
|
||||
self.paths['custom_plugins'] = (30, '', custom_plugin_dir)
|
||||
|
||||
# Loop over all paths and add to module list
|
||||
for plugin_type, plugin_tuple in self.paths.items():
|
||||
for plugin_type, plugin_tuple in self.paths.iteritems():
|
||||
priority, module, dir_name = plugin_tuple
|
||||
self.addFromDir(plugin_type, priority, module, dir_name)
|
||||
|
||||
@@ -49,7 +45,7 @@ class Loader(object):
|
||||
did_save = 0
|
||||
|
||||
for priority in sorted(self.modules):
|
||||
for module_name, plugin in sorted(self.modules[priority].items()):
|
||||
for module_name, plugin in sorted(self.modules[priority].iteritems()):
|
||||
|
||||
# Load module
|
||||
try:
|
||||
@@ -60,10 +56,12 @@ class Loader(object):
|
||||
if m is None:
|
||||
continue
|
||||
|
||||
log.info('Loading %s: %s', (plugin['type'], plugin['name']))
|
||||
|
||||
# Save default settings for plugin/provider
|
||||
did_save += self.loadSettings(m, module_name, save = False)
|
||||
|
||||
self.loadPlugins(m, plugin.get('type'), plugin.get('name'))
|
||||
self.loadPlugins(m, plugin.get('name'))
|
||||
except ImportError as e:
|
||||
# todo:: subclass ImportError for missing requirements.
|
||||
if e.message.lower().startswith("missing"):
|
||||
@@ -83,7 +81,7 @@ class Loader(object):
|
||||
for filename in os.listdir(root_path):
|
||||
path = os.path.join(root_path, filename)
|
||||
if os.path.isdir(path) and filename[:2] != '__':
|
||||
if six.u('__init__.py') in os.listdir(path):
|
||||
if u'__init__.py' in os.listdir(path):
|
||||
new_base_path = ''.join(s + '.' for s in base_path) + filename
|
||||
self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)
|
||||
|
||||
@@ -97,19 +95,14 @@ class Loader(object):
|
||||
self.addModule(priority, plugin_type, module, os.path.basename(dir_name))
|
||||
|
||||
for name in os.listdir(dir_name):
|
||||
path = os.path.join(dir_name, name)
|
||||
ext = os.path.splitext(path)[1]
|
||||
ext_length = len(ext)
|
||||
if name != 'static' and ((os.path.isdir(path) and os.path.isfile(os.path.join(path, '__init__.py')))
|
||||
or (os.path.isfile(path) and ext == '.py')):
|
||||
name = name[:-ext_length] if ext_length > 0 else name
|
||||
if os.path.isdir(os.path.join(dir_name, name)) and name != 'static' and os.path.isfile(os.path.join(dir_name, name, '__init__.py')):
|
||||
module_name = '%s.%s' % (module, name)
|
||||
self.addModule(priority, plugin_type, module_name, name)
|
||||
|
||||
def loadSettings(self, module, name, save = True):
|
||||
|
||||
if not hasattr(module, 'config'):
|
||||
#log.debug('Skip loading settings for plugin %s as it has no config section' % module.__file__)
|
||||
log.debug('Skip loading settings for plugin %s as it has no config section' % module.__file__)
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -125,20 +118,13 @@ class Loader(object):
|
||||
log.debug('Failed loading settings for "%s": %s', (name, traceback.format_exc()))
|
||||
return False
|
||||
|
||||
def loadPlugins(self, module, type, name):
|
||||
def loadPlugins(self, module, name):
|
||||
|
||||
if not hasattr(module, 'autoload'):
|
||||
#log.debug('Skip startup for plugin %s as it has no start section' % module.__file__)
|
||||
if not hasattr(module, 'start'):
|
||||
log.debug('Skip startup for plugin %s as it has no start section' % module.__file__)
|
||||
return False
|
||||
try:
|
||||
# Load single file plugin
|
||||
if isinstance(module.autoload, (six.string_types, six.text_type)):
|
||||
getattr(module, module.autoload)()
|
||||
# Load folder plugin
|
||||
else:
|
||||
module.autoload()
|
||||
|
||||
log.info('Loaded %s: %s', (type, name))
|
||||
module.start()
|
||||
return True
|
||||
except:
|
||||
log.error('Failed loading plugin "%s": %s', (module.__file__, traceback.format_exc()))
|
||||
@@ -150,9 +136,6 @@ class Loader(object):
|
||||
self.modules[priority] = {}
|
||||
|
||||
module = module.lstrip('.')
|
||||
if plugin_type.startswith('couchpotato_core'):
|
||||
plugin_type = plugin_type[17:]
|
||||
|
||||
self.modules[priority][module] = {
|
||||
'priority': priority,
|
||||
'module': module,
|
||||
@@ -163,8 +146,6 @@ class Loader(object):
|
||||
def loadModule(self, name):
|
||||
try:
|
||||
return import_module(name)
|
||||
except NotSupported:
|
||||
log.error('Module "%s" is not supported in Python 3', name)
|
||||
except ImportError:
|
||||
log.debug('Skip loading module plugin %s: %s', (name, traceback.format_exc()))
|
||||
return None
|
||||
|
||||
@@ -1,16 +1,11 @@
|
||||
import logging
|
||||
import re
|
||||
import traceback
|
||||
|
||||
|
||||
class CPLog(object):
|
||||
|
||||
context = ''
|
||||
replace_private = ['api', 'apikey', 'api_key', 'password', 'username', 'h', 'uid', 'key', 'passkey']
|
||||
|
||||
Env = None
|
||||
is_develop = False
|
||||
|
||||
def __init__(self, context = ''):
|
||||
if context.endswith('.main'):
|
||||
context = context[:-5]
|
||||
@@ -18,20 +13,6 @@ class CPLog(object):
|
||||
self.context = context
|
||||
self.logger = logging.getLogger()
|
||||
|
||||
def setup(self):
|
||||
|
||||
if not self.Env:
|
||||
from couchpotato.environment import Env
|
||||
|
||||
self.Env = Env
|
||||
self.is_develop = Env.get('dev')
|
||||
|
||||
from couchpotato.core.event import addEvent
|
||||
addEvent('app.after_shutdown', self.close)
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
logging.shutdown()
|
||||
|
||||
def info(self, msg, replace_tuple = ()):
|
||||
self.logger.info(self.addContext(msg, replace_tuple))
|
||||
|
||||
@@ -55,22 +36,23 @@ class CPLog(object):
|
||||
|
||||
def safeMessage(self, msg, replace_tuple = ()):
|
||||
|
||||
from couchpotato.core.helpers.encoding import ss, toUTF8
|
||||
from couchpotato.environment import Env
|
||||
from couchpotato.core.helpers.encoding import ss
|
||||
|
||||
msg = toUTF8(msg)
|
||||
msg = ss(msg)
|
||||
|
||||
try:
|
||||
if isinstance(replace_tuple, tuple):
|
||||
msg = msg % tuple([toUTF8(x) for x in list(replace_tuple)])
|
||||
elif isinstance(replace_tuple, dict):
|
||||
msg = msg % dict((k, toUTF8(v)) for k, v in replace_tuple.iteritems())
|
||||
else:
|
||||
msg = msg % toUTF8(replace_tuple)
|
||||
msg = msg % replace_tuple
|
||||
except:
|
||||
self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, traceback.format_exc()))
|
||||
try:
|
||||
if isinstance(replace_tuple, tuple):
|
||||
msg = msg % tuple([ss(x) for x in list(replace_tuple)])
|
||||
else:
|
||||
msg = msg % ss(replace_tuple)
|
||||
except Exception, e:
|
||||
self.logger.error(u'Failed encoding stuff to log "%s": %s' % (msg, e))
|
||||
|
||||
self.setup()
|
||||
if not self.is_develop:
|
||||
if not Env.get('dev'):
|
||||
|
||||
for replace in self.replace_private:
|
||||
msg = re.sub('(\?%s=)[^\&]+' % replace, '?%s=xxx' % replace, msg)
|
||||
@@ -78,10 +60,10 @@ class CPLog(object):
|
||||
|
||||
# Replace api key
|
||||
try:
|
||||
api_key = self.Env.setting('api_key')
|
||||
api_key = Env.setting('api_key')
|
||||
if api_key:
|
||||
msg = msg.replace(api_key, 'API_KEY')
|
||||
except:
|
||||
pass
|
||||
|
||||
return toUTF8(msg)
|
||||
return msg
|
||||
|
||||
99
couchpotato/core/media/__init__.py
Executable file → Normal file
99
couchpotato/core/media/__init__.py
Executable file → Normal file
@@ -1,101 +1,44 @@
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from couchpotato import CPLog
|
||||
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato import get_session
|
||||
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
from couchpotato.core.settings.model import Media
|
||||
|
||||
|
||||
class MediaBase(Plugin):
|
||||
|
||||
_type = None
|
||||
|
||||
default_dict = {
|
||||
'profile': {'types': {'quality': {}}},
|
||||
'releases': {'status': {}, 'quality': {}, 'files':{}, 'info': {}},
|
||||
'library': {'titles': {}, 'files':{}},
|
||||
'files': {},
|
||||
'status': {},
|
||||
'category': {},
|
||||
}
|
||||
|
||||
def initType(self):
|
||||
addEvent('media.types', self.getType)
|
||||
|
||||
def getType(self):
|
||||
return self._type
|
||||
|
||||
def createOnComplete(self, media_id):
|
||||
def createOnComplete(self, id):
|
||||
|
||||
def onComplete():
|
||||
try:
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if media:
|
||||
event_name = '%s.searcher.single' % media.get('type')
|
||||
fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True)
|
||||
except:
|
||||
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||
db = get_session()
|
||||
media = db.query(Media).filter_by(id = id).first()
|
||||
fireEventAsync('%s.searcher.single' % media.type, media.to_dict(self.default_dict), on_complete = self.createNotifyFront(id))
|
||||
db.expire_all()
|
||||
|
||||
return onComplete
|
||||
|
||||
def createNotifyFront(self, media_id):
|
||||
|
||||
def notifyFront():
|
||||
try:
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
if media:
|
||||
event_name = '%s.update' % media.get('type')
|
||||
fireEvent('notify.frontend', type = event_name, data = media)
|
||||
except:
|
||||
log.error('Failed creating onComplete: %s', traceback.format_exc())
|
||||
db = get_session()
|
||||
media = db.query(Media).filter_by(id = media_id).first()
|
||||
fireEvent('notify.frontend', type = '%s.update.%s' % (media.type, media.id), data = media.to_dict(self.default_dict))
|
||||
db.expire_all()
|
||||
|
||||
return notifyFront
|
||||
|
||||
def getDefaultTitle(self, info, ):
|
||||
|
||||
# Set default title
|
||||
default_title = toUnicode(info.get('title'))
|
||||
titles = info.get('titles', [])
|
||||
counter = 0
|
||||
def_title = None
|
||||
for title in titles:
|
||||
if (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == six.u('') and toUnicode(titles[0]) == title):
|
||||
def_title = toUnicode(title)
|
||||
break
|
||||
counter += 1
|
||||
|
||||
if not def_title:
|
||||
def_title = toUnicode(titles[0])
|
||||
|
||||
return def_title or 'UNKNOWN'
|
||||
|
||||
def getPoster(self, media, image_urls):
|
||||
if 'files' not in media:
|
||||
media['files'] = {}
|
||||
|
||||
existing_files = media['files']
|
||||
|
||||
image_type = 'poster'
|
||||
file_type = 'image_%s' % image_type
|
||||
|
||||
# Make existing unique
|
||||
unique_files = list(set(existing_files.get(file_type, [])))
|
||||
|
||||
# Remove files that can't be found
|
||||
for ef in unique_files:
|
||||
if not os.path.isfile(ef):
|
||||
unique_files.remove(ef)
|
||||
|
||||
# Replace new files list
|
||||
existing_files[file_type] = unique_files
|
||||
if len(existing_files) == 0:
|
||||
del existing_files[file_type]
|
||||
|
||||
# Loop over type
|
||||
for image in image_urls.get(image_type, []):
|
||||
if not isinstance(image, six.string_types):
|
||||
continue
|
||||
|
||||
if file_type not in existing_files or len(existing_files.get(file_type, [])) == 0:
|
||||
file_path = fireEvent('file.download', url = image, single = True)
|
||||
if file_path:
|
||||
existing_files[file_type] = [toUnicode(file_path)]
|
||||
break
|
||||
else:
|
||||
break
|
||||
|
||||
@@ -1,7 +1,13 @@
|
||||
from .main import Library
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
|
||||
def autoload():
|
||||
return Library()
|
||||
class LibraryBase(Plugin):
|
||||
|
||||
config = []
|
||||
_type = None
|
||||
|
||||
def initType(self):
|
||||
addEvent('library.types', self.getType)
|
||||
|
||||
def getType(self):
|
||||
return self._type
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
|
||||
class LibraryBase(Plugin):
|
||||
|
||||
_type = None
|
||||
|
||||
def initType(self):
|
||||
addEvent('library.types', self.getType)
|
||||
|
||||
def getType(self):
|
||||
return self._type
|
||||
@@ -1,128 +0,0 @@
|
||||
from couchpotato import get_db
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.library.base import LibraryBase
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Library(LibraryBase):
|
||||
def __init__(self):
|
||||
addEvent('library.title', self.title)
|
||||
addEvent('library.related', self.related)
|
||||
addEvent('library.tree', self.tree)
|
||||
|
||||
addEvent('library.root', self.root)
|
||||
|
||||
addApiView('library.query', self.queryView)
|
||||
addApiView('library.related', self.relatedView)
|
||||
addApiView('library.tree', self.treeView)
|
||||
|
||||
def queryView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.query', media, single = True)
|
||||
}
|
||||
|
||||
def relatedView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.related', media, single = True)
|
||||
}
|
||||
|
||||
def treeView(self, media_id, **kwargs):
|
||||
db = get_db()
|
||||
media = db.get('id', media_id)
|
||||
|
||||
return {
|
||||
'result': fireEvent('library.tree', media, single = True)
|
||||
}
|
||||
|
||||
def title(self, library):
|
||||
return fireEvent(
|
||||
'library.query',
|
||||
library,
|
||||
|
||||
condense = False,
|
||||
include_year = False,
|
||||
include_identifier = False,
|
||||
single = True
|
||||
)
|
||||
|
||||
def related(self, media):
|
||||
result = {self.key(media['type']): media}
|
||||
|
||||
db = get_db()
|
||||
cur = media
|
||||
|
||||
while cur and cur.get('parent_id'):
|
||||
cur = db.get('id', cur['parent_id'])
|
||||
|
||||
result[self.key(cur['type'])] = cur
|
||||
|
||||
children = db.get_many('media_children', media['_id'], with_doc = True)
|
||||
|
||||
for item in children:
|
||||
key = self.key(item['doc']['type']) + 's'
|
||||
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
|
||||
result[key].append(item['doc'])
|
||||
|
||||
return result
|
||||
|
||||
def root(self, media):
|
||||
db = get_db()
|
||||
cur = media
|
||||
|
||||
while cur and cur.get('parent_id'):
|
||||
cur = db.get('id', cur['parent_id'])
|
||||
|
||||
return cur
|
||||
|
||||
def tree(self, media = None, media_id = None):
|
||||
db = get_db()
|
||||
|
||||
if media:
|
||||
result = media
|
||||
elif media_id:
|
||||
result = db.get('id', media_id, with_doc = True)
|
||||
else:
|
||||
return None
|
||||
|
||||
# Find children
|
||||
items = db.get_many('media_children', result['_id'], with_doc = True)
|
||||
keys = []
|
||||
|
||||
# Build children arrays
|
||||
for item in items:
|
||||
key = self.key(item['doc']['type']) + 's'
|
||||
|
||||
if key not in result:
|
||||
result[key] = {}
|
||||
elif type(result[key]) is not dict:
|
||||
result[key] = {}
|
||||
|
||||
if key not in keys:
|
||||
keys.append(key)
|
||||
|
||||
result[key][item['_id']] = fireEvent('library.tree', item['doc'], single = True)
|
||||
|
||||
# Unique children
|
||||
for key in keys:
|
||||
result[key] = result[key].values()
|
||||
|
||||
# Include releases
|
||||
result['releases'] = fireEvent('release.for_media', result['_id'], single = True)
|
||||
|
||||
return result
|
||||
|
||||
def key(self, media_type):
|
||||
parts = media_type.split('.')
|
||||
return parts[-1]
|
||||
@@ -1,7 +0,0 @@
|
||||
from .main import Matcher
|
||||
|
||||
|
||||
def autoload():
|
||||
return Matcher()
|
||||
|
||||
config = []
|
||||
@@ -1,84 +0,0 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MatcherBase(Plugin):
|
||||
type = None
|
||||
|
||||
def __init__(self):
|
||||
if self.type:
|
||||
addEvent('%s.matcher.correct' % self.type, self.correct)
|
||||
|
||||
def correct(self, chain, release, media, quality):
|
||||
raise NotImplementedError()
|
||||
|
||||
def flattenInfo(self, info):
|
||||
# Flatten dictionary of matches (chain info)
|
||||
if isinstance(info, dict):
|
||||
return dict([(key, self.flattenInfo(value)) for key, value in info.items()])
|
||||
|
||||
# Flatten matches
|
||||
result = None
|
||||
|
||||
for match in info:
|
||||
if isinstance(match, dict):
|
||||
if result is None:
|
||||
result = {}
|
||||
|
||||
for key, value in match.items():
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
|
||||
result[key].append(value)
|
||||
else:
|
||||
if result is None:
|
||||
result = []
|
||||
|
||||
result.append(match)
|
||||
|
||||
return result
|
||||
|
||||
def constructFromRaw(self, match):
|
||||
if not match:
|
||||
return None
|
||||
|
||||
parts = [
|
||||
''.join([
|
||||
y for y in x[1:] if y
|
||||
]) for x in match
|
||||
]
|
||||
|
||||
return ''.join(parts)[:-1].strip()
|
||||
|
||||
def simplifyValue(self, value):
|
||||
if not value:
|
||||
return value
|
||||
|
||||
if isinstance(value, basestring):
|
||||
return simplifyString(value)
|
||||
|
||||
if isinstance(value, list):
|
||||
return [self.simplifyValue(x) for x in value]
|
||||
|
||||
raise ValueError("Unsupported value type")
|
||||
|
||||
def chainMatch(self, chain, group, tags):
|
||||
info = self.flattenInfo(chain.info[group])
|
||||
|
||||
found_tags = []
|
||||
for tag, accepted in tags.items():
|
||||
values = [self.simplifyValue(x) for x in info.get(tag, [None])]
|
||||
|
||||
if any([val in accepted for val in values]):
|
||||
found_tags.append(tag)
|
||||
|
||||
log.debug('tags found: %s, required: %s' % (found_tags, tags.keys()))
|
||||
|
||||
if set(tags.keys()) == set(found_tags):
|
||||
return True
|
||||
|
||||
return all([key in found_tags for key, value in tags.items()])
|
||||
@@ -1,89 +0,0 @@
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.variable import possibleTitles
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.matcher.base import MatcherBase
|
||||
from caper import Caper
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Matcher(MatcherBase):
|
||||
|
||||
def __init__(self):
|
||||
super(Matcher, self).__init__()
|
||||
|
||||
self.caper = Caper()
|
||||
|
||||
addEvent('matcher.parse', self.parse)
|
||||
addEvent('matcher.match', self.match)
|
||||
|
||||
addEvent('matcher.flatten_info', self.flattenInfo)
|
||||
addEvent('matcher.construct_from_raw', self.constructFromRaw)
|
||||
|
||||
addEvent('matcher.correct_title', self.correctTitle)
|
||||
addEvent('matcher.correct_quality', self.correctQuality)
|
||||
|
||||
def parse(self, name, parser='scene'):
|
||||
return self.caper.parse(name, parser)
|
||||
|
||||
def match(self, release, media, quality):
|
||||
match = fireEvent('matcher.parse', release['name'], single = True)
|
||||
|
||||
if len(match.chains) < 1:
|
||||
log.info2('Wrong: %s, unable to parse release name (no chains)', release['name'])
|
||||
return False
|
||||
|
||||
for chain in match.chains:
|
||||
if fireEvent('%s.matcher.correct' % media['type'], chain, release, media, quality, single = True):
|
||||
return chain
|
||||
|
||||
return False
|
||||
|
||||
def correctTitle(self, chain, media):
|
||||
root = fireEvent('library.root', media, single = True)
|
||||
|
||||
if 'show_name' not in chain.info or not len(chain.info['show_name']):
|
||||
log.info('Wrong: missing show name in parsed result')
|
||||
return False
|
||||
|
||||
# Get the lower-case parsed show name from the chain
|
||||
chain_words = [x.lower() for x in chain.info['show_name']]
|
||||
|
||||
# Build a list of possible titles of the media we are searching for
|
||||
titles = root['info']['titles']
|
||||
|
||||
# Add year suffix titles (will result in ['<name_one>', '<name_one> <suffix_one>', '<name_two>', ...])
|
||||
suffixes = [None, root['info']['year']]
|
||||
|
||||
titles = [
|
||||
title + ((' %s' % suffix) if suffix else '')
|
||||
for title in titles
|
||||
for suffix in suffixes
|
||||
]
|
||||
|
||||
# Check show titles match
|
||||
# TODO check xem names
|
||||
for title in titles:
|
||||
for valid_words in [x.split(' ') for x in possibleTitles(title)]:
|
||||
|
||||
if valid_words == chain_words:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def correctQuality(self, chain, quality, quality_map):
|
||||
if quality['identifier'] not in quality_map:
|
||||
log.info2('Wrong: unknown preferred quality %s', quality['identifier'])
|
||||
return False
|
||||
|
||||
if 'video' not in chain.info:
|
||||
log.info2('Wrong: no video tags found')
|
||||
return False
|
||||
|
||||
video_tags = quality_map[quality['identifier']]
|
||||
|
||||
if not self.chainMatch(chain, 'video', video_tags):
|
||||
log.info2('Wrong: %s tags not in chain', video_tags)
|
||||
return False
|
||||
|
||||
return True
|
||||
@@ -1,5 +1,6 @@
|
||||
from .main import MediaPlugin
|
||||
|
||||
|
||||
def autoload():
|
||||
def start():
|
||||
return MediaPlugin()
|
||||
|
||||
config = []
|
||||
|
||||
@@ -1,200 +0,0 @@
|
||||
from string import ascii_letters
|
||||
from hashlib import md5
|
||||
from couchpotato.core.helpers.database import MultiTreeBasedIndex, TreeBasedIndex
|
||||
|
||||
from couchpotato.core.helpers.encoding import toUnicode, simplifyString
|
||||
|
||||
|
||||
class MediaIndex(MultiTreeBasedIndex):
|
||||
_version = 3
|
||||
|
||||
custom_header = """from couchpotato.core.helpers.database import MultiTreeBasedIndex"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and (data.get('identifier') or data.get('identifiers')):
|
||||
|
||||
identifiers = data.get('identifiers', {})
|
||||
if data.get('identifier') and 'imdb' not in identifiers:
|
||||
identifiers['imdb'] = data.get('identifier')
|
||||
|
||||
ids = []
|
||||
for x in identifiers:
|
||||
ids.append(md5('%s-%s' % (x, identifiers[x])).hexdigest())
|
||||
|
||||
return ids, None
|
||||
|
||||
|
||||
class MediaStatusIndex(TreeBasedIndex):
|
||||
_version = 1
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaStatusIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('status'):
|
||||
return md5(data.get('status')).hexdigest(), None
|
||||
|
||||
|
||||
class MediaTypeIndex(TreeBasedIndex):
|
||||
_version = 1
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaTypeIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('type'):
|
||||
return md5(data.get('type')).hexdigest(), None
|
||||
|
||||
|
||||
class TitleSearchIndex(MultiTreeBasedIndex):
|
||||
_version = 2
|
||||
|
||||
custom_header = """from couchpotato.core.helpers.database import MultiTreeBasedIndex
|
||||
try: from itertools import izip
|
||||
except: izip = zip
|
||||
from couchpotato.core.helpers.encoding import simplifyString"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(TitleSearchIndex, self).__init__(*args, **kwargs)
|
||||
self.__l = kwargs.get('w_len', 2)
|
||||
|
||||
def make_key_value(self, data):
|
||||
|
||||
if data.get('_t') == 'media' and len(data.get('title', '')) > 0:
|
||||
|
||||
out = set()
|
||||
title = str(simplifyString(data.get('title').lower()))
|
||||
l = self.__l
|
||||
title_split = title.split()
|
||||
|
||||
for x in range(len(title_split)):
|
||||
combo = ' '.join(title_split[x:])[:32].strip()
|
||||
out.add(combo.rjust(32, '_'))
|
||||
combo_range = max(l, min(len(combo), 32))
|
||||
|
||||
for cx in range(1, combo_range):
|
||||
ccombo = combo[:-cx].strip()
|
||||
if len(ccombo) > l:
|
||||
out.add(ccombo.rjust(32, '_'))
|
||||
|
||||
return out, None
|
||||
|
||||
def make_key(self, key):
|
||||
return key.rjust(32, '_').lower()
|
||||
|
||||
|
||||
class TitleIndex(TreeBasedIndex):
|
||||
_version = 4
|
||||
|
||||
custom_header = """from couchpotato.core.helpers.database import TreeBasedIndex
|
||||
from string import ascii_letters
|
||||
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(TitleIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return self.simplify(key)
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('title') is not None and len(data.get('title')) > 0:
|
||||
return self.simplify(data['title']), None
|
||||
|
||||
def simplify(self, title):
|
||||
|
||||
title = toUnicode(title)
|
||||
|
||||
nr_prefix = '' if title and len(title) > 0 and title[0] in ascii_letters else '#'
|
||||
title = simplifyString(title)
|
||||
|
||||
for prefix in ['the ', 'an ', 'a ']:
|
||||
if prefix == title[:len(prefix)]:
|
||||
title = title[len(prefix):]
|
||||
break
|
||||
|
||||
return str(nr_prefix + title).ljust(32, ' ')[:32]
|
||||
|
||||
|
||||
class StartsWithIndex(TreeBasedIndex):
|
||||
_version = 3
|
||||
|
||||
custom_header = """from couchpotato.core.helpers.database import TreeBasedIndex
|
||||
from string import ascii_letters
|
||||
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '1s'
|
||||
super(StartsWithIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return self.first(key)
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('title') is not None:
|
||||
return self.first(data['title']), None
|
||||
|
||||
def first(self, title):
|
||||
title = toUnicode(title)
|
||||
title = simplifyString(title)
|
||||
|
||||
for prefix in ['the ', 'an ', 'a ']:
|
||||
if prefix == title[:len(prefix)]:
|
||||
title = title[len(prefix):]
|
||||
break
|
||||
|
||||
return str(title[0] if title and len(title) > 0 and title[0] in ascii_letters else '#').lower()
|
||||
|
||||
|
||||
|
||||
class MediaChildrenIndex(TreeBasedIndex):
|
||||
_version = 1
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaChildrenIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key(self, key):
|
||||
return key
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('parent_id'):
|
||||
return data.get('parent_id'), None
|
||||
|
||||
|
||||
class MediaTagIndex(MultiTreeBasedIndex):
|
||||
_version = 2
|
||||
|
||||
custom_header = """from couchpotato.core.helpers.database import MultiTreeBasedIndex"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['key_format'] = '32s'
|
||||
super(MediaTagIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
def make_key_value(self, data):
|
||||
if data.get('_t') == 'media' and data.get('tags') and len(data.get('tags', [])) > 0:
|
||||
|
||||
tags = set()
|
||||
for tag in data.get('tags', []):
|
||||
tags.add(self.make_key(tag))
|
||||
|
||||
return list(tags), None
|
||||
|
||||
def make_key(self, key):
|
||||
return md5(key).hexdigest()
|
||||
565
couchpotato/core/media/_base/media/main.py
Executable file → Normal file
565
couchpotato/core/media/_base/media/main.py
Executable file → Normal file
@@ -1,36 +1,16 @@
|
||||
from datetime import timedelta
|
||||
import time
|
||||
import traceback
|
||||
from string import ascii_lowercase
|
||||
|
||||
from couchpotato import tryInt, get_db
|
||||
from couchpotato import get_session
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
||||
from couchpotato.core.helpers.database import RecordNotFound, RecordDeleted
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import splitString, getImdb, getTitle
|
||||
from couchpotato.core.helpers.variable import splitString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media import MediaBase
|
||||
from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex, MediaTagIndex
|
||||
import six
|
||||
|
||||
from couchpotato.core.settings.model import Media
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MediaPlugin(MediaBase):
|
||||
|
||||
_database = {
|
||||
'media': MediaIndex,
|
||||
'media_search_title': TitleSearchIndex,
|
||||
'media_status': MediaStatusIndex,
|
||||
'media_tag': MediaTagIndex,
|
||||
'media_by_type': MediaTypeIndex,
|
||||
'media_title': TitleIndex,
|
||||
'media_startswith': StartsWithIndex,
|
||||
'media_children': MediaChildrenIndex,
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
||||
addApiView('media.refresh', self.refresh, docs = {
|
||||
@@ -40,541 +20,30 @@ class MediaPlugin(MediaBase):
|
||||
}
|
||||
})
|
||||
|
||||
addApiView('media.list', self.listView, docs = {
|
||||
'desc': 'List media',
|
||||
'params': {
|
||||
'type': {'type': 'string', 'desc': 'Media type to filter on.'},
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter media by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter media by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the media list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all media starting with the letter "a"'},
|
||||
'search': {'desc': 'Search media title'},
|
||||
},
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'empty': bool, any media returned or not,
|
||||
'media': array, media found,
|
||||
}"""}
|
||||
})
|
||||
|
||||
addApiView('media.get', self.getView, docs = {
|
||||
'desc': 'Get media by id',
|
||||
'params': {
|
||||
'id': {'desc': 'The id of the media'},
|
||||
}
|
||||
})
|
||||
|
||||
addApiView('media.delete', self.deleteView, docs = {
|
||||
'desc': 'Delete a media from the wanted list',
|
||||
'params': {
|
||||
'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
|
||||
'delete_from': {'desc': 'Delete media from this page', 'type': 'string: all (default), wanted, manage'},
|
||||
}
|
||||
})
|
||||
|
||||
addApiView('media.available_chars', self.charView)
|
||||
|
||||
addEvent('app.load', self.addSingleRefreshView, priority = 100)
|
||||
addEvent('app.load', self.addSingleListView, priority = 100)
|
||||
addEvent('app.load', self.addSingleCharView, priority = 100)
|
||||
addEvent('app.load', self.addSingleDeleteView, priority = 100)
|
||||
addEvent('app.load', self.cleanupFaults)
|
||||
|
||||
addEvent('media.get', self.get)
|
||||
addEvent('media.with_status', self.withStatus)
|
||||
addEvent('media.with_identifiers', self.withIdentifiers)
|
||||
addEvent('media.list', self.list)
|
||||
addEvent('media.delete', self.delete)
|
||||
addEvent('media.restatus', self.restatus)
|
||||
addEvent('media.tag', self.tag)
|
||||
addEvent('media.untag', self.unTag)
|
||||
|
||||
# Wrongly tagged media files
|
||||
def cleanupFaults(self):
|
||||
medias = fireEvent('media.with_status', 'ignored', single = True) or []
|
||||
|
||||
db = get_db()
|
||||
for media in medias:
|
||||
try:
|
||||
media['status'] = 'done'
|
||||
db.update(media)
|
||||
except:
|
||||
pass
|
||||
addEvent('app.load', self.addSingleRefresh)
|
||||
|
||||
def refresh(self, id = '', **kwargs):
|
||||
handlers = []
|
||||
ids = splitString(id)
|
||||
db = get_session()
|
||||
|
||||
for x in ids:
|
||||
for x in splitString(id):
|
||||
media = db.query(Media).filter_by(id = x).first()
|
||||
|
||||
refresh_handler = self.createRefreshHandler(x)
|
||||
if refresh_handler:
|
||||
handlers.append(refresh_handler)
|
||||
if media:
|
||||
# Get current selected title
|
||||
default_title = ''
|
||||
for title in media.library.titles:
|
||||
if title.default: default_title = title.title
|
||||
|
||||
fireEvent('notify.frontend', type = 'media.busy', data = {'_id': ids})
|
||||
fireEventAsync('schedule.queue', handlers = handlers)
|
||||
fireEvent('notify.frontend', type = '%s.busy.%s' % (media.type, x), data = True)
|
||||
fireEventAsync('library.update.%s' % media.type, identifier = media.library.identifier, default_title = default_title, force = True, on_complete = self.createOnComplete(x))
|
||||
|
||||
db.expire_all()
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
}
|
||||
|
||||
def createRefreshHandler(self, media_id):
|
||||
|
||||
try:
|
||||
media = get_db().get('id', media_id)
|
||||
event = '%s.update' % media.get('type')
|
||||
|
||||
def handler():
|
||||
fireEvent(event, media_id = media_id, on_complete = self.createOnComplete(media_id))
|
||||
|
||||
return handler
|
||||
|
||||
except:
|
||||
log.error('Refresh handler for non existing media: %s', traceback.format_exc())
|
||||
|
||||
def addSingleRefreshView(self):
|
||||
def addSingleRefresh(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
addApiView('%s.refresh' % media_type, self.refresh)
|
||||
|
||||
def get(self, media_id):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
|
||||
imdb_id = getImdb(str(media_id))
|
||||
|
||||
if imdb_id:
|
||||
media = db.get('media', 'imdb-%s' % imdb_id, with_doc = True)['doc']
|
||||
else:
|
||||
media = db.get('id', media_id)
|
||||
|
||||
if media:
|
||||
|
||||
# Attach category
|
||||
try: media['category'] = db.get('id', media.get('category_id'))
|
||||
except: pass
|
||||
|
||||
media['releases'] = fireEvent('release.for_media', media['_id'], single = True)
|
||||
|
||||
return media
|
||||
|
||||
except (RecordNotFound, RecordDeleted):
|
||||
log.error('Media with id "%s" not found', media_id)
|
||||
except:
|
||||
raise
|
||||
|
||||
def getView(self, id = None, **kwargs):
|
||||
|
||||
media = self.get(id) if id else None
|
||||
|
||||
return {
|
||||
'success': media is not None,
|
||||
'media': media,
|
||||
}
|
||||
|
||||
def withStatus(self, status, types = None, with_doc = True):
|
||||
|
||||
db = get_db()
|
||||
|
||||
if types and not isinstance(types, (list, tuple)):
|
||||
types = [types]
|
||||
|
||||
status = list(status if isinstance(status, (list, tuple)) else [status])
|
||||
|
||||
for s in status:
|
||||
for ms in db.get_many('media_status', s):
|
||||
if with_doc:
|
||||
try:
|
||||
doc = db.get('id', ms['_id'])
|
||||
|
||||
if types and doc.get('type') not in types:
|
||||
continue
|
||||
|
||||
yield doc
|
||||
except (RecordDeleted, RecordNotFound):
|
||||
log.debug('Record not found, skipping: %s', ms['_id'])
|
||||
except (ValueError, EOFError):
|
||||
fireEvent('database.delete_corrupted', ms.get('_id'), traceback_error = traceback.format_exc(0))
|
||||
else:
|
||||
yield ms
|
||||
|
||||
def withIdentifiers(self, identifiers, with_doc = False):
|
||||
db = get_db()
|
||||
|
||||
for x in identifiers:
|
||||
try:
|
||||
return db.get('media', '%s-%s' % (x, identifiers[x]), with_doc = with_doc)
|
||||
except:
|
||||
pass
|
||||
|
||||
log.debug('No media found with identifiers: %s', identifiers)
|
||||
return False
|
||||
|
||||
def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, with_tags = None, starts_with = None, search = None):
|
||||
|
||||
db = get_db()
|
||||
|
||||
# Make a list from string
|
||||
if status and not isinstance(status, (list, tuple)):
|
||||
status = [status]
|
||||
if release_status and not isinstance(release_status, (list, tuple)):
|
||||
release_status = [release_status]
|
||||
if types and not isinstance(types, (list, tuple)):
|
||||
types = [types]
|
||||
if with_tags and not isinstance(with_tags, (list, tuple)):
|
||||
with_tags = [with_tags]
|
||||
|
||||
# query media ids
|
||||
if types:
|
||||
all_media_ids = set()
|
||||
for media_type in types:
|
||||
all_media_ids = all_media_ids.union(set([x['_id'] for x in db.get_many('media_by_type', media_type)]))
|
||||
else:
|
||||
all_media_ids = set([x['_id'] for x in db.all('media')])
|
||||
|
||||
media_ids = list(all_media_ids)
|
||||
filter_by = {}
|
||||
|
||||
# Filter on movie status
|
||||
if status and len(status) > 0:
|
||||
filter_by['media_status'] = set()
|
||||
for media_status in fireEvent('media.with_status', status, with_doc = False, single = True):
|
||||
filter_by['media_status'].add(media_status.get('_id'))
|
||||
|
||||
# Filter on release status
|
||||
if release_status and len(release_status) > 0:
|
||||
filter_by['release_status'] = set()
|
||||
for release_status in fireEvent('release.with_status', release_status, with_doc = False, single = True):
|
||||
filter_by['release_status'].add(release_status.get('media_id'))
|
||||
|
||||
# Add search filters
|
||||
if starts_with:
|
||||
starts_with = toUnicode(starts_with.lower())[0]
|
||||
starts_with = starts_with if starts_with in ascii_lowercase else '#'
|
||||
filter_by['starts_with'] = [x['_id'] for x in db.get_many('media_startswith', starts_with)]
|
||||
|
||||
# Add tag filter
|
||||
if with_tags:
|
||||
filter_by['with_tags'] = set()
|
||||
for tag in with_tags:
|
||||
for x in db.get_many('media_tag', tag):
|
||||
filter_by['with_tags'].add(x['_id'])
|
||||
|
||||
# Filter with search query
|
||||
if search:
|
||||
filter_by['search'] = [x['_id'] for x in db.get_many('media_search_title', search)]
|
||||
|
||||
if status_or and 'media_status' in filter_by and 'release_status' in filter_by:
|
||||
filter_by['status'] = list(filter_by['media_status']) + list(filter_by['release_status'])
|
||||
del filter_by['media_status']
|
||||
del filter_by['release_status']
|
||||
|
||||
# Filter by combining ids
|
||||
for x in filter_by:
|
||||
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||
|
||||
total_count = len(media_ids)
|
||||
if total_count == 0:
|
||||
return 0, []
|
||||
|
||||
offset = 0
|
||||
limit = -1
|
||||
if limit_offset:
|
||||
splt = splitString(limit_offset) if isinstance(limit_offset, six.string_types) else limit_offset
|
||||
limit = tryInt(splt[0])
|
||||
offset = tryInt(0 if len(splt) is 1 else splt[1])
|
||||
|
||||
# List movies based on title order
|
||||
medias = []
|
||||
for m in db.all('media_title'):
|
||||
media_id = m['_id']
|
||||
if media_id not in media_ids: continue
|
||||
if offset > 0:
|
||||
offset -= 1
|
||||
continue
|
||||
|
||||
media = fireEvent('media.get', media_id, single = True)
|
||||
|
||||
# Skip if no media has been found
|
||||
if not media:
|
||||
continue
|
||||
|
||||
# Merge releases with movie dict
|
||||
medias.append(media)
|
||||
|
||||
# remove from media ids
|
||||
media_ids.remove(media_id)
|
||||
if len(media_ids) == 0 or len(medias) == limit: break
|
||||
|
||||
return total_count, medias
|
||||
|
||||
def listView(self, **kwargs):
|
||||
|
||||
total_movies, movies = self.list(
|
||||
types = splitString(kwargs.get('type')),
|
||||
status = splitString(kwargs.get('status')),
|
||||
release_status = splitString(kwargs.get('release_status')),
|
||||
status_or = kwargs.get('status_or') is not None,
|
||||
limit_offset = kwargs.get('limit_offset'),
|
||||
with_tags = splitString(kwargs.get('with_tags')),
|
||||
starts_with = kwargs.get('starts_with'),
|
||||
search = kwargs.get('search')
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'empty': len(movies) == 0,
|
||||
'total': total_movies,
|
||||
'movies': movies,
|
||||
}
|
||||
|
||||
def addSingleListView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempList = lambda *args, **kwargs : self.listView(type = media_type, **kwargs)
|
||||
addApiView('%s.list' % media_type, tempList, docs = {
|
||||
'desc': 'List media',
|
||||
'params': {
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter ' + media_type + ' by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the ' + media_type + ' list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all ' + media_type + 's starting with the letter "a"'},
|
||||
'search': {'desc': 'Search ' + media_type + ' title'},
|
||||
},
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'empty': bool, any """ + media_type + """s returned or not,
|
||||
'media': array, media found,
|
||||
}"""}
|
||||
})
|
||||
|
||||
def availableChars(self, types = None, status = None, release_status = None):
|
||||
|
||||
db = get_db()
|
||||
|
||||
# Make a list from string
|
||||
if status and not isinstance(status, (list, tuple)):
|
||||
status = [status]
|
||||
if release_status and not isinstance(release_status, (list, tuple)):
|
||||
release_status = [release_status]
|
||||
if types and not isinstance(types, (list, tuple)):
|
||||
types = [types]
|
||||
|
||||
# query media ids
|
||||
if types:
|
||||
all_media_ids = set()
|
||||
for media_type in types:
|
||||
all_media_ids = all_media_ids.union(set([x['_id'] for x in db.get_many('media_by_type', media_type)]))
|
||||
else:
|
||||
all_media_ids = set([x['_id'] for x in db.all('media')])
|
||||
|
||||
media_ids = all_media_ids
|
||||
filter_by = {}
|
||||
|
||||
# Filter on movie status
|
||||
if status and len(status) > 0:
|
||||
filter_by['media_status'] = set()
|
||||
for media_status in fireEvent('media.with_status', status, with_doc = False, single = True):
|
||||
filter_by['media_status'].add(media_status.get('_id'))
|
||||
|
||||
# Filter on release status
|
||||
if release_status and len(release_status) > 0:
|
||||
filter_by['release_status'] = set()
|
||||
for release_status in fireEvent('release.with_status', release_status, with_doc = False, single = True):
|
||||
filter_by['release_status'].add(release_status.get('media_id'))
|
||||
|
||||
# Filter by combining ids
|
||||
for x in filter_by:
|
||||
media_ids = [n for n in media_ids if n in filter_by[x]]
|
||||
|
||||
chars = set()
|
||||
for x in db.all('media_startswith'):
|
||||
if x['_id'] in media_ids:
|
||||
chars.add(x['key'])
|
||||
|
||||
if len(chars) == 27:
|
||||
break
|
||||
|
||||
return list(chars)
|
||||
|
||||
def charView(self, **kwargs):
|
||||
|
||||
type = splitString(kwargs.get('type', 'movie'))
|
||||
status = splitString(kwargs.get('status', None))
|
||||
release_status = splitString(kwargs.get('release_status', None))
|
||||
chars = self.availableChars(type, status, release_status)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'empty': len(chars) == 0,
|
||||
'chars': chars,
|
||||
}
|
||||
|
||||
def addSingleCharView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempChar = lambda *args, **kwargs : self.charView(type = media_type, **kwargs)
|
||||
addApiView('%s.available_chars' % media_type, tempChar)
|
||||
|
||||
    def delete(self, media_id, delete_from = None):
        """Delete a media item and/or its releases depending on *delete_from*.

        :param media_id: internal database id of the media
        :param delete_from: 'all' removes media and every release;
            'wanted'/'snatched'/'late' remove only not-done releases and mark
            the media done; 'manage' removes only done releases.
        :return: always True (failures are logged, never raised)
        """

        try:
            db = get_db()

            media = db.get('id', media_id)
            if media:
                deleted = False

                media_releases = fireEvent('release.for_media', media['_id'], single = True)

                if delete_from == 'all':
                    # Delete connected releases
                    for release in media_releases:
                        db.delete(release)

                    db.delete(media)
                    deleted = True
                else:

                    total_releases = len(media_releases)
                    total_deleted = 0
                    new_media_status = None

                    for release in media_releases:
                        if delete_from in ['wanted', 'snatched', 'late']:
                            # Deleting from the wanted side: drop unfinished releases,
                            # the media itself will be considered done
                            if release.get('status') != 'done':
                                db.delete(release)
                                total_deleted += 1
                            new_media_status = 'done'
                        elif delete_from == 'manage':
                            # Deleting from manage: drop finished releases only
                            if release.get('status') == 'done' or media.get('status') == 'done':
                                db.delete(release)
                                total_deleted += 1

                    # Remove the media itself when nothing relevant remains
                    if (total_releases == total_deleted) or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'):
                        db.delete(media)
                        deleted = True
                    elif new_media_status:
                        # Keep the media, but move it to its new status
                        media['status'] = new_media_status
                        db.update(media)

                        fireEvent('media.untag', media['_id'], 'recent', single = True)
                    else:
                        # Status unclear: let restatus recalculate it
                        fireEvent('media.restatus', media.get('_id'), single = True)

                if deleted:
                    fireEvent('notify.frontend', type = 'media.deleted', data = media)
        except:
            log.error('Failed deleting media: %s', traceback.format_exc())

        return True
|
||||
|
||||
def deleteView(self, id = '', **kwargs):
|
||||
|
||||
ids = splitString(id)
|
||||
for media_id in ids:
|
||||
self.delete(media_id, delete_from = kwargs.get('delete_from', 'all'))
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
}
|
||||
|
||||
def addSingleDeleteView(self):
|
||||
|
||||
for media_type in fireEvent('media.types', merge = True):
|
||||
tempDelete = lambda *args, **kwargs : self.deleteView(type = media_type, **kwargs)
|
||||
addApiView('%s.delete' % media_type, tempDelete, docs = {
|
||||
'desc': 'Delete a ' + media_type + ' from the wanted list',
|
||||
'params': {
|
||||
'id': {'desc': 'Media ID(s) you want to delete.', 'type': 'int (comma separated)'},
|
||||
'delete_from': {'desc': 'Delete ' + media_type + ' from this page', 'type': 'string: all (default), wanted, manage'},
|
||||
}
|
||||
})
|
||||
|
||||
    def restatus(self, media_id, tag_recent = True):
        """Recalculate a media item's status ('active' vs 'done').

        A media is 'done' when it has no quality profile, or when one of its
        finished releases satisfies the profile's finish criteria.

        :param media_id: internal database id of the media
        :param tag_recent: also tag the media 'recent' when the status changed
        :return: the resulting status string, or None on failure
        """

        try:
            db = get_db()

            m = db.get('id', media_id)
            previous_status = m['status']

            log.debug('Changing status for %s', getTitle(m))
            if not m['profile_id']:
                # No profile: nothing left to search for
                m['status'] = 'done'
            else:
                m['status'] = 'active'

                try:
                    profile = db.get('id', m['profile_id'])
                    media_releases = fireEvent('release.for_media', m['_id'], single = True)
                    done_releases = [release for release in media_releases if release.get('status') == 'done']

                    if done_releases:

                        # Check if we are finished with the media
                        for release in done_releases:
                            # quality.isfinish decides based on profile + age of the release
                            if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True):
                                m['status'] = 'done'
                                break

                    elif previous_status == 'done':
                        # No done releases but it was done before: keep it done
                        m['status'] = 'done'

                except RecordNotFound:
                    log.debug('Failed restatus, keeping previous: %s', traceback.format_exc())
                    m['status'] = previous_status

            # Only update when status has changed
            if previous_status != m['status']:
                db.update(m)

                # Tag media as recent
                if tag_recent:
                    self.tag(media_id, 'recent', update_edited = True)

            return m['status']
        except:
            log.error('Failed restatus: %s', traceback.format_exc())
|
||||
|
||||
def tag(self, media_id, tag, update_edited = False):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
m = db.get('id', media_id)
|
||||
|
||||
if update_edited:
|
||||
m['last_edit'] = int(time.time())
|
||||
|
||||
tags = m.get('tags') or []
|
||||
if tag not in tags:
|
||||
tags.append(tag)
|
||||
m['tags'] = tags
|
||||
db.update(m)
|
||||
|
||||
return True
|
||||
except:
|
||||
log.error('Failed tagging: %s', traceback.format_exc())
|
||||
|
||||
return False
|
||||
|
||||
def unTag(self, media_id, tag):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
m = db.get('id', media_id)
|
||||
|
||||
tags = m.get('tags') or []
|
||||
if tag in tags:
|
||||
new_tags = list(set(tags))
|
||||
new_tags.remove(tag)
|
||||
|
||||
m['tags'] = new_tags
|
||||
db.update(m)
|
||||
|
||||
return True
|
||||
except:
|
||||
log.error('Failed untagging: %s', traceback.format_exc())
|
||||
|
||||
return False
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import Provider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class AutomationBase(Provider):
    """Marker base class for automation providers (no behavior of its own)."""
    pass
|
||||
@@ -1,5 +0,0 @@
|
||||
from couchpotato.core.media._base.providers.base import Provider
|
||||
|
||||
|
||||
class BaseInfoProvider(Provider):
    """Base class for info providers; subclasses override `type`."""
    # media type this provider serves; 'unknown' until overridden
    type = 'unknown'
|
||||
@@ -1,8 +0,0 @@
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class MetaDataBase(Plugin):
    """Marker base class for metadata plugins (no behavior of its own)."""
    pass
|
||||
@@ -1,267 +0,0 @@
|
||||
from six.moves import urllib
|
||||
import time
|
||||
import traceback
|
||||
import re
|
||||
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
from couchpotato.core.helpers.variable import cleanHost, splitString, tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import ResultList
|
||||
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||
from couchpotato.environment import Env
|
||||
from dateutil.parser import parse
|
||||
from requests import HTTPError
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(NZBProvider, RSS):
    """Base for newznab-API compatible NZB providers (multiple hosts)."""

    urls = {
        'detail': 'details/%s',
        'download': 't=get&id=%s'
    }

    # Case-insensitive words that introduce a password in a spot description
    passwords_regex = 'password|wachtwoord'
    # host -> timestamp when its API limit was hit (False once cleared)
    limits_reached = {}

    http_time_between_calls = 1  # Seconds

    def search(self, media, quality):
        """Search every enabled host, collecting into one ResultList."""
        hosts = self.getHosts()

        results = ResultList(self, media, quality, imdb_results = True)

        for host in hosts:
            if self.isDisabled(host):
                continue

            self._searchOnHost(host, media, quality, results)

        return results

    def _searchOnHost(self, host, media, quality, results):
        """Query a single newznab host and append parsed results."""

        query = self.buildUrl(media, host)
        url = '%s%s' % (self.getUrl(host['host']), query)
        nzbs = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})

        for nzb in nzbs:

            date = None
            spotter = None
            for item in nzb:
                if date and spotter:
                    break
                if item.attrib.get('name') == 'usenetdate':
                    date = item.attrib.get('value')
                    # NOTE(review): this break exits before a later 'poster'
                    # item can be seen; the date-and-spotter guard above
                    # suggests 'continue' may have been intended - confirm
                    break

                # Get the name of the person who posts the spot
                if item.attrib.get('name') == 'poster':
                    if "@spot.net" in item.attrib.get('value'):
                        spotter = item.attrib.get('value').split("@")[0]
                        continue

            if not date:
                date = self.getTextElement(nzb, 'pubDate')

            nzb_id = self.getTextElement(nzb, 'guid').split('/')[-1:].pop()
            name = self.getTextElement(nzb, 'title')

            if not name:
                continue

            name_extra = ''
            if spotter:
                name_extra = spotter

            description = ''
            if "@spot.net" in nzb_id:
                try:
                    # Get details for extended description to retrieve passwords
                    query = self.buildDetailsUrl(nzb_id, host['api_key'])
                    url = '%s%s' % (self.getUrl(host['host']), query)
                    nzb_details = self.getRSSData(url, cache_timeout = 1800, headers = {'User-Agent': Env.getIdentifier()})[0]

                    description = self.getTextElement(nzb_details, 'description')

                    # Extract a password from the description
                    password = re.search('(?:' + self.passwords_regex + ')(?: *)(?:\:|\=)(?: *)(.*?)\<br\>|\n|$', description, flags = re.I).group(1)
                    if password:
                        name += ' {{%s}}' % password.strip()
                except:
                    log.debug('Error getting details of "%s": %s', (name, traceback.format_exc()))

            results.append({
                'id': nzb_id,
                'provider_extra': urllib.urlparse(host['host']).hostname or host['host'],
                'name': toUnicode(name),
                'name_extra': name_extra,
                'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
                'size': int(self.getElement(nzb, 'enclosure').attrib['length']) / 1024 / 1024,
                'url': ((self.getUrl(host['host']) + self.urls['download']) % tryUrlencode(nzb_id)) + self.getApiExt(host),
                'detail_url': (cleanHost(host['host']) + self.urls['detail']) % tryUrlencode(nzb_id),
                'content': self.getTextElement(nzb, 'description'),
                'description': description,
                'score': host['extra_score'],
            })

    def getHosts(self):
        """Zip the comma separated config values into one dict per host."""

        uses = splitString(str(self.conf('use')), clean = False)
        hosts = splitString(self.conf('host'), clean = False)
        api_keys = splitString(self.conf('api_key'), clean = False)
        extra_score = splitString(self.conf('extra_score'), clean = False)
        custom_tags = splitString(self.conf('custom_tag'), clean = False)

        # renamed from 'list' to stop shadowing the builtin
        host_list = []
        for nr in range(len(hosts)):

            # Each column may be shorter than the host list; default per field
            try: key = api_keys[nr]
            except: key = ''

            try: host = hosts[nr]
            except: host = ''

            try: score = tryInt(extra_score[nr])
            except: score = 0

            try: custom_tag = custom_tags[nr]
            except: custom_tag = ''

            host_list.append({
                'use': uses[nr],
                'host': host,
                'api_key': key,
                'extra_score': score,
                'custom_tag': custom_tag
            })

        return host_list

    def belongsTo(self, url, provider = None, host = None):
        """Check whether *url* belongs to any of the configured hosts."""

        hosts = self.getHosts()

        for host in hosts:
            result = super(Base, self).belongsTo(url, host = host['host'], provider = provider)
            if result:
                return result

    def getUrl(self, host):
        """Return the API base url for *host* (handles spotweb style urls)."""
        if '?page=newznabapi' in host:
            return cleanHost(host)[:-1] + '&'

        return cleanHost(host) + 'api?'

    def isDisabled(self, host = None):
        return not self.isEnabled(host)

    def isEnabled(self, host = None):

        # Return true if at least one is enabled and no host is given
        if host is None:
            for host in self.getHosts():
                if self.isEnabled(host):
                    return True
            return False

        return NZBProvider.isEnabled(self) and host['host'] and host['api_key'] and int(host['use'])

    def getApiExt(self, host):
        """Querystring suffix carrying the host's api key."""
        return '&apikey=%s' % host['api_key']

    def download(self, url = '', nzb_id = ''):
        """Download an NZB; back off for 3 hours when a host's limit is hit.

        :return: nzb data, or the string 'try_next' to fall through to
            another download handler
        """
        host = urllib.urlparse(url).hostname

        if self.limits_reached.get(host):
            # Try again in 3 hours
            if self.limits_reached[host] > time.time() - 10800:
                return 'try_next'

        try:
            data = self.urlopen(url, show_error = False)
            self.limits_reached[host] = False
            return data
        except HTTPError as e:
            sc = e.response.status_code
            if sc in [503, 429]:
                # requests' HTTPError has no .read(); use the attached
                # response body instead (the original e.read() would raise)
                response = (e.response.text or '').lower()
                if sc == 429 or 'maximum api' in response or 'download limit' in response:
                    if not self.limits_reached.get(host):
                        log.error('Limit reached / to many requests for newznab provider: %s', host)
                    self.limits_reached[host] = time.time()
                    return 'try_next'

            log.error('Failed download from %s: %s', (host, traceback.format_exc()))

        return 'try_next'

    def buildDetailsUrl(self, nzb_id, api_key):
        """Build the 't=details' query string for one NZB id."""
        query = tryUrlencode({
            't': 'details',
            'id': nzb_id,
            'apikey': api_key,
        })
        return query
|
||||
|
||||
|
||||
|
||||
# Settings definition for the newznab provider group. The 'combined' option
# zips the per-host csv values (use/host/api_key/extra_score/custom_tag)
# into one row per configured host in the settings UI.
config = [{
    'name': 'newznab',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'nzb_providers',
            'name': 'newznab',
            'order': 10,
            'description': 'Enable <a href="http://newznab.com/" target="_blank">NewzNab</a> such as <a href="https://nzb.su" target="_blank">NZB.su</a>, \
<a href="https://nzbs.org" target="_blank">NZBs.org</a>, <a href="http://dognzb.cr/" target="_blank">DOGnzb.cr</a>, \
<a href="https://github.com/spotweb/spotweb" target="_blank">Spotweb</a>, <a href="https://nzbgeek.info/" target="_blank">NZBGeek</a>, \
<a href="https://www.nzbfinder.ws" target="_blank">NZBFinder</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAgMAAABinRfyAAAACVBMVEVjhwD///86aRovd/sBAAAAMklEQVQI12NgAIPQUCCRmQkjssDEShiRuRIqwZqZGcDAGBrqANUhGgIkWAOABKMDxCAA24UK50b26SAAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': True,
                },
                {
                    'name': 'use',
                    'default': '0,0,0,0,0'
                },
                {
                    'name': 'host',
                    'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://api.nzbgeek.info,https://www.nzbfinder.ws',
                    'description': 'The hostname of your newznab provider',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': '0,0,0,0,0',
                    'description': 'Starting score for each release found via this provider.',
                },
                {
                    'name': 'custom_tag',
                    'advanced': True,
                    'label': 'Custom tag',
                    'default': ',,,,',
                    'description': 'Add custom tags, for example add rls=1 to get only scene releases from nzbs.org',
                },
                {
                    'name': 'api_key',
                    'default': ',,,,',
                    'label': 'Api Key',
                    'description': 'Can be found on your profile page',
                    'type': 'combined',
                    'combine': ['use', 'host', 'api_key', 'extra_score', 'custom_tag'],
                },
            ],
        },
    ],
}]
|
||||
@@ -1,103 +0,0 @@
|
||||
from six.moves import urllib
|
||||
import time
|
||||
|
||||
from couchpotato.core.event import fireEvent
|
||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.nzb.base import NZBProvider
|
||||
from dateutil.parser import parse
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(NZBProvider, RSS):
    """omgwtfnzbs.org NZB provider (RSS search API)."""

    urls = {
        'search': 'https://rss.omgwtfnzbs.org/rss-search.php?%s',
        'detail_url': 'https://omgwtfnzbs.org/details.php?id=%s',
    }

    http_time_between_calls = 1 # Seconds

    # (site category ids, quality identifiers) pairs used by getCatId
    cat_ids = [
        ([15], ['dvdrip']),
        ([15, 16], ['brrip']),
        ([16], ['720p', '1080p', 'bd50']),
        ([17], ['dvdr']),
    ]
    cat_backup_id = 'movie'

    def search(self, movie, quality):
        """Skip pre-release qualities entirely, otherwise defer to the base search."""

        if quality['identifier'] in fireEvent('quality.pre_releases', single = True):
            return []

        return super(Base, self).search(movie, quality)

    def _searchOnTitle(self, title, movie, quality, results):
        """Run one title+year search and append parsed RSS results."""

        q = '%s %s' % (title, movie['info']['year'])
        params = tryUrlencode({
            'search': q,
            'catid': ','.join([str(x) for x in self.getCatId(quality)]),
            'user': self.conf('username', default = ''),
            'api': self.conf('api_key', default = ''),
        })

        nzbs = self.getRSSData(self.urls['search'] % params)

        for nzb in nzbs:

            enclosure = self.getElement(nzb, 'enclosure').attrib
            # NOTE(review): assumes the six urllib shim exposes parse_qs and
            # urlparse directly on the module object - confirm against the
            # project's six version (standard six uses urllib.parse.*)
            nzb_id = urllib.parse_qs(urllib.urlparse(self.getTextElement(nzb, 'link')).query).get('id')[0]

            results.append({
                'id': nzb_id,
                'name': toUnicode(self.getTextElement(nzb, 'title')),
                'age': self.calculateAge(int(time.mktime(parse(self.getTextElement(nzb, 'pubDate')).timetuple()))),
                'size': tryInt(enclosure['length']) / 1024 / 1024,
                'url': enclosure['url'],
                'detail_url': self.urls['detail_url'] % nzb_id,
                'description': self.getTextElement(nzb, 'description')
            })
|
||||
|
||||
|
||||
# Settings definition for the OMGWTFNZBs provider (username + api key auth).
config = [{
    'name': 'omgwtfnzbs',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'nzb_providers',
            'name': 'OMGWTFNZBs',
            'description': 'See <a href="http://omgwtfnzbs.org/">OMGWTFNZBs</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQEAIAAADAAbR1AAADbElEQVR4AZ2UW0ybZRiAy/OvdHaLYvB0YTRIFi7GkM44zRLmIfNixkWdiRMyYoxRE8/TC7MYvXCGEBmr3mxLwVMwY0wYA7e6Wso4lB6h/U9taSlMGIfBXLYlJMyo0S///2dJI5lxN8/F2/f9nu9737e/jYmXr6KTbN9BGG9HE/NotQ76UWziNzrXFiETk/5ARUNH+7+0kW7fSgTl0VKGOLZzidOkmuuIo7q2oTArNLPIzhdIkqXkerFOm2CaD/5bcKrjIL2c3fkhPxOq93Kcb91v46fV9TQKF4TgV/TbUsQtzfCaK6jMOd5DJrguSIIhexmqqVxN0FXbRR8/ND/LYTTj6J7nl2gnL47OkDW4KJhnQHCa6JpKVNJGA3OC58nwBJoZ//ebbIyKpBxjrr0o1q1FMRkrKXZnHWF85VvxMrJxibwhGyd0f5bLnKzqJs1k0Sfo+EU8hdAUvkbcwKEgs2D0OiV4jmmD1zb+Tp6er0JMMvDxPo5xev9zTBF683NS+N56n1YiB95B5crr93KRuKhKI0tb0Kw2mgLLqTjLEWO8424i9IvURaYeOckwf3+/yCC9e3bQQ/MuD+Monk0k+XFXMUfx7z5EEP+XlXi5tLlMxH8zLppw7idJrugcus30kC86gc7UrQqjLIukM8zWHOACeU+TiMxXN6ExVOkgz4lvPEzice1GIVhxhG4CrZvpl6TH55giKWqXGLy9hZh5aUtgDSew/msSyCKpl+DDNfxJc8NBIsxUxUnz14O/oONu+IIIvso9TLBQ1SY5rUhuSzUhAqJ2mRXBLDOCeUtgUZXsaObT8BffhUJPqWgiV+3zKKzYH0ClvTRLhD77HIqVkyh5jThnivehoG+qJctIRSPn6bxvO4FCgTl9c1DmbpjLajbQFE8aW5SU3rg+zOPGUjTUF9NFpLEbH2c/KmGYlY69/GQJVtGMSUcEp9eCbB1nctbxHTLRdTUkGDf+B02uGWRG3OvpJ/zSMwzif+oxVBID3cQKBavLCiPmB2PM2UuSCUPgrX4VDb97AwEG67bh4+KTOlncvu3M31BwA5rLHbCfEjwkNDky9e/SSbSxnD46Pg0RJtpXRvhmBSZHpRjWtKwFybjuQeXaKxto4WjLZZZvVmC17pZLJFkwxm5++PS2Mrwc7nyIMYZe/IzoP5d6QgEybqTXAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'api_key',
                    'label': 'Api Key',
                    'default': '',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': 20,
                    'type': 'int',
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -1,141 +0,0 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """awesome-hd.net torrent provider (XML search api keyed by IMDB id)."""

    urls = {
        'test': 'https://awesome-hd.net/',
        'detail': 'https://awesome-hd.net/torrents.php?torrentid=%s',
        'search': 'https://awesome-hd.net/searchapi.php?action=imdbsearch&passkey=%s&imdb=%s&internal=%s',
        'download': 'https://awesome-hd.net/torrents.php?action=download&id=%s&authkey=%s&torrent_pass=%s',
    }
    http_time_between_calls = 1

    def _search(self, movie, quality, results):
        """Query the search api for the movie's IMDB id and append scored results."""

        data = self.getHTMLData(self.urls['search'] % (self.conf('passkey'), getIdentifier(movie), self.conf('only_internal')))

        if data:
            try:
                soup = BeautifulSoup(data)

                # The api reports failures inside an <error> element
                if soup.find('error'):
                    log.error(soup.find('error').get_text())
                    return

                authkey = soup.find('authkey').get_text()
                entries = soup.find_all('torrent')

                for entry in entries:

                    torrentscore = 0
                    torrent_id = entry.find('id').get_text()
                    name = entry.find('name').get_text()
                    year = entry.find('year').get_text()
                    releasegroup = entry.find('releasegroup').get_text()
                    resolution = entry.find('resolution').get_text()
                    encoding = entry.find('encoding').get_text()
                    freeleech = entry.find('freeleech').get_text()
                    torrent_desc = '/ %s / %s / %s ' % (releasegroup, resolution, encoding)

                    # presumably freeleech == '0.25' marks internal releases
                    # on this tracker - confirm against the site api docs
                    if freeleech == '0.25' and self.conf('prefer_internal'):
                        torrent_desc += '/ Internal'
                        torrentscore += 200

                    # Boost encodes and/or remuxes depending on user preference
                    if encoding == 'x264' and self.conf('favor') in ['encode', 'both']:
                        torrentscore += 300
                    if re.search('Remux', encoding) and self.conf('favor') in ['remux', 'both']:
                        torrentscore += 200

                    results.append({
                        'id': torrent_id,
                        'name': re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) %s' % (name, year, torrent_desc)),
                        'url': self.urls['download'] % (torrent_id, authkey, self.conf('passkey')),
                        'detail_url': self.urls['detail'] % torrent_id,
                        'size': tryInt(entry.find('size').get_text()) / 1048576,
                        'seeders': tryInt(entry.find('seeders').get_text()),
                        'leechers': tryInt(entry.find('leechers').get_text()),
                        'score': torrentscore
                    })

            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'awesomehd',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'Awesome-HD',
|
||||
'description': '<a href="https://awesome-hd.net">AHD</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC+UlEQVR4AV1SO0y6dxQ9H4g8CoIoohZ5NA0aR2UgkYpNB5uocTSaLlrDblMH09Gt8d90r3YpJkanxjA4GGkbO7RNxSABq8jDGnkpD+UD5NV7Bxvbk9wvv+/3uPece66A/yEWi42FQqHVfD7/cbPZtIEglUpjOp3uZHR0dBvAn3gDIRqNgjE4OKj0+Xzf3NzcfD4wMCCjf5TLZbTbbajVatzf3+Pu7q5uNpt35ufnvwBQAScQRREEldfr9RWLxan+/n5YrVa+jFarhVfQQyQSCU4EhULhX15engEgSrjC0dHRVqlUmjQYDBgaGgKtuTqz4mTgIoVCASaTCX19fajVapOHh4dbFJBks9mxcDi8qtFoJEajkfVyJWi1WkxMTMDhcIAT8x6D7/Dd6+vr1fHx8TGp2+3+iqo5+YCzBwIBToK5ubl/mQwPDyMSibAs2Gw2UHNRrValz8/PDUk8Hv9EqVRCr9fj4uICTNflcqFer+Pg4AB7e3uoVCq8x9Rxfn6O7u5uqFQq8FspZXxHTekggByA3W4Hr9PpNDeRL3I1cMhkMrBrnZ2dyGQyvNYIs7OzVbJNPjIyAraLwYdcjR8wXl5eIJfLwRIFQQDLYkm3t7c1CdGPPT4+cpOImp4PODMeaK+n10As2jBbrHifHOjS6qAguVFimkqlwAMmIQnHV1dX4NDQhVwuhyZTV6pgIktzDzkkk0lEwhEEzs7ASQr5Ai4vL1nuccfCwsLO/v6+p9FoyJhF6ekJro/cPCzIZLNQa7rQoK77/SdgWWpKkCaJ5EB9aWnpe6nH40nRMBnJV4f5gw+FX3/5GX/8/htXRZdOzzqhJWn6nl6YbTZqqhrhULD16fT0d8FgcFtYW1vD5uamfGVl5cd4IjldKhZACdkJvKfWUANrxEaJV4hiGVaL1b+7653hXzwRZQr2X76xsfG1xWIRaZzbNPv/CdrjEL9cX/+WXFBSgEPgzxuwG3Yans9OT0+naBZMIJDNfzudzp8WFxd/APAX3uAf9WOTxOPLdosAAAAASUVORK5CYII=',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False,
|
||||
},
|
||||
{
|
||||
'name': 'passkey',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'only_internal',
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'default': 1,
|
||||
'description': 'Only search for internal releases.'
|
||||
},
|
||||
{
|
||||
'name': 'prefer_internal',
|
||||
'advanced': True,
|
||||
'type': 'bool',
|
||||
'default': 1,
|
||||
'description': 'Favors internal releases over non-internal releases.'
|
||||
},
|
||||
{
|
||||
'name': 'favor',
|
||||
'advanced': True,
|
||||
'default': 'both',
|
||||
'type': 'dropdown',
|
||||
'values': [('Encodes & Remuxes', 'both'), ('Encodes', 'encode'), ('Remuxes', 'remux'), ('None', 'none')],
|
||||
'description': 'Give extra scoring to encodes or remuxes.'
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'type': 'int',
|
||||
'default': 20,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
|
||||
@@ -1,139 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'http://www.bit-hdtv.com/',
|
||||
'login': 'http://www.bit-hdtv.com/takelogin.php',
|
||||
'login_check': 'http://www.bit-hdtv.com/messages.php',
|
||||
'detail': 'http://www.bit-hdtv.com/details.php?id=%s',
|
||||
'search': 'http://www.bit-hdtv.com/torrents.php?',
|
||||
}
|
||||
|
||||
# Searches for movies only - BiT-HDTV's subcategory and resolution search filters appear to be broken
|
||||
http_time_between_calls = 1 # Seconds
|
||||
|
||||
def _search(self, media, quality, results):
|
||||
|
||||
query = self.buildUrl(media, quality)
|
||||
|
||||
url = "%s&%s" % (self.urls['search'], query)
|
||||
|
||||
data = self.getHTMLData(url)
|
||||
|
||||
if data:
|
||||
# Remove BiT-HDTV's output garbage so outdated BS4 versions successfully parse the HTML
|
||||
split_data = data.partition('-->')
|
||||
if '## SELECT COUNT(' in split_data[0]:
|
||||
data = split_data[2]
|
||||
|
||||
html = BeautifulSoup(data)
|
||||
|
||||
try:
|
||||
result_table = html.find('table', attrs = {'width': '750', 'class': ''})
|
||||
if result_table is None:
|
||||
return
|
||||
|
||||
entries = result_table.find_all('tr')
|
||||
for result in entries[1:]:
|
||||
|
||||
cells = result.find_all('td')
|
||||
link = cells[2].find('a')
|
||||
torrent_id = link['href'].replace('/details.php?id=', '')
|
||||
|
||||
results.append({
|
||||
'id': torrent_id,
|
||||
'name': link.contents[0].get_text(),
|
||||
'url': cells[0].find('a')['href'],
|
||||
'detail_url': self.urls['detail'] % torrent_id,
|
||||
'size': self.parseSize(cells[6].get_text()),
|
||||
'seeders': tryInt(cells[8].string),
|
||||
'leechers': tryInt(cells[9].string),
|
||||
'get_more_info': self.getMoreInfo,
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
def getLoginParams(self):
|
||||
return {
|
||||
'username': self.conf('username'),
|
||||
'password': self.conf('password'),
|
||||
}
|
||||
|
||||
def getMoreInfo(self, item):
|
||||
full_description = self.getCache('bithdtv.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
|
||||
html = BeautifulSoup(full_description)
|
||||
nfo_pre = html.find('table', attrs = {'class': 'detail'})
|
||||
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||
|
||||
item['description'] = description
|
||||
return item
|
||||
|
||||
def loginSuccess(self, output):
|
||||
return 'logout.php' in output.lower()
|
||||
|
||||
loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'bithdtv',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'BiT-HDTV',
|
||||
'description': '<a href="http://bit-hdtv.com">BiT-HDTV</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABMklEQVR4AZ3Qu0ojcQCF8W9MJcQbJNgEEQUbQVIqWgnaWfkIvoCgggixEAmIhRtY2GV3w7KwU61B0EYIxmiw0YCik84ipaCuc0nmP5dcjIUgOjqDvxf4OAdf9mnMLcUJyPyGSCP+YRdC+Kp8iagJKhuS+InYRhTGgDbeV2uEMand4ZRxizjXHQEimxhraAnUr73BNqQxMiNeV2SwcjTLEVtb4Zl10mXutvOWm2otw5Sxz6TGTbdd6ncuYvVLXAXrvM+ruyBpy1S3JLGDfUQ1O6jn5vTsrJXvqSt4UNfj6vxTRPxBHER5QeSirhLGk/5rWN+ffB1XZuxjnDy1q87m7TS+xOGA+Iv4gfkbaw+nOMXHDHnITGEk0VfRFnn4Po4vNYm6RGukmggR0L08+l+e4HMeASo/i6AJUjLgAAAAAElFTkSuQmCC',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False,
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 20,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,137 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup, SoupStrainer
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'https://www.bitsoup.me/',
|
||||
'login': 'https://www.bitsoup.me/takelogin.php',
|
||||
'login_check': 'https://www.bitsoup.me/my.php',
|
||||
'search': 'https://www.bitsoup.me/browse.php?%s',
|
||||
'baseurl': 'https://www.bitsoup.me/%s',
|
||||
}
|
||||
|
||||
http_time_between_calls = 1 # Seconds
|
||||
only_tables_tags = SoupStrainer('table')
|
||||
|
||||
torrent_name_cell = 1
|
||||
torrent_download_cell = 2
|
||||
|
||||
def _searchOnTitle(self, title, movie, quality, results):
|
||||
|
||||
url = self.urls['search'] % self.buildUrl(title, movie, quality)
|
||||
data = self.getHTMLData(url)
|
||||
|
||||
if data:
|
||||
html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)
|
||||
|
||||
try:
|
||||
result_table = html.find('table', attrs = {'class': 'koptekst'})
|
||||
if not result_table or 'nothing found!' in data.lower():
|
||||
return
|
||||
|
||||
entries = result_table.find_all('tr')
|
||||
for result in entries[1:]:
|
||||
|
||||
all_cells = result.find_all('td')
|
||||
|
||||
torrent = all_cells[self.torrent_name_cell].find('a')
|
||||
download = all_cells[self.torrent_download_cell].find('a')
|
||||
|
||||
torrent_id = torrent['href']
|
||||
torrent_id = torrent_id.replace('details.php?id=', '')
|
||||
torrent_id = torrent_id.replace('&hit=1', '')
|
||||
|
||||
torrent_name = torrent.getText()
|
||||
|
||||
torrent_size = self.parseSize(all_cells[8].getText())
|
||||
torrent_seeders = tryInt(all_cells[10].getText())
|
||||
torrent_leechers = tryInt(all_cells[11].getText())
|
||||
torrent_url = self.urls['baseurl'] % download['href']
|
||||
torrent_detail_url = self.urls['baseurl'] % torrent['href']
|
||||
|
||||
results.append({
|
||||
'id': torrent_id,
|
||||
'name': torrent_name,
|
||||
'size': torrent_size,
|
||||
'seeders': torrent_seeders,
|
||||
'leechers': torrent_leechers,
|
||||
'url': torrent_url,
|
||||
'detail_url': torrent_detail_url,
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
def getLoginParams(self):
|
||||
return {
|
||||
'username': self.conf('username'),
|
||||
'password': self.conf('password'),
|
||||
'ssl': 'yes',
|
||||
}
|
||||
|
||||
def loginSuccess(self, output):
|
||||
return 'logout.php' in output.lower()
|
||||
|
||||
loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'bitsoup',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'Bitsoup',
|
||||
'description': '<a href="https://bitsoup.me">Bitsoup</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAB8ElEQVR4AbWSS2sTURiGz3euk0mswaE37HhNhIrajQheFgF3rgR/lAt/gOBCXNZlo6AbqfUWRVCxi04wqUnTRibpJLaJzdzOOZ6WUumyC5/VHOb9eN/FA91uFx0FjI4IPfgiGLTWH73tn348GKmN7ijD0d2b41fO5qJEaX24AWNIUrVQCTTJ3Llx6vbV6Vtzk7Gi9+ebi996guFDDYAQAVj4FExP5qdOZB49W62t/zH3hECcwsPnbWeMXz6Xi2K1f0ApeK3hMCHHbP5gvvoriBgFAAQJEAxhjJ4u+YWTNsVI6b1JgtPWZkoIefKy4fcii2OTw2BABs7wj3bYDlLL4rvjGWOdTser1j5Xf7c3Q/MbHQYApxItvnm31mhQQ71eX2vUB76/vsWB2hg0QuogrMwLIG8P3InM2/eVGXeDViqVwWB79vRU2lgJYmdHcgXCTAXQFJTN5HguvDCR2Hxsxe8EvT54nlcul5vNpqDIEgwRQanAhAAABgRIyiQcjpIkkTOuWyqVoN/vSylX67XXH74uV1vHRUyxxFqbLBCSmBpiXSq6xcL5QrGYzWZ3XQIAwdlOJB+/aL764ucdmncYs0WsCI7kvTnn+qyDMEnTVCn1Tz5KsBFg6fvWcmsUAcnYNC/g2hnromvvqbHvxv+39S+MX+bWkFXwAgAAAABJRU5ErkJggg==',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False,
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 20,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,116 +0,0 @@
|
||||
import re
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'https://hdbits.org/',
|
||||
'detail': 'https://hdbits.org/details.php?id=%s',
|
||||
'download': 'https://hdbits.org/download.php?id=%s&passkey=%s',
|
||||
'api': 'https://hdbits.org/api/torrents'
|
||||
}
|
||||
|
||||
http_time_between_calls = 1 # Seconds
|
||||
|
||||
def _post_query(self, **params):
|
||||
|
||||
post_data = {
|
||||
'username': self.conf('username'),
|
||||
'passkey': self.conf('passkey')
|
||||
}
|
||||
post_data.update(params)
|
||||
|
||||
try:
|
||||
result = self.getJsonData(self.urls['api'], data = json.dumps(post_data))
|
||||
|
||||
if result:
|
||||
if result['status'] != 0:
|
||||
log.error('Error searching hdbits: %s' % result['message'])
|
||||
else:
|
||||
return result['data']
|
||||
except:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def _search(self, movie, quality, results):
|
||||
|
||||
match = re.match(r'tt(\d{7})', getIdentifier(movie))
|
||||
|
||||
data = self._post_query(imdb = {'id': match.group(1)})
|
||||
|
||||
if data:
|
||||
try:
|
||||
for result in data:
|
||||
results.append({
|
||||
'id': result['id'],
|
||||
'name': result['name'],
|
||||
'url': self.urls['download'] % (result['id'], self.conf('passkey')),
|
||||
'detail_url': self.urls['detail'] % result['id'],
|
||||
'size': tryInt(result['size']) / 1024 / 1024,
|
||||
'seeders': tryInt(result['seeders']),
|
||||
'leechers': tryInt(result['leechers'])
|
||||
})
|
||||
except:
|
||||
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'hdbits',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'HDBits',
|
||||
'wizard': True,
|
||||
'description': '<a href="http://hdbits.org">HDBits</a>',
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABi0lEQVR4AZWSzUsbQRjGdyabTcvSNPTSHlpQQeMHJApC8CJRvHgQQU969+LJP8G7f4N3DwpeFRQvRr0EKaUl0ATSpkigUNFsMl/r9NmZLCEHA/nNO5PfvMPDm0DI6fV3ZxiolEICe1oZCBVCCmBPKwOh2ErKBHGE4KYEXBpSLkUlqO4LcM7f+6nVhRnOhSkOz/hexk+tL+YL0yPF2YmN4tynD++4gTLGkNNac9YFLoREBR1+cnF3dFY6v/m6PD+FaXiNJtgA4xYbABxiGrz6+6HWaI5/+Qh37YS0/3Znc8UxwNGBIIBX22z+/ZdJ+4wzyjpR4PEpODg8tgUXBv2iWUzSpa12B0IR6n6lvt8Aek2lZHb084+fdRNgrwY8z81PjhVy2d2ttUrtV/lbBa+JXGEpDMPnoF2tN1QYRqVUtf6nFbThb7wk7le395elcqhASLb39okDiHY00VCtCTEHwSiH4AI0lkOiT1dwMeSfT3SRxiQWNO7Zwj1egkoVIQFMKvSiC3bcjXq9Jf8DcDIRT3hh10kAAAAASUVORK5CYII=',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False,
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'passkey',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 0,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,195 +0,0 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
|
||||
from couchpotato.core.helpers.variable import tryInt, splitString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'download': 'https://www.ilovetorrents.me/%s',
|
||||
'detail': 'https://www.ilovetorrents.me/%s',
|
||||
'search': 'https://www.ilovetorrents.me/browse.php?search=%s&page=%s&cat=%s',
|
||||
'test': 'https://www.ilovetorrents.me/',
|
||||
'login': 'https://www.ilovetorrents.me/takelogin.php',
|
||||
'login_check': 'https://www.ilovetorrents.me'
|
||||
}
|
||||
|
||||
cat_ids = [
|
||||
(['41'], ['720p', '1080p', 'brrip']),
|
||||
(['19'], ['cam', 'ts', 'dvdrip', 'tc', 'r5', 'scr']),
|
||||
(['20'], ['dvdr'])
|
||||
]
|
||||
|
||||
cat_backup_id = 200
|
||||
disable_provider = False
|
||||
http_time_between_calls = 1
|
||||
|
||||
def _searchOnTitle(self, title, movie, quality, results):
|
||||
|
||||
page = 0
|
||||
total_pages = 1
|
||||
cats = self.getCatId(quality)
|
||||
|
||||
while page < total_pages:
|
||||
|
||||
movieTitle = tryUrlencode('"%s" %s' % (title, movie['info']['year']))
|
||||
search_url = self.urls['search'] % (movieTitle, page, cats[0])
|
||||
page += 1
|
||||
|
||||
data = self.getHTMLData(search_url)
|
||||
if data:
|
||||
try:
|
||||
|
||||
results_table = None
|
||||
|
||||
data_split = splitString(data, '<table')
|
||||
soup = None
|
||||
for x in data_split:
|
||||
soup = BeautifulSoup(x)
|
||||
results_table = soup.find('table', attrs = {'class': 'koptekst'})
|
||||
if results_table:
|
||||
break
|
||||
|
||||
if not results_table:
|
||||
return
|
||||
|
||||
try:
|
||||
pagelinks = soup.findAll(href = re.compile('page'))
|
||||
page_numbers = [int(re.search('page=(?P<page_number>.+'')', i['href']).group('page_number')) for i in pagelinks]
|
||||
total_pages = max(page_numbers)
|
||||
except:
|
||||
pass
|
||||
|
||||
entries = results_table.find_all('tr')
|
||||
|
||||
for result in entries[1:]:
|
||||
prelink = result.find(href = re.compile('details.php'))
|
||||
link = prelink['href']
|
||||
download = result.find('a', href = re.compile('download.php'))['href']
|
||||
|
||||
if link and download:
|
||||
|
||||
def extra_score(item):
|
||||
trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
|
||||
vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
|
||||
confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
|
||||
moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]
|
||||
|
||||
return confirmed + trusted + vip + moderated
|
||||
|
||||
id = re.search('id=(?P<id>\d+)&', link).group('id')
|
||||
url = self.urls['download'] % download
|
||||
|
||||
fileSize = self.parseSize(result.select('td.rowhead')[5].text)
|
||||
results.append({
|
||||
'id': id,
|
||||
'name': toUnicode(prelink.find('b').text),
|
||||
'url': url,
|
||||
'detail_url': self.urls['detail'] % link,
|
||||
'size': fileSize,
|
||||
'seeders': tryInt(result.find_all('td')[2].string),
|
||||
'leechers': tryInt(result.find_all('td')[3].string),
|
||||
'extra_score': extra_score,
|
||||
'get_more_info': self.getMoreInfo
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
def getLoginParams(self):
|
||||
return {
|
||||
'username': self.conf('username'),
|
||||
'password': self.conf('password'),
|
||||
'submit': 'Welcome to ILT',
|
||||
}
|
||||
|
||||
def getMoreInfo(self, item):
|
||||
cache_key = 'ilt.%s' % item['id']
|
||||
description = self.getCache(cache_key)
|
||||
|
||||
if not description:
|
||||
|
||||
try:
|
||||
full_description = self.getHTMLData(item['detail_url'])
|
||||
html = BeautifulSoup(full_description)
|
||||
nfo_pre = html.find('td', attrs = {'class': 'main'}).findAll('table')[1]
|
||||
description = toUnicode(nfo_pre.text) if nfo_pre else ''
|
||||
except:
|
||||
log.error('Failed getting more info for %s', item['name'])
|
||||
description = ''
|
||||
|
||||
self.setCache(cache_key, description, timeout = 25920000)
|
||||
|
||||
item['description'] = description
|
||||
return item
|
||||
|
||||
def loginSuccess(self, output):
|
||||
return 'logout.php' in output.lower()
|
||||
|
||||
loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'ilovetorrents',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'ILoveTorrents',
|
||||
'description': 'Where the Love of Torrents is Born',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACPUlEQVR4AYWM0U9SbxjH3+v266I/oNvWZTfd2J1d0ZqbZEFwWrUImOKs4YwtumFKZvvlJJADR2TCQQlMPKg5NmpREgaekAPnBATKgmK1LqQlx6awHnZWF1Tr2Xfvvs+7z+dB0mlO7StpAh+M4S/2jbo3w8+xvJvlnSneEt+10zwer5ujNUOoChjALWFw5XOwdCAk/P57cGvPl+Oht0W7VJHN5NC1uW1BON4hGjXbwpVWMZhsy9v7sEIXAsDNYBXgdkEoIKyWD2CF8ut/aOXTZc/fBSgLWw1BgA4BDHOV0GkT90cBQpXahU5TFomsb38XhJC5/Tbh1P8c6rJlBeGfAeyMhUFwNVcs9lxV9Ot0dwmyd+mrNvRtbJ2fSPC6Z3Vsvub2z3sDFACAAYzk0+kUyxEkyfN7PopqNBro55A+P6yPKIrL5zF1HwjdeBJJCObIsZO79bo3sHhWhglo5WMV3mazuVPb4fLvSL8/FAkB1hK6rXQPwYhMyROK8VK5LAiH/jsMt0HQjxiN4/ePdoilllcqDyt3Mkg8mRBNbIhMb8RERkowQA/p76g0/UDDdCoNmDminM0qSK5vlpE5kugCHhNPxntwWmJPYTMZtYcFR6ABHQsVRlYLukVORaaULvqKI46keFSCv77kSPS6kxrPptLNDHgz16fWBtyxe6v5h08LUy+KI8ushqTPWWIX8Sg6b45IrGtyW6zXFb/hpQf9m3oqfWuB0fpSw0uZ4WB69En69uOk2rmO2V52PXj+A/mI4ESKpb2HAAAAAElFTkSuQmCC',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'label': 'Username',
|
||||
'type': 'string',
|
||||
'default': '',
|
||||
'description': 'The user name for your ILT account',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'label': 'Password',
|
||||
'type': 'password',
|
||||
'default': '',
|
||||
'description': 'The password for your ILT account.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 0,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}]
|
||||
@@ -1,172 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
|
||||
|
||||
urls = {
|
||||
'test': 'https://www.iptorrents.com/',
|
||||
'base_url': 'https://www.iptorrents.com',
|
||||
'login': 'https://www.iptorrents.com/torrents/',
|
||||
'login_check': 'https://www.iptorrents.com/inbox.php',
|
||||
'search': 'https://www.iptorrents.com/torrents/?%s%%s&q=%s&qf=ti&p=%%d',
|
||||
}
|
||||
|
||||
http_time_between_calls = 1 # Seconds
|
||||
cat_backup_id = None
|
||||
|
||||
def buildUrl(self, title, media, quality):
|
||||
return self._buildUrl(title.replace(':', ''), quality)
|
||||
|
||||
def _buildUrl(self, query, quality):
|
||||
|
||||
cat_ids = self.getCatId(quality)
|
||||
|
||||
if not cat_ids:
|
||||
log.warning('Unable to find category ids for identifier "%s"', quality.get('identifier'))
|
||||
return None
|
||||
|
||||
return self.urls['search'] % ("&".join(("l%d=" % x) for x in cat_ids), tryUrlencode(query).replace('%', '%%'))
|
||||
|
||||
def _searchOnTitle(self, title, media, quality, results):
|
||||
|
||||
freeleech = '' if not self.conf('freeleech') else '&free=on'
|
||||
|
||||
base_url = self.buildUrl(title, media, quality)
|
||||
if not base_url: return
|
||||
|
||||
pages = 1
|
||||
current_page = 1
|
||||
while current_page <= pages and not self.shuttingDown():
|
||||
data = self.getHTMLData(base_url % (freeleech, current_page))
|
||||
|
||||
if data:
|
||||
html = BeautifulSoup(data)
|
||||
|
||||
try:
|
||||
page_nav = html.find('span', attrs = {'class': 'page_nav'})
|
||||
if page_nav:
|
||||
next_link = page_nav.find("a", text = "Next")
|
||||
if next_link:
|
||||
final_page_link = next_link.previous_sibling.previous_sibling
|
||||
pages = int(final_page_link.string)
|
||||
|
||||
result_table = html.find('table', attrs = {'class': 'torrents'})
|
||||
|
||||
if not result_table or 'nothing found!' in data.lower():
|
||||
return
|
||||
|
||||
entries = result_table.find_all('tr')
|
||||
|
||||
for result in entries[1:]:
|
||||
|
||||
torrent = result.find_all('td')
|
||||
if len(torrent) <= 1:
|
||||
break
|
||||
|
||||
torrent = torrent[1].find('a')
|
||||
|
||||
torrent_id = torrent['href'].replace('/details.php?id=', '')
|
||||
torrent_name = six.text_type(torrent.string)
|
||||
torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href'].replace(' ', '.')
|
||||
torrent_details_url = self.urls['base_url'] + torrent['href']
|
||||
torrent_size = self.parseSize(result.find_all('td')[5].string)
|
||||
torrent_seeders = tryInt(result.find('td', attrs = {'class': 'ac t_seeders'}).string)
|
||||
torrent_leechers = tryInt(result.find('td', attrs = {'class': 'ac t_leechers'}).string)
|
||||
|
||||
results.append({
|
||||
'id': torrent_id,
|
||||
'name': torrent_name,
|
||||
'url': torrent_download_url,
|
||||
'detail_url': torrent_details_url,
|
||||
'size': torrent_size,
|
||||
'seeders': torrent_seeders,
|
||||
'leechers': torrent_leechers,
|
||||
})
|
||||
|
||||
except:
|
||||
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
|
||||
break
|
||||
|
||||
current_page += 1
|
||||
|
||||
def getLoginParams(self):
|
||||
return {
|
||||
'username': self.conf('username'),
|
||||
'password': self.conf('password'),
|
||||
'login': 'submit',
|
||||
}
|
||||
|
||||
def loginSuccess(self, output):
|
||||
return 'don\'t have an account' not in output.lower()
|
||||
|
||||
def loginCheckSuccess(self, output):
|
||||
return '/logout.php' in output.lower()
|
||||
|
||||
|
||||
config = [{
|
||||
'name': 'iptorrents',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'list': 'torrent_providers',
|
||||
'name': 'IPTorrents',
|
||||
'description': '<a href="http://www.iptorrents.com">IPTorrents</a>',
|
||||
'wizard': True,
|
||||
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABRklEQVR42qWQO0vDUBiG8zeKY3EqQUtNO7g0J6ZJ1+ifKIIFQXAqDYKCyaaYxM3udrZLHdRFhXrZ6liCW6mubfk874EESgqaeOCF7/Y8hEh41aq6yZi2nyZgBGya9XKtZs4No05pAkZV2YbEmyMMsoSxLQeC46wCTdPPY4HruPQyGIhF97qLWsS78Miydn4XdK46NJ9OsQAYBzMIMf8MQ9wtCnTdWCaIDx/u7uljOIQEe0hiIWPamSTLay3+RxOCSPI9+RJAo7Er9r2bnqjBFAqyK+VyK4f5/Cr5ni8OFKVCz49PFI5GdNvvU7ttE1M1zMU+8AMqFksEhrMnQsBDzqmDAwzx2ehRLwT7yyCI+vSC99c3mozH1NxrJgWWtR1BOECfEJSVCm6WCzJGCA7+IWhBsM4zywDPwEp4vCjx2DzBH2ODAfsDb33Ps6dQwJgAAAAASUVORK5CYII=',
|
||||
'options': [
|
||||
{
|
||||
'name': 'enabled',
|
||||
'type': 'enabler',
|
||||
'default': False,
|
||||
},
|
||||
{
|
||||
'name': 'username',
|
||||
'default': '',
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'default': '',
|
||||
'type': 'password',
|
||||
},
|
||||
{
|
||||
'name': 'freeleech',
|
||||
'default': 0,
|
||||
'type': 'bool',
|
||||
'description': 'Only search for [FreeLeech] torrents.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_ratio',
|
||||
'label': 'Seed ratio',
|
||||
'type': 'float',
|
||||
'default': 1,
|
||||
'description': 'Will not be (re)moved until this seed ratio is met.',
|
||||
},
|
||||
{
|
||||
'name': 'seed_time',
|
||||
'label': 'Seed time',
|
||||
'type': 'int',
|
||||
'default': 40,
|
||||
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
|
||||
},
|
||||
{
|
||||
'name': 'extra_score',
|
||||
'advanced': True,
|
||||
'label': 'Extra Score',
|
||||
'type': 'int',
|
||||
'default': 0,
|
||||
'description': 'Starting score for each release found via this provider.',
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
@@ -1,181 +0,0 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentMagnetProvider):
    """Scraper for KickAssTorrents mirrors.

    Searches by IMDB id, parses the category-tabbed HTML result listing and
    collects magnet links. Results can be limited to moderator-verified
    torrents via the ``only_verified`` setting.
    """

    # URL templates; the first %s is the mirror domain chosen by getDomain().
    urls = {
        'detail': '%s/%s',
        'search': '%s/%s-i%s/',
    }

    # Maps site tab/category slugs to CouchPotato quality identifiers.
    cat_ids = [
        (['cam'], ['cam']),
        (['telesync'], ['ts', 'tc']),
        (['screener', 'tvrip'], ['screener']),
        (['x264', '720p', '1080p', 'blu-ray', 'hdrip'], ['bd50', '1080p', '720p', 'brrip']),
        (['dvdrip'], ['dvdrip']),
        (['dvd'], ['dvdr']),
    ]

    http_time_between_calls = 1  # Seconds
    cat_backup_id = None

    # Known mirrors; used when no explicit domain is configured.
    proxy_list = [
        'https://kickass.to',
        'http://kickass.pw',
        'http://kickassto.come.in',
        'http://katproxy.ws',
        'http://kickass.bitproxy.eu',
        'http://katph.eu',
        'http://kickassto.come.in',
    ]

    def _search(self, media, quality, results):
        """Search by IMDB number and append parsed torrents to `results`."""

        data = self.getHTMLData(self.urls['search'] % (self.getDomain(), 'm', getIdentifier(media).replace('tt', '')))

        if data:

            cat_ids = self.getCatId(quality)
            # Column layout of the result table; None marks an ignored column.
            table_order = ['name', 'size', None, 'age', 'seeds', 'leechers']

            try:
                html = BeautifulSoup(data)
                resultdiv = html.find('div', attrs = {'class': 'tabs'})
                for result in resultdiv.find_all('div', recursive = False):

                    # Tab ids look like "tab-<category>". Remove the literal
                    # prefix: str.strip('tab-') stripped any of the characters
                    # t/a/b/- from both ends and mangled ids such as "tab-ts".
                    tab_id = (result.get('id') or '').lower()
                    if tab_id.startswith('tab-'):
                        tab_id = tab_id[len('tab-'):]
                    if tab_id not in cat_ids:
                        continue

                    try:
                        for temp in result.find_all('tr'):
                            # Skip the header row and rows without a torrent id.
                            # BeautifulSoup returns the class attribute as a
                            # list, so test membership, not identity.
                            if 'firstr' in (temp.get('class') or []) or not temp.get('id'):
                                continue

                            new = {}

                            nr = 0
                            for td in temp.find_all('td'):
                                column_name = table_order[nr]
                                if column_name:

                                    if column_name == 'name':
                                        link = td.find('div', {'class': 'torrentname'}).find_all('a')[2]
                                        new['id'] = temp.get('id')[-7:]
                                        new['name'] = link.text
                                        new['url'] = td.find('a', 'imagnet')['href']
                                        new['detail_url'] = self.urls['detail'] % (self.getDomain(), link['href'][1:])
                                        new['verified'] = True if td.find('a', 'iverify') else False
                                        new['score'] = 100 if new['verified'] else 0
                                    elif column_name == 'size':
                                        new['size'] = self.parseSize(td.text)
                                    elif column_name == 'age':
                                        new['age'] = self.ageToDays(td.text)
                                    elif column_name == 'seeds':
                                        new['seeders'] = tryInt(td.text)
                                    elif column_name == 'leechers':
                                        new['leechers'] = tryInt(td.text)

                                nr += 1

                            # Only store verified torrents
                            if self.conf('only_verified') and not new['verified']:
                                continue

                            results.append(new)
                    except:
                        log.error('Failed parsing KickAssTorrents: %s', traceback.format_exc())

            except AttributeError:
                log.debug('No search results found.')

    def ageToDays(self, age_str):
        """Convert a human-readable age ("2 weeks", "1.5 months") to days.

        Returns an int (fractional totals are truncated by tryInt).
        """
        age = 0
        # Normalize the HTML non-breaking-space entity to a plain space so the
        # regex below can match "<nr> <unit>" pairs.
        age_str = age_str.replace('&nbsp;', ' ')

        regex = r'(\d*.?\d+).(sec|hour|day|week|month|year)+'
        matches = re.findall(regex, age_str)
        for match in matches:
            nr, size = match
            mult = 1
            if size == 'week':
                mult = 7
            elif size == 'month':
                mult = 30.5
            elif size == 'year':
                mult = 365

            age += tryInt(nr) * mult

        return tryInt(age)

    def isEnabled(self):
        """Enabled only when switched on AND a working mirror domain is known."""
        return super(Base, self).isEnabled() and self.getDomain()

    def correctProxy(self, data):
        """Heuristic check that a proxy actually served a KAT search page."""
        return 'search query' in data.lower()
|
||||
|
||||
|
||||
# Settings definition rendered by the web UI (Searcher > torrent providers).
config = [{
    'name': 'kickasstorrents',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'KickAssTorrents',
            'description': '<a href="https://kat.ph/">KickAssTorrents</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACD0lEQVR42pXK20uTcRjA8d/fsJsuap0orBuFlm3hir3JJvQOVmuwllN20Lb2isI2nVHKjBqrCWYaNnNuBrkSWxglhDVJOkBdSWUOq5FgoiOrMdRJ2xPPxW+8OUf1ge/FcyCUSVe2qedK5U/OxNTTXRNXEQ52Glb4O6dNEfK1auJkvRY7+/zxnQbA/D596laXcY3OWOiaIX2393SGznUmxkUo/YkDgqHemuzobQ7+NV+reo5Q1mqp68GABdY3+/EloO+JeN4tEqiFU8f3CwhyWo9E7wfMgI0ELTDx0AvjIxcgvZoC9P7NMN7yMmrFeoKa68rfDfmrARsNN0Ihr55cx59ctZWSiwS5bLKpwW4dYJH+M/B6/CYszE0BFZ+egG+Ln+HRoBN/cpl1pV6COIMkOnBVA/w+fXgGKJVM4LxhumMleoL06hJ3wKcCfl+/TAKKx17gnFePRwkqxR4BQSpFkbCrrQJueI7mWpyfATQ9OQY43+uv/+PutBycJ3y2qn2x7jY50GJvnwLKZjOwspyE5I8F4N+1yr1uwqcs3ym63Hwo29EiAyzUWQVr6WVAS4lZCPutQG/2GtES2YiW3d3XflYKtL72kzAcdEDHeSa3czeIMyyz/TApRKvcFfE0isHbJMnrHCf6xTLb1ORvWNlWo91cvHrJUQo0o6ZoRi7dIiT/g2WEDi27Iyov21xMCvgNfXvtwIACfHwAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': True,
                },
                {
                    'name': 'domain',
                    'advanced': True,
                    'label': 'Proxy server',
                    'description': 'Domain for requests, keep empty to let CouchPotato pick.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'only_verified',
                    'advanced': True,
                    'type': 'bool',
                    'default': False,
                    'description': 'Only search for verified releases.'
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -1,135 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """SceneAccess provider: logs in and scrapes the browse result pages."""

    urls = {
        'test': 'https://www.sceneaccess.eu/',
        'login': 'https://www.sceneaccess.eu/login',
        'login_check': 'https://www.sceneaccess.eu/inbox',
        'detail': 'https://www.sceneaccess.eu/details?id=%s',
        'search': 'https://www.sceneaccess.eu/browse?c%d=%d',
        'archive': 'https://www.sceneaccess.eu/archive?&c%d=%d',
        'download': 'https://www.sceneaccess.eu/%s',
    }

    http_time_between_calls = 1  # Seconds

    def _searchOnTitle(self, title, media, quality, results):
        """Scrape the browse page for `title` and append release rows to `results`."""

        page = self.getHTMLData(self.buildUrl(title, media, quality))
        if not page:
            return

        html = BeautifulSoup(page)

        try:
            table = html.find('table', attrs = {'id': 'torrents-table'})
            if table is None:
                return

            for row in table.find_all('tr', attrs = {'class': 'tt_row'}):

                name_link = row.find('td', attrs = {'class': 'ttr_name'}).find('a')
                dl_link = row.find('td', attrs = {'class': 'td_dl'}).find('a')
                leech_link = row.find('td', attrs = {'class': 'ttr_leechers'}).find('a')
                torrent_id = name_link['href'].replace('details?id=', '')

                results.append({
                    'id': torrent_id,
                    'name': name_link['title'],
                    'url': self.urls['download'] % dl_link['href'],
                    'detail_url': self.urls['detail'] % torrent_id,
                    'size': self.parseSize(row.find('td', attrs = {'class': 'ttr_size'}).contents[0]),
                    'seeders': tryInt(row.find('td', attrs = {'class': 'ttr_seeders'}).find('a').string),
                    'leechers': tryInt(leech_link.string) if leech_link else 0,
                    'get_more_info': self.getMoreInfo,
                })

        except:
            log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def getMoreInfo(self, item):
        """Fetch (and long-term cache) the detail page to fill in `item['description']`."""
        detail_page = self.getCache('sceneaccess.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
        details = BeautifulSoup(detail_page).find('div', attrs = {'id': 'details_table'})

        item['description'] = toUnicode(details.text) if details else ''
        return item

    # Login
    def getLoginParams(self):
        """Form fields for the login POST."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'submit': 'come on in',
        }

    def loginSuccess(self, output):
        """Logged-in pages link to the private inbox."""
        return '/inbox' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# Settings definition rendered by the web UI (Searcher > torrent providers).
config = [{
    'name': 'sceneaccess',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'SceneAccess',
            'description': '<a href="https://sceneaccess.eu/">SceneAccess</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAACT0lEQVR4AYVQS0sbURidO3OTmajJ5FElTTOkPmZ01GhHrIq0aoWAj1Vc+A/cuRMXbl24V9SlCGqrLhVFCrooEhCp2BAx0mobTY2kaR7qmOm87EXL1EWxh29xL+c7nPMdgGHYO5bF/gdbefnr6WlbWRnxluMwAB4Z0uEgXa7nwaDL7+/RNPzxbYvb/XJ0FBYVfd/ayh0fQ4qCGEHcm0KLRZUk7Pb2YRJPRwcsKMidnKD3t9VVT3s7BDh+z5FOZ3Vfn3h+Hltfx00mRRSRWFcUmmVNhYVqPn8dj3va2oh+txvcQRVF9ebm1fi4k+dRFbosY5rm4Hk7xxULQnJnx93S4g0EIEEQRoDLo6PrWEw8Pc0eHLwYGopMTDirqlJ7eyhYYGHhfgfHCcKYksZGVB/NcXI2mw6HhZERqrjYTNPHi4tFPh8aJIYIhgPlcCRDoZLW1s75+Z/7+59nZ/OJhLWigqAoKZX6Mjf3dXkZ3pydGYLc4aEoCCkInzQ1fRobS2xuvllaonkedfArnY5OTdGVldBkOADgqq2Nr6z8CIWaJietDHOhKB+HhwFKC6Gnq4ukKJvP9zcSbjYDXbeVlkKzuZBhnnV3e3t6UOmaJO0ODibW1hB1GYkg8R/gup7Z3TVZLJ5AILW9LcZiVpYtYBhw16O3t7cauckyeF9Tgz0ATpL2+nopmWycmbnY2LiKRjFk6/d7+/vRJfl4HGzV1T0UIM43MGBvaIBWK/YvwM5w+IMgGH8tkyEgvIpE7M3Nt6qqZrNyOq1kMmouh455Ggz+BhKY4GEc2CfwAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -1,172 +0,0 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentMagnetProvider):
    """ThePirateBay magnet provider.

    Pages through the search results of one of the known (proxy) domains
    and scrapes magnet links plus a trust-based extra score per release.
    """

    urls = {
        'detail': '%s/torrent/%s',
        'search': '%s/search/%%s/%%s/7/%%s'
    }

    cat_backup_id = 200
    disable_provider = False
    http_time_between_calls = 0

    # Known mirrors/proxies; used when no explicit domain is configured.
    proxy_list = [
        'https://dieroschtibay.org',
        'https://thebay.al',
        'https://thepiratebay.se',
        'http://thepiratebay.se.net',
        'http://thebootlegbay.com',
        'http://tpb.ninja.so',
        'http://proxybay.fr',
        'http://pirateproxy.in',
        'http://piratebay.skey.sk',
        'http://pirateproxy.be',
        'http://bayproxy.li',
        'http://proxybay.pw',
    ]

    def _search(self, media, quality, results):
        """Walk the paged search results, appending parsed torrents to `results`."""

        page = 0
        total_pages = 1
        cats = self.getCatId(quality)

        base_search_url = self.urls['search'] % self.getDomain()

        while page < total_pages:

            search_url = base_search_url % self.buildUrl(media, page, cats)

            page += 1

            data = self.getHTMLData(search_url)

            if data:
                try:
                    soup = BeautifulSoup(data)
                    results_table = soup.find('table', attrs = {'id': 'searchResult'})

                    if not results_table:
                        return

                    try:
                        # The pager links tell us how many pages exist.
                        total_pages = len(soup.find('div', attrs = {'align': 'center'}).find_all('a'))
                    except:
                        pass

                    entries = results_table.find_all('tr')
                    for result in entries[1:]:
                        link = result.find(href = re.compile(r'torrent\/\d+\/'))
                        download = result.find(href = re.compile('magnet:'))

                        try:
                            size = re.search('Size (?P<size>.+),', six.text_type(result.select('font.detDesc')[0])).group('size')
                        except:
                            continue

                        if link and download:

                            # Bind `result` as a default argument: these
                            # callbacks run after the loop has finished, and a
                            # late-bound closure over the loop variable would
                            # score every release against the *last* row.
                            def extra_score(item, result = result):
                                trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
                                vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
                                confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
                                moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]

                                return confirmed + trusted + vip + moderated

                            results.append({
                                'id': re.search(r'/(?P<id>\d+)/', link['href']).group('id'),
                                'name': six.text_type(link.string),
                                'url': download['href'],
                                'detail_url': self.getDomain(link['href']),
                                'size': self.parseSize(size),
                                'seeders': tryInt(result.find_all('td')[2].string),
                                'leechers': tryInt(result.find_all('td')[3].string),
                                'extra_score': extra_score,
                                'get_more_info': self.getMoreInfo
                            })

                except:
                    log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def isEnabled(self):
        """Enabled only when switched on AND a working mirror domain is known."""
        return super(Base, self).isEnabled() and self.getDomain()

    def correctProxy(self, data):
        """Heuristic check that a proxy actually served a TPB search page."""
        return 'title="Pirate Search"' in data

    def getMoreInfo(self, item):
        """Fetch (and long-term cache) the detail page to fill in `item['description']`."""
        full_description = self.getCache('tpb.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
        html = BeautifulSoup(full_description)
        nfo_pre = html.find('div', attrs = {'class': 'nfo'})
        description = ''
        try:
            description = toUnicode(nfo_pre.text)
        except:
            pass

        item['description'] = description
        return item
|
||||
|
||||
|
||||
# Settings definition rendered by the web UI (Searcher > torrent providers).
config = [{
    'name': 'thepiratebay',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'ThePirateBay',
            'description': 'The world\'s largest bittorrent tracker. <a href="http://fucktimkuik.org/">ThePirateBay</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAA3UlEQVQY02P4DwT/YADIZvj//7qnozMYODmtAAusZoCDELDAegYGViZhAWZmRoYoqIDupfhNN1M3dTBEggXWMZg9jZRXV77YxhAOFpjDwMAPMoCXmcHsF1SAQZ6bQY2VgUEbKHClcAYzg3mINEO8jSCD478/DPsZmvqWblu1bOmStes3Pp0ezVDF4Gif0Hfx9///74/ObRZ2YNiZ47C8XIRBxFJR0jbSSUud4f9zAQWn8NTuziAt2zy5xIMM/z8LFX0E+fD/x0MRDCeA1v7Z++Y/FDzyvAtyBxIA+h8A8ZKLeT+lJroAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False
                },
                {
                    'name': 'domain',
                    'advanced': True,
                    'label': 'Proxy server',
                    'description': 'Domain for requests, keep empty to let CouchPotato pick.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        }
    ]
}]
|
||||
@@ -1,136 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """TorrentBytes provider: logs in and scrapes the browse result pages."""

    urls = {
        'test': 'https://www.torrentbytes.net/',
        'login': 'https://www.torrentbytes.net/takelogin.php',
        'login_check': 'https://www.torrentbytes.net/inbox.php',
        'detail': 'https://www.torrentbytes.net/details.php?id=%s',
        'search': 'https://www.torrentbytes.net/browse.php?search=%s&cat=%d',
        'download': 'https://www.torrentbytes.net/download.php?id=%s&name=%s',
    }

    # Maps site category numbers to CouchPotato quality identifiers.
    cat_ids = [
        ([5], ['720p', '1080p', 'bd50']),
        ([19], ['cam']),
        ([19], ['ts', 'tc']),
        ([19], ['r5', 'scr']),
        ([19], ['dvdrip']),
        ([19], ['brrip']),
        ([20], ['dvdr']),
    ]

    http_time_between_calls = 1  # Seconds
    cat_backup_id = None

    def _searchOnTitle(self, title, movie, quality, results):
        """Scrape the browse page for "<title> <year>" and append release rows."""

        url = self.urls['search'] % (tryUrlencode('%s %s' % (title.replace(':', ''), movie['info']['year'])), self.getCatId(quality)[0])
        data = self.getHTMLData(url)

        if data:
            html = BeautifulSoup(data)

            try:
                result_table = html.find('table', attrs = {'border': '1'})
                if not result_table:
                    return

                entries = result_table.find_all('tr')

                for result in entries[1:]:
                    cells = result.find_all('td')

                    link = cells[1].find('a', attrs = {'class': 'index'})

                    full_id = link['href'].replace('details.php?id=', '')
                    # The href carries extra query parameters; the id proper is
                    # the leading digits.
                    torrent_id = full_id[:6]

                    results.append({
                        'id': torrent_id,
                        'name': link.contents[0],
                        'url': self.urls['download'] % (torrent_id, link.contents[0]),
                        'detail_url': self.urls['detail'] % torrent_id,
                        'size': self.parseSize(cells[6].contents[0] + cells[6].contents[2]),
                        'seeders': tryInt(cells[8].find('span').contents[0]),
                        'leechers': tryInt(cells[9].find('span').contents[0]),
                    })

            except:
                log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """Form fields for the login POST."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'login': 'submit',
        }

    def loginSuccess(self, output):
        """Detect a logged-in page.

        Compare against a lowercase needle: the original tested
        'Welcome' (capital W) against output.lower(), which can never match.
        """
        return 'logout.php' in output.lower() or 'welcome' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# Settings definition rendered by the web UI (Searcher > torrent providers).
config = [{
    'name': 'torrentbytes',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentBytes',
            'description': '<a href="http://torrentbytes.net">TorrentBytes</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAeFBMVEUAAAAAAEQAA1QAEmEAKnQALHYAMoEAOokAQpIASYsASZgAS5UATZwATosATpgAVJ0AWZwAYZ4AZKAAaZ8Ab7IAcbMAfccAgcQAgcsAhM4AiscAjMkAmt0AoOIApecAp/EAqvQAs+kAt+wA3P8A4f8A//8VAAAfDbiaAl08AAAAjUlEQVQYGQXBO04DQRAFwHqz7Z8sECIl5f73ISRD5GBs7UxTlWfg9vYXnvJRQJqOL88D6BAwJtMMumHUVCl60aa6H93IrIv0b+157f1lpk+fm87lMWrZH0vncKbXdRUQrRmrh9C6Iwkq6rg4PXZcyXmbizzeV/g+rDra0rGve8jPKLSOJNi2AQAwAGjwD7ApPkEHdtPQAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -1,114 +0,0 @@
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """TorrentDay provider: queries the site's JSON search API."""

    urls = {
        'test': 'http://www.td.af/',
        'login': 'http://www.td.af/torrents/',
        'login_check': 'http://www.torrentday.com/userdetails.php',
        'detail': 'http://www.td.af/details.php?id=%s',
        'search': 'http://www.td.af/V3/API/API.php',
        'download': 'http://www.td.af/download.php/%s/%s',
    }

    http_time_between_calls = 1  # Seconds

    def _searchOnTitle(self, title, media, quality, results):
        """Query the JSON API for '"<title>" <year>' and append the torrents found."""

        search_query = '"%s" %s' % (title, media['info']['year'])

        post_data = {
            '/browse.php?': None,
            'cata': 'yes',
            'jxt': 8,
            'jxw': 'b',
            'search': search_query,
        }

        response = self.getJsonData(self.urls['search'], data = post_data)

        # Dig the torrent list out of the nested response; bail out quietly
        # when the structure is missing or malformed.
        try:
            found = response.get('Fs', [])[0].get('Cn', {}).get('torrents', [])
        except:
            return

        for entry in found:
            results.append({
                'id': entry['id'],
                'name': entry['name'],
                'url': self.urls['download'] % (entry['id'], entry['fname']),
                'detail_url': self.urls['detail'] % entry['id'],
                'size': self.parseSize(entry.get('size')),
                'seeders': tryInt(entry.get('seed')),
                'leechers': tryInt(entry.get('leech')),
            })

    def getLoginParams(self):
        """Form fields for the login POST."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'submit.x': 18,
            'submit.y': 11,
            'submit': 'submit',
        }

    def loginSuccess(self, output):
        """Any response without the failure message counts as a login."""
        return 'Password not correct' not in output

    def loginCheckSuccess(self, output):
        """A logged-in page contains the logout link."""
        return 'logout.php' in output.lower()
|
||||
|
||||
|
||||
# Settings definition rendered by the web UI (Searcher > torrent providers).
config = [{
    'name': 'torrentday',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentDay',
            'description': '<a href="http://www.td.af/">TorrentDay</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC5ElEQVQ4y12TXUgUURTH//fO7Di7foeQJH6gEEEIZZllVohfSG/6UA+RSFAQQj74VA8+Bj30lmAlRVSEvZRfhNhaka5ZUG1paKaW39tq5O6Ou+PM3M4o6m6X+XPPzD3zm/+dcy574r515WfIW8CZBM4YAA5Gc/aQC3yd7oXYEENcsISE5dTDh91HS0t7FEWhBUAeN9ynV/d9qJAgE4AECURAcVsGlCCnly26LMA0IQwTa52dje3d3e3hcPi8qqrrMjcVYI3EHCQZlkFOHBwR2QHh2ASAAIJxWGAQEDxjePhs3527XjJwnb37OHBq0T+Tyyjh+9KnEzNJ7nouc1Q/3A3HGsOvnJy+PSUlj81w2Lny9WuJ6+3AmTjD4HOcrdR2dWXLRQePvyaSLfQOPMPC8mC9iHCsOxSyzJCelzdSXlNzD5ujpb25Wbfc/XXJemTXF4+nnCNq+AMLe50uFfEJTiw4GXSFtiHL0SnIq66+p0kSArqO+eH3RdsAv9+f5vW7L7GICq6rmM8XBCAXlBw90rOyxibn5yzfkg/L09M52/jxqdESaIrBXHYZZbB1GX8cEpySxKIB8S5XcOnvqpli1zuwmrTtoLjw5LOK/eeuWsE4JH5IRPaPZKiKigmPp+5pa+u1aEjIMhEgrRkmi9mgxGUhM7LNJSzOzsE3+cOeExovXOjdytE0LV4zqNZUtV0uZzAGoGkhDH/2YHZiErmv4uyWQnZZWc+hoqL3WzlTExN5hhA8IEwkZWZOxwB++30YG/9GkYCPvqAaHAW5uWPROW86OmqCprUR7z1yZDAGQNuCvkoB/baIKUBWMTYymv+gra3eJNvjXu+B562tFyXqTJ6YuHK8rKwvBmC3vR7cOCPQLWFz8LnfXWUrJo9U19BwMyUlJRjTSMJ2ENxUiGxq9KXQfwqYlnWstvbR5aamG9g0uzM8Q4OFt++3NNixQ2NgYmeN03FOTUv7XVpV9aKisvLl1vN/WVhNc/Fi1NEAAAAASUVORK5CYII=',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -1,126 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """TorrentLeech provider: authenticated browse-page scraper."""

    urls = {
        'test': 'https://www.torrentleech.org/',
        'login': 'https://www.torrentleech.org/user/account/login/',
        'login_check': 'https://torrentleech.org/user/messages',
        'detail': 'https://www.torrentleech.org/torrent/%s',
        'search': 'https://www.torrentleech.org/torrents/browse/index/query/%s/categories/%d',
        'download': 'https://www.torrentleech.org%s',
    }

    http_time_between_calls = 1  # Seconds
    cat_backup_id = None

    def _searchOnTitle(self, title, media, quality, results):
        """Scrape the browse page for `title` and append release rows to `results`."""

        page = self.getHTMLData(self.urls['search'] % self.buildUrl(title, media, quality))
        if not page:
            return

        html = BeautifulSoup(page)

        try:
            table = html.find('table', attrs = {'id': 'torrenttable'})
            if not table:
                return

            for row in table.find_all('tr')[1:]:

                # The name cell's anchor carries both the display name and the
                # relative detail link.
                name_link = row.find('td', attrs = {'class': 'name'}).find('a')
                dl_link = row.find('td', attrs = {'class': 'quickdownload'}).find('a')

                results.append({
                    'id': name_link['href'].replace('/torrent/', ''),
                    'name': six.text_type(name_link.string),
                    'url': self.urls['download'] % dl_link['href'],
                    'detail_url': self.urls['download'] % name_link['href'],
                    'size': self.parseSize(row.find_all('td')[4].string),
                    'seeders': tryInt(row.find('td', attrs = {'class': 'seeders'}).string),
                    'leechers': tryInt(row.find('td', attrs = {'class': 'leechers'}).string),
                })

        except:
            log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """Form fields for the login POST."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'remember_me': 'on',
            'login': 'submit',
        }

    def loginSuccess(self, output):
        """Logged-in pages carry the logout link or the welcome banner."""
        return '/user/account/logout' in output.lower() or 'welcome back' in output.lower()

    loginCheckSuccess = loginSuccess
|
||||
|
||||
|
||||
# Settings definition rendered by the web UI (Searcher > torrent providers).
config = [{
    'name': 'torrentleech',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentLeech',
            'description': '<a href="http://torrentleech.org">TorrentLeech</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACHUlEQVR4AZVSO48SYRSdGTCBEMKzILLAWiybkKAGMZRUUJEoDZX7B9zsbuQPYEEjNLTQkYgJDwsoSaxspEBsCITXjjNAIKi8AkzceXgmbHQ1NJ5iMufmO9/9zrmXlCSJ+B8o75J8Pp/NZj0eTzweBy0Wi4PBYD6f12o1r9ebTCZx+22HcrnMsuxms7m6urTZ7LPZDMVYLBZ8ZV3yo8aq9Pq0wzCMTqe77dDv9y8uLyAWBH6xWOyL0K/56fcb+rrPgPZ6PZfLRe1fsl6vCUmGKIqoqNXqdDr9Dbjps9znUV0uTqdTjuPkDoVCIfcuJ4gizjMMm8u9vW+1nr04czqdK56c37CbKY9j2+1WEARZ0Gq1RFHAz2q1qlQqXxoN69HRcDjUarW8ZD6QUigUOnY8uKYH8N1sNkul9yiGw+F6vS4Rxn8EsodEIqHRaOSnq9T7ajQazWQycEIR1AEBYDabSZJyHDucJyegwWBQr9ebTCaKvHd4cCQANUU9evwQ1Ofz4YvUKUI43GE8HouSiFiNRhOowWBIpVLyHITJkuW3PwgAEf3pgIwxF5r+OplMEsk3CPT5szCMnY7EwUdhwUh/CXiej0Qi3idPz89fdrpdbsfBzH7S3Q9K5pP4c0sAKpVKoVAQGO1ut+t0OoFAQHkH2Da/3/+but3uarWK0ZMQoNdyucRutdttmqZxMTzY7XaYxsrgtUjEZrNhkSwWyy/0NCatZumrNQAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 20,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -1,188 +0,0 @@
|
||||
from six.moves import urllib
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import splitString, tryInt, tryFloat
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.base import ResultList
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """Provider for user-configured "TorrentPotato" hosts.

    Each configured host exposes a CouchPotato-compatible JSON search API;
    the parallel comma-separated settings (host/name/pass_key/...) describe
    one host per position.
    """

    urls = {}
    limits_reached = {}

    http_time_between_calls = 1  # Seconds

    def search(self, media, quality):
        """Query every enabled host and merge all results into one ResultList."""
        hosts = self.getHosts()

        results = ResultList(self, media, quality, imdb_results = True)

        for host in hosts:
            if self.isDisabled(host):
                continue

            self._searchOnHost(host, media, quality, results)

        return results

    def _searchOnHost(self, host, media, quality, results):
        """Fetch JSON results from one host and append them to `results`."""

        torrents = self.getJsonData(self.buildUrl(media, host), cache_timeout = 1800)

        if torrents:
            try:
                if torrents.get('error'):
                    log.error('%s: %s', (torrents.get('error'), host['host']))
                elif torrents.get('results'):
                    for torrent in torrents.get('results', []):
                        results.append({
                            'id': torrent.get('torrent_id'),
                            'protocol': 'torrent' if re.match('^(http|https|ftp)://.*$', torrent.get('download_url')) else 'torrent_magnet',
                            # six.moves.urllib exposes urlparse through its
                            # `parse` submodule; `urllib.urlparse` does not
                            # exist there and raised AttributeError.
                            'provider_extra': urllib.parse.urlparse(host['host']).hostname or host['host'],
                            'name': toUnicode(torrent.get('release_name')),
                            'url': torrent.get('download_url'),
                            'detail_url': torrent.get('details_url'),
                            'size': torrent.get('size'),
                            'score': host['extra_score'],
                            'seeders': torrent.get('seeders'),
                            'leechers': torrent.get('leechers'),
                            'seed_ratio': host['seed_ratio'],
                            'seed_time': host['seed_time'],
                        })

            except:
                log.error('Failed getting results from %s: %s', (host['host'], traceback.format_exc()))

    def getHosts(self):
        """Build one dict per configured host from the parallel comma-separated settings."""

        uses = splitString(str(self.conf('use')), clean = False)
        hosts = splitString(self.conf('host'), clean = False)
        names = splitString(self.conf('name'), clean = False)
        seed_times = splitString(self.conf('seed_time'), clean = False)
        seed_ratios = splitString(self.conf('seed_ratio'), clean = False)
        pass_keys = splitString(self.conf('pass_key'), clean = False)
        extra_score = splitString(self.conf('extra_score'), clean = False)

        def setting(values, nr):
            # The per-host setting lists may be shorter than the host list;
            # missing entries fall back to an empty string.
            try:
                return values[nr]
            except:
                return ''

        host_list = []
        for nr in range(len(hosts)):
            host_list.append({
                'use': uses[nr],
                'host': setting(hosts, nr),
                'name': setting(names, nr),
                'seed_ratio': tryFloat(setting(seed_ratios, nr)),
                'seed_time': tryInt(setting(seed_times, nr)),
                'pass_key': setting(pass_keys, nr),
                'extra_score': tryInt(extra_score[nr]) if len(extra_score) > nr else 0
            })

        return host_list

    def belongsTo(self, url, provider = None, host = None):
        """Return the match result when `url` belongs to any configured host."""

        hosts = self.getHosts()

        for host in hosts:
            result = super(Base, self).belongsTo(url, host = host['host'], provider = provider)
            if result:
                return result

    def isDisabled(self, host = None):
        """Inverse of isEnabled(), for the given host (or any host when None)."""
        return not self.isEnabled(host)

    def isEnabled(self, host = None):
        """With a host dict: that host is usable. Without: any host is usable."""

        # Return true if at least one is enabled and no host is given
        if host is None:
            for host in self.getHosts():
                if self.isEnabled(host):
                    return True
            return False

        return TorrentProvider.isEnabled(self) and host['host'] and host['pass_key'] and int(host['use'])
|
||||
|
||||
|
||||
# Settings definition rendered in the CouchPotato GUI for the TorrentPotato
# provider. All per-host options are collected by the 'combined' field at the
# bottom, which is why their defaults are comma-separated strings rather than
# typed values (one entry per configured host).
config = [{
    'name': 'torrentpotato',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentPotato',
            'order': 10,
            'description': 'CouchPotato torrent provider. Checkout <a href="https://github.com/RuudBurger/CouchPotatoServer/wiki/CouchPotato-Torrent-Provider">the wiki page about this provider</a> for more info.',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABSElEQVR4AZ2Nz0oCURTGv8t1YMpqUxt9ARFxoQ/gQtppgvUKcu/sxB5iBJkogspaBC6iVUplEC6kv+oiiKDNhAtt16roP0HQgdsMLgaxfvy4nHP4Pi44qE2g4v91JOqT1CH/UnA7w7icUlLawyEdj+ZI/7h6YluWbRiddHonHh9M70aj7VTKzuXuikUMci/EO/ACnAI15599oAk8AR/AgxBQNCzreD7bmpl+FOIVuAHqQDUcJo+AK+CZFKLt95/MpSmMt0TiW9POxse6UvYZ6zB2wFgjFiNpOGesR0rZ0PVPXf8KhUCl22CwClz4eN8weoZBb9c0bdPsOWvHx/cYu9Y0CoNoZTJrwAbn5DrnZc6XOV+igVbnsgo0IxEomlJuA1vUIYGyq3PZBChwmExCUSmVZgMBDIUCK4UCFIv5vHIhm/XUDeAf/ADbcpd5+aXSWQAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'use',
                    'default': ''
                },
                {
                    'name': 'host',
                    'default': '',
                    'description': 'The url path of your TorrentPotato provider.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': '0',
                    'description': 'Starting score for each release found via this provider.',
                },
                {
                    'name': 'name',
                    'label': 'Username',
                    'default': '',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'default': '1',
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'default': '40',
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    # 'combined' field: joins the listed options into one
                    # comma-separated value per host when saved.
                    'name': 'pass_key',
                    'default': ',',
                    'label': 'Pass Key',
                    'description': 'Can be found on your profile page',
                    'type': 'combined',
                    'combine': ['use', 'host', 'pass_key', 'name', 'seed_ratio', 'seed_time', 'extra_score'],
                },
            ],
        },
    ],
}]
|
||||
@@ -1,133 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from couchpotato.core.helpers.variable import tryInt
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentProvider):
    """TorrentShack provider: scrapes the HTML search results with BeautifulSoup."""

    urls = {
        'test': 'http://torrentshack.eu/',
        'login': 'http://torrentshack.eu/login.php',
        'login_check': 'http://torrentshack.eu/inbox.php',
        'detail': 'http://torrentshack.eu/torrent/%s',
        'search': 'http://torrentshack.eu/torrents.php?action=advanced&searchstr=%s&scene=%s&filter_cat[%d]=1',
        'download': 'http://torrentshack.eu/%s',
    }

    http_time_between_calls = 1  # Seconds

    def _search(self, media, quality, results):
        """Scrape the search page and append release dicts to `results`."""

        url = self.urls['search'] % self.buildUrl(media, quality)
        data = self.getHTMLData(url)

        if not data:
            return

        html = BeautifulSoup(data)

        try:
            result_table = html.find('table', attrs = {'id': 'torrent_table'})
            if not result_table:
                return

            entries = result_table.find_all('tr', attrs = {'class': 'torrent'})

            for result in entries:

                link = result.find('span', attrs = {'class': 'torrent_name_link'}).parent
                url = result.find('td', attrs = {'class': 'torrent_td'}).find('a')
                tds = result.find_all('td')

                results.append({
                    'id': link['href'].replace('torrents.php?torrentid=', ''),
                    # Strip the soft hyphens (U+00AD) the site inserts into long names.
                    'name': six.text_type(link.span.string).translate({ord(six.u('\xad')): None}),
                    'url': self.urls['download'] % url['href'],
                    'detail_url': self.urls['download'] % link['href'],
                    # Size sits in the 6th cell; reuse `tds` instead of re-querying the row.
                    'size': self.parseSize(tds[5].string),
                    'seeders': tryInt(tds[len(tds) - 2].string),
                    'leechers': tryInt(tds[len(tds) - 1].string),
                })

        except:
            # Fixed log message ("Failed to parsing" -> "Failed parsing").
            log.error('Failed parsing %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """POST body for the site's login form."""
        return {
            'username': self.conf('username'),
            'password': self.conf('password'),
            'keeplogged': '1',
            'login': 'Login',
        }

    def loginSuccess(self, output):
        # A logged-in page always contains a logout link.
        return 'logout.php' in output.lower()

    loginCheckSuccess = loginSuccess

    def getSceneOnly(self):
        # Value of the 'scene' query parameter; the site expects '1' or ''.
        return '1' if self.conf('scene_only') else ''
|
||||
|
||||
|
||||
# Settings definition rendered in the CouchPotato GUI for the TorrentShack
# provider (account credentials plus the usual seed/score knobs).
config = [{
    'name': 'torrentshack',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'TorrentShack',
            'description': '<a href="http://torrentshack.eu/">TorrentShack</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABmElEQVQoFQXBzY2cVRiE0afqvd84CQiAnxWWtyxsS6ThINBYg2Dc7mZBMEjE4mzs6e9WcY5+ePNuVFJJodQAoLo+SaWCy9rcV8cmjah3CI6iYu7oRU30kE5xxELRfamklY3k1NL19sSm7vPzP/ZdNZzKVDaY2sPZJBh9fv5ITrmG2+Vp4e1sPchVqTCQZJnVXi+/L4uuAJGly1+Pw8CprLbi8Om7tbT19/XRqJUk11JP9uHj9ulxhXbvJbI9qJvr5YkGXFG2IBT8tXczt+sfzDZCp3765f3t9tHEHGEDACma77+8o4oATKk+/PfW9YmHruRFjWoVSFsVsGu1YSKq6Oc37+n98unPZSRlY7vsKDqN+92X3yR9+PdXee3iJNKMStqdcZqoTJbUSi5JOkpfRlhSI0mSpEmCFKoU7FqSNOLAk54uGwCStMUCgLrVic62g7oDoFmmdI+P3S0pDe1xvDqb6XrZqbtzShWNoh9fv/XQHaDdM9OqrZi2M7M3UrB2vlkPS1IbdEBk7UiSoD6VlZ6aKWer4aH4f/AvKoHUTjuyAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'scene_only',
                    'type': 'bool',
                    'default': False,
                    'description': 'Only allow scene releases.'
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
|
||||
@@ -1,129 +0,0 @@
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.encoding import tryUrlencode
|
||||
from couchpotato.core.helpers.rss import RSS
|
||||
from couchpotato.core.helpers.variable import tryInt, splitString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||
import six
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentMagnetProvider, RSS):
    """Torrentz meta-search provider, queried through its RSS feeds."""

    urls = {
        'detail': 'https://torrentz.eu/%s',
        'search': 'https://torrentz.eu/feed?q=%s',
        'verified_search': 'https://torrentz.eu/feed_verified?q=%s'
    }

    http_time_between_calls = 0

    def _search(self, media, quality, results):
        """Query the (optionally verified-only) feed and append result dicts to `results`."""

        search_url = self.urls['verified_search'] if self.conf('verified_only') else self.urls['search']

        # Create search parameters
        search_params = self.buildUrl(media)

        smin = quality.get('size_min')
        smax = quality.get('size_max')
        if smin and smax:
            search_params += ' size %sm - %sm' % (smin, smax)

        min_seeds = tryInt(self.conf('minimal_seeds'))
        if min_seeds:
            search_params += ' seed > %s' % (min_seeds - 1)

        rss_data = self.getRSSData(search_url % search_params)

        if rss_data:
            try:

                for result in rss_data:

                    name = self.getTextElement(result, 'title')
                    detail_url = self.getTextElement(result, 'link')
                    description = self.getTextElement(result, 'description')

                    # The info hash is the last path segment of the detail link.
                    magnet = splitString(detail_url, '/')[-1]
                    magnet_url = 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (magnet.upper(), tryUrlencode(name), tryUrlencode('udp://tracker.openbittorrent.com/announce'))

                    reg = re.search(r'Size: (?P<size>\d+) MB Seeds: (?P<seeds>[\d,]+) Peers: (?P<peers>[\d,]+)', six.text_type(description))
                    if not reg:
                        # Unexpected description format: skip this entry instead of
                        # raising AttributeError and aborting the whole feed.
                        continue

                    size = reg.group('size')
                    seeds = reg.group('seeds').replace(',', '')
                    peers = reg.group('peers').replace(',', '')

                    results.append({
                        'id': magnet,
                        'name': six.text_type(name),
                        'url': magnet_url,
                        'detail_url': detail_url,
                        'size': tryInt(size),
                        'seeders': tryInt(seeds),
                        'leechers': tryInt(peers),
                    })

            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
|
||||
|
||||
|
||||
# Settings definition rendered in the CouchPotato GUI for the Torrentz
# meta-search provider.
config = [{
    'name': 'torrentz',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'Torrentz',
            'description': 'Torrentz is a free, fast and powerful meta-search engine. <a href="https://torrentz.eu/">Torrentz</a>',
            'wizard': True,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAQklEQVQ4y2NgAALjtJn/ycEMlGiGG0IVAxiwAKzOxaKGARcgxgC8YNSAwWoAzuRMjgsIugqfAUR5CZcBRIcHsWEAADSA96Ig020yAAAAAElFTkSuQmCC',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': True
                },
                {
                    'name': 'verified_only',
                    'type': 'bool',
                    'default': True,
                    'advanced': True,
                    'description': 'Only search verified releases',
                },
                {
                    'name': 'minimal_seeds',
                    'type': 'int',
                    'default': 1,
                    'advanced': True,
                    'description': 'Only return releases with minimal X seeds',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        }
    ],
}]
|
||||
@@ -1,120 +0,0 @@
|
||||
import traceback
|
||||
|
||||
from couchpotato.core.helpers.variable import tryInt, getIdentifier
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
|
||||
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Base(TorrentMagnetProvider):
    """YIFY/YTS API provider (magnet links; HD-only releases)."""

    urls = {
        'test': '%s/api',
        'search': '%s/api/list.json?keywords=%s&quality=%s',
        'detail': '%s/api/movie.json?id=%s'
    }

    http_time_between_calls = 1  # seconds

    # Mirrors tried in order until one answers and passes correctProxy().
    # BUG FIX: the original list was missing the comma after 'http://yts.im',
    # so Python's implicit string concatenation silently merged it with the
    # following entry into one bogus URL ('http://yts.imhttp://yify-torrents.im').
    proxy_list = [
        'http://yify.unlocktorrent.com',
        'http://yify-torrents.com.come.in',
        'http://yts.re',
        'http://yts.im',
        'http://yify-torrents.im',
    ]

    def search(self, movie, quality):
        # YIFY only encodes HD releases, so skip SD quality searches entirely.
        if not quality.get('hd', False):
            return []

        return super(Base, self).search(movie, quality)

    def _search(self, movie, quality, results):
        """Query the list API for the movie and append release dicts to `results`."""

        domain = self.getDomain()
        if not domain:
            return

        search_url = self.urls['search'] % (domain, getIdentifier(movie), quality['identifier'])

        data = self.getJsonData(search_url)

        if data and data.get('MovieList'):
            try:
                for result in data.get('MovieList'):

                    # Make sure the quality tag ends up in the release name so
                    # the downstream quality guesser can pick it up.
                    if result['Quality'] and result['Quality'] not in result['MovieTitle']:
                        title = result['MovieTitle'] + ' BrRip ' + result['Quality']
                    else:
                        title = result['MovieTitle'] + ' BrRip'

                    results.append({
                        'id': result['MovieID'],
                        'name': title,
                        'url': result['TorrentMagnetUrl'],
                        'detail_url': self.urls['detail'] % (domain, result['MovieID']),
                        'size': self.parseSize(result['Size']),
                        'seeders': tryInt(result['TorrentSeeds']),
                        'leechers': tryInt(result['TorrentPeers']),
                    })

            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def correctProxy(self, data):
        # A genuine mirror mentions both brand names somewhere in the page.
        data = data.lower()
        return 'yify' in data and 'yts' in data
|
||||
|
||||
|
||||
# Settings definition rendered in the CouchPotato GUI for the Yify provider.
config = [{
    'name': 'yify',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'Yify',
            'description': 'Free provider, less accurate. Small HD movies, encoded by <a href="https://yify-torrents.com/">Yify</a>.',
            'wizard': False,
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACL0lEQVR4AS1SPW/UQBAd23fxne/Ld2dvzvHuzPocEBAKokCBqGiQ6IgACYmvUKRBFEQgKKGg4BAlUoggggYUEQpSHOI7CIEoQs/fYcbLaU/efTvvvZlnA1qydoxU5kcxX0CkgmQZtPy0hCUjvK+WgEByOZ5dns1O5bzna8fRVkgsxH8B0YouIvBhdD5T11NiVOoKrsttyUcpRW0InUrFnwe9HzuP2uaQZYhF2LQ76TTXw2RVMTK8mYYbjfh+zNquMVCrqn93aArLSixPxnafdGDLaz1tjY5rmNa8z5BczEQOxQfCl1GyoqoWxYRN1bkh7ELw3q/vhP6HIL4TG9KumpjgvwuyM7OsjSj98E/vszMfZ7xvPtMaWxGO5crwIumKCR5HxDtJ0AWKGG204RfUd/3smJYqwem/Q7BTS1ZGfM4LNpVwuKAz6cMeROst0S2EwNE7GjTehO2H3dxqIpdkydat15G3F8SXBi4GlpBNlSz012L/k2+W0CLLk/jbcf13rf41yJeMQ8QWUZiHCfCA9ad+81nEKPtoS9mJOf9v0NmMJHgUT6xayheK9EIK7JJeU/AF4scDF7Y5SPlJrRcxJ+um4ibNEdObxLiIwJim+eT2AL5D9CIcnZ5zvSJi9eIlNHVVtZ831dk5svPgvjPWTq+ktWkd/kD0qtm71x+sDQe3kt6DXnM7Ct+GajmTxKlkAokWljyAKSm5oWa2w+BH4P2UuVub7eTyiGOQYapY/wEztHduSDYz5gAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False
                },
                {
                    'name': 'domain',
                    'advanced': True,
                    'label': 'Proxy server',
                    'description': 'Domain for requests, keep empty to let CouchPotato pick.',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        }
    ],
}]
|
||||
@@ -1,5 +1,6 @@
|
||||
from .main import Search
|
||||
|
||||
|
||||
def autoload():
|
||||
def start():
|
||||
return Search()
|
||||
|
||||
config = []
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, addEvent
|
||||
from couchpotato.core.helpers.variable import mergeDicts, getImdb
|
||||
from couchpotato.core.helpers.variable import mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.plugins.base import Plugin
|
||||
import six
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
@@ -31,26 +30,17 @@ class Search(Plugin):
|
||||
def search(self, q = '', types = None, **kwargs):
|
||||
|
||||
# Make sure types is the correct instance
|
||||
if isinstance(types, six.string_types):
|
||||
if isinstance(types, (str, unicode)):
|
||||
types = [types]
|
||||
elif isinstance(types, (list, tuple, set)):
|
||||
types = list(types)
|
||||
|
||||
imdb_identifier = getImdb(q)
|
||||
|
||||
if not types:
|
||||
if imdb_identifier:
|
||||
result = fireEvent('movie.info', identifier = imdb_identifier, merge = True)
|
||||
result = {result['type']: [result]}
|
||||
else:
|
||||
result = fireEvent('info.search', q = q, merge = True)
|
||||
result = fireEvent('info.search', q = q, merge = True)
|
||||
else:
|
||||
result = {}
|
||||
for media_type in types:
|
||||
if imdb_identifier:
|
||||
result[media_type] = fireEvent('%s.info' % media_type, identifier = imdb_identifier)
|
||||
else:
|
||||
result[media_type] = fireEvent('%s.search' % media_type, q = q)
|
||||
result[media_type] = fireEvent('%s.search' % media_type)
|
||||
|
||||
return mergeDicts({
|
||||
'success': True,
|
||||
|
||||
@@ -6,10 +6,12 @@
|
||||
top: 0;
|
||||
text-align: right;
|
||||
height: 100%;
|
||||
border-bottom: 4px solid transparent;
|
||||
transition: all .4s cubic-bezier(0.9,0,0.1,1);
|
||||
position: absolute;
|
||||
z-index: 20;
|
||||
border: 0 solid transparent;
|
||||
border-bottom-width: 4px;
|
||||
border: 1px solid transparent;
|
||||
border-width: 0 0 4px;
|
||||
}
|
||||
.search_form:hover {
|
||||
border-color: #047792;
|
||||
@@ -20,19 +22,19 @@
|
||||
right: 44px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.search_form.focused,
|
||||
.search_form.shown {
|
||||
border-color: #04bce6;
|
||||
}
|
||||
|
||||
|
||||
.search_form .input {
|
||||
height: 100%;
|
||||
overflow: hidden;
|
||||
width: 45px;
|
||||
transition: all .4s cubic-bezier(0.9,0,0.1,1);
|
||||
}
|
||||
|
||||
|
||||
.search_form.focused .input,
|
||||
.search_form.shown .input {
|
||||
width: 380px;
|
||||
@@ -47,6 +49,7 @@
|
||||
color: #FFF;
|
||||
font-size: 25px;
|
||||
height: 100%;
|
||||
padding: 10px;
|
||||
width: 100%;
|
||||
opacity: 0;
|
||||
padding: 0 40px 0 10px;
|
||||
@@ -56,23 +59,23 @@
|
||||
.search_form.shown .input input {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
|
||||
.search_form input::-ms-clear {
|
||||
width : 0;
|
||||
height: 0;
|
||||
}
|
||||
|
||||
|
||||
@media all and (max-width: 480px) {
|
||||
.search_form .input input {
|
||||
font-size: 15px;
|
||||
}
|
||||
|
||||
|
||||
.search_form.focused .input,
|
||||
.search_form.shown .input {
|
||||
width: 277px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.search_form .input a {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
@@ -86,7 +89,7 @@
|
||||
font-size: 15px;
|
||||
color: #FFF;
|
||||
}
|
||||
|
||||
|
||||
.search_form .input a:after {
|
||||
content: "\e03e";
|
||||
}
|
||||
@@ -94,7 +97,7 @@
|
||||
.search_form.shown.filled .input a:after {
|
||||
content: "\e04e";
|
||||
}
|
||||
|
||||
|
||||
@media all and (max-width: 480px) {
|
||||
.search_form .input a {
|
||||
line-height: 44px;
|
||||
@@ -164,13 +167,13 @@
|
||||
.media_result .options select[name=title] { width: 170px; }
|
||||
.media_result .options select[name=profile] { width: 90px; }
|
||||
.media_result .options select[name=category] { width: 80px; }
|
||||
|
||||
|
||||
@media all and (max-width: 480px) {
|
||||
|
||||
|
||||
.media_result .options select[name=title] { width: 90px; }
|
||||
.media_result .options select[name=profile] { width: 50px; }
|
||||
.media_result .options select[name=category] { width: 50px; }
|
||||
|
||||
|
||||
}
|
||||
|
||||
.media_result .options .button {
|
||||
@@ -224,14 +227,14 @@
|
||||
right: 7px;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
|
||||
.media_result .info h2 {
|
||||
margin: 0;
|
||||
font-weight: normal;
|
||||
font-size: 20px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
|
||||
.search_form .info h2 {
|
||||
position: absolute;
|
||||
width: 100%;
|
||||
@@ -244,12 +247,12 @@
|
||||
overflow: hidden;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
|
||||
.search_form .info h2 .title {
|
||||
position: absolute;
|
||||
width: 88%;
|
||||
}
|
||||
|
||||
|
||||
.media_result .info h2 .year {
|
||||
padding: 0 5px;
|
||||
text-align: center;
|
||||
@@ -257,14 +260,14 @@
|
||||
width: 12%;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
|
||||
@media all and (max-width: 480px) {
|
||||
|
||||
|
||||
.search_form .info h2 .year {
|
||||
font-size: 12px;
|
||||
margin-top: 7px;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
.search_form .mask,
|
||||
@@ -274,4 +277,4 @@
|
||||
width: 100%;
|
||||
left: 0;
|
||||
top: 0;
|
||||
}
|
||||
}
|
||||
@@ -13,13 +13,10 @@ Block.Search = new Class({
|
||||
self.input = new Element('input', {
|
||||
'placeholder': 'Search & add a new media',
|
||||
'events': {
|
||||
'input': self.keyup.bind(self),
|
||||
'paste': self.keyup.bind(self),
|
||||
'change': self.keyup.bind(self),
|
||||
'keyup': self.keyup.bind(self),
|
||||
'focus': function(){
|
||||
if(focus_timer) clearTimeout(focus_timer);
|
||||
self.el.addClass('focused');
|
||||
self.el.addClass('focused')
|
||||
if(this.get('value'))
|
||||
self.hideResults(false)
|
||||
},
|
||||
@@ -60,17 +57,17 @@ Block.Search = new Class({
|
||||
(e).preventDefault();
|
||||
|
||||
if(self.last_q === ''){
|
||||
self.input.blur();
|
||||
self.input.blur()
|
||||
self.last_q = null;
|
||||
}
|
||||
else {
|
||||
|
||||
self.last_q = '';
|
||||
self.input.set('value', '');
|
||||
self.input.focus();
|
||||
self.input.focus()
|
||||
|
||||
self.media = {};
|
||||
self.results.empty();
|
||||
self.media = {}
|
||||
self.results.empty()
|
||||
self.el.removeClass('filled')
|
||||
|
||||
}
|
||||
@@ -95,16 +92,16 @@ Block.Search = new Class({
|
||||
self.hidden = bool;
|
||||
},
|
||||
|
||||
keyup: function(){
|
||||
keyup: function(e){
|
||||
var self = this;
|
||||
|
||||
self.el[self.q() ? 'addClass' : 'removeClass']('filled');
|
||||
self.el[self.q() ? 'addClass' : 'removeClass']('filled')
|
||||
|
||||
if(self.q() != self.last_q){
|
||||
if(self.api_request && self.api_request.isRunning())
|
||||
self.api_request.cancel();
|
||||
|
||||
if(self.autocomplete_timer) clearTimeout(self.autocomplete_timer);
|
||||
if(self.autocomplete_timer) clearTimeout(self.autocomplete_timer)
|
||||
self.autocomplete_timer = self.autocomplete.delay(300, self)
|
||||
}
|
||||
|
||||
@@ -114,7 +111,7 @@ Block.Search = new Class({
|
||||
var self = this;
|
||||
|
||||
if(!self.q()){
|
||||
self.hideResults(true);
|
||||
self.hideResults(true)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -142,7 +139,7 @@ Block.Search = new Class({
|
||||
})
|
||||
}
|
||||
else
|
||||
self.fill(q, cache);
|
||||
self.fill(q, cache)
|
||||
|
||||
self.last_q = q;
|
||||
|
||||
@@ -151,31 +148,31 @@ Block.Search = new Class({
|
||||
fill: function(q, json){
|
||||
var self = this;
|
||||
|
||||
self.cache[q] = json;
|
||||
self.cache[q] = json
|
||||
|
||||
self.media = {};
|
||||
self.results.empty();
|
||||
|
||||
Object.each(json, function(media){
|
||||
self.media = {}
|
||||
self.results.empty()
|
||||
|
||||
Object.each(json, function(media, type){
|
||||
if(typeOf(media) == 'array'){
|
||||
Object.each(media, function(m){
|
||||
|
||||
|
||||
var m = new Block.Search[m.type.capitalize() + 'Item'](m);
|
||||
$(m).inject(self.results);
|
||||
self.media[m.imdb || 'r-'+Math.floor(Math.random()*10000)] = m;
|
||||
|
||||
$(m).inject(self.results)
|
||||
self.media[m.imdb || 'r-'+Math.floor(Math.random()*10000)] = m
|
||||
|
||||
if(q == m.imdb)
|
||||
m.showOptions()
|
||||
|
||||
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
|
||||
// Calculate result heights
|
||||
var w = window.getSize(),
|
||||
rc = self.result_container.getCoordinates();
|
||||
|
||||
self.results.setStyle('max-height', (w.y - rc.top - 50) + 'px');
|
||||
self.results.setStyle('max-height', (w.y - rc.top - 50) + 'px')
|
||||
self.mask.fade('out')
|
||||
|
||||
},
|
||||
@@ -188,4 +185,4 @@ Block.Search = new Class({
|
||||
return this.input.get('value').trim();
|
||||
}
|
||||
|
||||
});
|
||||
});
|
||||
@@ -1,7 +1,6 @@
|
||||
from .main import Searcher
|
||||
|
||||
|
||||
def autoload():
|
||||
def start():
|
||||
return Searcher()
|
||||
|
||||
config = [{
|
||||
@@ -73,24 +72,4 @@ config = [{
|
||||
],
|
||||
},
|
||||
],
|
||||
}, {
|
||||
'name': 'torrent',
|
||||
'groups': [
|
||||
{
|
||||
'tab': 'searcher',
|
||||
'name': 'searcher',
|
||||
'wizard': True,
|
||||
'options': [
|
||||
{
|
||||
'name': 'minimum_seeders',
|
||||
'advanced': True,
|
||||
'label': 'Minimum seeders',
|
||||
'description': 'Ignore torrents with seeders below this number',
|
||||
'default': 1,
|
||||
'type': 'int',
|
||||
'unit': 'seeders'
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}]
|
||||
|
||||
@@ -12,6 +12,7 @@ class SearcherBase(Plugin):
|
||||
def __init__(self):
|
||||
super(SearcherBase, self).__init__()
|
||||
|
||||
|
||||
addEvent('searcher.progress', self.getProgress)
|
||||
addEvent('%s.searcher.progress' % self.getType(), self.getProgress)
|
||||
|
||||
@@ -25,8 +26,9 @@ class SearcherBase(Plugin):
|
||||
_type = self.getType()
|
||||
|
||||
def setCrons():
|
||||
|
||||
fireEvent('schedule.cron', '%s.searcher.all' % _type, self.searchAll,
|
||||
day = self.conf('cron_day'), hour = self.conf('cron_hour'), minute = self.conf('cron_minute'))
|
||||
day = self.conf('cron_day'), hour = self.conf('cron_hour'), minute = self.conf('cron_minute'))
|
||||
|
||||
addEvent('app.load', setCrons)
|
||||
addEvent('setting.save.%s_searcher.cron_day.after' % _type, setCrons)
|
||||
|
||||
@@ -1,24 +1,20 @@
|
||||
import datetime
|
||||
import re
|
||||
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import addEvent, fireEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString
|
||||
from couchpotato.core.helpers.variable import splitString, removeEmpty, removeDuplicate
|
||||
from couchpotato.core.helpers.variable import splitString
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media._base.searcher.base import SearcherBase
|
||||
|
||||
import datetime
|
||||
import re
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
|
||||
class Searcher(SearcherBase):
|
||||
|
||||
# noinspection PyMissingConstructor
|
||||
def __init__(self):
|
||||
addEvent('searcher.protocols', self.getSearchProtocols)
|
||||
addEvent('searcher.contains_other_quality', self.containsOtherQuality)
|
||||
addEvent('searcher.correct_3d', self.correct3D)
|
||||
addEvent('searcher.correct_year', self.correctYear)
|
||||
addEvent('searcher.correct_name', self.correctName)
|
||||
addEvent('searcher.correct_words', self.correctWords)
|
||||
@@ -52,7 +48,7 @@ class Searcher(SearcherBase):
|
||||
results = []
|
||||
|
||||
for search_protocol in protocols:
|
||||
protocol_results = fireEvent('provider.search.%s.%s' % (search_protocol, media.get('type')), media, quality, merge = True)
|
||||
protocol_results = fireEvent('provider.search.%s.%s' % (search_protocol, media['type']), media, quality, merge = True)
|
||||
if protocol_results:
|
||||
results += protocol_results
|
||||
|
||||
@@ -87,26 +83,34 @@ class Searcher(SearcherBase):
|
||||
def containsOtherQuality(self, nzb, movie_year = None, preferred_quality = None):
|
||||
if not preferred_quality: preferred_quality = {}
|
||||
|
||||
name = nzb['name']
|
||||
size = nzb.get('size', 0)
|
||||
nzb_words = re.split('\W+', simplifyString(name))
|
||||
|
||||
qualities = fireEvent('quality.all', single = True)
|
||||
|
||||
found = {}
|
||||
for quality in qualities:
|
||||
# Main in words
|
||||
if quality['identifier'] in nzb_words:
|
||||
found[quality['identifier']] = True
|
||||
|
||||
# Alt in words
|
||||
if list(set(nzb_words) & set(quality['alternative'])):
|
||||
found[quality['identifier']] = True
|
||||
|
||||
# Try guessing via quality tags
|
||||
guess = fireEvent('quality.guess', files = [nzb.get('name')], size = nzb.get('size', None), single = True)
|
||||
guess = fireEvent('quality.guess', [nzb.get('name')], single = True)
|
||||
if guess:
|
||||
found[guess['identifier']] = True
|
||||
|
||||
# Hack for older movies that don't contain quality tag
|
||||
name = nzb['name']
|
||||
size = nzb.get('size', 0)
|
||||
|
||||
year_name = fireEvent('scanner.name_year', name, single = True)
|
||||
if len(found) == 0 and movie_year < datetime.datetime.now().year - 3 and not year_name.get('year', None):
|
||||
if size > 20000: # Assume bd50
|
||||
log.info('Quality was missing in name, assuming it\'s a BR-Disk based on the size: %s', size)
|
||||
found['bd50'] = True
|
||||
elif size > 3000: # Assume dvdr
|
||||
if size > 3000: # Assume dvdr
|
||||
log.info('Quality was missing in name, assuming it\'s a DVD-R based on the size: %s', size)
|
||||
found['dvdr'] = True
|
||||
else: # Assume dvdrip
|
||||
else: # Assume dvdrip
|
||||
log.info('Quality was missing in name, assuming it\'s a DVD-Rip based on the size: %s', size)
|
||||
found['dvdrip'] = True
|
||||
|
||||
@@ -115,25 +119,7 @@ class Searcher(SearcherBase):
|
||||
if found.get(allowed):
|
||||
del found[allowed]
|
||||
|
||||
if found.get(preferred_quality['identifier']) and len(found) == 1:
|
||||
return False
|
||||
|
||||
return found
|
||||
|
||||
def correct3D(self, nzb, preferred_quality = None):
|
||||
if not preferred_quality: preferred_quality = {}
|
||||
if not preferred_quality.get('custom'): return
|
||||
|
||||
threed = preferred_quality['custom'].get('3d')
|
||||
|
||||
# Try guessing via quality tags
|
||||
guess = fireEvent('quality.guess', [nzb.get('name')], single = True)
|
||||
|
||||
if guess:
|
||||
return threed == guess.get('is_3d')
|
||||
# If no quality guess, assume not 3d
|
||||
else:
|
||||
return threed == False
|
||||
return not (found.get(preferred_quality['identifier']) and len(found) == 1)
|
||||
|
||||
def correctYear(self, haystack, year, year_range):
|
||||
|
||||
@@ -164,12 +150,12 @@ class Searcher(SearcherBase):
|
||||
try: check_names.append(max(re.findall(r'[^[]*\[([^]]*)\]', check_name), key = len).strip())
|
||||
except: pass
|
||||
|
||||
for check_name in removeDuplicate(check_names):
|
||||
for check_name in list(set(check_names)):
|
||||
check_movie = fireEvent('scanner.name_year', check_name, single = True)
|
||||
|
||||
try:
|
||||
check_words = removeEmpty(re.split('\W+', check_movie.get('name', '')))
|
||||
movie_words = removeEmpty(re.split('\W+', simplifyString(movie_name)))
|
||||
check_words = filter(None, re.split('\W+', check_movie.get('name', '')))
|
||||
movie_words = filter(None, re.split('\W+', simplifyString(movie_name)))
|
||||
|
||||
if len(check_words) > 0 and len(movie_words) > 0 and len(list(set(check_words) - set(movie_words))) == 0:
|
||||
return True
|
||||
@@ -178,25 +164,6 @@ class Searcher(SearcherBase):
|
||||
|
||||
return False
|
||||
|
||||
def containsWords(self, rel_name, rel_words, conf, media):
|
||||
|
||||
# Make sure it has required words
|
||||
words = splitString(self.conf('%s_words' % conf, section = 'searcher').lower())
|
||||
try: words = removeDuplicate(words + splitString(media['category'][conf].lower()))
|
||||
except: pass
|
||||
|
||||
req_match = 0
|
||||
for req_set in words:
|
||||
if len(req_set) >= 2 and (req_set[:1] + req_set[-1:]) == '//':
|
||||
if re.search(req_set[1:-1], rel_name):
|
||||
log.debug('Regex match: %s', req_set[1:-1])
|
||||
req_match += 1
|
||||
else:
|
||||
req = splitString(req_set, '&')
|
||||
req_match += len(list(set(rel_words) & set(req))) == len(req)
|
||||
|
||||
return words, req_match > 0
|
||||
|
||||
def correctWords(self, rel_name, media):
|
||||
media_title = fireEvent('searcher.get_search_title', media, single = True)
|
||||
media_words = re.split('\W+', simplifyString(media_title))
|
||||
@@ -204,13 +171,31 @@ class Searcher(SearcherBase):
|
||||
rel_name = simplifyString(rel_name)
|
||||
rel_words = re.split('\W+', rel_name)
|
||||
|
||||
required_words, contains_required = self.containsWords(rel_name, rel_words, 'required', media)
|
||||
if len(required_words) > 0 and not contains_required:
|
||||
# Make sure it has required words
|
||||
required_words = splitString(self.conf('required_words', section = 'searcher').lower())
|
||||
try: required_words = list(set(required_words + splitString(media['category']['required'].lower())))
|
||||
except: pass
|
||||
|
||||
req_match = 0
|
||||
for req_set in required_words:
|
||||
req = splitString(req_set, '&')
|
||||
req_match += len(list(set(rel_words) & set(req))) == len(req)
|
||||
|
||||
if len(required_words) > 0 and req_match == 0:
|
||||
log.info2('Wrong: Required word missing: %s', rel_name)
|
||||
return False
|
||||
|
||||
ignored_words, contains_ignored = self.containsWords(rel_name, rel_words, 'ignored', media)
|
||||
if len(ignored_words) > 0 and contains_ignored:
|
||||
# Ignore releases
|
||||
ignored_words = splitString(self.conf('ignored_words', section = 'searcher').lower())
|
||||
try: ignored_words = list(set(ignored_words + splitString(media['category']['ignored'].lower())))
|
||||
except: pass
|
||||
|
||||
ignored_match = 0
|
||||
for ignored_set in ignored_words:
|
||||
ignored = splitString(ignored_set, '&')
|
||||
ignored_match += len(list(set(rel_words) & set(ignored))) == len(ignored)
|
||||
|
||||
if len(ignored_words) > 0 and ignored_match:
|
||||
log.info2("Wrong: '%s' contains 'ignored words'", rel_name)
|
||||
return False
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from .main import MovieBase
|
||||
|
||||
|
||||
def autoload():
|
||||
def start():
|
||||
return MovieBase()
|
||||
|
||||
config = []
|
||||
|
||||
686
couchpotato/core/media/movie/_base/main.py
Executable file → Normal file
686
couchpotato/core/media/movie/_base/main.py
Executable file → Normal file
@@ -1,16 +1,17 @@
|
||||
import traceback
|
||||
import time
|
||||
|
||||
from couchpotato import get_db
|
||||
from couchpotato import get_session
|
||||
from couchpotato.api import addApiView
|
||||
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
|
||||
from couchpotato.core.helpers.database import RecordNotFound
|
||||
from couchpotato.core.helpers.encoding import toUnicode
|
||||
from couchpotato.core.helpers.variable import splitString, getTitle, getImdb, getIdentifier
|
||||
from couchpotato.core.helpers.variable import getImdb, splitString, tryInt, \
|
||||
mergeDicts
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.media.movie import MovieTypeBase
|
||||
import six
|
||||
|
||||
from couchpotato.core.settings.model import Library, LibraryTitle, Media, \
|
||||
Release
|
||||
from sqlalchemy.orm import joinedload_all
|
||||
from sqlalchemy.sql.expression import or_, asc, not_, desc
|
||||
from string import ascii_lowercase
|
||||
import time
|
||||
|
||||
log = CPLog(__name__)
|
||||
|
||||
@@ -25,16 +26,33 @@ class MovieBase(MovieTypeBase):
|
||||
super(MovieBase, self).__init__()
|
||||
self.initType()
|
||||
|
||||
addApiView('movie.add', self.addView, docs = {
|
||||
'desc': 'Add new movie to the wanted list',
|
||||
addApiView('movie.list', self.listView, docs = {
|
||||
'desc': 'List movies in wanted list',
|
||||
'params': {
|
||||
'status': {'type': 'array or csv', 'desc': 'Filter movie by status. Example:"active,done"'},
|
||||
'release_status': {'type': 'array or csv', 'desc': 'Filter movie by status of its releases. Example:"snatched,available"'},
|
||||
'limit_offset': {'desc': 'Limit and offset the movie list. Examples: "50" or "50,30"'},
|
||||
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all movies starting with the letter "a"'},
|
||||
'search': {'desc': 'Search movie title'},
|
||||
},
|
||||
'return': {'type': 'object', 'example': """{
|
||||
'success': True,
|
||||
'movie': object
|
||||
}"""},
|
||||
'empty': bool, any movies returned or not,
|
||||
'movies': array, movies found,
|
||||
}"""}
|
||||
})
|
||||
addApiView('movie.get', self.getView, docs = {
|
||||
'desc': 'Get a movie by id',
|
||||
'params': {
|
||||
'id': {'desc': 'The id of the movie'},
|
||||
}
|
||||
})
|
||||
addApiView('movie.available_chars', self.charView)
|
||||
addApiView('movie.add', self.addView, docs = {
|
||||
'desc': 'Add new movie to the wanted list',
|
||||
'params': {
|
||||
'identifier': {'desc': 'IMDB id of the movie your want to add.'},
|
||||
'profile_id': {'desc': 'ID of quality profile you want the add the movie in. If empty will use the default profile.'},
|
||||
'category_id': {'desc': 'ID of category you want the add the movie in. If empty will use no category.'},
|
||||
'title': {'desc': 'Movie title to use for searches. Has to be one of the titles returned by movie.search.'},
|
||||
}
|
||||
})
|
||||
@@ -43,27 +61,268 @@ class MovieBase(MovieTypeBase):
|
||||
'params': {
|
||||
'id': {'desc': 'Movie ID(s) you want to edit.', 'type': 'int (comma separated)'},
|
||||
'profile_id': {'desc': 'ID of quality profile you want the edit the movie to.'},
|
||||
'category_id': {'desc': 'ID of category you want the add the movie in. If empty will use no category.'},
|
||||
'default_title': {'desc': 'Movie title to use for searches. Has to be one of the titles returned by movie.search.'},
|
||||
}
|
||||
})
|
||||
addApiView('movie.delete', self.deleteView, docs = {
|
||||
'desc': 'Delete a movie from the wanted list',
|
||||
'params': {
|
||||
'id': {'desc': 'Movie ID(s) you want to delete.', 'type': 'int (comma separated)'},
|
||||
'delete_from': {'desc': 'Delete movie from this page', 'type': 'string: all (default), wanted, manage'},
|
||||
}
|
||||
})
|
||||
|
||||
addEvent('movie.add', self.add)
|
||||
addEvent('movie.update', self.update)
|
||||
addEvent('movie.update_release_dates', self.updateReleaseDate)
|
||||
addEvent('movie.delete', self.delete)
|
||||
addEvent('movie.get', self.get)
|
||||
addEvent('movie.list', self.list)
|
||||
addEvent('movie.restatus', self.restatus)
|
||||
|
||||
def add(self, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):
|
||||
def getView(self, id = None, **kwargs):
|
||||
|
||||
movie = self.get(id) if id else None
|
||||
|
||||
return {
|
||||
'success': movie is not None,
|
||||
'movie': movie,
|
||||
}
|
||||
|
||||
def get(self, movie_id):
|
||||
|
||||
db = get_session()
|
||||
|
||||
imdb_id = getImdb(str(movie_id))
|
||||
|
||||
if imdb_id:
|
||||
m = db.query(Media).filter(Media.library.has(identifier = imdb_id)).first()
|
||||
else:
|
||||
m = db.query(Media).filter_by(id = movie_id).first()
|
||||
|
||||
results = None
|
||||
if m:
|
||||
results = m.to_dict(self.default_dict)
|
||||
|
||||
db.expire_all()
|
||||
return results
|
||||
|
||||
def list(self, status = None, release_status = None, limit_offset = None, starts_with = None, search = None, order = None):
|
||||
|
||||
db = get_session()
|
||||
|
||||
# Make a list from string
|
||||
if status and not isinstance(status, (list, tuple)):
|
||||
status = [status]
|
||||
if release_status and not isinstance(release_status, (list, tuple)):
|
||||
release_status = [release_status]
|
||||
|
||||
# query movie ids
|
||||
q = db.query(Media) \
|
||||
.with_entities(Media.id) \
|
||||
.group_by(Media.id)
|
||||
|
||||
# Filter on movie status
|
||||
if status and len(status) > 0:
|
||||
statuses = fireEvent('status.get', status, single = len(status) > 1)
|
||||
statuses = [s.get('id') for s in statuses]
|
||||
|
||||
q = q.filter(Media.status_id.in_(statuses))
|
||||
|
||||
# Filter on release status
|
||||
if release_status and len(release_status) > 0:
|
||||
q = q.join(Media.releases)
|
||||
|
||||
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
|
||||
statuses = [s.get('id') for s in statuses]
|
||||
|
||||
q = q.filter(Release.status_id.in_(statuses))
|
||||
|
||||
# Only join when searching / ordering
|
||||
if starts_with or search or order != 'release_order':
|
||||
q = q.join(Media.library, Library.titles) \
|
||||
.filter(LibraryTitle.default == True)
|
||||
|
||||
# Add search filters
|
||||
filter_or = []
|
||||
if starts_with:
|
||||
starts_with = toUnicode(starts_with.lower())
|
||||
if starts_with in ascii_lowercase:
|
||||
filter_or.append(LibraryTitle.simple_title.startswith(starts_with))
|
||||
else:
|
||||
ignore = []
|
||||
for letter in ascii_lowercase:
|
||||
ignore.append(LibraryTitle.simple_title.startswith(toUnicode(letter)))
|
||||
filter_or.append(not_(or_(*ignore)))
|
||||
|
||||
if search:
|
||||
filter_or.append(LibraryTitle.simple_title.like('%%' + search + '%%'))
|
||||
|
||||
if len(filter_or) > 0:
|
||||
q = q.filter(or_(*filter_or))
|
||||
|
||||
total_count = q.count()
|
||||
if total_count == 0:
|
||||
return 0, []
|
||||
|
||||
if order == 'release_order':
|
||||
q = q.order_by(desc(Release.last_edit))
|
||||
else:
|
||||
q = q.order_by(asc(LibraryTitle.simple_title))
|
||||
|
||||
if limit_offset:
|
||||
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
|
||||
limit = splt[0]
|
||||
offset = 0 if len(splt) is 1 else splt[1]
|
||||
q = q.limit(limit).offset(offset)
|
||||
|
||||
# Get all movie_ids in sorted order
|
||||
movie_ids = [m.id for m in q.all()]
|
||||
|
||||
# List release statuses
|
||||
releases = db.query(Release) \
|
||||
.filter(Release.movie_id.in_(movie_ids)) \
|
||||
.all()
|
||||
|
||||
release_statuses = dict((m, set()) for m in movie_ids)
|
||||
releases_count = dict((m, 0) for m in movie_ids)
|
||||
for release in releases:
|
||||
release_statuses[release.movie_id].add('%d,%d' % (release.status_id, release.quality_id))
|
||||
releases_count[release.movie_id] += 1
|
||||
|
||||
# Get main movie data
|
||||
q2 = db.query(Media) \
|
||||
.options(joinedload_all('library.titles')) \
|
||||
.options(joinedload_all('library.files')) \
|
||||
.options(joinedload_all('status')) \
|
||||
.options(joinedload_all('files'))
|
||||
|
||||
q2 = q2.filter(Media.id.in_(movie_ids))
|
||||
|
||||
results = q2.all()
|
||||
|
||||
# Create dict by movie id
|
||||
movie_dict = {}
|
||||
for movie in results:
|
||||
movie_dict[movie.id] = movie
|
||||
|
||||
# List movies based on movie_ids order
|
||||
movies = []
|
||||
for movie_id in movie_ids:
|
||||
|
||||
releases = []
|
||||
for r in release_statuses.get(movie_id):
|
||||
x = splitString(r)
|
||||
releases.append({'status_id': x[0], 'quality_id': x[1]})
|
||||
|
||||
# Merge releases with movie dict
|
||||
movies.append(mergeDicts(movie_dict[movie_id].to_dict({
|
||||
'library': {'titles': {}, 'files':{}},
|
||||
'files': {},
|
||||
}), {
|
||||
'releases': releases,
|
||||
'releases_count': releases_count.get(movie_id),
|
||||
}))
|
||||
|
||||
db.expire_all()
|
||||
return total_count, movies
|
||||
|
||||
def availableChars(self, status = None, release_status = None):
|
||||
|
||||
status = status or []
|
||||
release_status = release_status or []
|
||||
|
||||
db = get_session()
|
||||
|
||||
# Make a list from string
|
||||
if not isinstance(status, (list, tuple)):
|
||||
status = [status]
|
||||
if release_status and not isinstance(release_status, (list, tuple)):
|
||||
release_status = [release_status]
|
||||
|
||||
q = db.query(Media)
|
||||
|
||||
# Filter on movie status
|
||||
if status and len(status) > 0:
|
||||
statuses = fireEvent('status.get', status, single = len(release_status) > 1)
|
||||
statuses = [s.get('id') for s in statuses]
|
||||
|
||||
q = q.filter(Media.status_id.in_(statuses))
|
||||
|
||||
# Filter on release status
|
||||
if release_status and len(release_status) > 0:
|
||||
|
||||
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
|
||||
statuses = [s.get('id') for s in statuses]
|
||||
|
||||
q = q.join(Media.releases) \
|
||||
.filter(Release.status_id.in_(statuses))
|
||||
|
||||
q = q.join(Library, LibraryTitle) \
|
||||
.with_entities(LibraryTitle.simple_title) \
|
||||
.filter(LibraryTitle.default == True)
|
||||
|
||||
titles = q.all()
|
||||
|
||||
chars = set()
|
||||
for title in titles:
|
||||
try:
|
||||
char = title[0][0]
|
||||
char = char if char in ascii_lowercase else '#'
|
||||
chars.add(str(char))
|
||||
except:
|
||||
log.error('Failed getting title for %s', title.libraries_id)
|
||||
|
||||
if len(chars) == 25:
|
||||
break
|
||||
|
||||
db.expire_all()
|
||||
return ''.join(sorted(chars))
|
||||
|
||||
def listView(self, **kwargs):
|
||||
|
||||
status = splitString(kwargs.get('status'))
|
||||
release_status = splitString(kwargs.get('release_status'))
|
||||
limit_offset = kwargs.get('limit_offset')
|
||||
starts_with = kwargs.get('starts_with')
|
||||
search = kwargs.get('search')
|
||||
order = kwargs.get('order')
|
||||
|
||||
total_movies, movies = self.list(
|
||||
status = status,
|
||||
release_status = release_status,
|
||||
limit_offset = limit_offset,
|
||||
starts_with = starts_with,
|
||||
search = search,
|
||||
order = order
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'empty': len(movies) == 0,
|
||||
'total': total_movies,
|
||||
'movies': movies,
|
||||
}
|
||||
|
||||
def charView(self, **kwargs):
|
||||
|
||||
status = splitString(kwargs.get('status', None))
|
||||
release_status = splitString(kwargs.get('release_status', None))
|
||||
chars = self.availableChars(status, release_status)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'empty': len(chars) == 0,
|
||||
'chars': chars,
|
||||
}
|
||||
|
||||
def add(self, params = None, force_readd = True, search_after = True, update_library = False, status_id = None):
|
||||
if not params: params = {}
|
||||
|
||||
# Make sure it's a correct zero filled imdb id
|
||||
params['identifier'] = getImdb(params.get('identifier', ''))
|
||||
|
||||
if not params.get('identifier'):
|
||||
msg = 'Can\'t add movie without imdb identifier.'
|
||||
log.error(msg)
|
||||
fireEvent('notify.frontend', type = 'movie.is_tvshow', message = msg)
|
||||
return False
|
||||
elif not params.get('info'):
|
||||
else:
|
||||
try:
|
||||
is_movie = fireEvent('movie.is_movie', identifier = params.get('identifier'), single = True)
|
||||
if not is_movie:
|
||||
@@ -74,137 +333,78 @@ class MovieBase(MovieTypeBase):
|
||||
except:
|
||||
pass
|
||||
|
||||
info = params.get('info')
|
||||
if not info or (info and len(info.get('titles', [])) == 0):
|
||||
info = fireEvent('movie.info', merge = True, extended = False, identifier = params.get('identifier'))
|
||||
|
||||
# Set default title
|
||||
default_title = toUnicode(info.get('title'))
|
||||
titles = info.get('titles', [])
|
||||
counter = 0
|
||||
def_title = None
|
||||
for title in titles:
|
||||
if (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == six.u('') and toUnicode(titles[0]) == title):
|
||||
def_title = toUnicode(title)
|
||||
break
|
||||
counter += 1
|
||||
library = fireEvent('library.add.movie', single = True, attrs = params, update_after = update_library)
|
||||
|
||||
if not def_title:
|
||||
def_title = toUnicode(titles[0])
|
||||
# Status
|
||||
status_active, snatched_status, ignored_status, done_status, downloaded_status = \
|
||||
fireEvent('status.get', ['active', 'snatched', 'ignored', 'done', 'downloaded'], single = True)
|
||||
|
||||
# Default profile and category
|
||||
default_profile = {}
|
||||
if (not params.get('profile_id') and status != 'done') or params.get('ignore_previous', False):
|
||||
default_profile = fireEvent('profile.default', single = True)
|
||||
default_profile = fireEvent('profile.default', single = True)
|
||||
cat_id = params.get('category_id')
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
db = get_session()
|
||||
m = db.query(Media).filter_by(library_id = library.get('id')).first()
|
||||
added = True
|
||||
do_search = False
|
||||
search_after = search_after and self.conf('search_on_add', section = 'moviesearcher')
|
||||
if not m:
|
||||
m = Media(
|
||||
library_id = library.get('id'),
|
||||
profile_id = params.get('profile_id', default_profile.get('id')),
|
||||
status_id = status_id if status_id else status_active.get('id'),
|
||||
category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else None,
|
||||
)
|
||||
db.add(m)
|
||||
db.commit()
|
||||
|
||||
media = {
|
||||
'_t': 'media',
|
||||
'type': 'movie',
|
||||
'title': def_title,
|
||||
'identifiers': {
|
||||
'imdb': params.get('identifier')
|
||||
},
|
||||
'status': status if status else 'active',
|
||||
'profile_id': params.get('profile_id') or default_profile.get('_id'),
|
||||
'category_id': cat_id if cat_id is not None and len(cat_id) > 0 and cat_id != '-1' else None,
|
||||
}
|
||||
|
||||
# Update movie info
|
||||
try: del info['in_wanted']
|
||||
except: pass
|
||||
try: del info['in_library']
|
||||
except: pass
|
||||
media['info'] = info
|
||||
|
||||
new = False
|
||||
previous_profile = None
|
||||
try:
|
||||
m = db.get('media', 'imdb-%s' % params.get('identifier'), with_doc = True)['doc']
|
||||
|
||||
try:
|
||||
db.get('id', m.get('profile_id'))
|
||||
previous_profile = m.get('profile_id')
|
||||
except RecordNotFound:
|
||||
pass
|
||||
except:
|
||||
log.error('Failed getting previous profile: %s', traceback.format_exc())
|
||||
except:
|
||||
new = True
|
||||
m = db.insert(media)
|
||||
|
||||
# Update dict to be usable
|
||||
m.update(media)
|
||||
|
||||
added = True
|
||||
do_search = False
|
||||
search_after = search_after and self.conf('search_on_add', section = 'moviesearcher')
|
||||
onComplete = None
|
||||
if search_after:
|
||||
onComplete = self.createOnComplete(m.id)
|
||||
|
||||
if new:
|
||||
if search_after:
|
||||
onComplete = self.createOnComplete(m['_id'])
|
||||
search_after = False
|
||||
elif force_readd:
|
||||
fireEventAsync('library.update.movie', params.get('identifier'), default_title = params.get('title', ''), on_complete = onComplete)
|
||||
search_after = False
|
||||
elif force_readd:
|
||||
|
||||
# Clean snatched history
|
||||
for release in fireEvent('release.for_media', m['_id'], single = True):
|
||||
if release.get('status') in ['downloaded', 'snatched', 'seeding', 'done']:
|
||||
if params.get('ignore_previous', False):
|
||||
fireEvent('release.update_status', release['_id'], status = 'ignored')
|
||||
else:
|
||||
fireEvent('release.delete', release['_id'], single = True)
|
||||
|
||||
m['profile_id'] = (params.get('profile_id') or default_profile.get('_id')) if not previous_profile else previous_profile
|
||||
m['category_id'] = cat_id if cat_id is not None and len(cat_id) > 0 else (m.get('category_id') or None)
|
||||
m['last_edit'] = int(time.time())
|
||||
m['tags'] = []
|
||||
|
||||
do_search = True
|
||||
db.update(m)
|
||||
else:
|
||||
try: del params['info']
|
||||
except: pass
|
||||
log.debug('Movie already exists, not updating: %s', params)
|
||||
added = False
|
||||
|
||||
# Trigger update info
|
||||
if added and update_after:
|
||||
# Do full update to get images etc
|
||||
fireEventAsync('movie.update', m['_id'], default_title = params.get('title'), on_complete = onComplete)
|
||||
|
||||
# Remove releases
|
||||
for rel in fireEvent('release.for_media', m['_id'], single = True):
|
||||
if rel['status'] is 'available':
|
||||
db.delete(rel)
|
||||
|
||||
movie_dict = fireEvent('media.get', m['_id'], single = True)
|
||||
if not movie_dict:
|
||||
log.debug('Failed adding media, can\'t find it anymore')
|
||||
return False
|
||||
|
||||
if do_search and search_after:
|
||||
onComplete = self.createOnComplete(m['_id'])
|
||||
onComplete()
|
||||
|
||||
if added and notify_after:
|
||||
|
||||
if params.get('title'):
|
||||
message = 'Successfully added "%s" to your wanted list.' % params.get('title', '')
|
||||
else:
|
||||
title = getTitle(m)
|
||||
if title:
|
||||
message = 'Successfully added "%s" to your wanted list.' % title
|
||||
# Clean snatched history
|
||||
for release in m.releases:
|
||||
if release.status_id in [downloaded_status.get('id'), snatched_status.get('id'), done_status.get('id')]:
|
||||
if params.get('ignore_previous', False):
|
||||
release.status_id = ignored_status.get('id')
|
||||
else:
|
||||
message = 'Successfully added to your wanted list.'
|
||||
fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = message)
|
||||
fireEvent('release.delete', release.id, single = True)
|
||||
|
||||
return movie_dict
|
||||
except:
|
||||
log.error('Failed adding media: %s', traceback.format_exc())
|
||||
m.profile_id = params.get('profile_id', default_profile.get('id'))
|
||||
m.category_id = tryInt(cat_id) if cat_id is not None and tryInt(cat_id) > 0 else (m.category_id or None)
|
||||
else:
|
||||
log.debug('Movie already exists, not updating: %s', params)
|
||||
added = False
|
||||
|
||||
if force_readd:
|
||||
m.status_id = status_id if status_id else status_active.get('id')
|
||||
m.last_edit = int(time.time())
|
||||
do_search = True
|
||||
|
||||
db.commit()
|
||||
|
||||
# Remove releases
|
||||
available_status = fireEvent('status.get', 'available', single = True)
|
||||
for rel in m.releases:
|
||||
if rel.status_id is available_status.get('id'):
|
||||
db.delete(rel)
|
||||
db.commit()
|
||||
|
||||
movie_dict = m.to_dict(self.default_dict)
|
||||
|
||||
if do_search and search_after:
|
||||
onComplete = self.createOnComplete(m.id)
|
||||
onComplete()
|
||||
|
||||
if added:
|
||||
fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = 'Successfully added "%s" to your wanted list.' % params.get('title', ''))
|
||||
|
||||
db.expire_all()
|
||||
return movie_dict
|
||||
|
||||
def addView(self, **kwargs):
|
||||
add_dict = self.add(params = kwargs)
|
||||
@@ -216,148 +416,128 @@ class MovieBase(MovieTypeBase):
|
||||
|
||||
def edit(self, id = '', **kwargs):
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
db = get_session()
|
||||
|
||||
ids = splitString(id)
|
||||
for media_id in ids:
|
||||
available_status = fireEvent('status.get', 'available', single = True)
|
||||
|
||||
try:
|
||||
m = db.get('id', media_id)
|
||||
m['profile_id'] = kwargs.get('profile_id')
|
||||
ids = splitString(id)
|
||||
for movie_id in ids:
|
||||
|
||||
cat_id = kwargs.get('category_id')
|
||||
if cat_id is not None:
|
||||
m['category_id'] = cat_id if len(cat_id) > 0 else None
|
||||
m = db.query(Media).filter_by(id = movie_id).first()
|
||||
if not m:
|
||||
continue
|
||||
|
||||
# Remove releases
|
||||
for rel in fireEvent('release.for_media', m['_id'], single = True):
|
||||
if rel['status'] is 'available':
|
||||
db.delete(rel)
|
||||
m.profile_id = kwargs.get('profile_id')
|
||||
|
||||
# Default title
|
||||
if kwargs.get('default_title'):
|
||||
m['title'] = kwargs.get('default_title')
|
||||
cat_id = kwargs.get('category_id')
|
||||
if cat_id is not None:
|
||||
m.category_id = tryInt(cat_id) if tryInt(cat_id) > 0 else None
|
||||
|
||||
db.update(m)
|
||||
# Remove releases
|
||||
for rel in m.releases:
|
||||
if rel.status_id is available_status.get('id'):
|
||||
db.delete(rel)
|
||||
db.commit()
|
||||
|
||||
fireEvent('media.restatus', m['_id'], single = True)
|
||||
# Default title
|
||||
if kwargs.get('default_title'):
|
||||
for title in m.library.titles:
|
||||
title.default = toUnicode(kwargs.get('default_title', '')).lower() == toUnicode(title.title).lower()
|
||||
|
||||
m = db.get('id', media_id)
|
||||
db.commit()
|
||||
|
||||
movie_dict = fireEvent('media.get', m['_id'], single = True)
|
||||
fireEventAsync('movie.searcher.single', movie_dict, on_complete = self.createNotifyFront(media_id))
|
||||
fireEvent('movie.restatus', m.id)
|
||||
|
||||
except:
|
||||
log.error('Can\'t edit non-existing media')
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
}
|
||||
except:
|
||||
log.error('Failed editing media: %s', traceback.format_exc())
|
||||
movie_dict = m.to_dict(self.default_dict)
|
||||
fireEventAsync('movie.searcher.single', movie_dict, on_complete = self.createNotifyFront(movie_id))
|
||||
|
||||
db.expire_all()
|
||||
return {
|
||||
'success': False,
|
||||
'success': True,
|
||||
}
|
||||
|
||||
def update(self, media_id = None, identifier = None, default_title = None, extended = False):
|
||||
"""
|
||||
Update movie information inside media['doc']['info']
|
||||
def deleteView(self, id = '', **kwargs):
|
||||
|
||||
@param media_id: document id
|
||||
@param default_title: default title, if empty, use first one or existing one
|
||||
@param extended: update with extended info (parses more info, actors, images from some info providers)
|
||||
@return: dict, with media
|
||||
"""
|
||||
ids = splitString(id)
|
||||
for movie_id in ids:
|
||||
self.delete(movie_id, delete_from = kwargs.get('delete_from', 'all'))
|
||||
|
||||
if self.shuttingDown():
|
||||
return
|
||||
return {
|
||||
'success': True,
|
||||
}
|
||||
|
||||
lock_key = 'media.get.%s' % media_id if media_id else identifier
|
||||
self.acquireLock(lock_key)
|
||||
def delete(self, movie_id, delete_from = None):
|
||||
|
||||
media = {}
|
||||
try:
|
||||
db = get_db()
|
||||
db = get_session()
|
||||
|
||||
if media_id:
|
||||
media = db.get('id', media_id)
|
||||
movie = db.query(Media).filter_by(id = movie_id).first()
|
||||
if movie:
|
||||
deleted = False
|
||||
if delete_from == 'all':
|
||||
db.delete(movie)
|
||||
db.commit()
|
||||
deleted = True
|
||||
else:
|
||||
media = db.get('media', 'imdb-%s' % identifier, with_doc = True)['doc']
|
||||
done_status = fireEvent('status.get', 'done', single = True)
|
||||
|
||||
info = fireEvent('movie.info', merge = True, extended = extended, identifier = getIdentifier(media))
|
||||
total_releases = len(movie.releases)
|
||||
total_deleted = 0
|
||||
new_movie_status = None
|
||||
for release in movie.releases:
|
||||
if delete_from in ['wanted', 'snatched', 'late']:
|
||||
if release.status_id != done_status.get('id'):
|
||||
db.delete(release)
|
||||
total_deleted += 1
|
||||
new_movie_status = 'done'
|
||||
elif delete_from == 'manage':
|
||||
if release.status_id == done_status.get('id'):
|
||||
db.delete(release)
|
||||
total_deleted += 1
|
||||
new_movie_status = 'active'
|
||||
db.commit()
|
||||
|
||||
# Don't need those here
|
||||
try: del info['in_wanted']
|
||||
except: pass
|
||||
try: del info['in_library']
|
||||
except: pass
|
||||
if total_releases == total_deleted:
|
||||
db.delete(movie)
|
||||
db.commit()
|
||||
deleted = True
|
||||
elif new_movie_status:
|
||||
new_status = fireEvent('status.get', new_movie_status, single = True)
|
||||
movie.profile_id = None
|
||||
movie.status_id = new_status.get('id')
|
||||
db.commit()
|
||||
else:
|
||||
fireEvent('movie.restatus', movie.id, single = True)
|
||||
|
||||
if not info or len(info) == 0:
|
||||
log.error('Could not update, no movie info to work with: %s', identifier)
|
||||
return False
|
||||
if deleted:
|
||||
fireEvent('notify.frontend', type = 'movie.deleted', data = movie.to_dict())
|
||||
|
||||
# Update basic info
|
||||
media['info'] = info
|
||||
db.expire_all()
|
||||
return True
|
||||
|
||||
titles = info.get('titles', [])
|
||||
log.debug('Adding titles: %s', titles)
|
||||
def restatus(self, movie_id):
|
||||
|
||||
# Define default title
|
||||
if default_title:
|
||||
def_title = None
|
||||
if default_title:
|
||||
counter = 0
|
||||
for title in titles:
|
||||
if title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == six.u('') and toUnicode(titles[0]) == title):
|
||||
def_title = toUnicode(title)
|
||||
break
|
||||
counter += 1
|
||||
active_status, done_status = fireEvent('status.get', ['active', 'done'], single = True)
|
||||
|
||||
if not def_title:
|
||||
def_title = toUnicode(titles[0])
|
||||
db = get_session()
|
||||
|
||||
media['title'] = def_title
|
||||
m = db.query(Media).filter_by(id = movie_id).first()
|
||||
if not m or len(m.library.titles) == 0:
|
||||
log.debug('Can\'t restatus movie, doesn\'t seem to exist.')
|
||||
return False
|
||||
|
||||
# Files
|
||||
image_urls = info.get('images', [])
|
||||
log.debug('Changing status for %s', m.library.titles[0].title)
|
||||
if not m.profile:
|
||||
m.status_id = done_status.get('id')
|
||||
else:
|
||||
move_to_wanted = True
|
||||
|
||||
self.getPoster(media, image_urls)
|
||||
for t in m.profile.types:
|
||||
for release in m.releases:
|
||||
if t.quality.identifier is release.quality.identifier and (release.status_id is done_status.get('id') and t.finish):
|
||||
move_to_wanted = False
|
||||
|
||||
db.update(media)
|
||||
except:
|
||||
log.error('Failed update media: %s', traceback.format_exc())
|
||||
m.status_id = active_status.get('id') if move_to_wanted else done_status.get('id')
|
||||
|
||||
self.releaseLock(lock_key)
|
||||
return media
|
||||
db.commit()
|
||||
|
||||
def updateReleaseDate(self, media_id):
|
||||
"""
|
||||
Update release_date (eta) info only
|
||||
|
||||
@param media_id: document id
|
||||
@return: dict, with dates dvd, theater, bluray, expires
|
||||
"""
|
||||
|
||||
try:
|
||||
db = get_db()
|
||||
|
||||
media = db.get('id', media_id)
|
||||
|
||||
if not media.get('info'):
|
||||
media = self.update(media_id)
|
||||
dates = media.get('info', {}).get('release_date')
|
||||
else:
|
||||
dates = media.get('info').get('release_date')
|
||||
|
||||
if dates and (dates.get('expires', 0) < time.time() or dates.get('expires', 0) > time.time() + (604800 * 4)) or not dates:
|
||||
dates = fireEvent('movie.info.release_date', identifier = getIdentifier(media), merge = True)
|
||||
media['info'].update({'release_date': dates})
|
||||
db.update(media)
|
||||
|
||||
return dates
|
||||
except:
|
||||
log.error('Failed updating release dates: %s', traceback.format_exc())
|
||||
|
||||
return {}
|
||||
return True
|
||||
|
||||
@@ -26,7 +26,7 @@ var MovieList = new Class({
|
||||
self.filter = self.options.filter || {
|
||||
'starts_with': null,
|
||||
'search': null
|
||||
};
|
||||
}
|
||||
|
||||
self.el = new Element('div.movies').adopt(
|
||||
self.title = self.options.title ? new Element('h2', {
|
||||
@@ -52,20 +52,19 @@ var MovieList = new Class({
|
||||
|
||||
self.getMovies();
|
||||
|
||||
App.on('movie.added', self.movieAdded.bind(self));
|
||||
App.on('movie.deleted', self.movieDeleted.bind(self))
|
||||
App.addEvent('movie.added', self.movieAdded.bind(self))
|
||||
App.addEvent('movie.deleted', self.movieDeleted.bind(self))
|
||||
},
|
||||
|
||||
movieDeleted: function(notification){
|
||||
var self = this;
|
||||
|
||||
if(self.movies_added[notification.data._id]){
|
||||
if(self.movies_added[notification.data.id]){
|
||||
self.movies.each(function(movie){
|
||||
if(movie.get('_id') == notification.data._id){
|
||||
if(movie.get('id') == notification.data.id){
|
||||
movie.destroy();
|
||||
delete self.movies_added[notification.data._id];
|
||||
delete self.movies_added[notification.data.id];
|
||||
self.setCounter(self.counter_count-1);
|
||||
self.total_movies--;
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -76,8 +75,7 @@ var MovieList = new Class({
|
||||
movieAdded: function(notification){
|
||||
var self = this;
|
||||
|
||||
self.fireEvent('movieAdded', notification);
|
||||
if(self.options.add_new && !self.movies_added[notification.data._id] && notification.data.status == self.options.status){
|
||||
if(self.options.add_new && !self.movies_added[notification.data.id] && notification.data.status.identifier == self.options.status){
|
||||
window.scroll(0,0);
|
||||
self.createMovie(notification.data, 'top');
|
||||
self.setCounter(self.counter_count+1);
|
||||
@@ -96,7 +94,7 @@ var MovieList = new Class({
|
||||
if(self.options.load_more)
|
||||
self.scrollspy = new ScrollSpy({
|
||||
min: function(){
|
||||
var c = self.load_more.getCoordinates();
|
||||
var c = self.load_more.getCoordinates()
|
||||
return c.top - window.document.getSize().y - 300
|
||||
},
|
||||
onEnter: self.loadMore.bind(self)
|
||||
@@ -179,15 +177,15 @@ var MovieList = new Class({
|
||||
|
||||
m.fireEvent('injected');
|
||||
|
||||
self.movies.include(m);
|
||||
self.movies_added[movie._id] = true;
|
||||
self.movies.include(m)
|
||||
self.movies_added[movie.id] = true;
|
||||
},
|
||||
|
||||
createNavigation: function(){
|
||||
var self = this;
|
||||
var chars = '#ABCDEFGHIJKLMNOPQRSTUVWXYZ';
|
||||
|
||||
self.el.addClass('with_navigation');
|
||||
self.el.addClass('with_navigation')
|
||||
|
||||
self.navigation = new Element('div.alph_nav').adopt(
|
||||
self.mass_edit_form = new Element('div.mass_edit_form').adopt(
|
||||
@@ -242,7 +240,7 @@ var MovieList = new Class({
|
||||
this.addClass(a);
|
||||
|
||||
el.inject(el.getParent(), 'top');
|
||||
el.getSiblings().hide();
|
||||
el.getSiblings().hide()
|
||||
setTimeout(function(){
|
||||
el.getSiblings().setStyle('display', null);
|
||||
}, 100)
|
||||
@@ -259,8 +257,8 @@ var MovieList = new Class({
|
||||
self.mass_edit_select_class = new Form.Check(self.mass_edit_select);
|
||||
Quality.getActiveProfiles().each(function(profile){
|
||||
new Element('option', {
|
||||
'value': profile.get('_id'),
|
||||
'text': profile.get('label')
|
||||
'value': profile.id ? profile.id : profile.data.id,
|
||||
'text': profile.label ? profile.label : profile.data.label
|
||||
}).inject(self.mass_edit_quality)
|
||||
});
|
||||
|
||||
@@ -281,14 +279,14 @@ var MovieList = new Class({
|
||||
|
||||
// Get available chars and highlight
|
||||
if(!available_chars && (self.navigation.isDisplayed() || self.navigation.isVisible()))
|
||||
Api.request('media.available_chars', {
|
||||
Api.request('movie.available_chars', {
|
||||
'data': Object.merge({
|
||||
'status': self.options.status
|
||||
}, self.filter),
|
||||
'onSuccess': function(json){
|
||||
available_chars = json.chars;
|
||||
available_chars = json.chars
|
||||
|
||||
available_chars.each(function(c){
|
||||
json.chars.split('').each(function(c){
|
||||
self.letters[c.capitalize()].addClass('available')
|
||||
})
|
||||
|
||||
@@ -300,7 +298,7 @@ var MovieList = new Class({
|
||||
self.navigation_alpha = new Element('ul.numbers', {
|
||||
'events': {
|
||||
'click:relay(li.available)': function(e, el){
|
||||
self.activateLetter(el.get('data-letter'));
|
||||
self.activateLetter(el.get('data-letter'))
|
||||
self.getMovies(true)
|
||||
}
|
||||
}
|
||||
@@ -318,7 +316,7 @@ var MovieList = new Class({
|
||||
|
||||
// All
|
||||
self.letters['all'] = new Element('li.letter_all.available.active', {
|
||||
'text': 'ALL'
|
||||
'text': 'ALL',
|
||||
}).inject(self.navigation_alpha);
|
||||
|
||||
// Chars
|
||||
@@ -334,7 +332,7 @@ var MovieList = new Class({
|
||||
if (self.options.menu.length > 0)
|
||||
self.options.menu.each(function(menu_item){
|
||||
self.navigation_menu.addLink(menu_item);
|
||||
});
|
||||
})
|
||||
else
|
||||
self.navigation_menu.hide();
|
||||
|
||||
@@ -347,15 +345,15 @@ var MovieList = new Class({
|
||||
movies = self.movies.length;
|
||||
self.movies.each(function(movie){
|
||||
selected += movie.isSelected() ? 1 : 0
|
||||
});
|
||||
})
|
||||
|
||||
var indeterminate = selected > 0 && selected < movies,
|
||||
checked = selected == movies && selected > 0;
|
||||
|
||||
self.mass_edit_select.set('indeterminate', indeterminate);
|
||||
self.mass_edit_select.set('indeterminate', indeterminate)
|
||||
|
||||
self.mass_edit_select_class[checked ? 'check' : 'uncheck']();
|
||||
self.mass_edit_select_class.element[indeterminate ? 'addClass' : 'removeClass']('indeterminate');
|
||||
self.mass_edit_select_class[checked ? 'check' : 'uncheck']()
|
||||
self.mass_edit_select_class.element[indeterminate ? 'addClass' : 'removeClass']('indeterminate')
|
||||
|
||||
self.mass_edit_selected.set('text', selected);
|
||||
},
|
||||
@@ -371,9 +369,8 @@ var MovieList = new Class({
|
||||
'events': {
|
||||
'click': function(e){
|
||||
(e).preventDefault();
|
||||
this.set('text', 'Deleting..');
|
||||
Api.request('media.delete', {
|
||||
'method': 'post',
|
||||
this.set('text', 'Deleting..')
|
||||
Api.request('movie.delete', {
|
||||
'data': {
|
||||
'id': ids.join(','),
|
||||
'delete_from': self.options.identifier
|
||||
@@ -384,7 +381,7 @@ var MovieList = new Class({
|
||||
var erase_movies = [];
|
||||
self.movies.each(function(movie){
|
||||
if (movie.isSelected()){
|
||||
$(movie).destroy();
|
||||
$(movie).destroy()
|
||||
erase_movies.include(movie);
|
||||
}
|
||||
});
|
||||
@@ -393,7 +390,6 @@ var MovieList = new Class({
|
||||
self.movies.erase(movie);
|
||||
movie.destroy();
|
||||
self.setCounter(self.counter_count-1);
|
||||
self.total_movies--;
|
||||
});
|
||||
|
||||
self.calculateSelected();
|
||||
@@ -411,10 +407,9 @@ var MovieList = new Class({
|
||||
|
||||
changeQualitySelected: function(){
|
||||
var self = this;
|
||||
var ids = self.getSelectedMovies();
|
||||
var ids = self.getSelectedMovies()
|
||||
|
||||
Api.request('movie.edit', {
|
||||
'method': 'post',
|
||||
'data': {
|
||||
'id': ids.join(','),
|
||||
'profile_id': self.mass_edit_quality.get('value')
|
||||
@@ -425,12 +420,11 @@ var MovieList = new Class({
|
||||
|
||||
refreshSelected: function(){
|
||||
var self = this;
|
||||
var ids = self.getSelectedMovies();
|
||||
var ids = self.getSelectedMovies()
|
||||
|
||||
Api.request('media.refresh', {
|
||||
'method': 'post',
|
||||
'data': {
|
||||
'id': ids.join(',')
|
||||
'id': ids.join(','),
|
||||
}
|
||||
});
|
||||
},
|
||||
@@ -438,10 +432,10 @@ var MovieList = new Class({
|
||||
getSelectedMovies: function(){
|
||||
var self = this;
|
||||
|
||||
var ids = [];
|
||||
var ids = []
|
||||
self.movies.each(function(movie){
|
||||
if (movie.isSelected())
|
||||
ids.include(movie.get('_id'))
|
||||
ids.include(movie.get('id'))
|
||||
});
|
||||
|
||||
return ids
|
||||
@@ -462,15 +456,15 @@ var MovieList = new Class({
|
||||
reset: function(){
|
||||
var self = this;
|
||||
|
||||
self.movies = [];
|
||||
self.movies = []
|
||||
if(self.mass_edit_select)
|
||||
self.calculateSelected();
|
||||
self.calculateSelected()
|
||||
if(self.navigation_alpha)
|
||||
self.navigation_alpha.getElements('.active').removeClass('active');
|
||||
self.navigation_alpha.getElements('.active').removeClass('active')
|
||||
|
||||
self.offset = 0;
|
||||
if(self.scrollspy){
|
||||
//self.load_more.show();
|
||||
self.load_more.show();
|
||||
self.scrollspy.start();
|
||||
}
|
||||
},
|
||||
@@ -478,7 +472,7 @@ var MovieList = new Class({
|
||||
activateLetter: function(letter){
|
||||
var self = this;
|
||||
|
||||
self.reset();
|
||||
self.reset()
|
||||
|
||||
self.letters[letter || 'all'].addClass('active');
|
||||
self.filter.starts_with = letter;
|
||||
@@ -490,7 +484,7 @@ var MovieList = new Class({
|
||||
|
||||
self.el
|
||||
.removeClass(self.current_view+'_list')
|
||||
.addClass(new_view+'_list');
|
||||
.addClass(new_view+'_list')
|
||||
|
||||
self.current_view = new_view;
|
||||
Cookie.write(self.options.identifier+'_view2', new_view, {duration: 1000});
|
||||
@@ -507,9 +501,9 @@ var MovieList = new Class({
|
||||
if(self.search_timer) clearTimeout(self.search_timer);
|
||||
self.search_timer = (function(){
|
||||
var search_value = self.navigation_search_input.get('value');
|
||||
if (search_value == self.last_search_value) return;
|
||||
if (search_value == self.last_search_value) return
|
||||
|
||||
self.reset();
|
||||
self.reset()
|
||||
|
||||
self.activateLetter();
|
||||
self.filter.search = search_value;
|
||||
@@ -553,9 +547,8 @@ var MovieList = new Class({
|
||||
|
||||
}
|
||||
|
||||
Api.request(self.options.api_call || 'media.list', {
|
||||
Api.request(self.options.api_call || 'movie.list', {
|
||||
'data': Object.merge({
|
||||
'type': self.options.type || 'movie',
|
||||
'status': self.options.status,
|
||||
'limit_offset': self.options.limit ? self.options.limit + ',' + self.offset : null
|
||||
}, self.filter),
|
||||
@@ -566,7 +559,7 @@ var MovieList = new Class({
|
||||
|
||||
if(self.loader_first){
|
||||
var lf = self.loader_first;
|
||||
self.loader_first.addClass('hide');
|
||||
self.loader_first.addClass('hide')
|
||||
self.loader_first = null;
|
||||
setTimeout(function(){
|
||||
lf.destroy();
|
||||
@@ -606,10 +599,10 @@ var MovieList = new Class({
|
||||
var is_empty = self.movies.length == 0 && (self.total_movies == 0 || self.total_movies === undefined);
|
||||
|
||||
if(self.title)
|
||||
self.title[is_empty ? 'hide' : 'show']();
|
||||
self.title[is_empty ? 'hide' : 'show']()
|
||||
|
||||
if(self.description)
|
||||
self.description.setStyle('display', [is_empty ? 'none' : '']);
|
||||
self.description.setStyle('display', [is_empty ? 'none' : ''])
|
||||
|
||||
if(is_empty && self.options.on_empty_element){
|
||||
self.options.on_empty_element.inject(self.loader_first || self.title || self.movie_list, 'after');
|
||||
@@ -632,4 +625,4 @@ var MovieList = new Class({
|
||||
return this.el;
|
||||
}
|
||||
|
||||
});
|
||||
});
|
||||
@@ -60,6 +60,22 @@ var MovieAction = new Class({
|
||||
'z-index': '1'
|
||||
}
|
||||
}).inject(self.movie, 'top').fade('hide');
|
||||
//self.positionMask();
|
||||
},
|
||||
|
||||
positionMask: function(){
|
||||
var self = this,
|
||||
movie = $(self.movie),
|
||||
s = movie.getSize()
|
||||
|
||||
return;
|
||||
|
||||
return self.mask.setStyles({
|
||||
'width': s.x,
|
||||
'height': s.y
|
||||
}).position({
|
||||
'relativeTo': movie
|
||||
})
|
||||
},
|
||||
|
||||
toElement: function(){
|
||||
@@ -78,7 +94,7 @@ MA.IMDB = new Class({
|
||||
create: function(){
|
||||
var self = this;
|
||||
|
||||
self.id = self.movie.getIdentifier ? self.movie.getIdentifier() : self.get('imdb');
|
||||
self.id = self.movie.get('imdb') || self.movie.get('identifier');
|
||||
|
||||
self.el = new Element('a.imdb', {
|
||||
'title': 'Go to the IMDB page of ' + self.getTitle(),
|
||||
@@ -105,25 +121,16 @@ MA.Release = new Class({
|
||||
}
|
||||
});
|
||||
|
||||
if(!self.movie.data.releases || self.movie.data.releases.length == 0)
|
||||
self.el.hide();
|
||||
if(self.movie.data.releases.length == 0)
|
||||
self.el.hide()
|
||||
else
|
||||
self.showHelper();
|
||||
|
||||
App.on('movie.searcher.ended', function(notification){
|
||||
if(self.movie.data._id != notification.data._id) return;
|
||||
|
||||
App.addEvent('movie.searcher.ended.'+self.movie.data.id, function(notification){
|
||||
self.releases = null;
|
||||
if(self.options_container){
|
||||
// Releases are currently displayed
|
||||
if(self.options_container.isDisplayed()){
|
||||
self.options_container.destroy();
|
||||
self.createReleases();
|
||||
}
|
||||
else {
|
||||
self.options_container.destroy();
|
||||
self.options_container = null;
|
||||
}
|
||||
self.options_container.destroy();
|
||||
self.options_container = null;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -134,14 +141,37 @@ MA.Release = new Class({
|
||||
if(e)
|
||||
(e).preventDefault();
|
||||
|
||||
self.createReleases();
|
||||
if(self.releases)
|
||||
self.createReleases();
|
||||
else {
|
||||
|
||||
self.movie.busy(true);
|
||||
|
||||
Api.request('release.for_movie', {
|
||||
'data': {
|
||||
'id': self.movie.data.id
|
||||
},
|
||||
'onComplete': function(json){
|
||||
self.movie.busy(false, 1);
|
||||
|
||||
if(json && json.releases){
|
||||
self.releases = json.releases;
|
||||
self.createReleases();
|
||||
}
|
||||
else
|
||||
alert('Something went wrong, check the logs.');
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
},
|
||||
|
||||
createReleases: function(refresh){
|
||||
createReleases: function(){
|
||||
var self = this;
|
||||
|
||||
if(!self.options_container || refresh){
|
||||
if(!self.options_container){
|
||||
self.options_container = new Element('div.options').grab(
|
||||
self.release_container = new Element('div.releases.table')
|
||||
);
|
||||
@@ -155,111 +185,113 @@ MA.Release = new Class({
|
||||
new Element('span.age', {'text': 'Age'}),
|
||||
new Element('span.score', {'text': 'Score'}),
|
||||
new Element('span.provider', {'text': 'Provider'})
|
||||
).inject(self.release_container);
|
||||
).inject(self.release_container)
|
||||
|
||||
if(self.movie.data.releases)
|
||||
self.movie.data.releases.each(function(release){
|
||||
self.releases.each(function(release){
|
||||
|
||||
var quality = Quality.getQuality(release.quality) || {},
|
||||
info = release.info || {},
|
||||
provider = self.get(release, 'provider') + (info['provider_extra'] ? self.get(release, 'provider_extra') : '');
|
||||
var status = Status.get(release.status_id),
|
||||
quality = Quality.getProfile(release.quality_id) || {},
|
||||
info = release.info,
|
||||
provider = self.get(release, 'provider') + (release.info['provider_extra'] ? self.get(release, 'provider_extra') : '');
|
||||
release.status = status;
|
||||
|
||||
var release_name = self.get(release, 'name');
|
||||
if(release.files && release.files.length > 0){
|
||||
try {
|
||||
var movie_file = release.files.filter(function(file){
|
||||
var type = File.Type.get(file.type_id);
|
||||
return type && type.identifier == 'movie'
|
||||
}).pick();
|
||||
release_name = movie_file.path.split(Api.getOption('path_sep')).getLast();
|
||||
}
|
||||
catch(e){}
|
||||
var release_name = self.get(release, 'name');
|
||||
if(release.files && release.files.length > 0){
|
||||
try {
|
||||
var movie_file = release.files.filter(function(file){
|
||||
var type = File.Type.get(file.type_id);
|
||||
return type && type.identifier == 'movie'
|
||||
}).pick();
|
||||
release_name = movie_file.path.split(Api.getOption('path_sep')).getLast();
|
||||
}
|
||||
catch(e){}
|
||||
}
|
||||
|
||||
// Create release
|
||||
release['el'] = new Element('div', {
|
||||
'class': 'item '+release.status,
|
||||
'id': 'release_'+release._id
|
||||
}).adopt(
|
||||
new Element('span.name', {'text': release_name, 'title': release_name}),
|
||||
new Element('span.status', {'text': release.status, 'class': 'release_status '+release.status}),
|
||||
new Element('span.quality', {'text': quality.label + (release.is_3d ? ' 3D' : '') || 'n/a'}),
|
||||
new Element('span.size', {'text': info['size'] ? Math.floor(self.get(release, 'size')) : 'n/a'}),
|
||||
new Element('span.age', {'text': self.get(release, 'age')}),
|
||||
new Element('span.score', {'text': self.get(release, 'score')}),
|
||||
new Element('span.provider', { 'text': provider, 'title': provider }),
|
||||
info['detail_url'] ? new Element('a.info.icon2', {
|
||||
'href': info['detail_url'],
|
||||
'target': '_blank'
|
||||
}) : new Element('a'),
|
||||
new Element('a.download.icon2', {
|
||||
'events': {
|
||||
'click': function(e){
|
||||
(e).preventDefault();
|
||||
if(!this.hasClass('completed'))
|
||||
self.download(release);
|
||||
}
|
||||
// Create release
|
||||
var item = new Element('div', {
|
||||
'class': 'item '+status.identifier,
|
||||
'id': 'release_'+release.id
|
||||
}).adopt(
|
||||
new Element('span.name', {'text': release_name, 'title': release_name}),
|
||||
new Element('span.status', {'text': status.identifier, 'class': 'release_status '+status.identifier}),
|
||||
new Element('span.quality', {'text': quality.get('label') || 'n/a'}),
|
||||
new Element('span.size', {'text': release.info['size'] ? Math.floor(self.get(release, 'size')) : 'n/a'}),
|
||||
new Element('span.age', {'text': self.get(release, 'age')}),
|
||||
new Element('span.score', {'text': self.get(release, 'score')}),
|
||||
new Element('span.provider', { 'text': provider, 'title': provider }),
|
||||
release.info['detail_url'] ? new Element('a.info.icon2', {
|
||||
'href': release.info['detail_url'],
|
||||
'target': '_blank'
|
||||
}) : new Element('a'),
|
||||
new Element('a.download.icon2', {
|
||||
'events': {
|
||||
'click': function(e){
|
||||
(e).preventDefault();
|
||||
if(!this.hasClass('completed'))
|
||||
self.download(release);
|
||||
}
|
||||
}),
|
||||
new Element('a.delete.icon2', {
|
||||
'events': {
|
||||
'click': function(e){
|
||||
(e).preventDefault();
|
||||
self.ignore(release);
|
||||
}
|
||||
}
|
||||
})
|
||||
).inject(self.release_container);
|
||||
|
||||
if(release.status == 'ignored' || release.status == 'failed' || release.status == 'snatched'){
|
||||
if(!self.last_release || (self.last_release && self.last_release.status != 'snatched' && release.status == 'snatched'))
|
||||
self.last_release = release;
|
||||
}
|
||||
else if(!self.next_release && release.status == 'available'){
|
||||
self.next_release = release;
|
||||
}
|
||||
|
||||
var update_handle = function(notification) {
|
||||
if(notification.data._id != release._id) return;
|
||||
|
||||
var q = self.movie.quality.getElement('.q_' + release.quality),
|
||||
new_status = notification.data.status;
|
||||
|
||||
release.el.set('class', 'item ' + new_status);
|
||||
|
||||
var status_el = release.el.getElement('.release_status');
|
||||
status_el.set('class', 'release_status ' + new_status);
|
||||
status_el.set('text', new_status);
|
||||
|
||||
if(!q && (new_status == 'snatched' || new_status == 'seeding' || new_status == 'done'))
|
||||
q = self.addQuality(release.quality_id);
|
||||
|
||||
if(q && !q.hasClass(new_status)) {
|
||||
q.removeClass(release.status).addClass(new_status);
|
||||
q.set('title', q.get('title').replace(release.status, new_status));
|
||||
}
|
||||
};
|
||||
}),
|
||||
new Element('a.delete.icon2', {
|
||||
'events': {
|
||||
'click': function(e){
|
||||
(e).preventDefault();
|
||||
self.ignore(release);
|
||||
}
|
||||
}
|
||||
})
|
||||
).inject(self.release_container);
|
||||
release['el'] = item;
|
||||
|
||||
App.on('release.update_status', update_handle);
|
||||
if(status.identifier == 'ignored' || status.identifier == 'failed' || status.identifier == 'snatched'){
|
||||
if(!self.last_release || (self.last_release && self.last_release.status.identifier != 'snatched' && status.identifier == 'snatched'))
|
||||
self.last_release = release;
|
||||
}
|
||||
else if(!self.next_release && status.identifier == 'available'){
|
||||
self.next_release = release;
|
||||
}
|
||||
|
||||
var update_handle = function(notification) {
|
||||
var q = self.movie.quality.getElement('.q_id' + release.quality_id),
|
||||
status = Status.get(release.status_id),
|
||||
new_status = Status.get(notification.data);
|
||||
|
||||
release.status_id = new_status.id
|
||||
release.el.set('class', 'item ' + new_status.identifier);
|
||||
|
||||
});
|
||||
var status_el = release.el.getElement('.release_status');
|
||||
status_el.set('class', 'release_status ' + new_status.identifier);
|
||||
status_el.set('text', new_status.identifier);
|
||||
|
||||
if(!q && (new_status.identifier == 'snatched' || new_status.identifier == 'seeding' || new_status.identifier == 'done'))
|
||||
var q = self.addQuality(release.quality_id);
|
||||
|
||||
if(new_status && q && !q.hasClass(new_status.identifier)) {
|
||||
q.removeClass(status.identifier).addClass(new_status.identifier);
|
||||
q.set('title', q.get('title').replace(status.label, new_status.label));
|
||||
}
|
||||
}
|
||||
|
||||
App.addEvent('release.update_status.' + release.id, update_handle);
|
||||
|
||||
});
|
||||
|
||||
if(self.last_release)
|
||||
self.release_container.getElements('#release_'+self.last_release._id).addClass('last_release');
|
||||
self.release_container.getElements('#release_'+self.last_release.id).addClass('last_release');
|
||||
|
||||
if(self.next_release)
|
||||
self.release_container.getElements('#release_'+self.next_release._id).addClass('next_release');
|
||||
self.release_container.getElements('#release_'+self.next_release.id).addClass('next_release');
|
||||
|
||||
if(self.next_release || (self.last_release && ['ignored', 'failed'].indexOf(self.last_release.status) === false)){
|
||||
if(self.next_release || (self.last_release && ['ignored', 'failed'].indexOf(self.last_release.status.identifier) === false)){
|
||||
|
||||
self.trynext_container = new Element('div.buttons.try_container').inject(self.release_container, 'top');
|
||||
|
||||
|
||||
var nr = self.next_release,
|
||||
lr = self.last_release;
|
||||
|
||||
self.trynext_container.adopt(
|
||||
new Element('span.or', {
|
||||
'text': 'If anything went wrong, download'
|
||||
'text': 'This movie is snatched, if anything went wrong, download'
|
||||
}),
|
||||
lr ? new Element('a.button.orange', {
|
||||
'text': 'the same release again',
|
||||
@@ -305,17 +337,18 @@ MA.Release = new Class({
|
||||
var has_available = false,
|
||||
has_snatched = false;
|
||||
|
||||
if(self.movie.data.releases)
|
||||
self.movie.data.releases.each(function(release){
|
||||
if(has_available && has_snatched) return;
|
||||
self.movie.data.releases.each(function(release){
|
||||
if(has_available && has_snatched) return;
|
||||
|
||||
if(['snatched', 'downloaded', 'seeding', 'done'].contains(release.status))
|
||||
has_snatched = true;
|
||||
var status = Status.get(release.status_id);
|
||||
|
||||
if(['available'].contains(release.status))
|
||||
has_available = true;
|
||||
if(['snatched', 'downloaded', 'seeding'].contains(status.identifier))
|
||||
has_snatched = true;
|
||||
|
||||
});
|
||||
if(['available'].contains(status.identifier))
|
||||
has_available = true;
|
||||
|
||||
});
|
||||
|
||||
if(has_available || has_snatched){
|
||||
|
||||
@@ -348,13 +381,13 @@ MA.Release = new Class({
|
||||
},
|
||||
|
||||
get: function(release, type){
|
||||
return (release.info && release.info[type] !== undefined) ? release.info[type] : 'n/a'
|
||||
return release.info[type] !== undefined ? release.info[type] : 'n/a'
|
||||
},
|
||||
|
||||
download: function(release){
|
||||
var self = this;
|
||||
|
||||
var release_el = self.release_container.getElement('#release_'+release._id),
|
||||
var release_el = self.release_container.getElement('#release_'+release.id),
|
||||
icon = release_el.getElement('.download.icon2');
|
||||
|
||||
if(icon)
|
||||
@@ -362,7 +395,7 @@ MA.Release = new Class({
|
||||
|
||||
Api.request('release.manual_download', {
|
||||
'data': {
|
||||
'id': release._id
|
||||
'id': release.id
|
||||
},
|
||||
'onComplete': function(json){
|
||||
if(icon)
|
||||
@@ -381,11 +414,12 @@ MA.Release = new Class({
|
||||
},
|
||||
|
||||
ignore: function(release){
|
||||
var self = this;
|
||||
|
||||
Api.request('release.ignore', {
|
||||
'data': {
|
||||
'id': release._id
|
||||
}
|
||||
'id': release.id
|
||||
},
|
||||
})
|
||||
|
||||
},
|
||||
@@ -393,9 +427,9 @@ MA.Release = new Class({
|
||||
markMovieDone: function(){
|
||||
var self = this;
|
||||
|
||||
Api.request('media.delete', {
|
||||
Api.request('movie.delete', {
|
||||
'data': {
|
||||
'id': self.movie.get('_id'),
|
||||
'id': self.movie.get('id'),
|
||||
'delete_from': 'wanted'
|
||||
},
|
||||
'onComplete': function(){
|
||||
@@ -412,12 +446,12 @@ MA.Release = new Class({
|
||||
|
||||
},
|
||||
|
||||
tryNextRelease: function(){
|
||||
tryNextRelease: function(movie_id){
|
||||
var self = this;
|
||||
|
||||
Api.request('movie.searcher.try_next', {
|
||||
'data': {
|
||||
'media_id': self.movie.get('_id')
|
||||
'id': self.movie.get('id')
|
||||
}
|
||||
});
|
||||
|
||||
@@ -445,7 +479,7 @@ MA.Trailer = new Class({
|
||||
watch: function(offset){
|
||||
var self = this;
|
||||
|
||||
var data_url = 'https://gdata.youtube.com/feeds/videos?vq="{title}" {year} trailer&max-results=1&alt=json-in-script&orderby=relevance&sortorder=descending&format=5&fmt=18';
|
||||
var data_url = 'https://gdata.youtube.com/feeds/videos?vq="{title}" {year} trailer&max-results=1&alt=json-in-script&orderby=relevance&sortorder=descending&format=5&fmt=18'
|
||||
var url = data_url.substitute({
|
||||
'title': encodeURI(self.getTitle()),
|
||||
'year': self.get('year'),
|
||||
@@ -504,7 +538,7 @@ MA.Trailer = new Class({
|
||||
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
self.player.addEventListener('onStateChange', change_quality);
|
||||
|
||||
}
|
||||
@@ -521,7 +555,7 @@ MA.Trailer = new Class({
|
||||
$(self.movie).setStyle('height', null);
|
||||
|
||||
setTimeout(function(){
|
||||
self.container.destroy();
|
||||
self.container.destroy()
|
||||
self.close_button.destroy();
|
||||
}, 1800)
|
||||
}
|
||||
@@ -572,13 +606,13 @@ MA.Edit = new Class({
|
||||
)
|
||||
).inject(self.movie, 'top');
|
||||
|
||||
Array.each(self.movie.data.info.titles, function(title){
|
||||
Array.each(self.movie.data.library.titles, function(alt){
|
||||
new Element('option', {
|
||||
'text': title
|
||||
'text': alt.title
|
||||
}).inject(self.title_select);
|
||||
|
||||
if(title == self.movie.data.title)
|
||||
self.title_select.set('value', title);
|
||||
if(alt['default'])
|
||||
self.title_select.set('value', alt.title);
|
||||
});
|
||||
|
||||
|
||||
@@ -591,14 +625,14 @@ MA.Edit = new Class({
|
||||
self.category_select.show();
|
||||
categories.each(function(category){
|
||||
|
||||
var category_id = category.data._id;
|
||||
var category_id = category.data.id;
|
||||
|
||||
new Element('option', {
|
||||
'value': category_id,
|
||||
'text': category.data.label
|
||||
}).inject(self.category_select);
|
||||
|
||||
if(self.movie.category && self.movie.category.data && self.movie.category.data._id == category_id)
|
||||
if(self.movie.category && self.movie.category.data && self.movie.category.data.id == category_id)
|
||||
self.category_select.set('value', category_id);
|
||||
|
||||
});
|
||||
@@ -611,7 +645,7 @@ MA.Edit = new Class({
|
||||
|
||||
profiles.each(function(profile){
|
||||
|
||||
var profile_id = profile.get('_id');
|
||||
var profile_id = profile.id ? profile.id : profile.data.id;
|
||||
|
||||
new Element('option', {
|
||||
'value': profile_id,
|
||||
@@ -634,7 +668,7 @@ MA.Edit = new Class({
|
||||
|
||||
Api.request('movie.edit', {
|
||||
'data': {
|
||||
'id': self.movie.get('_id'),
|
||||
'id': self.movie.get('id'),
|
||||
'default_title': self.title_select.get('value'),
|
||||
'profile_id': self.profile_select.get('value'),
|
||||
'category_id': self.category_select.get('value')
|
||||
@@ -650,7 +684,7 @@ MA.Edit = new Class({
|
||||
self.movie.slide('out');
|
||||
}
|
||||
|
||||
});
|
||||
})
|
||||
|
||||
MA.Refresh = new Class({
|
||||
|
||||
@@ -674,7 +708,7 @@ MA.Refresh = new Class({
|
||||
|
||||
Api.request('media.refresh', {
|
||||
'data': {
|
||||
'id': self.movie.get('_id')
|
||||
'id': self.movie.get('id')
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -688,10 +722,10 @@ MA.Readd = new Class({
|
||||
create: function(){
|
||||
var self = this;
|
||||
|
||||
var movie_done = self.movie.data.status == 'done';
|
||||
if(self.movie.data.releases && !movie_done)
|
||||
var movie_done = Status.get(self.movie.data.status_id).identifier == 'done';
|
||||
if(!movie_done)
|
||||
var snatched = self.movie.data.releases.filter(function(release){
|
||||
return release.status && (release.status == 'snatched' || release.status == 'seeding' || release.status == 'downloaded' || release.status == 'done');
|
||||
return release.status && (release.status.identifier == 'snatched' || release.status.identifier == 'downloaded' || release.status.identifier == 'done');
|
||||
}).length;
|
||||
|
||||
if(movie_done || snatched && snatched > 0)
|
||||
@@ -710,7 +744,7 @@ MA.Readd = new Class({
|
||||
|
||||
Api.request('movie.add', {
|
||||
'data': {
|
||||
'identifier': self.movie.getIdentifier(),
|
||||
'identifier': self.movie.get('identifier'),
|
||||
'ignore_previous': 1
|
||||
}
|
||||
});
|
||||
@@ -783,9 +817,9 @@ MA.Delete = new Class({
|
||||
self.callChain();
|
||||
},
|
||||
function(){
|
||||
Api.request('media.delete', {
|
||||
Api.request('movie.delete', {
|
||||
'data': {
|
||||
'id': self.movie.get('_id'),
|
||||
'id': self.movie.get('id'),
|
||||
'delete_from': self.movie.list.options.identifier
|
||||
},
|
||||
'onComplete': function(){
|
||||
@@ -814,17 +848,46 @@ MA.Files = new Class({
|
||||
create: function(){
|
||||
var self = this;
|
||||
|
||||
if(self.movie.data.releases && self.movie.data.releases.length > 0)
|
||||
self.el = new Element('a.directory', {
|
||||
'title': 'Available files',
|
||||
'events': {
|
||||
'click': self.show.bind(self)
|
||||
}
|
||||
});
|
||||
self.el = new Element('a.directory', {
|
||||
'title': 'Available files',
|
||||
'events': {
|
||||
'click': self.show.bind(self)
|
||||
}
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
show: function(){
|
||||
show: function(e){
|
||||
var self = this;
|
||||
(e).preventDefault();
|
||||
|
||||
if(self.releases)
|
||||
self.showFiles();
|
||||
else {
|
||||
|
||||
self.movie.busy(true);
|
||||
|
||||
Api.request('release.for_movie', {
|
||||
'data': {
|
||||
'id': self.movie.data.id
|
||||
},
|
||||
'onComplete': function(json){
|
||||
self.movie.busy(false, 1);
|
||||
|
||||
if(json && json.releases){
|
||||
self.releases = json.releases;
|
||||
self.showFiles();
|
||||
}
|
||||
else
|
||||
alert('Something went wrong, check the logs.');
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
showFiles: function(){
|
||||
var self = this;
|
||||
|
||||
if(!self.options_container){
|
||||
@@ -835,26 +898,26 @@ MA.Files = new Class({
|
||||
// Header
|
||||
new Element('div.item.head').adopt(
|
||||
new Element('span.name', {'text': 'File'}),
|
||||
new Element('span.type', {'text': 'Type'})
|
||||
).inject(self.files_container);
|
||||
new Element('span.type', {'text': 'Type'}),
|
||||
new Element('span.is_available', {'text': 'Available'})
|
||||
).inject(self.files_container)
|
||||
|
||||
if(self.movie.data.releases)
|
||||
Array.each(self.movie.data.releases, function(release){
|
||||
var rel = new Element('div.release').inject(self.files_container);
|
||||
Array.each(self.releases, function(release){
|
||||
|
||||
Object.each(release.files, function(files, type){
|
||||
Array.each(files, function(file){
|
||||
new Element('div.file.item').adopt(
|
||||
new Element('span.name', {'text': file}),
|
||||
new Element('span.type', {'text': type})
|
||||
).inject(rel)
|
||||
});
|
||||
});
|
||||
var rel = new Element('div.release').inject(self.files_container);
|
||||
|
||||
Array.each(release.files, function(file){
|
||||
new Element('div.file.item').adopt(
|
||||
new Element('span.name', {'text': file.path}),
|
||||
new Element('span.type', {'text': File.Type.get(file.type_id).name}),
|
||||
new Element('span.available', {'text': file.available})
|
||||
).inject(rel)
|
||||
});
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
self.movie.slide('in', self.options_container);
|
||||
}
|
||||
},
|
||||
|
||||
});
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user