Merge branch 'refs/heads/develop'

This commit is contained in:
Ruud
2014-06-20 12:22:13 +02:00
22 changed files with 272 additions and 79 deletions

View File

@@ -19,7 +19,12 @@ base_path = dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.join(base_path, 'libs'))
from couchpotato.environment import Env
from couchpotato.core.helpers.variable import getDataDir
from couchpotato.core.helpers.variable import getDataDir, removePyc
# Remove pyc files before dynamic load (otherwise orphaned .pyc files are treated as regular .py modules)
removePyc(base_path)
class Loader(object):

View File

@@ -11,6 +11,7 @@ from threading import RLock
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
from couchpotato.core.helpers.encoding import sp
from couchpotato.core.helpers.variable import removePyc
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
@@ -143,7 +144,7 @@ class Updater(Plugin):
def doShutdown(self):
if not Env.get('dev'):
self.updater.deletePyc(show_logs = False)
removePyc(Env.get('app_dir'), show_logs = False)
return super(Updater, self).doShutdown()
@@ -181,30 +182,6 @@ class BaseUpdater(Plugin):
def check(self):
pass
def deletePyc(self, only_excess = True, show_logs = True):
    """Delete .pyc files under the app dir and prune empty directories.

    When only_excess is True (default) only orphaned .pyc files — those
    without a matching .py source next to them — are removed; otherwise
    every .pyc file is removed. Failures are logged, never raised.
    """
    for root, dirs, files in os.walk(Env.get('app_dir')):

        compiled = [name for name in files if name.endswith('.pyc')]
        sources = set(name for name in files if name.endswith('.py'))

        if only_excess:
            # "foo.pyc" is stale when "foo.py" is absent (strip trailing "c")
            stale = [name for name in compiled if name[:-1] not in sources]
        else:
            stale = compiled

        for name in stale:
            path = os.path.join(root, name)
            if show_logs: log.debug('Removing old PYC file: %s', path)
            try:
                os.remove(path)
            except:
                log.error('Couldn\'t remove %s: %s', (path, traceback.format_exc()))

        # Prune directories left empty after the .pyc removal
        for name in dirs:
            path = os.path.join(root, name)
            if len(os.listdir(path)) == 0:
                try:
                    os.rmdir(path)
                except:
                    log.error('Couldn\'t remove empty directory %s: %s', (path, traceback.format_exc()))
class GitUpdater(BaseUpdater):
@@ -328,7 +305,7 @@ class SourceUpdater(BaseUpdater):
data_dir = Env.get('data_dir')
# Get list of files we want to overwrite
self.deletePyc()
removePyc(app_dir)
existing_files = []
for root, subfiles, filenames in os.walk(app_dir):
for filename in filenames:

View File

@@ -26,7 +26,9 @@ class Database(object):
addApiView('database.document.update', self.updateDocument)
addApiView('database.document.delete', self.deleteDocument)
addEvent('database.setup.after', self.startup_compact)
addEvent('database.setup_index', self.setupIndex)
addEvent('app.migrate', self.migrate)
addEvent('app.after_shutdown', self.close)
@@ -140,8 +142,14 @@ class Database(object):
success = True
try:
start = time.time()
db = self.getDB()
size = float(db.get_db_details().get('size', 0))
log.debug('Compacting database, current size: %sMB', round(size/1048576, 2))
db.compact()
new_size = float(db.get_db_details().get('size', 0))
log.debug('Done compacting database in %ss, new size: %sMB, saved: %sMB', (round(time.time()-start, 2), round(new_size/1048576, 2), round((size-new_size)/1048576, 2)))
except:
log.error('Failed compact: %s', traceback.format_exc())
success = False
@@ -150,6 +158,18 @@ class Database(object):
'success': success
}
# Compact on start
def startup_compact(self):
    """Compact the database on startup, but only when it is worth it.

    Runs a compaction when the on-disk size exceeds 25MB and the last
    compaction happened more than 7 days ago; the completion time is
    persisted via Env.prop so the next startup can check it again.
    """
    from couchpotato import Env

    prop_name = 'last_db_compact'
    last_check = int(Env.prop(prop_name, default = 0))

    size = self.getDB().get_db_details().get('size')

    # 25MB threshold / at most once every 7 days
    if size > 26214400 and last_check < time.time() - 604800:
        self.compact()
        Env.prop(prop_name, value = int(time.time()))
def migrate(self):
from couchpotato import Env

View File

@@ -6,8 +6,9 @@ import random
import re
import string
import sys
import traceback
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss, sp
from couchpotato.core.logger import CPLog
import six
from six.moves import map, zip, filter
@@ -313,3 +314,30 @@ under_pat = re.compile(r'_([a-z])')
def underscoreToCamel(name):
return under_pat.sub(lambda x: x.group(1).upper(), name)
def removePyc(folder, only_excess = True, show_logs = True):
    """Delete .pyc files under *folder* and prune empty directories.

    When only_excess is True (default) only orphaned .pyc files — those
    without a matching .py source in the same directory — are removed;
    otherwise all .pyc files go. Removal errors are logged, never raised.
    """
    folder = sp(folder)

    for root, dirs, files in os.walk(folder):

        compiled = [name for name in files if name.endswith('.pyc')]
        sources = set(name for name in files if name.endswith('.py'))

        if only_excess:
            # "foo.pyc" is stale when "foo.py" is missing (strip trailing "c")
            stale = [name for name in compiled if name[:-1] not in sources]
        else:
            stale = compiled

        for name in stale:
            path = os.path.join(root, name)
            if show_logs: log.debug('Removing old PYC file: %s', path)
            try:
                os.remove(path)
            except:
                log.error('Couldn\'t remove %s: %s', (path, traceback.format_exc()))

        # Prune directories that are (now) empty
        for name in dirs:
            path = os.path.join(root, name)
            if len(os.listdir(path)) == 0:
                try:
                    os.rmdir(path)
                except:
                    log.error('Couldn\'t remove empty directory %s: %s', (path, traceback.format_exc()))

View File

@@ -176,3 +176,24 @@ class MediaChildrenIndex(TreeBasedIndex):
if data.get('_t') == 'media' and data.get('parent_id'):
return data.get('parent_id'), None
class MediaTagIndex(MultiTreeBasedIndex):
    """Multi-key index mapping each tag of a media document to its record.

    Keys are 32-char md5 hexdigests of the tag values, so a single media
    document with N tags is reachable under N index keys.
    """

    _version = 2

    custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""

    def __init__(self, *args, **kwargs):
        # Fixed-width key: the 32-character md5 hexdigest
        kwargs['key_format'] = '32s'
        super(MediaTagIndex, self).__init__(*args, **kwargs)

    def make_key(self, key):
        return md5(key).hexdigest()

    def make_key_value(self, data):
        # Only index media documents that actually carry tags
        if data.get('_t') == 'media' and data.get('tags') and len(data.get('tags', [])) > 0:
            keys = set()
            for tag in data.get('tags', []):
                keys.add(self.make_key(tag))
            return list(keys), None

View File

@@ -9,7 +9,7 @@ from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString, getImdb, getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.media import MediaBase
from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex
from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex, MediaTagIndex
log = CPLog(__name__)
@@ -21,6 +21,7 @@ class MediaPlugin(MediaBase):
'media': MediaIndex,
'media_search_title': TitleSearchIndex,
'media_status': MediaStatusIndex,
'media_tag': MediaTagIndex,
'media_by_type': MediaTypeIndex,
'media_title': TitleIndex,
'media_startswith': StartsWithIndex,
@@ -81,6 +82,8 @@ class MediaPlugin(MediaBase):
addEvent('media.list', self.list)
addEvent('media.delete', self.delete)
addEvent('media.restatus', self.restatus)
addEvent('media.tag', self.tag)
addEvent('media.untag', self.unTag)
def refresh(self, id = '', **kwargs):
handlers = []
@@ -177,7 +180,7 @@ class MediaPlugin(MediaBase):
log.debug('No media found with identifiers: %s', identifiers)
def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, starts_with = None, search = None):
def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, with_tags = None, starts_with = None, search = None):
db = get_db()
@@ -188,6 +191,8 @@ class MediaPlugin(MediaBase):
release_status = [release_status]
if types and not isinstance(types, (list, tuple)):
types = [types]
if with_tags and not isinstance(with_tags, (list, tuple)):
with_tags = [with_tags]
# query media ids
if types:
@@ -214,11 +219,17 @@ class MediaPlugin(MediaBase):
# Add search filters
if starts_with:
filter_by['starts_with'] = set()
starts_with = toUnicode(starts_with.lower())[0]
starts_with = starts_with if starts_with in ascii_lowercase else '#'
filter_by['starts_with'] = [x['_id'] for x in db.get_many('media_startswith', starts_with)]
# Add tag filter
if with_tags:
filter_by['with_tags'] = set()
for tag in with_tags:
for x in db.get_many('media_tag', tag):
filter_by['with_tags'].add(x['_id'])
# Filter with search query
if search:
filter_by['search'] = [x['_id'] for x in db.get_many('media_search_title', search)]
@@ -271,7 +282,8 @@ class MediaPlugin(MediaBase):
release_status = splitString(kwargs.get('release_status')),
status_or = kwargs.get('status_or') is not None,
limit_offset = kwargs.get('limit_offset'),
starts_with = kwargs.get('starts_with'),
with_tags = kwargs.get('with_tags'),
starts_with = splitString(kwargs.get('starts_with')),
search = kwargs.get('search')
)
@@ -459,3 +471,41 @@ class MediaPlugin(MediaBase):
return True
except:
log.error('Failed restatus: %s', traceback.format_exc())
def tag(self, media_id, tag):
    """Attach *tag* to the media document with id *media_id*.

    Adding an already-present tag is a no-op (no database write).
    Returns True on success, False when the lookup/update fails.
    """
    try:
        db = get_db()
        media = db.get('id', media_id)

        existing = media.get('tags') or []
        if tag not in existing:
            media['tags'] = existing + [tag]
            db.update(media)

        return True
    except:
        log.error('Failed tagging: %s', traceback.format_exc())
        return False
def unTag(self, media_id, tag):
    """Remove *tag* from the media document with id *media_id*.

    Removing an absent tag is a no-op (no database write).
    Returns True on success, False when the lookup/update fails.
    """
    try:
        db = get_db()
        m = db.get('id', media_id)

        tags = m.get('tags') or []
        if tag in tags:
            # Filter out every occurrence while preserving the order of the
            # remaining tags. The previous list(set(tags)) roundtrip scrambled
            # tag order and silently deduplicated unrelated tags as a side
            # effect of removing one tag.
            m['tags'] = [x for x in tags if x != tag]
            db.update(m)

        return True
    except:
        log.error('Failed untagging: %s', traceback.format_exc())
        return False

View File

@@ -1,6 +1,6 @@
import traceback
from bs4 import BeautifulSoup
from bs4 import BeautifulSoup, SoupStrainer
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
@@ -20,6 +20,7 @@ class Base(TorrentProvider):
}
http_time_between_calls = 1 # Seconds
only_tables_tags = SoupStrainer('table')
def _searchOnTitle(self, title, movie, quality, results):
@@ -27,7 +28,7 @@ class Base(TorrentProvider):
data = self.getHTMLData(url)
if data:
html = BeautifulSoup(data)
html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags)
try:
result_table = html.find('table', attrs = {'class': 'koptekst'})

View File

@@ -90,7 +90,7 @@ class MovieBase(MovieTypeBase):
# Default profile and category
default_profile = {}
if not params.get('profile_id'):
if not params.get('profile_id') and status != 'done':
default_profile = fireEvent('profile.default', single = True)
cat_id = params.get('category_id')
@@ -149,6 +149,7 @@ class MovieBase(MovieTypeBase):
m['profile_id'] = params.get('profile_id', default_profile.get('id'))
m['category_id'] = cat_id if cat_id is not None and len(cat_id) > 0 else (m.get('category_id') or None)
m['last_edit'] = int(time.time())
m['tags'] = []
do_search = True
db.update(m)

View File

@@ -365,6 +365,32 @@
display: none;
}
.movies .data .eta {
display: none;
}
.movies.details_list .data .eta {
position: absolute;
bottom: 0;
right: 0;
display: block;
min-height: 20px;
text-align: right;
font-style: italic;
opacity: .8;
font-size: 11px;
}
.movies.details_list .movie:hover .data .eta {
display: none;
}
.movies.thumbs_list .data .eta {
display: block;
position: absolute;
bottom: 40px;
}
.movies .data .quality {
position: absolute;
bottom: 2px;

View File

@@ -136,6 +136,21 @@ var Movie = new Class({
self.el.addClass('status_'+self.get('status'));
var eta = null,
eta_date = null,
now = Math.round(+new Date()/1000);
if(self.data.info.release_date)
[self.data.info.release_date.dvd, self.data.info.release_date.theater].each(function(timestamp){
if (timestamp > 0 && (eta == null || Math.abs(timestamp - now) < Math.abs(eta - now)))
eta = timestamp;
});
if(eta){
eta_date = new Date(eta * 1000);
eta_date = eta_date.toLocaleString('en-us', { month: "long" }) + ' ' + eta_date.getFullYear();
}
self.el.adopt(
self.select_checkbox = new Element('input[type=checkbox].inlay', {
'events': {
@@ -161,6 +176,10 @@ var Movie = new Class({
self.description = new Element('div.description.tiny_scroll', {
'text': self.data.info.plot
}),
self.eta = eta_date && (now+8035200 > eta) ? new Element('div.eta', {
'text': eta_date,
'title': 'ETA'
}) : null,
self.quality = new Element('div.quality', {
'events': {
'click': function(e){

View File

@@ -263,7 +263,7 @@ config = [{
'name': 'automation_charts_rentals',
'type': 'bool',
'label': 'DVD Rentals',
'description': 'Top DVD <a href="http://www.imdb.com/boxoffice/rentals/">rentals</a> chart',
'description': 'Top DVD <a href="http://www.imdb.com/boxoffice/rentals">rentals</a> chart',
'default': True,
},
{
@@ -312,7 +312,7 @@ config = [{
'name': 'chart_display_rentals',
'type': 'bool',
'label': 'DVD Rentals',
'description': 'Top DVD <a href="http://www.imdb.com/boxoffice/rentals/">rentals</a> chart',
'description': 'Top DVD <a href="http://www.imdb.com/boxoffice/rentals">rentals</a> chart',
'default': True,
},
{

View File

@@ -153,8 +153,10 @@ class TheMovieDb(MovieProvider):
movie_data = dict((k, v) for k, v in movie_data.items() if v)
# Add alternative names
if movie_data['original_title'] and movie_data['original_title'] not in movie_data['titles']:
movie_data['titles'].insert(0, movie_data['original_title'])
if extended:
movie_data['titles'].append(movie.originaltitle)
for alt in movie.alternate_titles:
alt_name = alt.title
if alt_name and alt_name not in movie_data['titles'] and alt_name.lower() != 'none' and alt_name is not None:

View File

@@ -131,6 +131,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
outside_eta_results = 0
alway_search = self.conf('always_search')
ignore_eta = manual
total_result_count = 0
default_title = getTitle(movie)
if not default_title:
@@ -199,6 +200,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
results = fireEvent('searcher.search', search_protocols, movie, quality, single = True) or []
results_count = len(results)
total_result_count += results_count
if results_count == 0:
log.debug('Nothing found for %s in %s', (default_title, quality['label']))
@@ -235,6 +237,9 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
if self.shuttingDown() or ret:
break
if total_result_count > 0:
fireEvent('media.tag', movie['_id'], 'recent', single = True)
if len(too_early_to_search) > 0:
log.info2('Too early to search for %s, %s', (too_early_to_search, default_title))

View File

@@ -136,6 +136,7 @@ class Manage(Plugin):
# Get movies with done status
total_movies, done_movies = fireEvent('media.list', types = 'movie', status = 'done', release_status = 'done', status_or = True, single = True)
deleted_releases = []
for done_movie in done_movies:
if getIdentifier(done_movie) not in added_identifiers:
fireEvent('media.delete', media_id = done_movie['_id'], delete_from = 'all')
@@ -165,12 +166,11 @@ class Manage(Plugin):
already_used = used_files.get(release_file)
if already_used:
# delete current one
if already_used.get('last_edit', 0) < release.get('last_edit', 0):
fireEvent('release.delete', release['_id'], single = True)
# delete previous one
else:
fireEvent('release.delete', already_used['_id'], single = True)
if release_id not in deleted_releases:
release_id = release['_id'] if already_used.get('last_edit', 0) < release.get('last_edit', 0) else already_used['_id']
fireEvent('release.delete', release_id, single = True)
deleted_releases.append(release_id)
break
else:
used_files[release_file] = release

View File

@@ -1,12 +1,12 @@
import traceback
import re
from CodernityDB.database import RecordNotFound
from CodernityDB.database import RecordNotFound
from couchpotato import get_db
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode, ss
from couchpotato.core.helpers.variable import mergeDicts, getExt, tryInt
from couchpotato.core.helpers.variable import mergeDicts, getExt, tryInt, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.plugins.quality.index import QualityIndex
@@ -22,12 +22,12 @@ class QualityPlugin(Plugin):
}
qualities = [
{'identifier': 'bd50', 'hd': True, 'allow_3d': True, 'size': (20000, 60000), 'label': 'BR-Disk', 'alternative': ['bd25'], 'allow': ['1080p'], 'ext':['iso', 'img'], 'tags': ['bdmv', 'certificate', ('complete', 'bluray'), 'avc', 'mvc']},
{'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts'], 'tags': ['m2ts', 'x264', 'h264']},
{'identifier': 'bd50', 'hd': True, 'allow_3d': True, 'size': (20000, 60000), 'label': 'BR-Disk', 'alternative': ['bd25', ('br', 'disk')], 'allow': ['1080p'], 'ext':['iso', 'img'], 'tags': ['bdmv', 'certificate', ('complete', 'bluray'), 'avc', 'mvc']},
{'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts', 'ts'], 'tags': ['m2ts', 'x264', 'h264']},
{'identifier': '720p', 'hd': True, 'allow_3d': True, 'size': (3000, 10000), 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts'], 'tags': ['x264', 'h264']},
{'identifier': 'brrip', 'hd': True, 'allow_3d': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip'], 'allow': ['720p', '1080p'], 'ext':[], 'tags': ['hdtv', 'hdrip', 'webdl', ('web', 'dl')]},
{'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': ['br2dvd'], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r'), 'dvd9']},
{'identifier': 'dvdrip', 'size': (600, 2400), 'label': 'DVD-Rip', 'width': 720, 'alternative': [], 'allow': [], 'ext':[], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]},
{'identifier': 'brrip', 'hd': True, 'allow_3d': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip', ('br', 'rip')], 'allow': ['720p', '1080p'], 'ext':[], 'tags': ['hdtv', 'hdrip', 'webdl', ('web', 'dl')]},
{'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': ['br2dvd', ('dvd', 'r')], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r'), 'dvd9']},
{'identifier': 'dvdrip', 'size': (600, 2400), 'label': 'DVD-Rip', 'width': 720, 'alternative': [('dvd', 'rip')], 'allow': [], 'ext':[], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]},
{'identifier': 'scr', 'size': (600, 1600), 'label': 'Screener', 'alternative': ['screener', 'dvdscr', 'ppvrip', 'dvdscreener', 'hdscr'], 'allow': ['dvdr', 'dvdrip', '720p', '1080p'], 'ext':[], 'tags': ['webrip', ('web', 'rip')]},
{'identifier': 'r5', 'size': (600, 1000), 'label': 'R5', 'alternative': ['r6'], 'allow': ['dvdr'], 'ext':[]},
{'identifier': 'tc', 'size': (600, 1000), 'label': 'TeleCine', 'alternative': ['telecine'], 'allow': [], 'ext':[]},
@@ -95,15 +95,14 @@ class QualityPlugin(Plugin):
db = get_db()
qualities = db.all('quality', with_doc = True)
temp = []
for quality in qualities:
quality = quality['doc']
q = mergeDicts(self.getQuality(quality.get('identifier')), quality)
for quality in self.qualities:
quality_doc = db.get('quality', quality.get('identifier'), with_doc = True)['doc']
q = mergeDicts(quality, quality_doc)
temp.append(q)
self.cached_qualities = temp
if len(temp) == len(self.qualities):
self.cached_qualities = temp
return temp
@@ -209,10 +208,15 @@ class QualityPlugin(Plugin):
for cur_file in files:
words = re.split('\W+', cur_file.lower())
name_year = fireEvent('scanner.name_year', cur_file, file_name = cur_file, single = True)
threed_words = words
if name_year and name_year.get('name'):
split_name = splitString(name_year.get('name'), ' ')
threed_words = [x for x in words if x not in split_name]
for quality in qualities:
contains_score = self.containsTagScore(quality, words, cur_file)
threedscore = self.contains3D(quality, words, cur_file) if quality.get('allow_3d') else (0, None)
threedscore = self.contains3D(quality, threed_words, cur_file) if quality.get('allow_3d') else (0, None)
self.calcScore(score, quality, contains_score, threedscore)
@@ -257,6 +261,9 @@ class QualityPlugin(Plugin):
cur_file = ss(cur_file)
score = 0
extension = words[-1]
words = words[:-1]
points = {
'identifier': 10,
'label': 10,
@@ -276,7 +283,7 @@ class QualityPlugin(Plugin):
log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
score += points.get(tag_type)
if isinstance(alt, (str, unicode)) and ss(alt.lower()) in cur_file.lower():
if isinstance(alt, (str, unicode)) and ss(alt.lower()) in words:
log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
score += points.get(tag_type) / 2
@@ -286,8 +293,8 @@ class QualityPlugin(Plugin):
# Check extention
for ext in quality.get('ext', []):
if ext == words[-1]:
log.debug('Found %s extension in %s', (ext, cur_file))
if ext == extension:
log.debug('Found %s with .%s extension in %s', (quality['identifier'], ext, cur_file))
score += points['ext']
return score
@@ -433,7 +440,12 @@ class QualityPlugin(Plugin):
'Movie Monuments 2013 BrRip 720p': {'size': 1300, 'quality': 'brrip'},
'The.Movie.2014.3D.1080p.BluRay.AVC.DTS-HD.MA.5.1-GroupName': {'size': 30000, 'quality': 'bd50', 'is_3d': True},
'/home/namehou/Movie Monuments (2013)/Movie Monuments.mkv': {'size': 4500, 'quality': '1080p', 'is_3d': False},
'/home/namehou/Movie Monuments (2013)/Movie Monuments Full-OU.mkv': {'size': 4500, 'quality': '1080p', 'is_3d': True}
'/home/namehou/Movie Monuments (2013)/Movie Monuments Full-OU.mkv': {'size': 4500, 'quality': '1080p', 'is_3d': True},
'/volume1/Public/3D/Moviename/Moviename (2009).3D.SBS.ts': {'size': 7500, 'quality': '1080p', 'is_3d': True},
'/volume1/Public/Moviename/Moviename (2009).ts': {'size': 5500, 'quality': '1080p'},
'/movies/BluRay HDDVD H.264 MKV 720p EngSub/QuiQui le fou (criterion collection #123, 1915)/QuiQui le fou (1915) 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p'},
'C:\\movies\QuiQui le fou (collection #123, 1915)\QuiQui le fou (1915) 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p'},
'C:\\movies\QuiQui le fou (collection #123, 1915)\QuiQui le fou (1915) half-sbs 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p', 'is_3d': True},
}
correct = 0
@@ -441,7 +453,10 @@ class QualityPlugin(Plugin):
test_quality = self.guess(files = [name], extra = tests[name].get('extra', None), size = tests[name].get('size', None)) or {}
success = test_quality.get('identifier') == tests[name]['quality'] and test_quality.get('is_3d') == tests[name].get('is_3d', False)
if not success:
log.error('%s failed check, thinks it\'s %s', (name, test_quality.get('identifier')))
log.error('%s failed check, thinks it\'s "%s" expecting "%s"', (name,
test_quality.get('identifier') + (' 3D' if test_quality.get('is_3d') else ''),
tests[name]['quality'] + (' 3D' if tests[name].get('is_3d') else '')
))
correct += success

View File

@@ -8,6 +8,7 @@ var QualityBase = new Class({
self.qualities = data.qualities;
self.profiles_list = null;
self.profiles = [];
Array.each(data.profiles, self.createProfilesClass.bind(self));
@@ -35,7 +36,7 @@ var QualityBase = new Class({
}).pick();
}
catch(e){}
return {}
},
@@ -106,14 +107,13 @@ var QualityBase = new Class({
createProfileOrdering: function(){
var self = this;
var profile_list;
self.settings.createGroup({
'label': 'Profile Defaults',
'description': '(Needs refresh \'' +(App.isMac() ? 'CMD+R' : 'F5')+ '\' after editing)'
}).adopt(
new Element('.ctrlHolder#profile_ordering').adopt(
new Element('label[text=Order]'),
profile_list = new Element('ul'),
self.profiles_list = new Element('ul'),
new Element('p.formHint', {
'html': 'Change the order the profiles are in the dropdown list. Uncheck to hide it completely.<br />First one will be default.'
})
@@ -133,7 +133,7 @@ var QualityBase = new Class({
'text': profile.data.label
}),
new Element('span.handle')
).inject(profile_list);
).inject(self.profiles_list);
new Form.Check(check);
@@ -141,7 +141,7 @@ var QualityBase = new Class({
// Sortable
var sorted_changed = false;
self.profile_sortable = new Sortables(profile_list, {
self.profile_sortable = new Sortables(self.profiles_list, {
'revert': true,
'handle': '.handle',
'opacity': 0.5,
@@ -163,7 +163,7 @@ var QualityBase = new Class({
ids = [],
hidden = [];
self.profile_sortable.list.getElements('li').each(function(el, nr){
self.profiles_list.getElements('li').each(function(el, nr){
ids.include(el.get('data-id'));
hidden[nr] = +!el.getElement('input[type=checkbox]').get('checked');
});

View File

@@ -104,6 +104,8 @@ class Release(Plugin):
elif rel['status'] in ['snatched', 'downloaded']:
self.updateStatus(rel['_id'], status = 'ignore')
fireEvent('media.untag', media.get('_id'), 'recent', single = True)
def add(self, group, update_info = True, update_id = None):
try:
@@ -149,7 +151,7 @@ class Release(Plugin):
r = db.get('release_identifier', release_identifier, with_doc = True)['doc']
r['media_id'] = media['_id']
except:
log.error('Failed updating release by identifier: %s', traceback.format_exc())
log.debug('Failed updating release by identifier "%s". Inserting new.', release_identifier)
r = db.insert(release)
# Update with ref and _id
@@ -184,7 +186,7 @@ class Release(Plugin):
db.delete(rel)
return True
except RecordDeleted:
log.error('Already deleted: %s', release_id)
log.debug('Already deleted: %s', release_id)
return True
except:
log.error('Failed: %s', traceback.format_exc())
@@ -346,6 +348,8 @@ class Release(Plugin):
mdia['last_edit'] = int(time.time())
db.update(mdia)
fireEvent('media.tag', media['_id'], 'recent', single = True)
return True
# Assume release downloaded

View File

@@ -457,9 +457,15 @@ class Renamer(Plugin):
mdia['last_edit'] = int(time.time())
db.update(mdia)
# List movie on dashboard
fireEvent('media.tag', media['_id'], 'recent', single = True)
except:
log.error('Failed marking movie finished: %s', (traceback.format_exc()))
# Mark media for dashboard
mark_as_recent = False
# Go over current movie releases
for release in fireEvent('release.for_media', media['_id'], single = True):
@@ -506,14 +512,21 @@ class Renamer(Plugin):
# Set the release to downloaded
fireEvent('release.update_status', release['_id'], status = 'downloaded', single = True)
group['release_download'] = release_download
mark_as_recent = True
elif release_download['status'] == 'seeding':
# Set the release to seeding
fireEvent('release.update_status', release['_id'], status = 'seeding', single = True)
mark_as_recent = True
elif release.get('identifier') == group['meta_data']['quality']['identifier']:
# Set the release to downloaded
fireEvent('release.update_status', release['_id'], status = 'downloaded', single = True)
group['release_download'] = release_download
mark_as_recent = True
# Mark media for dashboard
if mark_as_recent:
fireEvent('media.tag', group['media'].get('_id'), 'recent', single = True)
# Remove leftover files
if not remove_leftovers: # Don't remove anything

View File

@@ -105,7 +105,7 @@ class Scanner(Plugin):
'HDTV': ['hdtv']
}
clean = '([ _\,\.\(\)\[\]\-]|^)(3d|hsbs|sbs|ou|extended.cut|directors.cut|french|fr|swedisch|sw|danish|dutch|nl|swesub|subs|spanish|german|ac3|dts|custom|dc|divx|divx5|dsr|dsrip|dutch|dvd|dvdr|dvdrip|dvdscr|dvdscreener|screener|dvdivx|cam|fragment|fs|hdtv|hdrip' \
clean = '([ _\,\.\(\)\[\]\-]|^)(3d|hsbs|sbs|half.sbs|full.sbs|ou|half.ou|full.ou|extended.cut|directors.cut|french|fr|swedisch|sw|danish|dutch|nl|swesub|subs|spanish|german|ac3|dts|custom|dc|divx|divx5|dsr|dsrip|dutch|dvd|dvdr|dvdrip|dvdscr|dvdscreener|screener|dvdivx|cam|fragment|fs|hdtv|hdrip' \
'|hdtvrip|webdl|web.dl|webrip|web.rip|internal|limited|multisubs|ntsc|ogg|ogm|pal|pdtv|proper|repack|rerip|retail|r3|r5|bd5|se|svcd|swedish|german|read.nfo|nfofix|unrated|ws|telesync|ts|telecine|tc|brrip|bdrip|video_ts|audio_ts|480p|480i|576p|576i|720p|720i|1080p|1080i|hrhd|hrhdtv|hddvd|bluray|x264|h264|xvid|xvidvd|xxx|www.www|hc|\[.*\])(?=[ _\,\.\(\)\[\]\-]|$)'
multipart_regex = [
'[ _\.-]+cd[ _\.-]*([0-9a-d]+)', #*cd1
@@ -903,6 +903,7 @@ class Scanner(Plugin):
log.debug('Could not detect via guessit "%s": %s', (file_name, traceback.format_exc()))
# Backup to simple
release_name = os.path.basename(release_name.replace('\\', '/'))
cleaned = ' '.join(re.split('\W+', simplifyString(release_name)))
cleaned = re.sub(self.clean, ' ', cleaned)

View File

@@ -2,6 +2,7 @@ import os
from couchpotato.core.database import Database
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.loader import Loader
from couchpotato.core.settings import Settings
@@ -38,8 +39,11 @@ class Env(object):
return Env._debug
@staticmethod
def get(attr):
return getattr(Env, '_' + attr)
def get(attr, unicode = False):
if unicode:
return toUnicode(getattr(Env, '_' + attr))
else:
return getattr(Env, '_' + attr)
@staticmethod
def all():

View File

@@ -54,7 +54,8 @@ Page.Home = new Class({
})
),
'filter': {
'release_status': 'snatched,seeding,missing,available,downloaded'
'release_status': 'snatched,missing,available,downloaded,done,seeding',
'with_tags': 'recent'
},
'limit': null,
'onLoaded': function(){

View File

@@ -73,10 +73,10 @@
App.setup({
'base_url': {{ json_encode(Env.get('web_base')) }},
'args': {{ json_encode(Env.get('args')) }},
'args': {{ json_encode(Env.get('args', unicode = True)) }},
'options': {{ json_encode(('%s' % Env.get('options'))) }},
'app_dir': {{ json_encode(Env.get('app_dir')) }},
'data_dir': {{ json_encode(Env.get('data_dir')) }},
'app_dir': {{ json_encode(Env.get('app_dir', unicode = True)) }},
'data_dir': {{ json_encode(Env.get('data_dir', unicode = True)) }},
'pid': {{ json_encode(Env.getPid()) }},
'userscript_version': {{ json_encode(fireEvent('userscript.get_version', single = True)) }}
});