diff --git a/couchpotato/api.py b/couchpotato/api.py
index 091de42a..e86b127f 100644
--- a/couchpotato/api.py
+++ b/couchpotato/api.py
@@ -110,6 +110,7 @@ class ApiHandler(RequestHandler):
if jsonp_callback:
self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')')
+ self.set_header("Content-Type", "text/javascript")
elif isinstance(result, tuple) and result[0] == 'redirect':
self.redirect(result[1])
else:
diff --git a/couchpotato/core/_base/updater/main.py b/couchpotato/core/_base/updater/main.py
index f3b4b19c..aecf0c4f 100644
--- a/couchpotato/core/_base/updater/main.py
+++ b/couchpotato/core/_base/updater/main.py
@@ -298,6 +298,7 @@ class SourceUpdater(BaseUpdater):
def replaceWith(self, path):
app_dir = ss(Env.get('app_dir'))
+ data_dir = ss(Env.get('data_dir'))
# Get list of files we want to overwrite
self.deletePyc()
@@ -329,12 +330,15 @@ class SourceUpdater(BaseUpdater):
log.error('Failed overwriting file "%s": %s', (tofile, traceback.format_exc()))
return False
- if Env.get('app_dir') not in Env.get('data_dir'):
- for still_exists in existing_files:
- try:
- os.remove(still_exists)
- except:
- log.error('Failed removing non-used file: %s', traceback.format_exc())
+ for still_exists in existing_files:
+
+ if data_dir in still_exists:
+ continue
+
+ try:
+ os.remove(still_exists)
+ except:
+ log.error('Failed removing non-used file: %s', traceback.format_exc())
return True
diff --git a/couchpotato/core/downloaders/__init__.py b/couchpotato/core/downloaders/__init__.py
index 5fb7125f..a81ce881 100644
--- a/couchpotato/core/downloaders/__init__.py
+++ b/couchpotato/core/downloaders/__init__.py
@@ -1,4 +1,4 @@
-config = {
+config = [{
'name': 'download_providers',
'groups': [
{
@@ -10,4 +10,4 @@ config = {
'options': [],
},
],
-}
+}]
diff --git a/couchpotato/core/downloaders/base.py b/couchpotato/core/downloaders/base.py
index 08be4bd0..9e24d914 100644
--- a/couchpotato/core/downloaders/base.py
+++ b/couchpotato/core/downloaders/base.py
@@ -66,36 +66,36 @@ class Downloader(Provider):
def getAllDownloadStatus(self):
return
- def _removeFailed(self, item):
+ def _removeFailed(self, release_download):
if self.isDisabled(manual = True, data = {}):
return
- if item and item.get('downloader') == self.getName():
+ if release_download and release_download.get('downloader') == self.getName():
if self.conf('delete_failed'):
- return self.removeFailed(item)
+ return self.removeFailed(release_download)
return False
return
- def removeFailed(self, item):
+ def removeFailed(self, release_download):
return
- def _processComplete(self, item):
+ def _processComplete(self, release_download):
if self.isDisabled(manual = True, data = {}):
return
- if item and item.get('downloader') == self.getName():
+ if release_download and release_download.get('downloader') == self.getName():
if self.conf('remove_complete', default = False):
- return self.processComplete(item = item, delete_files = self.conf('delete_files', default = False))
+ return self.processComplete(release_download = release_download, delete_files = self.conf('delete_files', default = False))
return False
return
- def processComplete(self, item, delete_files):
+ def processComplete(self, release_download, delete_files):
return
- def isCorrectProtocol(self, item_protocol):
- is_correct = item_protocol in self.protocol
+ def isCorrectProtocol(self, protocol):
+ is_correct = protocol in self.protocol
if not is_correct:
log.debug("Downloader doesn't support this protocol")
@@ -151,20 +151,20 @@ class Downloader(Provider):
(d_manual and manual or d_manual is False) and \
(not data or self.isCorrectProtocol(data.get('protocol')))
- def _pause(self, item, pause = True):
+ def _pause(self, release_download, pause = True):
if self.isDisabled(manual = True, data = {}):
return
- if item and item.get('downloader') == self.getName():
- self.pause(item, pause)
+ if release_download and release_download.get('downloader') == self.getName():
+ self.pause(release_download, pause)
return True
return False
- def pause(self, item, pause):
+ def pause(self, release_download, pause):
return
-class StatusList(list):
+class ReleaseDownloadList(list):
provider = None
@@ -173,7 +173,7 @@ class StatusList(list):
self.provider = provider
self.kwargs = kwargs
- super(StatusList, self).__init__()
+ super(ReleaseDownloadList, self).__init__()
def extend(self, results):
for r in results:
@@ -181,7 +181,7 @@ class StatusList(list):
def append(self, result):
new_result = self.fillResult(result)
- super(StatusList, self).append(new_result)
+ super(ReleaseDownloadList, self).append(new_result)
def fillResult(self, result):
@@ -190,6 +190,7 @@ class StatusList(list):
'status': 'busy',
'downloader': self.provider.getName(),
'folder': '',
+ 'files': '',
}
return mergeDicts(defaults, result)
diff --git a/couchpotato/core/downloaders/blackhole/__init__.py b/couchpotato/core/downloaders/blackhole/__init__.py
index 290e8d43..6b5279a1 100644
--- a/couchpotato/core/downloaders/blackhole/__init__.py
+++ b/couchpotato/core/downloaders/blackhole/__init__.py
@@ -35,6 +35,13 @@ config = [{
'type': 'dropdown',
'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')],
},
+ {
+ 'name': 'create_subdir',
+ 'default': 0,
+ 'type': 'bool',
+ 'advanced': True,
+ 'description': 'Create a sub directory when saving the .nzb (or .torrent).',
+ },
{
'name': 'manual',
'default': 0,
diff --git a/couchpotato/core/downloaders/blackhole/main.py b/couchpotato/core/downloaders/blackhole/main.py
index 9a5a6217..854860cd 100644
--- a/couchpotato/core/downloaders/blackhole/main.py
+++ b/couchpotato/core/downloaders/blackhole/main.py
@@ -33,17 +33,27 @@ class Blackhole(Downloader):
log.error('No nzb/torrent available: %s', data.get('url'))
return False
- fullPath = os.path.join(directory, self.createFileName(data, filedata, movie))
+ file_name = self.createFileName(data, filedata, movie)
+ full_path = os.path.join(directory, file_name)
+
+ if self.conf('create_subdir'):
+ try:
+ new_path = os.path.splitext(full_path)[0]
+ if not os.path.exists(new_path):
+ os.makedirs(new_path)
+ full_path = os.path.join(new_path, file_name)
+ except:
+ log.error('Couldnt create sub dir, reverting to old one: %s', full_path)
try:
- if not os.path.isfile(fullPath):
- log.info('Downloading %s to %s.', (data.get('protocol'), fullPath))
- with open(fullPath, 'wb') as f:
+ if not os.path.isfile(full_path):
+ log.info('Downloading %s to %s.', (data.get('protocol'), full_path))
+ with open(full_path, 'wb') as f:
f.write(filedata)
- os.chmod(fullPath, Env.getPermission('file'))
+ os.chmod(full_path, Env.getPermission('file'))
return True
else:
- log.info('File %s already exists.', fullPath)
+ log.info('File %s already exists.', full_path)
return True
except:
diff --git a/couchpotato/core/downloaders/deluge/main.py b/couchpotato/core/downloaders/deluge/main.py
index 580ed7ff..f3a1238f 100644
--- a/couchpotato/core/downloaders/deluge/main.py
+++ b/couchpotato/core/downloaders/deluge/main.py
@@ -1,12 +1,14 @@
-from base64 import b64encode
-from couchpotato.core.downloaders.base import Downloader, StatusList
-from couchpotato.core.helpers.encoding import isInt, ss
+from base64 import b64encode, b16encode, b32decode
+from bencode import bencode as benc, bdecode
+from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
+from couchpotato.core.helpers.encoding import isInt, sp
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.logger import CPLog
-from couchpotato.environment import Env
from datetime import timedelta
+from hashlib import sha1
from synchronousdeluge import DelugeClient
import os.path
+import re
import traceback
log = CPLog(__name__)
@@ -72,7 +74,7 @@ class Deluge(Downloader):
remote_torrent = self.drpc.add_torrent_magnet(data.get('url'), options)
else:
filename = self.createFileName(data, filedata, movie)
- remote_torrent = self.drpc.add_torrent_file(filename, b64encode(filedata), options)
+ remote_torrent = self.drpc.add_torrent_file(filename, filedata, options)
if not remote_torrent:
log.error('Failed sending torrent to Deluge')
@@ -85,14 +87,10 @@ class Deluge(Downloader):
log.debug('Checking Deluge download status.')
- if not os.path.isdir(Env.setting('from', 'renamer')):
- log.error('Renamer "from" folder doesn\'t to exist.')
- return
-
if not self.connect():
return False
- statuses = StatusList(self)
+ release_downloads = ReleaseDownloadList(self)
queue = self.drpc.get_alltorrents()
@@ -101,50 +99,55 @@ class Deluge(Downloader):
return False
for torrent_id in queue:
- item = queue[torrent_id]
- log.debug('name=%s / id=%s / save_path=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (item['name'], item['hash'], item['save_path'], item['move_completed_path'], item['hash'], item['progress'], item['state'], item['eta'], item['ratio'], item['stop_ratio'], item['is_seed'], item['is_finished'], item['paused']))
+ torrent = queue[torrent_id]
+ log.debug('name=%s / id=%s / save_path=%s / move_completed_path=%s / hash=%s / progress=%s / state=%s / eta=%s / ratio=%s / stop_ratio=%s / is_seed=%s / is_finished=%s / paused=%s', (torrent['name'], torrent['hash'], torrent['save_path'], torrent['move_completed_path'], torrent['hash'], torrent['progress'], torrent['state'], torrent['eta'], torrent['ratio'], torrent['stop_ratio'], torrent['is_seed'], torrent['is_finished'], torrent['paused']))
# Deluge has no easy way to work out if a torrent is stalled or failing.
#status = 'failed'
status = 'busy'
- if item['is_seed'] and tryFloat(item['ratio']) < tryFloat(item['stop_ratio']):
- # We have item['seeding_time'] to work out what the seeding time is, but we do not
+ if torrent['is_seed'] and tryFloat(torrent['ratio']) < tryFloat(torrent['stop_ratio']):
+ # We have torrent['seeding_time'] to work out what the seeding time is, but we do not
# have access to the downloader seed_time, as with deluge we have no way to pass it
# when the torrent is added. So Deluge will only look at the ratio.
# See above comment in download().
status = 'seeding'
- elif item['is_seed'] and item['is_finished'] and item['paused'] and item['state'] == 'Paused':
+ elif torrent['is_seed'] and torrent['is_finished'] and torrent['paused'] and torrent['state'] == 'Paused':
status = 'completed'
- download_dir = item['save_path']
- if item['move_on_completed']:
- download_dir = item['move_completed_path']
+ download_dir = sp(torrent['save_path'])
+ if torrent['move_on_completed']:
+ download_dir = torrent['move_completed_path']
- statuses.append({
- 'id': item['hash'],
- 'name': item['name'],
+ torrent_files = []
+ for file_item in torrent['files']:
+ torrent_files.append(sp(os.path.join(download_dir, file_item['path'])))
+
+ release_downloads.append({
+ 'id': torrent['hash'],
+ 'name': torrent['name'],
'status': status,
- 'original_status': item['state'],
- 'seed_ratio': item['ratio'],
- 'timeleft': str(timedelta(seconds = item['eta'])),
- 'folder': ss(os.path.join(download_dir, item['name'])),
+ 'original_status': torrent['state'],
+ 'seed_ratio': torrent['ratio'],
+ 'timeleft': str(timedelta(seconds = torrent['eta'])),
+ 'folder': sp(download_dir if len(torrent_files) == 1 else os.path.join(download_dir, torrent['name'])),
+ 'files': '|'.join(torrent_files),
})
- return statuses
+ return release_downloads
- def pause(self, item, pause = True):
+ def pause(self, release_download, pause = True):
if pause:
- return self.drpc.pause_torrent([item['id']])
+ return self.drpc.pause_torrent([release_download['id']])
else:
- return self.drpc.resume_torrent([item['id']])
+ return self.drpc.resume_torrent([release_download['id']])
- def removeFailed(self, item):
- log.info('%s failed downloading, deleting...', item['name'])
- return self.drpc.remove_torrent(item['id'], True)
+ def removeFailed(self, release_download):
+ log.info('%s failed downloading, deleting...', release_download['name'])
+ return self.drpc.remove_torrent(release_download['id'], True)
- def processComplete(self, item, delete_files = False):
- log.debug('Requesting Deluge to remove the torrent %s%s.', (item['name'], ' and cleanup the downloaded files' if delete_files else ''))
- return self.drpc.remove_torrent(item['id'], remove_local_data = delete_files)
+ def processComplete(self, release_download, delete_files = False):
+ log.debug('Requesting Deluge to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
+ return self.drpc.remove_torrent(release_download['id'], remove_local_data = delete_files)
class DelugeRPC(object):
@@ -171,7 +174,10 @@ class DelugeRPC(object):
try:
self.connect()
torrent_id = self.client.core.add_torrent_magnet(torrent, options).get()
- if options['label']:
+ if not torrent_id:
+ torrent_id = self._check_torrent(True, torrent)
+
+ if torrent_id and options['label']:
self.client.label.set_torrent(torrent_id, options['label']).get()
except Exception, err:
log.error('Failed to add torrent magnet %s: %s %s', (torrent, err, traceback.format_exc()))
@@ -185,8 +191,11 @@ class DelugeRPC(object):
torrent_id = False
try:
self.connect()
- torrent_id = self.client.core.add_torrent_file(filename, torrent, options).get()
- if options['label']:
+ torrent_id = self.client.core.add_torrent_file(filename, b64encode(torrent), options).get()
+ if not torrent_id:
+ torrent_id = self._check_torrent(False, torrent)
+
+ if torrent_id and options['label']:
self.client.label.set_torrent(torrent_id, options['label']).get()
except Exception, err:
log.error('Failed to add torrent file %s: %s %s', (filename, err, traceback.format_exc()))
@@ -242,3 +251,22 @@ class DelugeRPC(object):
def disconnect(self):
self.client.disconnect()
+
+ def _check_torrent(self, magnet, torrent):
+ # Torrent not added, check if it already existed.
+ if magnet:
+ torrent_hash = re.findall('urn:btih:([\w]{32,40})', torrent)[0]
+ else:
+ info = bdecode(torrent)["info"]
+ torrent_hash = sha1(benc(info)).hexdigest()
+
+ # Convert base 32 to hex
+ if len(torrent_hash) == 32:
+ torrent_hash = b16encode(b32decode(torrent_hash))
+
+ torrent_hash = torrent_hash.lower()
+ torrent_check = self.client.core.get_torrent_status(torrent_hash, {}).get()
+ if torrent_check['hash']:
+ return torrent_hash
+
+ return False
diff --git a/couchpotato/core/downloaders/nzbget/__init__.py b/couchpotato/core/downloaders/nzbget/__init__.py
index 19483713..00763cfb 100644
--- a/couchpotato/core/downloaders/nzbget/__init__.py
+++ b/couchpotato/core/downloaders/nzbget/__init__.py
@@ -12,6 +12,7 @@ config = [{
'name': 'nzbget',
'label': 'NZBGet',
'description': 'Use NZBGet to download NZBs.',
+ 'wizard': True,
'options': [
{
'name': 'enabled',
diff --git a/couchpotato/core/downloaders/nzbget/main.py b/couchpotato/core/downloaders/nzbget/main.py
index b7cf0263..f8506134 100644
--- a/couchpotato/core/downloaders/nzbget/main.py
+++ b/couchpotato/core/downloaders/nzbget/main.py
@@ -1,6 +1,6 @@
from base64 import standard_b64encode
-from couchpotato.core.downloaders.base import Downloader, StatusList
-from couchpotato.core.helpers.encoding import ss
+from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
+from couchpotato.core.helpers.encoding import ss, sp
from couchpotato.core.helpers.variable import tryInt, md5
from couchpotato.core.logger import CPLog
from datetime import timedelta
@@ -99,60 +99,60 @@ class NZBGet(Downloader):
log.error('Failed getting data: %s', traceback.format_exc(1))
return False
- statuses = StatusList(self)
+ release_downloads = ReleaseDownloadList(self)
- for item in groups:
- log.debug('Found %s in NZBGet download queue', item['NZBFilename'])
+ for nzb in groups:
+ log.debug('Found %s in NZBGet download queue', nzb['NZBFilename'])
try:
- nzb_id = [param['Value'] for param in item['Parameters'] if param['Name'] == 'couchpotato'][0]
+ nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
- nzb_id = item['NZBID']
+ nzb_id = nzb['NZBID']
timeleft = -1
try:
- if item['ActiveDownloads'] > 0 and item['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
- timeleft = str(timedelta(seconds = item['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
+ if nzb['ActiveDownloads'] > 0 and nzb['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
+ timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ^ 20))
except:
pass
- statuses.append({
+ release_downloads.append({
'id': nzb_id,
- 'name': item['NZBFilename'],
- 'original_status': 'DOWNLOADING' if item['ActiveDownloads'] > 0 else 'QUEUED',
+ 'name': nzb['NZBFilename'],
+ 'original_status': 'DOWNLOADING' if nzb['ActiveDownloads'] > 0 else 'QUEUED',
# Seems to have no native API function for time left. This will return the time left after NZBGet started downloading this item
'timeleft': timeleft,
})
- for item in queue: # 'Parameters' is not passed in rpc.postqueue
- log.debug('Found %s in NZBGet postprocessing queue', item['NZBFilename'])
- statuses.append({
- 'id': item['NZBID'],
- 'name': item['NZBFilename'],
- 'original_status': item['Stage'],
+ for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
+ log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
+ release_downloads.append({
+ 'id': nzb['NZBID'],
+ 'name': nzb['NZBFilename'],
+ 'original_status': nzb['Stage'],
'timeleft': str(timedelta(seconds = 0)) if not status['PostPaused'] else -1,
})
- for item in history:
- log.debug('Found %s in NZBGet history. ParStatus: %s, ScriptStatus: %s, Log: %s', (item['NZBFilename'] , item['ParStatus'], item['ScriptStatus'] , item['Log']))
+ for nzb in history:
+ log.debug('Found %s in NZBGet history. ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
try:
- nzb_id = [param['Value'] for param in item['Parameters'] if param['Name'] == 'couchpotato'][0]
+ nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
- nzb_id = item['NZBID']
- statuses.append({
+ nzb_id = nzb['NZBID']
+ release_downloads.append({
'id': nzb_id,
- 'name': item['NZBFilename'],
- 'status': 'completed' if item['ParStatus'] in ['SUCCESS','NONE'] and item['ScriptStatus'] in ['SUCCESS','NONE'] else 'failed',
- 'original_status': item['ParStatus'] + ', ' + item['ScriptStatus'],
+ 'name': nzb['NZBFilename'],
+ 'status': 'completed' if nzb['ParStatus'] in ['SUCCESS', 'NONE'] and nzb['ScriptStatus'] in ['SUCCESS', 'NONE'] else 'failed',
+ 'original_status': nzb['ParStatus'] + ', ' + nzb['ScriptStatus'],
'timeleft': str(timedelta(seconds = 0)),
- 'folder': ss(item['DestDir'])
+ 'folder': sp(nzb['DestDir'])
})
- return statuses
+ return release_downloads
- def removeFailed(self, item):
+ def removeFailed(self, release_download):
- log.info('%s failed downloading, deleting...', item['name'])
+ log.info('%s failed downloading, deleting...', release_download['name'])
url = self.url % {'host': self.conf('host'), 'username': self.conf('username'), 'password': self.conf('password')}
@@ -179,9 +179,9 @@ class NZBGet(Downloader):
for hist in history:
for param in hist['Parameters']:
- if param['Name'] == 'couchpotato' and param['Value'] == item['id']:
+ if param['Name'] == 'couchpotato' and param['Value'] == release_download['id']:
nzb_id = hist['ID']
- path = hist['DestDir']
+ path = hist['DestDir']
if nzb_id and path and rpc.editqueue('HistoryDelete', 0, "", [tryInt(nzb_id)]):
shutil.rmtree(path, True)
diff --git a/couchpotato/core/downloaders/nzbvortex/main.py b/couchpotato/core/downloaders/nzbvortex/main.py
index a652f110..f4e233be 100644
--- a/couchpotato/core/downloaders/nzbvortex/main.py
+++ b/couchpotato/core/downloaders/nzbvortex/main.py
@@ -1,6 +1,6 @@
from base64 import b64encode
-from couchpotato.core.downloaders.base import Downloader, StatusList
-from couchpotato.core.helpers.encoding import tryUrlencode, ss
+from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
+from couchpotato.core.helpers.encoding import tryUrlencode, sp
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
from urllib2 import URLError
@@ -30,10 +30,10 @@ class NZBVortex(Downloader):
# Send the nzb
try:
nzb_filename = self.createFileName(data, filedata, movie)
- self.call('nzb/add', params = {'file': (ss(nzb_filename), filedata)}, multipart = True)
+ self.call('nzb/add', params = {'file': (nzb_filename, filedata)}, multipart = True)
raw_statuses = self.call('nzb')
- nzb_id = [item['id'] for item in raw_statuses.get('nzbs', []) if item['name'] == nzb_filename][0]
+ nzb_id = [nzb['id'] for nzb in raw_statuses.get('nzbs', []) if nzb['name'] == nzb_filename][0]
return self.downloadReturnId(nzb_id)
except:
log.error('Something went wrong sending the NZB file: %s', traceback.format_exc())
@@ -43,33 +43,33 @@ class NZBVortex(Downloader):
raw_statuses = self.call('nzb')
- statuses = StatusList(self)
- for item in raw_statuses.get('nzbs', []):
+ release_downloads = ReleaseDownloadList(self)
+ for nzb in raw_statuses.get('nzbs', []):
# Check status
status = 'busy'
- if item['state'] == 20:
+ if nzb['state'] == 20:
status = 'completed'
- elif item['state'] in [21, 22, 24]:
+ elif nzb['state'] in [21, 22, 24]:
status = 'failed'
- statuses.append({
- 'id': item['id'],
- 'name': item['uiTitle'],
+ release_downloads.append({
+ 'id': nzb['id'],
+ 'name': nzb['uiTitle'],
'status': status,
- 'original_status': item['state'],
+ 'original_status': nzb['state'],
'timeleft':-1,
- 'folder': ss(item['destinationPath']),
+ 'folder': sp(nzb['destinationPath']),
})
- return statuses
+ return release_downloads
- def removeFailed(self, item):
+ def removeFailed(self, release_download):
- log.info('%s failed downloading, deleting...', item['name'])
+ log.info('%s failed downloading, deleting...', release_download['name'])
try:
- self.call('nzb/%s/cancel' % item['id'])
+ self.call('nzb/%s/cancel' % release_download['id'])
except:
log.error('Failed deleting: %s', traceback.format_exc(0))
return False
diff --git a/couchpotato/core/downloaders/rtorrent/__init__.py b/couchpotato/core/downloaders/rtorrent/__init__.py
index efc2234b..026a56c6 100755
--- a/couchpotato/core/downloaders/rtorrent/__init__.py
+++ b/couchpotato/core/downloaders/rtorrent/__init__.py
@@ -23,6 +23,8 @@ config = [{
{
'name': 'url',
'default': 'http://localhost:80/RPC2',
+ 'description': 'XML-RPC Endpoint URI. Usually scgi://localhost:5000 '
+ 'or http://localhost:80/RPC2'
},
{
'name': 'username',
@@ -35,6 +37,11 @@ config = [{
'name': 'label',
'description': 'Label to apply on added torrents.',
},
+ {
+ 'name': 'directory',
+ 'type': 'directory',
+ 'description': 'Download to this directory. Keep empty for default rTorrent download directory.',
+ },
{
'name': 'remove_complete',
'label': 'Remove torrent',
@@ -51,6 +58,14 @@ config = [{
'advanced': True,
'description': 'Also remove the leftover files.',
},
+ {
+ 'name': 'append_label',
+ 'label': 'Append Label',
+ 'default': False,
+ 'advanced': True,
+ 'type': 'bool',
+ 'description': 'Append label to download location. Requires you to set the download location above.',
+ },
{
'name': 'paused',
'type': 'bool',
diff --git a/couchpotato/core/downloaders/rtorrent/main.py b/couchpotato/core/downloaders/rtorrent/main.py
index 161c671a..d7ae589f 100755
--- a/couchpotato/core/downloaders/rtorrent/main.py
+++ b/couchpotato/core/downloaders/rtorrent/main.py
@@ -1,13 +1,13 @@
from base64 import b16encode, b32decode
from bencode import bencode, bdecode
-from couchpotato.core.downloaders.base import Downloader, StatusList
-from couchpotato.core.helpers.encoding import ss
+from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
+from couchpotato.core.helpers.encoding import sp
from couchpotato.core.logger import CPLog
from datetime import timedelta
from hashlib import sha1
from rtorrent import RTorrent
from rtorrent.err import MethodError
-import shutil
+import os
log = CPLog(__name__)
@@ -71,7 +71,7 @@ class rTorrent(Downloader):
group.set_command()
group.disable()
except MethodError, err:
- log.error('Unable to set group options: %s', err.message)
+ log.error('Unable to set group options: %s', err.msg)
return False
return True
@@ -91,6 +91,7 @@ class rTorrent(Downloader):
if self.conf('label'):
torrent_params['label'] = self.conf('label')
+
if not filedata and data.get('protocol') == 'torrent':
log.error('Failed sending torrent, no data')
return False
@@ -116,10 +117,19 @@ class rTorrent(Downloader):
# Send torrent to rTorrent
torrent = self.rt.load_torrent(filedata)
+ if not torrent:
+ log.error('Unable to find the torrent, did it fail to load?')
+ return False
+
# Set label
if self.conf('label'):
torrent.set_custom(1, self.conf('label'))
+ if self.conf('directory') and self.conf('append_label'):
+ torrent.set_directory(os.path.join(self.conf('directory'), self.conf('label')))
+ elif self.conf('directory'):
+ torrent.set_directory(self.conf('directory'))
+
# Set Ratio Group
torrent.set_visible(group_name)
@@ -141,37 +151,42 @@ class rTorrent(Downloader):
try:
torrents = self.rt.get_torrents()
- statuses = StatusList(self)
+ release_downloads = ReleaseDownloadList(self)
+
+ for torrent in torrents:
+ torrent_files = []
+ for file_item in torrent.get_files():
+ torrent_files.append(sp(os.path.join(torrent.directory, file_item.path)))
- for item in torrents:
status = 'busy'
- if item.complete:
- if item.active:
+ if torrent.complete:
+ if torrent.active:
status = 'seeding'
else:
status = 'completed'
- statuses.append({
- 'id': item.info_hash,
- 'name': item.name,
+ release_downloads.append({
+ 'id': torrent.info_hash,
+ 'name': torrent.name,
'status': status,
- 'seed_ratio': item.ratio,
- 'original_status': item.state,
- 'timeleft': str(timedelta(seconds = float(item.left_bytes) / item.down_rate)) if item.down_rate > 0 else -1,
- 'folder': ss(item.directory)
+ 'seed_ratio': torrent.ratio,
+ 'original_status': torrent.state,
+ 'timeleft': str(timedelta(seconds = float(torrent.left_bytes) / torrent.down_rate)) if torrent.down_rate > 0 else -1,
+ 'folder': sp(torrent.directory),
+ 'files': '|'.join(torrent_files)
})
- return statuses
+ return release_downloads
except Exception, err:
log.error('Failed to get status from rTorrent: %s', err)
return False
- def pause(self, download_info, pause = True):
+ def pause(self, release_download, pause = True):
if not self.connect():
return False
- torrent = self.rt.find_torrent(download_info['id'])
+ torrent = self.rt.find_torrent(release_download['id'])
if torrent is None:
return False
@@ -179,23 +194,34 @@ class rTorrent(Downloader):
return torrent.pause()
return torrent.resume()
- def removeFailed(self, item):
- log.info('%s failed downloading, deleting...', item['name'])
- return self.processComplete(item, delete_files = True)
+ def removeFailed(self, release_download):
+ log.info('%s failed downloading, deleting...', release_download['name'])
+ return self.processComplete(release_download, delete_files = True)
- def processComplete(self, item, delete_files):
+ def processComplete(self, release_download, delete_files):
log.debug('Requesting rTorrent to remove the torrent %s%s.',
- (item['name'], ' and cleanup the downloaded files' if delete_files else ''))
+ (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
+
if not self.connect():
return False
- torrent = self.rt.find_torrent(item['id'])
+ torrent = self.rt.find_torrent(release_download['id'])
+
if torrent is None:
return False
+ if delete_files:
+ for file_item in torrent.get_files(): # will only delete files, not dir/sub-dir
+ os.unlink(os.path.join(torrent.directory, file_item.path))
+
+ if torrent.is_multi_file() and torrent.directory.endswith(torrent.name):
+ # Remove empty directories bottom up
+ try:
+ for path, _, _ in os.walk(torrent.directory, topdown = False):
+ os.rmdir(path)
+ except OSError:
+ log.info('Directory "%s" contains extra files, unable to remove', torrent.directory)
+
torrent.erase() # just removes the torrent, doesn't delete data
- if delete_files:
- shutil.rmtree(item['folder'], True)
-
return True
diff --git a/couchpotato/core/downloaders/sabnzbd/main.py b/couchpotato/core/downloaders/sabnzbd/main.py
index 08ee409c..aba21231 100644
--- a/couchpotato/core/downloaders/sabnzbd/main.py
+++ b/couchpotato/core/downloaders/sabnzbd/main.py
@@ -1,11 +1,12 @@
-from couchpotato.core.downloaders.base import Downloader, StatusList
-from couchpotato.core.helpers.encoding import tryUrlencode, ss
+from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
+from couchpotato.core.helpers.encoding import tryUrlencode, ss, sp
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
from datetime import timedelta
from urllib2 import URLError
import json
+import os
import traceback
log = CPLog(__name__)
@@ -86,47 +87,58 @@ class Sabnzbd(Downloader):
log.error('Failed getting history json: %s', traceback.format_exc(1))
return False
- statuses = StatusList(self)
+ release_downloads = ReleaseDownloadList(self)
# Get busy releases
- for item in queue.get('slots', []):
- statuses.append({
- 'id': item['nzo_id'],
- 'name': item['filename'],
- 'original_status': item['status'],
- 'timeleft': item['timeleft'] if not queue['paused'] else -1,
+ for nzb in queue.get('slots', []):
+ status = 'busy'
+ if 'ENCRYPTED / ' in nzb['filename']:
+ status = 'failed'
+
+ release_downloads.append({
+ 'id': nzb['nzo_id'],
+ 'name': nzb['filename'],
+ 'status': status,
+ 'original_status': nzb['status'],
+ 'timeleft': nzb['timeleft'] if not queue['paused'] else -1,
})
# Get old releases
- for item in history.get('slots', []):
+ for nzb in history.get('slots', []):
status = 'busy'
- if item['status'] == 'Failed' or (item['status'] == 'Completed' and item['fail_message'].strip()):
+ if nzb['status'] == 'Failed' or (nzb['status'] == 'Completed' and nzb['fail_message'].strip()):
status = 'failed'
- elif item['status'] == 'Completed':
+ elif nzb['status'] == 'Completed':
status = 'completed'
- statuses.append({
- 'id': item['nzo_id'],
- 'name': item['name'],
+ release_downloads.append({
+ 'id': nzb['nzo_id'],
+ 'name': nzb['name'],
'status': status,
- 'original_status': item['status'],
+ 'original_status': nzb['status'],
'timeleft': str(timedelta(seconds = 0)),
- 'folder': ss(item['storage']),
+ 'folder': sp(os.path.dirname(nzb['storage']) if os.path.isfile(nzb['storage']) else nzb['storage']),
})
- return statuses
+ return release_downloads
- def removeFailed(self, item):
+ def removeFailed(self, release_download):
- log.info('%s failed downloading, deleting...', item['name'])
+ log.info('%s failed downloading, deleting...', release_download['name'])
try:
+ self.call({
+ 'mode': 'queue',
+ 'name': 'delete',
+ 'del_files': '1',
+ 'value': release_download['id']
+ }, use_json = False)
self.call({
'mode': 'history',
'name': 'delete',
'del_files': '1',
- 'value': item['id']
+ 'value': release_download['id']
}, use_json = False)
except:
log.error('Failed deleting: %s', traceback.format_exc(0))
@@ -134,15 +146,15 @@ class Sabnzbd(Downloader):
return True
- def processComplete(self, item, delete_files = False):
- log.debug('Requesting SabNZBd to remove the NZB %s.', item['name'])
+ def processComplete(self, release_download, delete_files = False):
+ log.debug('Requesting SabNZBd to remove the NZB %s.', release_download['name'])
try:
self.call({
'mode': 'history',
'name': 'delete',
'del_files': '0',
- 'value': item['id']
+ 'value': release_download['id']
}, use_json = False)
except:
log.error('Failed removing: %s', traceback.format_exc(0))
diff --git a/couchpotato/core/downloaders/synology/main.py b/couchpotato/core/downloaders/synology/main.py
index d5082c77..0721085c 100644
--- a/couchpotato/core/downloaders/synology/main.py
+++ b/couchpotato/core/downloaders/synology/main.py
@@ -3,6 +3,7 @@ from couchpotato.core.helpers.encoding import isInt
from couchpotato.core.logger import CPLog
import json
import requests
+import traceback
log = CPLog(__name__)
@@ -34,12 +35,12 @@ class Synology(Downloader):
elif data['protocol'] in ['nzb', 'torrent']:
log.info('Adding %s' % data['protocol'])
if not filedata:
- log.error('No %s data found' % data['protocol'])
+ log.error('No %s data found', data['protocol'])
else:
filename = data['name'] + '.' + data['protocol']
response = srpc.create_task(filename = filename, filedata = filedata)
- except Exception, err:
- log.error('Exception while adding torrent: %s', err)
+ except:
+ log.error('Exception while adding torrent: %s', traceback.format_exc())
finally:
return response
diff --git a/couchpotato/core/downloaders/transmission/main.py b/couchpotato/core/downloaders/transmission/main.py
index 5ff33c05..2eabb2e8 100644
--- a/couchpotato/core/downloaders/transmission/main.py
+++ b/couchpotato/core/downloaders/transmission/main.py
@@ -1,9 +1,8 @@
from base64 import b64encode
-from couchpotato.core.downloaders.base import Downloader, StatusList
-from couchpotato.core.helpers.encoding import isInt, ss
+from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
+from couchpotato.core.helpers.encoding import isInt, sp
from couchpotato.core.helpers.variable import tryInt, tryFloat
from couchpotato.core.logger import CPLog
-from couchpotato.environment import Env
from datetime import timedelta
import httplib
import json
@@ -89,10 +88,10 @@ class Transmission(Downloader):
if not self.connect():
return False
- statuses = StatusList(self)
+ release_downloads = ReleaseDownloadList(self)
return_params = {
- 'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit']
+ 'fields': ['id', 'name', 'hashString', 'percentDone', 'status', 'eta', 'isStalled', 'isFinished', 'downloadDir', 'uploadRatio', 'secondsSeeding', 'seedIdleLimit', 'files']
}
queue = self.trpc.get_alltorrents(return_params)
@@ -100,47 +99,48 @@ class Transmission(Downloader):
log.debug('Nothing in queue or error')
return False
- for item in queue['torrents']:
+ for torrent in queue['torrents']:
log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / eta=%s / uploadRatio=%s / isFinished=%s',
- (item['name'], item['id'], item['downloadDir'], item['hashString'], item['percentDone'], item['status'], item['eta'], item['uploadRatio'], item['isFinished']))
+ (torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent['eta'], torrent['uploadRatio'], torrent['isFinished']))
- if not os.path.isdir(Env.setting('from', 'renamer')):
- log.error('Renamer "from" folder doesn\'t to exist.')
- return
+ torrent_files = []
+ for file_item in torrent['files']:
+ torrent_files.append(sp(os.path.join(torrent['downloadDir'], file_item['name'])))
status = 'busy'
- if item['isStalled'] and self.conf('stalled_as_failed'):
+ if torrent.get('isStalled') and self.conf('stalled_as_failed'):
status = 'failed'
- elif item['status'] == 0 and item['percentDone'] == 1:
+ elif torrent['status'] == 0 and torrent['percentDone'] == 1:
status = 'completed'
- elif item['status'] in [5, 6]:
+ elif torrent['status'] in [5, 6]:
status = 'seeding'
- statuses.append({
- 'id': item['hashString'],
- 'name': item['name'],
+ release_downloads.append({
+ 'id': torrent['hashString'],
+ 'name': torrent['name'],
'status': status,
- 'original_status': item['status'],
- 'seed_ratio': item['uploadRatio'],
- 'timeleft': str(timedelta(seconds = item['eta'])),
- 'folder': ss(os.path.join(item['downloadDir'], item['name'])),
+ 'original_status': torrent['status'],
+ 'seed_ratio': torrent['uploadRatio'],
+ 'timeleft': str(timedelta(seconds = torrent['eta'])),
+ 'folder': sp(torrent['downloadDir'] if len(torrent_files) == 1 else os.path.join(torrent['downloadDir'], torrent['name'])),
+ 'files': '|'.join(torrent_files)
})
- return statuses
+ return release_downloads
- def pause(self, item, pause = True):
+ def pause(self, release_download, pause = True):
if pause:
- return self.trpc.stop_torrent(item['id'])
+ return self.trpc.stop_torrent(release_download['id'])
else:
- return self.trpc.start_torrent(item['id'])
+ return self.trpc.start_torrent(release_download['id'])
- def removeFailed(self, item):
- log.info('%s failed downloading, deleting...', item['name'])
- return self.trpc.remove_torrent(item['hashString'], True)
+ def removeFailed(self, release_download):
+ log.info('%s failed downloading, deleting...', release_download['name'])
+ return self.trpc.remove_torrent(release_download['id'], True)
- def processComplete(self, item, delete_files = False):
- log.debug('Requesting Transmission to remove the torrent %s%s.', (item['name'], ' and cleanup the downloaded files' if delete_files else ''))
- return self.trpc.remove_torrent(item['hashString'], delete_files)
+ def processComplete(self, release_download, delete_files = False):
+ log.debug('Requesting Transmission to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
+ return self.trpc.remove_torrent(release_download['id'], delete_files)
class TransmissionRPC(object):
diff --git a/couchpotato/core/downloaders/utorrent/main.py b/couchpotato/core/downloaders/utorrent/main.py
index ce82c8c2..1db1b8a3 100644
--- a/couchpotato/core/downloaders/utorrent/main.py
+++ b/couchpotato/core/downloaders/utorrent/main.py
@@ -1,7 +1,7 @@
from base64 import b16encode, b32decode
from bencode import bencode as benc, bdecode
-from couchpotato.core.downloaders.base import Downloader, StatusList
-from couchpotato.core.helpers.encoding import isInt, ss
+from couchpotato.core.downloaders.base import Downloader, ReleaseDownloadList
+from couchpotato.core.helpers.encoding import isInt, ss, sp
from couchpotato.core.helpers.variable import tryInt, tryFloat
from couchpotato.core.logger import CPLog
from datetime import timedelta
@@ -102,39 +102,6 @@ class uTorrent(Downloader):
if self.conf('paused', default = 0):
self.utorrent_api.pause_torrent(torrent_hash)
- count = 0
- while True:
-
- count += 1
- # Check if torrent is saved in subfolder of torrent name
- data = self.utorrent_api.get_files(torrent_hash)
-
- torrent_files = json.loads(data)
- if torrent_files.get('error'):
- log.error('Error getting data from uTorrent: %s', torrent_files.get('error'))
- return False
-
- if (torrent_files.get('files') and len(torrent_files['files'][1]) > 0) or count > 60:
- break
-
- time.sleep(1)
-
- # Torrent has only one file, so uTorrent wont create a folder for it
- if len(torrent_files['files'][1]) == 1:
- # Remove torrent and try again
- self.utorrent_api.remove_torrent(torrent_hash, remove_data = True)
-
- # Send request to uTorrent
- if data.get('protocol') == 'torrent_magnet':
- self.utorrent_api.add_torrent_uri(torrent_filename, data.get('url'), add_folder = True)
- else:
- self.utorrent_api.add_torrent_file(torrent_filename, filedata, add_folder = True)
-
- # Change settings of added torrent
- self.utorrent_api.set_torrent(torrent_hash, torrent_params)
- if self.conf('paused', default = 0):
- self.utorrent_api.pause_torrent(torrent_hash)
-
return self.downloadReturnId(torrent_hash)
def getAllDownloadStatus(self):
@@ -144,7 +111,7 @@ class uTorrent(Downloader):
if not self.connect():
return False
- statuses = StatusList(self)
+ release_downloads = ReleaseDownloadList(self)
data = self.utorrent_api.get_status()
if not data:
@@ -161,52 +128,74 @@ class uTorrent(Downloader):
return False
# Get torrents
- for item in queue['torrents']:
+ for torrent in queue['torrents']:
+
+ #Get files of the torrent
+ torrent_files = []
+ try:
+ torrent_files = json.loads(self.utorrent_api.get_files(torrent[0]))
+ torrent_files = [sp(os.path.join(torrent[26], torrent_file[0])) for torrent_file in torrent_files['files'][1]]
+ except:
+ log.debug('Failed getting files from torrent: %s', torrent[2])
+
+ status_flags = {
+ "STARTED" : 1,
+ "CHECKING" : 2,
+ "CHECK-START" : 4,
+ "CHECKED" : 8,
+ "ERROR" : 16,
+ "PAUSED" : 32,
+ "QUEUED" : 64,
+ "LOADED" : 128
+ }
- # item[21] = Paused | Downloading | Seeding | Finished
status = 'busy'
- if 'Finished' in item[21]:
- status = 'completed'
- self.removeReadOnly(item[26])
- elif 'Seeding' in item[21]:
+ if (torrent[1] & status_flags["STARTED"] or torrent[1] & status_flags["QUEUED"]) and torrent[4] == 1000:
status = 'seeding'
- self.removeReadOnly(item[26])
+ elif (torrent[1] & status_flags["ERROR"]):
+ status = 'failed'
+ elif torrent[4] == 1000:
+ status = 'completed'
- statuses.append({
- 'id': item[0],
- 'name': item[2],
- 'status': status,
- 'seed_ratio': float(item[7]) / 1000,
- 'original_status': item[1],
- 'timeleft': str(timedelta(seconds = item[10])),
- 'folder': ss(item[26]),
+ if not status == 'busy':
+ self.removeReadOnly(torrent_files)
+
+ release_downloads.append({
+ 'id': torrent[0],
+ 'name': torrent[2],
+ 'status': status,
+ 'seed_ratio': float(torrent[7]) / 1000,
+ 'original_status': torrent[1],
+ 'timeleft': str(timedelta(seconds = torrent[10])),
+ 'folder': sp(torrent[26]),
+ 'files': '|'.join(torrent_files)
})
- return statuses
+ return release_downloads
- def pause(self, item, pause = True):
+ def pause(self, release_download, pause = True):
if not self.connect():
return False
- return self.utorrent_api.pause_torrent(item['id'], pause)
+ return self.utorrent_api.pause_torrent(release_download['id'], pause)
- def removeFailed(self, item):
- log.info('%s failed downloading, deleting...', item['name'])
+ def removeFailed(self, release_download):
+ log.info('%s failed downloading, deleting...', release_download['name'])
if not self.connect():
return False
- return self.utorrent_api.remove_torrent(item['id'], remove_data = True)
+ return self.utorrent_api.remove_torrent(release_download['id'], remove_data = True)
- def processComplete(self, item, delete_files = False):
- log.debug('Requesting uTorrent to remove the torrent %s%s.', (item['name'], ' and cleanup the downloaded files' if delete_files else ''))
+ def processComplete(self, release_download, delete_files = False):
+ log.debug('Requesting uTorrent to remove the torrent %s%s.', (release_download['name'], ' and cleanup the downloaded files' if delete_files else ''))
if not self.connect():
return False
- return self.utorrent_api.remove_torrent(item['id'], remove_data = delete_files)
-
- def removeReadOnly(self, folder):
- #Removes all read-only flags in a folder
- if folder and os.path.isdir(folder):
- for root, folders, filenames in os.walk(folder):
- for filename in filenames:
- os.chmod(os.path.join(root, filename), stat.S_IWRITE)
+ return self.utorrent_api.remove_torrent(release_download['id'], remove_data = delete_files)
+
+ def removeReadOnly(self, files):
+ # Removes the read-only flag from all the given files
+ for filepath in files:
+ if os.path.isfile(filepath):
+ #Windows only needs S_IWRITE, but we bitwise-or with current perms to preserve other permission bits on Linux
+ os.chmod(filepath, stat.S_IWRITE | os.stat(filepath).st_mode)
class uTorrentAPI(object):
@@ -304,13 +293,13 @@ class uTorrentAPI(object):
utorrent_settings = json.loads(self._request(action))
# Create settings dict
- for item in utorrent_settings['settings']:
- if item[1] == 0: # int
- settings_dict[item[0]] = int(item[2] if not item[2].strip() == '' else '0')
- elif item[1] == 1: # bool
- settings_dict[item[0]] = True if item[2] == 'true' else False
- elif item[1] == 2: # string
- settings_dict[item[0]] = item[2]
+ for setting in utorrent_settings['settings']:
+ if setting[1] == 0: # int
+ settings_dict[setting[0]] = int(setting[2] if not setting[2].strip() == '' else '0')
+ elif setting[1] == 1: # bool
+ settings_dict[setting[0]] = True if setting[2] == 'true' else False
+ elif setting[1] == 2: # string
+ settings_dict[setting[0]] = setting[2]
#log.debug('uTorrent settings: %s', settings_dict)
diff --git a/couchpotato/core/helpers/encoding.py b/couchpotato/core/helpers/encoding.py
index 5fa2e2ad..bfebcc1f 100644
--- a/couchpotato/core/helpers/encoding.py
+++ b/couchpotato/core/helpers/encoding.py
@@ -1,6 +1,7 @@
from couchpotato.core.logger import CPLog
from string import ascii_letters, digits
from urllib import quote_plus
+import os
import re
import traceback
import unicodedata
@@ -47,6 +48,10 @@ def ss(original, *args):
log.debug('Failed ss encoding char, force UTF8: %s', e)
return u_original.encode('UTF-8')
+def sp(path, *args):
+ # Standardise encoding, normalise case and path, and strip trailing '/' or '\'
+ return os.path.normcase(os.path.normpath(ss(path, *args))).rstrip(os.path.sep)
+
def ek(original, *args):
if isinstance(original, (str, unicode)):
try:
diff --git a/couchpotato/core/helpers/variable.py b/couchpotato/core/helpers/variable.py
index d93c9417..7d35b997 100644
--- a/couchpotato/core/helpers/variable.py
+++ b/couchpotato/core/helpers/variable.py
@@ -1,5 +1,6 @@
from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss
from couchpotato.core.logger import CPLog
+import collections
import hashlib
import os.path
import platform
@@ -136,18 +137,20 @@ def getImdb(txt, check_inside = False, multiple = False):
output.close()
try:
- ids = re.findall('(tt\d{7})', txt)
+ ids = re.findall('(tt\d{4,7})', txt)
+
if multiple:
- return list(set(ids)) if len(ids) > 0 else []
- return ids[0]
+ return list(set(['tt%07d' % tryInt(x[2:]) for x in ids])) if len(ids) > 0 else []
+
+ return 'tt%07d' % tryInt(ids[0][2:])
except IndexError:
pass
return False
-def tryInt(s):
+def tryInt(s, default = 0):
try: return int(s)
- except: return 0
+ except: return default
def tryFloat(s):
try:
@@ -163,6 +166,11 @@ def natsortKey(s):
def natcmp(a, b):
return cmp(natsortKey(a), natsortKey(b))
+def toIterable(value):
+ if isinstance(value, collections.Iterable):
+ return value
+ return [value]
+
def getTitle(library_dict):
try:
try:
@@ -205,3 +213,6 @@ def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
def splitString(str, split_on = ',', clean = True):
list = [x.strip() for x in str.split(split_on)] if str else []
return filter(None, list) if clean else list
+
+def dictIsSubset(a, b):
+ return all([k in b and b[k] == v for k, v in a.items()])
diff --git a/couchpotato/core/loader.py b/couchpotato/core/loader.py
index 2016d287..c14b55bd 100644
--- a/couchpotato/core/loader.py
+++ b/couchpotato/core/loader.py
@@ -1,7 +1,8 @@
from couchpotato.core.event import fireEvent
from couchpotato.core.logger import CPLog
-import glob
+from importlib import import_module
import os
+import sys
import traceback
log = CPLog(__name__)
@@ -12,17 +13,6 @@ class Loader(object):
providers = {}
modules = {}
- def addPath(self, root, base_path, priority, recursive = False):
- for filename in os.listdir(os.path.join(root, *base_path)):
- path = os.path.join(os.path.join(root, *base_path), filename)
- if os.path.isdir(path) and filename[:2] != '__':
- if u'__init__.py' in os.listdir(path):
- new_base_path = ''.join(s + '.' for s in base_path) + filename
- self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)
-
- if recursive:
- self.addPath(root, base_path + [filename], priority, recursive = True)
-
def preload(self, root = ''):
core = os.path.join(root, 'couchpotato', 'core')
@@ -39,6 +29,14 @@ class Loader(object):
# Add media to loader
self.addPath(root, ['couchpotato', 'core', 'media'], 25, recursive = True)
+ # Add custom plugin folder
+ from couchpotato.environment import Env
+ custom_plugin_dir = os.path.join(Env.get('data_dir'), 'custom_plugins')
+ if os.path.isdir(custom_plugin_dir):
+ sys.path.insert(0, custom_plugin_dir)
+ self.paths['custom_plugins'] = (30, '', custom_plugin_dir)
+
+ # Loop over all paths and add to module list
for plugin_type, plugin_tuple in self.paths.iteritems():
priority, module, dir_name = plugin_tuple
self.addFromDir(plugin_type, priority, module, dir_name)
@@ -46,8 +44,9 @@ class Loader(object):
def run(self):
did_save = 0
- for priority in self.modules:
+ for priority in sorted(self.modules):
for module_name, plugin in sorted(self.modules[priority].iteritems()):
+
# Load module
try:
if plugin.get('name')[:2] == '__':
@@ -56,7 +55,6 @@ class Loader(object):
m = self.loadModule(module_name)
if m is None:
continue
- m = getattr(m, plugin.get('name'))
log.info('Loading %s: %s', (plugin['type'], plugin['name']))
@@ -78,20 +76,26 @@ class Loader(object):
if did_save:
fireEvent('settings.save')
+ def addPath(self, root, base_path, priority, recursive = False):
+ root_path = os.path.join(root, *base_path)
+ for filename in os.listdir(root_path):
+ path = os.path.join(root_path, filename)
+ if os.path.isdir(path) and filename[:2] != '__':
+ if u'__init__.py' in os.listdir(path):
+ new_base_path = ''.join(s + '.' for s in base_path) + filename
+ self.paths[new_base_path.replace('.', '_')] = (priority, new_base_path, path)
+
+ if recursive:
+ self.addPath(root, base_path + [filename], priority, recursive = True)
+
def addFromDir(self, plugin_type, priority, module, dir_name):
# Load dir module
- try:
- m = __import__(module)
- splitted = module.split('.')
- for sub in splitted[1:]:
- m = getattr(m, sub)
- except:
- raise
+ if module and len(module) > 0:
+ self.addModule(priority, plugin_type, module, os.path.basename(dir_name))
- for cur_file in glob.glob(os.path.join(dir_name, '*')):
- name = os.path.basename(cur_file)
- if os.path.isdir(os.path.join(dir_name, name)) and name != 'static' and os.path.isfile(os.path.join(cur_file, '__init__.py')):
+ for name in os.listdir(dir_name):
+ if os.path.isdir(os.path.join(dir_name, name)) and name != 'static' and os.path.isfile(os.path.join(dir_name, name, '__init__.py')):
module_name = '%s.%s' % (module, name)
self.addModule(priority, plugin_type, module_name, name)
@@ -131,6 +135,7 @@ class Loader(object):
if not self.modules.get(priority):
self.modules[priority] = {}
+ module = module.lstrip('.')
self.modules[priority][module] = {
'priority': priority,
'module': module,
@@ -140,11 +145,7 @@ class Loader(object):
def loadModule(self, name):
try:
- m = __import__(name)
- splitted = name.split('.')
- for sub in splitted[1:-1]:
- m = getattr(m, sub)
- return m
+ return import_module(name)
except ImportError:
log.debug('Skip loading module plugin %s: %s', (name, traceback.format_exc()))
return None
diff --git a/couchpotato/core/media/__init__.py b/couchpotato/core/media/__init__.py
index 1cef967b..e6a249d5 100644
--- a/couchpotato/core/media/__init__.py
+++ b/couchpotato/core/media/__init__.py
@@ -1,13 +1,44 @@
-from couchpotato.core.event import addEvent
+from couchpotato import get_session
+from couchpotato.core.event import addEvent, fireEventAsync, fireEvent
from couchpotato.core.plugins.base import Plugin
+from couchpotato.core.settings.model import Media
class MediaBase(Plugin):
_type = None
+ default_dict = {
+ 'profile': {'types': {'quality': {}}},
+ 'releases': {'status': {}, 'quality': {}, 'files':{}, 'info': {}},
+ 'library': {'titles': {}, 'files':{}},
+ 'files': {},
+ 'status': {},
+ 'category': {},
+ }
+
def initType(self):
addEvent('media.types', self.getType)
def getType(self):
return self._type
+
+ def createOnComplete(self, id):
+
+ def onComplete():
+ db = get_session()
+ media = db.query(Media).filter_by(id = id).first()
+ fireEventAsync('%s.searcher.single' % media.type, media.to_dict(self.default_dict), on_complete = self.createNotifyFront(id))
+ db.expire_all()
+
+ return onComplete
+
+ def createNotifyFront(self, media_id):
+
+ def notifyFront():
+ db = get_session()
+ media = db.query(Media).filter_by(id = media_id).first()
+ fireEvent('notify.frontend', type = '%s.update.%s' % (media.type, media.id), data = media.to_dict(self.default_dict))
+ db.expire_all()
+
+ return notifyFront
diff --git a/couchpotato/core/media/_base/media/__init__.py b/couchpotato/core/media/_base/media/__init__.py
new file mode 100644
index 00000000..a9693a3d
--- /dev/null
+++ b/couchpotato/core/media/_base/media/__init__.py
@@ -0,0 +1,6 @@
+from .main import MediaPlugin
+
+def start():
+ return MediaPlugin()
+
+config = []
diff --git a/couchpotato/core/media/_base/media/main.py b/couchpotato/core/media/_base/media/main.py
new file mode 100644
index 00000000..87afb82a
--- /dev/null
+++ b/couchpotato/core/media/_base/media/main.py
@@ -0,0 +1,49 @@
+from couchpotato import get_session
+from couchpotato.api import addApiView
+from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
+from couchpotato.core.helpers.variable import splitString
+from couchpotato.core.logger import CPLog
+from couchpotato.core.media import MediaBase
+from couchpotato.core.settings.model import Media
+
+log = CPLog(__name__)
+
+
+class MediaPlugin(MediaBase):
+
+ def __init__(self):
+
+ addApiView('media.refresh', self.refresh, docs = {
+ 'desc': 'Refresh a any media type by ID',
+ 'params': {
+ 'id': {'desc': 'Movie, Show, Season or Episode ID(s) you want to refresh.', 'type': 'int (comma separated)'},
+ }
+ })
+
+ addEvent('app.load', self.addSingleRefresh)
+
+ def refresh(self, id = '', **kwargs):
+ db = get_session()
+
+ for x in splitString(id):
+ media = db.query(Media).filter_by(id = x).first()
+
+ if media:
+ # Get current selected title
+ default_title = ''
+ for title in media.library.titles:
+ if title.default: default_title = title.title
+
+ fireEvent('notify.frontend', type = '%s.busy.%s' % (media.type, x), data = True)
+ fireEventAsync('library.update.%s' % media.type, identifier = media.library.identifier, default_title = default_title, force = True, on_complete = self.createOnComplete(x))
+
+ db.expire_all()
+
+ return {
+ 'success': True,
+ }
+
+ def addSingleRefresh(self):
+
+ for media_type in fireEvent('media.types', merge = True):
+ addApiView('%s.refresh' % media_type, self.refresh)
diff --git a/couchpotato/core/media/_base/search/__init__.py b/couchpotato/core/media/_base/search/__init__.py
new file mode 100644
index 00000000..4b2eae27
--- /dev/null
+++ b/couchpotato/core/media/_base/search/__init__.py
@@ -0,0 +1,6 @@
+from .main import Search
+
+def start():
+ return Search()
+
+config = []
diff --git a/couchpotato/core/media/_base/search/main.py b/couchpotato/core/media/_base/search/main.py
new file mode 100644
index 00000000..81897b5f
--- /dev/null
+++ b/couchpotato/core/media/_base/search/main.py
@@ -0,0 +1,59 @@
+from couchpotato.api import addApiView
+from couchpotato.core.event import fireEvent, addEvent
+from couchpotato.core.helpers.variable import mergeDicts
+from couchpotato.core.logger import CPLog
+from couchpotato.core.plugins.base import Plugin
+
+log = CPLog(__name__)
+
+
+class Search(Plugin):
+
+ def __init__(self):
+
+ addApiView('search', self.search, docs = {
+ 'desc': 'Search the info in providers for a movie',
+ 'params': {
+ 'q': {'desc': 'The (partial) movie name you want to search for'},
+ 'type': {'desc': 'Search for a specific media type. Leave empty to search all.'},
+ },
+ 'return': {'type': 'object', 'example': """{
+ 'success': True,
+ 'movies': array,
+ 'show': array,
+ etc
+}"""}
+ })
+
+ addEvent('app.load', self.addSingleSearches)
+
+ def search(self, q = '', types = None, **kwargs):
+
+ # Make sure types is the correct instance
+ if isinstance(types, (str, unicode)):
+ types = [types]
+ elif isinstance(types, (list, tuple, set)):
+ types = list(types)
+
+ if not types:
+ result = fireEvent('info.search', q = q, merge = True)
+ else:
+ result = {}
+ for media_type in types:
+ result[media_type] = fireEvent('%s.search' % media_type)
+
+ return mergeDicts({
+ 'success': True,
+ }, result)
+
+ def createSingleSearch(self, media_type):
+
+ def singleSearch(q, **kwargs):
+ return self.search(q, type = media_type, **kwargs)
+
+ return singleSearch
+
+ def addSingleSearches(self):
+
+ for media_type in fireEvent('media.types', merge = True):
+ addApiView('%s.search' % media_type, self.createSingleSearch(media_type))
diff --git a/couchpotato/core/media/movie/_base/static/search.css b/couchpotato/core/media/_base/search/static/search.css
similarity index 81%
rename from couchpotato/core/media/movie/_base/static/search.css
rename to couchpotato/core/media/_base/search/static/search.css
index 80c18153..57210d68 100644
--- a/couchpotato/core/media/movie/_base/static/search.css
+++ b/couchpotato/core/media/_base/search/static/search.css
@@ -129,13 +129,13 @@
overflow-x: hidden;
}
- .movie_result {
+ .media_result {
overflow: hidden;
height: 50px;
position: relative;
}
- .movie_result .options {
+ .media_result .options {
position: absolute;
height: 100%;
top: 0;
@@ -147,48 +147,48 @@
border-radius: 0;
box-shadow: inset 0 1px 8px rgba(0,0,0,0.25);
}
- .movie_result .options > .in_library_wanted {
+ .media_result .options > .in_library_wanted {
margin-top: -7px;
}
- .movie_result .options > div {
+ .media_result .options > div {
border: 0;
}
- .movie_result .options .thumbnail {
+ .media_result .options .thumbnail {
vertical-align: middle;
}
- .movie_result .options select {
+ .media_result .options select {
vertical-align: middle;
display: inline-block;
margin-right: 10px;
}
- .movie_result .options select[name=title] { width: 170px; }
- .movie_result .options select[name=profile] { width: 90px; }
- .movie_result .options select[name=category] { width: 80px; }
+ .media_result .options select[name=title] { width: 170px; }
+ .media_result .options select[name=profile] { width: 90px; }
+ .media_result .options select[name=category] { width: 80px; }
@media all and (max-width: 480px) {
- .movie_result .options select[name=title] { width: 90px; }
- .movie_result .options select[name=profile] { width: 50px; }
- .movie_result .options select[name=category] { width: 50px; }
+ .media_result .options select[name=title] { width: 90px; }
+ .media_result .options select[name=profile] { width: 50px; }
+ .media_result .options select[name=category] { width: 50px; }
}
- .movie_result .options .button {
+ .media_result .options .button {
vertical-align: middle;
display: inline-block;
}
- .movie_result .options .message {
+ .media_result .options .message {
height: 100%;
font-size: 20px;
color: #fff;
line-height: 20px;
}
- .movie_result .data {
+ .media_result .data {
position: absolute;
height: 100%;
top: 0;
@@ -199,20 +199,20 @@
border-top: 1px solid rgba(255,255,255, 0.08);
transition: all .4s cubic-bezier(0.9,0,0.1,1);
}
- .movie_result .data.open {
+ .media_result .data.open {
left: 100% !important;
}
- .movie_result:last-child .data { border-bottom: 0; }
+ .media_result:last-child .data { border-bottom: 0; }
- .movie_result .in_wanted, .movie_result .in_library {
+ .media_result .in_wanted, .media_result .in_library {
position: absolute;
bottom: 2px;
left: 14px;
font-size: 11px;
}
- .movie_result .thumbnail {
+ .media_result .thumbnail {
width: 34px;
min-height: 100%;
display: block;
@@ -220,7 +220,7 @@
vertical-align: top;
}
- .movie_result .info {
+ .media_result .info {
position: absolute;
top: 20%;
left: 15px;
@@ -228,7 +228,7 @@
vertical-align: middle;
}
- .movie_result .info h2 {
+ .media_result .info h2 {
margin: 0;
font-weight: normal;
font-size: 20px;
@@ -240,7 +240,7 @@
width: 100%;
}
- .movie_result .info h2 .title {
+ .media_result .info h2 .title {
display: block;
margin: 0;
text-overflow: ellipsis;
@@ -253,7 +253,7 @@
width: 88%;
}
- .movie_result .info h2 .year {
+ .media_result .info h2 .year {
padding: 0 5px;
text-align: center;
position: absolute;
@@ -271,7 +271,7 @@
}
.search_form .mask,
-.movie_result .mask {
+.media_result .mask {
position: absolute;
height: 100%;
width: 100%;
diff --git a/couchpotato/core/media/_base/search/static/search.js b/couchpotato/core/media/_base/search/static/search.js
new file mode 100644
index 00000000..470dcf0b
--- /dev/null
+++ b/couchpotato/core/media/_base/search/static/search.js
@@ -0,0 +1,188 @@
+Block.Search = new Class({
+
+ Extends: BlockBase,
+
+ cache: {},
+
+ create: function(){
+ var self = this;
+
+ var focus_timer = 0;
+ self.el = new Element('div.search_form').adopt(
+ new Element('div.input').adopt(
+ self.input = new Element('input', {
+ 'placeholder': 'Search & add a new media',
+ 'events': {
+ 'keyup': self.keyup.bind(self),
+ 'focus': function(){
+ if(focus_timer) clearTimeout(focus_timer);
+ self.el.addClass('focused')
+ if(this.get('value'))
+ self.hideResults(false)
+ },
+ 'blur': function(){
+ focus_timer = (function(){
+ self.el.removeClass('focused')
+ }).delay(100);
+ }
+ }
+ }),
+ new Element('a.icon2', {
+ 'events': {
+ 'click': self.clear.bind(self),
+ 'touchend': self.clear.bind(self)
+ }
+ })
+ ),
+ self.result_container = new Element('div.results_container', {
+ 'tween': {
+ 'duration': 200
+ },
+ 'events': {
+ 'mousewheel': function(e){
+ (e).stopPropagation();
+ }
+ }
+ }).adopt(
+ self.results = new Element('div.results')
+ )
+ );
+
+ self.mask = new Element('div.mask').inject(self.result_container).fade('hide');
+
+ },
+
+ clear: function(e){
+ var self = this;
+ (e).preventDefault();
+
+ if(self.last_q === ''){
+ self.input.blur()
+ self.last_q = null;
+ }
+ else {
+
+ self.last_q = '';
+ self.input.set('value', '');
+ self.input.focus()
+
+ self.media = {}
+ self.results.empty()
+ self.el.removeClass('filled')
+
+ }
+ },
+
+ hideResults: function(bool){
+ var self = this;
+
+ if(self.hidden == bool) return;
+
+ self.el[bool ? 'removeClass' : 'addClass']('shown');
+
+ if(bool){
+ History.removeEvent('change', self.hideResults.bind(self, !bool));
+ self.el.removeEvent('outerClick', self.hideResults.bind(self, !bool));
+ }
+ else {
+ History.addEvent('change', self.hideResults.bind(self, !bool));
+ self.el.addEvent('outerClick', self.hideResults.bind(self, !bool));
+ }
+
+ self.hidden = bool;
+ },
+
+ keyup: function(e){
+ var self = this;
+
+ self.el[self.q() ? 'addClass' : 'removeClass']('filled')
+
+ if(self.q() != self.last_q){
+ if(self.api_request && self.api_request.isRunning())
+ self.api_request.cancel();
+
+ if(self.autocomplete_timer) clearTimeout(self.autocomplete_timer)
+ self.autocomplete_timer = self.autocomplete.delay(300, self)
+ }
+
+ },
+
+ autocomplete: function(){
+ var self = this;
+
+ if(!self.q()){
+ self.hideResults(true)
+ return
+ }
+
+ self.list()
+ },
+
+ list: function(){
+ var self = this,
+ q = self.q(),
+ cache = self.cache[q];
+
+ self.hideResults(false);
+
+ if(!cache){
+ self.mask.fade('in');
+
+ if(!self.spinner)
+ self.spinner = createSpinner(self.mask);
+
+ self.api_request = Api.request('search', {
+ 'data': {
+ 'q': q
+ },
+ 'onComplete': self.fill.bind(self, q)
+ })
+ }
+ else
+ self.fill(q, cache)
+
+ self.last_q = q;
+
+ },
+
+ fill: function(q, json){
+ var self = this;
+
+ self.cache[q] = json
+
+ self.media = {}
+ self.results.empty()
+
+ Object.each(json, function(media, type){
+ if(typeOf(media) == 'array'){
+ Object.each(media, function(m){
+
+ var m = new Block.Search[m.type.capitalize() + 'Item'](m);
+ $(m).inject(self.results)
+ self.media[m.imdb || 'r-'+Math.floor(Math.random()*10000)] = m
+
+ if(q == m.imdb)
+ m.showOptions()
+
+ });
+ }
+ })
+
+ // Calculate result heights
+ var w = window.getSize(),
+ rc = self.result_container.getCoordinates();
+
+ self.results.setStyle('max-height', (w.y - rc.top - 50) + 'px')
+ self.mask.fade('out')
+
+ },
+
+ loading: function(bool){
+ this.el[bool ? 'addClass' : 'removeClass']('loading')
+ },
+
+ q: function(){
+ return this.input.get('value').trim();
+ }
+
+});
\ No newline at end of file
diff --git a/couchpotato/core/media/_base/searcher/__init__.py b/couchpotato/core/media/_base/searcher/__init__.py
index 0fb6cc09..5e029a25 100644
--- a/couchpotato/core/media/_base/searcher/__init__.py
+++ b/couchpotato/core/media/_base/searcher/__init__.py
@@ -47,7 +47,7 @@ config = [{
{
'name': 'ignored_words',
'label': 'Ignored',
- 'default': 'german, dutch, french, truefrench, danish, swedish, spanish, italian, korean, dubbed, swesub, korsub, dksubs',
+ 'default': 'german, dutch, french, truefrench, danish, swedish, spanish, italian, korean, dubbed, swesub, korsub, dksubs, vain',
'description': 'Ignores releases that match any of these sets. (Works like explained above)'
},
],
diff --git a/couchpotato/core/media/_base/searcher/main.py b/couchpotato/core/media/_base/searcher/main.py
index f09be64b..3c73eb27 100644
--- a/couchpotato/core/media/_base/searcher/main.py
+++ b/couchpotato/core/media/_base/searcher/main.py
@@ -1,17 +1,11 @@
-from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
-from couchpotato.core.helpers.encoding import simplifyString, toUnicode
-from couchpotato.core.helpers.variable import md5, getTitle
+from couchpotato.core.helpers.encoding import simplifyString
+from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.searcher.base import SearcherBase
-from couchpotato.core.settings.model import Movie, Release, ReleaseInfo
-from couchpotato.environment import Env
-from inspect import ismethod, isfunction
import datetime
import re
-import time
-import traceback
log = CPLog(__name__)
@@ -23,7 +17,8 @@ class Searcher(SearcherBase):
addEvent('searcher.contains_other_quality', self.containsOtherQuality)
addEvent('searcher.correct_year', self.correctYear)
addEvent('searcher.correct_name', self.correctName)
- addEvent('searcher.download', self.download)
+ addEvent('searcher.correct_words', self.correctWords)
+ addEvent('searcher.search', self.search)
addApiView('searcher.full_search', self.searchAllView, docs = {
'desc': 'Starts a full search for all media',
@@ -49,86 +44,21 @@ class Searcher(SearcherBase):
progress = fireEvent('searcher.progress', merge = True)
return progress
- def download(self, data, movie, manual = False):
+ def search(self, protocols, media, quality):
+ results = []
- if not data.get('protocol'):
- data['protocol'] = data['type']
- data['type'] = 'movie'
+ for search_protocol in protocols:
+ protocol_results = fireEvent('provider.search.%s.%s' % (search_protocol, media['type']), media, quality, merge = True)
+ if protocol_results:
+ results += protocol_results
- # Test to see if any downloaders are enabled for this type
- downloader_enabled = fireEvent('download.enabled', manual, data, single = True)
+ sorted_results = sorted(results, key = lambda k: k['score'], reverse = True)
- if downloader_enabled:
+ download_preference = self.conf('preferred_method', section = 'searcher')
+ if download_preference != 'both':
+ sorted_results = sorted(sorted_results, key = lambda k: k['protocol'][:3], reverse = (download_preference == 'torrent'))
- snatched_status = fireEvent('status.get', 'snatched', single = True)
-
- # Download movie to temp
- filedata = None
- if data.get('download') and (ismethod(data.get('download')) or isfunction(data.get('download'))):
- filedata = data.get('download')(url = data.get('url'), nzb_id = data.get('id'))
- if filedata == 'try_next':
- return filedata
-
- download_result = fireEvent('download', data = data, movie = movie, manual = manual, filedata = filedata, single = True)
- log.debug('Downloader result: %s', download_result)
-
- if download_result:
- try:
- # Mark release as snatched
- db = get_session()
- rls = db.query(Release).filter_by(identifier = md5(data['url'])).first()
- if rls:
- renamer_enabled = Env.setting('enabled', 'renamer')
-
- done_status = fireEvent('status.get', 'done', single = True)
- rls.status_id = done_status.get('id') if not renamer_enabled else snatched_status.get('id')
-
- # Save download-id info if returned
- if isinstance(download_result, dict):
- for key in download_result:
- rls_info = ReleaseInfo(
- identifier = 'download_%s' % key,
- value = toUnicode(download_result.get(key))
- )
- rls.info.append(rls_info)
- db.commit()
-
- log_movie = '%s (%s) in %s' % (getTitle(movie['library']), movie['library']['year'], rls.quality.label)
- snatch_message = 'Snatched "%s": %s' % (data.get('name'), log_movie)
- log.info(snatch_message)
- fireEvent('movie.snatched', message = snatch_message, data = rls.to_dict())
-
- # If renamer isn't used, mark movie done
- if not renamer_enabled:
- active_status = fireEvent('status.get', 'active', single = True)
- done_status = fireEvent('status.get', 'done', single = True)
- try:
- if movie['status_id'] == active_status.get('id'):
- for profile_type in movie['profile']['types']:
- if profile_type['quality_id'] == rls.quality.id and profile_type['finish']:
- log.info('Renamer disabled, marking movie as finished: %s', log_movie)
-
- # Mark release done
- rls.status_id = done_status.get('id')
- rls.last_edit = int(time.time())
- db.commit()
-
- # Mark movie done
- mvie = db.query(Movie).filter_by(id = movie['id']).first()
- mvie.status_id = done_status.get('id')
- mvie.last_edit = int(time.time())
- db.commit()
- except:
- log.error('Failed marking movie finished, renamer disabled: %s', traceback.format_exc())
-
- except:
- log.error('Failed marking movie finished: %s', traceback.format_exc())
-
- return True
-
- log.info('Tried to download, but none of the "%s" downloaders are enabled or gave an error', (data.get('protocol')))
-
- return False
+ return sorted_results
def getSearchProtocols(self):
@@ -217,7 +147,7 @@ class Searcher(SearcherBase):
except: pass
# Match longest name between []
- try: check_names.append(max(check_name.split('['), key = len))
+ try: check_names.append(max(re.findall(r'[^[]*\[([^]]*)\]', check_name), key = len).strip())
except: pass
for check_name in list(set(check_names)):
@@ -234,5 +164,49 @@ class Searcher(SearcherBase):
return False
+ def correctWords(self, rel_name, media):
+ media_title = fireEvent('searcher.get_search_title', media, single = True)
+ media_words = re.split('\W+', simplifyString(media_title))
+
+ rel_name = simplifyString(rel_name)
+ rel_words = re.split('\W+', rel_name)
+
+ # Make sure it has required words
+ required_words = splitString(self.conf('required_words', section = 'searcher').lower())
+ try: required_words = list(set(required_words + splitString(media['category']['required'].lower())))
+ except: pass
+
+ req_match = 0
+ for req_set in required_words:
+ req = splitString(req_set, '&')
+ req_match += len(list(set(rel_words) & set(req))) == len(req)
+
+ if len(required_words) > 0 and req_match == 0:
+ log.info2('Wrong: Required word missing: %s', rel_name)
+ return False
+
+ # Ignore releases
+ ignored_words = splitString(self.conf('ignored_words', section = 'searcher').lower())
+ try: ignored_words = list(set(ignored_words + splitString(media['category']['ignored'].lower())))
+ except: pass
+
+ ignored_match = 0
+ for ignored_set in ignored_words:
+ ignored = splitString(ignored_set, '&')
+ ignored_match += len(list(set(rel_words) & set(ignored))) == len(ignored)
+
+ if len(ignored_words) > 0 and ignored_match:
+ log.info2("Wrong: '%s' contains 'ignored words'", rel_name)
+ return False
+
+ # Ignore porn stuff
+ pron_tags = ['xxx', 'sex', 'anal', 'tits', 'fuck', 'porn', 'orgy', 'milf', 'boobs', 'erotica', 'erotic', 'cock', 'dick']
+ pron_words = list(set(rel_words) & set(pron_tags) - set(media_words))
+ if pron_words:
+ log.info('Wrong: %s, probably pr0n', rel_name)
+ return False
+
+ return True
+
class SearchSetupError(Exception):
pass
diff --git a/couchpotato/core/media/movie/_base/main.py b/couchpotato/core/media/movie/_base/main.py
index 310b4e92..817b0a38 100644
--- a/couchpotato/core/media/movie/_base/main.py
+++ b/couchpotato/core/media/movie/_base/main.py
@@ -1,14 +1,13 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent
-from couchpotato.core.helpers.encoding import toUnicode, simplifyString
+from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import getImdb, splitString, tryInt, \
mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie import MovieTypeBase
-from couchpotato.core.settings.model import Library, LibraryTitle, Movie, \
+from couchpotato.core.settings.model import Library, LibraryTitle, Media, \
Release
-from couchpotato.environment import Env
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql.expression import or_, asc, not_, desc
from string import ascii_lowercase
@@ -19,14 +18,7 @@ log = CPLog(__name__)
class MovieBase(MovieTypeBase):
- default_dict = {
- 'profile': {'types': {'quality': {}}},
- 'releases': {'status': {}, 'quality': {}, 'files':{}, 'info': {}},
- 'library': {'titles': {}, 'files':{}},
- 'files': {},
- 'status': {},
- 'category': {},
- }
+ _type = 'movie'
def __init__(self):
@@ -34,17 +26,6 @@ class MovieBase(MovieTypeBase):
super(MovieBase, self).__init__()
self.initType()
- addApiView('movie.search', self.search, docs = {
- 'desc': 'Search the movie providers for a movie',
- 'params': {
- 'q': {'desc': 'The (partial) movie name you want to search for'},
- },
- 'return': {'type': 'object', 'example': """{
- 'success': True,
- 'empty': bool, any movies returned or not,
- 'movies': array, movies found,
-}"""}
- })
addApiView('movie.list', self.listView, docs = {
'desc': 'List movies in wanted list',
'params': {
@@ -66,12 +47,6 @@ class MovieBase(MovieTypeBase):
'id': {'desc': 'The id of the movie'},
}
})
- addApiView('movie.refresh', self.refresh, docs = {
- 'desc': 'Refresh a movie by id',
- 'params': {
- 'id': {'desc': 'Movie ID(s) you want to refresh.', 'type': 'int (comma separated)'},
- }
- })
addApiView('movie.available_chars', self.charView)
addApiView('movie.add', self.addView, docs = {
'desc': 'Add new movie to the wanted list',
@@ -103,34 +78,6 @@ class MovieBase(MovieTypeBase):
addEvent('movie.list', self.list)
addEvent('movie.restatus', self.restatus)
- # Clean releases that didn't have activity in the last week
- addEvent('app.load', self.cleanReleases)
- fireEvent('schedule.interval', 'movie.clean_releases', self.cleanReleases, hours = 4)
-
- def cleanReleases(self):
-
- log.debug('Removing releases from dashboard')
-
- now = time.time()
- week = 262080
-
- done_status, available_status, snatched_status = \
- fireEvent('status.get', ['done', 'available', 'snatched'], single = True)
-
- db = get_session()
-
- # get movies last_edit more than a week ago
- movies = db.query(Movie) \
- .filter(Movie.status_id == done_status.get('id'), Movie.last_edit < (now - week)) \
- .all()
-
- for movie in movies:
- for rel in movie.releases:
- if rel.status_id in [available_status.get('id'), snatched_status.get('id')]:
- fireEvent('release.delete', id = rel.id, single = True)
-
- db.expire_all()
-
def getView(self, id = None, **kwargs):
movie = self.get(id) if id else None
@@ -147,9 +94,9 @@ class MovieBase(MovieTypeBase):
imdb_id = getImdb(str(movie_id))
if imdb_id:
- m = db.query(Movie).filter(Movie.library.has(identifier = imdb_id)).first()
+ m = db.query(Media).filter(Media.library.has(identifier = imdb_id)).first()
else:
- m = db.query(Movie).filter_by(id = movie_id).first()
+ m = db.query(Media).filter_by(id = movie_id).first()
results = None
if m:
@@ -169,20 +116,20 @@ class MovieBase(MovieTypeBase):
release_status = [release_status]
# query movie ids
- q = db.query(Movie) \
- .with_entities(Movie.id) \
- .group_by(Movie.id)
+ q = db.query(Media) \
+ .with_entities(Media.id) \
+ .group_by(Media.id)
# Filter on movie status
if status and len(status) > 0:
statuses = fireEvent('status.get', status, single = len(status) > 1)
statuses = [s.get('id') for s in statuses]
- q = q.filter(Movie.status_id.in_(statuses))
+ q = q.filter(Media.status_id.in_(statuses))
# Filter on release status
if release_status and len(release_status) > 0:
- q = q.join(Movie.releases)
+ q = q.join(Media.releases)
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
statuses = [s.get('id') for s in statuses]
@@ -191,7 +138,7 @@ class MovieBase(MovieTypeBase):
# Only join when searching / ordering
if starts_with or search or order != 'release_order':
- q = q.join(Movie.library, Library.titles) \
+ q = q.join(Media.library, Library.titles) \
.filter(LibraryTitle.default == True)
# Add search filters
@@ -242,13 +189,13 @@ class MovieBase(MovieTypeBase):
releases_count[release.movie_id] += 1
# Get main movie data
- q2 = db.query(Movie) \
+ q2 = db.query(Media) \
.options(joinedload_all('library.titles')) \
.options(joinedload_all('library.files')) \
.options(joinedload_all('status')) \
.options(joinedload_all('files'))
- q2 = q2.filter(Movie.id.in_(movie_ids))
+ q2 = q2.filter(Media.id.in_(movie_ids))
results = q2.all()
@@ -291,14 +238,14 @@ class MovieBase(MovieTypeBase):
if release_status and not isinstance(release_status, (list, tuple)):
release_status = [release_status]
- q = db.query(Movie)
+ q = db.query(Media)
# Filter on movie status
if status and len(status) > 0:
statuses = fireEvent('status.get', status, single = len(release_status) > 1)
statuses = [s.get('id') for s in statuses]
- q = q.filter(Movie.status_id.in_(statuses))
+ q = q.filter(Media.status_id.in_(statuses))
# Filter on release status
if release_status and len(release_status) > 0:
@@ -306,7 +253,7 @@ class MovieBase(MovieTypeBase):
statuses = fireEvent('status.get', release_status, single = len(release_status) > 1)
statuses = [s.get('id') for s in statuses]
- q = q.join(Movie.releases) \
+ q = q.join(Media.releases) \
.filter(Release.status_id.in_(statuses))
q = q.join(Library, LibraryTitle) \
@@ -367,47 +314,6 @@ class MovieBase(MovieTypeBase):
'chars': chars,
}
- def refresh(self, id = '', **kwargs):
-
- db = get_session()
-
- for x in splitString(id):
- movie = db.query(Movie).filter_by(id = x).first()
-
- if movie:
-
- # Get current selected title
- default_title = ''
- for title in movie.library.titles:
- if title.default: default_title = title.title
-
- fireEvent('notify.frontend', type = 'movie.busy.%s' % x, data = True)
- fireEventAsync('library.update.movie', identifier = movie.library.identifier, default_title = default_title, force = True, on_complete = self.createOnComplete(x))
-
- db.expire_all()
- return {
- 'success': True,
- }
-
- def search(self, q = '', **kwargs):
-
- cache_key = u'%s/%s' % (__name__, simplifyString(q))
- movies = Env.get('cache').get(cache_key)
-
- if not movies:
-
- if getImdb(q):
- movies = [fireEvent('movie.info', identifier = q, merge = True)]
- else:
- movies = fireEvent('movie.search', q = q, merge = True)
- Env.get('cache').set(cache_key, movies)
-
- return {
- 'success': True,
- 'empty': len(movies) == 0 if movies else 0,
- 'movies': movies,
- }
-
def add(self, params = None, force_readd = True, search_after = True, update_library = False, status_id = None):
if not params: params = {}
@@ -438,12 +344,12 @@ class MovieBase(MovieTypeBase):
cat_id = params.get('category_id')
db = get_session()
- m = db.query(Movie).filter_by(library_id = library.get('id')).first()
+ m = db.query(Media).filter_by(library_id = library.get('id')).first()
added = True
do_search = False
search_after = search_after and self.conf('search_on_add', section = 'moviesearcher')
if not m:
- m = Movie(
+ m = Media(
library_id = library.get('id'),
profile_id = params.get('profile_id', default_profile.get('id')),
status_id = status_id if status_id else status_active.get('id'),
@@ -500,15 +406,12 @@ class MovieBase(MovieTypeBase):
db.expire_all()
return movie_dict
-
def addView(self, **kwargs):
-
- movie_dict = self.add(params = kwargs)
+ add_dict = self.add(params = kwargs)
return {
- 'success': True,
- 'added': True if movie_dict else False,
- 'movie': movie_dict,
+ 'success': True if add_dict else False,
+ 'movie': add_dict,
}
def edit(self, id = '', **kwargs):
@@ -520,7 +423,7 @@ class MovieBase(MovieTypeBase):
ids = splitString(id)
for movie_id in ids:
- m = db.query(Movie).filter_by(id = movie_id).first()
+ m = db.query(Media).filter_by(id = movie_id).first()
if not m:
continue
@@ -567,7 +470,7 @@ class MovieBase(MovieTypeBase):
db = get_session()
- movie = db.query(Movie).filter_by(id = movie_id).first()
+ movie = db.query(Media).filter_by(id = movie_id).first()
if movie:
deleted = False
if delete_from == 'all':
@@ -617,7 +520,7 @@ class MovieBase(MovieTypeBase):
db = get_session()
- m = db.query(Movie).filter_by(id = movie_id).first()
+ m = db.query(Media).filter_by(id = movie_id).first()
if not m or len(m.library.titles) == 0:
log.debug('Can\'t restatus movie, doesn\'t seem to exist.')
return False
@@ -638,24 +541,3 @@ class MovieBase(MovieTypeBase):
db.commit()
return True
-
- def createOnComplete(self, movie_id):
-
- def onComplete():
- db = get_session()
- movie = db.query(Movie).filter_by(id = movie_id).first()
- fireEventAsync('movie.searcher.single', movie.to_dict(self.default_dict), on_complete = self.createNotifyFront(movie_id))
- db.expire_all()
-
- return onComplete
-
-
- def createNotifyFront(self, movie_id):
-
- def notifyFront():
- db = get_session()
- movie = db.query(Movie).filter_by(id = movie_id).first()
- fireEvent('notify.frontend', type = 'movie.update.%s' % movie.id, data = movie.to_dict(self.default_dict))
- db.expire_all()
-
- return notifyFront
diff --git a/couchpotato/core/media/movie/_base/static/list.js b/couchpotato/core/media/movie/_base/static/list.js
index 341d2348..aaa8be12 100644
--- a/couchpotato/core/media/movie/_base/static/list.js
+++ b/couchpotato/core/media/movie/_base/static/list.js
@@ -422,7 +422,7 @@ var MovieList = new Class({
var self = this;
var ids = self.getSelectedMovies()
- Api.request('movie.refresh', {
+ Api.request('media.refresh', {
'data': {
'id': ids.join(','),
}
diff --git a/couchpotato/core/media/movie/_base/static/movie.actions.js b/couchpotato/core/media/movie/_base/static/movie.actions.js
index e9f6141f..f6e0f542 100644
--- a/couchpotato/core/media/movie/_base/static/movie.actions.js
+++ b/couchpotato/core/media/movie/_base/static/movie.actions.js
@@ -241,7 +241,6 @@ MA.Release = new Class({
}
})
).inject(self.release_container);
-
release['el'] = item;
if(status.identifier == 'ignored' || status.identifier == 'failed' || status.identifier == 'snatched'){
@@ -251,6 +250,30 @@ MA.Release = new Class({
else if(!self.next_release && status.identifier == 'available'){
self.next_release = release;
}
+
+ var update_handle = function(notification) {
+ var q = self.movie.quality.getElement('.q_id' + release.quality_id),
+ status = Status.get(release.status_id),
+ new_status = Status.get(notification.data);
+
+ release.status_id = new_status.id
+ release.el.set('class', 'item ' + new_status.identifier);
+
+ var status_el = release.el.getElement('.release_status');
+ status_el.set('class', 'release_status ' + new_status.identifier);
+ status_el.set('text', new_status.identifier);
+
+ if(!q && (new_status.identifier == 'snatched' || new_status.identifier == 'seeding' || new_status.identifier == 'done'))
+ var q = self.addQuality(release.quality_id);
+
+ if(new_status && q && !q.hasClass(new_status.identifier)) {
+ q.removeClass(status.identifier).addClass(new_status.identifier);
+ q.set('title', q.get('title').replace(status.label, new_status.label));
+ }
+ }
+
+ App.addEvent('release.update_status.' + release.id, update_handle);
+
});
if(self.last_release)
@@ -358,7 +381,7 @@ MA.Release = new Class({
},
get: function(release, type){
- return release.info[type] || 'n/a'
+ return release.info[type] !== undefined ? release.info[type] : 'n/a'
},
download: function(release){
@@ -370,7 +393,7 @@ MA.Release = new Class({
if(icon)
icon.addClass('icon spinner').removeClass('download');
- Api.request('release.download', {
+ Api.request('release.manual_download', {
'data': {
'id': release.id
},
@@ -397,17 +420,6 @@ MA.Release = new Class({
'data': {
'id': release.id
},
- 'onComplete': function(){
- var el = release.el;
- if(el && (el.hasClass('failed') || el.hasClass('ignored'))){
- el.removeClass('failed').removeClass('ignored');
- el.getElement('.release_status').set('text', 'available');
- }
- else if(el) {
- el.addClass('ignored');
- el.getElement('.release_status').set('text', 'ignored');
- }
- }
})
},
@@ -694,7 +706,7 @@ MA.Refresh = new Class({
var self = this;
(e).preventDefault();
- Api.request('movie.refresh', {
+ Api.request('media.refresh', {
'data': {
'id': self.movie.get('id')
}
diff --git a/couchpotato/core/media/movie/_base/static/movie.css b/couchpotato/core/media/movie/_base/static/movie.css
index 0200417c..c013bd80 100644
--- a/couchpotato/core/media/movie/_base/static/movie.css
+++ b/couchpotato/core/media/movie/_base/static/movie.css
@@ -419,22 +419,25 @@
}
.movies .data .quality .available,
- .movies .data .quality .snatched {
+ .movies .data .quality .snatched,
+ .movies .data .quality .seeding {
opacity: 1;
cursor: pointer;
}
.movies .data .quality .available { background-color: #578bc3; }
- .movies .data .quality .failed { background-color: #a43d34; }
+ .movies .data .quality .failed,
+ .movies .data .quality .missing,
+ .movies .data .quality .ignored { background-color: #a43d34; }
.movies .data .quality .snatched { background-color: #a2a232; }
- .movies .data .quality .seeding { background-color: #0a6819; }
.movies .data .quality .done {
background-color: #369545;
opacity: 1;
}
+ .movies .data .quality .seeding { background-color: #0a6819; }
.movies .data .quality .finish {
- background-image: url('../images/sprite.png');
- background-repeat: no-repeat;
+ background-image: url('../../images/sprite.png');
+ background-repeat: no-repeat;
background-position: 0 2px;
padding-left: 14px;
background-size: 14px
@@ -646,7 +649,7 @@
margin-top: 25px;
}
}
-
+
.trailer_container.hide {
height: 0 !important;
}
@@ -989,7 +992,7 @@
}
.movies .empty_wanted {
- background-image: url('../images/emptylist.png');
+ background-image: url('../../images/emptylist.png');
background-position: 80% 0;
height: 750px;
width: 100%;
@@ -1029,7 +1032,7 @@
.movies .progress > div .folder {
display: inline-block;
padding: 5px 20px 5px 0;
- white-space: nowrap;
+ white-space: nowrap;
text-overflow: ellipsis;
overflow: hidden;
width: 85%;
diff --git a/couchpotato/core/media/movie/_base/static/movie.js b/couchpotato/core/media/movie/_base/static/movie.js
index 6defc2ad..a865325b 100644
--- a/couchpotato/core/media/movie/_base/static/movie.js
+++ b/couchpotato/core/media/movie/_base/static/movie.js
@@ -185,7 +185,7 @@ var Movie = new Class({
var q = self.quality.getElement('.q_id'+ release.quality_id),
status = Status.get(release.status_id);
- if(!q && (status.identifier == 'snatched' || status.identifier == 'done'))
+ if(!q && (status.identifier == 'snatched' || status.identifier == 'seeding' || status.identifier == 'done'))
var q = self.addQuality(release.quality_id)
if (status && q && !q.hasClass(status.identifier)){
diff --git a/couchpotato/core/media/movie/_base/static/search.js b/couchpotato/core/media/movie/_base/static/search.js
index 7332381b..e04167f0 100644
--- a/couchpotato/core/media/movie/_base/static/search.js
+++ b/couchpotato/core/media/movie/_base/static/search.js
@@ -1,189 +1,4 @@
-Block.Search = new Class({
-
- Extends: BlockBase,
-
- cache: {},
-
- create: function(){
- var self = this;
-
- var focus_timer = 0;
- self.el = new Element('div.search_form').adopt(
- new Element('div.input').adopt(
- self.input = new Element('input', {
- 'placeholder': 'Search & add a new movie',
- 'events': {
- 'keyup': self.keyup.bind(self),
- 'focus': function(){
- if(focus_timer) clearTimeout(focus_timer);
- self.el.addClass('focused')
- if(this.get('value'))
- self.hideResults(false)
- },
- 'blur': function(){
- focus_timer = (function(){
- self.el.removeClass('focused')
- }).delay(100);
- }
- }
- }),
- new Element('a.icon2', {
- 'events': {
- 'click': self.clear.bind(self),
- 'touchend': self.clear.bind(self)
- }
- })
- ),
- self.result_container = new Element('div.results_container', {
- 'tween': {
- 'duration': 200
- },
- 'events': {
- 'mousewheel': function(e){
- (e).stopPropagation();
- }
- }
- }).adopt(
- self.results = new Element('div.results')
- )
- );
-
- self.mask = new Element('div.mask').inject(self.result_container).fade('hide');
-
- },
-
- clear: function(e){
- var self = this;
- (e).preventDefault();
-
- if(self.last_q === ''){
- self.input.blur()
- self.last_q = null;
- }
- else {
-
- self.last_q = '';
- self.input.set('value', '');
- self.input.focus()
-
- self.movies = []
- self.results.empty()
- self.el.removeClass('filled')
-
- }
- },
-
- hideResults: function(bool){
- var self = this;
-
- if(self.hidden == bool) return;
-
- self.el[bool ? 'removeClass' : 'addClass']('shown');
-
- if(bool){
- History.removeEvent('change', self.hideResults.bind(self, !bool));
- self.el.removeEvent('outerClick', self.hideResults.bind(self, !bool));
- }
- else {
- History.addEvent('change', self.hideResults.bind(self, !bool));
- self.el.addEvent('outerClick', self.hideResults.bind(self, !bool));
- }
-
- self.hidden = bool;
- },
-
- keyup: function(e){
- var self = this;
-
- self.el[self.q() ? 'addClass' : 'removeClass']('filled')
-
- if(self.q() != self.last_q){
- if(self.api_request && self.api_request.isRunning())
- self.api_request.cancel();
-
- if(self.autocomplete_timer) clearTimeout(self.autocomplete_timer)
- self.autocomplete_timer = self.autocomplete.delay(300, self)
- }
-
- },
-
- autocomplete: function(){
- var self = this;
-
- if(!self.q()){
- self.hideResults(true)
- return
- }
-
- self.list()
- },
-
- list: function(){
- var self = this,
- q = self.q(),
- cache = self.cache[q];
-
- self.hideResults(false);
-
- if(!cache){
- self.mask.fade('in');
-
- if(!self.spinner)
- self.spinner = createSpinner(self.mask);
-
- self.api_request = Api.request('movie.search', {
- 'data': {
- 'q': q
- },
- 'onComplete': self.fill.bind(self, q)
- })
- }
- else
- self.fill(q, cache)
-
- self.last_q = q;
-
- },
-
- fill: function(q, json){
- var self = this;
-
- self.cache[q] = json
-
- self.movies = {}
- self.results.empty()
-
- Object.each(json.movies, function(movie){
-
- var m = new Block.Search.Item(movie);
- $(m).inject(self.results)
- self.movies[movie.imdb || 'r-'+Math.floor(Math.random()*10000)] = m
-
- if(q == movie.imdb)
- m.showOptions()
-
- });
-
- // Calculate result heights
- var w = window.getSize(),
- rc = self.result_container.getCoordinates();
-
- self.results.setStyle('max-height', (w.y - rc.top - 50) + 'px')
- self.mask.fade('out')
-
- },
-
- loading: function(bool){
- this.el[bool ? 'addClass' : 'removeClass']('loading')
- },
-
- q: function(){
- return this.input.get('value').trim();
- }
-
-});
-
-Block.Search.Item = new Class({
+Block.Search.MovieItem = new Class({
Implements: [Options, Events],
@@ -201,7 +16,7 @@ Block.Search.Item = new Class({
var self = this,
info = self.info;
- self.el = new Element('div.movie_result', {
+ self.el = new Element('div.media_result', {
'id': info.imdb
}).adopt(
self.thumbnail = info.images && info.images.poster.length > 0 ? new Element('img.thumbnail', {
@@ -292,7 +107,7 @@ Block.Search.Item = new Class({
self.options_el.empty();
self.options_el.adopt(
new Element('div.message', {
- 'text': json.added ? 'Movie successfully added.' : 'Movie didn\'t add properly. Check logs'
+ 'text': json.success ? 'Movie successfully added.' : 'Movie didn\'t add properly. Check logs'
})
);
self.mask.fade('out');
diff --git a/couchpotato/core/media/movie/library/movie/main.py b/couchpotato/core/media/movie/library/movie/main.py
index 718e7390..b0d05202 100644
--- a/couchpotato/core/media/movie/library/movie/main.py
+++ b/couchpotato/core/media/movie/library/movie/main.py
@@ -151,7 +151,7 @@ class MovieLibraryPlugin(LibraryBase):
else:
dates = library.info.get('release_date')
- if dates and dates.get('expires', 0) < time.time() or not dates:
+ if dates and (dates.get('expires', 0) < time.time() or dates.get('expires', 0) > time.time() + (604800 * 4)) or not dates:
dates = fireEvent('movie.release_date', identifier = identifier, merge = True)
library.info.update({'release_date': dates })
db.commit()
diff --git a/couchpotato/core/media/movie/searcher/main.py b/couchpotato/core/media/movie/searcher/main.py
index b08e7532..93441c59 100644
--- a/couchpotato/core/media/movie/searcher/main.py
+++ b/couchpotato/core/media/movie/searcher/main.py
@@ -1,16 +1,14 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
-from couchpotato.core.helpers.encoding import simplifyString, toUnicode, ss
-from couchpotato.core.helpers.variable import md5, getTitle, splitString, \
- possibleTitles, getImdb
+from couchpotato.core.helpers.encoding import simplifyString
+from couchpotato.core.helpers.variable import getTitle, possibleTitles, getImdb
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.searcher.base import SearcherBase
from couchpotato.core.media.movie import MovieTypeBase
-from couchpotato.core.settings.model import Movie, Release, ReleaseInfo
+from couchpotato.core.settings.model import Media, Release
from couchpotato.environment import Env
from datetime import date
-from sqlalchemy.exc import InterfaceError
import random
import re
import time
@@ -29,9 +27,10 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
addEvent('movie.searcher.all', self.searchAll)
addEvent('movie.searcher.all_view', self.searchAllView)
addEvent('movie.searcher.single', self.single)
- addEvent('movie.searcher.correct_movie', self.correctMovie)
addEvent('movie.searcher.try_next_release', self.tryNextRelease)
addEvent('movie.searcher.could_be_released', self.couldBeReleased)
+ addEvent('searcher.correct_release', self.correctRelease)
+ addEvent('searcher.get_search_title', self.getSearchTitle)
addApiView('movie.searcher.try_next', self.tryNextReleaseView, docs = {
'desc': 'Marks the snatched results as ignored and try the next best release',
@@ -74,8 +73,8 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
db = get_session()
- movies = db.query(Movie).filter(
- Movie.status.has(identifier = 'active')
+ movies = db.query(Media).filter(
+ Media.status.has(identifier = 'active')
).all()
random.shuffle(movies)
@@ -117,6 +116,10 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
def single(self, movie, search_protocols = None, manual = False):
+ # movies don't contain 'type' yet, so just set to default here
+ if not movie.has_key('type'):
+ movie['type'] = 'movie'
+
# Find out search type
try:
if not search_protocols:
@@ -167,82 +170,20 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
log.info('Search for %s in %s', (default_title, quality_type['quality']['label']))
quality = fireEvent('quality.single', identifier = quality_type['quality']['identifier'], single = True)
- results = []
- for search_protocol in search_protocols:
- protocol_results = fireEvent('provider.search.%s.movie' % search_protocol, movie, quality, merge = True)
- if protocol_results:
- results += protocol_results
-
- sorted_results = sorted(results, key = lambda k: k['score'], reverse = True)
- if len(sorted_results) == 0:
+ results = fireEvent('searcher.search', search_protocols, movie, quality, single = True) or []
+ if len(results) == 0:
log.debug('Nothing found for %s in %s', (default_title, quality_type['quality']['label']))
- download_preference = self.conf('preferred_method', section = 'searcher')
- if download_preference != 'both':
- sorted_results = sorted(sorted_results, key = lambda k: k['protocol'][:3], reverse = (download_preference == 'torrent'))
-
# Check if movie isn't deleted while searching
- if not db.query(Movie).filter_by(id = movie.get('id')).first():
+ if not db.query(Media).filter_by(id = movie.get('id')).first():
break
# Add them to this movie releases list
- for nzb in sorted_results:
+ found_releases += fireEvent('release.create_from_search', results, movie, quality_type, single = True)
- nzb_identifier = md5(nzb['url'])
- found_releases.append(nzb_identifier)
-
- rls = db.query(Release).filter_by(identifier = nzb_identifier).first()
- if not rls:
- rls = Release(
- identifier = nzb_identifier,
- movie_id = movie.get('id'),
- quality_id = quality_type.get('quality_id'),
- status_id = available_status.get('id')
- )
- db.add(rls)
- else:
- [db.delete(old_info) for old_info in rls.info]
- rls.last_edit = int(time.time())
-
- db.commit()
-
- for info in nzb:
- try:
- if not isinstance(nzb[info], (str, unicode, int, long, float)):
- continue
-
- rls_info = ReleaseInfo(
- identifier = info,
- value = toUnicode(nzb[info])
- )
- rls.info.append(rls_info)
- except InterfaceError:
- log.debug('Couldn\'t add %s to ReleaseInfo: %s', (info, traceback.format_exc()))
-
- db.commit()
-
- nzb['status_id'] = rls.status_id
-
-
- for nzb in sorted_results:
- if not quality_type.get('finish', False) and quality_type.get('wait_for', 0) > 0 and nzb.get('age') <= quality_type.get('wait_for', 0):
- log.info('Ignored, waiting %s days: %s', (quality_type.get('wait_for'), nzb['name']))
- continue
-
- if nzb['status_id'] in [ignored_status.get('id'), failed_status.get('id')]:
- log.info('Ignored: %s', nzb['name'])
- continue
-
- if nzb['score'] <= 0:
- log.info('Ignored, score to low: %s', nzb['name'])
- continue
-
- downloaded = fireEvent('searcher.download', data = nzb, movie = movie, manual = manual, single = True)
- if downloaded is True:
- ret = True
- break
- elif downloaded != 'try_next':
- break
+ # Try find a valid result and download it
+ if fireEvent('release.try_download_result', results, movie, quality_type, manual, single = True):
+ ret = True
# Remove releases that aren't found anymore
for release in movie.get('releases', []):
@@ -265,7 +206,11 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
return ret
- def correctMovie(self, nzb = None, movie = None, quality = None, **kwargs):
+ def correctRelease(self, nzb = None, media = None, quality = None, **kwargs):
+
+ if media.get('type') != 'movie': return
+
+ media_title = fireEvent('searcher.get_search_title', media, single = True)
imdb_results = kwargs.get('imdb_results', False)
retention = Env.setting('retention', section = 'nzb')
@@ -274,50 +219,14 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (nzb['age'], retention, nzb['name']))
return False
- movie_name = getTitle(movie['library'])
- movie_words = re.split('\W+', simplifyString(movie_name))
- nzb_name = simplifyString(nzb['name'])
- nzb_words = re.split('\W+', nzb_name)
-
- # Make sure it has required words
- required_words = splitString(self.conf('required_words', section = 'searcher').lower())
- try: required_words = list(set(required_words + splitString(movie['category']['required'].lower())))
- except: pass
-
- req_match = 0
- for req_set in required_words:
- req = splitString(req_set, '&')
- req_match += len(list(set(nzb_words) & set(req))) == len(req)
-
- if len(required_words) > 0 and req_match == 0:
- log.info2('Wrong: Required word missing: %s', nzb['name'])
- return False
-
- # Ignore releases
- ignored_words = splitString(self.conf('ignored_words', section = 'searcher').lower())
- try: ignored_words = list(set(ignored_words + splitString(movie['category']['ignored'].lower())))
- except: pass
-
- ignored_match = 0
- for ignored_set in ignored_words:
- ignored = splitString(ignored_set, '&')
- ignored_match += len(list(set(nzb_words) & set(ignored))) == len(ignored)
-
- if len(ignored_words) > 0 and ignored_match:
- log.info2("Wrong: '%s' contains 'ignored words'", (nzb['name']))
- return False
-
- # Ignore porn stuff
- pron_tags = ['xxx', 'sex', 'anal', 'tits', 'fuck', 'porn', 'orgy', 'milf', 'boobs', 'erotica', 'erotic', 'cock', 'dick']
- pron_words = list(set(nzb_words) & set(pron_tags) - set(movie_words))
- if pron_words:
- log.info('Wrong: %s, probably pr0n', (nzb['name']))
+ # Check for required and ignored words
+ if not fireEvent('searcher.correct_words', nzb['name'], media, single = True):
return False
preferred_quality = fireEvent('quality.single', identifier = quality['identifier'], single = True)
# Contains lower quality string
- if fireEvent('searcher.contains_other_quality', nzb, movie_year = movie['library']['year'], preferred_quality = preferred_quality, single = True):
+ if fireEvent('searcher.contains_other_quality', nzb, movie_year = media['library']['year'], preferred_quality = preferred_quality, single = True):
log.info2('Wrong: %s, looking for %s', (nzb['name'], quality['label']))
return False
@@ -347,23 +256,23 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
return True
# Check if nzb contains imdb link
- if getImdb(nzb.get('description', '')) == movie['library']['identifier']:
+ if getImdb(nzb.get('description', '')) == media['library']['identifier']:
return True
- for raw_title in movie['library']['titles']:
+ for raw_title in media['library']['titles']:
for movie_title in possibleTitles(raw_title['title']):
movie_words = re.split('\W+', simplifyString(movie_title))
if fireEvent('searcher.correct_name', nzb['name'], movie_title, single = True):
# if no IMDB link, at least check year range 1
- if len(movie_words) > 2 and fireEvent('searcher.correct_year', nzb['name'], movie['library']['year'], 1, single = True):
+ if len(movie_words) > 2 and fireEvent('searcher.correct_year', nzb['name'], media['library']['year'], 1, single = True):
return True
# if no IMDB link, at least check year
- if len(movie_words) <= 2 and fireEvent('searcher.correct_year', nzb['name'], movie['library']['year'], 0, single = True):
+ if len(movie_words) <= 2 and fireEvent('searcher.correct_year', nzb['name'], media['library']['year'], 0, single = True):
return True
- log.info("Wrong: %s, undetermined naming. Looking for '%s (%s)'", (nzb['name'], movie_name, movie['library']['year']))
+ log.info("Wrong: %s, undetermined naming. Looking for '%s (%s)'", (nzb['name'], media_title, media['library']['year']))
return False
def couldBeReleased(self, is_pre_release, dates, year = None):
@@ -434,5 +343,9 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
log.error('Failed searching for next release: %s', traceback.format_exc())
return False
+ def getSearchTitle(self, media):
+ if media['type'] == 'movie':
+ return getTitle(media['library'])
+
class SearchSetupError(Exception):
pass
diff --git a/couchpotato/core/plugins/suggestion/__init__.py b/couchpotato/core/media/movie/suggestion/__init__.py
similarity index 100%
rename from couchpotato/core/plugins/suggestion/__init__.py
rename to couchpotato/core/media/movie/suggestion/__init__.py
diff --git a/couchpotato/core/plugins/suggestion/main.py b/couchpotato/core/media/movie/suggestion/main.py
similarity index 93%
rename from couchpotato/core/plugins/suggestion/main.py
rename to couchpotato/core/media/movie/suggestion/main.py
index eb31d26e..f29281ea 100644
--- a/couchpotato/core/plugins/suggestion/main.py
+++ b/couchpotato/core/media/movie/suggestion/main.py
@@ -3,7 +3,7 @@ from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.plugins.base import Plugin
-from couchpotato.core.settings.model import Movie, Library
+from couchpotato.core.settings.model import Media, Library
from couchpotato.environment import Env
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql.expression import or_
@@ -29,9 +29,9 @@ class Suggestion(Plugin):
if not movies or len(movies) == 0:
db = get_session()
- active_movies = db.query(Movie) \
+ active_movies = db.query(Media) \
.options(joinedload_all('library')) \
- .filter(or_(*[Movie.status.has(identifier = s) for s in ['active', 'done']])).all()
+ .filter(or_(*[Media.status.has(identifier = s) for s in ['active', 'done']])).all()
movies = [x.library.identifier for x in active_movies]
if not ignored or len(ignored) == 0:
@@ -89,10 +89,10 @@ class Suggestion(Plugin):
active_status, done_status = fireEvent('status.get', ['active', 'done'], single = True)
db = get_session()
- active_movies = db.query(Movie) \
+ active_movies = db.query(Media) \
.join(Library) \
.with_entities(Library.identifier) \
- .filter(Movie.status_id.in_([active_status.get('id'), done_status.get('id')])).all()
+ .filter(Media.status_id.in_([active_status.get('id'), done_status.get('id')])).all()
movies = [x[0] for x in active_movies]
movies.extend(seen)
diff --git a/couchpotato/core/media/movie/suggestion/static/suggest.css b/couchpotato/core/media/movie/suggestion/static/suggest.css
new file mode 100644
index 00000000..d4ba734b
--- /dev/null
+++ b/couchpotato/core/media/movie/suggestion/static/suggest.css
@@ -0,0 +1,160 @@
+.suggestions {
+}
+
+ .suggestions > h2 {
+ height: 40px;
+ }
+
+.suggestions .media_result {
+ display: inline-block;
+ width: 33.333%;
+ height: 150px;
+}
+
+ @media all and (max-width: 960px) {
+ .suggestions .media_result {
+ width: 50%;
+ }
+ }
+
+ @media all and (max-width: 600px) {
+ .suggestions .media_result {
+ width: 100%;
+ }
+ }
+
+ .suggestions .media_result .data {
+ left: 100px;
+ background: #4e5969;
+ border: none;
+ }
+
+ .suggestions .media_result .data .info {
+ top: 10px;
+ left: 15px;
+ right: 15px;
+ bottom: 10px;
+ overflow: hidden;
+ }
+
+ .suggestions .media_result .data .info h2 {
+ white-space: normal;
+ max-height: 120px;
+ font-size: 18px;
+ line-height: 18px;
+ }
+
+ .suggestions .media_result .data .info .rating,
+ .suggestions .media_result .data .info .genres,
+ .suggestions .media_result .data .info .year {
+ position: static;
+ display: block;
+ padding: 0;
+ opacity: .6;
+ }
+
+ .suggestions .media_result .data .info .year {
+ margin: 10px 0 0;
+ }
+
+ .suggestions .media_result .data .info .rating {
+ font-size: 20px;
+ float: right;
+ margin-top: -20px;
+ }
+ .suggestions .media_result .data .info .rating:before {
+ content: "\e031";
+ font-family: 'Elusive-Icons';
+ font-size: 14px;
+ margin: 0 5px 0 0;
+ vertical-align: bottom;
+ }
+
+ .suggestions .media_result .data .info .genres {
+ font-size: 11px;
+ font-style: italic;
+ text-align: right;
+ }
+
+ .suggestions .media_result .data .info .plot {
+ display: block;
+ font-size: 11px;
+ overflow: hidden;
+ text-align: justify;
+ height: 100%;
+ z-index: 2;
+ top: 64px;
+ position: absolute;
+ background: #4e5969;
+ cursor: pointer;
+ transition: all .4s ease-in-out;
+ padding: 0 3px 10px 0;
+ }
+ .suggestions .media_result .data:before {
+ bottom: 0;
+ content: '';
+ display: block;
+ height: 10px;
+ right: 0;
+ left: 0;
+ bottom: 10px;
+ position: absolute;
+ background: linear-gradient(
+ 0deg,
+ rgba(78, 89, 105, 1) 0%,
+ rgba(78, 89, 105, 0) 100%
+ );
+ z-index: 3;
+ pointer-events: none;
+ }
+
+ .suggestions .media_result .data .info .plot.full {
+ top: 0;
+ overflow: auto;
+ }
+
+ .suggestions .media_result .data {
+ cursor: default;
+ }
+
+ .suggestions .media_result .options {
+ left: 100px;
+ }
+ .suggestions .media_result .options select[name=title] { width: 100%; }
+ .suggestions .media_result .options select[name=profile] { width: 100%; }
+ .suggestions .media_result .options select[name=category] { width: 100%; }
+
+ .suggestions .media_result .button {
+ position: absolute;
+ margin: 2px 0 0 0;
+ right: 15px;
+ bottom: 15px;
+ }
+
+
+ .suggestions .media_result .thumbnail {
+ width: 100px;
+ }
+
+ .suggestions .media_result .actions {
+ position: absolute;
+ top: 10px;
+ right: 10px;
+ display: none;
+ width: 140px;
+ }
+ .suggestions .media_result:hover .actions {
+ display: block;
+ }
+ .suggestions .media_result:hover h2 .title {
+ opacity: 0;
+ }
+ .suggestions .media_result .data.open .actions {
+ display: none;
+ }
+
+ .suggestions .media_result .actions a {
+ margin-left: 10px;
+ vertical-align: middle;
+ }
+
diff --git a/couchpotato/core/plugins/suggestion/static/suggest.js b/couchpotato/core/media/movie/suggestion/static/suggest.js
similarity index 86%
rename from couchpotato/core/plugins/suggestion/static/suggest.js
rename to couchpotato/core/media/movie/suggestion/static/suggest.js
index 40fe53b9..cb09ef4a 100644
--- a/couchpotato/core/plugins/suggestion/static/suggest.js
+++ b/couchpotato/core/media/movie/suggestion/static/suggest.js
@@ -17,7 +17,7 @@ var SuggestList = new Class({
'click:relay(a.delete)': function(e, el){
(e).stop();
- $(el).getParent('.movie_result').destroy();
+ $(el).getParent('.media_result').destroy();
Api.request('suggestion.ignore', {
'data': {
@@ -30,7 +30,7 @@ var SuggestList = new Class({
'click:relay(a.eye-open)': function(e, el){
(e).stop();
- $(el).getParent('.movie_result').destroy();
+ $(el).getParent('.media_result').destroy();
Api.request('suggestion.ignore', {
'data': {
@@ -65,7 +65,7 @@ var SuggestList = new Class({
Object.each(json.suggestions, function(movie){
- var m = new Block.Search.Item(movie, {
+ var m = new Block.Search.MovieItem(movie, {
'onAdded': function(){
self.afterAdded(m, movie)
}
@@ -95,6 +95,10 @@ var SuggestList = new Class({
);
m.data_container.removeEvents('click');
+ var plot = false;
+ if(m.info.plot && m.info.plot.length > 0)
+ plot = m.info.plot;
+
// Add rating
m.info_container.adopt(
m.rating = m.info.rating && m.info.rating.imdb.length == 2 && parseFloat(m.info.rating.imdb[0]) > 0 ? new Element('span.rating', {
@@ -103,6 +107,14 @@ var SuggestList = new Class({
}) : null,
m.genre = m.info.genres && m.info.genres.length > 0 ? new Element('span.genres', {
'text': m.info.genres.slice(0, 3).join(', ')
+ }) : null,
+ m.plot = plot ? new Element('span.plot', {
+ 'text': plot,
+ 'events': {
+ 'click': function(){
+ this.toggleClass('full')
+ }
+ }
}) : null
)
diff --git a/couchpotato/core/notifications/__init__.py b/couchpotato/core/notifications/__init__.py
index 8ac24dfb..5958fe66 100644
--- a/couchpotato/core/notifications/__init__.py
+++ b/couchpotato/core/notifications/__init__.py
@@ -1,4 +1,4 @@
-config = {
+config = [{
'name': 'notification_providers',
'groups': [
{
@@ -10,4 +10,4 @@ config = {
'options': [],
},
],
-}
+}]
diff --git a/couchpotato/core/notifications/email/__init__.py b/couchpotato/core/notifications/email/__init__.py
index b41cc8e6..33c2f634 100644
--- a/couchpotato/core/notifications/email/__init__.py
+++ b/couchpotato/core/notifications/email/__init__.py
@@ -28,12 +28,23 @@ config = [{
'name': 'smtp_server',
'label': 'SMTP server',
},
+ { 'name': 'smtp_port',
+ 'label': 'SMTP server port',
+ 'default': '25',
+ 'type': 'int',
+ },
{
'name': 'ssl',
'label': 'Enable SSL',
'default': 0,
'type': 'bool',
},
+ {
+ 'name': 'starttls',
+ 'label': 'Enable StartTLS',
+ 'default': 0,
+ 'type': 'bool',
+ },
{
'name': 'smtp_user',
'label': 'SMTP user',
diff --git a/couchpotato/core/notifications/email/main.py b/couchpotato/core/notifications/email/main.py
index f94688d5..c67ac97d 100644
--- a/couchpotato/core/notifications/email/main.py
+++ b/couchpotato/core/notifications/email/main.py
@@ -2,6 +2,7 @@ from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
+from couchpotato.environment import Env
from email.mime.text import MIMEText
import smtplib
import traceback
@@ -21,18 +22,28 @@ class Email(Notification):
smtp_server = self.conf('smtp_server')
smtp_user = self.conf('smtp_user')
smtp_pass = self.conf('smtp_pass')
+ smtp_port = self.conf('smtp_port')
+ starttls = self.conf('starttls')
# Make the basic message
- message = MIMEText(toUnicode(message))
+ message = MIMEText(toUnicode(message), _charset = Env.get('encoding'))
message['Subject'] = self.default_title
message['From'] = from_address
message['To'] = to_address
try:
# Open the SMTP connection, via SSL if requested
+ log.debug("Connecting to host %s on port %s" % (smtp_server, smtp_port))
log.debug("SMTP over SSL %s", ("enabled" if ssl == 1 else "disabled"))
mailserver = smtplib.SMTP_SSL(smtp_server) if ssl == 1 else smtplib.SMTP(smtp_server)
+ if (starttls):
+ log.debug("Using StartTLS to initiate the connection with the SMTP server")
+ mailserver.starttls()
+
+ # Say hello to the server
+ mailserver.ehlo()
+
# Check too see if an login attempt should be attempted
if len(smtp_user) > 0:
log.debug("Logging on to SMTP server using username \'%s\'%s", (smtp_user, " and a password" if len(smtp_pass) > 0 else ""))
diff --git a/couchpotato/core/notifications/notifo/__init__.py b/couchpotato/core/notifications/notifo/__init__.py
deleted file mode 100644
index 941246cc..00000000
--- a/couchpotato/core/notifications/notifo/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from .main import Notifo
-
-def start():
- return Notifo()
-
-config = [{
- 'name': 'notifo',
- 'groups': [
- {
- 'tab': 'notifications',
- 'list': 'notification_providers',
- 'name': 'notifo',
- 'description': 'Keep in mind that Notifo service will end soon.',
- 'options': [
- {
- 'name': 'enabled',
- 'default': 0,
- 'type': 'enabler',
- },
- {
- 'name': 'username',
- },
- {
- 'name': 'api_key',
- },
- {
- 'name': 'on_snatch',
- 'default': 0,
- 'type': 'bool',
- 'advanced': True,
- 'description': 'Also send message when movie is snatched.',
- },
- ],
- }
- ],
-}]
diff --git a/couchpotato/core/notifications/notifo/main.py b/couchpotato/core/notifications/notifo/main.py
deleted file mode 100644
index 2d56ed71..00000000
--- a/couchpotato/core/notifications/notifo/main.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from couchpotato.core.helpers.encoding import toUnicode
-from couchpotato.core.logger import CPLog
-from couchpotato.core.notifications.base import Notification
-import base64
-import json
-import traceback
-
-log = CPLog(__name__)
-
-
-class Notifo(Notification):
-
- url = 'https://api.notifo.com/v1/send_notification'
-
- def notify(self, message = '', data = None, listener = None):
- if not data: data = {}
-
- try:
- params = {
- 'label': self.default_title,
- 'msg': toUnicode(message),
- }
-
- headers = {
- 'Authorization': "Basic %s" % base64.encodestring('%s:%s' % (self.conf('username'), self.conf('api_key')))[:-1]
- }
-
- handle = self.urlopen(self.url, params = params, headers = headers)
- result = json.loads(handle)
-
- if result['status'] != 'success' or result['response_message'] != 'OK':
- raise Exception
-
- except:
- log.error('Notification failed: %s', traceback.format_exc())
- return False
-
- log.info('Notifo notification successful.')
- return True
diff --git a/couchpotato/core/notifications/plex/__init__.py b/couchpotato/core/notifications/plex/__init__.py
old mode 100644
new mode 100755
index c00ea6d4..d68ddb19
--- a/couchpotato/core/notifications/plex/__init__.py
+++ b/couchpotato/core/notifications/plex/__init__.py
@@ -17,10 +17,15 @@ config = [{
'type': 'enabler',
},
{
- 'name': 'host',
+ 'name': 'media_server',
+ 'label': 'Media Server',
'default': 'localhost',
- 'description': 'Default should be on localhost',
- 'advanced': True,
+ 'description': 'Hostname/IP, default localhost'
+ },
+ {
+ 'name': 'clients',
+ 'default': '',
+                'description': 'Comma separated list of client names (computer names). Top right when you start Plex'
},
{
'name': 'on_snatch',
diff --git a/couchpotato/core/notifications/plex/client.py b/couchpotato/core/notifications/plex/client.py
new file mode 100644
index 00000000..b873518e
--- /dev/null
+++ b/couchpotato/core/notifications/plex/client.py
@@ -0,0 +1,85 @@
+import json
+from couchpotato import CPLog
+from couchpotato.core.event import addEvent
+from couchpotato.core.helpers.encoding import tryUrlencode
+import requests
+
+log = CPLog(__name__)
+
+
+class PlexClientProtocol(object):
+ def __init__(self, plex):
+ self.plex = plex
+
+ addEvent('notify.plex.notifyClient', self.notify)
+
+ def notify(self, client, message):
+ raise NotImplementedError()
+
+
+class PlexClientHTTP(PlexClientProtocol):
+ def request(self, command, client):
+ url = 'http://%s:%s/xbmcCmds/xbmcHttp/?%s' % (
+ client['address'],
+ client['port'],
+ tryUrlencode(command)
+ )
+
+ headers = {}
+
+ try:
+ self.plex.urlopen(url, headers = headers, timeout = 3, show_error = False)
+ except Exception, err:
+ log.error("Couldn't sent command to Plex: %s", err)
+ return False
+
+ return True
+
+ def notify(self, client, message):
+ if client.get('protocol') != 'xbmchttp':
+ return None
+
+ data = {
+ 'command': 'ExecBuiltIn',
+ 'parameter': 'Notification(CouchPotato, %s)' % message
+ }
+
+ return self.request(data, client)
+
+
+class PlexClientJSON(PlexClientProtocol):
+ def request(self, method, params, client):
+ log.debug('sendJSON("%s", %s, %s)', (method, params, client))
+ url = 'http://%s:%s/jsonrpc' % (
+ client['address'],
+ client['port']
+ )
+
+ headers = {
+ 'Content-Type': 'application/json'
+ }
+
+ request = {
+ 'id': 1,
+ 'jsonrpc': '2.0',
+ 'method': method,
+ 'params': params
+ }
+
+ try:
+ requests.post(url, headers = headers, timeout = 3, data = json.dumps(request))
+ except Exception, err:
+ log.error("Couldn't sent command to Plex: %s", err)
+ return False
+
+ return True
+
+ def notify(self, client, message):
+ if client.get('protocol') not in ['xbmcjson', 'plex']:
+ return None
+
+ params = {
+ 'title': 'CouchPotato',
+ 'message': message
+ }
+ return self.request('GUI.ShowNotification', params, client)
diff --git a/couchpotato/core/notifications/plex/main.py b/couchpotato/core/notifications/plex/main.py
old mode 100644
new mode 100755
index f6088f5b..ce25c8f0
--- a/couchpotato/core/notifications/plex/main.py
+++ b/couchpotato/core/notifications/plex/main.py
@@ -1,78 +1,64 @@
-from couchpotato.core.event import addEvent
-from couchpotato.core.helpers.encoding import tryUrlencode
-from couchpotato.core.helpers.variable import cleanHost, splitString
+from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
-from urllib2 import URLError
-from urlparse import urlparse
-from xml.dom import minidom
-import traceback
+from .client import PlexClientHTTP, PlexClientJSON
+from .server import PlexServer
log = CPLog(__name__)
class Plex(Notification):
+ http_time_between_calls = 0
+
def __init__(self):
super(Plex, self).__init__()
+
+ self.server = PlexServer(self)
+
+ self.client_protocols = {
+ 'http': PlexClientHTTP(self),
+ 'json': PlexClientJSON(self)
+ }
+
addEvent('renamer.after', self.addToLibrary)
- def addToLibrary(self, message = None, group = None):
+
+ def addToLibrary(self, message = None, group = {}):
if self.isDisabled(): return
- if not group: group = {}
- log.info('Sending notification to Plex')
- hosts = self.getHosts(port = 32400)
+ return self.server.refresh()
- for host in hosts:
+ def getClientNames(self):
+ return [
+ x.strip().lower()
+ for x in self.conf('clients').split(',')
+ ]
- source_type = ['movie']
- base_url = '%s/library/sections' % host
- refresh_url = '%s/%%s/refresh' % base_url
+ def notifyClients(self, message, client_names):
+ success = True
- try:
- sections_xml = self.urlopen(base_url)
- xml_sections = minidom.parseString(sections_xml)
- sections = xml_sections.getElementsByTagName('Directory')
+ for client_name in client_names:
- for s in sections:
- if s.getAttribute('type') in source_type:
- url = refresh_url % s.getAttribute('key')
- self.urlopen(url)
+ client_success = False
+ client = self.server.clients.get(client_name)
- except:
- log.error('Plex library update failed for %s, Media Server not running: %s', (host, traceback.format_exc(1)))
- return False
+ if client and client['found']:
+ client_success = fireEvent('notify.plex.notifyClient', client, message, single = True)
- return True
+ if not client_success:
+ if self.server.staleClients() or not client:
+ log.info('Failed to send notification to client "%s". '
+ 'Client list is stale, updating the client list and retrying.', client_name)
+ self.server.updateClients(self.getClientNames())
+ else:
+ log.warning('Failed to send notification to client %s, skipping this time', client_name)
+ success = False
- def notify(self, message = '', data = None, listener = None):
- if not data: data = {}
+ return success
- hosts = self.getHosts(port = 3000)
- successful = 0
- for host in hosts:
- if self.send({'command': 'ExecBuiltIn', 'parameter': 'Notification(CouchPotato, %s)' % message}, host):
- successful += 1
-
- return successful == len(hosts)
-
- def send(self, command, host):
-
- url = '%s/xbmcCmds/xbmcHttp/?%s' % (host, tryUrlencode(command))
- headers = {}
-
- try:
- self.urlopen(url, headers = headers, show_error = False)
- except URLError:
- log.error("Couldn't sent command to Plex, probably just running Media Server")
- return False
- except:
- log.error("Couldn't sent command to Plex: %s", traceback.format_exc())
- return False
-
- log.info('Plex notification to %s successful.', host)
- return True
+ def notify(self, message = '', data = {}, listener = None):
+ return self.notifyClients(message, self.getClientNames())
def test(self, **kwargs):
@@ -80,28 +66,12 @@ class Plex(Notification):
log.info('Sending test to %s', test_type)
- success = self.notify(
+ notify_success = self.notify(
message = self.test_message,
data = {},
listener = 'test'
)
- success2 = self.addToLibrary()
- return {
- 'success': success or success2
- }
+ refresh_success = self.addToLibrary()
- def getHosts(self, port = None):
-
- raw_hosts = splitString(self.conf('host'))
- hosts = []
-
- for h in raw_hosts:
- h = cleanHost(h)
- p = urlparse(h)
- h = h.rstrip('/')
- if port and not p.port:
- h += ':%s' % port
- hosts.append(h)
-
- return hosts
+ return {'success': notify_success or refresh_success}
diff --git a/couchpotato/core/notifications/plex/server.py b/couchpotato/core/notifications/plex/server.py
new file mode 100644
index 00000000..b66db8fe
--- /dev/null
+++ b/couchpotato/core/notifications/plex/server.py
@@ -0,0 +1,114 @@
+from datetime import timedelta, datetime
+from couchpotato.core.helpers.variable import cleanHost
+from couchpotato import CPLog
+from urlparse import urlparse
+import traceback
+
+
+try:
+ import xml.etree.cElementTree as etree
+except ImportError:
+ import xml.etree.ElementTree as etree
+
+log = CPLog(__name__)
+
+
+class PlexServer(object):
+ def __init__(self, plex):
+ self.plex = plex
+
+ self.clients = {}
+ self.last_clients_update = None
+
+ def staleClients(self):
+ if not self.last_clients_update:
+ return True
+
+ return self.last_clients_update + timedelta(minutes=15) < datetime.now()
+
+ def request(self, path, data_type='xml'):
+ if not self.plex.conf('media_server'):
+ log.warning("Plex media server hostname is required")
+ return None
+
+ if path.startswith('/'):
+ path = path[1:]
+
+ data = self.plex.urlopen('%s/%s' % (
+ self.createHost(self.plex.conf('media_server'), port = 32400),
+ path
+ ))
+
+ if data_type == 'xml':
+ return etree.fromstring(data)
+ else:
+ return data
+
+ def updateClients(self, client_names):
+ log.info('Searching for clients on Plex Media Server')
+
+ self.clients = {}
+
+ result = self.request('clients')
+ if not result:
+ return
+
+ found_clients = [
+ c for c in result.findall('Server')
+ if c.get('name') and c.get('name').lower() in client_names
+ ]
+
+ # Store client details in cache
+ for client in found_clients:
+ name = client.get('name').lower()
+
+ self.clients[name] = {
+ 'name': client.get('name'),
+ 'found': True,
+ 'address': client.get('address'),
+ 'port': client.get('port'),
+ 'protocol': client.get('protocol', 'xbmchttp')
+ }
+
+ client_names.remove(name)
+
+ # Store dummy info for missing clients
+ for client_name in client_names:
+ self.clients[client_name] = {
+ 'found': False
+ }
+
+ if len(client_names) > 0:
+ log.debug('Unable to find clients: %s', ', '.join(client_names))
+
+ self.last_clients_update = datetime.now()
+
+ def refresh(self, section_types=None):
+ if not section_types:
+ section_types = ['movie']
+
+ sections = self.request('library/sections')
+
+ try:
+ for section in sections.findall('Directory'):
+ if section.get('type') not in section_types:
+ continue
+
+ self.request('library/sections/%s/refresh' % section.get('key'), 'text')
+ except:
+ log.error('Plex library update failed for %s, Media Server not running: %s',
+ (self.plex.conf('media_server'), traceback.format_exc(1)))
+ return False
+
+ return True
+
+ def createHost(self, host, port = None):
+
+ h = cleanHost(host)
+ p = urlparse(h)
+ h = h.rstrip('/')
+
+ if port and not p.port:
+ h += ':%s' % port
+
+ return h
diff --git a/couchpotato/core/notifications/xmpp/__init__.py b/couchpotato/core/notifications/xmpp/__init__.py
new file mode 100644
index 00000000..a52242ff
--- /dev/null
+++ b/couchpotato/core/notifications/xmpp/__init__.py
@@ -0,0 +1,52 @@
+from .main import Xmpp
+
+def start():
+ return Xmpp()
+
+config = [{
+ 'name': 'xmpp',
+ 'groups': [
+ {
+ 'tab': 'notifications',
+ 'list': 'notification_providers',
+ 'name': 'xmpp',
+ 'label': 'XMPP',
+            'description': 'for Jabber, Hangouts (Google Talk), AIM...',
+ 'options': [
+ {
+ 'name': 'enabled',
+ 'default': 0,
+ 'type': 'enabler',
+ },
+ {
+ 'name': 'username',
+ 'description': 'User sending the message. For Hangouts, e-mail of a single-step authentication Google account.',
+ },
+ {
+ 'name': 'password',
+ 'type': 'Password',
+ },
+ {
+ 'name': 'hostname',
+ 'default': 'talk.google.com',
+ },
+ {
+ 'name': 'to',
+ 'description': 'Username (or e-mail for Hangouts) of the person to send the messages to.',
+ },
+ {
+ 'name': 'port',
+ 'type': 'int',
+ 'default': 5222,
+ },
+ {
+ 'name': 'on_snatch',
+ 'default': 0,
+ 'type': 'bool',
+ 'advanced': True,
+ 'description': 'Also send message when movie is snatched.',
+ },
+ ],
+ }
+ ],
+}]
diff --git a/couchpotato/core/notifications/xmpp/main.py b/couchpotato/core/notifications/xmpp/main.py
new file mode 100644
index 00000000..0011e41c
--- /dev/null
+++ b/couchpotato/core/notifications/xmpp/main.py
@@ -0,0 +1,43 @@
+from couchpotato.core.logger import CPLog
+from couchpotato.core.notifications.base import Notification
+from time import sleep
+import traceback
+import xmpp
+
+log = CPLog(__name__)
+
+
+class Xmpp(Notification):
+
+ def notify(self, message = '', data = None, listener = None):
+ if not data: data = {}
+
+ try:
+ jid = xmpp.protocol.JID(self.conf('username'))
+ client = xmpp.Client(jid.getDomain(), debug = [])
+
+ # Connect
+ if not client.connect(server = (self.conf('hostname'), self.conf('port'))):
+ log.error('XMPP failed: Connection to server failed.')
+ return False
+
+ # Authenticate
+ if not client.auth(jid.getNode(), self.conf('password'), resource = jid.getResource()):
+ log.error('XMPP failed: Failed to authenticate.')
+ return False
+
+ # Send message
+ client.send(xmpp.protocol.Message(to = self.conf('to'), body = message, typ = 'chat'))
+
+ # Disconnect
+ # some older servers will not send the message if you disconnect immediately after sending
+ sleep(1)
+ client.disconnect()
+
+ log.info('XMPP notifications sent.')
+ return True
+
+ except:
+ log.error('XMPP failed: %s', traceback.format_exc())
+
+ return False
diff --git a/couchpotato/core/plugins/base.py b/couchpotato/core/plugins/base.py
index ce7c1b49..649e359d 100644
--- a/couchpotato/core/plugins/base.py
+++ b/couchpotato/core/plugins/base.py
@@ -1,7 +1,7 @@
from StringIO import StringIO
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import tryUrlencode, ss, toSafeString, \
- toUnicode
+ toUnicode, sp
from couchpotato.core.helpers.variable import getExt, md5, isLocalIP
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
@@ -121,7 +121,7 @@ class Plugin(object):
# http request
def urlopen(self, url, timeout = 30, params = None, headers = None, opener = None, multipart = False, show_error = True):
- url = ss(url)
+ url = urllib2.quote(ss(url), safe = "%/:=&?~#+!$,;'@()*[]")
if not headers: headers = {}
if not params: params = {}
@@ -291,10 +291,10 @@ class Plugin(object):
def createNzbName(self, data, movie):
tag = self.cpTag(movie)
- return '%s%s' % (toSafeString(data.get('name')[:127 - len(tag)]), tag)
+ return '%s%s' % (toSafeString(toUnicode(data.get('name'))[:127 - len(tag)]), tag)
def createFileName(self, data, filedata, movie):
- name = os.path.join(self.createNzbName(data, movie))
+ name = sp(os.path.join(self.createNzbName(data, movie)))
if data.get('protocol') == 'nzb' and 'DOCTYPE nzb' not in filedata and '</nzb>' not in filedata:
return '%s.%s' % (name, 'rar')
return '%s.%s' % (name, data.get('protocol'))
diff --git a/couchpotato/core/plugins/category/main.py b/couchpotato/core/plugins/category/main.py
index d13a74a3..87cd0ea4 100644
--- a/couchpotato/core/plugins/category/main.py
+++ b/couchpotato/core/plugins/category/main.py
@@ -4,7 +4,7 @@ from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
-from couchpotato.core.settings.model import Movie, Category
+from couchpotato.core.settings.model import Media, Category
log = CPLog(__name__)
@@ -113,7 +113,7 @@ class CategoryPlugin(Plugin):
def removeFromMovie(self, category_id):
db = get_session()
- movies = db.query(Movie).filter(Movie.category_id == category_id).all()
+ movies = db.query(Media).filter(Media.category_id == category_id).all()
if len(movies) > 0:
for movie in movies:
diff --git a/couchpotato/core/plugins/custom/__init__.py b/couchpotato/core/plugins/custom/__init__.py
new file mode 100644
index 00000000..573cd99f
--- /dev/null
+++ b/couchpotato/core/plugins/custom/__init__.py
@@ -0,0 +1,6 @@
+from .main import Custom
+
+def start():
+ return Custom()
+
+config = []
diff --git a/couchpotato/core/plugins/custom/main.py b/couchpotato/core/plugins/custom/main.py
new file mode 100644
index 00000000..a15c915c
--- /dev/null
+++ b/couchpotato/core/plugins/custom/main.py
@@ -0,0 +1,21 @@
+from couchpotato.core.event import addEvent
+from couchpotato.core.logger import CPLog
+from couchpotato.core.plugins.base import Plugin
+from couchpotato.environment import Env
+import os
+
+log = CPLog(__name__)
+
+
+class Custom(Plugin):
+
+ def __init__(self):
+ addEvent('app.load', self.createStructure)
+
+ def createStructure(self):
+
+ custom_dir = os.path.join(Env.get('data_dir'), 'custom_plugins')
+
+ if not os.path.isdir(custom_dir):
+ self.makeDir(custom_dir)
+ self.createFile(os.path.join(custom_dir, '__init__.py'), '# Don\'t remove this file')
diff --git a/couchpotato/core/plugins/dashboard/main.py b/couchpotato/core/plugins/dashboard/main.py
index f006ac41..4f4d85ab 100644
--- a/couchpotato/core/plugins/dashboard/main.py
+++ b/couchpotato/core/plugins/dashboard/main.py
@@ -4,7 +4,7 @@ from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.variable import splitString, tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
-from couchpotato.core.settings.model import Movie, Library, LibraryTitle, \
+from couchpotato.core.settings.model import Media, Library, LibraryTitle, \
Release
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql.expression import asc, or_
@@ -50,12 +50,12 @@ class Dashboard(Plugin):
# Get all active movies
active_status, ignored_status = fireEvent('status.get', ['active', 'ignored'], single = True)
- q = db.query(Movie) \
+ q = db.query(Media) \
.join(Library) \
- .outerjoin(Movie.releases) \
- .filter(Movie.status_id == active_status.get('id')) \
- .with_entities(Movie.id, Movie.profile_id, Library.info, Library.year) \
- .group_by(Movie.id) \
+ .outerjoin(Media.releases) \
+ .filter(Media.status_id == active_status.get('id')) \
+ .with_entities(Media.id, Media.profile_id, Library.info, Library.year) \
+ .group_by(Media.id) \
.filter(or_(Release.id == None, Release.status_id == ignored_status.get('id')))
if not random:
@@ -101,11 +101,11 @@ class Dashboard(Plugin):
if len(movie_ids) > 0:
# Get all movie information
- movies_raw = db.query(Movie) \
+ movies_raw = db.query(Media) \
.options(joinedload_all('library.titles')) \
.options(joinedload_all('library.files')) \
.options(joinedload_all('files')) \
- .filter(Movie.id.in_(movie_ids)) \
+ .filter(Media.id.in_(movie_ids)) \
.all()
# Create dict by movie id
diff --git a/couchpotato/core/plugins/manage/main.py b/couchpotato/core/plugins/manage/main.py
index 702b1293..e8ccaf7e 100644
--- a/couchpotato/core/plugins/manage/main.py
+++ b/couchpotato/core/plugins/manage/main.py
@@ -222,9 +222,10 @@ class Manage(Plugin):
groups = fireEvent('scanner.scan', folder = folder, files = files, single = True)
- for group in groups.itervalues():
- if group['library'] and group['library'].get('identifier'):
- fireEvent('release.add', group = group)
+ if groups:
+ for group in groups.itervalues():
+ if group['library'] and group['library'].get('identifier'):
+ fireEvent('release.add', group = group)
def getDiskSpace(self):
diff --git a/couchpotato/core/plugins/profile/main.py b/couchpotato/core/plugins/profile/main.py
index 68ab9360..9ff3ead2 100644
--- a/couchpotato/core/plugins/profile/main.py
+++ b/couchpotato/core/plugins/profile/main.py
@@ -4,7 +4,7 @@ from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
-from couchpotato.core.settings.model import Profile, ProfileType, Movie
+from couchpotato.core.settings.model import Profile, ProfileType, Media
from sqlalchemy.orm import joinedload_all
log = CPLog(__name__)
@@ -38,7 +38,7 @@ class ProfilePlugin(Plugin):
active_status = fireEvent('status.get', 'active', single = True)
db = get_session()
- movies = db.query(Movie).filter(Movie.status_id == active_status.get('id'), Movie.profile == None).all()
+ movies = db.query(Media).filter(Media.status_id == active_status.get('id'), Media.profile == None).all()
if len(movies) > 0:
default_profile = self.default()
diff --git a/couchpotato/core/plugins/quality/main.py b/couchpotato/core/plugins/quality/main.py
index 1149c036..0c0636e6 100644
--- a/couchpotato/core/plugins/quality/main.py
+++ b/couchpotato/core/plugins/quality/main.py
@@ -1,7 +1,7 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
-from couchpotato.core.helpers.encoding import toUnicode
+from couchpotato.core.helpers.encoding import toUnicode, ss
from couchpotato.core.helpers.variable import mergeDicts, md5, getExt
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
@@ -17,12 +17,12 @@ class QualityPlugin(Plugin):
qualities = [
{'identifier': 'bd50', 'hd': True, 'size': (15000, 60000), 'label': 'BR-Disk', 'alternative': ['bd25'], 'allow': ['1080p'], 'ext':[], 'tags': ['bdmv', 'certificate', ('complete', 'bluray')]},
- {'identifier': '1080p', 'hd': True, 'size': (4000, 20000), 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts'], 'tags': ['m2ts']},
- {'identifier': '720p', 'hd': True, 'size': (3000, 10000), 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts']},
+ {'identifier': '1080p', 'hd': True, 'size': (4000, 20000), 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts'], 'tags': ['m2ts', 'x264', 'h264']},
+ {'identifier': '720p', 'hd': True, 'size': (3000, 10000), 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts'], 'tags': ['x264', 'h264']},
{'identifier': 'brrip', 'hd': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip'], 'allow': ['720p', '1080p'], 'ext':['avi'], 'tags': ['hdtv', 'hdrip', 'webdl', ('web', 'dl')]},
- {'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': [], 'allow': [], 'ext':['iso', 'img'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts']},
+ {'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': ['br2dvd'], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r')]},
{'identifier': 'dvdrip', 'size': (600, 2400), 'label': 'DVD-Rip', 'width': 720, 'alternative': [], 'allow': [], 'ext':['avi', 'mpg', 'mpeg'], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]},
- {'identifier': 'scr', 'size': (600, 1600), 'label': 'Screener', 'alternative': ['screener', 'dvdscr', 'ppvrip', 'dvdscreener', 'hdscr'], 'allow': ['dvdr', 'dvd'], 'ext':['avi', 'mpg', 'mpeg'], 'tags': ['webrip', ('web', 'rip')]},
+ {'identifier': 'scr', 'size': (600, 1600), 'label': 'Screener', 'alternative': ['screener', 'dvdscr', 'ppvrip', 'dvdscreener', 'hdscr'], 'allow': ['dvdr', 'dvdrip', '720p', '1080p'], 'ext':['avi', 'mpg', 'mpeg'], 'tags': ['webrip', ('web', 'rip')]},
{'identifier': 'r5', 'size': (600, 1000), 'label': 'R5', 'alternative': ['r6'], 'allow': ['dvdr'], 'ext':['avi', 'mpg', 'mpeg']},
{'identifier': 'tc', 'size': (600, 1000), 'label': 'TeleCine', 'alternative': ['telecine'], 'allow': [], 'ext':['avi', 'mpg', 'mpeg']},
{'identifier': 'ts', 'size': (600, 1000), 'label': 'TeleSync', 'alternative': ['telesync', 'hdts'], 'allow': [], 'ext':['avi', 'mpg', 'mpeg']},
@@ -30,6 +30,9 @@ class QualityPlugin(Plugin):
]
pre_releases = ['cam', 'ts', 'tc', 'r5', 'scr']
+ cached_qualities = None
+ cached_order = None
+
def __init__(self):
addEvent('quality.all', self.all)
addEvent('quality.single', self.single)
@@ -59,6 +62,9 @@ class QualityPlugin(Plugin):
def all(self):
+ if self.cached_qualities:
+ return self.cached_qualities
+
db = get_session()
qualities = db.query(Quality).all()
@@ -68,6 +74,7 @@ class QualityPlugin(Plugin):
q = mergeDicts(self.getQuality(quality.identifier), quality.to_dict())
temp.append(q)
+ self.cached_qualities = temp
return temp
def single(self, identifier = ''):
@@ -96,6 +103,8 @@ class QualityPlugin(Plugin):
setattr(quality, kwargs.get('value_type'), kwargs.get('value'))
db.commit()
+ self.cached_qualities = None
+
return {
'success': True
}
@@ -161,68 +170,118 @@ class QualityPlugin(Plugin):
if cached and len(extra) == 0: return cached
qualities = self.all()
+
+ # Start with 0
+ score = {}
+ for quality in qualities:
+ score[quality.get('identifier')] = 0
+
for cur_file in files:
words = re.split('\W+', cur_file.lower())
- found = {}
for quality in qualities:
- contains = self.containsTag(quality, words, cur_file)
- if contains:
- found[quality['identifier']] = True
-
- for quality in qualities:
-
- # Check identifier
- if quality['identifier'] in words:
- if len(found) == 0 or len(found) == 1 and found.get(quality['identifier']):
- log.debug('Found via identifier "%s" in %s', (quality['identifier'], cur_file))
- return self.setCache(cache_key, quality)
-
- # Check alt and tags
- contains = self.containsTag(quality, words, cur_file)
- if contains:
- return self.setCache(cache_key, quality)
+ contains_score = self.containsTagScore(quality, words, cur_file)
+ self.calcScore(score, quality, contains_score)
# Try again with loose testing
- quality = self.guessLoose(cache_key, files = files, extra = extra)
- if quality:
- return self.setCache(cache_key, quality)
+ for quality in qualities:
+ loose_score = self.guessLooseScore(quality, files = files, extra = extra)
+ self.calcScore(score, quality, loose_score)
+
+
+ # Return nothing if all scores are 0
+ has_non_zero = 0
+ for s in score:
+ if score[s] > 0:
+ has_non_zero += 1
+
+ if not has_non_zero:
+ return None
+
+ heighest_quality = max(score, key = score.get)
+ if heighest_quality:
+ for quality in qualities:
+ if quality.get('identifier') == heighest_quality:
+ return self.setCache(cache_key, quality)
- log.debug('Could not identify quality for: %s', files)
return None
- def containsTag(self, quality, words, cur_file = ''):
+ def containsTagScore(self, quality, words, cur_file = ''):
+ cur_file = ss(cur_file)
+ score = 0
+
+ points = {
+ 'identifier': 10,
+ 'label': 10,
+ 'alternative': 9,
+ 'tags': 9,
+ 'ext': 3,
+ }
# Check alt and tags
- for tag_type in ['alternative', 'tags']:
- for alt in quality.get(tag_type, []):
- if isinstance(alt, tuple) and '.'.join(alt) in '.'.join(words):
+ for tag_type in ['identifier', 'alternative', 'tags', 'label']:
+ qualities = quality.get(tag_type, [])
+ qualities = [qualities] if isinstance(qualities, (str, unicode)) else qualities
+
+ for alt in qualities:
+ if (isinstance(alt, tuple)):
+ if len(set(words) & set(alt)) == len(alt):
+ log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
+ score += points.get(tag_type)
+ elif len(set(words) & set(alt)) > 0:
+ partial = list(set(words) & set(alt))[0]
+ if len(partial) > 2:
+ log.debug('Found %s via partial %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
+ score += points.get(tag_type) / 3
+
+ if (isinstance(alt, (str, unicode)) and ss(alt.lower()) in cur_file.lower()):
log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
- return True
+ score += points.get(tag_type) / 2
- if list(set(quality.get(tag_type, [])) & set(words)):
+ if list(set(qualities) & set(words)):
log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file))
- return True
+ score += points.get(tag_type)
- return
+ # Check extension
+ for ext in quality.get('ext', []):
+ if ext == words[-1]:
+ log.debug('Found %s extension in %s', (ext, cur_file))
+ score += points['ext']
- def guessLoose(self, cache_key, files = None, extra = None):
+ return score
+
+ def guessLooseScore(self, quality, files = None, extra = None):
+
+ score = 0
if extra:
- for quality in self.all():
- # Check width resolution, range 20
- if quality.get('width') and (quality.get('width') - 20) <= extra.get('resolution_width', 0) <= (quality.get('width') + 20):
- log.debug('Found %s via resolution_width: %s == %s', (quality['identifier'], quality.get('width'), extra.get('resolution_width', 0)))
- return self.setCache(cache_key, quality)
+ # Check width resolution, range 20
+ if quality.get('width') and (quality.get('width') - 20) <= extra.get('resolution_width', 0) <= (quality.get('width') + 20):
+ log.debug('Found %s via resolution_width: %s == %s', (quality['identifier'], quality.get('width'), extra.get('resolution_width', 0)))
+ score += 5
- # Check height resolution, range 20
- if quality.get('height') and (quality.get('height') - 20) <= extra.get('resolution_height', 0) <= (quality.get('height') + 20):
- log.debug('Found %s via resolution_height: %s == %s', (quality['identifier'], quality.get('height'), extra.get('resolution_height', 0)))
- return self.setCache(cache_key, quality)
+ # Check height resolution, range 20
+ if quality.get('height') and (quality.get('height') - 20) <= extra.get('resolution_height', 0) <= (quality.get('height') + 20):
+ log.debug('Found %s via resolution_height: %s == %s', (quality['identifier'], quality.get('height'), extra.get('resolution_height', 0)))
+ score += 5
- if 480 <= extra.get('resolution_width', 0) <= 720:
- log.debug('Found as dvdrip')
- return self.setCache(cache_key, self.single('dvdrip'))
+ if quality.get('identifier') == 'dvdrip' and 480 <= extra.get('resolution_width', 0) <= 720:
+ log.debug('Add point for correct dvdrip resolutions')
+ score += 1
- return None
+ return score
+
+ def calcScore(self, score, quality, add_score):
+
+ score[quality['identifier']] += add_score
+
+ # Set order for allow calculation (and cache)
+ if not self.cached_order:
+ self.cached_order = {}
+ for q in self.qualities:
+ self.cached_order[q.get('identifier')] = self.qualities.index(q)
+
+ if add_score != 0:
+ for allow in quality.get('allow', []):
+ score[allow] -= 40 if self.cached_order[allow] < self.cached_order[quality['identifier']] else 5
diff --git a/couchpotato/core/plugins/release/main.py b/couchpotato/core/plugins/release/main.py
index 46857adf..009a60e9 100644
--- a/couchpotato/core/plugins/release/main.py
+++ b/couchpotato/core/plugins/release/main.py
@@ -1,14 +1,20 @@
-from couchpotato import get_session
+from couchpotato import get_session, md5
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent
-from couchpotato.core.helpers.encoding import ss
+from couchpotato.core.helpers.encoding import ss, toUnicode
+from couchpotato.core.helpers.variable import getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.plugins.scanner.main import Scanner
-from couchpotato.core.settings.model import File, Release as Relea, Movie
+from couchpotato.core.settings.model import File, Release as Relea, Media, \
+ ReleaseInfo
+from couchpotato.environment import Env
+from inspect import ismethod, isfunction
+from sqlalchemy.exc import InterfaceError
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql.expression import and_, or_
import os
+import time
import traceback
log = CPLog(__name__)
@@ -19,7 +25,7 @@ class Release(Plugin):
def __init__(self):
addEvent('release.add', self.add)
- addApiView('release.download', self.download, docs = {
+ addApiView('release.manual_download', self.manualDownload, docs = {
'desc': 'Send a release manually to the downloaders',
'params': {
'id': {'type': 'id', 'desc': 'ID of the release object in release-table'}
@@ -44,9 +50,45 @@ class Release(Plugin):
}
})
+ addEvent('release.download', self.download)
+ addEvent('release.try_download_result', self.tryDownloadResult)
+ addEvent('release.create_from_search', self.createFromSearch)
addEvent('release.for_movie', self.forMovie)
addEvent('release.delete', self.delete)
addEvent('release.clean', self.clean)
+ addEvent('release.update_status', self.updateStatus)
+
+ # Clean releases that didn't have activity in the last week
+ addEvent('app.load', self.cleanDone)
+ fireEvent('schedule.interval', 'movie.clean_releases', self.cleanDone, hours = 4)
+
+ def cleanDone(self):
+
+ log.debug('Removing releases from dashboard')
+
+ now = time.time()
+ week = 262080
+
+ done_status, available_status, snatched_status, downloaded_status, ignored_status = \
+ fireEvent('status.get', ['done', 'available', 'snatched', 'downloaded', 'ignored'], single = True)
+
+ db = get_session()
+
+ # get movies last_edit more than a week ago
+ media = db.query(Media) \
+ .filter(Media.status_id == done_status.get('id'), Media.last_edit < (now - week)) \
+ .all()
+
+ for item in media:
+ for rel in item.releases:
+ # Remove all available releases
+ if rel.status_id in [available_status.get('id')]:
+ fireEvent('release.delete', id = rel.id, single = True)
+ # Set all snatched and downloaded releases to ignored to make sure they are ignored when re-adding the movie
+ elif rel.status_id in [snatched_status.get('id'), downloaded_status.get('id')]:
+ self.updateStatus(id = rel.id, status = ignored_status)
+
+ db.expire_all()
def add(self, group):
@@ -58,9 +100,9 @@ class Release(Plugin):
done_status, snatched_status = fireEvent('status.get', ['done', 'snatched'], single = True)
# Add movie
- movie = db.query(Movie).filter_by(library_id = group['library'].get('id')).first()
+ movie = db.query(Media).filter_by(library_id = group['library'].get('id')).first()
if not movie:
- movie = Movie(
+ movie = Media(
library_id = group['library'].get('id'),
profile_id = 0,
status_id = done_status.get('id')
@@ -104,7 +146,6 @@ class Release(Plugin):
return True
-
def saveFile(self, filepath, type = 'unknown', include_media_info = False):
properties = {}
@@ -159,26 +200,23 @@ class Release(Plugin):
rel = db.query(Relea).filter_by(id = id).first()
if rel:
ignored_status, failed_status, available_status = fireEvent('status.get', ['ignored', 'failed', 'available'], single = True)
- rel.status_id = available_status.get('id') if rel.status_id in [ignored_status.get('id'), failed_status.get('id')] else ignored_status.get('id')
- db.commit()
+ self.updateStatus(id, available_status if rel.status_id in [ignored_status.get('id'), failed_status.get('id')] else ignored_status)
return {
'success': True
}
- def download(self, id = None, **kwargs):
+ def manualDownload(self, id = None, **kwargs):
db = get_session()
- snatched_status, done_status = fireEvent('status.get', ['snatched', 'done'], single = True)
-
rel = db.query(Relea).filter_by(id = id).first()
if rel:
item = {}
for info in rel.info:
item[info.identifier] = info.value
- fireEvent('notify.frontend', type = 'release.download', data = True, message = 'Snatching "%s"' % item['name'])
+ fireEvent('notify.frontend', type = 'release.manual_download', data = True, message = 'Snatching "%s"' % item['name'])
# Get matching provider
provider = fireEvent('provider.belongs_to', item['url'], provider = item.get('provider'), single = True)
@@ -190,23 +228,18 @@ class Release(Plugin):
if item.get('protocol') != 'torrent_magnet':
item['download'] = provider.loginDownload if provider.urls.get('login') else provider.download
- success = fireEvent('searcher.download', data = item, movie = rel.movie.to_dict({
+ success = self.download(data = item, media = rel.movie.to_dict({
'profile': {'types': {'quality': {}}},
'releases': {'status': {}, 'quality': {}},
'library': {'titles': {}, 'files':{}},
'files': {}
- }), manual = True, single = True)
+ }), manual = True)
if success:
db.expunge_all()
- rel = db.query(Relea).filter_by(id = id).first() # Get release again
-
- if rel.status_id != done_status.get('id'):
- rel.status_id = snatched_status.get('id')
- db.commit()
-
- fireEvent('notify.frontend', type = 'release.download', data = True, message = 'Successfully snatched "%s"' % item['name'])
+ rel = db.query(Relea).filter_by(id = id).first() # Get release again @RuudBurger why do we need to get it again??
+ fireEvent('notify.frontend', type = 'release.manual_download', data = True, message = 'Successfully snatched "%s"' % item['name'])
return {
'success': success
}
@@ -217,6 +250,152 @@ class Release(Plugin):
'success': False
}
+ def download(self, data, media, manual = False):
+
+ if not data.get('protocol'):
+ data['protocol'] = data['type']
+ data['type'] = 'movie'
+
+ # Test to see if any downloaders are enabled for this type
+ downloader_enabled = fireEvent('download.enabled', manual, data, single = True)
+
+ if downloader_enabled:
+ snatched_status, done_status, active_status = fireEvent('status.get', ['snatched', 'done', 'active'], single = True)
+
+ # Download release to temp
+ filedata = None
+ if data.get('download') and (ismethod(data.get('download')) or isfunction(data.get('download'))):
+ filedata = data.get('download')(url = data.get('url'), nzb_id = data.get('id'))
+ if filedata == 'try_next':
+ return filedata
+
+ download_result = fireEvent('download', data = data, movie = media, manual = manual, filedata = filedata, single = True)
+ log.debug('Downloader result: %s', download_result)
+
+ if download_result:
+ try:
+ # Mark release as snatched
+ db = get_session()
+ rls = db.query(Relea).filter_by(identifier = md5(data['url'])).first()
+ if rls:
+ renamer_enabled = Env.setting('enabled', 'renamer')
+
+ # Save download-id info if returned
+ if isinstance(download_result, dict):
+ for key in download_result:
+ rls_info = ReleaseInfo(
+ identifier = 'download_%s' % key,
+ value = toUnicode(download_result.get(key))
+ )
+ rls.info.append(rls_info)
+ db.commit()
+
+ log_movie = '%s (%s) in %s' % (getTitle(media['library']), media['library']['year'], rls.quality.label)
+ snatch_message = 'Snatched "%s": %s' % (data.get('name'), log_movie)
+ log.info(snatch_message)
+ fireEvent('%s.snatched' % data['type'], message = snatch_message, data = rls.to_dict())
+
+ # If renamer isn't used, mark media done
+ if not renamer_enabled:
+ try:
+ if media['status_id'] == active_status.get('id'):
+ for profile_type in media['profile']['types']:
+ if profile_type['quality_id'] == rls.quality.id and profile_type['finish']:
+ log.info('Renamer disabled, marking media as finished: %s', log_movie)
+
+ # Mark release done
+ self.updateStatus(rls.id, status = done_status)
+
+ # Mark media done
+ mdia = db.query(Media).filter_by(id = media['id']).first()
+ mdia.status_id = done_status.get('id')
+ mdia.last_edit = int(time.time())
+ db.commit()
+ except:
+ log.error('Failed marking media finished, renamer disabled: %s', traceback.format_exc())
+ else:
+ self.updateStatus(rls.id, status = snatched_status)
+
+ except:
+ log.error('Failed marking media finished: %s', traceback.format_exc())
+
+ return True
+
+ log.info('Tried to download, but none of the "%s" downloaders are enabled or gave an error', (data.get('protocol')))
+
+ return False
+
+ def tryDownloadResult(self, results, media, quality_type, manual = False):
+ ignored_status, failed_status = fireEvent('status.get', ['ignored', 'failed'], single = True)
+
+ for rel in results:
+ if not quality_type.get('finish', False) and quality_type.get('wait_for', 0) > 0 and rel.get('age') <= quality_type.get('wait_for', 0):
+ log.info('Ignored, waiting %s days: %s', (quality_type.get('wait_for'), rel['name']))
+ continue
+
+ if rel['status_id'] in [ignored_status.get('id'), failed_status.get('id')]:
+ log.info('Ignored: %s', rel['name'])
+ continue
+
+ if rel['score'] <= 0:
+ log.info('Ignored, score to low: %s', rel['name'])
+ continue
+
+ downloaded = fireEvent('release.download', data = rel, media = media, manual = manual, single = True)
+ if downloaded is True:
+ return True
+ elif downloaded != 'try_next':
+ break
+
+ return False
+
+ def createFromSearch(self, search_results, media, quality_type):
+
+ available_status = fireEvent('status.get', ['available'], single = True)
+ db = get_session()
+
+ found_releases = []
+
+ for rel in search_results:
+
+ rel_identifier = md5(rel['url'])
+ found_releases.append(rel_identifier)
+
+ rls = db.query(Relea).filter_by(identifier = rel_identifier).first()
+ if not rls:
+ rls = Relea(
+ identifier = rel_identifier,
+ movie_id = media.get('id'),
+ #media_id = media.get('id'),
+ quality_id = quality_type.get('quality_id'),
+ status_id = available_status.get('id')
+ )
+ db.add(rls)
+ else:
+ [db.delete(old_info) for old_info in rls.info]
+ rls.last_edit = int(time.time())
+
+ db.commit()
+
+ for info in rel:
+ try:
+ if not isinstance(rel[info], (str, unicode, int, long, float)):
+ continue
+
+ rls_info = ReleaseInfo(
+ identifier = info,
+ value = toUnicode(rel[info])
+ )
+ rls.info.append(rls_info)
+ except InterfaceError:
+ log.debug('Couldn\'t add %s to ReleaseInfo: %s', (info, traceback.format_exc()))
+
+ db.commit()
+
+ rel['status_id'] = rls.status_id
+
+ return found_releases
+
def forMovie(self, id = None):
db = get_session()
@@ -241,3 +420,32 @@ class Release(Plugin):
'success': True
}
+ def updateStatus(self, id, status = None):
+ if not status: return False
+
+ db = get_session()
+
+ rel = db.query(Relea).filter_by(id = id).first()
+ if rel and status and rel.status_id != status.get('id'):
+
+ item = {}
+ for info in rel.info:
+ item[info.identifier] = info.value
+
+ if rel.files:
+ for file_item in rel.files:
+ if file_item.type.identifier == 'movie':
+ release_name = os.path.basename(file_item.path)
+ break
+ else:
+ release_name = item['name']
+ # Update status in DB
+ log.debug('Marking release %s as %s', (release_name, status.get("label")))
+ rel.status_id = status.get('id')
+ rel.last_edit = int(time.time())
+ db.commit()
+
+ # Update all movie info as there is no release update function
+ fireEvent('notify.frontend', type = 'release.update_status.%s' % rel.id, data = status.get('id'))
+
+ return True
diff --git a/couchpotato/core/plugins/renamer/__init__.py b/couchpotato/core/plugins/renamer/__init__.py
index 921b3e1e..c8f6b37f 100755
--- a/couchpotato/core/plugins/renamer/__init__.py
+++ b/couchpotato/core/plugins/renamer/__init__.py
@@ -28,6 +28,7 @@ rename_options = {
'cd': 'CD number (cd1)',
'cd_nr': 'Just the cd nr. (1)',
'mpaa': 'MPAA Rating',
+ 'category': 'Category label',
},
}
diff --git a/couchpotato/core/plugins/renamer/main.py b/couchpotato/core/plugins/renamer/main.py
index ad7df1cf..bbe79a39 100755
--- a/couchpotato/core/plugins/renamer/main.py
+++ b/couchpotato/core/plugins/renamer/main.py
@@ -1,9 +1,9 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync
-from couchpotato.core.helpers.encoding import toUnicode, ss
+from couchpotato.core.helpers.encoding import toUnicode, ss, sp
from couchpotato.core.helpers.variable import getExt, mergeDicts, getTitle, \
- getImdb, link, symlink, tryInt
+ getImdb, link, symlink, tryInt, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Library, File, Profile, Release, \
@@ -31,8 +31,9 @@ class Renamer(Plugin):
'params': {
'async': {'desc': 'Optional: Set to 1 if you dont want to fire the renamer.scan asynchronous.'},
'movie_folder': {'desc': 'Optional: The folder of the movie to scan. Keep empty for default renamer folder.'},
- 'downloader' : {'desc': 'Optional: The downloader this movie has been downloaded with'},
- 'download_id': {'desc': 'Optional: The downloader\'s nzb/torrent ID'},
+ 'downloader' : {'desc': 'Optional: The downloader the release has been downloaded with. \'download_id\' is required with this option.'},
+ 'download_id': {'desc': 'Optional: The nzb/torrent ID of the release in movie_folder. \'downloader\' is required with this option.'},
+ 'status': {'desc': 'Optional: The status of the release: \'completed\' (default) or \'seeding\''},
},
})
@@ -62,23 +63,24 @@ class Renamer(Plugin):
def scanView(self, **kwargs):
async = tryInt(kwargs.get('async', 0))
- movie_folder = kwargs.get('movie_folder')
+ movie_folder = sp(kwargs.get('movie_folder'))
downloader = kwargs.get('downloader')
download_id = kwargs.get('download_id')
+ status = kwargs.get('status', 'completed')
- download_info = {'folder': movie_folder} if movie_folder else None
- if download_info:
- download_info.update({'id': download_id, 'downloader': downloader} if download_id else {})
+ release_download = {'folder': movie_folder} if movie_folder else None
+ if release_download:
+ release_download.update({'id': download_id, 'downloader': downloader, 'status': status} if download_id else {})
fire_handle = fireEvent if not async else fireEventAsync
- fire_handle('renamer.scan', download_info)
+ fire_handle('renamer.scan', release_download)
return {
'success': True
}
- def scan(self, download_info = None):
+ def scan(self, release_download = None):
if self.isDisabled():
return
@@ -87,22 +89,66 @@ class Renamer(Plugin):
log.info('Renamer is already running, if you see this often, check the logs above for errors.')
return
- movie_folder = download_info and download_info.get('folder')
+ from_folder = sp(self.conf('from'))
+ to_folder = sp(self.conf('to'))
- # Check to see if the "to" folder is inside the "from" folder.
- if movie_folder and not os.path.isdir(movie_folder) or not os.path.isdir(self.conf('from')) or not os.path.isdir(self.conf('to')):
- l = log.debug if movie_folder else log.error
- l('Both the "To" and "From" have to exist.')
- return
- elif self.conf('from') in self.conf('to'):
- log.error('The "to" can\'t be inside of the "from" folder. You\'ll get an infinite loop.')
- return
- elif movie_folder and movie_folder in [self.conf('to'), self.conf('from')]:
- log.error('The "to" and "from" folders can\'t be inside of or the same as the provided movie folder.')
+ # Get movie folder to process
+ movie_folder = release_download and release_download.get('folder')
+
+ # Get all folders that should not be processed
+ no_process = [to_folder]
+ cat_list = fireEvent('category.all', single = True) or []
+ no_process.extend([item['destination'] for item in cat_list])
+ try:
+ if Env.setting('library', section = 'manage').strip():
+ no_process.extend(splitString(Env.setting('library', section = 'manage'), '::'))
+ except:
+ pass
+
+ # Check to see if the no_process folders are inside the "from" folder.
+ if not os.path.isdir(from_folder) or not os.path.isdir(to_folder):
+ log.error('Both the "To" and "From" have to exist.')
return
+ else:
+ for item in no_process:
+ if from_folder in item:
+ log.error('To protect your data, the movie libraries can\'t be inside of or the same as the "from" folder.')
+ return
+
+ # Check to see if the no_process folders are inside the provided movie_folder
+ if movie_folder and not os.path.isdir(movie_folder):
+ log.debug('The provided movie folder %s does not exist. Trying to find it in the \'from\' folder.', movie_folder)
+
+ # Update to the from folder
+ if len(release_download.get('files')) == 1:
+ new_movie_folder = from_folder
+ else:
+ new_movie_folder = sp(os.path.join(from_folder, os.path.basename(movie_folder)))
+
+ if not os.path.isdir(new_movie_folder):
+ log.error('The provided movie folder %s does not exist and could also not be found in the \'from\' folder.', movie_folder)
+ return
+
+ # Update the files
+ new_files = [os.path.join(new_movie_folder, os.path.relpath(filename, movie_folder)) for filename in splitString(release_download.get('files'), '|')]
+ if new_files and not os.path.isfile(new_files[0]):
+ log.error('The provided movie folder %s does not exist and its files could also not be found in the \'from\' folder.', movie_folder)
+ return
+
+ # Update release_download info to the from folder
+ log.debug('Release %s found in the \'from\' folder.', movie_folder)
+ release_download['folder'] = new_movie_folder
+ release_download['files'] = '|'.join(new_files)
+ movie_folder = new_movie_folder
+
+ if movie_folder:
+ for item in no_process:
+ if movie_folder in item:
+ log.error('To protect your data, the movie libraries can\'t be inside of or the same as the provided movie folder.')
+ return
# Make sure a checkSnatched marked all downloads/seeds as such
- if not download_info and self.conf('run_every') > 0:
+ if not release_download and self.conf('run_every') > 0:
fireEvent('renamer.check_snatched')
self.renaming_started = True
@@ -112,29 +158,35 @@ class Renamer(Plugin):
files = []
if movie_folder:
log.info('Scanning movie folder %s...', movie_folder)
- movie_folder = movie_folder.rstrip(os.path.sep)
folder = os.path.dirname(movie_folder)
- # Get all files from the specified folder
- try:
- for root, folders, names in os.walk(movie_folder):
- files.extend([os.path.join(root, name) for name in names])
- except:
- log.error('Failed getting files from %s: %s', (movie_folder, traceback.format_exc()))
+ if release_download.get('files', ''):
+ files = splitString(release_download['files'], '|')
+
+ # If there is only one file in the torrent, the downloader did not create a subfolder
+ if len(files) == 1:
+ folder = movie_folder
+ else:
+ # Get all files from the specified folder
+ try:
+ for root, folders, names in os.walk(movie_folder):
+ files.extend([os.path.join(root, name) for name in names])
+ except:
+ log.error('Failed getting files from %s: %s', (movie_folder, traceback.format_exc()))
db = get_session()
# Extend the download info with info stored in the downloaded release
- download_info = self.extendDownloadInfo(download_info)
+ release_download = self.extendReleaseDownload(release_download)
# Unpack any archives
extr_files = None
if self.conf('unrar'):
folder, movie_folder, files, extr_files = self.extractFiles(folder = folder, movie_folder = movie_folder, files = files,
- cleanup = self.conf('cleanup') and not self.downloadIsTorrent(download_info))
+ cleanup = self.conf('cleanup') and not self.downloadIsTorrent(release_download))
- groups = fireEvent('scanner.scan', folder = folder if folder else self.conf('from'),
- files = files, download_info = download_info, return_ignored = False, single = True)
+ groups = fireEvent('scanner.scan', folder = folder if folder else from_folder,
+ files = files, release_download = release_download, return_ignored = False, single = True) or []
folder_name = self.conf('folder_name')
file_name = self.conf('file_name')
@@ -142,9 +194,9 @@ class Renamer(Plugin):
nfo_name = self.conf('nfo_name')
separator = self.conf('separator')
- # Statusses
- done_status, active_status, downloaded_status, snatched_status = \
- fireEvent('status.get', ['done', 'active', 'downloaded', 'snatched'], single = True)
+ # Statuses
+ done_status, active_status, downloaded_status, snatched_status, seeding_status = \
+ fireEvent('status.get', ['done', 'active', 'downloaded', 'snatched', 'seeding'], single = True)
for group_identifier in groups:
@@ -157,7 +209,7 @@ class Renamer(Plugin):
# Add _UNKNOWN_ if no library item is connected
if not group['library'] or not movie_title:
- self.tagDir(group, 'unknown')
+ self.tagRelease(group = group, tag = 'unknown')
continue
# Rename the files using the library data
else:
@@ -172,8 +224,13 @@ class Renamer(Plugin):
movie_title = getTitle(library)
# Overwrite destination when set in category
- destination = self.conf('to')
+ destination = to_folder
+ category_label = ''
for movie in library_ent.movies:
+
+ if movie.category and movie.category.label:
+ category_label = movie.category.label
+
if movie.category and movie.category.destination and len(movie.category.destination) > 0 and movie.category.destination != 'None':
destination = movie.category.destination
log.debug('Setting category destination for "%s": %s' % (movie_title, destination))
@@ -190,7 +247,7 @@ class Renamer(Plugin):
if extr_files:
group['before_rename'].extend(extr_files)
- # Remove weird chars from moviename
+ # Remove weird chars from movie name
movie_name = re.sub(r"[\x00\/\\:\*\?\"<>\|]", '', movie_title)
# Put 'The' at the end
@@ -217,6 +274,7 @@ class Renamer(Plugin):
'cd': '',
'cd_nr': '',
'mpaa': library['info'].get('mpaa', ''),
+ 'category': category_label,
}
for file_type in group['files']:
@@ -225,7 +283,7 @@ class Renamer(Plugin):
if file_type is 'nfo' and not self.conf('rename_nfo'):
log.debug('Skipping, renaming of %s disabled', file_type)
for current_file in group['files'][file_type]:
- if self.conf('cleanup') and (not self.downloadIsTorrent(download_info) or self.fileIsAdded(current_file, group)):
+ if self.conf('cleanup') and (not self.downloadIsTorrent(release_download) or self.fileIsAdded(current_file, group)):
remove_files.append(current_file)
continue
@@ -385,7 +443,7 @@ class Renamer(Plugin):
log.info('Better quality release already exists for %s, with quality %s', (movie.library.titles[0].title, release.quality.label))
# Add exists tag to the .ignore file
- self.tagDir(group, 'exists')
+ self.tagRelease(group = group, tag = 'exists')
# Notify on rename fail
download_message = 'Renaming of %s (%s) cancelled, exists in %s already.' % (movie.library.titles[0].title, group['meta_data']['quality']['label'], release.quality.label)
@@ -393,16 +451,20 @@ class Renamer(Plugin):
remove_leftovers = False
break
- elif release.status_id is snatched_status.get('id'):
- if release.quality.id is group['meta_data']['quality']['id']:
- log.debug('Marking release as downloaded')
- try:
- release.status_id = downloaded_status.get('id')
- release.last_edit = int(time.time())
- except Exception, e:
- log.error('Failed marking release as finished: %s %s', (e, traceback.format_exc()))
- db.commit()
+ elif release.status_id in [snatched_status.get('id'), seeding_status.get('id')]:
+ if release_download and release_download.get('rls_id'):
+ if release_download['rls_id'] == release.id:
+ if release_download['status'] == 'completed':
+ # Set the release to downloaded
+ fireEvent('release.update_status', release.id, status = downloaded_status, single = True)
+ elif release_download['status'] == 'seeding':
+ # Set the release to seeding
+ fireEvent('release.update_status', release.id, status = seeding_status, single = True)
+
+ elif release.quality.id is group['meta_data']['quality']['id']:
+ # Set the release to downloaded
+ fireEvent('release.update_status', release.id, status = downloaded_status, single = True)
# Remove leftover files
if not remove_leftovers: # Don't remove anything
@@ -411,7 +473,7 @@ class Renamer(Plugin):
log.debug('Removing leftover files')
for current_file in group['files']['leftover']:
if self.conf('cleanup') and not self.conf('move_leftover') and \
- (not self.downloadIsTorrent(download_info) or self.fileIsAdded(current_file, group)):
+ (not self.downloadIsTorrent(release_download) or self.fileIsAdded(current_file, group)):
remove_files.append(current_file)
# Remove files
@@ -427,17 +489,17 @@ class Renamer(Plugin):
log.info('Removing "%s"', src)
try:
- src = ss(src)
+ src = sp(src)
if os.path.isfile(src):
os.remove(src)
- parent_dir = os.path.normpath(os.path.dirname(src))
- if delete_folders.count(parent_dir) == 0 and os.path.isdir(parent_dir) and not parent_dir in [destination, movie_folder] and not self.conf('from') in parent_dir:
+ parent_dir = os.path.dirname(src)
+ if delete_folders.count(parent_dir) == 0 and os.path.isdir(parent_dir) and not parent_dir in [destination, movie_folder] and not from_folder in parent_dir:
delete_folders.append(parent_dir)
except:
log.error('Failed removing %s: %s', (src, traceback.format_exc()))
- self.tagDir(group, 'failed_remove')
+ self.tagRelease(group = group, tag = 'failed_remove')
# Delete leftover folder from older releases
for delete_folder in delete_folders:
@@ -457,15 +519,15 @@ class Renamer(Plugin):
self.makeDir(os.path.dirname(dst))
try:
- self.moveFile(src, dst, forcemove = not self.downloadIsTorrent(download_info) or self.fileIsAdded(src, group))
+ self.moveFile(src, dst, forcemove = not self.downloadIsTorrent(release_download) or self.fileIsAdded(src, group))
group['renamed_files'].append(dst)
except:
log.error('Failed moving the file "%s" : %s', (os.path.basename(src), traceback.format_exc()))
- self.tagDir(group, 'failed_rename')
+ self.tagRelease(group = group, tag = 'failed_rename')
# Tag folder if it is in the 'from' folder and it will not be removed because it is a torrent
- if self.movieInFromFolder(movie_folder) and self.downloadIsTorrent(download_info):
- self.tagDir(group, 'renamed_already')
+ if self.movieInFromFolder(movie_folder) and self.downloadIsTorrent(release_download):
+ self.tagRelease(group = group, tag = 'renamed_already')
# Remove matching releases
for release in remove_releases:
@@ -475,12 +537,19 @@ class Renamer(Plugin):
except:
log.error('Failed removing %s: %s', (release.identifier, traceback.format_exc()))
- if group['dirname'] and group['parentdir'] and not self.downloadIsTorrent(download_info):
+ if group['dirname'] and group['parentdir'] and not self.downloadIsTorrent(release_download):
+ if movie_folder:
+ # Delete the movie folder
+ group_folder = movie_folder
+ else:
+ # Delete the first empty subfolder in the tree relative to the 'from' folder
+ group_folder = sp(os.path.join(from_folder, os.path.relpath(group['parentdir'], from_folder)).split(os.path.sep)[0])
+
try:
- log.info('Deleting folder: %s', group['parentdir'])
- self.deleteEmptyFolder(group['parentdir'])
+ log.info('Deleting folder: %s', group_folder)
+ self.deleteEmptyFolder(group_folder)
except:
- log.error('Failed removing %s: %s', (group['parentdir'], traceback.format_exc()))
+ log.error('Failed removing %s: %s', (group_folder, traceback.format_exc()))
# Notify on download, search for trailers etc
download_message = 'Downloaded %s (%s)' % (movie_title, replacements['quality'])
@@ -515,18 +584,9 @@ class Renamer(Plugin):
return rename_files
# This adds a file to ignore / tag a release so it is ignored later
- def tagDir(self, group, tag):
-
- ignore_file = None
- if isinstance(group, dict):
- for movie_file in sorted(list(group['files']['movie'])):
- ignore_file = '%s.%s.ignore' % (os.path.splitext(movie_file)[0], tag)
- break
- else:
- if not os.path.isdir(group) or not tag:
- return
- ignore_file = os.path.join(group, '%s.ignore' % tag)
-
+ def tagRelease(self, tag, group = None, release_download = None):
+ if not tag:
+ return
text = """This file is from CouchPotato
It has marked this release as "%s"
@@ -534,25 +594,88 @@ This file hides the release from the renamer
Remove it if you want it to be renamed (again, or at least let it try again)
""" % tag
- if ignore_file:
- self.createFile(ignore_file, text)
+ tag_files = []
- def untagDir(self, folder, tag = ''):
- if not os.path.isdir(folder):
+ # Tag movie files if they are known
+ if isinstance(group, dict):
+ tag_files = [sorted(list(group['files']['movie']))[0]]
+
+ elif isinstance(release_download, dict):
+ # Tag download_files if they are known
+ if release_download['files']:
+ tag_files = release_download['files'].split('|')
+
+ # Tag all files in release folder
+ else:
+ for root, folders, names in os.walk(release_download['folder']):
+ tag_files.extend([os.path.join(root, name) for name in names])
+
+ for filename in tag_files:
+ tag_filename = '%s.%s.ignore' % (os.path.splitext(filename)[0], tag)
+ if not os.path.isfile(tag_filename):
+ self.createFile(tag_filename, text)
+
+ def untagRelease(self, release_download, tag = ''):
+ if not release_download:
return
- # Remove any .ignore files
- for root, dirnames, filenames in os.walk(folder):
- for filename in fnmatch.filter(filenames, '*%s.ignore' % tag):
- os.remove((os.path.join(root, filename)))
+ tag_files = []
- def hastagDir(self, folder, tag = ''):
+ folder = release_download['folder']
if not os.path.isdir(folder):
return False
- # Find any .ignore files
+ # Untag download_files if they are known
+ if release_download['files']:
+ tag_files = release_download['files'].split('|')
+
+ # Untag all files in release folder
+ else:
+ for root, folders, names in os.walk(release_download['folder']):
+ tag_files.extend([os.path.join(root, name) for name in names if not os.path.splitext(name)[1] == '.ignore'])
+
+ # Find all .ignore files in folder
+ ignore_files = []
for root, dirnames, filenames in os.walk(folder):
- if fnmatch.filter(filenames, '*%s.ignore' % tag):
+ ignore_files.extend(fnmatch.filter([os.path.join(root, filename) for filename in filenames], '*%s.ignore' % tag))
+
+ # Match all found ignore files with the tag_files and delete if found
+ for tag_file in tag_files:
+ ignore_file = fnmatch.filter(ignore_files, '%s.%s.ignore' % (re.escape(os.path.splitext(tag_file)[0]), tag if tag else '*'))
+ for filename in ignore_file:
+ try:
+ os.remove(filename)
+ except:
+ log.debug('Unable to remove ignore file: %s. Error: %s.' % (filename, traceback.format_exc()))
+
+ def hastagRelease(self, release_download, tag = ''):
+ if not release_download:
+ return False
+
+ folder = release_download['folder']
+ if not os.path.isdir(folder):
+ return False
+
+ tag_files = []
+ ignore_files = []
+
+ # Find tag on download_files if they are known
+ if release_download['files']:
+ tag_files = release_download['files'].split('|')
+
+ # Find tag on all files in release folder
+ else:
+ for root, folders, names in os.walk(release_download['folder']):
+ tag_files.extend([os.path.join(root, name) for name in names if not os.path.splitext(name)[1] == '.ignore'])
+
+ # Find all .ignore files in folder
+ for root, dirnames, filenames in os.walk(folder):
+ ignore_files.extend(fnmatch.filter([os.path.join(root, filename) for filename in filenames], '*%s.ignore' % tag))
+
+            # Match all found ignore files with the tag_files and return True if found
+ for tag_file in tag_files:
+ ignore_file = fnmatch.filter(ignore_files, '%s.%s.ignore' % (os.path.splitext(tag_file)[0], tag if tag else '*'))
+ if ignore_file:
return True
return False
@@ -571,7 +694,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)
link(old, dest)
except:
# Try to simlink next
- log.debug('Couldn\'t hardlink file "%s" to "%s". Simlinking instead. Error: %s. ', (old, dest, traceback.format_exc()))
+ log.debug('Couldn\'t hardlink file "%s" to "%s". Simlinking instead. Error: %s.', (old, dest, traceback.format_exc()))
shutil.copy(old, dest)
try:
symlink(dest, old + '.link')
@@ -615,19 +738,35 @@ Remove it if you want it to be renamed (again, or at least let it try again)
replaced = toUnicode(string)
for x, r in replacements.iteritems():
+ if x in ['thename', 'namethe']:
+ continue
if r is not None:
replaced = replaced.replace(u'<%s>' % toUnicode(x), toUnicode(r))
else:
#If information is not available, we don't want the tag in the filename
replaced = replaced.replace('<' + x + '>', '')
+ replaced = self.replaceDoubles(replaced.lstrip('. '))
+ for x, r in replacements.iteritems():
+ if x in ['thename', 'namethe']:
+ replaced = replaced.replace(u'<%s>' % toUnicode(x), toUnicode(r))
replaced = re.sub(r"[\x00:\*\?\"<>\|]", '', replaced)
sep = self.conf('foldersep') if folder else self.conf('separator')
- return self.replaceDoubles(replaced.lstrip('. ')).replace(' ', ' ' if not sep else sep)
+ return replaced.replace(' ', ' ' if not sep else sep)
def replaceDoubles(self, string):
- return string.replace(' ', ' ').replace(' .', '.')
+
+ replaces = [
+ ('\.+', '.'), ('_+', '_'), ('-+', '-'), ('\s+', ' '),
+ ('(\s\.)+', '.'), ('(-\.)+', '.'), ('(\s-)+', '-'),
+ ]
+
+ for r in replaces:
+ reg, replace_with = r
+ string = re.sub(reg, replace_with, string)
+
+ return string
def deleteEmptyFolder(self, folder, show_error = True):
folder = ss(folder)
@@ -656,117 +795,115 @@ Remove it if you want it to be renamed (again, or at least let it try again)
self.checking_snatched = True
- snatched_status, ignored_status, failed_status, done_status, seeding_status, downloaded_status = \
- fireEvent('status.get', ['snatched', 'ignored', 'failed', 'done', 'seeding', 'downloaded'], single = True)
+ snatched_status, ignored_status, failed_status, seeding_status, downloaded_status, missing_status = \
+ fireEvent('status.get', ['snatched', 'ignored', 'failed', 'seeding', 'downloaded', 'missing'], single = True)
db = get_session()
- rels = db.query(Release).filter_by(status_id = snatched_status.get('id')).all()
- rels.extend(db.query(Release).filter_by(status_id = seeding_status.get('id')).all())
+ rels = db.query(Release).filter(
+ Release.status_id.in_([snatched_status.get('id'), seeding_status.get('id'), missing_status.get('id')])
+ ).all()
- scan_items = []
+ scan_releases = []
scan_required = False
if rels:
log.debug('Checking status snatched releases...')
- statuses = fireEvent('download.status', merge = True)
- if not statuses:
+ release_downloads = fireEvent('download.status', merge = True)
+ if not release_downloads:
log.debug('Download status functionality is not implemented for active downloaders.')
scan_required = True
else:
try:
for rel in rels:
rel_dict = rel.to_dict({'info': {}})
-
movie_dict = fireEvent('movie.get', rel.movie_id, single = True)
+ if not isinstance(rel_dict['info'], (dict)):
+ log.error('Faulty release found without any info, ignoring.')
+ fireEvent('release.update_status', rel.id, status = ignored_status, single = True)
+ continue
+
# check status
nzbname = self.createNzbName(rel_dict['info'], movie_dict)
found = False
- for item in statuses:
+ for release_download in release_downloads:
found_release = False
if rel_dict['info'].get('download_id'):
- if item['id'] == rel_dict['info']['download_id'] and item['downloader'] == rel_dict['info']['download_downloader']:
- log.debug('Found release by id: %s', item['id'])
+ if release_download['id'] == rel_dict['info']['download_id'] and release_download['downloader'] == rel_dict['info']['download_downloader']:
+ log.debug('Found release by id: %s', release_download['id'])
found_release = True
else:
- if item['name'] == nzbname or rel_dict['info']['name'] in item['name'] or getImdb(item['name']) == movie_dict['library']['identifier']:
+ if release_download['name'] == nzbname or rel_dict['info']['name'] in release_download['name'] or getImdb(release_download['name']) == movie_dict['library']['identifier']:
found_release = True
if found_release:
- timeleft = 'N/A' if item['timeleft'] == -1 else item['timeleft']
- log.debug('Found %s: %s, time to go: %s', (item['name'], item['status'].upper(), timeleft))
+ timeleft = 'N/A' if release_download['timeleft'] == -1 else release_download['timeleft']
+ log.debug('Found %s: %s, time to go: %s', (release_download['name'], release_download['status'].upper(), timeleft))
+
+ if release_download['status'] == 'busy':
+ # Set the release to snatched if it was missing before
+ fireEvent('release.update_status', rel.id, status = snatched_status, single = True)
- if item['status'] == 'busy':
# Tag folder if it is in the 'from' folder and it will not be processed because it is still downloading
- if item['folder'] and self.conf('from') in item['folder']:
- self.tagDir(item['folder'], 'downloading')
-
- elif item['status'] == 'seeding':
+ if self.movieInFromFolder(release_download['folder']):
+ self.tagRelease(release_download = release_download, tag = 'downloading')
+ elif release_download['status'] == 'seeding':
#If linking setting is enabled, process release
- if self.conf('file_action') != 'move' and not rel.movie.status_id == done_status.get('id') and self.statusInfoComplete(item):
- log.info('Download of %s completed! It is now being processed while leaving the original files alone for seeding. Current ratio: %s.', (item['name'], item['seed_ratio']))
+ if self.conf('file_action') != 'move' and not rel.status_id == seeding_status.get('id') and self.statusInfoComplete(release_download):
+ log.info('Download of %s completed! It is now being processed while leaving the original files alone for seeding. Current ratio: %s.', (release_download['name'], release_download['seed_ratio']))
# Remove the downloading tag
- self.untagDir(item['folder'], 'downloading')
-
- rel.status_id = seeding_status.get('id')
- rel.last_edit = int(time.time())
- db.commit()
+ self.untagRelease(release_download = release_download, tag = 'downloading')
# Scan and set the torrent to paused if required
- item.update({'pause': True, 'scan': True, 'process_complete': False})
- scan_items.append(item)
+ release_download.update({'pause': True, 'scan': True, 'process_complete': False})
+ scan_releases.append(release_download)
else:
- if rel.status_id != seeding_status.get('id'):
- rel.status_id = seeding_status.get('id')
- rel.last_edit = int(time.time())
- db.commit()
-
#let it seed
- log.debug('%s is seeding with ratio: %s', (item['name'], item['seed_ratio']))
- elif item['status'] == 'failed':
- fireEvent('download.remove_failed', item, single = True)
- rel.status_id = failed_status.get('id')
- rel.last_edit = int(time.time())
- db.commit()
+ log.debug('%s is seeding with ratio: %s', (release_download['name'], release_download['seed_ratio']))
+
+ # Set the release to seeding
+ fireEvent('release.update_status', rel.id, status = seeding_status, single = True)
+
+ elif release_download['status'] == 'failed':
+ # Set the release to failed
+ fireEvent('release.update_status', rel.id, status = failed_status, single = True)
+
+ fireEvent('download.remove_failed', release_download, single = True)
if self.conf('next_on_failed'):
fireEvent('movie.searcher.try_next_release', movie_id = rel.movie_id)
- elif item['status'] == 'completed':
- log.info('Download of %s completed!', item['name'])
- if self.statusInfoComplete(item):
+ elif release_download['status'] == 'completed':
+ log.info('Download of %s completed!', release_download['name'])
+ if self.statusInfoComplete(release_download):
# If the release has been seeding, process now the seeding is done
if rel.status_id == seeding_status.get('id'):
- if rel.movie.status_id == done_status.get('id'):
+ if self.conf('file_action') != 'move':
# Set the release to done as the movie has already been renamed
- rel.status_id = downloaded_status.get('id')
- rel.last_edit = int(time.time())
- db.commit()
+ fireEvent('release.update_status', rel.id, status = downloaded_status, single = True)
# Allow the downloader to clean-up
- item.update({'pause': False, 'scan': False, 'process_complete': True})
- scan_items.append(item)
+ release_download.update({'pause': False, 'scan': False, 'process_complete': True})
+ scan_releases.append(release_download)
else:
- # Set the release to snatched so that the renamer can process the release as if it was never seeding
- rel.status_id = snatched_status.get('id')
- rel.last_edit = int(time.time())
- db.commit()
-
# Scan and Allow the downloader to clean-up
- item.update({'pause': False, 'scan': True, 'process_complete': True})
- scan_items.append(item)
+ release_download.update({'pause': False, 'scan': True, 'process_complete': True})
+ scan_releases.append(release_download)
else:
+ # Set the release to snatched if it was missing before
+ fireEvent('release.update_status', rel.id, status = snatched_status, single = True)
+
# Remove the downloading tag
- self.untagDir(item['folder'], 'downloading')
+ self.untagRelease(release_download = release_download, tag = 'downloading')
# Scan and Allow the downloader to clean-up
- item.update({'pause': False, 'scan': True, 'process_complete': True})
- scan_items.append(item)
+ release_download.update({'pause': False, 'scan': True, 'process_complete': True})
+ scan_releases.append(release_download)
else:
scan_required = True
@@ -776,25 +913,33 @@ Remove it if you want it to be renamed (again, or at least let it try again)
if not found:
log.info('%s not found in downloaders', nzbname)
+                            # Check if the release is already missing and for how long; if > 1 week set to ignored, else set to missing
+ if rel.status_id == missing_status.get('id'):
+ if rel.last_edit < int(time.time()) - 7 * 24 * 60 * 60:
+ fireEvent('release.update_status', rel.id, status = ignored_status, single = True)
+ else:
+ # Set the release to missing
+ fireEvent('release.update_status', rel.id, status = missing_status, single = True)
+
except:
log.error('Failed checking for release in downloader: %s', traceback.format_exc())
# The following can either be done here, or inside the scanner if we pass it scan_items in one go
- for item in scan_items:
+ for release_download in scan_releases:
# Ask the renamer to scan the item
- if item['scan']:
- if item['pause'] and self.conf('file_action') == 'link':
- fireEvent('download.pause', item = item, pause = True, single = True)
- fireEvent('renamer.scan', download_info = item)
- if item['pause'] and self.conf('file_action') == 'link':
- fireEvent('download.pause', item = item, pause = False, single = True)
- if item['process_complete']:
+ if release_download['scan']:
+ if release_download['pause'] and self.conf('file_action') == 'link':
+ fireEvent('download.pause', release_download = release_download, pause = True, single = True)
+ fireEvent('renamer.scan', release_download = release_download)
+ if release_download['pause'] and self.conf('file_action') == 'link':
+ fireEvent('download.pause', release_download = release_download, pause = False, single = True)
+ if release_download['process_complete']:
#First make sure the files were succesfully processed
- if not self.hastagDir(item['folder'], 'failed_rename'):
+ if not self.hastagRelease(release_download = release_download, tag = 'failed_rename'):
# Remove the seeding tag if it exists
- self.untagDir(item['folder'], 'renamed_already')
+ self.untagRelease(release_download = release_download, tag = 'renamed_already')
# Ask the downloader to process the item
- fireEvent('download.process_complete', item = item, single = True)
+ fireEvent('download.process_complete', release_download = release_download, single = True)
if scan_required:
fireEvent('renamer.scan')
@@ -803,16 +948,16 @@ Remove it if you want it to be renamed (again, or at least let it try again)
return True
- def extendDownloadInfo(self, download_info):
+ def extendReleaseDownload(self, release_download):
rls = None
- if download_info and download_info.get('id') and download_info.get('downloader'):
+ if release_download and release_download.get('id') and release_download.get('downloader'):
db = get_session()
- rlsnfo_dwnlds = db.query(ReleaseInfo).filter_by(identifier = 'download_downloader', value = download_info.get('downloader')).all()
- rlsnfo_ids = db.query(ReleaseInfo).filter_by(identifier = 'download_id', value = download_info.get('id')).all()
+ rlsnfo_dwnlds = db.query(ReleaseInfo).filter_by(identifier = 'download_downloader', value = release_download.get('downloader')).all()
+ rlsnfo_ids = db.query(ReleaseInfo).filter_by(identifier = 'download_id', value = release_download.get('id')).all()
for rlsnfo_dwnld in rlsnfo_dwnlds:
for rlsnfo_id in rlsnfo_ids:
@@ -822,32 +967,33 @@ Remove it if you want it to be renamed (again, or at least let it try again)
if rls: break
if not rls:
- log.error('Download ID %s from downloader %s not found in releases', (download_info.get('id'), download_info.get('downloader')))
+ log.error('Download ID %s from downloader %s not found in releases', (release_download.get('id'), release_download.get('downloader')))
if rls:
rls_dict = rls.to_dict({'info':{}})
- download_info.update({
+ release_download.update({
'imdb_id': rls.movie.library.identifier,
'quality': rls.quality.identifier,
'protocol': rls_dict.get('info', {}).get('protocol') or rls_dict.get('info', {}).get('type'),
+ 'rls_id': rls.id,
})
- return download_info
+ return release_download
- def downloadIsTorrent(self, download_info):
- return download_info and download_info.get('protocol') in ['torrent', 'torrent_magnet']
+ def downloadIsTorrent(self, release_download):
+ return release_download and release_download.get('protocol') in ['torrent', 'torrent_magnet']
def fileIsAdded(self, src, group):
if not group or not group.get('before_rename'):
return False
return src in group['before_rename']
- def statusInfoComplete(self, item):
- return item['id'] and item['downloader'] and item['folder']
+ def statusInfoComplete(self, release_download):
+ return release_download['id'] and release_download['downloader'] and release_download['folder']
def movieInFromFolder(self, movie_folder):
- return movie_folder and self.conf('from') in movie_folder or not movie_folder
+ return movie_folder and sp(self.conf('from')) in movie_folder or not movie_folder
def extractFiles(self, folder = None, movie_folder = None, files = None, cleanup = False):
if not files: files = []
@@ -859,7 +1005,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)
# Check input variables
if not folder:
- folder = self.conf('from')
+ folder = sp(self.conf('from'))
check_file_date = True
if movie_folder:
@@ -875,7 +1021,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)
#Extract all found archives
for archive in archives:
# Check if it has already been processed by CPS
- if self.hastagDir(os.path.dirname(archive['file'])):
+ if self.hastagRelease(release_download = {'folder': os.path.dirname(archive['file']), 'files': archive['file']}):
continue
# Find all related archive files
@@ -913,7 +1059,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)
log.info('Archive %s found. Extracting...', os.path.basename(archive['file']))
try:
rar_handle = RarFile(archive['file'])
- extr_path = os.path.join(self.conf('from'), os.path.relpath(os.path.dirname(archive['file']), folder))
+ extr_path = os.path.join(sp(self.conf('from')), os.path.relpath(os.path.dirname(archive['file']), folder))
self.makeDir(extr_path)
for packedinfo in rar_handle.infolist():
if not packedinfo.isdir and not os.path.isfile(os.path.join(extr_path, os.path.basename(packedinfo.filename))):
@@ -936,9 +1082,9 @@ Remove it if you want it to be renamed (again, or at least let it try again)
files.remove(filename)
# Move the rest of the files and folders if any files are extracted to the from folder (only if folder was provided)
- if extr_files and os.path.normpath(os.path.normcase(folder)) != os.path.normpath(os.path.normcase(self.conf('from'))):
+ if extr_files and folder != sp(self.conf('from')):
for leftoverfile in list(files):
- move_to = os.path.join(self.conf('from'), os.path.relpath(leftoverfile, folder))
+ move_to = os.path.join(sp(self.conf('from')), os.path.relpath(leftoverfile, folder))
try:
self.makeDir(os.path.dirname(move_to))
@@ -961,8 +1107,8 @@ Remove it if you want it to be renamed (again, or at least let it try again)
log.debug('Removing old movie folder %s...', movie_folder)
self.deleteEmptyFolder(movie_folder)
- movie_folder = os.path.join(self.conf('from'), os.path.relpath(movie_folder, folder))
- folder = self.conf('from')
+ movie_folder = os.path.join(sp(self.conf('from')), os.path.relpath(movie_folder, folder))
+ folder = sp(self.conf('from'))
if extr_files:
files.extend(extr_files)
diff --git a/couchpotato/core/plugins/scanner/main.py b/couchpotato/core/plugins/scanner/main.py
index 0662d008..e77e62ad 100644
--- a/couchpotato/core/plugins/scanner/main.py
+++ b/couchpotato/core/plugins/scanner/main.py
@@ -1,10 +1,11 @@
from couchpotato import get_session
from couchpotato.core.event import fireEvent, addEvent
-from couchpotato.core.helpers.encoding import toUnicode, simplifyString, ss
-from couchpotato.core.helpers.variable import getExt, getImdb, tryInt
+from couchpotato.core.helpers.encoding import toUnicode, simplifyString, ss, sp
+from couchpotato.core.helpers.variable import getExt, getImdb, tryInt, \
+ splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
-from couchpotato.core.settings.model import File, Movie
+from couchpotato.core.settings.model import File, Media
from enzyme.exceptions import NoParserError, ParseError
from guessit import guess_movie_info
from subliminal.videos import Video
@@ -20,11 +21,9 @@ log = CPLog(__name__)
class Scanner(Plugin):
- minimal_filesize = {
- 'media': 314572800, # 300MB
- 'trailer': 1048576, # 1MB
- }
- ignored_in_path = [os.path.sep + 'extracted' + os.path.sep, 'extracting', '_unpack', '_failed_', '_unknown_', '_exists_', '_failed_remove_', '_failed_rename_', '.appledouble', '.appledb', '.appledesktop', os.path.sep + '._', '.ds_store', 'cp.cpnfo'] #unpacking, smb-crap, hidden files
+ ignored_in_path = [os.path.sep + 'extracted' + os.path.sep, 'extracting', '_unpack', '_failed_', '_unknown_', '_exists_', '_failed_remove_',
+ '_failed_rename_', '.appledouble', '.appledb', '.appledesktop', os.path.sep + '._', '.ds_store', 'cp.cpnfo',
+ 'thumbs.db', 'ehthumbs.db', 'desktop.ini'] #unpacking, smb-crap, hidden files
ignore_names = ['extract', 'extracting', 'extracted', 'movie', 'movies', 'film', 'films', 'download', 'downloads', 'video_ts', 'audio_ts', 'bdmv', 'certificate']
extensions = {
'movie': ['mkv', 'wmv', 'avi', 'mpg', 'mpeg', 'mp4', 'm2ts', 'iso', 'img', 'mdf', 'ts', 'm4v'],
@@ -49,6 +48,12 @@ class Scanner(Plugin):
'leftover': ('leftover', 'leftover'),
}
+ file_sizes = { # in MB
+ 'movie': {'min': 300},
+ 'trailer': {'min': 2, 'max': 250},
+ 'backdrop': {'min': 0, 'max': 5},
+ }
+
codecs = {
'audio': ['dts', 'ac3', 'ac3d', 'mp3'],
'video': ['x264', 'h264', 'divx', 'xvid']
@@ -101,9 +106,9 @@ class Scanner(Plugin):
addEvent('scanner.name_year', self.getReleaseNameYear)
addEvent('scanner.partnumber', self.getPartNumber)
- def scan(self, folder = None, files = None, download_info = None, simple = False, newer_than = 0, return_ignored = True, on_found = None):
+ def scan(self, folder = None, files = None, release_download = None, simple = False, newer_than = 0, return_ignored = True, on_found = None):
- folder = ss(os.path.normpath(folder))
+ folder = sp(folder)
if not folder or not os.path.isdir(folder):
log.error('Folder doesn\'t exists: %s', folder)
@@ -119,7 +124,7 @@ class Scanner(Plugin):
try:
files = []
for root, dirs, walk_files in os.walk(folder):
- files.extend(os.path.join(root, filename) for filename in walk_files)
+ files.extend([os.path.join(root, filename) for filename in walk_files])
# Break if CP wants to shut down
if self.shuttingDown():
@@ -145,7 +150,7 @@ class Scanner(Plugin):
continue
is_dvd_file = self.isDVDFile(file_path)
- if os.path.getsize(file_path) > self.minimal_filesize['media'] or is_dvd_file: # Minimal 300MB files or is DVD file
+ if self.filesizeBetween(file_path, self.file_sizes['movie']) or is_dvd_file: # Minimal 300MB files or is DVD file
# Normal identifier
identifier = self.createStringIdentifier(file_path, folder, exclude_filename = is_dvd_file)
@@ -179,7 +184,6 @@ class Scanner(Plugin):
# files will be grouped first.
leftovers = set(sorted(leftovers, reverse = True))
-
# Group files minus extension
ignored_identifiers = []
for identifier, group in movie_files.iteritems():
@@ -188,7 +192,7 @@ class Scanner(Plugin):
log.debug('Grouping files: %s', identifier)
has_ignored = 0
- for file_path in group['unsorted_files']:
+ for file_path in list(group['unsorted_files']):
ext = getExt(file_path)
wo_ext = file_path[:-(len(ext) + 1)]
found_files = set([i for i in leftovers if wo_ext in i])
@@ -197,6 +201,11 @@ class Scanner(Plugin):
has_ignored += 1 if ext == 'ignore' else 0
+ if has_ignored == 0:
+ for file_path in list(group['unsorted_files']):
+ ext = getExt(file_path)
+ has_ignored += 1 if ext == 'ignore' else 0
+
if has_ignored > 0:
ignored_identifiers.append(identifier)
@@ -229,10 +238,6 @@ class Scanner(Plugin):
# Remove the found files from the leftover stack
leftovers = leftovers - set(found_files)
- exts = [getExt(ff) for ff in found_files]
- if 'ignore' in exts:
- ignored_identifiers.append(identifier)
-
# Break if CP wants to shut down
if self.shuttingDown():
break
@@ -259,14 +264,14 @@ class Scanner(Plugin):
# Remove the found files from the leftover stack
leftovers = leftovers - set([ff])
- ext = getExt(ff)
- if ext == 'ignore':
- ignored_identifiers.append(new_identifier)
-
# Break if CP wants to shut down
if self.shuttingDown():
break
+ # leftovers should be empty
+ if leftovers:
+ log.debug('Some files are still left over: %s', leftovers)
+
# Cleaning up used
for identifier in delete_identifiers:
if path_identifiers.get(identifier):
@@ -336,11 +341,11 @@ class Scanner(Plugin):
total_found = len(valid_files)
# Make sure only one movie was found if a download ID is provided
- if download_info and total_found == 0:
- log.info('Download ID provided (%s), but no groups found! Make sure the download contains valid media files (fully extracted).', download_info.get('imdb_id'))
- elif download_info and total_found > 1:
- log.info('Download ID provided (%s), but more than one group found (%s). Ignoring Download ID...', (download_info.get('imdb_id'), len(valid_files)))
- download_info = None
+ if release_download and total_found == 0:
+ log.info('Download ID provided (%s), but no groups found! Make sure the download contains valid media files (fully extracted).', release_download.get('imdb_id'))
+ elif release_download and total_found > 1:
+ log.info('Download ID provided (%s), but more than one group found (%s). Ignoring Download ID...', (release_download.get('imdb_id'), len(valid_files)))
+ release_download = None
# Determine file types
db = get_session()
@@ -376,7 +381,7 @@ class Scanner(Plugin):
continue
log.debug('Getting metadata for %s', identifier)
- group['meta_data'] = self.getMetaData(group, folder = folder, download_info = download_info)
+ group['meta_data'] = self.getMetaData(group, folder = folder, release_download = release_download)
# Subtitle meta
group['subtitle_language'] = self.getSubtitleLanguage(group) if not simple else {}
@@ -408,11 +413,11 @@ class Scanner(Plugin):
del group['unsorted_files']
# Determine movie
- group['library'] = self.determineMovie(group, download_info = download_info)
+ group['library'] = self.determineMovie(group, release_download = release_download)
if not group['library']:
log.error('Unable to determine movie: %s', group['identifiers'])
else:
- movie = db.query(Movie).filter_by(library_id = group['library']['id']).first()
+ movie = db.query(Media).filter_by(library_id = group['library']['id']).first()
group['movie_id'] = None if not movie else movie.id
processed_movies[identifier] = group
@@ -433,13 +438,13 @@ class Scanner(Plugin):
return processed_movies
- def getMetaData(self, group, folder = '', download_info = None):
+ def getMetaData(self, group, folder = '', release_download = None):
data = {}
files = list(group['files']['movie'])
for cur_file in files:
- if os.path.getsize(cur_file) < self.minimal_filesize['media']: continue # Ignore smaller files
+ if not self.filesizeBetween(cur_file, self.file_sizes['movie']): continue # Ignore smaller files
meta = self.getMeta(cur_file)
@@ -458,8 +463,8 @@ class Scanner(Plugin):
# Use the quality guess first, if that failes use the quality we wanted to download
data['quality'] = None
- if download_info and download_info.get('quality'):
- data['quality'] = fireEvent('quality.single', download_info.get('quality'), single = True)
+ if release_download and release_download.get('quality'):
+ data['quality'] = fireEvent('quality.single', release_download.get('quality'), single = True)
if not data['quality']:
data['quality'] = fireEvent('quality.guess', files = files, extra = data, single = True)
@@ -543,12 +548,12 @@ class Scanner(Plugin):
return detected_languages
- def determineMovie(self, group, download_info = None):
+ def determineMovie(self, group, release_download = None):
# Get imdb id from downloader
- imdb_id = download_info and download_info.get('imdb_id')
+ imdb_id = release_download and release_download.get('imdb_id')
if imdb_id:
- log.debug('Found movie via imdb id from it\'s download id: %s', download_info.get('imdb_id'))
+ log.debug('Found movie via imdb id from it\'s download id: %s', release_download.get('imdb_id'))
files = group['files']
@@ -649,7 +654,7 @@ class Scanner(Plugin):
def getMediaFiles(self, files):
def test(s):
- return self.filesizeBetween(s, 300, 100000) and getExt(s.lower()) in self.extensions['movie'] and not self.isSampleFile(s)
+ return self.filesizeBetween(s, self.file_sizes['movie']) and getExt(s.lower()) in self.extensions['movie'] and not self.isSampleFile(s)
return set(filter(test, files))
@@ -674,7 +679,7 @@ class Scanner(Plugin):
def getTrailers(self, files):
def test(s):
- return re.search('(^|[\W_])trailer\d*[\W_]', s.lower()) and self.filesizeBetween(s, 2, 250)
+ return re.search('(^|[\W_])trailer\d*[\W_]', s.lower()) and self.filesizeBetween(s, self.file_sizes['trailer'])
return set(filter(test, files))
@@ -685,7 +690,7 @@ class Scanner(Plugin):
files = set(filter(test, files))
images = {
- 'backdrop': set(filter(lambda s: re.search('(^|[\W_])fanart|backdrop\d*[\W_]', s.lower()) and self.filesizeBetween(s, 0, 5), files))
+ 'backdrop': set(filter(lambda s: re.search('(^|[\W_])fanart|backdrop\d*[\W_]', s.lower()) and self.filesizeBetween(s, self.file_sizes['backdrop']), files))
}
# Rest
@@ -713,16 +718,6 @@ class Scanner(Plugin):
log.debug('Ignored "%s" contains "%s".', (filename, i))
return False
- # Sample file
- if self.isSampleFile(filename):
- log.debug('Is sample file "%s".', filename)
- return False
-
- # Minimal size
- if self.filesizeBetween(filename, self.minimal_filesize['media']):
- log.debug('File to small: %s', filename)
- return False
-
# All is OK
return True
@@ -731,9 +726,9 @@ class Scanner(Plugin):
if is_sample: log.debug('Is sample file: %s', filename)
return is_sample
- def filesizeBetween(self, file, min = 0, max = 100000):
+ def filesizeBetween(self, file, file_size = []):
try:
- return (min * 1048576) < os.path.getsize(file) < (max * 1048576)
+ return (file_size.get('min', 0) * 1048576) < os.path.getsize(file) < (file_size.get('max', 100000) * 1048576)
except:
log.error('Couldn\'t get filesize of %s.', file)
@@ -741,9 +736,16 @@ class Scanner(Plugin):
def createStringIdentifier(self, file_path, folder = '', exclude_filename = False):
- identifier = file_path.replace(folder, '') # root folder
+ year = self.findYear(file_path)
+
+ identifier = file_path.replace(folder, '').lstrip(os.path.sep) # root folder
identifier = os.path.splitext(identifier)[0] # ext
+ try:
+ path_split = splitString(identifier, os.path.sep)
+ identifier = path_split[-2] if len(path_split) > 1 and len(path_split[-2]) > len(path_split[-1]) else path_split[-1] # Only get filename
+ except: pass
+
if exclude_filename:
identifier = identifier[:len(identifier) - len(os.path.split(identifier)[-1])]
@@ -757,7 +759,6 @@ class Scanner(Plugin):
identifier = re.sub(self.clean, '::', simplifyString(identifier)).strip(':')
# Year
- year = self.findYear(identifier)
if year and identifier[:4] != year:
identifier = '%s %s' % (identifier.split(year)[0].strip(), year)
else:
@@ -821,19 +822,21 @@ class Scanner(Plugin):
def findYear(self, text):
# Search year inside () or [] first
- matches = re.search('(\(|\[)(?P<year>19[0-9]{2}|20[0-9]{2})(\]|\))', text)
+ matches = re.findall('(\(|\[)(?P<year>19[0-9]{2}|20[0-9]{2})(\]|\))', text)
if matches:
- return matches.group('year')
+ return matches[-1][1]
# Search normal
- matches = re.search('(?P<year>19[0-9]{2}|20[0-9]{2})', text)
+ matches = re.findall('(?P<year>19[0-9]{2}|20[0-9]{2})', text)
if matches:
- return matches.group('year')
+ return matches[-1]
return ''
def getReleaseNameYear(self, release_name, file_name = None):
+ release_name = release_name.strip(' .-_')
+
# Use guessit first
guess = {}
if file_name:
@@ -851,7 +854,7 @@ class Scanner(Plugin):
cleaned = ' '.join(re.split('\W+', simplifyString(release_name)))
cleaned = re.sub(self.clean, ' ', cleaned)
- for year_str in [file_name, cleaned]:
+ for year_str in [file_name, release_name, cleaned]:
if not year_str: continue
year = self.findYear(year_str)
if year:
@@ -861,19 +864,21 @@ class Scanner(Plugin):
if year: # Split name on year
try:
- movie_name = cleaned.split(year).pop(0).strip()
- cp_guess = {
- 'name': movie_name,
- 'year': int(year),
- }
+ movie_name = cleaned.rsplit(year, 1).pop(0).strip()
+ if movie_name:
+ cp_guess = {
+ 'name': movie_name,
+ 'year': int(year),
+ }
except:
pass
- else: # Split name on multiple spaces
+
+ if not cp_guess: # Split name on multiple spaces
try:
movie_name = cleaned.split(' ').pop(0).strip()
cp_guess = {
'name': movie_name,
- 'year': int(year),
+ 'year': int(year) if movie_name[:4] != year else 0,
}
except:
pass
diff --git a/couchpotato/core/plugins/score/main.py b/couchpotato/core/plugins/score/main.py
index 5f9da1a1..54b6ca31 100644
--- a/couchpotato/core/plugins/score/main.py
+++ b/couchpotato/core/plugins/score/main.py
@@ -1,11 +1,11 @@
-from couchpotato.core.event import addEvent
+from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import getTitle, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.plugins.score.scores import nameScore, nameRatioScore, \
sizeScore, providerScore, duplicateScore, partialIgnoredScore, namePositionScore, \
- halfMultipartScore
+ halfMultipartScore, sceneScore
from couchpotato.environment import Env
log = CPLog(__name__)
@@ -62,4 +62,7 @@ class Score(Plugin):
if extra_score:
score += extra_score(nzb)
+ # Scene / Nuke scoring
+ score += sceneScore(nzb['name'])
+
return score
diff --git a/couchpotato/core/plugins/score/scores.py b/couchpotato/core/plugins/score/scores.py
index 6aa0b465..895f5fc0 100644
--- a/couchpotato/core/plugins/score/scores.py
+++ b/couchpotato/core/plugins/score/scores.py
@@ -1,8 +1,13 @@
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.encoding import simplifyString
from couchpotato.core.helpers.variable import tryInt
+from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
import re
+import traceback
+
+log = CPLog(__name__)
+
name_scores = [
# Tags
@@ -160,3 +165,38 @@ def halfMultipartScore(nzb_name):
return -30
return 0
+
+
+def sceneScore(nzb_name):
+
+ check_names = [nzb_name]
+
+ # Match names between "
+ try: check_names.append(re.search(r'([\'"])[^\1]*\1', nzb_name).group(0))
+ except: pass
+
+ # Match longest name between []
+ try: check_names.append(max(re.findall(r'[^[]*\[([^]]*)\]', nzb_name), key = len).strip())
+ except: pass
+
+ for name in check_names:
+
+ # Strip twice, remove possible file extensions
+ name = name.lower().strip(' "\'\.-_\[\]')
+ name = re.sub('\.([a-z0-9]{0,4})$', '', name)
+ name = name.strip(' "\'\.-_\[\]')
+
+ # Make sure year and groupname is in there
+ year = re.findall('(?P<year>19[0-9]{2}|20[0-9]{2})', name)
+ group = re.findall('\-([a-z0-9]+)$', name)
+
+ if len(year) > 0 and len(group) > 0:
+ try:
+ validate = fireEvent('release.validate', name, single = True)
+ if validate and tryInt(validate.get('score')) != 0:
+ log.debug('Release "%s" scored %s, reason: %s', (nzb_name, validate['score'], validate['reasons']))
+ return tryInt(validate.get('score'))
+ except:
+ log.error('Failed scoring scene: %s', traceback.format_exc())
+
+ return 0
diff --git a/couchpotato/core/plugins/status/main.py b/couchpotato/core/plugins/status/main.py
index 7546c651..b3b37bdc 100644
--- a/couchpotato/core/plugins/status/main.py
+++ b/couchpotato/core/plugins/status/main.py
@@ -24,6 +24,7 @@ class StatusPlugin(Plugin):
'available': 'Available',
'suggest': 'Suggest',
'seeding': 'Seeding',
+ 'missing': 'Missing',
}
status_cached = {}
diff --git a/couchpotato/core/plugins/suggestion/static/suggest.css b/couchpotato/core/plugins/suggestion/static/suggest.css
deleted file mode 100644
index c321ca28..00000000
--- a/couchpotato/core/plugins/suggestion/static/suggest.css
+++ /dev/null
@@ -1,121 +0,0 @@
-.suggestions {
-}
-
- .suggestions > h2 {
- height: 40px;
- }
-
-.suggestions .movie_result {
- display: inline-block;
- width: 33.333%;
- height: 150px;
-}
-
- @media all and (max-width: 960px) {
- .suggestions .movie_result {
- width: 50%;
- }
- }
-
- @media all and (max-width: 600px) {
- .suggestions .movie_result {
- width: 100%;
- }
- }
-
- .suggestions .movie_result .data {
- left: 100px;
- background: #4e5969;
- border: none;
- }
-
- .suggestions .movie_result .data .info {
- top: 15px;
- left: 15px;
- right: 15px;
- bottom: 15px;
- overflow: hidden;
- }
-
- .suggestions .movie_result .data .info h2 {
- white-space: normal;
- max-height: 120px;
- font-size: 18px;
- line-height: 18px;
- }
-
- .suggestions .movie_result .data .info .rating,
- .suggestions .movie_result .data .info .genres,
- .suggestions .movie_result .data .info .year {
- position: static;
- display: block;
- padding: 0;
- opacity: .6;
- }
-
- .suggestions .movie_result .data .info .year {
- margin: 10px 0 0;
- }
-
- .suggestions .movie_result .data .info .rating {
- font-size: 20px;
- float: right;
- margin-top: -20px;
- }
- .suggestions .movie_result .data .info .rating:before {
- content: "\e031";
- font-family: 'Elusive-Icons';
- font-size: 14px;
- margin: 0 5px 0 0;
- vertical-align: bottom;
- }
-
- .suggestions .movie_result .data .info .genres {
- font-size: 11px;
- font-style: italic;
- text-align: right;
-
- }
-
- .suggestions .movie_result .data {
- cursor: default;
- }
-
- .suggestions .movie_result .options {
- left: 100px;
- }
- .suggestions .movie_result .options select[name=title] { width: 100%; }
- .suggestions .movie_result .options select[name=profile] { width: 100%; }
- .suggestions .movie_result .options select[name=category] { width: 100%; }
-
- .suggestions .movie_result .button {
- position: absolute;
- margin: 2px 0 0 0;
- right: 15px;
- bottom: 15px;
- }
-
-
- .suggestions .movie_result .thumbnail {
- width: 100px;
- }
-
- .suggestions .movie_result .actions {
- position: absolute;
- bottom: 10px;
- right: 10px;
- display: none;
- width: 140px;
- }
- .suggestions .movie_result:hover .actions {
- display: block;
- }
- .suggestions .movie_result .data.open .actions {
- display: none;
- }
-
- .suggestions .movie_result .actions a {
- margin-left: 10px;
- vertical-align: middle;
- }
-
diff --git a/couchpotato/core/plugins/userscript/static/userscript.css b/couchpotato/core/plugins/userscript/static/userscript.css
index d08953a6..d8161014 100644
--- a/couchpotato/core/plugins/userscript/static/userscript.css
+++ b/couchpotato/core/plugins/userscript/static/userscript.css
@@ -14,25 +14,25 @@
padding: 20px;
}
- .page.userscript .movie_result {
+ .page.userscript .media_result {
height: 140px;
}
- .page.userscript .movie_result .thumbnail {
+ .page.userscript .media_result .thumbnail {
width: 90px;
}
- .page.userscript .movie_result .options {
+ .page.userscript .media_result .options {
left: 90px;
padding: 54px 15px;
}
- .page.userscript .movie_result .year {
+ .page.userscript .media_result .year {
display: none;
}
- .page.userscript .movie_result .options select[name="title"] {
+ .page.userscript .media_result .options select[name="title"] {
width: 190px;
}
- .page.userscript .movie_result .options select[name="profile"] {
+ .page.userscript .media_result .options select[name="profile"] {
width: 70px;
}
diff --git a/couchpotato/core/plugins/userscript/static/userscript.js b/couchpotato/core/plugins/userscript/static/userscript.js
index 2aeb7b5f..11daa068 100644
--- a/couchpotato/core/plugins/userscript/static/userscript.js
+++ b/couchpotato/core/plugins/userscript/static/userscript.js
@@ -34,7 +34,7 @@ Page.Userscript = new Class({
if(json.error)
self.frame.set('html', json.error);
else {
- var item = new Block.Search.Item(json.movie);
+ var item = new Block.Search.MovieItem(json.movie);
self.frame.adopt(item);
item.showOptions();
}
diff --git a/couchpotato/core/providers/automation/__init__.py b/couchpotato/core/providers/automation/__init__.py
index a217948a..93f6c10a 100644
--- a/couchpotato/core/providers/automation/__init__.py
+++ b/couchpotato/core/providers/automation/__init__.py
@@ -1,4 +1,4 @@
-config = {
+config = [{
'name': 'automation_providers',
'groups': [
{
@@ -18,4 +18,4 @@ config = {
'options': [],
},
],
-}
+}]
diff --git a/couchpotato/core/providers/automation/bluray/__init__.py b/couchpotato/core/providers/automation/bluray/__init__.py
index e0675247..ed270056 100644
--- a/couchpotato/core/providers/automation/bluray/__init__.py
+++ b/couchpotato/core/providers/automation/bluray/__init__.py
@@ -18,6 +18,13 @@ config = [{
'default': False,
'type': 'enabler',
},
+ {
+ 'name': 'backlog',
+ 'advanced': True,
+ 'description': 'Parses the history until the minimum movie year is reached. (Will be disabled once it has completed)',
+ 'default': False,
+ 'type': 'bool',
+ },
],
},
],
diff --git a/couchpotato/core/providers/automation/bluray/main.py b/couchpotato/core/providers/automation/bluray/main.py
index 235a1e5f..d98557ec 100644
--- a/couchpotato/core/providers/automation/bluray/main.py
+++ b/couchpotato/core/providers/automation/bluray/main.py
@@ -1,3 +1,4 @@
+from bs4 import BeautifulSoup
from couchpotato.core.helpers.rss import RSS
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
@@ -10,11 +11,49 @@ class Bluray(Automation, RSS):
interval = 1800
rss_url = 'http://www.blu-ray.com/rss/newreleasesfeed.xml'
+ backlog_url = 'http://www.blu-ray.com/movies/movies.php?show=newreleases&page=%s'
def getIMDBids(self):
movies = []
+ if self.conf('backlog'):
+
+ page = 0
+ while True:
+ page = page + 1
+
+ url = self.backlog_url % page
+ data = self.getHTMLData(url)
+ soup = BeautifulSoup(data)
+
+ try:
+ # Stop if the release year is before the minimal year
+ page_year = soup.body.find_all('center')[3].table.tr.find_all('td', recursive = False)[3].h3.get_text().split(', ')[1]
+ if tryInt(page_year) < self.getMinimal('year'):
+ break
+
+ for table in soup.body.find_all('center')[3].table.tr.find_all('td', recursive = False)[3].find_all('table')[1:20]:
+ name = table.h3.get_text().lower().split('blu-ray')[0].strip()
+ year = table.small.get_text().split('|')[1].strip()
+
+ if not name.find('/') == -1: # make sure it is not a double movie release
+ continue
+
+ if tryInt(year) < self.getMinimal('year'):
+ continue
+
+ imdb = self.search(name, year)
+
+ if imdb:
+ if self.isMinimalMovie(imdb):
+ movies.append(imdb['imdb'])
+ except:
+ log.debug('Error loading page: %s', page)
+ break
+
+ self.conf('backlog', value = False)
+
rss_movies = self.getRSSData(self.rss_url)
for movie in rss_movies:
diff --git a/couchpotato/core/providers/automation/flixster/__init__.py b/couchpotato/core/providers/automation/flixster/__init__.py
new file mode 100644
index 00000000..1c6c4590
--- /dev/null
+++ b/couchpotato/core/providers/automation/flixster/__init__.py
@@ -0,0 +1,34 @@
+from .main import Flixster
+
+def start():
+ return Flixster()
+
+config = [{
+ 'name': 'flixster',
+ 'groups': [
+ {
+ 'tab': 'automation',
+ 'list': 'watchlist_providers',
+ 'name': 'flixster_automation',
+ 'label': 'Flixster',
+ 'description': 'Import movies from any public Flixster watchlist',
+ 'options': [
+ {
+ 'name': 'automation_enabled',
+ 'default': False,
+ 'type': 'enabler',
+ },
+ {
+ 'name': 'automation_ids_use',
+ 'label': 'Use',
+ },
+ {
+ 'name': 'automation_ids',
+ 'label': 'User ID',
+ 'type': 'combined',
+ 'combine': ['automation_ids_use', 'automation_ids'],
+ },
+ ],
+ },
+ ],
+}]
diff --git a/couchpotato/core/providers/automation/flixster/main.py b/couchpotato/core/providers/automation/flixster/main.py
new file mode 100644
index 00000000..7fd2f717
--- /dev/null
+++ b/couchpotato/core/providers/automation/flixster/main.py
@@ -0,0 +1,47 @@
+from couchpotato.core.helpers.variable import tryInt, splitString
+from couchpotato.core.logger import CPLog
+from couchpotato.core.providers.automation.base import Automation
+
+log = CPLog(__name__)
+
+
+class Flixster(Automation):
+
+ url = 'http://www.flixster.com/api/users/%s/movies/ratings?scoreTypes=wts'
+
+ interval = 60
+
+ def getIMDBids(self):
+
+ ids = splitString(self.conf('automation_ids'))
+
+ if len(ids) == 0:
+ return []
+
+ movies = []
+
+ for movie in self.getWatchlist():
+ imdb_id = self.search(movie.get('title'), movie.get('year'), imdb_only = True)
+ movies.append(imdb_id)
+
+ return movies
+
+ def getWatchlist(self):
+
+ enablers = [tryInt(x) for x in splitString(self.conf('automation_ids_use'))]
+ ids = splitString(self.conf('automation_ids'))
+
+ index = -1
+ movies = []
+ for user_id in ids:
+
+ index += 1
+ if not enablers[index]:
+ continue
+
+ data = self.getJsonData(self.url % user_id, decode_from = 'iso-8859-1')
+
+ for movie in data:
+ movies.append({'title': movie['movie']['title'], 'year': movie['movie']['year'] })
+
+ return movies
diff --git a/couchpotato/core/providers/automation/imdb/__init__.py b/couchpotato/core/providers/automation/imdb/__init__.py
index 546cba97..20e4f41b 100644
--- a/couchpotato/core/providers/automation/imdb/__init__.py
+++ b/couchpotato/core/providers/automation/imdb/__init__.py
@@ -55,7 +55,14 @@ config = [{
'label': 'TOP 250',
'description': 'IMDB TOP 250 chart',
'default': True,
- },
+ },
+ {
+ 'name': 'automation_charts_boxoffice',
+ 'type': 'bool',
+ 'label': 'Box offce TOP 10',
+ 'description': 'IMDB Box office TOP 10 chart',
+ 'default': True,
+ },
],
},
],
diff --git a/couchpotato/core/providers/automation/imdb/main.py b/couchpotato/core/providers/automation/imdb/main.py
index e9d14b5a..76afb24c 100644
--- a/couchpotato/core/providers/automation/imdb/main.py
+++ b/couchpotato/core/providers/automation/imdb/main.py
@@ -70,8 +70,11 @@ class IMDBAutomation(IMDBBase):
chart_urls = {
'theater': 'http://www.imdb.com/movies-in-theaters/',
'top250': 'http://www.imdb.com/chart/top',
+ 'boxoffice': 'http://www.imdb.com/chart/',
}
+ first_table = ['boxoffice']
+
def getIMDBids(self):
movies = []
@@ -84,6 +87,14 @@ class IMDBAutomation(IMDBBase):
try:
result_div = html.find('div', attrs = {'id': 'main'})
+
+ try:
+ if url in self.first_table:
+ table = result_div.find('table')
+ result_div = table if table else result_div
+ except:
+ pass
+
imdb_ids = getImdb(str(result_div), multiple = True)
for imdb_id in imdb_ids:
diff --git a/couchpotato/core/providers/automation/itunes/main.py b/couchpotato/core/providers/automation/itunes/main.py
index 8e352370..eb68e348 100644
--- a/couchpotato/core/providers/automation/itunes/main.py
+++ b/couchpotato/core/providers/automation/itunes/main.py
@@ -16,9 +16,6 @@ class ITunes(Automation, RSS):
def getIMDBids(self):
- if self.isDisabled():
- return
-
movies = []
enablers = [tryInt(x) for x in splitString(self.conf('automation_urls_use'))]
diff --git a/couchpotato/core/providers/base.py b/couchpotato/core/providers/base.py
index e6a9cb00..da27d853 100644
--- a/couchpotato/core/providers/base.py
+++ b/couchpotato/core/providers/base.py
@@ -15,7 +15,6 @@ import xml.etree.ElementTree as XMLTree
log = CPLog(__name__)
-
class MultiProvider(Plugin):
def __init__(self):
@@ -63,13 +62,17 @@ class Provider(Plugin):
return self.is_available.get(host, False)
- def getJsonData(self, url, **kwargs):
+ def getJsonData(self, url, decode_from = None, **kwargs):
cache_key = '%s%s' % (md5(url), md5('%s' % kwargs.get('params', {})))
data = self.getCache(cache_key, url, **kwargs)
if data:
try:
+ data = data.strip()
+ if decode_from:
+ data = data.decode(decode_from)
+
return json.loads(data)
except:
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
@@ -251,7 +254,10 @@ class YarrProvider(Provider):
if identifier in qualities:
return ids
- return [self.cat_backup_id]
+ if self.cat_backup_id:
+ return [self.cat_backup_id]
+
+ return []
class ResultList(list):
@@ -279,13 +285,23 @@ class ResultList(list):
new_result = self.fillResult(result)
- is_correct_movie = fireEvent('movie.searcher.correct_movie',
- nzb = new_result, movie = self.movie, quality = self.quality,
+ is_correct = fireEvent('searcher.correct_release', new_result, self.movie, self.quality,
imdb_results = self.kwargs.get('imdb_results', False), single = True)
- if is_correct_movie and new_result['id'] not in self.result_ids:
+ if is_correct and new_result['id'] not in self.result_ids:
+ is_correct_weight = float(is_correct)
+
new_result['score'] += fireEvent('score.calculate', new_result, self.movie, single = True)
+ old_score = new_result['score']
+ new_result['score'] = int(old_score * is_correct_weight)
+
+ log.info('Found correct release with weight %.02f, old_score(%d) now scaled to score(%d)', (
+ is_correct_weight,
+ old_score,
+ new_result['score']
+ ))
+
self.found(new_result)
self.result_ids.append(result['id'])
diff --git a/couchpotato/core/providers/info/_modifier/main.py b/couchpotato/core/providers/info/_modifier/main.py
index 835cce04..0bb2e6a4 100644
--- a/couchpotato/core/providers/info/_modifier/main.py
+++ b/couchpotato/core/providers/info/_modifier/main.py
@@ -32,9 +32,26 @@ class MovieResultModifier(Plugin):
}
def __init__(self):
+ addEvent('result.modify.info.search', self.returnByType)
addEvent('result.modify.movie.search', self.combineOnIMDB)
addEvent('result.modify.movie.info', self.checkLibrary)
+ def returnByType(self, results):
+
+ new_results = {}
+ for r in results:
+ type_name = r.get('type', 'movie') + 's'
+ if not new_results.has_key(type_name):
+ new_results[type_name] = []
+
+ new_results[type_name].append(r)
+
+ # Combine movies, needs a cleaner way..
+ if new_results.has_key('movies'):
+ new_results['movies'] = self.combineOnIMDB(new_results['movies'])
+
+ return new_results
+
def combineOnIMDB(self, results):
temp = {}
diff --git a/couchpotato/core/providers/info/couchpotatoapi/main.py b/couchpotato/core/providers/info/couchpotatoapi/main.py
index ef7db1f9..4dd942e0 100644
--- a/couchpotato/core/providers/info/couchpotatoapi/main.py
+++ b/couchpotato/core/providers/info/couchpotatoapi/main.py
@@ -3,6 +3,7 @@ from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.info.base import MovieProvider
from couchpotato.environment import Env
+import base64
import time
log = CPLog(__name__)
@@ -11,6 +12,7 @@ log = CPLog(__name__)
class CouchPotatoApi(MovieProvider):
urls = {
+ 'validate': 'https://api.couchpota.to/validate/%s/',
'search': 'https://api.couchpota.to/search/%s/',
'info': 'https://api.couchpota.to/info/%s/',
'is_movie': 'https://api.couchpota.to/ismovie/%s/',
@@ -24,11 +26,14 @@ class CouchPotatoApi(MovieProvider):
def __init__(self):
addEvent('movie.info', self.getInfo, priority = 1)
+ addEvent('info.search', self.search, priority = 1)
addEvent('movie.search', self.search, priority = 1)
addEvent('movie.release_date', self.getReleaseDate)
addEvent('movie.suggest', self.getSuggestions)
addEvent('movie.is_movie', self.isMovie)
+ addEvent('release.validate', self.validate)
+
addEvent('cp.source_url', self.getSourceUrl)
addEvent('cp.messages', self.getMessages)
@@ -50,6 +55,14 @@ class CouchPotatoApi(MovieProvider):
def search(self, q, limit = 5):
return self.getJsonData(self.urls['search'] % tryUrlencode(q) + ('?limit=%s' % limit), headers = self.getRequestHeaders())
+ def validate(self, name = None):
+
+ if not name:
+ return
+
+ name_enc = base64.b64encode(name)
+ return self.getJsonData(self.urls['validate'] % name_enc, headers = self.getRequestHeaders())
+
def isMovie(self, identifier = None):
if not identifier:
diff --git a/couchpotato/core/providers/info/omdbapi/main.py b/couchpotato/core/providers/info/omdbapi/main.py
index 87bb0a73..47374f47 100755
--- a/couchpotato/core/providers/info/omdbapi/main.py
+++ b/couchpotato/core/providers/info/omdbapi/main.py
@@ -20,6 +20,7 @@ class OMDBAPI(MovieProvider):
http_time_between_calls = 0
def __init__(self):
+ addEvent('info.search', self.search)
addEvent('movie.search', self.search)
addEvent('movie.info', self.getInfo)
@@ -84,6 +85,7 @@ class OMDBAPI(MovieProvider):
year = tryInt(movie.get('Year', ''))
movie_data = {
+ 'type': 'movie',
'via_imdb': True,
'titles': [movie.get('Title')] if movie.get('Title') else [],
'original_title': movie.get('Title'),
diff --git a/couchpotato/core/providers/info/themoviedb/main.py b/couchpotato/core/providers/info/themoviedb/main.py
index 376ddad0..a7901351 100644
--- a/couchpotato/core/providers/info/themoviedb/main.py
+++ b/couchpotato/core/providers/info/themoviedb/main.py
@@ -11,6 +11,7 @@ log = CPLog(__name__)
class TheMovieDb(MovieProvider):
def __init__(self):
+ addEvent('info.search', self.search, priority = 2)
addEvent('movie.search', self.search, priority = 2)
addEvent('movie.info', self.getInfo, priority = 2)
addEvent('movie.info_by_tmdb', self.getInfo)
@@ -103,6 +104,7 @@ class TheMovieDb(MovieProvider):
year = None
movie_data = {
+ 'type': 'movie',
'via_tmdb': True,
'tmdb_id': movie.id,
'titles': [toUnicode(movie.title)],
@@ -119,6 +121,7 @@ class TheMovieDb(MovieProvider):
'year': year,
'plot': movie.overview,
'genres': genres,
+ 'collection': getattr(movie.collection, 'name', None),
}
movie_data = dict((k, v) for k, v in movie_data.iteritems() if v)
diff --git a/couchpotato/core/providers/metadata/xbmc/main.py b/couchpotato/core/providers/metadata/xbmc/main.py
index e865e2d4..7073363d 100644
--- a/couchpotato/core/providers/metadata/xbmc/main.py
+++ b/couchpotato/core/providers/metadata/xbmc/main.py
@@ -104,6 +104,13 @@ class XBMC(MetaDataBase):
writers = SubElement(nfoxml, 'credits')
writers.text = toUnicode(writer)
+ # Sets or collections
+ collection_name = movie_info.get('collection')
+ if collection_name:
+ collection = SubElement(nfoxml, 'set')
+ collection.text = toUnicode(collection_name)
+ sorttitle = SubElement(nfoxml, 'sorttitle')
+ sorttitle.text = '%s %s' % (toUnicode(collection_name), movie_info.get('year'))
# Clean up the xml and return it
nfoxml = xml.dom.minidom.parseString(tostring(nfoxml))
diff --git a/couchpotato/core/providers/nzb/__init__.py b/couchpotato/core/providers/nzb/__init__.py
index 36098bb3..88d9865d 100644
--- a/couchpotato/core/providers/nzb/__init__.py
+++ b/couchpotato/core/providers/nzb/__init__.py
@@ -1,4 +1,4 @@
-config = {
+config = [{
'name': 'nzb_providers',
'groups': [
{
@@ -11,4 +11,4 @@ config = {
'options': [],
},
],
-}
+}]
diff --git a/couchpotato/core/providers/nzb/binsearch/main.py b/couchpotato/core/providers/nzb/binsearch/main.py
index 770ed50a..db0fb5b8 100644
--- a/couchpotato/core/providers/nzb/binsearch/main.py
+++ b/couchpotato/core/providers/nzb/binsearch/main.py
@@ -65,7 +65,7 @@ class BinSearch(NZBProvider):
total = tryInt(parts.group('total'))
parts = tryInt(parts.group('parts'))
- if (total / parts) < 0.95 or ((total / parts) >= 0.95 and not 'par2' in info.text.lower()):
+ if (total / parts) < 0.95 or ((total / parts) >= 0.95 and not ('par2' in info.text.lower() or 'pa3' in info.text.lower())):
log.info2('Wrong: \'%s\', not complete: %s out of %s', (item['name'], parts, total))
return False
diff --git a/couchpotato/core/providers/nzb/newznab/main.py b/couchpotato/core/providers/nzb/newznab/main.py
index 02ffcfdc..bd1b6c32 100644
--- a/couchpotato/core/providers/nzb/newznab/main.py
+++ b/couchpotato/core/providers/nzb/newznab/main.py
@@ -1,4 +1,4 @@
-from couchpotato.core.helpers.encoding import tryUrlencode
+from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode
from couchpotato.core.helpers.rss import RSS
from couchpotato.core.helpers.variable import cleanHost, splitString, tryInt
from couchpotato.core.logger import CPLog
@@ -83,7 +83,7 @@ class Newznab(NZBProvider, RSS):
results.append({
'id': nzb_id,
'provider_extra': urlparse(host['host']).hostname or host['host'],
- 'name': name,
+ 'name': toUnicode(name),
'name_extra': name_extra,
'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
'size': int(self.getElement(nzb, 'enclosure').attrib['length']) / 1024 / 1024,
diff --git a/couchpotato/core/providers/nzb/omgwtfnzbs/main.py b/couchpotato/core/providers/nzb/omgwtfnzbs/main.py
index 0a18b8f4..8cc4a3eb 100644
--- a/couchpotato/core/providers/nzb/omgwtfnzbs/main.py
+++ b/couchpotato/core/providers/nzb/omgwtfnzbs/main.py
@@ -14,7 +14,8 @@ log = CPLog(__name__)
class OMGWTFNZBs(NZBProvider, RSS):
urls = {
- 'search': 'http://rss.omgwtfnzbs.org/rss-search.php?%s',
+ 'search': 'https://rss.omgwtfnzbs.org/rss-search.php?%s',
+ 'detail_url': 'https://omgwtfnzbs.org/details.php?id=%s',
}
http_time_between_calls = 1 #seconds
@@ -49,13 +50,14 @@ class OMGWTFNZBs(NZBProvider, RSS):
for nzb in nzbs:
enclosure = self.getElement(nzb, 'enclosure').attrib
+ nzb_id = parse_qs(urlparse(self.getTextElement(nzb, 'link')).query).get('id')[0]
results.append({
- 'id': parse_qs(urlparse(self.getTextElement(nzb, 'link')).query).get('id')[0],
+ 'id': nzb_id,
'name': toUnicode(self.getTextElement(nzb, 'title')),
'age': self.calculateAge(int(time.mktime(parse(self.getTextElement(nzb, 'pubDate')).timetuple()))),
'size': tryInt(enclosure['length']) / 1024 / 1024,
'url': enclosure['url'],
- 'detail_url': self.getTextElement(nzb, 'link'),
+ 'detail_url': self.urls['detail_url'] % nzb_id,
'description': self.getTextElement(nzb, 'description')
})
diff --git a/couchpotato/core/providers/torrent/__init__.py b/couchpotato/core/providers/torrent/__init__.py
index 250bcead..12dda708 100644
--- a/couchpotato/core/providers/torrent/__init__.py
+++ b/couchpotato/core/providers/torrent/__init__.py
@@ -1,4 +1,4 @@
-config = {
+config = [{
'name': 'torrent_providers',
'groups': [
{
@@ -11,4 +11,4 @@ config = {
'options': [],
},
],
-}
+}]
diff --git a/couchpotato/core/providers/torrent/base.py b/couchpotato/core/providers/torrent/base.py
index 3e7ddde8..c16e6c52 100644
--- a/couchpotato/core/providers/torrent/base.py
+++ b/couchpotato/core/providers/torrent/base.py
@@ -1,6 +1,8 @@
-from couchpotato.core.helpers.variable import getImdb, md5
+from couchpotato.core.helpers.variable import getImdb, md5, cleanHost
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.base import YarrProvider
+from couchpotato.environment import Env
+import time
log = CPLog(__name__)
@@ -9,6 +11,9 @@ class TorrentProvider(YarrProvider):
protocol = 'torrent'
+ proxy_domain = None
+ proxy_list = []
+
def imdbMatch(self, url, imdbId):
if getImdb(url) == imdbId:
return True
@@ -25,6 +30,42 @@ class TorrentProvider(YarrProvider):
return False
+ def getDomain(self, url = ''):
+
+ forced_domain = self.conf('domain')
+ if forced_domain:
+ return cleanHost(forced_domain).rstrip('/') + url
+
+ if not self.proxy_domain:
+ for proxy in self.proxy_list:
+
+ prop_name = 'proxy.%s' % proxy
+ last_check = float(Env.prop(prop_name, default = 0))
+ if last_check > time.time() - 1209600:
+ continue
+
+ data = ''
+ try:
+ data = self.urlopen(proxy, timeout = 3, show_error = False)
+ except:
+ log.debug('Failed %s proxy %s', (self.getName(), proxy))
+
+ if self.correctProxy(data):
+ log.debug('Using proxy for %s: %s', (self.getName(), proxy))
+ self.proxy_domain = proxy
+ break
+
+ Env.prop(prop_name, time.time())
+
+ if not self.proxy_domain:
+ log.error('No %s proxies left, please add one in settings, or let us know which one to add on the forum.', self.getName())
+ return None
+
+ return cleanHost(self.proxy_domain).rstrip('/') + url
+
+ def correctProxy(self):
+ return True
+
class TorrentMagnetProvider(TorrentProvider):
protocol = 'torrent_magnet'
diff --git a/couchpotato/core/providers/torrent/scenehd/__init__.py b/couchpotato/core/providers/torrent/bithdtv/__init__.py
similarity index 87%
rename from couchpotato/core/providers/torrent/scenehd/__init__.py
rename to couchpotato/core/providers/torrent/bithdtv/__init__.py
index c0a82ae7..8c6f97a0 100644
--- a/couchpotato/core/providers/torrent/scenehd/__init__.py
+++ b/couchpotato/core/providers/torrent/bithdtv/__init__.py
@@ -1,16 +1,16 @@
-from .main import SceneHD
+from .main import BiTHDTV
def start():
- return SceneHD()
+ return BiTHDTV()
config = [{
- 'name': 'scenehd',
+ 'name': 'bithdtv',
'groups': [
{
'tab': 'searcher',
'list': 'torrent_providers',
- 'name': 'SceneHD',
- 'description': 'See SceneHD',
+ 'name': 'BiT-HDTV',
+ 'description': 'See BiT-HDTV',
'wizard': True,
'options': [
{
@@ -46,7 +46,7 @@ config = [{
'advanced': True,
'label': 'Extra Score',
'type': 'int',
- 'default': 0,
+ 'default': 20,
'description': 'Starting score for each release found via this provider.',
}
],
diff --git a/couchpotato/core/providers/torrent/bithdtv/main.py b/couchpotato/core/providers/torrent/bithdtv/main.py
new file mode 100644
index 00000000..2cacff3d
--- /dev/null
+++ b/couchpotato/core/providers/torrent/bithdtv/main.py
@@ -0,0 +1,88 @@
+from bs4 import BeautifulSoup
+from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode
+from couchpotato.core.helpers.variable import tryInt
+from couchpotato.core.logger import CPLog
+from couchpotato.core.providers.torrent.base import TorrentProvider
+import traceback
+
+log = CPLog(__name__)
+
+class BiTHDTV(TorrentProvider):
+
+ urls = {
+ 'test' : 'http://www.bit-hdtv.com/',
+ 'login' : 'http://www.bit-hdtv.com/takelogin.php',
+ 'login_check': 'http://www.bit-hdtv.com/messages.php',
+ 'detail' : 'http://www.bit-hdtv.com/details.php?id=%s',
+ 'search' : 'http://www.bit-hdtv.com/torrents.php?',
+ }
+
+ # Searches for movies only - BiT-HDTV's subcategory and resolution search filters appear to be broken
+ cat_id_movies = 7
+
+ http_time_between_calls = 1 #seconds
+
+ def _searchOnTitle(self, title, movie, quality, results):
+
+ arguments = tryUrlencode({
+ 'search': '%s %s' % (title.replace(':', ''), movie['library']['year']),
+ 'cat': self.cat_id_movies
+ })
+
+ url = "%s&%s" % (self.urls['search'], arguments)
+
+ data = self.getHTMLData(url, opener = self.login_opener)
+
+ if data:
+ # Remove BiT-HDTV's output garbage so outdated BS4 versions successfully parse the HTML
+ split_data = data.partition('-->')
+ if '## SELECT COUNT(' in split_data[0]:
+ data = split_data[2]
+
+ html = BeautifulSoup(data)
+
+ try:
+ result_table = html.find('table', attrs = {'width' : '750', 'class' : ''})
+ if result_table is None:
+ return
+
+ entries = result_table.find_all('tr')
+ for result in entries[1:]:
+
+ cells = result.find_all('td')
+ link = cells[2].find('a')
+ torrent_id = link['href'].replace('/details.php?id=', '')
+
+ results.append({
+ 'id': torrent_id,
+ 'name': link.contents[0].get_text(),
+ 'url': cells[0].find('a')['href'],
+ 'detail_url': self.urls['detail'] % torrent_id,
+ 'size': self.parseSize(cells[6].get_text()),
+ 'seeders': tryInt(cells[8].string),
+ 'leechers': tryInt(cells[9].string),
+ 'get_more_info': self.getMoreInfo,
+ })
+
+ except:
+ log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
+
+ def getLoginParams(self):
+ return tryUrlencode({
+ 'username': self.conf('username'),
+ 'password': self.conf('password'),
+ })
+
+ def getMoreInfo(self, item):
+ full_description = self.getCache('bithdtv.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
+ html = BeautifulSoup(full_description)
+ nfo_pre = html.find('table', attrs = {'class':'detail'})
+ description = toUnicode(nfo_pre.text) if nfo_pre else ''
+
+ item['description'] = description
+ return item
+
+ def loginSuccess(self, output):
+ return 'logout.php' in output.lower()
+
+ loginCheckSuccess = loginSuccess
diff --git a/couchpotato/core/providers/torrent/ilovetorrents/__init__.py b/couchpotato/core/providers/torrent/ilovetorrents/__init__.py
new file mode 100644
index 00000000..c6702d7f
--- /dev/null
+++ b/couchpotato/core/providers/torrent/ilovetorrents/__init__.py
@@ -0,0 +1,60 @@
+from main import ILoveTorrents
+
+def start():
+ return ILoveTorrents()
+
+config = [{
+ 'name': 'ilovetorrents',
+ 'groups': [
+ {
+ 'tab': 'searcher',
+ 'list': 'torrent_providers',
+ 'name': 'ILoveTorrents',
+ 'description': 'Where the Love of Torrents is Born',
+ 'wizard': True,
+ 'options': [
+ {
+ 'name': 'enabled',
+ 'type': 'enabler',
+ 'default': False
+ },
+ {
+ 'name': 'username',
+ 'label': 'Username',
+ 'type': 'string',
+ 'default': '',
+ 'description': 'The user name for your ILT account',
+ },
+ {
+ 'name': 'password',
+ 'label': 'Password',
+ 'type': 'password',
+ 'default': '',
+ 'description': 'The password for your ILT account.',
+ },
+ {
+ 'name': 'seed_ratio',
+ 'label': 'Seed ratio',
+ 'type': 'float',
+ 'default': 1,
+ 'description': 'Will not be (re)moved until this seed ratio is met.',
+ },
+ {
+ 'name': 'seed_time',
+ 'label': 'Seed time',
+ 'type': 'int',
+ 'default': 40,
+ 'description': 'Will not be (re)moved until this seed time (in hours) is met.',
+ },
+ {
+ 'name': 'extra_score',
+ 'advanced': True,
+ 'label': 'Extra Score',
+ 'type': 'int',
+ 'default': 0,
+ 'description': 'Starting score for each release found via this provider.',
+ }
+ ],
+ }
+ ]
+}]
diff --git a/couchpotato/core/providers/torrent/ilovetorrents/main.py b/couchpotato/core/providers/torrent/ilovetorrents/main.py
new file mode 100644
index 00000000..8c060ec3
--- /dev/null
+++ b/couchpotato/core/providers/torrent/ilovetorrents/main.py
@@ -0,0 +1,128 @@
+from bs4 import BeautifulSoup
+from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
+from couchpotato.core.helpers.variable import tryInt
+from couchpotato.core.logger import CPLog
+from couchpotato.core.providers.torrent.base import TorrentProvider
+import re
+import traceback
+
+log = CPLog(__name__)
+
+
+class ILoveTorrents(TorrentProvider):
+
+ urls = {
+ 'download': 'http://www.ilovetorrents.me/%s',
+ 'detail': 'http://www.ilovetorrents.me/%s',
+ 'search': 'http://www.ilovetorrents.me/browse.php?search=%s&page=%s&cat=%s',
+ 'test' : 'http://www.ilovetorrents.me/',
+ 'login' : 'http://www.ilovetorrents.me/takelogin.php',
+ 'login_check' : 'http://www.ilovetorrents.me'
+ }
+
+ cat_ids = [
+ (['41'], ['720p', '1080p', 'brrip']),
+ (['19'], ['cam', 'ts', 'dvdrip', 'tc', 'r5', 'scr']),
+ (['20'], ['dvdr'])
+ ]
+
+ cat_backup_id = 200
+ disable_provider = False
+ http_time_between_calls = 1
+
+ def _searchOnTitle(self, title, movie, quality, results):
+
+ page = 0
+ total_pages = 1
+ cats = self.getCatId(quality['identifier'])
+
+ while page < total_pages:
+
+ movieTitle = tryUrlencode('"%s" %s' % (title, movie['library']['year']))
+ search_url = self.urls['search'] % (movieTitle, page, cats[0])
+ page += 1
+
+ data = self.getHTMLData(search_url, opener = self.login_opener)
+ if data:
+ try:
+ soup = BeautifulSoup(data)
+
+ results_table = soup.find('table', attrs = {'class': 'koptekst'})
+ if not results_table:
+ return
+
+ try:
+ pagelinks = soup.findAll(href = re.compile('page'))
+ pageNumbers = [int(re.search('page=(?P.+'')', i['href']).group('pageNumber')) for i in pagelinks]
+ total_pages = max(pageNumbers)
+
+ except:
+ pass
+
+ entries = results_table.find_all('tr')
+
+ for result in entries[1:]:
+ prelink = result.find(href = re.compile('details.php'))
+ link = prelink['href']
+ download = result.find('a', href = re.compile('download.php'))['href']
+
+ if link and download:
+
+ def extra_score(item):
+ trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
+ vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
+ confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
+ moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]
+
+ return confirmed + trusted + vip + moderated
+
+ id = re.search('id=(?P\d+)&', link).group('id')
+ url = self.urls['download'] % (download)
+
+ fileSize = self.parseSize(result.select('td.rowhead')[5].text)
+ results.append({
+ 'id': id,
+ 'name': toUnicode(prelink.find('b').text),
+ 'url': url,
+ 'detail_url': self.urls['detail'] % link,
+ 'size': fileSize,
+ 'seeders': tryInt(result.find_all('td')[2].string),
+ 'leechers': tryInt(result.find_all('td')[3].string),
+ 'extra_score': extra_score,
+ 'get_more_info': self.getMoreInfo
+ })
+
+ except:
+ log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
+
+ def getLoginParams(self):
+ return tryUrlencode({
+ 'username': self.conf('username'),
+ 'password': self.conf('password'),
+ 'submit': 'Welcome to ILT',
+ })
+
+ def getMoreInfo(self, item):
+ cache_key = 'ilt.%s' % item['id']
+ description = self.getCache(cache_key)
+
+ if not description:
+
+ try:
+ full_description = self.getHTMLData(item['detail_url'], opener = self.login_opener)
+ html = BeautifulSoup(full_description)
+ nfo_pre = html.find('td', attrs = {'class':'main'}).findAll('table')[1]
+ description = toUnicode(nfo_pre.text) if nfo_pre else ''
+ except:
+ log.error('Failed getting more info for %s', item['name'])
+ description = ''
+
+ self.setCache(cache_key, description, timeout = 25920000)
+
+ item['description'] = description
+ return item
+
+ def loginSuccess(self, output):
+ return 'logout.php' in output.lower()
+
+ loginCheckSuccess = loginSuccess
diff --git a/couchpotato/core/providers/torrent/kickasstorrents/__init__.py b/couchpotato/core/providers/torrent/kickasstorrents/__init__.py
index b095a97d..0b79c81a 100644
--- a/couchpotato/core/providers/torrent/kickasstorrents/__init__.py
+++ b/couchpotato/core/providers/torrent/kickasstorrents/__init__.py
@@ -18,6 +18,12 @@ config = [{
'type': 'enabler',
'default': True,
},
+ {
+ 'name': 'domain',
+ 'advanced': True,
+ 'label': 'Proxy server',
+ 'description': 'Domain for requests, keep empty to let CouchPotato pick.',
+ },
{
'name': 'seed_ratio',
'label': 'Seed ratio',
diff --git a/couchpotato/core/providers/torrent/kickasstorrents/main.py b/couchpotato/core/providers/torrent/kickasstorrents/main.py
index b85aadc7..50f14ce2 100644
--- a/couchpotato/core/providers/torrent/kickasstorrents/main.py
+++ b/couchpotato/core/providers/torrent/kickasstorrents/main.py
@@ -11,9 +11,8 @@ log = CPLog(__name__)
class KickAssTorrents(TorrentMagnetProvider):
urls = {
- 'test': 'https://kickass.to/',
- 'detail': 'https://kickass.to/%s',
- 'search': 'https://kickass.to/%s-i%s/',
+ 'detail': '%s/%s',
+ 'search': '%s/%s-i%s/',
}
cat_ids = [
@@ -28,9 +27,16 @@ class KickAssTorrents(TorrentMagnetProvider):
http_time_between_calls = 1 #seconds
cat_backup_id = None
+ proxy_list = [
+ 'https://kickass.to',
+ 'http://kickass.pw',
+ 'http://www.kickassunblock.info',
+ 'http://www.kickassproxy.info',
+ ]
+
def _search(self, movie, quality, results):
- data = self.getHTMLData(self.urls['search'] % ('m', movie['library']['identifier'].replace('tt', '')))
+ data = self.getHTMLData(self.urls['search'] % (self.getDomain(), 'm', movie['library']['identifier'].replace('tt', '')))
if data:
@@ -41,7 +47,7 @@ class KickAssTorrents(TorrentMagnetProvider):
html = BeautifulSoup(data)
resultdiv = html.find('div', attrs = {'class':'tabs'})
for result in resultdiv.find_all('div', recursive = False):
- if result.get('id').lower() not in cat_ids:
+ if result.get('id').lower().strip('tab-') not in cat_ids:
continue
try:
@@ -56,12 +62,12 @@ class KickAssTorrents(TorrentMagnetProvider):
column_name = table_order[nr]
if column_name:
- if column_name is 'name':
+ if column_name == 'name':
link = td.find('div', {'class': 'torrentname'}).find_all('a')[1]
new['id'] = temp.get('id')[-8:]
new['name'] = link.text
new['url'] = td.find('a', 'imagnet')['href']
- new['detail_url'] = self.urls['detail'] % link['href'][1:]
+ new['detail_url'] = self.urls['detail'] % (self.getDomain(), link['href'][1:])
new['score'] = 20 if td.find('a', 'iverif') else 0
elif column_name is 'size':
new['size'] = self.parseSize(td.text)
@@ -100,3 +106,10 @@ class KickAssTorrents(TorrentMagnetProvider):
age += tryInt(nr) * mult
return tryInt(age)
+
+
+ def isEnabled(self):
+ return super(KickAssTorrents, self).isEnabled() and self.getDomain()
+
+ def correctProxy(self, data):
+ return 'search query' in data.lower()
diff --git a/couchpotato/core/providers/torrent/scenehd/main.py b/couchpotato/core/providers/torrent/scenehd/main.py
deleted file mode 100644
index 2b76e43d..00000000
--- a/couchpotato/core/providers/torrent/scenehd/main.py
+++ /dev/null
@@ -1,79 +0,0 @@
-from bs4 import BeautifulSoup
-from couchpotato.core.helpers.encoding import simplifyString, tryUrlencode
-from couchpotato.core.helpers.variable import tryInt
-from couchpotato.core.logger import CPLog
-from couchpotato.core.providers.torrent.base import TorrentProvider
-import traceback
-
-log = CPLog(__name__)
-
-
-class SceneHD(TorrentProvider):
-
- urls = {
- 'test': 'https://scenehd.org/',
- 'login' : 'https://scenehd.org/takelogin.php',
- 'login_check': 'https://scenehd.org/my.php',
- 'detail': 'https://scenehd.org/details.php?id=%s',
- 'search': 'https://scenehd.org/browse.php?ajax',
- 'download': 'https://scenehd.org/download.php?id=%s',
- }
-
- http_time_between_calls = 1 #seconds
-
- def _searchOnTitle(self, title, movie, quality, results):
-
- q = '"%s %s"' % (simplifyString(title), movie['library']['year'])
- arguments = tryUrlencode({
- 'search': q,
- })
- url = "%s&%s" % (self.urls['search'], arguments)
-
- data = self.getHTMLData(url, opener = self.login_opener)
-
- if data:
- html = BeautifulSoup(data)
-
- try:
- resultsTable = html.find_all('table')[6]
- entries = resultsTable.find_all('tr')
- for result in entries[1:]:
-
- all_cells = result.find_all('td')
-
- detail_link = all_cells[2].find('a')
- details = detail_link['href']
- torrent_id = details.replace('details.php?id=', '')
-
- leechers = all_cells[11].find('a')
- if leechers:
- leechers = leechers.string
- else:
- leechers = all_cells[11].string
-
- results.append({
- 'id': torrent_id,
- 'name': detail_link['title'],
- 'size': self.parseSize(all_cells[7].string),
- 'seeders': tryInt(all_cells[10].find('a').string),
- 'leechers': tryInt(leechers),
- 'url': self.urls['download'] % torrent_id,
- 'description': all_cells[1].find('a')['href'],
- })
-
- except:
- log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
-
-
- def getLoginParams(self):
- return tryUrlencode({
- 'username': self.conf('username'),
- 'password': self.conf('password'),
- 'ssl': 'yes',
- })
-
- def loginSuccess(self, output):
- return 'logout.php' in output.lower()
-
- loginCheckSuccess = loginSuccess
-
diff --git a/couchpotato/core/providers/torrent/thepiratebay/__init__.py b/couchpotato/core/providers/torrent/thepiratebay/__init__.py
index 83de7a94..8cf9f86c 100644
--- a/couchpotato/core/providers/torrent/thepiratebay/__init__.py
+++ b/couchpotato/core/providers/torrent/thepiratebay/__init__.py
@@ -16,7 +16,7 @@ config = [{
{
'name': 'enabled',
'type': 'enabler',
- 'default': True
+ 'default': False
},
{
'name': 'domain',
diff --git a/couchpotato/core/providers/torrent/thepiratebay/main.py b/couchpotato/core/providers/torrent/thepiratebay/main.py
index 6aa22167..b967d5f0 100644
--- a/couchpotato/core/providers/torrent/thepiratebay/main.py
+++ b/couchpotato/core/providers/torrent/thepiratebay/main.py
@@ -1,11 +1,9 @@
from bs4 import BeautifulSoup
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode
-from couchpotato.core.helpers.variable import tryInt, cleanHost
+from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.torrent.base import TorrentMagnetProvider
-from couchpotato.environment import Env
import re
-import time
import traceback
log = CPLog(__name__)
@@ -30,8 +28,8 @@ class ThePirateBay(TorrentMagnetProvider):
http_time_between_calls = 0
proxy_list = [
- 'https://thepiratebay.se',
'https://tpb.ipredator.se',
+ 'https://thepiratebay.se',
'https://depiraatbaai.be',
'https://piratereverse.info',
'https://tpb.pirateparty.org.uk',
@@ -43,10 +41,6 @@ class ThePirateBay(TorrentMagnetProvider):
'https://kuiken.co',
]
- def __init__(self):
- self.domain = self.conf('domain')
- super(ThePirateBay, self).__init__()
-
def _searchOnTitle(self, title, movie, quality, results):
page = 0
@@ -108,38 +102,11 @@ class ThePirateBay(TorrentMagnetProvider):
except:
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
-
def isEnabled(self):
return super(ThePirateBay, self).isEnabled() and self.getDomain()
- def getDomain(self, url = ''):
-
- if not self.domain:
- for proxy in self.proxy_list:
-
- prop_name = 'tpb_proxy.%s' % proxy
- last_check = float(Env.prop(prop_name, default = 0))
- if last_check > time.time() - 1209600:
- continue
-
- data = ''
- try:
- data = self.urlopen(proxy, timeout = 3, show_error = False)
- except:
- log.debug('Failed tpb proxy %s', proxy)
-
- if 'title="Pirate Search"' in data:
- log.debug('Using proxy: %s', proxy)
- self.domain = proxy
- break
-
- Env.prop(prop_name, time.time())
-
- if not self.domain:
- log.error('No TPB proxies left, please add one in settings, or let us know which one to add on the forum.')
- return None
-
- return cleanHost(self.domain).rstrip('/') + url
+ def correctProxy(self, data):
+ return 'title="Pirate Search"' in data
def getMoreInfo(self, item):
full_description = self.getCache('tpb.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
diff --git a/couchpotato/core/providers/torrent/torrentshack/main.py b/couchpotato/core/providers/torrent/torrentshack/main.py
index 353b606e..6b3b5548 100644
--- a/couchpotato/core/providers/torrent/torrentshack/main.py
+++ b/couchpotato/core/providers/torrent/torrentshack/main.py
@@ -15,7 +15,7 @@ class TorrentShack(TorrentProvider):
'login' : 'https://torrentshack.net/login.php',
'login_check': 'https://torrentshack.net/inbox.php',
'detail' : 'https://torrentshack.net/torrent/%s',
- 'search' : 'https://torrentshack.net/torrents.php?searchstr=%s&filter_cat[%d]=1',
+ 'search' : 'https://torrentshack.net/torrents.php?action=advanced&searchstr=%s&scene=%s&filter_cat[%d]=1',
'download' : 'https://torrentshack.net/%s',
}
@@ -31,7 +31,9 @@ class TorrentShack(TorrentProvider):
def _searchOnTitle(self, title, movie, quality, results):
- url = self.urls['search'] % (tryUrlencode('"%s" %s' % (title.replace(':', ''), movie['library']['year'])), self.getCatId(quality['identifier'])[0])
+ scene_only = '1' if self.conf('scene_only') else ''
+
+ url = self.urls['search'] % (tryUrlencode('%s %s' % (title.replace(':', ''), movie['library']['year'])), scene_only, self.getCatId(quality['identifier'])[0])
data = self.getHTMLData(url, opener = self.login_opener)
if data:
@@ -49,22 +51,15 @@ class TorrentShack(TorrentProvider):
link = result.find('span', attrs = {'class' : 'torrent_name_link'}).parent
url = result.find('td', attrs = {'class' : 'torrent_td'}).find('a')
- extra_info = ''
- if result.find('span', attrs = {'class' : 'torrent_extra_info'}):
- extra_info = result.find('span', attrs = {'class' : 'torrent_extra_info'}).text
-
- if not self.conf('scene_only') or extra_info != '[NotScene]':
- results.append({
- 'id': link['href'].replace('torrents.php?torrentid=', ''),
- 'name': unicode(link.span.string).translate({ord(u'\xad'): None}),
- 'url': self.urls['download'] % url['href'],
- 'detail_url': self.urls['download'] % link['href'],
- 'size': self.parseSize(result.find_all('td')[4].string),
- 'seeders': tryInt(result.find_all('td')[6].string),
- 'leechers': tryInt(result.find_all('td')[7].string),
- })
- else:
- log.info('Not adding release %s [NotScene]' % unicode(link.span.string).translate({ord(u'\xad'): None}))
+ results.append({
+ 'id': link['href'].replace('torrents.php?torrentid=', ''),
+ 'name': unicode(link.span.string).translate({ord(u'\xad'): None}),
+ 'url': self.urls['download'] % url['href'],
+ 'detail_url': self.urls['download'] % link['href'],
+ 'size': self.parseSize(result.find_all('td')[4].string),
+ 'seeders': tryInt(result.find_all('td')[6].string),
+ 'leechers': tryInt(result.find_all('td')[7].string),
+ })
except:
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
diff --git a/couchpotato/core/providers/torrent/yify/main.py b/couchpotato/core/providers/torrent/yify/main.py
index 4c059463..60b2f9b1 100644
--- a/couchpotato/core/providers/torrent/yify/main.py
+++ b/couchpotato/core/providers/torrent/yify/main.py
@@ -23,9 +23,9 @@ class Yify(TorrentProvider):
return super(Yify, self).search(movie, quality)
- def _searchOnTitle(self, title, movie, quality, results):
+ def _search(self, movie, quality, results):
- data = self.getJsonData(self.urls['search'] % (title, quality['identifier']))
+ data = self.getJsonData(self.urls['search'] % (movie['library']['identifier'], quality['identifier']))
if data and data.get('MovieList'):
try:
diff --git a/couchpotato/core/providers/userscript/flickchart/__init__.py b/couchpotato/core/providers/userscript/flickchart/__init__.py
new file mode 100644
index 00000000..89d45d9c
--- /dev/null
+++ b/couchpotato/core/providers/userscript/flickchart/__init__.py
@@ -0,0 +1,6 @@
+from .main import Flickchart
+
+def start():
+ return Flickchart()
+
+config = []
diff --git a/couchpotato/core/providers/userscript/flickchart/main.py b/couchpotato/core/providers/userscript/flickchart/main.py
new file mode 100644
index 00000000..a66bd38f
--- /dev/null
+++ b/couchpotato/core/providers/userscript/flickchart/main.py
@@ -0,0 +1,30 @@
+from couchpotato.core.event import fireEvent
+from couchpotato.core.logger import CPLog
+from couchpotato.core.providers.userscript.base import UserscriptBase
+import traceback
+
+log = CPLog(__name__)
+
+
+class Flickchart(UserscriptBase):
+
+ includes = ['http://www.flickchart.com/movie/*']
+
+ def getMovie(self, url):
+
+ try:
+ data = self.getUrl(url)
+ except:
+ return
+
+ try:
+ start = data.find('')
+ end = data.find('', start)
+ page_title = data[start + len(''):end].strip().split('-')
+
+ year_name = fireEvent('scanner.name_year', page_title[0], single = True)
+
+ return self.search(**year_name)
+ except:
+ log.error('Failed parsing page for title and year: %s', traceback.format_exc())
+
diff --git a/couchpotato/core/settings/model.py b/couchpotato/core/settings/model.py
index f39544bc..8601c2b4 100644
--- a/couchpotato/core/settings/model.py
+++ b/couchpotato/core/settings/model.py
@@ -78,6 +78,7 @@ class Movie(Entity):
such as trailers, nfo, thumbnails"""
last_edit = Field(Integer, default = lambda: int(time.time()), index = True)
+ type = 'movie' # Compat tv branch
library = ManyToOne('Library', cascade = 'delete, delete-orphan', single_parent = True)
status = ManyToOne('Status')
@@ -86,6 +87,7 @@ class Movie(Entity):
releases = OneToMany('Release', cascade = 'all, delete-orphan')
files = ManyToMany('File', cascade = 'all, delete-orphan', single_parent = True)
+Media = Movie # Compat tv branch
class Library(Entity):
""""""
diff --git a/couchpotato/static/scripts/couchpotato.js b/couchpotato/static/scripts/couchpotato.js
index dcd0f7bd..59fac34b 100644
--- a/couchpotato/static/scripts/couchpotato.js
+++ b/couchpotato/static/scripts/couchpotato.js
@@ -32,7 +32,7 @@
self.c.addEvent('click:relay(a[href^=http])', self.openDerefered.bind(self));
// Check if device is touchenabled
- self.touch_device = 'ontouchstart' in document.documentElement;
+ self.touch_device = 'ontouchstart' in window || navigator.msMaxTouchPoints;
if(self.touch_device)
self.c.addClass('touch_enabled');
diff --git a/couchpotato/static/scripts/page/about.js b/couchpotato/static/scripts/page/about.js
index 3efa3933..d0439bdd 100644
--- a/couchpotato/static/scripts/page/about.js
+++ b/couchpotato/static/scripts/page/about.js
@@ -106,7 +106,7 @@ var AboutSettingTab = new Class({
new Element('div.donate', {
'html':
'Or support me via:' +
- ''
+ ''
})
);
diff --git a/couchpotato/static/scripts/page/home.js b/couchpotato/static/scripts/page/home.js
index b93db5bd..9967ff41 100644
--- a/couchpotato/static/scripts/page/home.js
+++ b/couchpotato/static/scripts/page/home.js
@@ -52,7 +52,7 @@ Page.Home = new Class({
})
),
'filter': {
- 'release_status': 'snatched,available'
+ 'release_status': 'snatched,seeding,missing,available,downloaded'
},
'limit': null,
'onLoaded': function(){
diff --git a/couchpotato/static/style/settings.css b/couchpotato/static/style/settings.css
index 61d5239f..744531a9 100644
--- a/couchpotato/static/style/settings.css
+++ b/couchpotato/static/style/settings.css
@@ -542,7 +542,7 @@
line-height: 140%;
cursor: help;
}
- .page .combined_table .head abbr.use, .page .combined_table .head abbr.automation_urls_use {
+ .page .combined_table .head abbr:first-child {
display: none;
}
.page .combined_table .head abbr.host {
diff --git a/init/ubuntu b/init/ubuntu
old mode 100644
new mode 100755
index 7f770a67..1d2eb57a
--- a/init/ubuntu
+++ b/init/ubuntu
@@ -20,6 +20,8 @@ else
echo "/etc/default/couchpotato not found using default settings.";
fi
+. /lib/lsb/init-functions
+
# Script name
NAME=couchpotato
diff --git a/libs/importlib/__init__.py b/libs/importlib/__init__.py
new file mode 100644
index 00000000..ad31a1ac
--- /dev/null
+++ b/libs/importlib/__init__.py
@@ -0,0 +1,38 @@
+"""Backport of importlib.import_module from 3.x."""
+# While not critical (and in no way guaranteed!), it would be nice to keep this
+# code compatible with Python 2.3.
+import sys
+
+def _resolve_name(name, package, level):
+ """Return the absolute name of the module to be imported."""
+ if not hasattr(package, 'rindex'):
+ raise ValueError("'package' not set to a string")
+ dot = len(package)
+ for x in xrange(level, 1, -1):
+ try:
+ dot = package.rindex('.', 0, dot)
+ except ValueError:
+ raise ValueError("attempted relative import beyond top-level "
+ "package")
+ return "%s.%s" % (package[:dot], name)
+
+
+def import_module(name, package=None):
+ """Import a module.
+
+ The 'package' argument is required when performing a relative import. It
+ specifies the package to use as the anchor point from which to resolve the
+ relative import to an absolute import.
+
+ """
+ if name.startswith('.'):
+ if not package:
+ raise TypeError("relative imports require the 'package' argument")
+ level = 0
+ for character in name:
+ if character != '.':
+ break
+ level += 1
+ name = _resolve_name(name[level:], package, level)
+ __import__(name)
+ return sys.modules[name]
diff --git a/libs/rtorrent/__init__.py b/libs/rtorrent/__init__.py
index d19c78b4..683ef1c7 100755
--- a/libs/rtorrent/__init__.py
+++ b/libs/rtorrent/__init__.py
@@ -17,18 +17,21 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+import urllib
+import os.path
+import time
+import xmlrpclib
from rtorrent.common import find_torrent, \
is_valid_port, convert_version_tuple_to_str
from rtorrent.lib.torrentparser import TorrentParser
from rtorrent.lib.xmlrpc.http import HTTPServerProxy
-from rtorrent.rpc import Method, BasicAuthTransport
+from rtorrent.lib.xmlrpc.scgi import SCGIServerProxy
+from rtorrent.rpc import Method
+from rtorrent.lib.xmlrpc.basic_auth import BasicAuthTransport
from rtorrent.torrent import Torrent
from rtorrent.group import Group
-import os.path
import rtorrent.rpc # @UnresolvedImport
-import time
-import xmlrpclib
__version__ = "0.2.9"
__author__ = "Chris Lucas"
@@ -43,13 +46,25 @@ class RTorrent:
""" Create a new rTorrent connection """
rpc_prefix = None
- def __init__(self, url, username=None, password=None,
- verify=False, sp=HTTPServerProxy, sp_kwargs={}):
- self.url = url # : From X{__init__(self, url)}
+ def __init__(self, uri, username=None, password=None,
+ verify=False, sp=None, sp_kwargs=None):
+ self.uri = uri # : From X{__init__(self, url)}
+
self.username = username
self.password = password
- self.sp = sp
- self.sp_kwargs = sp_kwargs
+
+ self.schema = urllib.splittype(uri)[0]
+
+ if sp:
+ self.sp = sp
+ elif self.schema in ['http', 'https']:
+ self.sp = HTTPServerProxy
+ elif self.schema == 'scgi':
+ self.sp = SCGIServerProxy
+ else:
+ raise NotImplementedError()
+
+ self.sp_kwargs = sp_kwargs or {}
self.torrents = [] # : List of L{Torrent} instances
self._rpc_methods = [] # : List of rTorrent RPC methods
@@ -62,21 +77,23 @@ class RTorrent:
def _get_conn(self):
"""Get ServerProxy instance"""
if self.username is not None and self.password is not None:
+ if self.schema == 'scgi':
+ raise NotImplementedError()
+
return self.sp(
- self.url,
+ self.uri,
transport=BasicAuthTransport(self.username, self.password),
**self.sp_kwargs
)
- return self.sp(self.url, **self.sp_kwargs)
+
+ return self.sp(self.uri, **self.sp_kwargs)
def _verify_conn(self):
# check for rpc methods that should be available
- assert {"system.client_version",
- "system.library_version"}.issubset(set(self._get_rpc_methods())),\
- "Required RPC methods not available."
+ assert "system.client_version" in self._get_rpc_methods(), "Required RPC method not available."
+ assert "system.library_version" in self._get_rpc_methods(), "Required RPC method not available."
# minimum rTorrent version check
-
assert self._meets_version_requirement() is True,\
"Error: Minimum rTorrent version required is {0}".format(
MIN_RTORRENT_VERSION_STR)
@@ -98,6 +115,11 @@ class RTorrent:
return self._client_version_tuple
+ def _update_rpc_methods(self):
+ self._rpc_methods = self._get_conn().system.listMethods()
+
+ return self._rpc_methods
+
def _get_rpc_methods(self):
""" Get list of raw RPC commands
@@ -105,10 +127,7 @@ class RTorrent:
@rtype: list
"""
- if self._rpc_methods == []:
- self._rpc_methods = self._get_conn().system.listMethods()
-
- return(self._rpc_methods)
+ return(self._rpc_methods or self._update_rpc_methods())
def get_torrents(self, view="main"):
"""Get list of all torrents in specified view
@@ -300,6 +319,8 @@ class RTorrent:
assert view is not None, "view parameter required on non-persistent groups"
p.group.insert('', name, view)
+ self._update_rpc_methods()
+
def get_group(self, name):
assert name is not None, "group name required"
diff --git a/libs/rtorrent/lib/torrentparser.py b/libs/rtorrent/lib/torrentparser.py
index 19dd12aa..30170d32 100755
--- a/libs/rtorrent/lib/torrentparser.py
+++ b/libs/rtorrent/lib/torrentparser.py
@@ -90,9 +90,10 @@ class TorrentParser():
def _calc_info_hash(self):
self.info_hash = None
if "info" in self._torrent_decoded.keys():
- info_dict = self._torrent_decoded["info"]
- self.info_hash = hashlib.sha1(bencode.encode(
- info_dict)).hexdigest().upper()
+ info_encoded = bencode.encode(self._torrent_decoded["info"])
+
+ if info_encoded:
+ self.info_hash = hashlib.sha1(info_encoded).hexdigest().upper()
return(self.info_hash)
diff --git a/libs/rtorrent/lib/xmlrpc/basic_auth.py b/libs/rtorrent/lib/xmlrpc/basic_auth.py
new file mode 100644
index 00000000..20c02d9a
--- /dev/null
+++ b/libs/rtorrent/lib/xmlrpc/basic_auth.py
@@ -0,0 +1,73 @@
+#
+# Copyright (c) 2013 Dean Gardiner,
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from base64 import encodestring
+import string
+import xmlrpclib
+
+
+class BasicAuthTransport(xmlrpclib.Transport):
+ def __init__(self, username=None, password=None):
+ xmlrpclib.Transport.__init__(self)
+
+ self.username = username
+ self.password = password
+
+ def send_auth(self, h):
+ if self.username is not None and self.password is not None:
+ h.putheader('AUTHORIZATION', "Basic %s" % string.replace(
+ encodestring("%s:%s" % (self.username, self.password)),
+ "\012", ""
+ ))
+
+ def single_request(self, host, handler, request_body, verbose=0):
+ # issue XML-RPC request
+
+ h = self.make_connection(host)
+ if verbose:
+ h.set_debuglevel(1)
+
+ try:
+ self.send_request(h, handler, request_body)
+ self.send_host(h, host)
+ self.send_user_agent(h)
+ self.send_auth(h)
+ self.send_content(h, request_body)
+
+ response = h.getresponse(buffering=True)
+ if response.status == 200:
+ self.verbose = verbose
+ return self.parse_response(response)
+ except xmlrpclib.Fault:
+ raise
+ except Exception:
+ self.close()
+ raise
+
+ #discard any response data and raise exception
+ if response.getheader("content-length", 0):
+ response.read()
+ raise xmlrpclib.ProtocolError(
+ host + handler,
+ response.status, response.reason,
+ response.msg,
+ )
diff --git a/libs/rtorrent/lib/xmlrpc/scgi.py b/libs/rtorrent/lib/xmlrpc/scgi.py
new file mode 100644
index 00000000..5ba61fa5
--- /dev/null
+++ b/libs/rtorrent/lib/xmlrpc/scgi.py
@@ -0,0 +1,219 @@
+#!/usr/bin/python
+
+# rtorrent_xmlrpc
+# (c) 2011 Roger Que
+#
+# Modified portions:
+# (c) 2013 Dean Gardiner
+#
+# Python module for interacting with rtorrent's XML-RPC interface
+# directly over SCGI, instead of through an HTTP server intermediary.
+# Inspired by Glenn Washburn's xmlrpc2scgi.py [1], but subclasses the
+# built-in xmlrpclib classes so that it is compatible with features
+# such as MultiCall objects.
+#
+# [1] <http://libtorrent.rakshasa.no/wiki/UtilsXmlrpc2scgi>
+#
+# Usage: server = SCGIServerProxy('scgi://localhost:7000/')
+# server = SCGIServerProxy('scgi:///path/to/scgi.sock')
+# print server.system.listMethods()
+# mc = xmlrpclib.MultiCall(server)
+# mc.get_up_rate()
+# mc.get_down_rate()
+# print mc()
+#
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the
+# OpenSSL library under certain conditions as described in each
+# individual source file, and distribute linked combinations
+# including the two.
+#
+# You must obey the GNU General Public License in all respects for
+# all of the code used other than OpenSSL. If you modify file(s)
+# with this exception, you may extend this exception to your version
+# of the file(s), but you are not obligated to do so. If you do not
+# wish to do so, delete this exception statement from your version.
+# If you delete this exception statement from all source files in the
+# program, then also delete it here.
+#
+#
+#
+# Portions based on Python's xmlrpclib:
+#
+# Copyright (c) 1999-2002 by Secret Labs AB
+# Copyright (c) 1999-2002 by Fredrik Lundh
+#
+# By obtaining, using, and/or copying this software and/or its
+# associated documentation, you agree that you have read, understood,
+# and will comply with the following terms and conditions:
+#
+# Permission to use, copy, modify, and distribute this software and
+# its associated documentation for any purpose and without fee is
+# hereby granted, provided that the above copyright notice appears in
+# all copies, and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of
+# Secret Labs AB or the author not be used in advertising or publicity
+# pertaining to distribution of the software without specific, written
+# prior permission.
+#
+# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
+# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
+# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
+# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
+# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+# OF THIS SOFTWARE.
+
+import httplib
+import re
+import socket
+import urllib
+import xmlrpclib
+import errno
+
+
+class SCGITransport(xmlrpclib.Transport):
+ # Added request() from Python 2.7 xmlrpclib here to backport to Python 2.6
+ def request(self, host, handler, request_body, verbose=0):
+ #retry request once if cached connection has gone cold
+ for i in (0, 1):
+ try:
+ return self.single_request(host, handler, request_body, verbose)
+ except socket.error, e:
+ if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE):
+ raise
+ except httplib.BadStatusLine: #close after we sent request
+ if i:
+ raise
+
+ def single_request(self, host, handler, request_body, verbose=0):
+ # Add SCGI headers to the request.
+ headers = {'CONTENT_LENGTH': str(len(request_body)), 'SCGI': '1'}
+ header = '\x00'.join(('%s\x00%s' % item for item in headers.iteritems())) + '\x00'
+ header = '%d:%s' % (len(header), header)
+ request_body = '%s,%s' % (header, request_body)
+
+ sock = None
+
+ try:
+ if host:
+ host, port = urllib.splitport(host)
+ addrinfo = socket.getaddrinfo(host, int(port), socket.AF_INET,
+ socket.SOCK_STREAM)
+ sock = socket.socket(*addrinfo[0][:3])
+ sock.connect(addrinfo[0][4])
+ else:
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.connect(handler)
+
+ self.verbose = verbose
+
+ sock.send(request_body)
+ return self.parse_response(sock.makefile())
+ finally:
+ if sock:
+ sock.close()
+
+ def parse_response(self, response):
+ p, u = self.getparser()
+
+ response_body = ''
+ while True:
+ data = response.read(1024)
+ if not data:
+ break
+ response_body += data
+
+ # Remove SCGI headers from the response.
+ response_header, response_body = re.split(r'\n\s*?\n', response_body,
+ maxsplit=1)
+
+ if self.verbose:
+ print 'body:', repr(response_body)
+
+ p.feed(response_body)
+ p.close()
+
+ return u.close()
+
+
+class SCGIServerProxy(xmlrpclib.ServerProxy):
+ def __init__(self, uri, transport=None, encoding=None, verbose=False,
+ allow_none=False, use_datetime=False):
+ type, uri = urllib.splittype(uri)
+ if type not in ('scgi'):
+ raise IOError('unsupported XML-RPC protocol')
+ self.__host, self.__handler = urllib.splithost(uri)
+ if not self.__handler:
+ self.__handler = '/'
+
+ if transport is None:
+ transport = SCGITransport(use_datetime=use_datetime)
+ self.__transport = transport
+
+ self.__encoding = encoding
+ self.__verbose = verbose
+ self.__allow_none = allow_none
+
+ def __close(self):
+ self.__transport.close()
+
+ def __request(self, methodname, params):
+ # call a method on the remote server
+
+ request = xmlrpclib.dumps(params, methodname, encoding=self.__encoding,
+ allow_none=self.__allow_none)
+
+ response = self.__transport.request(
+ self.__host,
+ self.__handler,
+ request,
+ verbose=self.__verbose
+ )
+
+ if len(response) == 1:
+ response = response[0]
+
+ return response
+
+ def __repr__(self):
+ return (
+ "<SCGIServerProxy for %s%s>" %
+ (self.__host, self.__handler)
+ )
+
+ __str__ = __repr__
+
+ def __getattr__(self, name):
+ # magic method dispatcher
+ return xmlrpclib._Method(self.__request, name)
+
+ # note: to call a remote object with an non-standard name, use
+ # result getattr(server, "strange-python-name")(args)
+
+ def __call__(self, attr):
+ """A workaround to get special attributes on the ServerProxy
+ without interfering with the magic __getattr__
+ """
+ if attr == "close":
+ return self.__close
+ elif attr == "transport":
+ return self.__transport
+ raise AttributeError("Attribute %r not found" % (attr,))
diff --git a/libs/rtorrent/rpc/__init__.py b/libs/rtorrent/rpc/__init__.py
index 034f4eef..116ca1c2 100755
--- a/libs/rtorrent/rpc/__init__.py
+++ b/libs/rtorrent/rpc/__init__.py
@@ -17,66 +17,16 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-from base64 import encodestring
-import httplib
-import inspect
-import string
+import inspect
import rtorrent
import re
from rtorrent.common import bool_to_int, convert_version_tuple_to_str,\
safe_repr
-from rtorrent.err import RTorrentVersionError, MethodError
+from rtorrent.err import MethodError
from rtorrent.compat import xmlrpclib
-class BasicAuthTransport(xmlrpclib.Transport):
- def __init__(self, username=None, password=None):
- xmlrpclib.Transport.__init__(self)
- self.username = username
- self.password = password
-
- def send_auth(self, h):
- if self.username is not None and self.password is not None:
- h.putheader('AUTHORIZATION', "Basic %s" % string.replace(
- encodestring("%s:%s" % (self.username, self.password)),
- "\012", ""
- ))
-
- def single_request(self, host, handler, request_body, verbose=0):
- # issue XML-RPC request
-
- h = self.make_connection(host)
- if verbose:
- h.set_debuglevel(1)
-
- try:
- self.send_request(h, handler, request_body)
- self.send_host(h, host)
- self.send_user_agent(h)
- self.send_auth(h)
- self.send_content(h, request_body)
-
- response = h.getresponse(buffering=True)
- if response.status == 200:
- self.verbose = verbose
- return self.parse_response(response)
- except xmlrpclib.Fault:
- raise
- except Exception:
- self.close()
- raise
-
- #discard any response data and raise exception
- if (response.getheader("content-length", 0)):
- response.read()
- raise xmlrpclib.ProtocolError(
- host + handler,
- response.status, response.reason,
- response.msg,
- )
-
-
def get_varname(rpc_call):
"""Transform rpc method into variable name.
diff --git a/libs/rtorrent/torrent.py b/libs/rtorrent/torrent.py
index c610e368..bd6bb689 100755
--- a/libs/rtorrent/torrent.py
+++ b/libs/rtorrent/torrent.py
@@ -172,6 +172,17 @@ class Torrent:
self.directory = m.call()[-1]
+ def set_directory_base(self, d):
+ """Modify base download directory
+
+ @note: Needs to stop torrent in order to change the directory.
+ Also doesn't restart after directory is set, that must be called
+ separately.
+ """
+ m = rtorrent.rpc.Multicall(self)
+ self.multicall_add(m, "d.try_stop")
+ self.multicall_add(m, "d.set_directory_base", d)
+
def start(self):
"""Start the torrent"""
m = rtorrent.rpc.Multicall(self)
diff --git a/libs/synchronousdeluge/client.py b/libs/synchronousdeluge/client.py
index 98a80848..22419e80 100644
--- a/libs/synchronousdeluge/client.py
+++ b/libs/synchronousdeluge/client.py
@@ -1,4 +1,5 @@
import os
+import platform
from collections import defaultdict
from itertools import imap
@@ -23,22 +24,48 @@ class DelugeClient(object):
self._request_counter = 0
def _get_local_auth(self):
- xdg_config = os.path.expanduser(os.environ.get("XDG_CONFIG_HOME", "~/.config"))
- config_home = os.path.join(xdg_config, "deluge")
- auth_file = os.path.join(config_home, "auth")
-
+ auth_file = ""
username = password = ""
- with open(auth_file) as fd:
- for line in fd:
+ if platform.system() in ('Windows', 'Microsoft'):
+ appDataPath = os.environ.get("APPDATA")
+ if not appDataPath:
+ import _winreg
+ hkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders")
+ appDataReg = _winreg.QueryValueEx(hkey, "AppData")
+ appDataPath = appDataReg[0]
+ _winreg.CloseKey(hkey)
+
+ auth_file = os.path.join(appDataPath, "deluge", "auth")
+ else:
+ from xdg.BaseDirectory import save_config_path
+ try:
+ auth_file = os.path.join(save_config_path("deluge"), "auth")
+ except OSError, e:
+ return username, password
+
+
+ if os.path.exists(auth_file):
+ for line in open(auth_file):
if line.startswith("#"):
+ # This is a comment line
+ continue
+ line = line.strip()
+ try:
+ lsplit = line.split(":")
+ except Exception, e:
continue
- auth = line.split(":")
- if len(auth) >= 2 and auth[0] == "localclient":
- username, password = auth[0], auth[1]
- break
+ if len(lsplit) == 2:
+ username, password = lsplit
+ elif len(lsplit) == 3:
+ username, password, level = lsplit
+ else:
+ continue
- return username, password
+ if username == "localclient":
+ return (username, password)
+
+ return ("", "")
def _create_module_method(self, module, method):
fullname = "{0}.{1}".format(module, method)
diff --git a/libs/xmpp/__init__.py b/libs/xmpp/__init__.py
new file mode 100644
index 00000000..ad03b288
--- /dev/null
+++ b/libs/xmpp/__init__.py
@@ -0,0 +1,31 @@
+# $Id: __init__.py,v 1.9 2005/03/07 09:34:51 snakeru Exp $
+
+"""
+All features of xmpppy library contained within separate modules.
+At present there are modules:
+simplexml - XML handling routines
+protocol - jabber-objects (I.e. JID and different stanzas and sub-stanzas) handling routines.
+debug - Jacob Lundquist's debugging module. Very handy if you like colored debug.
+auth - Non-SASL and SASL stuff. You will need it to auth as a client or transport.
+transports - low level connection handling. TCP and TLS currently. HTTP support planned.
+roster - simple roster for use in clients.
+dispatcher - decision-making logic. Handles all hooks. The first who takes control over fresh stanzas.
+features - different stuff that didn't worths separating into modules
+browser - DISCO server framework. Allows to build dynamic disco tree.
+filetransfer - Currently contains only IBB stuff. Can be used for bot-to-bot transfers.
+
+Most of the classes that is defined in all these modules is an ancestors of
+class PlugIn so they share a single set of methods allowing you to compile
+a featured XMPP client. For every instance of PlugIn class the 'owner' is the class
+in what the plug was plugged. While plugging in such instance usually sets some
+methods of owner to it's own ones for easy access. All session specific info stored
+either in instance of PlugIn or in owner's instance. This is considered unhandy
+and there are plans to port 'Session' class from xmppd.py project for storing all
+session-related info. Though if you are not accessing instances variables directly
+and use only methods for access all values you should not have any problems.
+
+"""
+
+import simplexml,protocol,debug,auth,transports,roster,dispatcher,features,browser,filetransfer,commands
+from client import *
+from protocol import *
diff --git a/libs/xmpp/auth.py b/libs/xmpp/auth.py
new file mode 100644
index 00000000..6e51d72b
--- /dev/null
+++ b/libs/xmpp/auth.py
@@ -0,0 +1,326 @@
+## auth.py
+##
+## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: auth.py,v 1.41 2008/09/13 21:45:21 normanr Exp $
+
+"""
+Provides library with all Non-SASL and SASL authentication mechanisms.
+Can be used both for client and transport authentication.
+"""
+
+from protocol import *
+from client import PlugIn
+import sha,base64,random,dispatcher,re
+
+import md5
+def HH(some): return md5.new(some).hexdigest()
+def H(some): return md5.new(some).digest()
+def C(some): return ':'.join(some)
+
+class NonSASL(PlugIn):
+ """ Implements old Non-SASL (JEP-0078) authentication used in jabberd1.4 and transport authentication."""
+ def __init__(self,user,password,resource):
+ """ Caches username, password and resource for auth. """
+ PlugIn.__init__(self)
+ self.DBG_LINE='gen_auth'
+ self.user=user
+ self.password=password
+ self.resource=resource
+
+ def plugin(self,owner):
+ """ Determine the best auth method (digest/0k/plain) and use it for auth.
+ Returns used method name on success. Used internally. """
+ if not self.resource: return self.authComponent(owner)
+ self.DEBUG('Querying server about possible auth methods','start')
+ resp=owner.Dispatcher.SendAndWaitForResponse(Iq('get',NS_AUTH,payload=[Node('username',payload=[self.user])]))
+ if not isResultNode(resp):
+ self.DEBUG('No result node arrived! Aborting...','error')
+ return
+ iq=Iq(typ='set',node=resp)
+ query=iq.getTag('query')
+ query.setTagData('username',self.user)
+ query.setTagData('resource',self.resource)
+
+ if query.getTag('digest'):
+ self.DEBUG("Performing digest authentication",'ok')
+ query.setTagData('digest',sha.new(owner.Dispatcher.Stream._document_attrs['id']+self.password).hexdigest())
+ if query.getTag('password'): query.delChild('password')
+ method='digest'
+ elif query.getTag('token'):
+ token=query.getTagData('token')
+ seq=query.getTagData('sequence')
+ self.DEBUG("Performing zero-k authentication",'ok')
+ hash = sha.new(sha.new(self.password).hexdigest()+token).hexdigest()
+ for foo in xrange(int(seq)): hash = sha.new(hash).hexdigest()
+ query.setTagData('hash',hash)
+ method='0k'
+ else:
+ self.DEBUG("Sequre methods unsupported, performing plain text authentication",'warn')
+ query.setTagData('password',self.password)
+ method='plain'
+ resp=owner.Dispatcher.SendAndWaitForResponse(iq)
+ if isResultNode(resp):
+ self.DEBUG('Sucessfully authenticated with remove host.','ok')
+ owner.User=self.user
+ owner.Resource=self.resource
+ owner._registered_name=owner.User+'@'+owner.Server+'/'+owner.Resource
+ return method
+ self.DEBUG('Authentication failed!','error')
+
+ def authComponent(self,owner):
+ """ Authenticate component. Send handshake stanza and wait for result. Returns "ok" on success. """
+ self.handshake=0
+ owner.send(Node(NS_COMPONENT_ACCEPT+' handshake',payload=[sha.new(owner.Dispatcher.Stream._document_attrs['id']+self.password).hexdigest()]))
+ owner.RegisterHandler('handshake',self.handshakeHandler,xmlns=NS_COMPONENT_ACCEPT)
+ while not self.handshake:
+ self.DEBUG("waiting on handshake",'notify')
+ owner.Process(1)
+ owner._registered_name=self.user
+ if self.handshake+1: return 'ok'
+
+ def handshakeHandler(self,disp,stanza):
+ """ Handler for registering in dispatcher for accepting transport authentication. """
+ if stanza.getName()=='handshake': self.handshake=1
+ else: self.handshake=-1
+
+class SASL(PlugIn):
+ """ Implements SASL authentication. """
+ def __init__(self,username,password):
+ PlugIn.__init__(self)
+ self.username=username
+ self.password=password
+
+ def plugin(self,owner):
+ if not self._owner.Dispatcher.Stream._document_attrs.has_key('version'): self.startsasl='not-supported'
+ elif self._owner.Dispatcher.Stream.features:
+ try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
+ except NodeProcessed: pass
+ else: self.startsasl=None
+
+ def auth(self):
+ """ Start authentication. Result can be obtained via "SASL.startsasl" attribute and will be
+            either "success" or "failure". Note that successful auth will take at least
+ two Dispatcher.Process() calls. """
+ if self.startsasl: pass
+ elif self._owner.Dispatcher.Stream.features:
+ try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
+ except NodeProcessed: pass
+ else: self._owner.RegisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
+
+ def plugout(self):
+ """ Remove SASL handlers from owner's dispatcher. Used internally. """
+ if self._owner.__dict__.has_key('features'): self._owner.UnregisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
+ if self._owner.__dict__.has_key('challenge'): self._owner.UnregisterHandler('challenge',self.SASLHandler,xmlns=NS_SASL)
+ if self._owner.__dict__.has_key('failure'): self._owner.UnregisterHandler('failure',self.SASLHandler,xmlns=NS_SASL)
+ if self._owner.__dict__.has_key('success'): self._owner.UnregisterHandler('success',self.SASLHandler,xmlns=NS_SASL)
+
+ def FeaturesHandler(self,conn,feats):
+ """ Used to determine if server supports SASL auth. Used internally. """
+ if not feats.getTag('mechanisms',namespace=NS_SASL):
+ self.startsasl='not-supported'
+ self.DEBUG('SASL not supported by server','error')
+ return
+ mecs=[]
+ for mec in feats.getTag('mechanisms',namespace=NS_SASL).getTags('mechanism'):
+ mecs.append(mec.getData())
+ self._owner.RegisterHandler('challenge',self.SASLHandler,xmlns=NS_SASL)
+ self._owner.RegisterHandler('failure',self.SASLHandler,xmlns=NS_SASL)
+ self._owner.RegisterHandler('success',self.SASLHandler,xmlns=NS_SASL)
+ if "ANONYMOUS" in mecs and self.username == None:
+ node=Node('auth',attrs={'xmlns':NS_SASL,'mechanism':'ANONYMOUS'})
+ elif "DIGEST-MD5" in mecs:
+ node=Node('auth',attrs={'xmlns':NS_SASL,'mechanism':'DIGEST-MD5'})
+ elif "PLAIN" in mecs:
+ sasl_data='%s\x00%s\x00%s'%(self.username+'@'+self._owner.Server,self.username,self.password)
+ node=Node('auth',attrs={'xmlns':NS_SASL,'mechanism':'PLAIN'},payload=[base64.encodestring(sasl_data).replace('\r','').replace('\n','')])
+ else:
+ self.startsasl='failure'
+ self.DEBUG('I can only use DIGEST-MD5 and PLAIN mecanisms.','error')
+ return
+ self.startsasl='in-process'
+ self._owner.send(node.__str__())
+ raise NodeProcessed
+
+ def SASLHandler(self,conn,challenge):
+ """ Perform next SASL auth step. Used internally. """
+ if challenge.getNamespace()<>NS_SASL: return
+ if challenge.getName()=='failure':
+ self.startsasl='failure'
+ try: reason=challenge.getChildren()[0]
+ except: reason=challenge
+ self.DEBUG('Failed SASL authentification: %s'%reason,'error')
+ raise NodeProcessed
+ elif challenge.getName()=='success':
+ self.startsasl='success'
+ self.DEBUG('Successfully authenticated with remote server.','ok')
+ handlers=self._owner.Dispatcher.dumpHandlers()
+ self._owner.Dispatcher.PlugOut()
+ dispatcher.Dispatcher().PlugIn(self._owner)
+ self._owner.Dispatcher.restoreHandlers(handlers)
+ self._owner.User=self.username
+ raise NodeProcessed
+########################################3333
+ incoming_data=challenge.getData()
+ chal={}
+ data=base64.decodestring(incoming_data)
+ self.DEBUG('Got challenge:'+data,'ok')
+ for pair in re.findall('(\w+\s*=\s*(?:(?:"[^"]+")|(?:[^,]+)))',data):
+ key,value=[x.strip() for x in pair.split('=', 1)]
+ if value[:1]=='"' and value[-1:]=='"': value=value[1:-1]
+ chal[key]=value
+ if chal.has_key('qop') and 'auth' in [x.strip() for x in chal['qop'].split(',')]:
+ resp={}
+ resp['username']=self.username
+ resp['realm']=self._owner.Server
+ resp['nonce']=chal['nonce']
+ cnonce=''
+ for i in range(7):
+ cnonce+=hex(int(random.random()*65536*4096))[2:]
+ resp['cnonce']=cnonce
+ resp['nc']=('00000001')
+ resp['qop']='auth'
+ resp['digest-uri']='xmpp/'+self._owner.Server
+ A1=C([H(C([resp['username'],resp['realm'],self.password])),resp['nonce'],resp['cnonce']])
+ A2=C(['AUTHENTICATE',resp['digest-uri']])
+ response= HH(C([HH(A1),resp['nonce'],resp['nc'],resp['cnonce'],resp['qop'],HH(A2)]))
+ resp['response']=response
+ resp['charset']='utf-8'
+ sasl_data=''
+ for key in ['charset','username','realm','nonce','nc','cnonce','digest-uri','response','qop']:
+ if key in ['nc','qop','response','charset']: sasl_data+="%s=%s,"%(key,resp[key])
+ else: sasl_data+='%s="%s",'%(key,resp[key])
+########################################3333
+ node=Node('response',attrs={'xmlns':NS_SASL},payload=[base64.encodestring(sasl_data[:-1]).replace('\r','').replace('\n','')])
+ self._owner.send(node.__str__())
+ elif chal.has_key('rspauth'): self._owner.send(Node('response',attrs={'xmlns':NS_SASL}).__str__())
+ else:
+ self.startsasl='failure'
+ self.DEBUG('Failed SASL authentification: unknown challenge','error')
+ raise NodeProcessed
+
+class Bind(PlugIn):
+ """ Bind some JID to the current connection to allow router know of our location."""
+ def __init__(self):
+ PlugIn.__init__(self)
+ self.DBG_LINE='bind'
+ self.bound=None
+
+ def plugin(self,owner):
+ """ Start resource binding, if allowed at this time. Used internally. """
+ if self._owner.Dispatcher.Stream.features:
+ try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
+ except NodeProcessed: pass
+ else: self._owner.RegisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
+
+ def plugout(self):
+ """ Remove Bind handler from owner's dispatcher. Used internally. """
+ self._owner.UnregisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
+
+ def FeaturesHandler(self,conn,feats):
+ """ Determine if server supports resource binding and set some internal attributes accordingly. """
+ if not feats.getTag('bind',namespace=NS_BIND):
+ self.bound='failure'
+ self.DEBUG('Server does not requested binding.','error')
+ return
+ if feats.getTag('session',namespace=NS_SESSION): self.session=1
+ else: self.session=-1
+ self.bound=[]
+
+ def Bind(self,resource=None):
+ """ Perform binding. Use provided resource name or random (if not provided). """
+ while self.bound is None and self._owner.Process(1): pass
+ if resource: resource=[Node('resource',payload=[resource])]
+ else: resource=[]
+ resp=self._owner.SendAndWaitForResponse(Protocol('iq',typ='set',payload=[Node('bind',attrs={'xmlns':NS_BIND},payload=resource)]))
+ if isResultNode(resp):
+ self.bound.append(resp.getTag('bind').getTagData('jid'))
+ self.DEBUG('Successfully bound %s.'%self.bound[-1],'ok')
+ jid=JID(resp.getTag('bind').getTagData('jid'))
+ self._owner.User=jid.getNode()
+ self._owner.Resource=jid.getResource()
+ resp=self._owner.SendAndWaitForResponse(Protocol('iq',typ='set',payload=[Node('session',attrs={'xmlns':NS_SESSION})]))
+ if isResultNode(resp):
+ self.DEBUG('Successfully opened session.','ok')
+ self.session=1
+ return 'ok'
+ else:
+ self.DEBUG('Session open failed.','error')
+ self.session=0
+ elif resp: self.DEBUG('Binding failed: %s.'%resp.getTag('error'),'error')
+ else:
+ self.DEBUG('Binding failed: timeout expired.','error')
+ return ''
+
+class ComponentBind(PlugIn):
+ """ ComponentBind some JID to the current connection to allow router know of our location."""
+ def __init__(self, sasl):
+ PlugIn.__init__(self)
+ self.DBG_LINE='bind'
+ self.bound=None
+ self.needsUnregister=None
+ self.sasl = sasl
+
+ def plugin(self,owner):
+ """ Start resource binding, if allowed at this time. Used internally. """
+ if not self.sasl:
+ self.bound=[]
+ return
+ if self._owner.Dispatcher.Stream.features:
+ try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
+ except NodeProcessed: pass
+ else:
+ self._owner.RegisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
+ self.needsUnregister=1
+
+ def plugout(self):
+ """ Remove ComponentBind handler from owner's dispatcher. Used internally. """
+ if self.needsUnregister:
+ self._owner.UnregisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
+
+ def FeaturesHandler(self,conn,feats):
+ """ Determine if server supports resource binding and set some internal attributes accordingly. """
+ if not feats.getTag('bind',namespace=NS_BIND):
+ self.bound='failure'
+ self.DEBUG('Server does not requested binding.','error')
+ return
+ if feats.getTag('session',namespace=NS_SESSION): self.session=1
+ else: self.session=-1
+ self.bound=[]
+
+ def Bind(self,domain=None):
+        """ Perform binding. Use the provided domain name. """
+ while self.bound is None and self._owner.Process(1): pass
+ if self.sasl:
+ xmlns = NS_COMPONENT_1
+ else:
+ xmlns = None
+ self.bindresponse = None
+ ttl = dispatcher.DefaultTimeout
+ self._owner.RegisterHandler('bind',self.BindHandler,xmlns=xmlns)
+ self._owner.send(Protocol('bind',attrs={'name':domain},xmlns=NS_COMPONENT_1))
+ while self.bindresponse is None and self._owner.Process(1) and ttl > 0: ttl-=1
+ self._owner.UnregisterHandler('bind',self.BindHandler,xmlns=xmlns)
+ resp=self.bindresponse
+ if resp and resp.getAttr('error'):
+ self.DEBUG('Binding failed: %s.'%resp.getAttr('error'),'error')
+ elif resp:
+ self.DEBUG('Successfully bound.','ok')
+ return 'ok'
+ else:
+ self.DEBUG('Binding failed: timeout expired.','error')
+ return ''
+
+ def BindHandler(self,conn,bind):
+ self.bindresponse = bind
+ pass
diff --git a/libs/xmpp/browser.py b/libs/xmpp/browser.py
new file mode 100644
index 00000000..8848ea4e
--- /dev/null
+++ b/libs/xmpp/browser.py
@@ -0,0 +1,221 @@
+## browser.py
+##
+## Copyright (C) 2004 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: browser.py,v 1.12 2007/05/13 17:55:14 normanr Exp $
+
+"""Browser module provides DISCO server framework for your application.
+This functionality can be used for very different purposes - from publishing
+software version and supported features to building of "jabber site" that users
+can navigate with their disco browsers and interact with active content.
+
+Such functionality is achieved via registering "DISCO handlers" that are
+automatically called when user requests some node of your disco tree.
+"""
+
+from dispatcher import *
+from client import PlugIn
+
+class Browser(PlugIn):
+ """ WARNING! This class is for components only. It will not work in client mode!
+
+    Standard xmpppy class that is ancestor of PlugIn and can be attached
+ to your application.
+ All processing will be performed in the handlers registered in the browser
+ instance. You can register any number of handlers ensuring that for each
+ node/jid combination only one (or none) handler registered.
+ You can register static information or the fully-blown function that will
+ calculate the answer dynamically.
+ Example of static info (see JEP-0030, examples 13-14):
+ # cl - your xmpppy connection instance.
+ b=xmpp.browser.Browser()
+ b.PlugIn(cl)
+ items=[]
+ item={}
+ item['jid']='catalog.shakespeare.lit'
+ item['node']='books'
+ item['name']='Books by and about Shakespeare'
+ items.append(item)
+ item={}
+ item['jid']='catalog.shakespeare.lit'
+ item['node']='clothing'
+ item['name']='Wear your literary taste with pride'
+ items.append(item)
+ item={}
+ item['jid']='catalog.shakespeare.lit'
+ item['node']='music'
+ item['name']='Music from the time of Shakespeare'
+ items.append(item)
+ info={'ids':[], 'features':[]}
+ b.setDiscoHandler({'items':items,'info':info})
+
+ items should be a list of item elements.
+ every item element can have any of these four keys: 'jid', 'node', 'name', 'action'
+    info should be a dictionary and must have keys 'ids' and 'features'.
+ Both of them should be lists:
+ ids is a list of dictionaries and features is a list of text strings.
+ Example (see JEP-0030, examples 1-2)
+ # cl - your xmpppy connection instance.
+ b=xmpp.browser.Browser()
+ b.PlugIn(cl)
+ items=[]
+ ids=[]
+ ids.append({'category':'conference','type':'text','name':'Play-Specific Chatrooms'})
+ ids.append({'category':'directory','type':'chatroom','name':'Play-Specific Chatrooms'})
+ features=[NS_DISCO_INFO,NS_DISCO_ITEMS,NS_MUC,NS_REGISTER,NS_SEARCH,NS_TIME,NS_VERSION]
+ info={'ids':ids,'features':features}
+ # info['xdata']=xmpp.protocol.DataForm() # JEP-0128
+ b.setDiscoHandler({'items':[],'info':info})
+ """
+ def __init__(self):
+ """Initialises internal variables. Used internally."""
+ PlugIn.__init__(self)
+ DBG_LINE='browser'
+ self._exported_methods=[]
+ self._handlers={'':{}}
+
+ def plugin(self, owner):
+ """ Registers it's own iq handlers in your application dispatcher instance.
+ Used internally."""
+ owner.RegisterHandler('iq',self._DiscoveryHandler,typ='get',ns=NS_DISCO_INFO)
+ owner.RegisterHandler('iq',self._DiscoveryHandler,typ='get',ns=NS_DISCO_ITEMS)
+
+ def plugout(self):
+ """ Unregisters browser's iq handlers from your application dispatcher instance.
+ Used internally."""
+ self._owner.UnregisterHandler('iq',self._DiscoveryHandler,typ='get',ns=NS_DISCO_INFO)
+ self._owner.UnregisterHandler('iq',self._DiscoveryHandler,typ='get',ns=NS_DISCO_ITEMS)
+
+ def _traversePath(self,node,jid,set=0):
+ """ Returns dictionary and key or None,None
+ None - root node (w/o "node" attribute)
+ /a/b/c - node
+ /a/b/ - branch
+ Set returns '' or None as the key
+ get returns '' or None as the key or None as the dict.
+ Used internally."""
+ if self._handlers.has_key(jid): cur=self._handlers[jid]
+ elif set:
+ self._handlers[jid]={}
+ cur=self._handlers[jid]
+ else: cur=self._handlers['']
+ if node is None: node=[None]
+ else: node=node.replace('/',' /').split('/')
+ for i in node:
+ if i<>'' and cur.has_key(i): cur=cur[i]
+ elif set and i<>'': cur[i]={dict:cur,str:i}; cur=cur[i]
+ elif set or cur.has_key(''): return cur,''
+ else: return None,None
+ if cur.has_key(1) or set: return cur,1
+ raise "Corrupted data"
+
+ def setDiscoHandler(self,handler,node='',jid=''):
+ """ This is the main method that you will use in this class.
+ It is used to register supplied DISCO handler (or dictionary with static info)
+ as handler of some disco tree branch.
+ If you do not specify the node this handler will be used for all queried nodes.
+ If you do not specify the jid this handler will be used for all queried JIDs.
+
+ Usage:
+ cl.Browser.setDiscoHandler(someDict,node,jid)
+ or
+ cl.Browser.setDiscoHandler(someDISCOHandler,node,jid)
+ where
+
+ someDict={
+ 'items':[
+ {'jid':'jid1','action':'action1','node':'node1','name':'name1'},
+ {'jid':'jid2','action':'action2','node':'node2','name':'name2'},
+ {'jid':'jid3','node':'node3','name':'name3'},
+ {'jid':'jid4','node':'node4'}
+ ],
+ 'info' :{
+ 'ids':[
+ {'category':'category1','type':'type1','name':'name1'},
+ {'category':'category2','type':'type2','name':'name2'},
+ {'category':'category3','type':'type3','name':'name3'},
+ ],
+ 'features':['feature1','feature2','feature3','feature4'],
+ 'xdata':DataForm
+ }
+ }
+
+ and/or
+
+ def someDISCOHandler(session,request,TYR):
+ # if TYR=='items': # returns items list of the same format as shown above
+ # elif TYR=='info': # returns info dictionary of the same format as shown above
+ # else: # this case is impossible for now.
+ """
+ self.DEBUG('Registering handler %s for "%s" node->%s'%(handler,jid,node), 'info')
+ node,key=self._traversePath(node,jid,1)
+ node[key]=handler
+
+ def getDiscoHandler(self,node='',jid=''):
+ """ Returns the previously registered DISCO handler
+            that is responsible for this node/jid combination.
+ Used internally."""
+ node,key=self._traversePath(node,jid)
+ if node: return node[key]
+
+ def delDiscoHandler(self,node='',jid=''):
+        """ Unregisters DISCO handler that is responsible for this
+ node/jid combination. When handler is unregistered the branch
+ is handled in the same way that it's parent branch from this moment.
+ """
+ node,key=self._traversePath(node,jid)
+ if node:
+ handler=node[key]
+ del node[dict][node[str]]
+ return handler
+
+ def _DiscoveryHandler(self,conn,request):
+        """ Serves DISCO iq request from the remote client.
+ Automatically determines the best handler to use and calls it
+ to handle the request. Used internally.
+ """
+ node=request.getQuerynode()
+ if node:
+ nodestr=node
+ else:
+ nodestr='None'
+ handler=self.getDiscoHandler(node,request.getTo())
+ if not handler:
+ self.DEBUG("No Handler for request with jid->%s node->%s ns->%s"%(request.getTo().__str__().encode('utf8'),nodestr.encode('utf8'),request.getQueryNS().encode('utf8')),'error')
+ conn.send(Error(request,ERR_ITEM_NOT_FOUND))
+ raise NodeProcessed
+ self.DEBUG("Handling request with jid->%s node->%s ns->%s"%(request.getTo().__str__().encode('utf8'),nodestr.encode('utf8'),request.getQueryNS().encode('utf8')),'ok')
+ rep=request.buildReply('result')
+ if node: rep.setQuerynode(node)
+ q=rep.getTag('query')
+ if request.getQueryNS()==NS_DISCO_ITEMS:
+ # handler must return list: [{jid,action,node,name}]
+ if type(handler)==dict: lst=handler['items']
+ else: lst=handler(conn,request,'items')
+ if lst==None:
+ conn.send(Error(request,ERR_ITEM_NOT_FOUND))
+ raise NodeProcessed
+ for item in lst: q.addChild('item',item)
+ elif request.getQueryNS()==NS_DISCO_INFO:
+ if type(handler)==dict: dt=handler['info']
+ else: dt=handler(conn,request,'info')
+ if dt==None:
+ conn.send(Error(request,ERR_ITEM_NOT_FOUND))
+ raise NodeProcessed
+ # handler must return dictionary:
+ # {'ids':[{},{},{},{}], 'features':[fe,at,ur,es], 'xdata':DataForm}
+ for id in dt['ids']: q.addChild('identity',id)
+ for feature in dt['features']: q.addChild('feature',{'var':feature})
+ if dt.has_key('xdata'): q.addChild(node=dt['xdata'])
+ conn.send(rep)
+ raise NodeProcessed
diff --git a/libs/xmpp/client.py b/libs/xmpp/client.py
new file mode 100644
index 00000000..4d932119
--- /dev/null
+++ b/libs/xmpp/client.py
@@ -0,0 +1,325 @@
+## client.py
+##
+## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: client.py,v 1.61 2009/04/07 06:19:42 snakeru Exp $
+
+"""
+Provides PlugIn class functionality to develop extensions for xmpppy.
+Also provides Client and Component classes implementations as the
+examples of xmpppy structures usage.
+These classes can be used for simple applications "AS IS" though.
+"""
+
+import socket
+import debug
+Debug=debug
+Debug.DEBUGGING_IS_ON=1
+Debug.Debug.colors['socket']=debug.color_dark_gray
+Debug.Debug.colors['CONNECTproxy']=debug.color_dark_gray
+Debug.Debug.colors['nodebuilder']=debug.color_brown
+Debug.Debug.colors['client']=debug.color_cyan
+Debug.Debug.colors['component']=debug.color_cyan
+Debug.Debug.colors['dispatcher']=debug.color_green
+Debug.Debug.colors['browser']=debug.color_blue
+Debug.Debug.colors['auth']=debug.color_yellow
+Debug.Debug.colors['roster']=debug.color_magenta
+Debug.Debug.colors['ibb']=debug.color_yellow
+
+Debug.Debug.colors['down']=debug.color_brown
+Debug.Debug.colors['up']=debug.color_brown
+Debug.Debug.colors['data']=debug.color_brown
+Debug.Debug.colors['ok']=debug.color_green
+Debug.Debug.colors['warn']=debug.color_yellow
+Debug.Debug.colors['error']=debug.color_red
+Debug.Debug.colors['start']=debug.color_dark_gray
+Debug.Debug.colors['stop']=debug.color_dark_gray
+Debug.Debug.colors['sent']=debug.color_yellow
+Debug.Debug.colors['got']=debug.color_bright_cyan
+
+DBG_CLIENT='client'
+DBG_COMPONENT='component'
+
+class PlugIn:
+ """ Common xmpppy plugins infrastructure: plugging in/out, debugging. """
+ def __init__(self):
+ self._exported_methods=[]
+ self.DBG_LINE=self.__class__.__name__.lower()
+
+ def PlugIn(self,owner):
+ """ Attach to main instance and register ourself and all our staff in it. """
+ self._owner=owner
+ if self.DBG_LINE not in owner.debug_flags:
+ owner.debug_flags.append(self.DBG_LINE)
+ self.DEBUG('Plugging %s into %s'%(self,self._owner),'start')
+ if owner.__dict__.has_key(self.__class__.__name__):
+ return self.DEBUG('Plugging ignored: another instance already plugged.','error')
+ self._old_owners_methods=[]
+ for method in self._exported_methods:
+ if owner.__dict__.has_key(method.__name__):
+ self._old_owners_methods.append(owner.__dict__[method.__name__])
+ owner.__dict__[method.__name__]=method
+ owner.__dict__[self.__class__.__name__]=self
+ if self.__class__.__dict__.has_key('plugin'): return self.plugin(owner)
+
+ def PlugOut(self):
+ """ Unregister all our staff from main instance and detach from it. """
+ self.DEBUG('Plugging %s out of %s.'%(self,self._owner),'stop')
+ ret = None
+ if self.__class__.__dict__.has_key('plugout'): ret = self.plugout()
+ self._owner.debug_flags.remove(self.DBG_LINE)
+ for method in self._exported_methods: del self._owner.__dict__[method.__name__]
+ for method in self._old_owners_methods: self._owner.__dict__[method.__name__]=method
+ del self._owner.__dict__[self.__class__.__name__]
+ return ret
+
+ def DEBUG(self,text,severity='info'):
+ """ Feed a provided debug line to main instance's debug facility along with our ID string. """
+ self._owner.DEBUG(self.DBG_LINE,text,severity)
+
+import transports,dispatcher,auth,roster
+class CommonClient:
+ """ Base for Client and Component classes."""
+ def __init__(self,server,port=5222,debug=['always', 'nodebuilder']):
+ """ Caches server name and (optionally) port to connect to. "debug" parameter specifies
+            the debug IDs that will go into debug output. You can either specify an "include"
+ or "exclude" list. The latter is done via adding "always" pseudo-ID to the list.
+ Full list: ['nodebuilder', 'dispatcher', 'gen_auth', 'SASL_auth', 'bind', 'socket',
+ 'CONNECTproxy', 'TLS', 'roster', 'browser', 'ibb'] . """
+ if self.__class__.__name__=='Client': self.Namespace,self.DBG='jabber:client',DBG_CLIENT
+ elif self.__class__.__name__=='Component': self.Namespace,self.DBG=dispatcher.NS_COMPONENT_ACCEPT,DBG_COMPONENT
+ self.defaultNamespace=self.Namespace
+ self.disconnect_handlers=[]
+ self.Server=server
+ self.Port=port
+ if debug and type(debug)<>list: debug=['always', 'nodebuilder']
+ self._DEBUG=Debug.Debug(debug)
+ self.DEBUG=self._DEBUG.Show
+ self.debug_flags=self._DEBUG.debug_flags
+ self.debug_flags.append(self.DBG)
+ self._owner=self
+ self._registered_name=None
+ self.RegisterDisconnectHandler(self.DisconnectHandler)
+ self.connected=''
+ self._route=0
+
+ def RegisterDisconnectHandler(self,handler):
+ """ Register handler that will be called on disconnect."""
+ self.disconnect_handlers.append(handler)
+
+ def UnregisterDisconnectHandler(self,handler):
+ """ Unregister handler that is called on disconnect."""
+ self.disconnect_handlers.remove(handler)
+
+ def disconnected(self):
+ """ Called on disconnection. Calls disconnect handlers and cleans things up. """
+ self.connected=''
+ self.DEBUG(self.DBG,'Disconnect detected','stop')
+ self.disconnect_handlers.reverse()
+ for i in self.disconnect_handlers: i()
+ self.disconnect_handlers.reverse()
+ if self.__dict__.has_key('TLS'): self.TLS.PlugOut()
+
+ def DisconnectHandler(self):
+ """ Default disconnect handler. Just raises an IOError.
+            If you choose to use this class in your production client,
+ override this method or at least unregister it. """
+ raise IOError('Disconnected from server.')
+
+ def event(self,eventName,args={}):
+ """ Default event handler. To be overriden. """
+ print "Event: ",(eventName,args)
+
+ def isConnected(self):
+ """ Returns connection state. F.e.: None / 'tls' / 'tcp+non_sasl' . """
+ return self.connected
+
+ def reconnectAndReauth(self):
+ """ Example of reconnection method. In fact, it can be used to batch connection and auth as well. """
+ handlerssave=self.Dispatcher.dumpHandlers()
+ if self.__dict__.has_key('ComponentBind'): self.ComponentBind.PlugOut()
+ if self.__dict__.has_key('Bind'): self.Bind.PlugOut()
+ self._route=0
+ if self.__dict__.has_key('NonSASL'): self.NonSASL.PlugOut()
+ if self.__dict__.has_key('SASL'): self.SASL.PlugOut()
+ if self.__dict__.has_key('TLS'): self.TLS.PlugOut()
+ self.Dispatcher.PlugOut()
+ if self.__dict__.has_key('HTTPPROXYsocket'): self.HTTPPROXYsocket.PlugOut()
+ if self.__dict__.has_key('TCPsocket'): self.TCPsocket.PlugOut()
+ if not self.connect(server=self._Server,proxy=self._Proxy): return
+ if not self.auth(self._User,self._Password,self._Resource): return
+ self.Dispatcher.restoreHandlers(handlerssave)
+ return self.connected
+
+ def connect(self,server=None,proxy=None,ssl=None,use_srv=None):
+ """ Make a tcp/ip connection, protect it with tls/ssl if possible and start XMPP stream.
+ Returns None or 'tcp' or 'tls', depending on the result."""
+ if not server: server=(self.Server,self.Port)
+ if proxy: sock=transports.HTTPPROXYsocket(proxy,server,use_srv)
+ else: sock=transports.TCPsocket(server,use_srv)
+ connected=sock.PlugIn(self)
+ if not connected:
+ sock.PlugOut()
+ return
+ self._Server,self._Proxy=server,proxy
+ self.connected='tcp'
+ if (ssl is None and self.Connection.getPort() in (5223, 443)) or ssl:
+ try: # FIXME. This should be done in transports.py
+ transports.TLS().PlugIn(self,now=1)
+ self.connected='ssl'
+ except socket.sslerror:
+ return
+ dispatcher.Dispatcher().PlugIn(self)
+ while self.Dispatcher.Stream._document_attrs is None:
+ if not self.Process(1): return
+ if self.Dispatcher.Stream._document_attrs.has_key('version') and self.Dispatcher.Stream._document_attrs['version']=='1.0':
+ while not self.Dispatcher.Stream.features and self.Process(1): pass # If we get version 1.0 stream the features tag MUST BE presented
+ return self.connected
+
class Client(CommonClient):
    """ Example client class, based on CommonClient.
        Adds TLS negotiation, SASL/non-SASL authentication, roster retrieval
        and presence sending on top of the bare CommonClient stream handling. """
    def connect(self,server=None,proxy=None,secure=None,use_srv=True):
        """ Connect to jabber server. If you want to specify different ip/port to connect to you can
            pass it as tuple as first parameter. If there is HTTP proxy between you and server
            specify its address and credentials (if needed) in the second argument.
            If you want ssl/tls support to be discovered and enabled automatically - leave third argument as None. (ssl will be autodetected only if port is 5223 or 443)
            If you want to force SSL start (i.e. if port 5223 or 443 is remapped to some non-standard port) then set it to 1.
            If you want to disable tls/ssl support completely, set it to 0.
            Example: connect(('192.168.5.5',5222),{'host':'proxy.my.net','port':8080,'user':'me','password':'secret'})
            Returns '' or 'tcp' or 'tls', depending on the result."""
        # secure<>None and not secure means "TLS explicitly disabled by caller".
        if not CommonClient.connect(self,server,proxy,secure,use_srv) or secure<>None and not secure: return self.connected
        transports.TLS().PlugIn(self)
        # STARTTLS requires a version 1.0 stream; older streams stay plain 'tcp'.
        if not self.Dispatcher.Stream._document_attrs.has_key('version') or not self.Dispatcher.Stream._document_attrs['version']=='1.0': return self.connected
        while not self.Dispatcher.Stream.features and self.Process(1): pass      # If we get a version 1.0 stream the features tag MUST be present
        if not self.Dispatcher.Stream.features.getTag('starttls'): return self.connected       # TLS not supported by server
        while not self.TLS.starttls and self.Process(1): pass
        # The TLS plugin may have unplugged itself on failure, hence the hasattr guard.
        if not hasattr(self, 'TLS') or self.TLS.starttls!='success': self.event('tls_failed'); return self.connected
        self.connected='tls'
        return self.connected

    def auth(self,user,password,resource='',sasl=1):
        """ Authenticate connection and bind resource. If resource is not provided
            random one or library name used.
            Returns 'sasl', 'old_auth' or None (failure); also appends the
            mechanism used to self.connected. """
        self._User,self._Password,self._Resource=user,password,resource
        # Wait for the stream header and, on 1.0 streams, for the features stanza.
        while not self.Dispatcher.Stream._document_attrs and self.Process(1): pass
        if self.Dispatcher.Stream._document_attrs.has_key('version') and self.Dispatcher.Stream._document_attrs['version']=='1.0':
            while not self.Dispatcher.Stream.features and self.Process(1): pass      # If we get a version 1.0 stream the features tag MUST be present
        if sasl: auth.SASL(user,password).PlugIn(self)
        if not sasl or self.SASL.startsasl=='not-supported':
            # Fall back to the pre-XMPP jabber:iq:auth ("old") mechanism.
            if not resource: resource='xmpppy'
            if auth.NonSASL(user,password,resource).PlugIn(self):
                self.connected+='+old_auth'
                return 'old_auth'
            return
        self.SASL.auth()
        while self.SASL.startsasl=='in-process' and self.Process(1): pass
        if self.SASL.startsasl=='success':
            # SASL succeeded: bind the resource as the stream requires.
            auth.Bind().PlugIn(self)
            while self.Bind.bound is None and self.Process(1): pass
            if self.Bind.Bind(resource):
                self.connected+='+sasl'
                return 'sasl'
        else:
            if self.__dict__.has_key('SASL'): self.SASL.PlugOut()

    def getRoster(self):
        """ Return the Roster instance, previously plugging it in and
            requesting roster from server if needed. """
        if not self.__dict__.has_key('Roster'): roster.Roster().PlugIn(self)
        return self.Roster.getRoster()

    def sendInitPresence(self,requestRoster=1):
        """ Send roster request and initial presence.
            You can disable the first by setting requestRoster argument to 0. """
        self.sendPresence(requestRoster=requestRoster)

    def sendPresence(self,jid=None,typ=None,requestRoster=0):
        """ Send some specific presence state.
            Can also request roster from server if the corresponding argument is set."""
        if requestRoster: roster.Roster().PlugIn(self)
        self.send(dispatcher.Presence(to=jid, typ=typ))
+
class Component(CommonClient):
    """ Component class. The only difference from CommonClient is ability to perform component authentication. """
    # NOTE(review): the mutable default debug=['always','nodebuilder'] list is shared
    # across all instances that rely on the default -- confirm it is never mutated.
    def __init__(self,transport,port=5347,typ=None,debug=['always', 'nodebuilder'],domains=None,sasl=0,bind=0,route=0,xcp=0):
        """ Init function for Components.
            As components use a different auth mechanism which includes the namespace of the component.
            Jabberd1.4 and Ejabberd use the default namespace then for all client messages.
            Jabberd2 uses jabber:client.
            'transport' argument is a transport name that you are going to serve (f.e. "irc.localhost").
            'port' can be specified if 'transport' resolves to correct IP. If it is not then you'll have to specify IP
            and port while calling "connect()".
            If you are going to serve several different domains with single Component instance - you must list them ALL
            in the 'domains' argument.
            For jabberd2 servers you should set typ='jabberd2' argument.
        """
        CommonClient.__init__(self,transport,port=port,debug=debug)
        self.typ=typ
        self.sasl=sasl
        self.bind=bind
        self.route=route
        self.xcp=xcp
        # Serve either the explicit domain list or just the transport name itself.
        if domains:
            self.domains=domains
        else:
            self.domains=[transport]

    def connect(self,server=None,proxy=None):
        """ This will connect to the server, and if the features tag is found then set
            the namespace to be jabber:client as that is required for jabberd2.
            'server' and 'proxy' arguments have the same meaning as in xmpp.Client.connect() """
        if self.sasl:
            # SASL component auth needs the component namespace and the bare server name.
            self.Namespace=auth.NS_COMPONENT_1
            self.Server=server[0]
        CommonClient.connect(self,server=server,proxy=proxy)
        # jabberd2 (explicitly, or detected via a features stanza) expects jabber:client stanzas.
        if self.connected and (self.typ=='jabberd2' or not self.typ and self.Dispatcher.Stream.features != None) and (not self.xcp):
            self.defaultNamespace=auth.NS_CLIENT
            self.Dispatcher.RegisterNamespace(self.defaultNamespace)
            self.Dispatcher.RegisterProtocol('iq',dispatcher.Iq)
            self.Dispatcher.RegisterProtocol('message',dispatcher.Message)
            self.Dispatcher.RegisterProtocol('presence',dispatcher.Presence)
        return self.connected

    def dobind(self, sasl):
        """ Bind all served domains. Called internally from auth(). """
        # This has to be done before binding, because we can receive a route stanza before binding finishes
        self._route = self.route
        if self.bind:
            for domain in self.domains:
                auth.ComponentBind(sasl).PlugIn(self)
                while self.ComponentBind.bound is None: self.Process(1)
                # Stop at the first domain that fails to bind.
                if (not self.ComponentBind.Bind(domain)):
                    self.ComponentBind.PlugOut()
                    return
                self.ComponentBind.PlugOut()

    def auth(self,name,password,dup=None):
        """ Authenticate component "name" with password "password".
            Returns 'sasl', 'old_auth' or None on failure. 'dup' is unused. """
        self._User,self._Password,self._Resource=name,password,''
        try:
            if self.sasl: auth.SASL(name,password).PlugIn(self)
            if not self.sasl or self.SASL.startsasl=='not-supported':
                # Fall back to the legacy handshake-based component auth.
                if auth.NonSASL(name,password,'').PlugIn(self):
                    self.dobind(sasl=False)
                    self.connected+='+old_auth'
                    return 'old_auth'
                return
            self.SASL.auth()
            while self.SASL.startsasl=='in-process' and self.Process(1): pass
            if self.SASL.startsasl=='success':
                self.dobind(sasl=True)
                self.connected+='+sasl'
                return 'sasl'
            else:
                raise auth.NotAuthorized(self.SASL.startsasl)
        except:
            # NOTE(review): bare except also swallows unrelated errors (e.g. typos);
            # it only logs and returns None to signal failure.
            self.DEBUG(self.DBG,"Failed to authenticate %s"%name,'error')
diff --git a/libs/xmpp/commands.py b/libs/xmpp/commands.py
new file mode 100644
index 00000000..cdebf8f2
--- /dev/null
+++ b/libs/xmpp/commands.py
@@ -0,0 +1,328 @@
+## $Id: commands.py,v 1.17 2007/08/28 09:54:15 normanr Exp $
+
+## Ad-Hoc Command manager
+## Mike Albon (c) 5th January 2005
+
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+
+"""This module is a ad-hoc command processor for xmpppy. It uses the plug-in mechanism like most of the core library. It depends on a DISCO browser manager.
+
+There are 3 classes here, a command processor Commands like the Browser, and a command template plugin Command, and an example command.
+
+To use this module:
+
+ Instansiate the module with the parent transport and disco browser manager as parameters.
+ 'Plug in' commands using the command template.
+ The command feature must be added to existing disco replies where neccessary.
+
+What it supplies:
+
+ Automatic command registration with the disco browser manager.
+ Automatic listing of commands in the public command list.
+ A means of handling requests, by redirection though the command manager.
+"""
+
+from protocol import *
+from client import PlugIn
+
class Commands(PlugIn):
    """Commands is an ancestor of PlugIn and can be attached to any session.

    The commands class provides a lookup and browse mechanism. It follows the same principle as the Browser class for Service Discovery: to provide the list of commands it adds the 'list' disco type to your existing disco handler function.

    How it works:
        The commands are added into the existing Browser on the correct nodes. When the command list is built the supplied discovery handler function needs to have a 'list' option in type. This then gets enumerated, all results returned as None are ignored.
        The command executed is then called using its Execute method. All session management is handled by the command itself.
    """
    def __init__(self, browser):
        """Initialises class and sets up local variables"""
        PlugIn.__init__(self)
        DBG_LINE='commands'  # NOTE(review): local variable, not self.DBG_LINE -- appears to have no effect; confirm intent
        self._exported_methods=[]
        # Handlers are keyed by jid, then by command node; '' is the catch-all jid.
        self._handlers={'':{}}
        self._browser = browser

    def plugin(self, owner):
        """Makes handlers within the session"""
        # Plug into the session and the disco manager
        # We only need get and set, results are not needed by a service provider, only a service user.
        owner.RegisterHandler('iq',self._CommandHandler,typ='set',ns=NS_COMMANDS)
        owner.RegisterHandler('iq',self._CommandHandler,typ='get',ns=NS_COMMANDS)
        self._browser.setDiscoHandler(self._DiscoHandler,node=NS_COMMANDS,jid='')

    def plugout(self):
        """Removes handlers from the session"""
        # unPlug from the session and the disco manager
        self._owner.UnregisterHandler('iq',self._CommandHandler,ns=NS_COMMANDS)
        # NOTE(review): loop iterates per registered jid but always removes the same
        # node='' handler -- presumably should pass jid=jid; confirm against Browser API.
        for jid in self._handlers:
            self._browser.delDiscoHandler(self._DiscoHandler,node=NS_COMMANDS)

    def _CommandHandler(self,conn,request):
        """The internal method to process the routing of command execution requests"""
        # This is the command handler itself.
        # We must:
        #   Pass on command execution to command handler
        #   (Do we need to keep session details here, or can that be done in the command?)
        jid = str(request.getTo())
        try:
            node = request.getTagAttr('command','node')
        except:
            conn.send(Error(request,ERR_BAD_REQUEST))
            raise NodeProcessed
        # Prefer a jid-specific handler table, fall back to the '' catch-all.
        if self._handlers.has_key(jid):
            if self._handlers[jid].has_key(node):
                self._handlers[jid][node]['execute'](conn,request)
            else:
                conn.send(Error(request,ERR_ITEM_NOT_FOUND))
                raise NodeProcessed
        elif self._handlers[''].has_key(node):
            self._handlers[''][node]['execute'](conn,request)
        else:
            conn.send(Error(request,ERR_ITEM_NOT_FOUND))
            raise NodeProcessed

    def _DiscoHandler(self,conn,request,typ):
        """The internal method to process service discovery requests"""
        # This is the disco manager handler.
        if typ == 'items':
            # We must:
            #   Generate a list of commands and return the list
            #   * This handler does not handle individual commands disco requests.
            # Pseudo:
            #   Enumerate the 'item' disco of each command for the specified jid
            #   Build response and send
            #   To make this code easy to write we add a 'list' disco type, it returns a tuple or 'none' if not advertised
            list = []   # NOTE(review): shadows the builtin 'list' inside this method
            items = []
            jid = str(request.getTo())
            # Get specific jid based results
            if self._handlers.has_key(jid):
                for each in self._handlers[jid].keys():
                    items.append((jid,each))
            else:
                # Get generic results
                for each in self._handlers[''].keys():
                    items.append(('',each))
            if items != []:
                for each in items:
                    i = self._handlers[each[0]][each[1]]['disco'](conn,request,'list')
                    if i != None:
                        list.append(Node(tag='item',attrs={'jid':i[0],'node':i[1],'name':i[2]}))
                iq = request.buildReply('result')
                if request.getQuerynode(): iq.setQuerynode(request.getQuerynode())
                iq.setQueryPayload(list)
                conn.send(iq)
            else:
                conn.send(Error(request,ERR_ITEM_NOT_FOUND))
            raise NodeProcessed
        elif typ == 'info':
            return {'ids':[{'category':'automation','type':'command-list'}],'features':[]}

    def addCommand(self,name,cmddisco,cmdexecute,jid=''):
        """The method to call if adding a new command to the session, the required parameters of cmddisco and cmdexecute are the methods to enable that command to be executed"""
        # This command takes a command object and the name of the command for registration
        # We must:
        #   Add item into disco
        #   Add item into command list
        if not self._handlers.has_key(jid):
            self._handlers[jid]={}
            self._browser.setDiscoHandler(self._DiscoHandler,node=NS_COMMANDS,jid=jid)
        if self._handlers[jid].has_key(name):
            raise NameError,'Command Exists'
        else:
            self._handlers[jid][name]={'disco':cmddisco,'execute':cmdexecute}
        # Need to add disco stuff here
        self._browser.setDiscoHandler(cmddisco,node=name,jid=jid)

    def delCommand(self,name,jid=''):
        """Removes command from the session"""
        # This command takes a command object and the name used for registration
        # We must:
        #   Remove item from disco
        #   Remove item from command list
        if not self._handlers.has_key(jid):
            raise NameError,'Jid not found'
        if not self._handlers[jid].has_key(name):
            raise NameError, 'Command not found'
        else:
            # Do disco removal here
            command = self.getCommand(name,jid)['disco']
            del self._handlers[jid][name]
            self._browser.delDiscoHandler(command,node=name,jid=jid)

    def getCommand(self,name,jid=''):
        """Returns the command tuple"""
        # This gets the command object with name
        # We must:
        #   Return item that matches this name
        if not self._handlers.has_key(jid):
            raise NameError,'Jid not found'
        elif not self._handlers[jid].has_key(name):
            raise NameError,'Command not found'
        else:
            return self._handlers[jid][name]
+
class Command_Handler_Prototype(PlugIn):
    """This is a prototype command handler, as each command uses a disco method
    and execute method you can implement it any way you like, however this is
    my first attempt at making a generic handler that you can hang process
    stages on too. There is an example command below.

    The parameters are as follows:
    name : the name of the command within the jabber environment
    description : the natural language description
    discofeatures : the features supported by the command
    initial : the initial command in the form of {'execute':commandname}

    All stages set the 'actions' dictionary for each session to represent the possible options available.
    """
    # Class-level defaults; subclasses override name/description.
    name = 'examplecommand'
    count = 0
    description = 'an example command'
    discofeatures = [NS_COMMANDS,NS_DATA]
    # This is the command template
    def __init__(self,jid=''):
        """Set up the class"""
        PlugIn.__init__(self)
        DBG_LINE='command'  # NOTE(review): local variable, not self.DBG_LINE -- appears to have no effect; confirm intent
        self.sessioncount = 0
        self.sessions = {}
        # Disco information for command list pre-formatted as a tuple
        self.discoinfo = {'ids':[{'category':'automation','type':'command-node','name':self.description}],'features': self.discofeatures}
        self._jid = jid

    def plugin(self,owner):
        """Plug command into the commands class"""
        # The owner in this instance is the Command Processor
        self._commands = owner
        self._owner = owner._owner
        self._commands.addCommand(self.name,self._DiscoHandler,self.Execute,jid=self._jid)

    def plugout(self):
        """Remove command from the commands class"""
        self._commands.delCommand(self.name,self._jid)

    def getSessionID(self):
        """Returns an id for the command session"""
        # NOTE(review): assignment creates an instance attribute shadowing the
        # class attribute 'count' after the first call -- per-instance counter.
        self.count = self.count+1
        return 'cmd-%s-%d'%(self.name,self.count)

    def Execute(self,conn,request):
        """The method that handles all the commands, and routes them to the correct method for that stage."""
        # New request or old?
        try:
            session = request.getTagAttr('command','sessionid')
        except:
            session = None
        try:
            action = request.getTagAttr('command','action')
        except:
            action = None
        if action == None: action = 'execute'
        # Check session is in session list
        if self.sessions.has_key(session):
            if self.sessions[session]['jid']==request.getFrom():
                # Check action is valid
                if self.sessions[session]['actions'].has_key(action):
                    # Execute next action
                    self.sessions[session]['actions'][action](conn,request)
                else:
                    # Stage not presented as an option
                    self._owner.send(Error(request,ERR_BAD_REQUEST))
                    raise NodeProcessed
            else:
                # Jid and session don't match. Go away imposter
                self._owner.send(Error(request,ERR_BAD_REQUEST))
                raise NodeProcessed
        elif session != None:
            # Not on this sessionid you won't.
            self._owner.send(Error(request,ERR_BAD_REQUEST))
            raise NodeProcessed
        else:
            # New session
            self.initial[action](conn,request)

    def _DiscoHandler(self,conn,request,type):
        """The handler for discovery events"""
        if type == 'list':
            return (request.getTo(),self.name,self.description)
        elif type == 'items':
            return []
        elif type == 'info':
            return self.discoinfo
+
class TestCommand(Command_Handler_Prototype):
    """ Example class. You should read the source if you wish to understand how it works.
        Generally, it presents a "master" that guides the user through calculating something.
    """
    name = 'testcommand'
    description = 'a noddy example command'
    def __init__(self,jid=''):
        """ Init internal constants. """
        Command_Handler_Prototype.__init__(self,jid)
        self.initial = {'execute':self.cmdFirstStage}

    def cmdFirstStage(self,conn,request):
        """ Determine """
        # This is the only place this should be repeated as all other stages should have SessionIDs
        try:
            session = request.getTagAttr('command','sessionid')
        except:
            session = None
        if session == None:
            session = self.getSessionID()
            self.sessions[session]={'jid':request.getFrom(),'actions':{'cancel':self.cmdCancel,'next':self.cmdSecondStage,'execute':self.cmdSecondStage},'data':{'type':None}}
        # As this is the first stage we only send a form
        reply = request.buildReply('result')
        form = DataForm(title='Select type of operation',data=['Use the combobox to select the type of calculation you would like to do, then click Next',DataField(name='calctype',desc='Calculation Type',value=self.sessions[session]['data']['type'],options=[['circlediameter','Calculate the Diameter of a circle'],['circlearea','Calculate the area of a circle']],typ='list-single',required=1)])
        replypayload = [Node('actions',attrs={'execute':'next'},payload=[Node('next')]),form]
        reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':session,'status':'executing'},payload=replypayload)
        self._owner.send(reply)
        raise NodeProcessed

    def cmdSecondStage(self,conn,request):
        # Record the chosen calculation type and advance the session's action table.
        form = DataForm(node = request.getTag(name='command').getTag(name='x',namespace=NS_DATA))
        self.sessions[request.getTagAttr('command','sessionid')]['data']['type']=form.getField('calctype').getValue()
        self.sessions[request.getTagAttr('command','sessionid')]['actions']={'cancel':self.cmdCancel,None:self.cmdThirdStage,'previous':self.cmdFirstStage,'execute':self.cmdThirdStage,'next':self.cmdThirdStage}
        # The form generation is split out to another method as it may be called by cmdThirdStage
        self.cmdSecondStageReply(conn,request)

    def cmdSecondStageReply(self,conn,request):
        # Ask the user for the radius; reused by cmdThirdStage on invalid input.
        reply = request.buildReply('result')
        form = DataForm(title = 'Enter the radius', data=['Enter the radius of the circle (numbers only)',DataField(desc='Radius',name='radius',typ='text-single')])
        replypayload = [Node('actions',attrs={'execute':'complete'},payload=[Node('complete'),Node('prev')]),form]
        reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':request.getTagAttr('command','sessionid'),'status':'executing'},payload=replypayload)
        self._owner.send(reply)
        raise NodeProcessed

    def cmdThirdStage(self,conn,request):
        # Compute the result; on a non-numeric radius, re-ask via cmdSecondStageReply
        # (which raises NodeProcessed, so control does not fall through).
        form = DataForm(node = request.getTag(name='command').getTag(name='x',namespace=NS_DATA))
        try:
            num = float(form.getField('radius').getValue())
        except:
            self.cmdSecondStageReply(conn,request)
        from math import pi
        if self.sessions[request.getTagAttr('command','sessionid')]['data']['type'] == 'circlearea':
            result = (num**2)*pi
        else:
            result = num*2*pi
        reply = request.buildReply('result')
        form = DataForm(typ='result',data=[DataField(desc='result',name='result',value=result)])
        reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':request.getTagAttr('command','sessionid'),'status':'completed'},payload=[form])
        self._owner.send(reply)
        raise NodeProcessed

    def cmdCancel(self,conn,request):
        # Confirm cancellation and drop the session state.
        reply = request.buildReply('result')
        reply.addChild(name='command',namespace=NS_COMMANDS,attrs={'node':request.getTagAttr('command','node'),'sessionid':request.getTagAttr('command','sessionid'),'status':'cancelled'})
        self._owner.send(reply)
        del self.sessions[request.getTagAttr('command','sessionid')]
diff --git a/libs/xmpp/debug.py b/libs/xmpp/debug.py
new file mode 100644
index 00000000..34ade88f
--- /dev/null
+++ b/libs/xmpp/debug.py
@@ -0,0 +1,423 @@
+## debug.py
+##
+## Copyright (C) 2003 Jacob Lundqvist
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU Lesser General Public License as published
+## by the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU Lesser General Public License for more details.
+
+_version_ = '1.4.0'
+
+"""\
+
+Generic debug class
+
+Other modules can always define extra debug flags for local usage, as long as
+they make sure they append them to debug_flags
+
+Also its always a good thing to prefix local flags with something, to reduce risk
+of coliding flags. Nothing breaks if two flags would be identical, but it might
+activate unintended debugging.
+
+flags can be numeric, but that makes analysing harder, on creation its
+not obvious what is activated, and when flag_show is given, output isnt
+really meaningfull.
+
+This Debug class can either be initialized and used on app level, or used independantly
+by the individual classes.
+
+For samples of usage, see samples subdir in distro source, and selftest
+in this code
+
+"""
+
+
+
+import sys
+import traceback
+import time
+import os
+
+import types
+
+if os.environ.has_key('TERM'):
+ colors_enabled=True
+else:
+ colors_enabled=False
+
+color_none = chr(27) + "[0m"
+color_black = chr(27) + "[30m"
+color_red = chr(27) + "[31m"
+color_green = chr(27) + "[32m"
+color_brown = chr(27) + "[33m"
+color_blue = chr(27) + "[34m"
+color_magenta = chr(27) + "[35m"
+color_cyan = chr(27) + "[36m"
+color_light_gray = chr(27) + "[37m"
+color_dark_gray = chr(27) + "[30;1m"
+color_bright_red = chr(27) + "[31;1m"
+color_bright_green = chr(27) + "[32;1m"
+color_yellow = chr(27) + "[33;1m"
+color_bright_blue = chr(27) + "[34;1m"
+color_purple = chr(27) + "[35;1m"
+color_bright_cyan = chr(27) + "[36;1m"
+color_white = chr(27) + "[37;1m"
+
+
+"""
+Define your flags in yor modules like this:
+
+from debug import *
+
+DBG_INIT = 'init' ; debug_flags.append( DBG_INIT )
+DBG_CONNECTION = 'connection' ; debug_flags.append( DBG_CONNECTION )
+
+ The reason for having a double statement wis so we can validate params
+ and catch all undefined debug flags
+
+ This gives us control over all used flags, and makes it easier to allow
+ global debugging in your code, just do something like
+
+ foo = Debug( debug_flags )
+
+ group flags, that is a flag in it self containing multiple flags should be
+ defined without the debug_flags.append() sequence, since the parts are already
+ in the list, also they must of course be defined after the flags they depend on ;)
+ example:
+
+DBG_MULTI = [ DBG_INIT, DBG_CONNECTION ]
+
+
+
+ NoDebug
+ -------
+ To speed code up, typically for product releases or such
+ use this class instead if you globaly want to disable debugging
+"""
+
+
class NoDebug:
    """ Drop-in replacement for Debug that does nothing.
        Assign Debug=NoDebug (see bottom of module) to disable all debugging
        and its overhead. """
    def __init__( self, *args, **kwargs ):
        self.debug_flags = []
    def show( self, *args, **kwargs):
        pass
    def Show( self, *args, **kwargs):
        pass
    def is_active( self, flag ):
        pass
    # Same class attribute Debug exposes, kept for interface compatibility.
    colors={}
    def active_set( self, active_flags = None ):
        return 0
+
+
+LINE_FEED = '\n'
+
+
class Debug:
    """ Flag-based debug logger. Output is produced only for messages whose
        flag is currently active; see module docstring for flag conventions. """
    def __init__( self,
                  #
                  # active_flags are those that will trigger output
                  #
                  active_flags = None,
                  #
                  # Log file should be file object or file name
                  #
                  log_file = sys.stderr,
                  #
                  # prefix and sufix can either be set globally or per call.
                  # personally I use this to color code debug statements
                  # with prefix = chr(27) + '[34m'
                  #      sufix = chr(27) + '[37;1m\n'
                  #
                  prefix = 'DEBUG: ',
                  sufix = '\n',
                  #
                  # If you want unix style timestamps,
                  #   0 disables timestamps
                  #   1 before prefix, good when prefix is a string
                  #   2 after prefix, good when prefix is a color
                  #
                  time_stamp = 0,
                  #
                  # flag_show should normally be off, but can be turned on to get a
                  # good view of what flags are actually used for calls,
                  # if it is not None, it should be a string
                  # flags for current call will be displayed
                  # with flag_show as separator
                  # recommended values would be '-' or ':', but any string goes
                  #
                  flag_show = None,
                  #
                  # If you dont want to validate flags on each call to
                  # show(), set this to 0
                  #
                  validate_flags = 1,
                  #
                  # If you dont want the welcome message, set to 0
                  # default is to show welcome if any flags are active
                  welcome = -1
                  ):

        self.debug_flags = []
        if welcome == -1:
            if active_flags and len(active_flags):
                welcome = 1
            else:
                welcome = 0

        self._remove_dupe_flags()
        if log_file:
            if type( log_file ) is type(''):
                try:
                    self._fh = open(log_file,'w')
                except:
                    # NOTE(review): message is missing the % operand for the
                    # file name (and should read "cannot open").
                    print 'ERROR: can open %s for writing'
                    sys.exit(0)
            else: ## assume its a stream type object
                self._fh = log_file
        else:
            self._fh = sys.stdout

        if time_stamp not in (0,1,2):
            msg2 = '%s' % time_stamp
            # NOTE(review): Python 2 string exception; invalid in Python 2.6+/3.
            raise 'Invalid time_stamp param', msg2
        self.prefix = prefix
        self.sufix = sufix
        self.time_stamp = time_stamp
        self.flag_show = None # must be initialised after possible welcome
        self.validate_flags = validate_flags

        self.active_set( active_flags )
        if welcome:
            self.show('')
            caller = sys._getframe(1) # used to get name of caller
            try:
                mod_name= ":%s" % caller.f_locals['__name__']
            except:
                mod_name = ""
            self.show('Debug created for %s%s' % (caller.f_code.co_filename,
                                                  mod_name ))
            self.show(' flags defined: %s' % ','.join( self.active ))

        if type(flag_show) in (type(''), type(None)):
            self.flag_show = flag_show
        else:
            msg2 = '%s' % type(flag_show )
            raise 'Invalid type for flag_show!', msg2

    def show( self, msg, flag = None, prefix = None, sufix = None,
              lf = 0 ):
        """
        flag can be of following types:
            None - this msg will always be shown if any debugging is on
            flag - will be shown if flag is active
            (flag1,flag2,,,) - will be shown if any of the given flags
                are active

        if prefix / sufix are not given, default ones from init will be used

        lf = -1 means strip linefeed if present
        lf = 1 means add linefeed if not present
        """

        if self.validate_flags:
            self._validate_flag( flag )

        if not self.is_active(flag):
            return
        if prefix:
            pre = prefix
        else:
            pre = self.prefix
        if sufix:
            suf = sufix
        else:
            suf = self.sufix

        if self.time_stamp == 2:
            output = '%s%s ' % ( pre,
                                 time.strftime('%b %d %H:%M:%S',
                                               time.localtime(time.time() )),
                                 )
        elif self.time_stamp == 1:
            output = '%s %s' % ( time.strftime('%b %d %H:%M:%S',
                                               time.localtime(time.time() )),
                                 pre,
                                 )
        else:
            output = pre

        if self.flag_show:
            if flag:
                output = '%s%s%s' % ( output, flag, self.flag_show )
            else:
                # this call uses the global default,
                # dont print "None", just show the separator
                output = '%s %s' % ( output, self.flag_show )

        output = '%s%s%s' % ( output, msg, suf )
        if lf:
            # strip/add lf if needed
            last_char = output[-1]
            if lf == 1 and last_char != LINE_FEED:
                output = output + LINE_FEED
            elif lf == -1 and last_char == LINE_FEED:
                output = output[:-1]
        try:
            self._fh.write( output )
        except:
            # unicode strikes again ;)
            # Replace non-ASCII characters with '?' and retry the write.
            s=u''
            for i in range(len(output)):
                if ord(output[i]) < 128:
                    c = output[i]
                else:
                    c = '?'
                s=s+c
            self._fh.write( '%s%s%s' % ( pre, s, suf ))
        self._fh.flush()

    # NOTE(review): this definition is dead code -- it is overridden by the
    # second is_active defined near the bottom of the class.
    def is_active( self, flag ):
        'If given flag(s) should generate output.'

        # try to abort early to quicken code
        if not self.active:
            return 0
        if not flag or flag in self.active:
            return 1
        else:
            # check for multi flag type:
            if type( flag ) in ( type(()), type([]) ):
                for s in flag:
                    if s in self.active:
                        return 1
        return 0

    def active_set( self, active_flags = None ):
        "returns 1 if any flags were actually set, otherwise 0."
        # NOTE(review): the comma-string branch never sets r=1, and if
        # active_flags.split(',') raises, 'flags' is unbound in the loop below
        # (NameError) -- confirm intended behaviour.
        r = 0
        ok_flags = []
        if not active_flags:
            # no debugging at all
            self.active = []
        elif type( active_flags ) in ( types.TupleType, types.ListType ):
            flags = self._as_one_list( active_flags )
            for t in flags:
                if t not in self.debug_flags:
                    sys.stderr.write('Invalid debugflag given: %s\n' % t )
                ok_flags.append( t )

            self.active = ok_flags
            r = 1
        else:
            # assume comma string
            try:
                flags = active_flags.split(',')
            except:
                self.show( '***' )
                self.show( '*** Invalid debug param given: %s' % active_flags )
                self.show( '*** please correct your param!' )
                self.show( '*** due to this, full debuging is enabled' )
                self.active = self.debug_flags

            for f in flags:
                s = f.strip()
                ok_flags.append( s )
            self.active = ok_flags

        self._remove_dupe_flags()
        return r

    def active_get( self ):
        "returns currently active flags."
        return self.active

    def _as_one_list( self, items ):
        """ init param might contain nested lists, typically from group flags.

            This code organises lst and removes dupes
        """
        if type( items ) <> type( [] ) and type( items ) <> type( () ):
            return [ items ]
        r = []
        for l in items:
            if type( l ) == type([]):
                # Flatten nested group-flag lists recursively.
                lst2 = self._as_one_list( l )
                for l2 in lst2:
                    self._append_unique_str(r, l2 )
            elif l == None:
                continue
            else:
                self._append_unique_str(r, l )
        return r

    def _append_unique_str( self, lst, item ):
        """filter out any dupes."""
        if type(item) <> type(''):
            msg2 = '%s' % item
            raise 'Invalid item type (should be string)',msg2
        if item not in lst:
            lst.append( item )
        return lst

    def _validate_flag( self, flags ):
        'verify that flag is defined.'
        if flags:
            for f in self._as_one_list( flags ):
                if not f in self.debug_flags:
                    msg2 = '%s' % f
                    raise 'Invalid debugflag given', msg2

    def _remove_dupe_flags( self ):
        """
        if multiple instances of Debug is used in same app,
        some flags might be created multiple time, filter out dupes
        """
        unique_flags = []
        for f in self.debug_flags:
            if f not in unique_flags:
                unique_flags.append(f)
        self.debug_flags = unique_flags

    # Optional mapping of flag/prefix -> ANSI color string, filled in by users.
    colors={}
    def Show(self, flag, msg, prefix=''):
        # Escape CR/LF for single-line output and break between adjacent tags.
        msg=msg.replace('\r','\\r').replace('\n','\\n').replace('><','>\n <')
        if not colors_enabled: pass
        elif self.colors.has_key(prefix): msg=self.colors[prefix]+msg+color_none
        else: msg=color_none+msg
        if not colors_enabled: prefixcolor=''
        elif self.colors.has_key(flag): prefixcolor=self.colors[flag]
        else: prefixcolor=color_none

        if prefix=='error':
            # Append the current traceback (if any) to error messages.
            _exception = sys.exc_info()
            if _exception[0]:
                msg=msg+'\n'+''.join(traceback.format_exception(_exception[0], _exception[1], _exception[2])).rstrip()

        prefix= self.prefix+prefixcolor+(flag+' '*12)[:12]+' '+(prefix+' '*6)[:6]
        self.show(msg, flag, prefix)

    def is_active( self, flag ):
        # Overrides the earlier is_active. With DBG_ALWAYS in active the check is
        # inverted: flags listed alongside 'always' are suppressed, all others shown.
        if not self.active: return 0
        if not flag or flag in self.active and DBG_ALWAYS not in self.active or flag not in self.active and DBG_ALWAYS in self.active : return 1
        return 0
+
+DBG_ALWAYS='always'
+
+##Uncomment this to effectively disable all debugging and all debugging overhead.
+#Debug=NoDebug
diff --git a/libs/xmpp/dispatcher.py b/libs/xmpp/dispatcher.py
new file mode 100644
index 00000000..cc94ee04
--- /dev/null
+++ b/libs/xmpp/dispatcher.py
@@ -0,0 +1,373 @@
+## transports.py
+##
+## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: dispatcher.py,v 1.42 2007/05/18 23:18:36 normanr Exp $
+
+"""
+Main xmpppy mechanism. Provides library with methods to assign different handlers
+to different XMPP stanzas.
+Contains one tunable attribute: DefaultTimeout (25 seconds by default). It defines time that
+Dispatcher.SendAndWaitForResponse method will wait for reply stanza before giving up.
+"""
+
+import simplexml,time,sys
+from protocol import *
+from client import PlugIn
+
+DefaultTimeout=25
+ID=0
+
+class Dispatcher(PlugIn):
+ """ Ancestor of PlugIn class. Handles XMPP stream, i.e. aware of stream headers.
+ Can be plugged out/in to restart these headers (used for SASL f.e.). """
+ def __init__(self):
+ PlugIn.__init__(self)
+ DBG_LINE='dispatcher'
+ self.handlers={}
+ self._expected={}
+ self._defaultHandler=None
+ self._pendingExceptions=[]
+ self._eventHandler=None
+ self._cycleHandlers=[]
+ self._exported_methods=[self.Process,self.RegisterHandler,self.RegisterDefaultHandler,\
+ self.RegisterEventHandler,self.UnregisterCycleHandler,self.RegisterCycleHandler,\
+ self.RegisterHandlerOnce,self.UnregisterHandler,self.RegisterProtocol,\
+ self.WaitForResponse,self.SendAndWaitForResponse,self.send,self.disconnect,\
+ self.SendAndCallForResponse, ]
+
+ def dumpHandlers(self):
+        """ Return set of user-registered callbacks in its internal format.
+ Used within the library to carry user handlers set over Dispatcher replugins. """
+ return self.handlers
+ def restoreHandlers(self,handlers):
+ """ Restores user-registered callbacks structure from dump previously obtained via dumpHandlers.
+ Used within the library to carry user handlers set over Dispatcher replugins. """
+ self.handlers=handlers
+
+ def _init(self):
+ """ Registers default namespaces/protocols/handlers. Used internally. """
+ self.RegisterNamespace('unknown')
+ self.RegisterNamespace(NS_STREAMS)
+ self.RegisterNamespace(self._owner.defaultNamespace)
+ self.RegisterProtocol('iq',Iq)
+ self.RegisterProtocol('presence',Presence)
+ self.RegisterProtocol('message',Message)
+ self.RegisterDefaultHandler(self.returnStanzaHandler)
+ self.RegisterHandler('error',self.streamErrorHandler,xmlns=NS_STREAMS)
+
+ def plugin(self, owner):
+ """ Plug the Dispatcher instance into Client class instance and send initial stream header. Used internally."""
+ self._init()
+ for method in self._old_owners_methods:
+ if method.__name__=='send': self._owner_send=method; break
+ self._owner.lastErrNode=None
+ self._owner.lastErr=None
+ self._owner.lastErrCode=None
+ self.StreamInit()
+
+ def plugout(self):
+ """ Prepares instance to be destructed. """
+ self.Stream.dispatch=None
+ self.Stream.DEBUG=None
+ self.Stream.features=None
+ self.Stream.destroy()
+
+ def StreamInit(self):
+ """ Send an initial stream header. """
+ self.Stream=simplexml.NodeBuilder()
+ self.Stream._dispatch_depth=2
+ self.Stream.dispatch=self.dispatch
+ self.Stream.stream_header_received=self._check_stream_start
+ self._owner.debug_flags.append(simplexml.DBG_NODEBUILDER)
+ self.Stream.DEBUG=self._owner.DEBUG
+ self.Stream.features=None
+ self._metastream=Node('stream:stream')
+ self._metastream.setNamespace(self._owner.Namespace)
+ self._metastream.setAttr('version','1.0')
+ self._metastream.setAttr('xmlns:stream',NS_STREAMS)
+ self._metastream.setAttr('to',self._owner.Server)
+ self._owner.send("%s>"%str(self._metastream)[:-2])
+
+ def _check_stream_start(self,ns,tag,attrs):
+ if ns<>NS_STREAMS or tag<>'stream':
+ raise ValueError('Incorrect stream start: (%s,%s). Terminating.'%(tag,ns))
+
+ def Process(self, timeout=0):
+        """ Check incoming stream for data waiting. If "timeout" is positive - block for at most this time.
+ Returns:
+ 1) length of processed data if some data were processed;
+ 2) '0' string if no data were processed but link is alive;
+ 3) 0 (zero) if underlying connection is closed.
+ Take note that in case of disconnection detect during Process() call
+ disconnect handlers are called automatically.
+ """
+ for handler in self._cycleHandlers: handler(self)
+ if len(self._pendingExceptions) > 0:
+ _pendingException = self._pendingExceptions.pop()
+ raise _pendingException[0], _pendingException[1], _pendingException[2]
+ if self._owner.Connection.pending_data(timeout):
+ try: data=self._owner.Connection.receive()
+ except IOError: return
+ self.Stream.Parse(data)
+ if len(self._pendingExceptions) > 0:
+ _pendingException = self._pendingExceptions.pop()
+ raise _pendingException[0], _pendingException[1], _pendingException[2]
+ if data: return len(data)
+ return '0' # It means that nothing is received but link is alive.
+
+ def RegisterNamespace(self,xmlns,order='info'):
+ """ Creates internal structures for newly registered namespace.
+ You can register handlers for this namespace afterwards. By default one namespace
+        already registered (jabber:client or jabber:component:accept depending on context). """
+ self.DEBUG('Registering namespace "%s"'%xmlns,order)
+ self.handlers[xmlns]={}
+ self.RegisterProtocol('unknown',Protocol,xmlns=xmlns)
+ self.RegisterProtocol('default',Protocol,xmlns=xmlns)
+
+ def RegisterProtocol(self,tag_name,Proto,xmlns=None,order='info'):
+ """ Used to declare some top-level stanza name to dispatcher.
+ Needed to start registering handlers for such stanzas.
+ Iq, message and presence protocols are registered by default. """
+ if not xmlns: xmlns=self._owner.defaultNamespace
+ self.DEBUG('Registering protocol "%s" as %s(%s)'%(tag_name,Proto,xmlns), order)
+ self.handlers[xmlns][tag_name]={type:Proto, 'default':[]}
+
+ def RegisterNamespaceHandler(self,xmlns,handler,typ='',ns='', makefirst=0, system=0):
+ """ Register handler for processing all stanzas for specified namespace. """
+ self.RegisterHandler('default', handler, typ, ns, xmlns, makefirst, system)
+
+ def RegisterHandler(self,name,handler,typ='',ns='',xmlns=None, makefirst=0, system=0):
+ """Register user callback as stanzas handler of declared type. Callback must take
+ (if chained, see later) arguments: dispatcher instance (for replying), incomed
+ return of previous handlers.
+ The callback must raise xmpp.NodeProcessed just before return if it want preven
+ callbacks to be called with the same stanza as argument _and_, more importantly
+ library from returning stanza to sender with error set (to be enabled in 0.2 ve
+ Arguments:
+ "name" - name of stanza. F.e. "iq".
+ "handler" - user callback.
+ "typ" - value of stanza's "type" attribute. If not specified any value match
+ "ns" - namespace of child that stanza must contain.
+ "chained" - chain together output of several handlers.
+ "makefirst" - insert handler in the beginning of handlers list instead of
+ adding it to the end. Note that more common handlers (i.e. w/o "typ" and "
+ will be called first nevertheless.
+ "system" - call handler even if NodeProcessed Exception were raised already.
+ """
+ if not xmlns: xmlns=self._owner.defaultNamespace
+ self.DEBUG('Registering handler %s for "%s" type->%s ns->%s(%s)'%(handler,name,typ,ns,xmlns), 'info')
+ if not typ and not ns: typ='default'
+ if not self.handlers.has_key(xmlns): self.RegisterNamespace(xmlns,'warn')
+ if not self.handlers[xmlns].has_key(name): self.RegisterProtocol(name,Protocol,xmlns,'warn')
+ if not self.handlers[xmlns][name].has_key(typ+ns): self.handlers[xmlns][name][typ+ns]=[]
+ if makefirst: self.handlers[xmlns][name][typ+ns].insert(0,{'func':handler,'system':system})
+ else: self.handlers[xmlns][name][typ+ns].append({'func':handler,'system':system})
+
+ def RegisterHandlerOnce(self,name,handler,typ='',ns='',xmlns=None,makefirst=0, system=0):
+ """ Unregister handler after first call (not implemented yet). """
+ if not xmlns: xmlns=self._owner.defaultNamespace
+ self.RegisterHandler(name, handler, typ, ns, xmlns, makefirst, system)
+
+ def UnregisterHandler(self,name,handler,typ='',ns='',xmlns=None):
+ """ Unregister handler. "typ" and "ns" must be specified exactly the same as with registering."""
+ if not xmlns: xmlns=self._owner.defaultNamespace
+ if not self.handlers.has_key(xmlns): return
+ if not typ and not ns: typ='default'
+ for pack in self.handlers[xmlns][name][typ+ns]:
+ if handler==pack['func']: break
+ else: pack=None
+ try: self.handlers[xmlns][name][typ+ns].remove(pack)
+ except ValueError: pass
+
+ def RegisterDefaultHandler(self,handler):
+ """ Specify the handler that will be used if no NodeProcessed exception were raised.
+ This is returnStanzaHandler by default. """
+ self._defaultHandler=handler
+
+ def RegisterEventHandler(self,handler):
+ """ Register handler that will process events. F.e. "FILERECEIVED" event. """
+ self._eventHandler=handler
+
+ def returnStanzaHandler(self,conn,stanza):
+ """ Return stanza back to the sender with error set. """
+ if stanza.getType() in ['get','set']:
+ conn.send(Error(stanza,ERR_FEATURE_NOT_IMPLEMENTED))
+
+ def streamErrorHandler(self,conn,error):
+ name,text='error',error.getData()
+ for tag in error.getChildren():
+ if tag.getNamespace()==NS_XMPP_STREAMS:
+ if tag.getName()=='text': text=tag.getData()
+ else: name=tag.getName()
+ if name in stream_exceptions.keys(): exc=stream_exceptions[name]
+ else: exc=StreamError
+ raise exc((name,text))
+
+ def RegisterCycleHandler(self,handler):
+ """ Register handler that will be called on every Dispatcher.Process() call. """
+ if handler not in self._cycleHandlers: self._cycleHandlers.append(handler)
+
+ def UnregisterCycleHandler(self,handler):
+        """ Unregister handler that is called on every Dispatcher.Process() call."""
+ if handler in self._cycleHandlers: self._cycleHandlers.remove(handler)
+
+ def Event(self,realm,event,data):
+ """ Raise some event. Takes three arguments:
+ 1) "realm" - scope of event. Usually a namespace.
+            2) "event" - the event itself. F.e. "SUCCESSFULL SEND".
+ 3) data that comes along with event. Depends on event."""
+ if self._eventHandler: self._eventHandler(realm,event,data)
+
+ def dispatch(self,stanza,session=None,direct=0):
+        """ Main procedure that performs XMPP stanza recognition and calls appropriate handlers for it.
+ Called internally. """
+ if not session: session=self
+ session.Stream._mini_dom=None
+ name=stanza.getName()
+
+ if not direct and self._owner._route:
+ if name == 'route':
+ if stanza.getAttr('error') == None:
+ if len(stanza.getChildren()) == 1:
+ stanza = stanza.getChildren()[0]
+ name=stanza.getName()
+ else:
+ for each in stanza.getChildren():
+ self.dispatch(each,session,direct=1)
+ return
+ elif name == 'presence':
+ return
+ elif name in ('features','bind'):
+ pass
+ else:
+ raise UnsupportedStanzaType(name)
+
+ if name=='features': session.Stream.features=stanza
+
+ xmlns=stanza.getNamespace()
+ if not self.handlers.has_key(xmlns):
+ self.DEBUG("Unknown namespace: " + xmlns,'warn')
+ xmlns='unknown'
+ if not self.handlers[xmlns].has_key(name):
+ self.DEBUG("Unknown stanza: " + name,'warn')
+ name='unknown'
+ else:
+ self.DEBUG("Got %s/%s stanza"%(xmlns,name), 'ok')
+
+ if stanza.__class__.__name__=='Node': stanza=self.handlers[xmlns][name][type](node=stanza)
+
+ typ=stanza.getType()
+ if not typ: typ=''
+ stanza.props=stanza.getProperties()
+ ID=stanza.getID()
+
+ session.DEBUG("Dispatching %s stanza with type->%s props->%s id->%s"%(name,typ,stanza.props,ID),'ok')
+
+ list=['default'] # we will use all handlers:
+ if self.handlers[xmlns][name].has_key(typ): list.append(typ) # from very common...
+ for prop in stanza.props:
+ if self.handlers[xmlns][name].has_key(prop): list.append(prop)
+ if typ and self.handlers[xmlns][name].has_key(typ+prop): list.append(typ+prop) # ...to very particular
+
+ chain=self.handlers[xmlns]['default']['default']
+ for key in list:
+ if key: chain = chain + self.handlers[xmlns][name][key]
+
+ output=''
+ if session._expected.has_key(ID):
+ user=0
+ if type(session._expected[ID])==type(()):
+ cb,args=session._expected[ID]
+ session.DEBUG("Expected stanza arrived. Callback %s(%s) found!"%(cb,args),'ok')
+ try: cb(session,stanza,**args)
+ except Exception, typ:
+ if typ.__class__.__name__<>'NodeProcessed': raise
+ else:
+ session.DEBUG("Expected stanza arrived!",'ok')
+ session._expected[ID]=stanza
+ else: user=1
+ for handler in chain:
+ if user or handler['system']:
+ try:
+ handler['func'](session,stanza)
+ except Exception, typ:
+ if typ.__class__.__name__<>'NodeProcessed':
+ self._pendingExceptions.insert(0, sys.exc_info())
+ return
+ user=0
+ if user and self._defaultHandler: self._defaultHandler(session,stanza)
+
+ def WaitForResponse(self, ID, timeout=DefaultTimeout):
+ """ Block and wait until stanza with specific "id" attribute will come.
+            If no such stanza arrives within the timeout, return None.
+ If operation failed for some reason then owner's attributes
+ lastErrNode, lastErr and lastErrCode are set accordingly. """
+ self._expected[ID]=None
+ has_timed_out=0
+ abort_time=time.time() + timeout
+ self.DEBUG("Waiting for ID:%s with timeout %s..." % (ID,timeout),'wait')
+ while not self._expected[ID]:
+ if not self.Process(0.04):
+ self._owner.lastErr="Disconnect"
+ return None
+ if time.time() > abort_time:
+ self._owner.lastErr="Timeout"
+ return None
+ response=self._expected[ID]
+ del self._expected[ID]
+ if response.getErrorCode():
+ self._owner.lastErrNode=response
+ self._owner.lastErr=response.getError()
+ self._owner.lastErrCode=response.getErrorCode()
+ return response
+
+ def SendAndWaitForResponse(self, stanza, timeout=DefaultTimeout):
+ """ Put stanza on the wire and wait for recipient's response to it. """
+ return self.WaitForResponse(self.send(stanza),timeout)
+
+ def SendAndCallForResponse(self, stanza, func, args={}):
+ """ Put stanza on the wire and call back when recipient replies.
+ Additional callback arguments can be specified in args. """
+ self._expected[self.send(stanza)]=(func,args)
+
+ def send(self,stanza):
+ """ Serialise stanza and put it on the wire. Assign an unique ID to it before send.
+ Returns assigned ID."""
+ if type(stanza) in [type(''), type(u'')]: return self._owner_send(stanza)
+ if not isinstance(stanza,Protocol): _ID=None
+ elif not stanza.getID():
+ global ID
+ ID+=1
+ _ID=`ID`
+ stanza.setID(_ID)
+ else: _ID=stanza.getID()
+ if self._owner._registered_name and not stanza.getAttr('from'): stanza.setAttr('from',self._owner._registered_name)
+ if self._owner._route and stanza.getName()!='bind':
+ to=self._owner.Server
+ if stanza.getTo() and stanza.getTo().getDomain():
+ to=stanza.getTo().getDomain()
+ frm=stanza.getFrom()
+ if frm.getDomain():
+ frm=frm.getDomain()
+ route=Protocol('route',to=to,frm=frm,payload=[stanza])
+ stanza=route
+ stanza.setNamespace(self._owner.Namespace)
+ stanza.setParent(self._metastream)
+ self._owner_send(stanza)
+ return _ID
+
+ def disconnect(self):
+        """ Send a stream terminator and handle all incoming stanzas before stream closure. """
+ self._owner_send('')
+ while self.Process(1): pass
diff --git a/libs/xmpp/features.py b/libs/xmpp/features.py
new file mode 100644
index 00000000..c7993c29
--- /dev/null
+++ b/libs/xmpp/features.py
@@ -0,0 +1,182 @@
+## features.py
+##
+## Copyright (C) 2003-2004 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: features.py,v 1.25 2009/04/07 07:11:48 snakeru Exp $
+
+"""
+This module contains variable stuff that is not worth splitting into separate modules.
+Here is:
+ DISCO client and agents-to-DISCO and browse-to-DISCO emulators.
+ IBR and password manager.
+ jabber:iq:privacy methods
+All these methods takes 'disp' first argument that should be already connected
+(and in most cases already authorised) dispatcher instance.
+"""
+
+from protocol import *
+
+REGISTER_DATA_RECEIVED='REGISTER DATA RECEIVED'
+
+### DISCO ### http://jabber.org/protocol/disco ### JEP-0030 ####################
+### Browse ### jabber:iq:browse ### JEP-0030 ###################################
+### Agents ### jabber:iq:agents ### JEP-0030 ###################################
+def _discover(disp,ns,jid,node=None,fb2b=0,fb2a=1):
+ """ Try to obtain info from the remote object.
+ If remote object doesn't support disco fall back to browse (if fb2b is true)
+    and if it doesn't support browse (or fb2b is not true) fall back to agents protocol
+    (if fb2a is true). Returns obtained info. Used internally. """
+ iq=Iq(to=jid,typ='get',queryNS=ns)
+ if node: iq.setQuerynode(node)
+ rep=disp.SendAndWaitForResponse(iq)
+ if fb2b and not isResultNode(rep): rep=disp.SendAndWaitForResponse(Iq(to=jid,typ='get',queryNS=NS_BROWSE)) # Fallback to browse
+ if fb2a and not isResultNode(rep): rep=disp.SendAndWaitForResponse(Iq(to=jid,typ='get',queryNS=NS_AGENTS)) # Fallback to agents
+ if isResultNode(rep): return [n for n in rep.getQueryPayload() if isinstance(n, Node)]
+ return []
+
+def discoverItems(disp,jid,node=None):
+ """ Query remote object about any items that it contains. Return items list. """
+ """ According to JEP-0030:
+ query MAY have node attribute
+ item: MUST HAVE jid attribute and MAY HAVE name, node, action attributes.
+ action attribute of item can be either of remove or update value."""
+ ret=[]
+ for i in _discover(disp,NS_DISCO_ITEMS,jid,node):
+ if i.getName()=='agent' and i.getTag('name'): i.setAttr('name',i.getTagData('name'))
+ ret.append(i.attrs)
+ return ret
+
+def discoverInfo(disp,jid,node=None):
+ """ Query remote object about info that it publishes. Returns identities and features lists."""
+ """ According to JEP-0030:
+ query MAY have node attribute
+ identity: MUST HAVE category and name attributes and MAY HAVE type attribute.
+ feature: MUST HAVE var attribute"""
+ identities , features = [] , []
+ for i in _discover(disp,NS_DISCO_INFO,jid,node):
+ if i.getName()=='identity': identities.append(i.attrs)
+ elif i.getName()=='feature': features.append(i.getAttr('var'))
+ elif i.getName()=='agent':
+ if i.getTag('name'): i.setAttr('name',i.getTagData('name'))
+ if i.getTag('description'): i.setAttr('name',i.getTagData('description'))
+ identities.append(i.attrs)
+ if i.getTag('groupchat'): features.append(NS_GROUPCHAT)
+ if i.getTag('register'): features.append(NS_REGISTER)
+ if i.getTag('search'): features.append(NS_SEARCH)
+ return identities , features
+
+### Registration ### jabber:iq:register ### JEP-0077 ###########################
+def getRegInfo(disp,host,info={},sync=True):
+ """ Gets registration form from remote host.
+ You can pre-fill the info dictionary.
+ F.e. if you are requesting info on registering user joey than specify
+ info as {'username':'joey'}. See JEP-0077 for details.
+ 'disp' must be connected dispatcher instance."""
+ iq=Iq('get',NS_REGISTER,to=host)
+ for i in info.keys(): iq.setTagData(i,info[i])
+ if sync:
+ resp=disp.SendAndWaitForResponse(iq)
+ _ReceivedRegInfo(disp.Dispatcher,resp, host)
+ return resp
+ else: disp.SendAndCallForResponse(iq,_ReceivedRegInfo, {'agent': host})
+
+def _ReceivedRegInfo(con, resp, agent):
+ iq=Iq('get',NS_REGISTER,to=agent)
+ if not isResultNode(resp): return
+ df=resp.getTag('query',namespace=NS_REGISTER).getTag('x',namespace=NS_DATA)
+ if df:
+ con.Event(NS_REGISTER,REGISTER_DATA_RECEIVED,(agent, DataForm(node=df)))
+ return
+ df=DataForm(typ='form')
+ for i in resp.getQueryPayload():
+ if type(i)<>type(iq): pass
+ elif i.getName()=='instructions': df.addInstructions(i.getData())
+ else: df.setField(i.getName()).setValue(i.getData())
+ con.Event(NS_REGISTER,REGISTER_DATA_RECEIVED,(agent, df))
+
+def register(disp,host,info):
+ """ Perform registration on remote server with provided info.
+ disp must be connected dispatcher instance.
+ Returns true or false depending on registration result.
+ If registration fails you can get additional info from the dispatcher's owner
+ attributes lastErrNode, lastErr and lastErrCode.
+ """
+ iq=Iq('set',NS_REGISTER,to=host)
+ if type(info)<>type({}): info=info.asDict()
+ for i in info.keys(): iq.setTag('query').setTagData(i,info[i])
+ resp=disp.SendAndWaitForResponse(iq)
+ if isResultNode(resp): return 1
+
+def unregister(disp,host):
+ """ Unregisters with host (permanently removes account).
+ disp must be connected and authorized dispatcher instance.
+ Returns true on success."""
+ resp=disp.SendAndWaitForResponse(Iq('set',NS_REGISTER,to=host,payload=[Node('remove')]))
+ if isResultNode(resp): return 1
+
+def changePasswordTo(disp,newpassword,host=None):
+ """ Changes password on specified or current (if not specified) server.
+ disp must be connected and authorized dispatcher instance.
+ Returns true on success."""
+ if not host: host=disp._owner.Server
+ resp=disp.SendAndWaitForResponse(Iq('set',NS_REGISTER,to=host,payload=[Node('username',payload=[disp._owner.Server]),Node('password',payload=[newpassword])]))
+ if isResultNode(resp): return 1
+
+### Privacy ### jabber:iq:privacy ### draft-ietf-xmpp-im-19 ####################
+#type=[jid|group|subscription]
+#action=[allow|deny]
+
+def getPrivacyLists(disp):
+ """ Requests privacy lists from connected server.
+ Returns dictionary of existing lists on success."""
+ try:
+ dict={'lists':[]}
+ resp=disp.SendAndWaitForResponse(Iq('get',NS_PRIVACY))
+ if not isResultNode(resp): return
+ for list in resp.getQueryPayload():
+ if list.getName()=='list': dict['lists'].append(list.getAttr('name'))
+ else: dict[list.getName()]=list.getAttr('name')
+ return dict
+ except: pass
+
+def getPrivacyList(disp,listname):
+ """ Requests specific privacy list listname. Returns list of XML nodes (rules)
+ taken from the server responce."""
+ try:
+ resp=disp.SendAndWaitForResponse(Iq('get',NS_PRIVACY,payload=[Node('list',{'name':listname})]))
+ if isResultNode(resp): return resp.getQueryPayload()[0]
+ except: pass
+
+def setActivePrivacyList(disp,listname=None,typ='active'):
+ """ Switches privacy list 'listname' to specified type.
+ By default the type is 'active'. Returns true on success."""
+ if listname: attrs={'name':listname}
+ else: attrs={}
+ resp=disp.SendAndWaitForResponse(Iq('set',NS_PRIVACY,payload=[Node(typ,attrs)]))
+ if isResultNode(resp): return 1
+
+def setDefaultPrivacyList(disp,listname=None):
+ """ Sets the default privacy list as 'listname'. Returns true on success."""
+ return setActivePrivacyList(disp,listname,'default')
+
+def setPrivacyList(disp,list):
+ """ Set the ruleset. 'list' should be the simpleXML node formatted
+ according to RFC 3921 (XMPP-IM) (I.e. Node('list',{'name':listname},payload=[...]) )
+ Returns true on success."""
+ resp=disp.SendAndWaitForResponse(Iq('set',NS_PRIVACY,payload=[list]))
+ if isResultNode(resp): return 1
+
+def delPrivacyList(disp,listname):
+ """ Deletes privacy list 'listname'. Returns true on success."""
+ resp=disp.SendAndWaitForResponse(Iq('set',NS_PRIVACY,payload=[Node('list',{'name':listname})]))
+ if isResultNode(resp): return 1
diff --git a/libs/xmpp/filetransfer.py b/libs/xmpp/filetransfer.py
new file mode 100644
index 00000000..87ddc219
--- /dev/null
+++ b/libs/xmpp/filetransfer.py
@@ -0,0 +1,199 @@
+## filetransfer.py
+##
+## Copyright (C) 2004 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: filetransfer.py,v 1.6 2004/12/25 20:06:59 snakeru Exp $
+
+"""
+This module contains IBB class that is the simple implementation of JEP-0047.
+Note that this is just a transport for data. You have to negotiate data transfer before
+(via StreamInitiation most probably). Unfortunately SI is not implemented yet.
+"""
+
+from protocol import *
+from dispatcher import PlugIn
+import base64
+
+class IBB(PlugIn):
+    """ IBB used to transfer small-sized data chunk over established xmpp connection.
+ Data is split into small blocks (by default 3000 bytes each), encoded as base 64
+ and sent to another entity that compiles these blocks back into the data chunk.
+    This is very inefficient but should work under any circumstances. Note that
+ using IBB normally should be the last resort.
+ """
+ def __init__(self):
+ """ Initialise internal variables. """
+ PlugIn.__init__(self)
+ self.DBG_LINE='ibb'
+ self._exported_methods=[self.OpenStream]
+ self._streams={}
+ self._ampnode=Node(NS_AMP+' amp',payload=[Node('rule',{'condition':'deliver-at','value':'stored','action':'error'}),Node('rule',{'condition':'match-resource','value':'exact','action':'error'})])
+
+ def plugin(self,owner):
+ """ Register handlers for receiving incoming datastreams. Used internally. """
+ self._owner.RegisterHandlerOnce('iq',self.StreamOpenReplyHandler) # Move to StreamOpen and specify stanza id
+ self._owner.RegisterHandler('iq',self.IqHandler,ns=NS_IBB)
+ self._owner.RegisterHandler('message',self.ReceiveHandler,ns=NS_IBB)
+
+ def IqHandler(self,conn,stanza):
+ """ Handles streams state change. Used internally. """
+ typ=stanza.getType()
+ self.DEBUG('IqHandler called typ->%s'%typ,'info')
+ if typ=='set' and stanza.getTag('open',namespace=NS_IBB): self.StreamOpenHandler(conn,stanza)
+ elif typ=='set' and stanza.getTag('close',namespace=NS_IBB): self.StreamCloseHandler(conn,stanza)
+ elif typ=='result': self.StreamCommitHandler(conn,stanza)
+ elif typ=='error': self.StreamOpenReplyHandler(conn,stanza)
+ else: conn.send(Error(stanza,ERR_BAD_REQUEST))
+ raise NodeProcessed
+
+ def StreamOpenHandler(self,conn,stanza):
+ """ Handles opening of new incoming stream. Used internally. """
+ """
+
+
+
+"""
+ err=None
+ sid,blocksize=stanza.getTagAttr('open','sid'),stanza.getTagAttr('open','block-size')
+ self.DEBUG('StreamOpenHandler called sid->%s blocksize->%s'%(sid,blocksize),'info')
+ try: blocksize=int(blocksize)
+ except: err=ERR_BAD_REQUEST
+ if not sid or not blocksize: err=ERR_BAD_REQUEST
+ elif sid in self._streams.keys(): err=ERR_UNEXPECTED_REQUEST
+ if err: rep=Error(stanza,err)
+ else:
+ self.DEBUG("Opening stream: id %s, block-size %s"%(sid,blocksize),'info')
+ rep=Protocol('iq',stanza.getFrom(),'result',stanza.getTo(),{'id':stanza.getID()})
+ self._streams[sid]={'direction':'<'+str(stanza.getFrom()),'block-size':blocksize,'fp':open('/tmp/xmpp_file_'+sid,'w'),'seq':0,'syn_id':stanza.getID()}
+ conn.send(rep)
+
+ def OpenStream(self,sid,to,fp,blocksize=3000):
+        """ Start new stream. You should provide stream id 'sid', the endpoint jid 'to',
+ the file object containing info for send 'fp'. Also the desired blocksize can be specified.
+ Take into account that recommended stanza size is 4k and IBB uses base64 encoding
+ that increases size of data by 1/3."""
+ if sid in self._streams.keys(): return
+ if not JID(to).getResource(): return
+ self._streams[sid]={'direction':'|>'+to,'block-size':blocksize,'fp':fp,'seq':0}
+ self._owner.RegisterCycleHandler(self.SendHandler)
+ syn=Protocol('iq',to,'set',payload=[Node(NS_IBB+' open',{'sid':sid,'block-size':blocksize})])
+ self._owner.send(syn)
+ self._streams[sid]['syn_id']=syn.getID()
+ return self._streams[sid]
+
+ def SendHandler(self,conn):
+ """ Send next portion of data if it is time to do it. Used internally. """
+ self.DEBUG('SendHandler called','info')
+ for sid in self._streams.keys():
+ stream=self._streams[sid]
+ if stream['direction'][:2]=='|>': cont=1
+ elif stream['direction'][0]=='>':
+ chunk=stream['fp'].read(stream['block-size'])
+ if chunk:
+ datanode=Node(NS_IBB+' data',{'sid':sid,'seq':stream['seq']},base64.encodestring(chunk))
+ stream['seq']+=1
+ if stream['seq']==65536: stream['seq']=0
+ conn.send(Protocol('message',stream['direction'][1:],payload=[datanode,self._ampnode]))
+ else:
+ """ notify the other side about stream closing
+                    notify the local user about successful send
+ delete the local stream"""
+ conn.send(Protocol('iq',stream['direction'][1:],'set',payload=[Node(NS_IBB+' close',{'sid':sid})]))
+ conn.Event(self.DBG_LINE,'SUCCESSFULL SEND',stream)
+ del self._streams[sid]
+ self._owner.UnregisterCycleHandler(self.SendHandler)
+
+ """
+
+
+ qANQR1DBwU4DX7jmYZnncmUQB/9KuKBddzQH+tZ1ZywKK0yHKnq57kWq+RFtQdCJ
+ WpdWpR0uQsuJe7+vh3NWn59/gTc5MDlX8dS9p0ovStmNcyLhxVgmqS8ZKhsblVeu
+ IpQ0JgavABqibJolc3BKrVtVV1igKiX/N7Pi8RtY1K18toaMDhdEfhBRzO/XB0+P
+ AQhYlRjNacGcslkhXqNjK5Va4tuOAPy2n1Q8UUrHbUd0g+xJ9Bm0G0LZXyvCWyKH
+ kuNEHFQiLuCY6Iv0myq6iX6tjuHehZlFSh80b5BVV9tNLwNR5Eqz1klxMhoghJOA
+
+
+
+
+
+
+"""
+
+ def ReceiveHandler(self,conn,stanza):
+        """ Receive next portion of incoming datastream and write
+            it to a temporary file. Used internally.
+ """
+ sid,seq,data=stanza.getTagAttr('data','sid'),stanza.getTagAttr('data','seq'),stanza.getTagData('data')
+ self.DEBUG('ReceiveHandler called sid->%s seq->%s'%(sid,seq),'info')
+ try: seq=int(seq); data=base64.decodestring(data)
+ except: seq=''; data=''
+ err=None
+ if not sid in self._streams.keys(): err=ERR_ITEM_NOT_FOUND
+ else:
+ stream=self._streams[sid]
+ if not data: err=ERR_BAD_REQUEST
+ elif seq<>stream['seq']: err=ERR_UNEXPECTED_REQUEST
+ else:
+ self.DEBUG('Successfull receive sid->%s %s+%s bytes'%(sid,stream['fp'].tell(),len(data)),'ok')
+ stream['seq']+=1
+ stream['fp'].write(data)
+ if err:
+ self.DEBUG('Error on receive: %s'%err,'error')
+ conn.send(Error(Iq(to=stanza.getFrom(),frm=stanza.getTo(),payload=[Node(NS_IBB+' close')]),err,reply=0))
+
+ def StreamCloseHandler(self,conn,stanza):
+        """ Handle stream closure after all data has been transmitted.
+            Raise xmpppy event specifying successful data receive. """
+ sid=stanza.getTagAttr('close','sid')
+ self.DEBUG('StreamCloseHandler called sid->%s'%sid,'info')
+ if sid in self._streams.keys():
+ conn.send(stanza.buildReply('result'))
+ conn.Event(self.DBG_LINE,'SUCCESSFULL RECEIVE',self._streams[sid])
+ del self._streams[sid]
+ else: conn.send(Error(stanza,ERR_ITEM_NOT_FOUND))
+
+ def StreamBrokenHandler(self,conn,stanza):
+        """ Handle stream closure due to some error while receiving data.
+            Raise xmpppy event specifying unsuccessful data receive. """
+ syn_id=stanza.getID()
+ self.DEBUG('StreamBrokenHandler called syn_id->%s'%syn_id,'info')
+ for sid in self._streams.keys():
+ stream=self._streams[sid]
+ if stream['syn_id']==syn_id:
+ if stream['direction'][0]=='<': conn.Event(self.DBG_LINE,'ERROR ON RECEIVE',stream)
+ else: conn.Event(self.DBG_LINE,'ERROR ON SEND',stream)
+ del self._streams[sid]
+
+ def StreamOpenReplyHandler(self,conn,stanza):
+ """ Handle remote side reply about is it agree or not to receive our datastream.
+        Used internally. Raises xmpppy event specifying if the data transfer
+ is agreed upon."""
+ syn_id=stanza.getID()
+ self.DEBUG('StreamOpenReplyHandler called syn_id->%s'%syn_id,'info')
+ for sid in self._streams.keys():
+ stream=self._streams[sid]
+ if stream['syn_id']==syn_id:
+ if stanza.getType()=='error':
+ if stream['direction'][0]=='<': conn.Event(self.DBG_LINE,'ERROR ON RECEIVE',stream)
+ else: conn.Event(self.DBG_LINE,'ERROR ON SEND',stream)
+ del self._streams[sid]
+ elif stanza.getType()=='result':
+ if stream['direction'][0]=='|':
+ stream['direction']=stream['direction'][1:]
+ conn.Event(self.DBG_LINE,'STREAM COMMITTED',stream)
+ else: conn.send(Error(stanza,ERR_UNEXPECTED_REQUEST))
diff --git a/libs/xmpp/jep0106.py b/libs/xmpp/jep0106.py
new file mode 100644
index 00000000..fcf11145
--- /dev/null
+++ b/libs/xmpp/jep0106.py
@@ -0,0 +1,57 @@
+
+# JID Escaping XEP-0106 for the xmpppy based transports written by Norman Rasmussen
+
+"""This file is the XEP-0106 commands.
+
+Implemented commands as follows:
+
+4.2 Encode : Encoding Transformation
+4.3 Decode : Decoding Transformation
+
+
+"""
+
+xep0106mapping = [
+ [' ' ,'20'],
+ ['"' ,'22'],
+ ['&' ,'26'],
+ ['\'','27'],
+ ['/' ,'2f'],
+ [':' ,'3a'],
+ ['<' ,'3c'],
+ ['>' ,'3e'],
+ ['@' ,'40']]
+
+def JIDEncode(str):
+ str = str.replace('\\5c', '\\5c5c')
+ for each in xep0106mapping:
+ str = str.replace('\\' + each[1], '\\5c' + each[1])
+ for each in xep0106mapping:
+ str = str.replace(each[0], '\\' + each[1])
+ return str
+
+def JIDDecode(str):
+ for each in xep0106mapping:
+ str = str.replace('\\' + each[1], each[0])
+ return str.replace('\\5c', '\\')
+
+if __name__ == "__main__":
+ def test(before,valid):
+ during = JIDEncode(before)
+ after = JIDDecode(during)
+ if during == valid and after == before:
+ print 'PASS Before: ' + before
+ print 'PASS During: ' + during
+ else:
+ print 'FAIL Before: ' + before
+ print 'FAIL During: ' + during
+ print 'FAIL After : ' + after
+ print
+
+ test('jid escaping',r'jid\20escaping')
+ test(r'\3and\2is\5@example.com',r'\5c3and\2is\5\40example.com')
+ test(r'\3catsand\2catsis\5cats@example.com',r'\5c3catsand\2catsis\5c5cats\40example.com')
+ test(r'\2plus\2is\4',r'\2plus\2is\4')
+ test(r'foo\bar',r'foo\bar')
+ test(r'foob\41r',r'foob\41r')
+ test('here\'s_a wild_&_/cr%zy/_address@example.com',r'here\27s_a\20wild_\26_\2fcr%zy\2f_address\40example.com')
diff --git a/libs/xmpp/protocol.py b/libs/xmpp/protocol.py
new file mode 100644
index 00000000..3e49b8d2
--- /dev/null
+++ b/libs/xmpp/protocol.py
@@ -0,0 +1,860 @@
+## protocol.py
+##
+## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: protocol.py,v 1.60 2009/04/07 11:14:28 snakeru Exp $
+
+"""
+Protocol module contains tools that is needed for processing of
+xmpp-related data structures.
+"""
+
+from simplexml import Node,ustr
+import time
+NS_ACTIVITY ='http://jabber.org/protocol/activity' # XEP-0108
+NS_ADDRESS ='http://jabber.org/protocol/address' # XEP-0033
+NS_ADMIN ='http://jabber.org/protocol/admin' # XEP-0133
+NS_ADMIN_ADD_USER =NS_ADMIN+'#add-user' # XEP-0133
+NS_ADMIN_DELETE_USER =NS_ADMIN+'#delete-user' # XEP-0133
+NS_ADMIN_DISABLE_USER =NS_ADMIN+'#disable-user' # XEP-0133
+NS_ADMIN_REENABLE_USER =NS_ADMIN+'#reenable-user' # XEP-0133
+NS_ADMIN_END_USER_SESSION =NS_ADMIN+'#end-user-session' # XEP-0133
+NS_ADMIN_GET_USER_PASSWORD =NS_ADMIN+'#get-user-password' # XEP-0133
+NS_ADMIN_CHANGE_USER_PASSWORD =NS_ADMIN+'#change-user-password' # XEP-0133
+NS_ADMIN_GET_USER_ROSTER =NS_ADMIN+'#get-user-roster' # XEP-0133
+NS_ADMIN_GET_USER_LASTLOGIN =NS_ADMIN+'#get-user-lastlogin' # XEP-0133
+NS_ADMIN_USER_STATS =NS_ADMIN+'#user-stats' # XEP-0133
+NS_ADMIN_EDIT_BLACKLIST =NS_ADMIN+'#edit-blacklist' # XEP-0133
+NS_ADMIN_EDIT_WHITELIST =NS_ADMIN+'#edit-whitelist' # XEP-0133
+NS_ADMIN_REGISTERED_USERS_NUM =NS_ADMIN+'#get-registered-users-num' # XEP-0133
+NS_ADMIN_DISABLED_USERS_NUM =NS_ADMIN+'#get-disabled-users-num' # XEP-0133
+NS_ADMIN_ONLINE_USERS_NUM =NS_ADMIN+'#get-online-users-num' # XEP-0133
+NS_ADMIN_ACTIVE_USERS_NUM =NS_ADMIN+'#get-active-users-num' # XEP-0133
+NS_ADMIN_IDLE_USERS_NUM =NS_ADMIN+'#get-idle-users-num' # XEP-0133
+NS_ADMIN_REGISTERED_USERS_LIST =NS_ADMIN+'#get-registered-users-list' # XEP-0133
+NS_ADMIN_DISABLED_USERS_LIST =NS_ADMIN+'#get-disabled-users-list' # XEP-0133
+NS_ADMIN_ONLINE_USERS_LIST =NS_ADMIN+'#get-online-users-list' # XEP-0133
+NS_ADMIN_ACTIVE_USERS_LIST =NS_ADMIN+'#get-active-users-list' # XEP-0133
+NS_ADMIN_IDLE_USERS_LIST =NS_ADMIN+'#get-idle-users-list' # XEP-0133
+NS_ADMIN_ANNOUNCE =NS_ADMIN+'#announce' # XEP-0133
+NS_ADMIN_SET_MOTD =NS_ADMIN+'#set-motd' # XEP-0133
+NS_ADMIN_EDIT_MOTD =NS_ADMIN+'#edit-motd' # XEP-0133
+NS_ADMIN_DELETE_MOTD =NS_ADMIN+'#delete-motd' # XEP-0133
+NS_ADMIN_SET_WELCOME =NS_ADMIN+'#set-welcome' # XEP-0133
+NS_ADMIN_DELETE_WELCOME =NS_ADMIN+'#delete-welcome' # XEP-0133
+NS_ADMIN_EDIT_ADMIN =NS_ADMIN+'#edit-admin' # XEP-0133
+NS_ADMIN_RESTART =NS_ADMIN+'#restart' # XEP-0133
+NS_ADMIN_SHUTDOWN =NS_ADMIN+'#shutdown' # XEP-0133
+NS_AGENTS ='jabber:iq:agents' # XEP-0094 (historical)
+NS_AMP ='http://jabber.org/protocol/amp' # XEP-0079
+NS_AMP_ERRORS =NS_AMP+'#errors' # XEP-0079
+NS_AUTH ='jabber:iq:auth' # XEP-0078
+NS_AVATAR ='jabber:iq:avatar' # XEP-0008 (historical)
+NS_BIND ='urn:ietf:params:xml:ns:xmpp-bind' # RFC 3920
+NS_BROWSE ='jabber:iq:browse' # XEP-0011 (historical)
+NS_BYTESTREAM ='http://jabber.org/protocol/bytestreams' # XEP-0065
+NS_CAPS ='http://jabber.org/protocol/caps' # XEP-0115
+NS_CHATSTATES ='http://jabber.org/protocol/chatstates' # XEP-0085
+NS_CLIENT ='jabber:client' # RFC 3921
+NS_COMMANDS ='http://jabber.org/protocol/commands' # XEP-0050
+NS_COMPONENT_ACCEPT ='jabber:component:accept' # XEP-0114
+NS_COMPONENT_1 ='http://jabberd.jabberstudio.org/ns/component/1.0' # Jabberd2
+NS_COMPRESS ='http://jabber.org/protocol/compress' # XEP-0138
+NS_DATA ='jabber:x:data' # XEP-0004
+NS_DATA_LAYOUT ='http://jabber.org/protocol/xdata-layout' # XEP-0141
+NS_DATA_VALIDATE ='http://jabber.org/protocol/xdata-validate' # XEP-0122
+NS_DELAY ='jabber:x:delay' # XEP-0091 (deprecated)
+NS_DIALBACK ='jabber:server:dialback' # RFC 3921
+NS_DISCO ='http://jabber.org/protocol/disco' # XEP-0030
+NS_DISCO_INFO =NS_DISCO+'#info' # XEP-0030
+NS_DISCO_ITEMS =NS_DISCO+'#items' # XEP-0030
+NS_ENCRYPTED ='jabber:x:encrypted' # XEP-0027
+NS_EVENT ='jabber:x:event' # XEP-0022 (deprecated)
+NS_FEATURE ='http://jabber.org/protocol/feature-neg' # XEP-0020
+NS_FILE ='http://jabber.org/protocol/si/profile/file-transfer' # XEP-0096
+NS_GATEWAY ='jabber:iq:gateway' # XEP-0100
+NS_GEOLOC ='http://jabber.org/protocol/geoloc' # XEP-0080
+NS_GROUPCHAT ='gc-1.0' # XEP-0045
+NS_HTTP_BIND ='http://jabber.org/protocol/httpbind' # XEP-0124
+NS_IBB ='http://jabber.org/protocol/ibb' # XEP-0047
+NS_INVISIBLE ='presence-invisible' # Jabberd2
+NS_IQ ='iq' # Jabberd2
+NS_LAST ='jabber:iq:last' # XEP-0012
+NS_MESSAGE ='message' # Jabberd2
+NS_MOOD ='http://jabber.org/protocol/mood' # XEP-0107
+NS_MUC ='http://jabber.org/protocol/muc' # XEP-0045
+NS_MUC_ADMIN =NS_MUC+'#admin' # XEP-0045
+NS_MUC_OWNER =NS_MUC+'#owner' # XEP-0045
+NS_MUC_UNIQUE =NS_MUC+'#unique' # XEP-0045
+NS_MUC_USER =NS_MUC+'#user' # XEP-0045
+NS_MUC_REGISTER =NS_MUC+'#register' # XEP-0045
+NS_MUC_REQUEST =NS_MUC+'#request' # XEP-0045
+NS_MUC_ROOMCONFIG =NS_MUC+'#roomconfig' # XEP-0045
+NS_MUC_ROOMINFO =NS_MUC+'#roominfo' # XEP-0045
+NS_MUC_ROOMS =NS_MUC+'#rooms' # XEP-0045
+NS_MUC_TRAFIC =NS_MUC+'#traffic' # XEP-0045
+NS_NICK ='http://jabber.org/protocol/nick' # XEP-0172
+NS_OFFLINE ='http://jabber.org/protocol/offline' # XEP-0013
+NS_PHYSLOC ='http://jabber.org/protocol/physloc' # XEP-0112
+NS_PRESENCE ='presence' # Jabberd2
+NS_PRIVACY ='jabber:iq:privacy' # RFC 3921
+NS_PRIVATE ='jabber:iq:private' # XEP-0049
+NS_PUBSUB ='http://jabber.org/protocol/pubsub' # XEP-0060
+NS_REGISTER ='jabber:iq:register' # XEP-0077
+NS_RC ='http://jabber.org/protocol/rc' # XEP-0146
+NS_ROSTER ='jabber:iq:roster' # RFC 3921
+NS_ROSTERX ='http://jabber.org/protocol/rosterx' # XEP-0144
+NS_RPC ='jabber:iq:rpc' # XEP-0009
+NS_SASL ='urn:ietf:params:xml:ns:xmpp-sasl' # RFC 3920
+NS_SEARCH ='jabber:iq:search' # XEP-0055
+NS_SERVER ='jabber:server' # RFC 3921
+NS_SESSION ='urn:ietf:params:xml:ns:xmpp-session' # RFC 3921
+NS_SI ='http://jabber.org/protocol/si' # XEP-0096
+NS_SI_PUB ='http://jabber.org/protocol/sipub' # XEP-0137
+NS_SIGNED ='jabber:x:signed' # XEP-0027
+NS_STANZAS ='urn:ietf:params:xml:ns:xmpp-stanzas' # RFC 3920
+NS_STREAMS ='http://etherx.jabber.org/streams' # RFC 3920
+NS_TIME ='jabber:iq:time' # XEP-0090 (deprecated)
+NS_TLS ='urn:ietf:params:xml:ns:xmpp-tls' # RFC 3920
+NS_VACATION ='http://jabber.org/protocol/vacation' # XEP-0109
+NS_VCARD ='vcard-temp' # XEP-0054
+NS_VCARD_UPDATE ='vcard-temp:x:update' # XEP-0153
+NS_VERSION ='jabber:iq:version' # XEP-0092
+NS_WAITINGLIST ='http://jabber.org/protocol/waitinglist' # XEP-0130
+NS_XHTML_IM ='http://jabber.org/protocol/xhtml-im' # XEP-0071
+NS_XMPP_STREAMS ='urn:ietf:params:xml:ns:xmpp-streams' # RFC 3920
+
+xmpp_stream_error_conditions="""
+bad-format -- -- -- The entity has sent XML that cannot be processed.
+bad-namespace-prefix -- -- -- The entity has sent a namespace prefix that is unsupported, or has sent no namespace prefix on an element that requires such a prefix.
+conflict -- -- -- The server is closing the active stream for this entity because a new stream has been initiated that conflicts with the existing stream.
+connection-timeout -- -- -- The entity has not generated any traffic over the stream for some period of time.
+host-gone -- -- -- The value of the 'to' attribute provided by the initiating entity in the stream header corresponds to a hostname that is no longer hosted by the server.
+host-unknown -- -- -- The value of the 'to' attribute provided by the initiating entity in the stream header does not correspond to a hostname that is hosted by the server.
+improper-addressing -- -- -- A stanza sent between two servers lacks a 'to' or 'from' attribute (or the attribute has no value).
+internal-server-error -- -- -- The server has experienced a misconfiguration or an otherwise-undefined internal error that prevents it from servicing the stream.
+invalid-from -- cancel -- -- The JID or hostname provided in a 'from' address does not match an authorized JID or validated domain negotiated between servers via SASL or dialback, or between a client and a server via authentication and resource authorization.
+invalid-id -- -- -- The stream ID or dialback ID is invalid or does not match an ID previously provided.
+invalid-namespace -- -- -- The streams namespace name is something other than "http://etherx.jabber.org/streams" or the dialback namespace name is something other than "jabber:server:dialback".
+invalid-xml -- -- -- The entity has sent invalid XML over the stream to a server that performs validation.
+not-authorized -- -- -- The entity has attempted to send data before the stream has been authenticated, or otherwise is not authorized to perform an action related to stream negotiation.
+policy-violation -- -- -- The entity has violated some local service policy.
+remote-connection-failed -- -- -- The server is unable to properly connect to a remote resource that is required for authentication or authorization.
+resource-constraint -- -- -- The server lacks the system resources necessary to service the stream.
+restricted-xml -- -- -- The entity has attempted to send restricted XML features such as a comment, processing instruction, DTD, entity reference, or unescaped character.
+see-other-host -- -- -- The server will not provide service to the initiating entity but is redirecting traffic to another host.
+system-shutdown -- -- -- The server is being shut down and all active streams are being closed.
+undefined-condition -- -- -- The error condition is not one of those defined by the other conditions in this list.
+unsupported-encoding -- -- -- The initiating entity has encoded the stream in an encoding that is not supported by the server.
+unsupported-stanza-type -- -- -- The initiating entity has sent a first-level child of the stream that is not supported by the server.
+unsupported-version -- -- -- The value of the 'version' attribute provided by the initiating entity in the stream header specifies a version of XMPP that is not supported by the server.
+xml-not-well-formed -- -- -- The initiating entity has sent XML that is not well-formed."""
+xmpp_stanza_error_conditions="""
+bad-request -- 400 -- modify -- The sender has sent XML that is malformed or that cannot be processed.
+conflict -- 409 -- cancel -- Access cannot be granted because an existing resource or session exists with the same name or address.
+feature-not-implemented -- 501 -- cancel -- The feature requested is not implemented by the recipient or server and therefore cannot be processed.
+forbidden -- 403 -- auth -- The requesting entity does not possess the required permissions to perform the action.
+gone -- 302 -- modify -- The recipient or server can no longer be contacted at this address.
+internal-server-error -- 500 -- wait -- The server could not process the stanza because of a misconfiguration or an otherwise-undefined internal server error.
+item-not-found -- 404 -- cancel -- The addressed JID or item requested cannot be found.
+jid-malformed -- 400 -- modify -- The value of the 'to' attribute in the sender's stanza does not adhere to the syntax defined in Addressing Scheme.
+not-acceptable -- 406 -- cancel -- The recipient or server understands the request but is refusing to process it because it does not meet criteria defined by the recipient or server.
+not-allowed -- 405 -- cancel -- The recipient or server does not allow any entity to perform the action.
+not-authorized -- 401 -- auth -- The sender must provide proper credentials before being allowed to perform the action, or has provided improper credentials.
+payment-required -- 402 -- auth -- The requesting entity is not authorized to access the requested service because payment is required.
+recipient-unavailable -- 404 -- wait -- The intended recipient is temporarily unavailable.
+redirect -- 302 -- modify -- The recipient or server is redirecting requests for this information to another entity.
+registration-required -- 407 -- auth -- The requesting entity is not authorized to access the requested service because registration is required.
+remote-server-not-found -- 404 -- cancel -- A remote server or service specified as part or all of the JID of the intended recipient does not exist.
+remote-server-timeout -- 504 -- wait -- A remote server or service specified as part or all of the JID of the intended recipient could not be contacted within a reasonable amount of time.
+resource-constraint -- 500 -- wait -- The server or recipient lacks the system resources necessary to service the request.
+service-unavailable -- 503 -- cancel -- The server or recipient does not currently provide the requested service.
+subscription-required -- 407 -- auth -- The requesting entity is not authorized to access the requested service because a subscription is required.
+undefined-condition -- 500 -- --
+unexpected-request -- 400 -- wait -- The recipient or server understood the request but was not expecting it at this time (e.g., the request was out of order)."""
+sasl_error_conditions="""
+aborted -- -- -- The receiving entity acknowledges an element sent by the initiating entity; sent in reply to the element.
+incorrect-encoding -- -- -- The data provided by the initiating entity could not be processed because the [BASE64]Josefsson, S., The Base16, Base32, and Base64 Data Encodings, July 2003. encoding is incorrect (e.g., because the encoding does not adhere to the definition in Section 3 of [BASE64]Josefsson, S., The Base16, Base32, and Base64 Data Encodings, July 2003.); sent in reply to a element or an element with initial response data.
+invalid-authzid -- -- -- The authzid provided by the initiating entity is invalid, either because it is incorrectly formatted or because the initiating entity does not have permissions to authorize that ID; sent in reply to a element or an element with initial response data.
+invalid-mechanism -- -- -- The initiating entity did not provide a mechanism or requested a mechanism that is not supported by the receiving entity; sent in reply to an element.
+mechanism-too-weak -- -- -- The mechanism requested by the initiating entity is weaker than server policy permits for that initiating entity; sent in reply to a element or an element with initial response data.
+not-authorized -- -- -- The authentication failed because the initiating entity did not provide valid credentials (this includes but is not limited to the case of an unknown username); sent in reply to a element or an element with initial response data.
+temporary-auth-failure -- -- -- The authentication failed because of a temporary error condition within the receiving entity; sent in reply to an element or element."""
+
+ERRORS,_errorcodes={},{}
+for ns,errname,errpool in [(NS_XMPP_STREAMS,'STREAM',xmpp_stream_error_conditions),
+ (NS_STANZAS ,'ERR' ,xmpp_stanza_error_conditions),
+ (NS_SASL ,'SASL' ,sasl_error_conditions)]:
+ for err in errpool.split('\n')[1:]:
+ cond,code,typ,text=err.split(' -- ')
+ name=errname+'_'+cond.upper().replace('-','_')
+ locals()[name]=ns+' '+cond
+ ERRORS[ns+' '+cond]=[code,typ,text]
+ if code: _errorcodes[code]=cond
+del ns,errname,errpool,err,cond,code,typ,text
+
+def isResultNode(node):
+ """ Returns true if the node is a positive reply. """
+ return node and node.getType()=='result'
+def isErrorNode(node):
+ """ Returns true if the node is a negative reply. """
+ return node and node.getType()=='error'
+
+class NodeProcessed(Exception):
+ """ Exception that should be raised by handler when the handling should be stopped. """
+class StreamError(Exception):
+ """ Base exception class for stream errors."""
+class BadFormat(StreamError): pass
+class BadNamespacePrefix(StreamError): pass
+class Conflict(StreamError): pass
+class ConnectionTimeout(StreamError): pass
+class HostGone(StreamError): pass
+class HostUnknown(StreamError): pass
+class ImproperAddressing(StreamError): pass
+class InternalServerError(StreamError): pass
+class InvalidFrom(StreamError): pass
+class InvalidID(StreamError): pass
+class InvalidNamespace(StreamError): pass
+class InvalidXML(StreamError): pass
+class NotAuthorized(StreamError): pass
+class PolicyViolation(StreamError): pass
+class RemoteConnectionFailed(StreamError): pass
+class ResourceConstraint(StreamError): pass
+class RestrictedXML(StreamError): pass
+class SeeOtherHost(StreamError): pass
+class SystemShutdown(StreamError): pass
+class UndefinedCondition(StreamError): pass
+class UnsupportedEncoding(StreamError): pass
+class UnsupportedStanzaType(StreamError): pass
+class UnsupportedVersion(StreamError): pass
+class XMLNotWellFormed(StreamError): pass
+
+stream_exceptions = {'bad-format': BadFormat,
+ 'bad-namespace-prefix': BadNamespacePrefix,
+ 'conflict': Conflict,
+ 'connection-timeout': ConnectionTimeout,
+ 'host-gone': HostGone,
+ 'host-unknown': HostUnknown,
+ 'improper-addressing': ImproperAddressing,
+ 'internal-server-error': InternalServerError,
+ 'invalid-from': InvalidFrom,
+ 'invalid-id': InvalidID,
+ 'invalid-namespace': InvalidNamespace,
+ 'invalid-xml': InvalidXML,
+ 'not-authorized': NotAuthorized,
+ 'policy-violation': PolicyViolation,
+ 'remote-connection-failed': RemoteConnectionFailed,
+ 'resource-constraint': ResourceConstraint,
+ 'restricted-xml': RestrictedXML,
+ 'see-other-host': SeeOtherHost,
+ 'system-shutdown': SystemShutdown,
+ 'undefined-condition': UndefinedCondition,
+ 'unsupported-encoding': UnsupportedEncoding,
+ 'unsupported-stanza-type': UnsupportedStanzaType,
+ 'unsupported-version': UnsupportedVersion,
+ 'xml-not-well-formed': XMLNotWellFormed}
+
+class JID:
+ """ JID object. JID can be built from string, modified, compared, serialised into string. """
+ def __init__(self, jid=None, node='', domain='', resource=''):
+ """ Constructor. JID can be specified as string (jid argument) or as separate parts.
+ Examples:
+ JID('node@domain/resource')
+ JID(node='node',domain='domain.org')
+ """
+ if not jid and not domain: raise ValueError('JID must contain at least domain name')
+ elif type(jid)==type(self): self.node,self.domain,self.resource=jid.node,jid.domain,jid.resource
+ elif domain: self.node,self.domain,self.resource=node,domain,resource
+ else:
+ if jid.find('@')+1: self.node,jid=jid.split('@',1)
+ else: self.node=''
+ if jid.find('/')+1: self.domain,self.resource=jid.split('/',1)
+ else: self.domain,self.resource=jid,''
+ def getNode(self):
+ """ Return the node part of the JID """
+ return self.node
+ def setNode(self,node):
+ """ Set the node part of the JID to new value. Specify None to remove the node part."""
+ self.node=node.lower()
+ def getDomain(self):
+ """ Return the domain part of the JID """
+ return self.domain
+ def setDomain(self,domain):
+ """ Set the domain part of the JID to new value."""
+ self.domain=domain.lower()
+ def getResource(self):
+ """ Return the resource part of the JID """
+ return self.resource
+ def setResource(self,resource):
+ """ Set the resource part of the JID to new value. Specify None to remove the resource part."""
+ self.resource=resource
+ def getStripped(self):
+ """ Return the bare representation of JID. I.e. string value w/o resource. """
+ return self.__str__(0)
+ def __eq__(self, other):
+ """ Compare the JID to another instance or to string for equality. """
+ try: other=JID(other)
+ except ValueError: return 0
+ return self.resource==other.resource and self.__str__(0) == other.__str__(0)
+ def __ne__(self, other):
+ """ Compare the JID to another instance or to string for non-equality. """
+ return not self.__eq__(other)
+ def bareMatch(self, other):
+ """ Compare the node and domain parts of the JID's for equality. """
+ return self.__str__(0) == JID(other).__str__(0)
+ def __str__(self,wresource=1):
+ """ Serialise JID into string. """
+ if self.node: jid=self.node+'@'+self.domain
+ else: jid=self.domain
+ if wresource and self.resource: return jid+'/'+self.resource
+ return jid
+ def __hash__(self):
+ """ Produce hash of the JID, Allows to use JID objects as keys of the dictionary. """
+ return hash(self.__str__())
+
+class Protocol(Node):
+ """ A "stanza" object class. Contains methods that are common for presences, iqs and messages. """
+ def __init__(self, name=None, to=None, typ=None, frm=None, attrs={}, payload=[], timestamp=None, xmlns=None, node=None):
+ """ Constructor, name is the name of the stanza i.e. 'message' or 'presence' or 'iq'.
+ to is the value of 'to' attribure, 'typ' - 'type' attribute
+ frn - from attribure, attrs - other attributes mapping, payload - same meaning as for simplexml payload definition
+ timestamp - the time value that needs to be stamped over stanza
+ xmlns - namespace of top stanza node
+ node - parsed or unparsed stana to be taken as prototype.
+ """
+ if not attrs: attrs={}
+ if to: attrs['to']=to
+ if frm: attrs['from']=frm
+ if typ: attrs['type']=typ
+ Node.__init__(self, tag=name, attrs=attrs, payload=payload, node=node)
+ if not node and xmlns: self.setNamespace(xmlns)
+ if self['to']: self.setTo(self['to'])
+ if self['from']: self.setFrom(self['from'])
+ if node and type(self)==type(node) and self.__class__==node.__class__ and self.attrs.has_key('id'): del self.attrs['id']
+ self.timestamp=None
+ for x in self.getTags('x',namespace=NS_DELAY):
+ try:
+ if not self.getTimestamp() or x.getAttr('stamp')'text': return tag.getName()
+ return errtag.getData()
+ def getErrorCode(self):
+ """ Return the error code. Obsolette. """
+ return self.getTagAttr('error','code')
+ def setError(self,error,code=None):
+ """ Set the error code. Obsolette. Use error-conditions instead. """
+ if code:
+ if str(code) in _errorcodes.keys(): error=ErrorNode(_errorcodes[str(code)],text=error)
+ else: error=ErrorNode(ERR_UNDEFINED_CONDITION,code=code,typ='cancel',text=error)
+ elif type(error) in [type(''),type(u'')]: error=ErrorNode(error)
+ self.setType('error')
+ self.addChild(node=error)
+ def setTimestamp(self,val=None):
+ """Set the timestamp. timestamp should be the yyyymmddThhmmss string."""
+ if not val: val=time.strftime('%Y%m%dT%H:%M:%S', time.gmtime())
+ self.timestamp=val
+ self.setTag('x',{'stamp':self.timestamp},namespace=NS_DELAY)
+ def getProperties(self):
+ """ Return the list of namespaces to which belongs the direct childs of element"""
+ props=[]
+ for child in self.getChildren():
+ prop=child.getNamespace()
+ if prop not in props: props.append(prop)
+ return props
+ def __setitem__(self,item,val):
+ """ Set the item 'item' to the value 'val'."""
+ if item in ['to','from']: val=JID(val)
+ return self.setAttr(item,val)
+
+class Message(Protocol):
+ """ XMPP Message stanza - "push" mechanism."""
+ def __init__(self, to=None, body=None, typ=None, subject=None, attrs={}, frm=None, payload=[], timestamp=None, xmlns=NS_CLIENT, node=None):
+ """ Create message object. You can specify recipient, text of message, type of message
+ any additional attributes, sender of the message, any additional payload (f.e. jabber:x:delay element) and namespace in one go.
+ Alternatively you can pass in the other XML object as the 'node' parameted to replicate it as message. """
+ Protocol.__init__(self, 'message', to=to, typ=typ, attrs=attrs, frm=frm, payload=payload, timestamp=timestamp, xmlns=xmlns, node=node)
+ if body: self.setBody(body)
+ if subject: self.setSubject(subject)
+ def getBody(self):
+ """ Returns text of the message. """
+ return self.getTagData('body')
+ def getSubject(self):
+ """ Returns subject of the message. """
+ return self.getTagData('subject')
+ def getThread(self):
+ """ Returns thread of the message. """
+ return self.getTagData('thread')
+ def setBody(self,val):
+ """ Sets the text of the message. """
+ self.setTagData('body',val)
+ def setSubject(self,val):
+ """ Sets the subject of the message. """
+ self.setTagData('subject',val)
+ def setThread(self,val):
+ """ Sets the thread of the message. """
+ self.setTagData('thread',val)
+ def buildReply(self,text=None):
+ """ Builds and returns another message object with specified text.
+ The to, from and thread properties of new message are pre-set as reply to this message. """
+ m=Message(to=self.getFrom(),frm=self.getTo(),body=text)
+ th=self.getThread()
+ if th: m.setThread(th)
+ return m
+
+class Presence(Protocol):
+ """ XMPP Presence object."""
+ def __init__(self, to=None, typ=None, priority=None, show=None, status=None, attrs={}, frm=None, timestamp=None, payload=[], xmlns=NS_CLIENT, node=None):
+ """ Create presence object. You can specify recipient, type of message, priority, show and status values
+ any additional attributes, sender of the presence, timestamp, any additional payload (f.e. jabber:x:delay element) and namespace in one go.
+ Alternatively you can pass in the other XML object as the 'node' parameted to replicate it as presence. """
+ Protocol.__init__(self, 'presence', to=to, typ=typ, attrs=attrs, frm=frm, payload=payload, timestamp=timestamp, xmlns=xmlns, node=node)
+ if priority: self.setPriority(priority)
+ if show: self.setShow(show)
+ if status: self.setStatus(status)
+ def getPriority(self):
+ """ Returns the priority of the message. """
+ return self.getTagData('priority')
+ def getShow(self):
+ """ Returns the show value of the message. """
+ return self.getTagData('show')
+ def getStatus(self):
+ """ Returns the status string of the message. """
+ return self.getTagData('status')
+ def setPriority(self,val):
+ """ Sets the priority of the message. """
+ self.setTagData('priority',val)
+ def setShow(self,val):
+ """ Sets the show value of the message. """
+ self.setTagData('show',val)
+ def setStatus(self,val):
+ """ Sets the status string of the message. """
+ self.setTagData('status',val)
+
+ def _muc_getItemAttr(self,tag,attr):
+ for xtag in self.getTags('x'):
+ for child in xtag.getTags(tag):
+ return child.getAttr(attr)
+ def _muc_getSubTagDataAttr(self,tag,attr):
+ for xtag in self.getTags('x'):
+ for child in xtag.getTags('item'):
+ for cchild in child.getTags(tag):
+ return cchild.getData(),cchild.getAttr(attr)
+ return None,None
+ def getRole(self):
+ """Returns the presence role (for groupchat)"""
+ return self._muc_getItemAttr('item','role')
+ def getAffiliation(self):
+ """Returns the presence affiliation (for groupchat)"""
+ return self._muc_getItemAttr('item','affiliation')
+ def getNick(self):
+ """Returns the nick value (for nick change in groupchat)"""
+ return self._muc_getItemAttr('item','nick')
+ def getJid(self):
+ """Returns the presence jid (for groupchat)"""
+ return self._muc_getItemAttr('item','jid')
+ def getReason(self):
+ """Returns the reason of the presence (for groupchat)"""
+ return self._muc_getSubTagDataAttr('reason','')[0]
+ def getActor(self):
+ """Returns the reason of the presence (for groupchat)"""
+ return self._muc_getSubTagDataAttr('actor','jid')[1]
+ def getStatusCode(self):
+ """Returns the status code of the presence (for groupchat)"""
+ return self._muc_getItemAttr('status','code')
+
+class Iq(Protocol):
+ """ XMPP Iq object - get/set dialog mechanism. """
+ def __init__(self, typ=None, queryNS=None, attrs={}, to=None, frm=None, payload=[], xmlns=NS_CLIENT, node=None):
+ """ Create Iq object. You can specify type, query namespace
+ any additional attributes, recipient of the iq, sender of the iq, any additional payload (f.e. jabber:x:data node) and namespace in one go.
+ Alternatively you can pass in the other XML object as the 'node' parameted to replicate it as an iq. """
+ Protocol.__init__(self, 'iq', to=to, typ=typ, attrs=attrs, frm=frm, xmlns=xmlns, node=node)
+ if payload: self.setQueryPayload(payload)
+ if queryNS: self.setQueryNS(queryNS)
+ def getQueryNS(self):
+ """ Return the namespace of the 'query' child element."""
+ tag=self.getTag('query')
+ if tag: return tag.getNamespace()
+ def getQuerynode(self):
+ """ Return the 'node' attribute value of the 'query' child element."""
+ return self.getTagAttr('query','node')
+ def getQueryPayload(self):
+ """ Return the 'query' child element payload."""
+ tag=self.getTag('query')
+ if tag: return tag.getPayload()
+ def getQueryChildren(self):
+ """ Return the 'query' child element child nodes."""
+ tag=self.getTag('query')
+ if tag: return tag.getChildren()
+ def setQueryNS(self,namespace):
+ """ Set the namespace of the 'query' child element."""
+ self.setTag('query').setNamespace(namespace)
+ def setQueryPayload(self,payload):
+ """ Set the 'query' child element payload."""
+ self.setTag('query').setPayload(payload)
+ def setQuerynode(self,node):
+ """ Set the 'node' attribute value of the 'query' child element."""
+ self.setTagAttr('query','node',node)
+ def buildReply(self,typ):
+ """ Builds and returns another Iq object of specified type.
+ The to, from and query child node of new Iq are pre-set as reply to this Iq. """
+ iq=Iq(typ,to=self.getFrom(),frm=self.getTo(),attrs={'id':self.getID()})
+ if self.getTag('query'): iq.setQueryNS(self.getQueryNS())
+ return iq
+
+class ErrorNode(Node):
+ """ XMPP-style error element.
+ In the case of stanza error should be attached to XMPP stanza.
+ In the case of stream-level errors should be used separately. """
+ def __init__(self,name,code=None,typ=None,text=None):
+ """ Create new error node object.
+ Mandatory parameter: name - name of error condition.
+ Optional parameters: code, typ, text. Used for backwards compartibility with older jabber protocol."""
+ if ERRORS.has_key(name):
+ cod,type,txt=ERRORS[name]
+ ns=name.split()[0]
+ else: cod,ns,type,txt='500',NS_STANZAS,'cancel',''
+ if typ: type=typ
+ if code: cod=code
+ if text: txt=text
+ Node.__init__(self,'error',{},[Node(name)])
+ if type: self.setAttr('type',type)
+ if not cod: self.setName('stream:error')
+ if txt: self.addChild(node=Node(ns+' text',{},[txt]))
+ if cod: self.setAttr('code',cod)
+
class Error(Protocol):
    """ Used to quickly transform received stanza into error reply."""
    def __init__(self,node,error,reply=1):
        """ Create error reply basing on the received 'node' stanza and the 'error' error condition.
            If the 'node' is not the received stanza but locally created ('to' and 'from' fields need not swapping)
            specify the 'reply' argument as false."""
        # A reply swaps the addressing; a locally built stanza keeps it as-is.
        if reply: Protocol.__init__(self,to=node.getFrom(),frm=node.getTo(),node=node)
        else: Protocol.__init__(self,node=node)
        self.setError(error)
        # Never serialise an error made in reply to an error: the instance's
        # __str__ is replaced so such a stanza renders as an empty string.
        # NOTE(review): an instance-level __str__ only takes effect for
        # explicit .__str__() calls on new-style classes - confirm the
        # serialiser invokes it that way.
        if node.getType()=='error': self.__str__=self.__dupstr__
    def __dupstr__(self,dup1=None,dup2=None):
        """ Dummy function used as preventor of creating error node in reply to error node.
            I.e. you will not be able to serialise "double" error into string.
        """
        return ''
+
class DataField(Node):
    """ This class is used in the DataForm class to describe the single data item.
        If you are working with jabber:x:data (XEP-0004, XEP-0068, XEP-0122)
        then you will need to work with instances of this class. """
    def __init__(self,name=None,value=None,typ=None,required=0,label=None,desc=None,options=None,node=None):
        """ Create new data field of specified name, value and type.
            Also 'required', 'desc' and 'options' fields can be set.
            Alternatively other XML object can be passed in as the 'node' parameter to replicate it as a new datafield.
        """
        # BUGFIX: 'options' used to default to a shared mutable list ([]);
        # a None sentinel avoids cross-instance aliasing of the default.
        Node.__init__(self,'field',node=node)
        if name: self.setVar(name)
        if isinstance(value,(list,tuple)): self.setValues(value)
        elif value: self.setValue(value)
        if typ: self.setType(typ)
        elif not typ and not node: self.setType('text-single')   # default type for freshly built fields
        if required: self.setRequired(required)
        if label: self.setLabel(label)
        if desc: self.setDesc(desc)
        if options: self.setOptions(options)
    def setRequired(self,req=1):
        """ Change the state of the 'required' flag. """
        if req: self.setTag('required')
        else:
            # Removing an absent flag is not an error.
            try: self.delChild('required')
            except ValueError: return
    def isRequired(self):
        """ Returns whether this field is a required one. """
        return self.getTag('required')
    def setLabel(self,label):
        """ Set the label of this field. """
        self.setAttr('label',label)
    def getLabel(self):
        """ Return the label of this field. """
        return self.getAttr('label')
    def setDesc(self,desc):
        """ Set the description of this field. """
        self.setTagData('desc',desc)
    def getDesc(self):
        """ Return the description of this field. """
        return self.getTagData('desc')
    def setValue(self,val):
        """ Set the value of this field. """
        self.setTagData('value',val)
    def getValue(self):
        """ Return the (single) value of this field. """
        return self.getTagData('value')
    def setValues(self,lst):
        """ Set the values of this field as values-list.
            Replaces all previous field values! If you need to just add a value - use addValue method."""
        while self.getTag('value'): self.delChild('value')
        for val in lst: self.addValue(val)
    def addValue(self,val):
        """ Add one more value to this field. Used in 'get' iq's or such."""
        self.addChild('value',{},[val])
    def getValues(self):
        """ Return the list of values associated with this field."""
        return [tag.getData() for tag in self.getTags('value')]
    def getOptions(self):
        """ Return label-option pairs list associated with this field."""
        return [[tag.getAttr('label'),tag.getTagData('value')] for tag in self.getTags('option')]
    def setOptions(self,lst):
        """ Set label-option pairs list associated with this field."""
        while self.getTag('option'): self.delChild('option')
        for opt in lst: self.addOption(opt)
    def addOption(self,opt):
        """ Add one more label-option pair to this field.
            'opt' is either a plain value string or a (label, value) pair."""
        if isinstance(opt,(str,unicode)): self.addChild('option').setTagData('value',opt)
        else: self.addChild('option',{'label':opt[0]}).setTagData('value',opt[1])
    def getType(self):
        """ Get type of this field. """
        return self.getAttr('type')
    def setType(self,val):
        """ Set type of this field. """
        return self.setAttr('type',val)
    def getVar(self):
        """ Get 'var' attribute value of this field. """
        return self.getAttr('var')
    def setVar(self,val):
        """ Set 'var' attribute value of this field. """
        return self.setAttr('var',val)
+
class DataReported(Node):
    """ This class is used in the DataForm class to describe the 'reported data field' data items which are used in
        'multiple item form results' (as described in XEP-0004).
        Represents the fields that will be returned from a search. This information is useful when
        you try to use the jabber:iq:search namespace to return dynamic form information.
    """
    def __init__(self,node=None):
        """ Create new empty 'reported data' field. However, note that, according XEP-0004:
            * It MUST contain one or more DataFields.
            * Contained DataFields SHOULD possess a 'type' and 'label' attribute in addition to 'var' attribute
            * Contained DataFields SHOULD NOT contain a value element.
            Alternatively other XML object can be passed in as the 'node' parameter to replicate it as a new
            dataitem.
        """
        Node.__init__(self,'reported',node=node)
        if node:
            # Re-wrap generic children as DataField instances so the
            # field-level API is available on them.
            newkids=[]
            for n in self.getChildren():
                if n.getName()=='field': newkids.append(DataField(node=n))
                else: newkids.append(n)
            self.kids=newkids
    def getField(self,name):
        """ Return the datafield object with name 'name' (if exists). """
        return self.getTag('field',attrs={'var':name})
    def setField(self,name,typ=None,label=None):
        """ Create if necessary or get the existing datafield object with name 'name' and return it.
            If created, attributes 'type' and 'label' are applied to new datafield."""
        f=self.getField(name)
        if f: return f
        return self.addChild(node=DataField(name,None,typ,0,label))
    def asDict(self):
        """ Represent dataitem as simple dictionary mapping of datafield names to their values."""
        ret={}
        for field in self.getTags('field'):
            name=field.getAttr('var')
            typ=field.getType()
            if isinstance(typ,(str,unicode)) and typ[-6:]=='-multi':
                # '-multi' typed fields collect every value child into a list.
                val=[value_tag.getData() for value_tag in field.getTags('value')]
            else: val=field.getTagData('value')
            ret[name]=val
        # BUGFIX: this class defines no getInstructions() (the call was
        # copy-pasted from DataForm and raised AttributeError); read the
        # tag data directly instead.
        if self.getTag('instructions'): ret['instructions']=self.getTagData('instructions')
        return ret
    def __getitem__(self,name):
        """ Simple dictionary interface for getting datafields values by their names."""
        item=self.getField(name)
        if item: return item.getValue()
        raise IndexError('No such field')
    def __setitem__(self,name,val):
        """ Simple dictionary interface for setting datafields values by their names."""
        return self.setField(name).setValue(val)
+
class DataItem(Node):
    """ This class is used in the DataForm class to describe data items which are used in 'multiple
        item form results' (as described in XEP-0004).
    """
    def __init__(self,node=None):
        """ Create new empty data item. However, note that, according XEP-0004, DataItem MUST contain ALL
            DataFields described in DataReported.
            Alternatively other XML object can be passed in as the 'node' parameter to replicate it as a new
            dataitem.
        """
        Node.__init__(self,'item',node=node)
        if node:
            # Re-wrap generic children as DataField instances so the
            # field-level API is available on them.
            newkids=[]
            for n in self.getChildren():
                if n.getName()=='field': newkids.append(DataField(node=n))
                else: newkids.append(n)
            self.kids=newkids
    def getField(self,name):
        """ Return the datafield object with name 'name' (if exists). """
        return self.getTag('field',attrs={'var':name})
    def setField(self,name):
        """ Create if necessary or get the existing datafield object with name 'name' and return it. """
        f=self.getField(name)
        if f: return f
        return self.addChild(node=DataField(name))
    def asDict(self):
        """ Represent dataitem as simple dictionary mapping of datafield names to their values."""
        ret={}
        for field in self.getTags('field'):
            name=field.getAttr('var')
            typ=field.getType()
            if isinstance(typ,(str,unicode)) and typ[-6:]=='-multi':
                # '-multi' typed fields collect every value child into a list.
                val=[value_tag.getData() for value_tag in field.getTags('value')]
            else: val=field.getTagData('value')
            ret[name]=val
        # BUGFIX: this class defines no getInstructions() (the call was
        # copy-pasted from DataForm and raised AttributeError); read the
        # tag data directly instead.
        if self.getTag('instructions'): ret['instructions']=self.getTagData('instructions')
        return ret
    def __getitem__(self,name):
        """ Simple dictionary interface for getting datafields values by their names."""
        item=self.getField(name)
        if item: return item.getValue()
        raise IndexError('No such field')
    def __setitem__(self,name,val):
        """ Simple dictionary interface for setting datafields values by their names."""
        return self.setField(name).setValue(val)
+
class DataForm(Node):
    """ DataForm class. Used for manipulating dataforms in XMPP.
        Relevant XEPs: 0004, 0068, 0122.
        Can be used in disco, pub-sub and many other applications."""
    def __init__(self, typ=None, data=None, title=None, node=None):
        """
        Create new dataform of type 'typ'; 'data' is the list of DataReported,
        DataItem and DataField instances that this dataform contains; 'title'
        is the title string.
        You can specify the 'node' argument as the other node to be used as
        base for constructing this dataform.

        title and instructions is optional and SHOULD NOT contain newlines.
        Several instructions MAY be present.
        'typ' can be one of ('form' | 'submit' | 'cancel' | 'result' )
        'typ' of reply iq can be ( 'result' | 'set' | 'set' | 'result' ) respectively.
        'cancel' form can not contain any fields. All other forms contains AT LEAST one field.
        'title' MAY be included in forms of type "form" and "result"
        """
        # BUGFIX: 'data' used to default to a shared mutable list ([]); a
        # None sentinel avoids cross-instance aliasing of the default.
        if data is None: data=[]
        Node.__init__(self,'x',node=node)
        if node:
            # Re-wrap generic children as their typed counterparts so the
            # richer field/item/reported APIs are available on them.
            newkids=[]
            for n in self.getChildren():
                if n.getName()=='field': newkids.append(DataField(node=n))
                elif n.getName()=='item': newkids.append(DataItem(node=n))
                elif n.getName()=='reported': newkids.append(DataReported(node=n))
                else: newkids.append(n)
            self.kids=newkids
        if typ: self.setType(typ)
        self.setNamespace(NS_DATA)
        if title: self.setTitle(title)
        if isinstance(data,dict):
            # Convenience: a plain mapping becomes a list of name/value fields.
            newdata=[]
            for name in data.keys(): newdata.append(DataField(name,data[name]))
            data=newdata
        for child in data:
            if isinstance(child,(str,unicode)): self.addInstructions(child)
            # NOTE(review): dispatch by class *name* (not isinstance) is kept
            # as-is to preserve behaviour for subclasses of these types.
            elif child.__class__.__name__=='DataField': self.kids.append(child)
            elif child.__class__.__name__=='DataItem': self.kids.append(child)
            elif child.__class__.__name__=='DataReported': self.kids.append(child)
            else: self.kids.append(DataField(node=child))
    def getType(self):
        """ Return the type of dataform. """
        return self.getAttr('type')
    def setType(self,typ):
        """ Set the type of dataform. """
        self.setAttr('type',typ)
    def getTitle(self):
        """ Return the title of dataform. """
        return self.getTagData('title')
    def setTitle(self,text):
        """ Set the title of dataform. """
        self.setTagData('title',text)
    def getInstructions(self):
        """ Return the instructions of dataform. """
        return self.getTagData('instructions')
    def setInstructions(self,text):
        """ Set the instructions of dataform. """
        self.setTagData('instructions',text)
    def addInstructions(self,text):
        """ Add one more instruction to the dataform. """
        self.addChild('instructions',{},[text])
    def getField(self,name):
        """ Return the datafield object with name 'name' (if exists). """
        return self.getTag('field',attrs={'var':name})
    def setField(self,name):
        """ Create if necessary or get the existing datafield object with name 'name' and return it. """
        f=self.getField(name)
        if f: return f
        return self.addChild(node=DataField(name))
    def asDict(self):
        """ Represent dataform as simple dictionary mapping of datafield names to their values."""
        ret={}
        for field in self.getTags('field'):
            name=field.getAttr('var')
            typ=field.getType()
            if isinstance(typ,(str,unicode)) and typ[-6:]=='-multi':
                # '-multi' typed fields collect every value child into a list.
                val=[value_tag.getData() for value_tag in field.getTags('value')]
            else: val=field.getTagData('value')
            ret[name]=val
        if self.getTag('instructions'): ret['instructions']=self.getInstructions()
        return ret
    def __getitem__(self,name):
        """ Simple dictionary interface for getting datafields values by their names."""
        item=self.getField(name)
        if item: return item.getValue()
        raise IndexError('No such field')
    def __setitem__(self,name,val):
        """ Simple dictionary interface for setting datafields values by their names."""
        return self.setField(name).setValue(val)
diff --git a/libs/xmpp/roster.py b/libs/xmpp/roster.py
new file mode 100644
index 00000000..676a4c9a
--- /dev/null
+++ b/libs/xmpp/roster.py
@@ -0,0 +1,184 @@
+## roster.py
+##
+## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: roster.py,v 1.20 2005/07/13 13:22:52 snakeru Exp $
+
+"""
+Simple roster implementation. Can be used for different tasks like
+mass-renaming of contacts.
+"""
+
+from protocol import *
+from client import PlugIn
+
class Roster(PlugIn):
    """ Defines plenty of methods that will allow you to manage roster.
        Also automatically track presences from remote JIDs taking into
        account that every JID can have multiple resources connected. Does not
        currently support 'error' presences.
        You can also use mapping interface for access to the internal representation of
        contacts in roster.
    """
    def __init__(self):
        """ Init internal variables. """
        PlugIn.__init__(self)
        self.DBG_LINE='roster'
        # Internal roster: {bare_jid: {'name':..., 'ask':..., 'subscription':...,
        # 'groups':[...], 'resources': {resource: {'show','status','priority','timestamp'}}}}
        self._data = {}
        # None = roster never requested, 0 = request sent, 1 = roster received.
        self.set=None
        self._exported_methods=[self.getRoster]

    def plugin(self,owner,request=1):
        """ Register presence and subscription trackers in the owner's dispatcher.
            Also request roster from server if the 'request' argument is set.
            Used internally."""
        self._owner.RegisterHandler('iq',self.RosterIqHandler,'result',NS_ROSTER)
        self._owner.RegisterHandler('iq',self.RosterIqHandler,'set',NS_ROSTER)
        self._owner.RegisterHandler('presence',self.PresenceHandler)
        if request: self.Request()

    def Request(self,force=0):
        """ Request roster from server if it were not yet requested
            (or if the 'force' argument is set). """
        if self.set is None: self.set=0
        elif not force: return
        self._owner.send(Iq('get',NS_ROSTER))
        self.DEBUG('Roster requested from server','start')

    def getRoster(self):
        """ Requests roster from server if necessary and returns self."""
        if not self.set: self.Request()
        # Block, pumping the owner's event loop, until RosterIqHandler
        # flags the roster as received.
        while not self.set: self._owner.Process(10)
        return self

    def RosterIqHandler(self,dis,stanza):
        """ Subscription tracker. Used internally for setting items state in
            internal roster representation. """
        for item in stanza.getTag('query').getTags('item'):
            jid=item.getAttr('jid')
            if item.getAttr('subscription')=='remove':
                if self._data.has_key(jid): del self._data[jid]
                # NOTE(review): raising here also skips any remaining items
                # in this same roster push - confirm that is intended.
                raise NodeProcessed # a MUST
            self.DEBUG('Setting roster item %s...'%jid,'ok')
            if not self._data.has_key(jid): self._data[jid]={}
            self._data[jid]['name']=item.getAttr('name')
            self._data[jid]['ask']=item.getAttr('ask')
            self._data[jid]['subscription']=item.getAttr('subscription')
            self._data[jid]['groups']=[]
            # Preserve live presence info across roster updates.
            if not self._data[jid].has_key('resources'): self._data[jid]['resources']={}
            for group in item.getTags('group'): self._data[jid]['groups'].append(group.getData())
        # Track our own bare JID too, so presence from our other resources
        # has an entry to land in.
        self._data[self._owner.User+'@'+self._owner.Server]={'resources':{},'name':None,'ask':None,'subscription':None,'groups':None,}
        self.set=1
        raise NodeProcessed # a MUST. Otherwise you'll get back an error iq.

    def PresenceHandler(self,dis,pres):
        """ Presence tracker. Used internally for setting items' resources state in
            internal roster representation. """
        jid=JID(pres.getFrom())
        # Presence from a JID not in the roster: create a stub entry so it is not lost.
        if not self._data.has_key(jid.getStripped()): self._data[jid.getStripped()]={'name':None,'ask':None,'subscription':'none','groups':['Not in roster'],'resources':{}}

        item=self._data[jid.getStripped()]
        typ=pres.getType()

        if not typ:
            # Available presence: (re)build this resource's state record.
            self.DEBUG('Setting roster item %s for resource %s...'%(jid.getStripped(),jid.getResource()),'ok')
            item['resources'][jid.getResource()]=res={'show':None,'status':None,'priority':'0','timestamp':None}
            if pres.getTag('show'): res['show']=pres.getShow()
            if pres.getTag('status'): res['status']=pres.getStatus()
            if pres.getTag('priority'): res['priority']=pres.getPriority()
            # Stamp untimestamped presences with the arrival time.
            if not pres.getTimestamp(): pres.setTimestamp()
            res['timestamp']=pres.getTimestamp()
        elif typ=='unavailable' and item['resources'].has_key(jid.getResource()): del item['resources'][jid.getResource()]
        # Need to handle type='error' also

    def _getItemData(self,jid,dataname):
        """ Return specific jid's representation in internal format. Used internally. """
        jid=jid[:(jid+'/').find('/')]   # strip the resource part, if any
        return self._data[jid][dataname]
    def _getResourceData(self,jid,dataname):
        """ Return specific jid's resource representation in internal format. Used internally. """
        if jid.find('/')+1:
            # Full JID supplied: use exactly that resource (if connected).
            jid,resource=jid.split('/',1)
            if self._data[jid]['resources'].has_key(resource): return self._data[jid]['resources'][resource][dataname]
        elif self._data[jid]['resources'].keys():
            # Bare JID supplied: fall back to the highest-priority resource.
            lastpri=-129    # sentinel below any priority the scan can produce
            for r in self._data[jid]['resources'].keys():
                if int(self._data[jid]['resources'][r]['priority'])>lastpri: resource,lastpri=r,int(self._data[jid]['resources'][r]['priority'])
            return self._data[jid]['resources'][resource][dataname]
    def delItem(self,jid):
        """ Delete contact 'jid' from roster."""
        self._owner.send(Iq('set',NS_ROSTER,payload=[Node('item',{'jid':jid,'subscription':'remove'})]))
    def getAsk(self,jid):
        """ Returns 'ask' value of contact 'jid'."""
        return self._getItemData(jid,'ask')
    def getGroups(self,jid):
        """ Returns groups list that contact 'jid' belongs to."""
        return self._getItemData(jid,'groups')
    def getName(self,jid):
        """ Returns name of contact 'jid'."""
        return self._getItemData(jid,'name')
    def getPriority(self,jid):
        """ Returns priority of contact 'jid'. 'jid' should be a full (not bare) JID."""
        return self._getResourceData(jid,'priority')
    def getRawRoster(self):
        """ Returns roster representation in internal format. """
        return self._data
    def getRawItem(self,jid):
        """ Returns roster item 'jid' representation in internal format. """
        return self._data[jid[:(jid+'/').find('/')]]
    def getShow(self, jid):
        """ Returns 'show' value of contact 'jid'. 'jid' should be a full (not bare) JID."""
        return self._getResourceData(jid,'show')
    def getStatus(self, jid):
        """ Returns 'status' value of contact 'jid'. 'jid' should be a full (not bare) JID."""
        return self._getResourceData(jid,'status')
    def getSubscription(self,jid):
        """ Returns 'subscription' value of contact 'jid'."""
        return self._getItemData(jid,'subscription')
    def getResources(self,jid):
        """ Returns list of connected resources of contact 'jid'."""
        return self._data[jid[:(jid+'/').find('/')]]['resources'].keys()
    def setItem(self,jid,name=None,groups=[]):
        """ Creates/renames contact 'jid' and sets the groups list that it now belongs to."""
        iq=Iq('set',NS_ROSTER)
        query=iq.getTag('query')
        attrs={'jid':jid}
        if name: attrs['name']=name
        item=query.setTag('item',attrs)
        for group in groups: item.addChild(node=Node('group',payload=[group]))
        self._owner.send(iq)
    def getItems(self):
        """ Return list of all [bare] JIDs that the roster currently tracks."""
        return self._data.keys()
    def keys(self):
        """ Same as getItems. Provided for the sake of dictionary interface."""
        return self._data.keys()
    def __getitem__(self,item):
        """ Get the contact in the internal format. Raises KeyError if JID 'item' is not in roster."""
        return self._data[item]
    def getItem(self,item):
        """ Get the contact in the internal format (or None if JID 'item' is not in roster)."""
        if self._data.has_key(item): return self._data[item]
    def Subscribe(self,jid):
        """ Send subscription request to JID 'jid'."""
        self._owner.send(Presence(jid,'subscribe'))
    def Unsubscribe(self,jid):
        """ Ask for removing our subscription for JID 'jid'."""
        self._owner.send(Presence(jid,'unsubscribe'))
    def Authorize(self,jid):
        """ Authorise JID 'jid'. Works only if this JID requested auth previously. """
        self._owner.send(Presence(jid,'subscribed'))
    def Unauthorize(self,jid):
        """ Unauthorise JID 'jid'. Use for declining authorisation request
            or for removing existing authorization. """
        self._owner.send(Presence(jid,'unsubscribed'))
diff --git a/libs/xmpp/session.py b/libs/xmpp/session.py
new file mode 100644
index 00000000..24066b32
--- /dev/null
+++ b/libs/xmpp/session.py
@@ -0,0 +1,349 @@
+##
+## XMPP server
+##
+## Copyright (C) 2004 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+__version__="$Id"
+
+"""
+When your handler is called it is getting the session instance as the first argument.
+This is the difference from xmpppy 0.1 where you got the "Client" instance.
+With Session class you can have "multi-session" client instead of having
+one client for each connection. It is especially important when you are
+writing the server.
+"""
+
+from protocol import *
+
+# Transport-level flags
+SOCKET_UNCONNECTED =0
+SOCKET_ALIVE =1
+SOCKET_DEAD =2
+# XML-level flags
+STREAM__NOT_OPENED =1
+STREAM__OPENED =2
+STREAM__CLOSING =3
+STREAM__CLOSED =4
+# XMPP-session flags
+SESSION_NOT_AUTHED =1
+SESSION_AUTHED =2
+SESSION_BOUND =3
+SESSION_OPENED =4
+SESSION_CLOSED =5
+
+class Session:
+ """
+ The Session class instance is used for storing all session-related info like
+ credentials, socket/xml stream/session state flags, roster items (in case of
+ client type connection) etc.
+ Session object have no means of discovering is any info is ready to be read.
+ Instead you should use poll() (recomended) or select() methods for this purpose.
+ Session can be one of two types: 'server' and 'client'. 'server' session handles
+ inbound connection and 'client' one used to create an outbound one.
+ Session instance have multitude of internal attributes. The most imporant is the 'peer' one.
+ It is set once the peer is authenticated (client).
+ """
+ def __init__(self,socket,owner,xmlns=None,peer=None):
+ """ When the session is created it's type (client/server) is determined from the beginning.
+ socket argument is the pre-created socket-like object.
+ It must have the following methods: send, recv, fileno, close.
+ owner is the 'master' instance that have Dispatcher plugged into it and generally
+ will take care about all session events.
+ xmlns is the stream namespace that will be used. Client must set this argument
+ If server sets this argument than stream will be dropped if opened with some another namespace.
+ peer is the name of peer instance. This is the flag that differentiates client session from
+ server session. Client must set it to the name of the server that will be connected, server must
+ leave this argument alone.
+ """
+ self.xmlns=xmlns
+ if peer:
+ self.TYP='client'
+ self.peer=peer
+ self._socket_state=SOCKET_UNCONNECTED
+ else:
+ self.TYP='server'
+ self.peer=None
+ self._socket_state=SOCKET_ALIVE
+ self._sock=socket
+ self._send=socket.send
+ self._recv=socket.recv
+ self.fileno=socket.fileno
+ self._registered=0
+
+ self.Dispatcher=owner.Dispatcher
+ self.DBG_LINE='session'
+ self.DEBUG=owner.Dispatcher.DEBUG
+ self._expected={}
+ self._owner=owner
+ if self.TYP=='server': self.ID=`random.random()`[2:]
+ else: self.ID=None
+
+ self.sendbuffer=''
+ self._stream_pos_queued=None
+ self._stream_pos_sent=0
+ self.deliver_key_queue=[]
+ self.deliver_queue_map={}
+ self.stanza_queue=[]
+
+ self._session_state=SESSION_NOT_AUTHED
+ self.waiting_features=[]
+ for feature in [NS_TLS,NS_SASL,NS_BIND,NS_SESSION]:
+ if feature in owner.features: self.waiting_features.append(feature)
+ self.features=[]
+ self.feature_in_process=None
+ self.slave_session=None
+ self.StartStream()
+
+ def StartStream(self):
+ """ This method is used to initialise the internal xml expat parser
+ and to send initial stream header (in case of client connection).
+ Should be used after initial connection and after every stream restart."""
+ self._stream_state=STREAM__NOT_OPENED
+ self.Stream=simplexml.NodeBuilder()
+ self.Stream._dispatch_depth=2
+ self.Stream.dispatch=self._dispatch
+ self.Parse=self.Stream.Parse
+ self.Stream.stream_footer_received=self._stream_close
+ if self.TYP=='client':
+ self.Stream.stream_header_received=self._catch_stream_id
+ self._stream_open()
+ else:
+ self.Stream.stream_header_received=self._stream_open
+
+ def receive(self):
+ """ Reads all pending incoming data.
+ Raises IOError on disconnection.
+ Blocks until at least one byte is read."""
+ try: received = self._recv(10240)
+ except: received = ''
+
+ if len(received): # length of 0 means disconnect
+ self.DEBUG(`self.fileno()`+' '+received,'got')
+ else:
+ self.DEBUG('Socket error while receiving data','error')
+ self.set_socket_state(SOCKET_DEAD)
+ raise IOError("Peer disconnected")
+ return received
+
+ def sendnow(self,chunk):
+ """ Put chunk into "immidiatedly send" queue.
+ Should only be used for auth/TLS stuff and like.
+ If you just want to shedule regular stanza for delivery use enqueue method.
+ """
+ if isinstance(chunk,Node): chunk = chunk.__str__().encode('utf-8')
+ elif type(chunk)==type(u''): chunk = chunk.encode('utf-8')
+ self.enqueue(chunk)
+
+ def enqueue(self,stanza):
+ """ Takes Protocol instance as argument.
+ Puts stanza into "send" fifo queue. Items into the send queue are hold until
+ stream authenticated. After that this method is effectively the same as "sendnow" method."""
+ if isinstance(stanza,Protocol):
+ self.stanza_queue.append(stanza)
+ else: self.sendbuffer+=stanza
+ if self._socket_state>=SOCKET_ALIVE: self.push_queue()
+
+ def push_queue(self,failreason=ERR_RECIPIENT_UNAVAILABLE):
+ """ If stream is authenticated than move items from "send" queue to "immidiatedly send" queue.
+ Else if the stream is failed then return all queued stanzas with error passed as argument.
+ Otherwise do nothing."""
+ # If the stream authed - convert stanza_queue into sendbuffer and set the checkpoints
+
+ if self._stream_state>=STREAM__CLOSED or self._socket_state>=SOCKET_DEAD: # the stream failed. Return all stanzas that are still waiting for delivery.
+ self._owner.deactivatesession(self)
+ for key in self.deliver_key_queue: # Not sure. May be I
+ self._dispatch(Error(self.deliver_queue_map[key],failreason),trusted=1) # should simply re-dispatch it?
+ for stanza in self.stanza_queue: # But such action can invoke
+ self._dispatch(Error(stanza,failreason),trusted=1) # Infinite loops in case of S2S connection...
+ self.deliver_queue_map,self.deliver_key_queue,self.stanza_queue={},[],[]
+ return
+ elif self._session_state>=SESSION_AUTHED: # FIXME! Должен быть какой-то другой флаг.
+ #### LOCK_QUEUE
+ for stanza in self.stanza_queue:
+ txt=stanza.__str__().encode('utf-8')
+ self.sendbuffer+=txt
+ self._stream_pos_queued+=len(txt) # should be re-evaluated for SSL connection.
+ self.deliver_queue_map[self._stream_pos_queued]=stanza # position of the stream when stanza will be successfully and fully sent
+ self.deliver_key_queue.append(self._stream_pos_queued)
+ self.stanza_queue=[]
+ #### UNLOCK_QUEUE
+
+ def flush_queue(self):
+ """ Put the "immidiatedly send" queue content on the wire. Blocks until at least one byte sent."""
+ if self.sendbuffer:
+ try:
+ # LOCK_QUEUE
+ sent=self._send(self.sendbuffer) # Блокирующая штучка!
+ except:
+ # UNLOCK_QUEUE
+ self.set_socket_state(SOCKET_DEAD)
+ self.DEBUG("Socket error while sending data",'error')
+ return self.terminate_stream()
+ self.DEBUG(`self.fileno()`+' '+self.sendbuffer[:sent],'sent')
+ self._stream_pos_sent+=sent
+ self.sendbuffer=self.sendbuffer[sent:]
+ self._stream_pos_delivered=self._stream_pos_sent # Should be acquired from socket somehow. Take SSL into account.
+ while self.deliver_key_queue and self._stream_pos_delivered>self.deliver_key_queue[0]:
+ del self.deliver_queue_map[self.deliver_key_queue[0]]
+ self.deliver_key_queue.remove(self.deliver_key_queue[0])
+ # UNLOCK_QUEUE
+
+ def _dispatch(self,stanza,trusted=0):
+ """ This is callback that is used to pass the received stanza forth to owner's dispatcher
+ _if_ the stream is authorised. Otherwise the stanza is just dropped.
+ The 'trusted' argument is used to emulate stanza receive.
+ This method is used internally.
+ """
+ self._owner.packets+=1
+ if self._stream_state==STREAM__OPENED or trusted: # if the server really should reject all stanzas after he is closed stream (himeself)?
+ self.DEBUG(stanza.__str__(),'dispatch')
+ stanza.trusted=trusted
+ return self.Dispatcher.dispatch(stanza,self)
+
+ def _catch_stream_id(self,ns=None,tag='stream',attrs={}):
+ """ This callback is used to detect the stream namespace of incoming stream. Used internally. """
+ if not attrs.has_key('id') or not attrs['id']:
+ return self.terminate_stream(STREAM_INVALID_XML)
+ self.ID=attrs['id']
+ if not attrs.has_key('version'): self._owner.Dialback(self)
+
+ def _stream_open(self,ns=None,tag='stream',attrs={}):
+ """ This callback is used to handle opening stream tag of the incoming stream.
+ In the case of client session it just make some validation.
+ Server session also sends server headers and if the stream valid the features node.
+ Used internally. """
+ text='\n')
+ self.set_stream_state(STREAM__OPENED)
+ if self.TYP=='client': return
+ if tag<>'stream': return self.terminate_stream(STREAM_INVALID_XML)
+ if ns<>NS_STREAMS: return self.terminate_stream(STREAM_INVALID_NAMESPACE)
+ if self.Stream.xmlns<>self.xmlns: return self.terminate_stream(STREAM_BAD_NAMESPACE_PREFIX)
+ if not attrs.has_key('to'): return self.terminate_stream(STREAM_IMPROPER_ADDRESSING)
+ if attrs['to'] not in self._owner.servernames: return self.terminate_stream(STREAM_HOST_UNKNOWN)
+ self.ourname=attrs['to'].lower()
+ if self.TYP=='server' and attrs.has_key('version'):
+ # send features
+ features=Node('stream:features')
+ if NS_TLS in self.waiting_features:
+ features.NT.starttls.setNamespace(NS_TLS)
+ features.T.starttls.NT.required
+ if NS_SASL in self.waiting_features:
+ features.NT.mechanisms.setNamespace(NS_SASL)
+ for mec in self._owner.SASL.mechanisms:
+ features.T.mechanisms.NT.mechanism=mec
+ else:
+ if NS_BIND in self.waiting_features: features.NT.bind.setNamespace(NS_BIND)
+ if NS_SESSION in self.waiting_features: features.NT.session.setNamespace(NS_SESSION)
+ self.sendnow(features)
+
+ def feature(self,feature):
+ """ Declare some stream feature as activated one. """
+ if feature not in self.features: self.features.append(feature)
+ self.unfeature(feature)
+
+ def unfeature(self,feature):
+ """ Declare some feature as illegal. Illegal features can not be used.
+ Example: BIND feature becomes illegal after Non-SASL auth. """
+ if feature in self.waiting_features: self.waiting_features.remove(feature)
+
+ def _stream_close(self,unregister=1):
+ """ Write the closing stream tag and destroy the underlaying socket. Used internally. """
+ if self._stream_state>=STREAM__CLOSED: return
+ self.set_stream_state(STREAM__CLOSING)
+ self.sendnow('')
+ self.set_stream_state(STREAM__CLOSED)
+ self.push_queue() # decompose queue really since STREAM__CLOSED
+ self._owner.flush_queues()
+ if unregister: self._owner.unregistersession(self)
+ self._destroy_socket()
+
+ def terminate_stream(self,error=None,unregister=1):
+ """ Notify the peer about stream closure.
+ Ensure that xmlstream is not brokes - i.e. if the stream isn't opened yet -
+ open it before closure.
+ If the error condition is specified than create a stream error and send it along with
+ closing stream tag.
+ Emulate receiving 'unavailable' type presence just before stream closure.
+ """
+ if self._stream_state>=STREAM__CLOSING: return
+ if self._stream_statef: raise "Stopping feature %s instead of %s !"%(f,self.feature_in_process)
+ self.feature_in_process=None
+
+ def set_socket_state(self,newstate):
+ """ Change the underlaying socket state.
+ Socket starts with SOCKET_UNCONNECTED state
+ and then proceeds (possibly) to SOCKET_ALIVE
+ and then to SOCKET_DEAD """
+ if self._socket_state=SESSION_AUTHED: self._stream_pos_queued=self._stream_pos_sent
+ self._session_state=newstate
+
+ def set_stream_state(self,newstate):
+ """ Change the underlaying XML stream state
+ Stream starts with STREAM__NOT_OPENED and then proceeds with
+ STREAM__OPENED, STREAM__CLOSING and STREAM__CLOSED states.
+ Note that some features (like TLS and SASL)
+ requires stream re-start so this state can have non-linear changes. """
+ if self._stream_state " replaced by their respective XML entities."""
+ # replace also FORM FEED and ESC, because they are not valid XML chars
+ return txt.replace("&", "&").replace("<", "<").replace(">", ">").replace('"', """).replace(u'\x0C', "").replace(u'\x1B', "")
+
+ENCODING='utf-8'
+def ustr(what):
+ """Converts object "what" to unicode string using its own __str__ method if accessible or unicode method otherwise."""
+ if isinstance(what, unicode): return what
+ try: r=what.__str__()
+ except AttributeError: r=str(what)
+ if not isinstance(r, unicode): return unicode(r,ENCODING)
+ return r
+
+class Node(object):
+ """ Node class describes syntax of separate XML Node. It have a constructor that permits node creation
+ from set of "namespace name", attributes and payload of text strings and other nodes.
+ It does not natively support building node from text string and uses NodeBuilder class for that purpose.
+ After creation node can be mangled in many ways so it can be completely changed.
+ Also node can be serialised into string in one of two modes: default (where the textual representation
+ of node describes it exactly) and "fancy" - with whitespace added to make indentation and thus make
+ result more readable by human.
+
+ The Node class has an attribute FORCE_NODE_RECREATION that defaults to False, thus enabling fast node
+ replication from some other node. The drawback of the fast way is that the new node shares some
+ info with the "original" node, so changing one node may influence the other. Though this is
+ rarely needed (in xmpppy it is never needed at all since the original node is usually never used after
+ replication, and replication is used only to move upwards on the class tree).
+ """
+ FORCE_NODE_RECREATION=0
+ def __init__(self, tag=None, attrs={}, payload=[], parent=None, nsp=None, node_built=False, node=None):
+ """ Takes "tag" argument as the name of node (prepended by namespace, if needed and separated from it
+ by a space), attrs dictionary as the set of arguments, payload list as the set of textual strings
+ and child nodes that this node carries within itself and "parent" argument that is another node
+ that this one will be the child of. Also the __init__ can be provided with "node" argument that is
+ either a text string containing exactly one node or another Node instance to begin with. If both
+ "node" and other arguments is provided then the node initially created as replica of "node"
+ provided and then modified to be compliant with other arguments."""
+ if node:
+ if self.FORCE_NODE_RECREATION and isinstance(node, Node):
+ node=str(node)
+ if not isinstance(node, Node):
+ node=NodeBuilder(node,self)
+ node_built = True
+ else:
+ self.name,self.namespace,self.attrs,self.data,self.kids,self.parent,self.nsd = node.name,node.namespace,{},[],[],node.parent,{}
+ for key in node.attrs.keys(): self.attrs[key]=node.attrs[key]
+ for data in node.data: self.data.append(data)
+ for kid in node.kids: self.kids.append(kid)
+ for k,v in node.nsd.items(): self.nsd[k] = v
+ else: self.name,self.namespace,self.attrs,self.data,self.kids,self.parent,self.nsd = 'tag','',{},[],[],None,{}
+ if parent:
+ self.parent = parent
+ self.nsp_cache = {}
+ if nsp:
+ for k,v in nsp.items(): self.nsp_cache[k] = v
+ for attr,val in attrs.items():
+ if attr == 'xmlns':
+ self.nsd[u''] = val
+ elif attr.startswith('xmlns:'):
+ self.nsd[attr[6:]] = val
+ self.attrs[attr]=attrs[attr]
+ if tag:
+ if node_built:
+ pfx,self.name = (['']+tag.split(':'))[-2:]
+ self.namespace = self.lookup_nsp(pfx)
+ else:
+ if ' ' in tag:
+ self.namespace,self.name = tag.split()
+ else:
+ self.name = tag
+ if isinstance(payload, basestring): payload=[payload]
+ for i in payload:
+ if isinstance(i, Node): self.addChild(node=i)
+ else: self.data.append(ustr(i))
+
+ def lookup_nsp(self,pfx=''):
+ ns = self.nsd.get(pfx,None)
+ if ns is None:
+ ns = self.nsp_cache.get(pfx,None)
+ if ns is None:
+ if self.parent:
+ ns = self.parent.lookup_nsp(pfx)
+ self.nsp_cache[pfx] = ns
+ else:
+ return 'http://www.gajim.org/xmlns/undeclared'
+ return ns
+
+ def __str__(self,fancy=0):
+ """ Method used to dump node into textual representation.
+ if "fancy" argument is set to True produces indented output for readability."""
+ s = (fancy-1) * 2 * ' ' + "<" + self.name
+ if self.namespace:
+ if not self.parent or self.parent.namespace!=self.namespace:
+ if 'xmlns' not in self.attrs:
+ s = s + ' xmlns="%s"'%self.namespace
+ for key in self.attrs.keys():
+ val = ustr(self.attrs[key])
+ s = s + ' %s="%s"' % ( key, XMLescape(val) )
+ s = s + ">"
+ cnt = 0
+ if self.kids:
+ if fancy: s = s + "\n"
+ for a in self.kids:
+ if not fancy and (len(self.data)-1)>=cnt: s=s+XMLescape(self.data[cnt])
+ elif (len(self.data)-1)>=cnt: s=s+XMLescape(self.data[cnt].strip())
+ if isinstance(a, Node):
+ s = s + a.__str__(fancy and fancy+1)
+ elif a:
+ s = s + a.__str__()
+ cnt=cnt+1
+ if not fancy and (len(self.data)-1) >= cnt: s = s + XMLescape(self.data[cnt])
+ elif (len(self.data)-1) >= cnt: s = s + XMLescape(self.data[cnt].strip())
+ if not self.kids and s.endswith('>'):
+ s=s[:-1]+' />'
+ if fancy: s = s + "\n"
+ else:
+ if fancy and not self.data: s = s + (fancy-1) * 2 * ' '
+ s = s + "" + self.name + ">"
+ if fancy: s = s + "\n"
+ return s
+ def getCDATA(self):
+ """ Serialise node, dropping all tags and leaving CDATA intact.
+ That effectively kills all formatting, leaving only the text that was contained in the XML.
+ """
+ s = ""
+ cnt = 0
+ if self.kids:
+ for a in self.kids:
+ s=s+self.data[cnt]
+ if a: s = s + a.getCDATA()
+ cnt=cnt+1
+ if (len(self.data)-1) >= cnt: s = s + self.data[cnt]
+ return s
+ def addChild(self, name=None, attrs={}, payload=[], namespace=None, node=None):
+ """ If "node" argument is provided, adds it as child node. Else creates new node from
+ the other arguments' values and adds it as well."""
+ if 'xmlns' in attrs:
+ raise AttributeError("Use namespace=x instead of attrs={'xmlns':x}")
+ if node:
+ newnode=node
+ node.parent = self
+ else: newnode=Node(tag=name, parent=self, attrs=attrs, payload=payload)
+ if namespace:
+ newnode.setNamespace(namespace)
+ self.kids.append(newnode)
+ self.data.append(u'')
+ return newnode
+ def addData(self, data):
+ """ Adds some CDATA to node. """
+ self.data.append(ustr(data))
+ self.kids.append(None)
+ def clearData(self):
+ """ Removes all CDATA from the node. """
+ self.data=[]
+ def delAttr(self, key):
+ """ Deletes an attribute "key" """
+ del self.attrs[key]
+ def delChild(self, node, attrs={}):
+ """ Deletes the "node" from the node's childs list, if "node" is an instance.
+ Else deletes the first node that have specified name and (optionally) attributes. """
+ if not isinstance(node, Node): node=self.getTag(node,attrs)
+ self.kids[self.kids.index(node)]=None
+ return node
+ def getAttrs(self):
+ """ Returns all node's attributes as dictionary. """
+ return self.attrs
+ def getAttr(self, key):
+ """ Returns value of specified attribute. """
+ try: return self.attrs[key]
+ except: return None
+ def getChildren(self):
+ """ Returns all node's child nodes as list. """
+ return self.kids
+ def getData(self):
+ """ Returns all node CDATA as string (concatenated). """
+ return ''.join(self.data)
+ def getName(self):
+ """ Returns the name of node """
+ return self.name
+ def getNamespace(self):
+ """ Returns the namespace of node """
+ return self.namespace
+ def getParent(self):
+ """ Returns the parent of node (if present). """
+ return self.parent
+ def getPayload(self):
+ """ Return the payload of node i.e. list of child nodes and CDATA entries.
+ F.e. for "<node>text1<nodea/><nodeb/> text2</node>" the returned list will be:
+ ['text1', <nodea instance>, <nodeb instance>, ' text2']. """
+ ret=[]
+ for i in range(max(len(self.data),len(self.kids))):
+ if i < len(self.data) and self.data[i]: ret.append(self.data[i])
+ if i < len(self.kids) and self.kids[i]: ret.append(self.kids[i])
+ return ret
+ def getTag(self, name, attrs={}, namespace=None):
+ """ Filters all child nodes using specified arguments as filter.
+ Returns the first found or None if not found. """
+ return self.getTags(name, attrs, namespace, one=1)
+ def getTagAttr(self,tag,attr):
+ """ Returns attribute value of the child with specified name (or None if no such attribute)."""
+ try: return self.getTag(tag).attrs[attr]
+ except: return None
+ def getTagData(self,tag):
+ """ Returns concatenated CDATA of the child with specified name."""
+ try: return self.getTag(tag).getData()
+ except: return None
+ def getTags(self, name, attrs={}, namespace=None, one=0):
+ """ Filters all child nodes using specified arguments as filter.
+ Returns the list of nodes found. """
+ nodes=[]
+ for node in self.kids:
+ if not node: continue
+ if namespace and namespace!=node.getNamespace(): continue
+ if node.getName() == name:
+ for key in attrs.keys():
+ if key not in node.attrs or node.attrs[key]!=attrs[key]: break
+ else: nodes.append(node)
+ if one and nodes: return nodes[0]
+ if not one: return nodes
+
+ def iterTags(self, name, attrs={}, namespace=None):
+ """ Iterate over all children using specified arguments as filter. """
+ for node in self.kids:
+ if not node: continue
+ if namespace is not None and namespace!=node.getNamespace(): continue
+ if node.getName() == name:
+ for key in attrs.keys():
+ if key not in node.attrs or \
+ node.attrs[key]!=attrs[key]: break
+ else:
+ yield node
+
+ def setAttr(self, key, val):
+ """ Sets attribute "key" with the value "val". """
+ self.attrs[key]=val
+ def setData(self, data):
+ """ Sets node's CDATA to provided string. Resets all previous CDATA!"""
+ self.data=[ustr(data)]
+ def setName(self,val):
+ """ Changes the node name. """
+ self.name = val
+ def setNamespace(self, namespace):
+ """ Changes the node namespace. """
+ self.namespace=namespace
+ def setParent(self, node):
+ """ Sets node's parent to "node". WARNING: does not check whether a parent is already present
+ and does not remove the node from the child list of the previous parent. """
+ self.parent = node
+ def setPayload(self,payload,add=0):
+ """ Sets node payload according to the list specified. WARNING: completely replaces all node's
+ previous content. If you wish just to add child or CDATA - use addData or addChild methods. """
+ if isinstance(payload, basestring): payload=[payload]
+ if add: self.kids+=payload
+ else: self.kids=payload
+ def setTag(self, name, attrs={}, namespace=None):
+ """ Same as getTag but if the node with specified namespace/attributes not found, creates such
+ node and returns it. """
+ node=self.getTags(name, attrs, namespace=namespace, one=1)
+ if node: return node
+ else: return self.addChild(name, attrs, namespace=namespace)
+ def setTagAttr(self,tag,attr,val):
+ """ Creates new node (if not already present) with name "tag"
+ and sets it's attribute "attr" to value "val". """
+ try: self.getTag(tag).attrs[attr]=val
+ except: self.addChild(tag,attrs={attr:val})
+ def setTagData(self,tag,val,attrs={}):
+ """ Creates new node (if not already present) with name "tag" and (optionally) attributes "attrs"
+ and sets it's CDATA to string "val". """
+ try: self.getTag(tag,attrs).setData(ustr(val))
+ except: self.addChild(tag,attrs,payload=[ustr(val)])
+ def has_attr(self,key):
+ """ Checks if node have attribute "key"."""
+ return key in self.attrs
+ def __getitem__(self,item):
+ """ Returns node's attribute "item" value. """
+ return self.getAttr(item)
+ def __setitem__(self,item,val):
+ """ Sets node's attribute "item" value. """
+ return self.setAttr(item,val)
+ def __delitem__(self,item):
+ """ Deletes node's attribute "item". """
+ return self.delAttr(item)
+ def __getattr__(self,attr):
+ """ Reduce memory usage caused by T/NT classes - use memory only when needed. """
+ if attr=='T':
+ self.T=T(self)
+ return self.T
+ if attr=='NT':
+ self.NT=NT(self)
+ return self.NT
+ raise AttributeError
+
+class T:
+ """ Auxiliary class used to quick access to node's child nodes. """
+ def __init__(self,node): self.__dict__['node']=node
+ def __getattr__(self,attr): return self.node.getTag(attr)
+ def __setattr__(self,attr,val):
+ if isinstance(val,Node): Node.__init__(self.node.setTag(attr),node=val)
+ else: return self.node.setTagData(attr,val)
+ def __delattr__(self,attr): return self.node.delChild(attr)
+
+class NT(T):
+ """ Auxiliary class used to quick create node's child nodes. """
+ def __getattr__(self,attr): return self.node.addChild(attr)
+ def __setattr__(self,attr,val):
+ if isinstance(val,Node): self.node.addChild(attr,node=val)
+ else: return self.node.addChild(attr,payload=[val])
+
+DBG_NODEBUILDER = 'nodebuilder'
+class NodeBuilder:
+ """ Builds a Node class minidom from data parsed to it. This class used for two purposes:
+ 1. Creation an XML Node from a textual representation. F.e. reading a config file. See an XML2Node method.
+ 2. Handling an incoming XML stream. This is done by mangling
+ the __dispatch_depth parameter and redefining the dispatch method.
+ You do not need to use this class directly if you do not designing your own XML handler."""
+ def __init__(self,data=None,initial_node=None):
+ """ Takes two optional parameters: "data" and "initial_node".
+ By default class initialised with empty Node class instance.
+ Though, if "initial_node" is provided it used as "starting point".
+ You can think about it as of "node upgrade".
+ "data" (if provided) is fed to the parser immediately after instance init.
+ """
+ self.DEBUG(DBG_NODEBUILDER, "Preparing to handle incoming XML stream.", 'start')
+ self._parser = xml.parsers.expat.ParserCreate()
+ self._parser.StartElementHandler = self.starttag
+ self._parser.EndElementHandler = self.endtag
+ self._parser.CharacterDataHandler = self.handle_cdata
+ self._parser.StartNamespaceDeclHandler = self.handle_namespace_start
+ self._parser.buffer_text = True
+ self.Parse = self._parser.Parse
+
+ self.__depth = 0
+ self.__last_depth = 0
+ self.__max_depth = 0
+ self._dispatch_depth = 1
+ self._document_attrs = None
+ self._document_nsp = None
+ self._mini_dom=initial_node
+ self.last_is_data = 1
+ self._ptr=None
+ self.data_buffer = None
+ self.streamError = ''
+ if data:
+ self._parser.Parse(data,1)
+
+ def check_data_buffer(self):
+ if self.data_buffer:
+ self._ptr.data.append(''.join(self.data_buffer))
+ del self.data_buffer[:]
+ self.data_buffer = None
+
+ def destroy(self):
+ """ Method used to allow class instance to be garbage-collected. """
+ self.check_data_buffer()
+ self._parser.StartElementHandler = None
+ self._parser.EndElementHandler = None
+ self._parser.CharacterDataHandler = None
+ self._parser.StartNamespaceDeclHandler = None
+
+ def starttag(self, tag, attrs):
+ """XML Parser callback. Used internally"""
+ self.check_data_buffer()
+ self._inc_depth()
+ self.DEBUG(DBG_NODEBUILDER, "DEPTH -> %i , tag -> %s, attrs -> %s" % (self.__depth, tag, `attrs`), 'down')
+ if self.__depth == self._dispatch_depth:
+ if not self._mini_dom :
+ self._mini_dom = Node(tag=tag, attrs=attrs, nsp = self._document_nsp, node_built=True)
+ else:
+ Node.__init__(self._mini_dom,tag=tag, attrs=attrs, nsp = self._document_nsp, node_built=True)
+ self._ptr = self._mini_dom
+ elif self.__depth > self._dispatch_depth:
+ self._ptr.kids.append(Node(tag=tag,parent=self._ptr,attrs=attrs, node_built=True))
+ self._ptr = self._ptr.kids[-1]
+ if self.__depth == 1:
+ self._document_attrs = {}
+ self._document_nsp = {}
+ nsp, name = (['']+tag.split(':'))[-2:]
+ for attr,val in attrs.items():
+ if attr == 'xmlns':
+ self._document_nsp[u''] = val
+ elif attr.startswith('xmlns:'):
+ self._document_nsp[attr[6:]] = val
+ else:
+ self._document_attrs[attr] = val
+ ns = self._document_nsp.get(nsp, 'http://www.gajim.org/xmlns/undeclared-root')
+ try:
+ self.stream_header_received(ns, name, attrs)
+ except ValueError, e:
+ self._document_attrs = None
+ raise ValueError(str(e))
+ if not self.last_is_data and self._ptr.parent:
+ self._ptr.parent.data.append('')
+ self.last_is_data = 0
+
+ def endtag(self, tag ):
+ """XML Parser callback. Used internally"""
+ self.DEBUG(DBG_NODEBUILDER, "DEPTH -> %i , tag -> %s" % (self.__depth, tag), 'up')
+ self.check_data_buffer()
+ if self.__depth == self._dispatch_depth:
+ if self._mini_dom.getName() == 'error':
+ self.streamError = self._mini_dom.getChildren()[0].getName()
+ self.dispatch(self._mini_dom)
+ elif self.__depth > self._dispatch_depth:
+ self._ptr = self._ptr.parent
+ else:
+ self.DEBUG(DBG_NODEBUILDER, "Got higher than dispatch level. Stream terminated?", 'stop')
+ self._dec_depth()
+ self.last_is_data = 0
+ if self.__depth == 0: self.stream_footer_received()
+
+ def handle_cdata(self, data):
+ """XML Parser callback. Used internally"""
+ self.DEBUG(DBG_NODEBUILDER, data, 'data')
+ if self.last_is_data:
+ if self.data_buffer:
+ self.data_buffer.append(data)
+ elif self._ptr:
+ self.data_buffer = [data]
+ self.last_is_data = 1
+
+ def handle_namespace_start(self, prefix, uri):
+ """XML Parser callback. Used internally"""
+ self.check_data_buffer()
+
+ def DEBUG(self, level, text, comment=None):
+ """ Gets all NodeBuilder walking events. Can be used for debugging if redefined."""
+ def getDom(self):
+ """ Returns just built Node. """
+ self.check_data_buffer()
+ return self._mini_dom
+ def dispatch(self,stanza):
+ """ Gets called when the NodeBuilder reaches some level of depth on it's way up with the built
+ node as argument. Can be redefined to convert incoming XML stanzas to program events. """
+ def stream_header_received(self,ns,tag,attrs):
+ """ Method called when stream just opened. """
+ self.check_data_buffer()
+ def stream_footer_received(self):
+ """ Method called when stream just closed. """
+ self.check_data_buffer()
+
+ def has_received_endtag(self, level=0):
+ """ Return True if at least one end tag was seen (at level) """
+ return self.__depth <= level and self.__max_depth > level
+
+ def _inc_depth(self):
+ self.__last_depth = self.__depth
+ self.__depth += 1
+ self.__max_depth = max(self.__depth, self.__max_depth)
+
+ def _dec_depth(self):
+ self.__last_depth = self.__depth
+ self.__depth -= 1
+
+def XML2Node(xml):
+ """ Converts supplied textual string into XML node. Handy f.e. for reading configuration file.
+ Raises xml.parser.expat.parsererror if provided string is not well-formed XML. """
+ return NodeBuilder(xml).getDom()
+
+def BadXML2Node(xml):
+ """ Converts supplied textual string into XML node. Survives if the xml data is cut half way through.
+ I.e. "<html>some text <br>some more text" will be parsed just fine.
+ Will raise xml.parser.expat.parsererror on misplaced
+ tags though. F.e. "<b>some text <br>some more text</b>" will not work."""
+ return NodeBuilder(xml).getDom()
diff --git a/libs/xmpp/transports.py b/libs/xmpp/transports.py
new file mode 100644
index 00000000..f3d13167
--- /dev/null
+++ b/libs/xmpp/transports.py
@@ -0,0 +1,339 @@
+## transports.py
+##
+## Copyright (C) 2003-2004 Alexey "Snake" Nezhdanov
+##
+## This program is free software; you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation; either version 2, or (at your option)
+## any later version.
+##
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+# $Id: transports.py,v 1.35 2009/04/07 08:34:09 snakeru Exp $
+
+"""
+This module contains the low-level implementations of xmpppy connect methods or
+(in other words) transports for xmpp-stanzas.
+Currently here is three transports:
+direct TCP connect - TCPsocket class
+proxied TCP connect - HTTPPROXYsocket class (CONNECT proxies)
+TLS connection - TLS class. Can be used for SSL connections also.
+
+Transports are stackable so you - f.e. TLS use HTPPROXYsocket or TCPsocket as more low-level transport.
+
+Also exception 'error' is defined to allow capture of this module specific exceptions.
+"""
+
+import socket, select, base64, dispatcher, sys
+from simplexml import ustr
+from client import PlugIn
+from protocol import *
+
+# determine which DNS resolution library is available
+HAVE_DNSPYTHON = False
+HAVE_PYDNS = False
+try:
+ import dns.resolver # http://dnspython.org/
+ HAVE_DNSPYTHON = True
+except ImportError:
+ try:
+ import DNS # http://pydns.sf.net/
+ HAVE_PYDNS = True
+ except ImportError:
+ pass
+
+DATA_RECEIVED = 'DATA RECEIVED'
+DATA_SENT = 'DATA SENT'
+
+class error:
+ """An exception to be raised in case of low-level errors in methods of 'transports' module."""
+ def __init__(self, comment):
+ """Cache the descriptive string"""
+ self._comment = comment
+
+ def __str__(self):
+ """Serialise exception into pre-cached descriptive string."""
+ return self._comment
+
+BUFLEN = 1024
+class TCPsocket(PlugIn):
+ """ This class defines direct TCP connection method. """
+ def __init__(self, server = None, use_srv = True):
+ """ Cache connection point 'server'. 'server' is the tuple of (host, port)
+ absolutely the same as standard tcp socket uses. However library will lookup for
+ ('_xmpp-client._tcp.' + host) SRV record in DNS and connect to the found (if it is)
+ server instead
+ """
+ PlugIn.__init__(self)
+ self.DBG_LINE = 'socket'
+ self._exported_methods = [self.send, self.disconnect]
+ self._server, self.use_srv = server, use_srv
+
+ def srv_lookup(self, server):
+ " SRV resolver. Takes server=(host, port) as argument. Returns new (host, port) pair "
+ if HAVE_DNSPYTHON or HAVE_PYDNS:
+ host, port = server
+ possible_queries = ['_xmpp-client._tcp.' + host]
+
+ for query in possible_queries:
+ try:
+ if HAVE_DNSPYTHON:
+ answers = [x for x in dns.resolver.query(query, 'SRV')]
+ if answers:
+ host = str(answers[0].target)
+ port = int(answers[0].port)
+ break
+ elif HAVE_PYDNS:
+ # ensure we haven't cached an old configuration
+ DNS.DiscoverNameServers()
+ response = DNS.Request().req(query, qtype = 'SRV')
+ answers = response.answers
+ if len(answers) > 0:
+ # ignore the priority and weight for now
+ _, _, port, host = answers[0]['data']
+ del _
+ port = int(port)
+ break
+ except:
+ self.DEBUG('An error occurred while looking up %s' % query, 'warn')
+ server = (host, port)
+ else:
+ self.DEBUG("Could not load one of the supported DNS libraries (dnspython or pydns). SRV records will not be queried and you may need to set custom hostname/port for some servers to be accessible.\n", 'warn')
+ # end of SRV resolver
+ return server
+
+ def plugin(self, owner):
+ """ Fire up connection. Return non-empty string on success.
+ Also registers self.disconnected method in the owner's dispatcher.
+ Called internally. """
+ if not self._server: self._server = (self._owner.Server, 5222)
+ if self.use_srv: server = self.srv_lookup(self._server)
+ else: server = self._server
+ if not self.connect(server): return
+ self._owner.Connection = self
+ self._owner.RegisterDisconnectHandler(self.disconnected)
+ return 'ok'
+
+ def getHost(self):
+ """ Return the 'host' value that the connection is [will be] made to."""
+ return self._server[0]
+ def getPort(self):
+ """ Return the 'port' value that the connection is [will be] made to."""
+ return self._server[1]
+
+ def connect(self, server = None):
+ """ Try to connect to the given host/port. Does not lookup for SRV record.
+ Returns non-empty string on success. """
+ try:
+ if not server: server = self._server
+ self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self._sock.connect((server[0], int(server[1])))
+ self._send = self._sock.sendall
+ self._recv = self._sock.recv
+ self.DEBUG("Successfully connected to remote host %s" % `server`, 'start')
+ return 'ok'
+ except socket.error, (errno, strerror):
+ self.DEBUG("Failed to connect to remote host %s: %s (%s)" % (`server`, strerror, errno), 'error')
+ except: pass
+
+ def plugout(self):
+ """ Disconnect from the remote server and unregister self.disconnected method from
+ the owner's dispatcher. """
+ self._sock.close()
+ if self._owner.__dict__.has_key('Connection'):
+ del self._owner.Connection
+ self._owner.UnregisterDisconnectHandler(self.disconnected)
+
+ def receive(self):
+ """ Reads all pending incoming data.
+ In case of disconnection calls owner's disconnected() method and then raises IOError exception."""
+ try: received = self._recv(BUFLEN)
+ except socket.sslerror, e:
+ self._seen_data = 0
+ if e[0] == socket.SSL_ERROR_WANT_READ: return ''
+ if e[0] == socket.SSL_ERROR_WANT_WRITE: return ''
+ self.DEBUG('Socket error while receiving data', 'error')
+ sys.exc_clear()
+ self._owner.disconnected()
+ raise IOError("Disconnected from server")
+ except: received = ''
+
+ while self.pending_data(0):
+ try: add = self._recv(BUFLEN)
+ except: add = ''
+ received += add
+ if not add: break
+
+ if len(received): # length of 0 means disconnect
+ self._seen_data = 1
+ self.DEBUG(received, 'got')
+ if hasattr(self._owner, 'Dispatcher'):
+ self._owner.Dispatcher.Event('', DATA_RECEIVED, received)
+ else:
+ self.DEBUG('Socket error while receiving data', 'error')
+ self._owner.disconnected()
+ raise IOError("Disconnected from server")
+ return received
+
+ def send(self, raw_data):
+ """ Writes raw outgoing data. Blocks until done.
+ If supplied data is unicode string, encodes it to utf-8 before send."""
+ if type(raw_data) == type(u''): raw_data = raw_data.encode('utf-8')
+ elif type(raw_data) <> type(''): raw_data = ustr(raw_data).encode('utf-8')
+ try:
+ self._send(raw_data)
+ # Avoid printing messages that are empty keepalive packets.
+ if raw_data.strip():
+ self.DEBUG(raw_data, 'sent')
+ if hasattr(self._owner, 'Dispatcher'): # HTTPPROXYsocket will send data before we have a Dispatcher
+ self._owner.Dispatcher.Event('', DATA_SENT, raw_data)
+ except:
+ self.DEBUG("Socket error while sending data", 'error')
+ self._owner.disconnected()
+
+ def pending_data(self, timeout = 0):
+ """ Returns true if there is a data ready to be read. """
+ return select.select([self._sock], [], [], timeout)[0]
+
+ def disconnect(self):
+ """ Closes the socket. """
+ self.DEBUG("Closing socket", 'stop')
+ self._sock.close()
+
+ def disconnected(self):
+ """ Called when a Network Error or disconnection occurs.
+ Designed to be overidden. """
+ self.DEBUG("Socket operation failed", 'error')
+
+DBG_CONNECT_PROXY = 'CONNECTproxy'
+class HTTPPROXYsocket(TCPsocket):
+ """ HTTP (CONNECT) proxy connection class. Uses TCPsocket as the base class
+ redefines only connect method. Allows to use HTTP proxies like squid with
+ (optionally) simple authentication (using login and password). """
+ def __init__(self, proxy, server, use_srv = True):
+ """ Caches proxy and target addresses.
+ 'proxy' argument is a dictionary with mandatory keys 'host' and 'port' (proxy address)
+ and optional keys 'user' and 'password' to use for authentication.
+ 'server' argument is a tuple of host and port - just like TCPsocket uses. """
+ TCPsocket.__init__(self, server, use_srv)
+ self.DBG_LINE = DBG_CONNECT_PROXY
+ self._proxy = proxy
+
+ def plugin(self, owner):
+ """ Starts connection. Used interally. Returns non-empty string on success."""
+ owner.debug_flags.append(DBG_CONNECT_PROXY)
+ return TCPsocket.plugin(self, owner)
+
+ def connect(self, dupe = None):
+ """ Starts connection. Connects to proxy, supplies login and password to it
+ (if were specified while creating instance). Instructs proxy to make
+ connection to the target server. Returns non-empty string on success. 
+ if not TCPsocket.connect(self, (self._proxy['host'], self._proxy['port'])): return
+ self.DEBUG("Proxy server contacted, performing authentification", 'start')
+ connector = ['CONNECT %s:%s HTTP/1.0' % self._server,
+ 'Proxy-Connection: Keep-Alive',
+ 'Pragma: no-cache',
+ 'Host: %s:%s' % self._server,
+ 'User-Agent: HTTPPROXYsocket/v0.1']
+ if self._proxy.has_key('user') and self._proxy.has_key('password'):
+ credentials = '%s:%s' % (self._proxy['user'], self._proxy['password'])
+ credentials = base64.encodestring(credentials).strip()
+ connector.append('Proxy-Authorization: Basic ' + credentials)
+ connector.append('\r\n')
+ self.send('\r\n'.join(connector))
+ try: reply = self.receive().replace('\r', '')
+ except IOError:
+ self.DEBUG('Proxy suddenly disconnected', 'error')
+ self._owner.disconnected()
+ return
+ try: proto, code, desc = reply.split('\n')[0].split(' ', 2)
+ except: raise error('Invalid proxy reply')
+ if code <> '200':
+ self.DEBUG('Invalid proxy reply: %s %s %s' % (proto, code, desc), 'error')
+ self._owner.disconnected()
+ return
+ while reply.find('\n\n') == -1:
+ try: reply += self.receive().replace('\r', '')
+ except IOError:
+ self.DEBUG('Proxy suddenly disconnected', 'error')
+ self._owner.disconnected()
+ return
+ self.DEBUG("Authentification successfull. Jabber server contacted.", 'ok')
+ return 'ok'
+
+ def DEBUG(self, text, severity):
+ """Overwrites DEBUG tag to allow debug output be presented as "CONNECTproxy"."""
+ return self._owner.DEBUG(DBG_CONNECT_PROXY, text, severity)
+
+class TLS(PlugIn):
+ """ TLS connection used to encrypts already estabilished tcp connection."""
+ def PlugIn(self, owner, now = 0):
+ """ If the 'now' argument is true then starts using encryption immediately.
+ If 'now' is false then starts encryption as soon as the TLS feature is
+ declared by the server (if it was already declared - that is ok).
+ """
+ if owner.__dict__.has_key('TLS'): return # Already enabled.
+ PlugIn.PlugIn(self, owner)
+ DBG_LINE = 'TLS'
+ if now: return self._startSSL()
+ if self._owner.Dispatcher.Stream.features:
+ try: self.FeaturesHandler(self._owner.Dispatcher, self._owner.Dispatcher.Stream.features)
+ except NodeProcessed: pass
+ else: self._owner.RegisterHandlerOnce('features', self.FeaturesHandler, xmlns = NS_STREAMS)
+ self.starttls = None
+
+ def plugout(self, now = 0):
+ """ Unregisters TLS handlers from the owner's dispatcher. Take note that encryption
+ cannot be stopped once started. You can only break the connection and start over."""
+ self._owner.UnregisterHandler('features', self.FeaturesHandler, xmlns = NS_STREAMS)
+ self._owner.UnregisterHandler('proceed', self.StartTLSHandler, xmlns = NS_TLS)
+ self._owner.UnregisterHandler('failure', self.StartTLSHandler, xmlns = NS_TLS)
+
+ def FeaturesHandler(self, conn, feats):
+ """ Used to analyse server tag for TLS support.
+ If TLS is supported starts the encryption negotiation. Used internally"""
+ if not feats.getTag('starttls', namespace = NS_TLS):
+ self.DEBUG("TLS unsupported by remote server.", 'warn')
+ return
+ self.DEBUG("TLS supported by remote server. Requesting TLS start.", 'ok')
+ self._owner.RegisterHandlerOnce('proceed', self.StartTLSHandler, xmlns = NS_TLS)
+ self._owner.RegisterHandlerOnce('failure', self.StartTLSHandler, xmlns = NS_TLS)
+ self._owner.Connection.send('' % NS_TLS)
+ raise NodeProcessed
+
+ def pending_data(self, timeout = 0):
+ """ Returns true if there possible is a data ready to be read. """
+ return self._tcpsock._seen_data or select.select([self._tcpsock._sock], [], [], timeout)[0]
+
+ def _startSSL(self):
+ """ Immediately switch socket to TLS mode. Used internally."""
+ """ Here we should switch pending_data to hint mode."""
+ tcpsock = self._owner.Connection
+ tcpsock._sslObj = socket.ssl(tcpsock._sock, None, None)
+ tcpsock._sslIssuer = tcpsock._sslObj.issuer()
+ tcpsock._sslServer = tcpsock._sslObj.server()
+ tcpsock._recv = tcpsock._sslObj.read
+ tcpsock._send = tcpsock._sslObj.write
+
+ tcpsock._seen_data = 1
+ self._tcpsock = tcpsock
+ tcpsock.pending_data = self.pending_data
+ tcpsock._sock.setblocking(0)
+
+ self.starttls = 'success'
+
+ def StartTLSHandler(self, conn, starttls):
+ """ Handle server reply if TLS is allowed to process. Behaves accordingly.
+ Used internally."""
+ if starttls.getNamespace() <> NS_TLS: return
+ self.starttls = starttls.getName()
+ if self.starttls == 'failure':
+ self.DEBUG("Got starttls response: " + self.starttls, 'error')
+ return
+ self.DEBUG("Got starttls proceed response. Switching to TLS/SSL...", 'ok')
+ self._startSSL()
+ self._owner.Dispatcher.PlugOut()
+ dispatcher.Dispatcher().PlugIn(self._owner)