Compare commits

...

70 Commits

Author SHA1 Message Date
Ruud
1cd8040692 One up 2013-03-17 16:39:09 +01:00
Ruud
7268e02386 zindex fixes & empty home element 2013-03-17 15:50:45 +01:00
Ruud
805aa3ca9f Split query to fix title bug. fix #1510 2013-03-17 15:14:04 +01:00
Ruud
29cb34551c Hide title and description by default 2013-03-17 14:42:24 +01:00
Ruud
d267be4455 Only sleep on 404 when not in dev mode 2013-03-17 14:10:29 +01:00
Ruud
92f4ade371 Save the last view properly 2013-03-17 12:55:07 +01:00
Ruud
9235eda73b Reverse merging using priority 2013-03-17 12:42:14 +01:00
Ruud
1fe23afd1b Don't mark first title default 2013-03-17 11:40:22 +01:00
Ruud
09637c3069 Revert "Search priority"
This reverts commit 2cafd509fc.
2013-03-17 11:39:54 +01:00
Ruud
2cafd509fc Search priority 2013-03-17 02:01:48 +01:00
Ruud
62cc570ab2 Mask zindex fix 2013-03-17 01:52:27 +01:00
Ruud
1ec9370e68 Make sure to set default title on refresh. fix #1436 2013-03-17 01:40:36 +01:00
Ruud
5b4c60ecba Optimize dashboard.soon with joins 2013-03-17 01:14:15 +01:00
Ruud
7b7488ece8 Dashboard split
Do more with snatched and other statuses
2013-03-16 22:23:11 +01:00
Ruud
4ba7ff9f27 Search mask fix 2013-03-16 22:21:33 +01:00
Ruud
df2d1aca4b Allow email notification to send to multiple addresses 2013-03-16 15:27:38 +01:00
Ruud
4fcba70c9a Cleanup dashboard snatched movies 2013-03-16 11:53:55 +01:00
Ruud
d0fc20ca6e Add last_edit to movie and release tables 2013-03-16 11:51:46 +01:00
Ruud
9402b54f9b Force to wanted after wizard 2013-03-15 16:38:26 +01:00
Ruud
f0e7795b9b Ubuntu init script /etc/default 2013-03-15 14:33:34 +01:00
dfiore1230
bba18d8bc9 added the ability to source /etc/default/couchpotato file
added the ability to source /etc/default/couchpotato file by testing for file existence and source when available

added lines 39 - 45
2013-03-15 13:58:33 +01:00
Ruud
0494e5fc8f Cleanup pushalot notifier 2013-03-13 22:08:51 +01:00
Travis La Marr
df1b46272d Pushalot notifier for Windows Phone 7/8 and Windows 8 2013-03-13 21:34:18 +01:00
Ruud Burger
ed068f09b0 Only chown PID file 2013-03-12 10:40:20 +01:00
Ruud Burger
5e852d05ee Only remove PID file 2013-03-12 08:29:29 +01:00
Ruud Burger
d111393bd6 Remove PID path 2013-03-12 08:21:23 +01:00
Ruud
89bff73431 Decode torrent hash for magnets also 2013-03-09 18:15:06 +01:00
Ruud
cd16dddf13 Make sure to use the correct hash for utorrent 2013-03-08 14:45:32 +01:00
Ruud
25605c45b9 IPTorrent download url fix
Thanks seedboy
2013-03-08 14:28:46 +01:00
Ruud
b6d0d54609 Add params to cache_key 2013-03-04 23:11:40 +01:00
Ruud
98981dac27 Suggestions 2013-03-04 23:11:26 +01:00
Ruud
ddf03cbcf2 Diskspace event 2013-03-04 23:11:20 +01:00
Ruud
1e1abf407c Dashboard 2013-03-04 23:11:13 +01:00
Ruud
1267cdac4d Remove print from TPB provider 2013-02-24 00:18:10 +01:00
Ruud
05bcee12ae No need for folder for pid file 2013-02-24 00:17:57 +01:00
Ruud
fc3f15e0cf Remove dots and spaces from left movie name. fixes #1428 2013-02-23 17:45:27 +01:00
Ruud
0a7765f639 uTorrent status support. closes #1391
Thanks to Stourwalk
2013-02-23 16:36:12 +01:00
Ruud
c214458770 IPTorrents, don't continue if nothing found. fixes #1423 2013-02-23 16:09:53 +01:00
Ruud
bfe501c84a Better XBMC notification image. close #1427 2013-02-23 16:01:20 +01:00
Ruud
e034465df8 Show newznab name in release list. fix #1400 2013-02-23 15:58:36 +01:00
Ruud
a7b78d4131 Tornado update 2013-02-22 23:20:16 +01:00
Ruud
3eed34c710 Gzip Tornado response 2013-02-22 22:56:08 +01:00
Ruud
9cb3bef156 Fallback to non-minified scripts 2013-02-22 21:23:38 +01:00
Ruud
46c7e3fbed IPTorrent support. closes #1411
Thanks to @seedboy
2013-02-15 21:36:59 +01:00
Ruud
eed0382b41 Host to 0.0.0.0 2013-02-14 23:01:34 +01:00
Ruud
4e45c94fc3 Renamer NTFS permission fix #778 2013-02-12 23:23:18 +01:00
Ruud
0a11dc6673 Set file permissions on .nzb or torrent file. closes #1362
Thanks clinton
2013-02-12 23:12:40 +01:00
Ruud
4ede2c20a1 Goodfilm automation provider. closes #1366 2013-02-12 23:10:34 +01:00
Ruud
af0cf523e3 Fedora init script. closes #1399 2013-02-12 22:56:02 +01:00
Ruud
3908e00650 Stop progress search on fail. fix #1409 2013-02-12 22:49:44 +01:00
Ruud
f9bdf6da1c Send correct headers to SABNZBd. fix #1406 2013-02-12 22:42:26 +01:00
Ruud
87cdf9222d Hide test notification button 2013-02-04 23:05:21 +01:00
Ruud
2ca2cc9597 Don't fire openpage twice on start 2013-02-04 22:36:22 +01:00
Ruud
edb232df60 Don't fire progress until other request ended 2013-02-04 22:35:25 +01:00
Ruud
af113c0ffd Minifier 2 2013-02-04 21:59:12 +01:00
Ruud
856b495995 Minifier 2013-02-04 21:48:02 +01:00
Ruud
a56bbf0b3b CP API cleanup 2013-02-03 21:50:29 +01:00
Ruud
4b54113f08 Use CP api for movie check 2013-02-03 18:20:11 +01:00
Ruud
52371b7705 Daemonize cleanup 2013-02-02 23:16:02 +01:00
Ruud
629bead919 Raise current exception 2013-02-02 12:02:54 +01:00
Ruud
c7cd72787f Ignore extracted folder. fix #1369 2013-02-02 11:49:12 +01:00
Ruud
a60e9dc4c3 Encode nzbname before sending it to NZBGet. fix #1321 2013-01-25 21:29:25 +01:00
Ruud
b168c1364d Blackhole error on manual download. fix #1351 2013-01-25 21:08:19 +01:00
Ruud
14fffda3ff Don't add signal handlers when daemonized. fix #1346 2013-01-25 15:26:06 +01:00
Ruud
51364a3c25 Transmission params not set properly. fix #1344 2013-01-25 13:12:10 +01:00
Ruud
c6642ffeb7 Allow 1080p in brrip releases. fixes #1339 2013-01-25 12:57:41 +01:00
Ruud
9fe9ccf0ad Open browser didn't work. 2013-01-24 23:51:11 +01:00
Ruud
cb92b00534 Manage setting instead of getting folders. fix #1307 2013-01-24 23:33:48 +01:00
Ruud
7d3780133f Missing download function. fix #1337 2013-01-24 23:02:36 +01:00
Ruud
f23b9d7cb9 Use host from config again. fix #1329 2013-01-24 22:56:33 +01:00
105 changed files with 4396 additions and 2385 deletions

View File

@@ -78,6 +78,7 @@ def page_not_found(error):
r = '%s%s' % (request.url.rstrip('/'), index_url + '#' + url)
return redirect(r)
else:
time.sleep(0.1)
if not Env.get('dev'):
time.sleep(0.1)
return 'Wrong API key used', 404

View File

@@ -10,7 +10,6 @@ from uuid import uuid4
import os
import platform
import signal
import sys
import time
import traceback
import webbrowser
@@ -153,7 +152,7 @@ class Core(Plugin):
def createBaseUrl(self):
host = Env.setting('host')
if host == '0.0.0.0':
if host == '0.0.0.0' or host == '':
host = 'localhost'
port = Env.setting('port')
@@ -177,9 +176,10 @@ class Core(Plugin):
})
def signalHandler(self):
if Env.get('daemonized'): return
def signal_handler(signal, frame):
fireEvent('app.shutdown')
fireEvent('app.shutdown', single = True)
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

View File

@@ -1,15 +1,60 @@
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from minify.cssmin import cssmin
from minify.jsmin import jsmin
import os
import traceback
log = CPLog(__name__)
class ClientScript(Plugin):
urls = {
'style': {},
'script': {},
core_static = {
'style': [
'style/main.css',
'style/uniform.generic.css',
'style/uniform.css',
'style/settings.css',
],
'script': [
'scripts/library/mootools.js',
'scripts/library/mootools_more.js',
'scripts/library/prefix_free.js',
'scripts/library/uniform.js',
'scripts/library/form_replacement/form_check.js',
'scripts/library/form_replacement/form_radio.js',
'scripts/library/form_replacement/form_dropdown.js',
'scripts/library/form_replacement/form_selectoption.js',
'scripts/library/question.js',
'scripts/library/scrollspy.js',
'scripts/library/spin.js',
'scripts/couchpotato.js',
'scripts/api.js',
'scripts/library/history.js',
'scripts/page.js',
'scripts/block.js',
'scripts/block/navigation.js',
'scripts/block/footer.js',
'scripts/block/menu.js',
'scripts/page/home.js',
'scripts/page/wanted.js',
'scripts/page/settings.js',
'scripts/page/about.js',
'scripts/page/manage.js',
],
}
urls = {'style': {}, 'script': {}, }
minified = {'style': {}, 'script': {}, }
paths = {'style': {}, 'script': {}, }
comment = {
'style': '/*** %s:%d ***/\n',
'script': '// %s:%d\n'
}
html = {
@@ -24,6 +69,66 @@ class ClientScript(Plugin):
addEvent('clientscript.get_styles', self.getStyles)
addEvent('clientscript.get_scripts', self.getScripts)
addEvent('app.load', self.minify)
self.addCore()
def addCore(self):
for static_type in self.core_static:
for rel_path in self.core_static.get(static_type):
file_path = os.path.join(Env.get('app_dir'), 'couchpotato', 'static', rel_path)
core_url = 'api/%s/static/%s?%s' % (Env.setting('api_key'), rel_path, tryInt(os.path.getmtime(file_path)))
if static_type == 'script':
self.registerScript(core_url, file_path, position = 'front')
else:
self.registerStyle(core_url, file_path, position = 'front')
def minify(self):
for file_type in ['style', 'script']:
ext = 'js' if file_type is 'script' else 'css'
positions = self.paths.get(file_type, {})
for position in positions:
files = positions.get(position)
self._minify(file_type, files, position, position + '.' + ext)
def _minify(self, file_type, files, position, out):
cache = Env.get('cache_dir')
out_name = 'minified_' + out
out = os.path.join(cache, out_name)
raw = []
for file_path in files:
f = open(file_path, 'r').read()
if file_type == 'script':
data = jsmin(f)
else:
data = cssmin(f)
data = data.replace('../images/', '../static/images/')
raw.append({'file': file_path, 'date': int(os.path.getmtime(file_path)), 'data': data})
# Combine all files together with some comments
data = ''
for r in raw:
data += self.comment.get(file_type) % (r.get('file'), r.get('date'))
data += r.get('data') + '\n\n'
self.createFile(out, data.strip())
if not self.minified.get(file_type):
self.minified[file_type] = {}
if not self.minified[file_type].get(position):
self.minified[file_type][position] = []
minified_url = 'api/%s/file.cache/%s?%s' % (Env.setting('api_key'), out_name, tryInt(os.path.getmtime(out)))
self.minified[file_type][position].append(minified_url)
def getStyles(self, *args, **kwargs):
return self.get('style', *args, **kwargs)
@@ -35,22 +140,30 @@ class ClientScript(Plugin):
data = '' if as_html else []
try:
try:
if not Env.get('dev'):
return self.minified[type][location]
except:
pass
return self.urls[type][location]
except Exception, e:
log.error(e)
except:
log.error('Error getting minified %s, %s: %s', (type, location, traceback.format_exc()))
return data
def registerStyle(self, path, position = 'head'):
self.register(path, 'style', position)
def registerStyle(self, api_path, file_path, position = 'head'):
self.register(api_path, file_path, 'style', position)
def registerScript(self, path, position = 'head'):
self.register(path, 'script', position)
def registerScript(self, api_path, file_path, position = 'head'):
self.register(api_path, file_path, 'script', position)
def register(self, filepath, type, location):
def register(self, api_path, file_path, type, location):
if not self.urls[type].get(location):
self.urls[type][location] = []
self.urls[type][location].append(api_path)
filePath = filepath
self.urls[type][location].append(filePath)
if not self.paths[type].get(location):
self.paths[type][location] = []
self.paths[type][location].append(file_path)

View File

@@ -57,6 +57,9 @@ class Downloader(Provider):
return self.getAllDownloadStatus()
def getAllDownloadStatus(self):
return
def _removeFailed(self, item):
if self.isDisabled(manual = True, data = {}):
return

View File

@@ -1,6 +1,7 @@
from __future__ import with_statement
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
import os
import traceback
@@ -36,6 +37,7 @@ class Blackhole(Downloader):
log.info('Downloading %s to %s.', (data.get('type'), fullPath))
with open(fullPath, 'wb') as f:
f.write(filedata)
os.chmod(fullPath, Env.getPermission('file'))
return True
else:
log.info('File %s already exists.', fullPath)
@@ -60,5 +62,11 @@ class Blackhole(Downloader):
return ['nzb']
def isEnabled(self, manual, data = {}):
for_type = ['both']
if data and 'torrent' in data.get('type'):
for_type.append('torrent')
elif data:
for_type.append(data.get('type'))
return super(Blackhole, self).isEnabled(manual, data) and \
((self.conf('use_for') in ['both', 'torrent' if 'torrent' in data.get('type') else data.get('type')]))
((self.conf('use_for') in for_type))

View File

@@ -1,5 +1,6 @@
from base64 import standard_b64encode
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
import re
@@ -24,7 +25,7 @@ class NZBGet(Downloader):
log.info('Sending "%s" to NZBGet.', data.get('name'))
url = self.url % {'host': self.conf('host'), 'password': self.conf('password')}
nzb_name = '%s.nzb' % self.createNzbName(data, movie)
nzb_name = ss('%s.nzb' % self.createNzbName(data, movie))
rpc = xmlrpclib.ServerProxy(url)
try:

View File

@@ -2,6 +2,7 @@ from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import tryUrlencode, ss
from couchpotato.core.helpers.variable import cleanHost, mergeDicts
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
from urllib2 import URLError
import json
import traceback
@@ -38,9 +39,9 @@ class Sabnzbd(Downloader):
try:
if params.get('mode') is 'addfile':
sab = self.urlopen(url, timeout = 60, params = {'nzbfile': (ss(nzb_filename), filedata)}, multipart = True, show_error = False)
sab = self.urlopen(url, timeout = 60, params = {'nzbfile': (ss(nzb_filename), filedata)}, multipart = True, show_error = False, headers = {'User-Agent': Env.getIdentifier()})
else:
sab = self.urlopen(url, timeout = 60, show_error = False)
sab = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()})
except URLError:
log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0))
return False
@@ -139,7 +140,7 @@ class Sabnzbd(Downloader):
'output': 'json'
}))
data = self.urlopen(url, timeout = 60, show_error = False)
data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()})
if use_json:
d = json.loads(data)
if d.get('error'):

View File

@@ -38,6 +38,7 @@ class Transmission(Downloader):
'download-dir': folder_path
}
torrent_params = {}
if self.conf('ratio'):
torrent_params = {
'seedRatioLimit': self.conf('ratio'),
@@ -58,7 +59,8 @@ class Transmission(Downloader):
remote_torrent = trpc.add_torrent_file(b64encode(filedata), arguments = params)
# Change settings of added torrents
trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)
if torrent_params:
trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)
return True
except Exception, err:

View File

@@ -1,3 +1,4 @@
from base64 import b16encode, b32decode
from bencode import bencode, bdecode
from couchpotato.core.downloaders.base import Downloader
from couchpotato.core.helpers.encoding import isInt, ss
@@ -6,6 +7,7 @@ from hashlib import sha1
from multipartpost import MultipartPostHandler
import cookielib
import httplib
import json
import re
import time
import urllib
@@ -37,6 +39,7 @@ class uTorrent(Downloader):
if not filedata and data.get('type') == 'torrent':
log.error('Failed sending torrent, no data')
return False
if data.get('type') == 'torrent_magnet':
torrent_hash = re.findall('urn:btih:([\w]{32,40})', data.get('url'))[0].upper()
torrent_params['trackers'] = '%0D%0A%0D%0A'.join(self.torrent_trackers)
@@ -45,6 +48,10 @@ class uTorrent(Downloader):
torrent_hash = sha1(bencode(info)).hexdigest().upper()
torrent_filename = self.createFileName(data, filedata, movie)
# Convert base 32 to hex
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash))
# Send request to uTorrent
try:
if not self.utorrent_api:
@@ -64,6 +71,59 @@ class uTorrent(Downloader):
log.error('Failed to send torrent to uTorrent: %s', err)
return False
def getAllDownloadStatus(self):
log.debug('Checking uTorrent download status.')
# Load host from config and split out port.
host = self.conf('host').split(':')
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
try:
self.utorrent_api = uTorrentAPI(host[0], port = host[1], username = self.conf('username'), password = self.conf('password'))
except Exception, err:
log.error('Failed to get uTorrent object: %s', err)
return False
data = ''
try:
data = self.utorrent_api.get_status()
queue = json.loads(data)
if queue.get('error'):
log.error('Error getting data from uTorrent: %s', queue.get('error'))
return False
except Exception, err:
log.error('Failed to get status from uTorrent: %s', err)
return False
if queue.get('torrents', []) == []:
log.debug('Nothing in queue')
return False
statuses = []
# Get torrents
for item in queue.get('torrents', []):
# item[21] = Paused | Downloading | Seeding | Finished
status = 'busy'
if item[21] == 'Finished' or item[21] == 'Seeding':
status = 'completed'
statuses.append({
'id': item[0],
'name': item[2],
'status': status,
'original_status': item[1],
'timeleft': item[10],
})
return statuses
class uTorrentAPI(object):
@@ -94,9 +154,7 @@ class uTorrentAPI(object):
try:
open_request = self.opener.open(request)
response = open_request.read()
log.debug('response: %s', response)
if response:
log.debug('uTorrent action successfull')
return response
else:
log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response)
@@ -133,3 +191,7 @@ class uTorrentAPI(object):
def pause_torrent(self, hash):
action = "action=pause&hash=%s" % hash
return self._request(action)
def get_status(self):
action = "list=1"
return self._request(action)

View File

@@ -104,6 +104,8 @@ def fireEvent(name, *args, **kwargs):
# Merge
if options['merge'] and len(results) > 0:
results.reverse() # Priority 1 is higher than 100
# Dict
if isinstance(results[0], dict):
merged = {}

View File

@@ -168,4 +168,4 @@ def randomString(size = 8, chars = string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
def splitString(str, split_on = ','):
return [x.strip() for x in str.split(split_on)]
return [x.strip() for x in str.split(split_on)] if str else []

View File

@@ -0,0 +1,25 @@
from migrate.changeset.schema import create_column
from sqlalchemy.schema import MetaData, Column, Table, Index
from sqlalchemy.types import Integer
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
# Change release, add last_edit and index
last_edit_column = Column('last_edit', Integer)
release = Table('release', meta, last_edit_column)
create_column(last_edit_column, release)
Index('ix_release_last_edit', release.c.last_edit).create()
# Change movie last_edit
last_edit_column = Column('last_edit', Integer)
movie = Table('movie', meta, last_edit_column)
Index('ix_movie_last_edit', movie.c.last_edit).create()
def downgrade(migrate_engine):
pass

View File

@@ -178,11 +178,14 @@ var NotificationBase = new Class({
},
addTestButton: function(fieldset, plugin_name){
var self = this;
var self = this,
button_name = self.testButtonName(fieldset);
if(button_name.contains('Notifications')) return;
new Element('.ctrlHolder.test_button').adopt(
new Element('a.button', {
'text': self.testButtonName(fieldset),
'text': button_name,
'events': {
'click': function(){
var button = fieldset.getElement('.test_button .button');
@@ -191,7 +194,7 @@ var NotificationBase = new Class({
Api.request('notify.'+plugin_name+'.test', {
'onComplete': function(json){
button.set('text', self.testButtonName(fieldset));
button.set('text', button_name);
if(json.success){
var message = new Element('span.success', {

View File

@@ -1,4 +1,5 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from email.mime.text import MIMEText
@@ -39,7 +40,7 @@ class Email(Notification):
# Send the e-mail
log.debug("Sending the email")
mailserver.sendmail(from_address, to_address, message.as_string())
mailserver.sendmail(from_address, splitString(to_address), message.as_string())
# Close the SMTP connection
mailserver.quit()

View File

@@ -0,0 +1,48 @@
from .main import Pushalot
def start():
return Pushalot()
config = [{
'name': 'pushalot',
'groups': [
{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'pushalot',
'description': 'for Windows Phone and Windows 8',
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
},
{
'name': 'auth_token',
'label': 'Auth Token',
},
{
'name': 'silent',
'label': 'Silent',
'default': 0,
'type': 'bool',
'description': 'Don\'t send Toast notifications. Only update Live Tile',
},
{
'name': 'important',
'label': 'High Priority',
'default': 0,
'type': 'bool',
'description': 'Send message with High priority.',
},
{
'name': 'on_snatch',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Also send message when movie is snatched.',
},
],
}
],
}]

View File

@@ -0,0 +1,37 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
import traceback
log = CPLog(__name__)
class Pushalot(Notification):
urls = {
'api': 'https://pushalot.com/api/sendmessage'
}
def notify(self, message = '', data = {}, listener = None):
if self.isDisabled(): return
data = {
'AuthorizationToken': self.conf('auth_token'),
'Title': self.default_title,
'Body': toUnicode(message),
'LinkTitle': toUnicode("CouchPotato"),
'link': toUnicode("https://couchpota.to/"),
'IsImportant': self.conf('important'),
'IsSilent': self.conf('silent'),
}
headers = {
'Content-type': 'application/x-www-form-urlencoded'
}
try:
self.urlopen(self.urls['api'], headers = headers, params = data, multipart = True, show_error = False)
return True
except:
log.error('PushAlot failed: %s', traceback.format_exc())
return False

View File

@@ -64,7 +64,7 @@ class Plugin(object):
for f in glob.glob(os.path.join(self.plugin_path, 'static', '*')):
ext = getExt(f)
if ext in ['js', 'css']:
fireEvent('register_%s' % ('script' if ext in 'js' else 'style'), path + os.path.basename(f))
fireEvent('register_%s' % ('script' if ext in 'js' else 'style'), path + os.path.basename(f), f)
def showStatic(self, filename):
d = os.path.join(self.plugin_path, 'static')
@@ -240,7 +240,6 @@ class Plugin(object):
del kwargs['cache_timeout']
data = self.urlopen(url, **kwargs)
if data:
self.setCache(cache_key, data, timeout = cache_timeout)
return data

View File

@@ -15,7 +15,7 @@ if os.name == 'nt':
raise ImportError("Missing the win32file module, which is a part of the prerequisite \
pywin32 package. You can get it from http://sourceforge.net/projects/pywin32/files/pywin32/");
else:
import win32file
import win32file #@UnresolvedImport
class FileBrowser(Plugin):
@@ -98,7 +98,7 @@ class FileBrowser(Plugin):
def has_hidden_attribute(self, filepath):
try:
attrs = ctypes.windll.kernel32.GetFileAttributesW(unicode(filepath))
attrs = ctypes.windll.kernel32.GetFileAttributesW(unicode(filepath)) #@UndefinedVariable
assert attrs != -1
result = bool(attrs & 2)
except (AttributeError, AssertionError):

View File

@@ -0,0 +1,6 @@
from .main import Dashboard
def start():
return Dashboard()
config = []

View File

@@ -0,0 +1,134 @@
from couchpotato import get_session
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.request import jsonified, getParams
from couchpotato.core.helpers.variable import splitString, tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Movie
from sqlalchemy.orm import joinedload_all
import random
import time
log = CPLog(__name__)
class Dashboard(Plugin):
def __init__(self):
addApiView('dashboard.suggestions', self.suggestView)
addApiView('dashboard.soon', self.getSoonView)
def newSuggestions(self):
movies = fireEvent('movie.list', status = ['active', 'done'], limit_offset = (20, 0), single = True)
movie_identifiers = [m['library']['identifier'] for m in movies[1]]
ignored_movies = fireEvent('movie.list', status = ['ignored', 'deleted'], limit_offset = (100, 0), single = True)
ignored_identifiers = [m['library']['identifier'] for m in ignored_movies[1]]
suggestions = fireEvent('movie.suggest', movies = movie_identifiers, ignore = ignored_identifiers, single = True)
suggest_status = fireEvent('status.get', 'suggest', single = True)
for suggestion in suggestions:
fireEvent('movie.add', params = {'identifier': suggestion}, force_readd = False, search_after = False, status_id = suggest_status.get('id'))
def suggestView(self):
db = get_session()
movies = db.query(Movie).limit(20).all()
identifiers = [m.library.identifier for m in movies]
suggestions = fireEvent('movie.suggest', movies = identifiers, single = True)
return jsonified({
'result': True,
'suggestions': suggestions
})
def getSoonView(self):
params = getParams()
db = get_session()
now = time.time()
# Get profiles first, determine pre or post theater
profiles = fireEvent('profile.all', single = True)
qualities = fireEvent('quality.all', single = True)
pre_releases = fireEvent('quality.pre_releases', single = True)
id_pre = {}
for quality in qualities:
id_pre[quality.get('id')] = quality.get('identifier') in pre_releases
# See what the profile contain and cache it
profile_pre = {}
for profile in profiles:
contains = {}
for profile_type in profile.get('types', []):
contains['theater' if id_pre.get(profile_type.get('quality_id')) else 'dvd'] = True
profile_pre[profile.get('id')] = contains
# Get all active movies
active_status = fireEvent('status.get', 'active', single = True)
subq = db.query(Movie).filter(Movie.status_id == active_status.get('id')).subquery()
q = db.query(Movie).join((subq, subq.c.id == Movie.id)) \
.options(joinedload_all('releases')) \
.options(joinedload_all('profile.types')) \
.options(joinedload_all('library.titles')) \
.options(joinedload_all('library.files')) \
.options(joinedload_all('status')) \
.options(joinedload_all('files'))
# Add limit
limit_offset = params.get('limit_offset')
limit = 12
if limit_offset:
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
limit = tryInt(splt[0])
all_movies = q.all()
if params.get('random', False):
random.shuffle(all_movies)
movies = []
for movie in all_movies:
pp = profile_pre.get(movie.profile.id)
eta = movie.library.info.get('release_date', {}) or {}
coming_soon = False
# Theater quality
if pp.get('theater') and fireEvent('searcher.could_be_released', True, eta, single = True):
coming_soon = True
if pp.get('dvd') and fireEvent('searcher.could_be_released', False, eta, single = True):
coming_soon = True
if coming_soon:
temp = movie.to_dict({
'profile': {'types': {}},
'releases': {'files':{}, 'info': {}},
'library': {'titles': {}, 'files':{}},
'files': {},
})
# Don't list older movies
if ((not params.get('late') and (not eta.get('dvd') or (eta.get('dvd') and eta.get('dvd') > (now - 2419200)))) or \
(params.get('late') and eta.get('dvd') and eta.get('dvd') < (now - 2419200))):
movies.append(temp)
if len(movies) >= limit:
break
return jsonified({
'success': True,
'empty': len(movies) == 0,
'movies': movies,
})
getLateView = getSoonView

View File

@@ -71,7 +71,7 @@ class FileManager(Plugin):
db = get_session()
for root, dirs, walk_files in os.walk(Env.get('cache_dir')):
for filename in walk_files:
if root == python_cache: continue
if root == python_cache or 'minified' in filename: continue
file_path = os.path.join(root, filename)
f = db.query(File).filter(File.path == toUnicode(file_path)).first()
if not f:

View File

@@ -4,6 +4,7 @@ var File = new Class({
var self = this;
if(!file){
self.empty = true;
self.el = new Element('div');
return
}

View File

@@ -38,7 +38,7 @@ class LibraryPlugin(Plugin):
title = LibraryTitle(
title = toUnicode(attrs.get('title')),
simple_title = self.simplifyTitle(attrs.get('title'))
simple_title = self.simplifyTitle(attrs.get('title')),
)
l.titles.append(title)
@@ -96,6 +96,7 @@ class LibraryPlugin(Plugin):
titles = info.get('titles', [])
log.debug('Adding titles: %s', titles)
counter = 0
for title in titles:
if not title:
continue
@@ -103,9 +104,10 @@ class LibraryPlugin(Plugin):
t = LibraryTitle(
title = title,
simple_title = self.simplifyTitle(title),
default = title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == u'' and toUnicode(titles[0]) == title)
default = (len(default_title) == 0 and counter == 0) or len(titles) == 1 or title.lower() == toUnicode(default_title.lower()) or (toUnicode(default_title) == u'' and toUnicode(titles[0]) == title)
)
library.titles.append(t)
counter += 1
db.commit()

View File

@@ -2,11 +2,13 @@ from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent, fireEventAsync
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.request import jsonified, getParam
from couchpotato.core.helpers.variable import getTitle, splitString
from couchpotato.core.helpers.variable import splitString, getTitle
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
import ctypes
import os
import sys
import time
import traceback
@@ -22,6 +24,7 @@ class Manage(Plugin):
fireEvent('scheduler.interval', identifier = 'manage.update_library', handle = self.updateLibrary, hours = 2)
addEvent('manage.update', self.updateLibrary)
addEvent('manage.diskspace', self.getDiskSpace)
# Add files after renaming
def after_rename(message = None, group = {}):
@@ -192,13 +195,14 @@ class Manage(Plugin):
self.in_progress[folder]['to_go'] = self.in_progress[folder]['to_go'] - 1
total = self.in_progress[folder]['total']
movie_dict = fireEvent('movie.get', identifier, single = True)
fireEvent('notify.frontend', type = 'movie.added', data = movie_dict, message = None if total > 5 else 'Added "%s" to manage.' % getTitle(movie_dict['library']))
return afterUpdate
def directories(self):
try:
if self.conf('library', '').strip():
if self.conf('library', default = '').strip():
return splitString(self.conf('library', default = ''), '::')
except:
pass
@@ -214,3 +218,31 @@ class Manage(Plugin):
for group in groups.itervalues():
if group['library'] and group['library'].get('identifier'):
fireEvent('release.add', group = group)
def getDiskSpace(self):
free_space = {}
for folder in self.directories():
size = None
if os.path.isdir(folder):
if os.name == 'nt':
_, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
ctypes.c_ulonglong()
if sys.version_info >= (3,) or isinstance(folder, unicode):
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW #@UndefinedVariable
else:
fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA #@UndefinedVariable
ret = fun(folder, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
if ret == 0:
raise ctypes.WinError()
used = total.value - free.value
return [total.value, used, free.value]
else:
s = os.statvfs(folder)
size = [s.f_blocks * s.f_frsize / (1024 * 1024), (s.f_bavail * s.f_frsize) / (1024 * 1024)]
free_space[folder] = size
return free_space

View File

@@ -6,11 +6,13 @@ from couchpotato.core.helpers.request import getParams, jsonified, getParam
from couchpotato.core.helpers.variable import getImdb, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Library, LibraryTitle, Movie
from couchpotato.core.settings.model import Library, LibraryTitle, Movie, \
Release
from couchpotato.environment import Env
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql.expression import or_, asc, not_
from sqlalchemy.sql.expression import or_, asc, not_, desc
from string import ascii_lowercase
import time
log = CPLog(__name__)
@@ -41,6 +43,7 @@ class MoviePlugin(Plugin):
'desc': 'List movies in wanted list',
'params': {
'status': {'type': 'array or csv', 'desc': 'Filter movie by status. Example:"active,done"'},
'release_status': {'type': 'array or csv', 'desc': 'Filter movie by status of its releases. Example:"snatched,available"'},
'limit_offset': {'desc': 'Limit and offset the movie list. Examples: "50" or "50,30"'},
'starts_with': {'desc': 'Starts with these characters. Example: "a" returns all movies starting with the letter "a"'},
'search': {'desc': 'Search movie title'},
@@ -94,6 +97,34 @@ class MoviePlugin(Plugin):
addEvent('movie.list', self.list)
addEvent('movie.restatus', self.restatus)
# Clean releases that didn't have activity in the last week
addEvent('app.load', self.cleanReleases)
fireEvent('schedule.interval', 'movie.clean_releases', self.cleanReleases, hours = 4)
def cleanReleases(self):
log.debug('Removing releases from dashboard')
now = time.time()
week = 262080
done_status = fireEvent('status.get', 'done', single = True)
available_status = fireEvent('status.get', 'available', single = True)
snatched_status = fireEvent('status.get', 'snatched', single = True)
db = get_session()
# get movies last_edit more than a week ago
movies = db.query(Movie) \
.filter(Movie.status_id == done_status.get('id'), Movie.last_edit < (now - week)) \
.all()
#
for movie in movies:
for rel in movie.releases:
if rel.status_id in [available_status.get('id'), snatched_status.get('id')]:
fireEvent('release.delete', id = rel.id, single = True)
def getView(self):
movie_id = getParam('id')
@@ -121,20 +152,29 @@ class MoviePlugin(Plugin):
return results
def list(self, status = ['active'], limit_offset = None, starts_with = None, search = None):
def list(self, status = None, release_status = None, limit_offset = None, starts_with = None, search = None, order = None):
db = get_session()
# Make a list from string
if not isinstance(status, (list, tuple)):
if status and not isinstance(status, (list, tuple)):
status = [status]
if release_status and not isinstance(release_status, (list, tuple)):
release_status = [release_status]
q = db.query(Movie) \
.join(Movie.library, Library.titles) \
.outerjoin(Movie.releases, Movie.library, Library.titles) \
.filter(LibraryTitle.default == True) \
.filter(or_(*[Movie.status.has(identifier = s) for s in status])) \
.group_by(Movie.id)
# Filter on movie status
if status and len(status) > 0:
q = q.filter(or_(*[Movie.status.has(identifier = s) for s in status]))
# Filter on release status
if release_status and len(release_status) > 0:
q = q.filter(or_(*[Release.status.has(identifier = s) for s in release_status]))
total_count = q.count()
filter_or = []
@@ -154,7 +194,10 @@ class MoviePlugin(Plugin):
if filter_or:
q = q.filter(or_(*filter_or))
q = q.order_by(asc(LibraryTitle.simple_title))
if order == 'release_order':
q = q.order_by(desc(Release.last_edit))
else:
q = q.order_by(asc(LibraryTitle.simple_title))
q = q.subquery()
q2 = db.query(Movie).join((q, q.c.id == Movie.id)) \
@@ -166,7 +209,7 @@ class MoviePlugin(Plugin):
.options(joinedload_all('files'))
if limit_offset:
splt = splitString(limit_offset)
splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
limit = splt[0]
offset = 0 if len(splt) is 1 else splt[1]
q2 = q2.limit(limit).offset(offset)
@@ -185,7 +228,7 @@ class MoviePlugin(Plugin):
#db.close()
return (total_count, movies)
def availableChars(self, status = ['active']):
def availableChars(self, status = None, release_status = None):
chars = ''
@@ -194,11 +237,20 @@ class MoviePlugin(Plugin):
# Make a list from string
if not isinstance(status, (list, tuple)):
status = [status]
if release_status and not isinstance(release_status, (list, tuple)):
release_status = [release_status]
q = db.query(Movie) \
.join(Movie.library, Library.titles, Movie.status) \
.options(joinedload_all('library.titles')) \
.filter(or_(*[Movie.status.has(identifier = s) for s in status]))
.outerjoin(Movie.releases, Movie.library, Library.titles, Movie.status) \
.options(joinedload_all('library.titles'))
# Filter on movie status
if status and len(status) > 0:
q = q.filter(or_(*[Movie.status.has(identifier = s) for s in status]))
# Filter on release status
if release_status and len(release_status) > 0:
q = q.filter(or_(*[Release.status.has(identifier = s) for s in release_status]))
results = q.all()
@@ -206,20 +258,29 @@ class MoviePlugin(Plugin):
char = movie.library.titles[0].simple_title[0]
char = char if char in ascii_lowercase else '#'
if char not in chars:
chars += char
chars += str(char)
#db.close()
return chars
return ''.join(sorted(chars, key = str.lower))
def listView(self):
params = getParams()
status = params.get('status', ['active'])
status = splitString(params.get('status', None))
release_status = splitString(params.get('release_status', None))
limit_offset = params.get('limit_offset', None)
starts_with = params.get('starts_with', None)
search = params.get('search', None)
order = params.get('order', None)
total_movies, movies = self.list(status = status, limit_offset = limit_offset, starts_with = starts_with, search = search)
total_movies, movies = self.list(
status = status,
release_status = release_status,
limit_offset = limit_offset,
starts_with = starts_with,
search = search,
order = order
)
return jsonified({
'success': True,
@@ -231,8 +292,9 @@ class MoviePlugin(Plugin):
def charView(self):
params = getParams()
status = params.get('status', ['active'])
chars = self.availableChars(status)
status = splitString(params.get('status', None))
release_status = splitString(params.get('release_status', None))
chars = self.availableChars(status, release_status)
return jsonified({
'success': True,
@@ -283,7 +345,7 @@ class MoviePlugin(Plugin):
'movies': movies,
})
def add(self, params = {}, force_readd = True, search_after = True, update_library = False):
def add(self, params = {}, force_readd = True, search_after = True, update_library = False, status_id = None):
if not params.get('identifier'):
msg = 'Can\'t add movie without imdb identifier.'
@@ -292,9 +354,8 @@ class MoviePlugin(Plugin):
return False
else:
try:
url = 'http://thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s' % params.get('identifier')
tvdb = self.getCache('thetvdb.%s' % params.get('identifier'), url = url, show_error = False)
if tvdb and 'series' in tvdb.lower():
is_movie = fireEvent('movie.is_movie', identifier = params.get('identifier'), single = True)
if not is_movie:
msg = 'Can\'t add movie, seems to be a TV show.'
log.error(msg)
fireEvent('notify.frontend', type = 'movie.is_tvshow', message = msg)
@@ -307,7 +368,9 @@ class MoviePlugin(Plugin):
# Status
status_active = fireEvent('status.add', 'active', single = True)
status_snatched = fireEvent('status.add', 'snatched', single = True)
snatched_status = fireEvent('status.add', 'snatched', single = True)
ignored_status = fireEvent('status.add', 'ignored', single = True)
downloaded_status = fireEvent('status.add', 'downloaded', single = True)
default_profile = fireEvent('profile.default', single = True)
@@ -319,7 +382,7 @@ class MoviePlugin(Plugin):
m = Movie(
library_id = library.get('id'),
profile_id = params.get('profile_id', default_profile.get('id')),
status_id = status_active.get('id'),
status_id = status_id if status_id else status_active.get('id'),
)
db.add(m)
db.commit()
@@ -331,10 +394,14 @@ class MoviePlugin(Plugin):
fireEventAsync('library.update', params.get('identifier'), default_title = params.get('title', ''), on_complete = onComplete)
search_after = False
elif force_readd:
# Clean snatched history
for release in m.releases:
if release.status_id == status_snatched.get('id'):
release.delete()
if release.status_id in [downloaded_status.get('id'), snatched_status.get('id')]:
if params.get('ignore_previous', False):
release.status_id = ignored_status.get('id')
else:
fireEvent('release.delete', release.id, single = True)
m.profile_id = params.get('profile_id', default_profile.get('id'))
else:
@@ -342,7 +409,8 @@ class MoviePlugin(Plugin):
added = False
if force_readd:
m.status_id = status_active.get('id')
m.status_id = status_id if status_id else status_active.get('id')
m.last_edit = int(time.time())
do_search = True
db.commit()
@@ -448,7 +516,7 @@ class MoviePlugin(Plugin):
total_deleted = 0
new_movie_status = None
for release in movie.releases:
if delete_from == 'wanted':
if delete_from in ['wanted', 'snatched']:
if release.status_id != done_status.get('id'):
db.delete(release)
total_deleted += 1

View File

@@ -5,6 +5,7 @@ var MovieList = new Class({
options: {
navigation: true,
limit: 50,
load_more: true,
menu: [],
add_new: false
},
@@ -12,25 +13,37 @@ var MovieList = new Class({
movies: [],
movies_added: {},
letters: {},
filter: {
'startswith': null,
'search': null
},
filter: null,
initialize: function(options){
var self = this;
self.setOptions(options);
self.offset = 0;
self.filter = self.options.filter || {
'startswith': null,
'search': null
}
self.el = new Element('div.movies').adopt(
self.title = self.options.title ? new Element('h2', {
'text': self.options.title,
'styles': {'display': 'none'}
}) : null,
self.description = self.options.description ? new Element('div.description', {
'html': self.options.description,
'styles': {'display': 'none'}
}) : null,
self.movie_list = new Element('div'),
self.load_more = new Element('a.load_more', {
self.load_more = self.options.load_more ? new Element('a.load_more', {
'events': {
'click': self.loadMore.bind(self)
}
})
}) : null
);
self.changeView(self.getSavedView() || self.options.view || 'details');
self.getMovies();
App.addEvent('movie.added', self.movieAdded.bind(self))
@@ -70,22 +83,14 @@ var MovieList = new Class({
if(self.options.navigation)
self.createNavigation();
self.movie_list.addEvents({
'mouseenter:relay(.movie)': function(e, el){
el.addClass('hover');
},
'mouseleave:relay(.movie)': function(e, el){
el.removeClass('hover');
}
});
self.scrollspy = new ScrollSpy({
min: function(){
var c = self.load_more.getCoordinates()
return c.top - window.document.getSize().y - 300
},
onEnter: self.loadMore.bind(self)
});
if(self.options.load_more)
self.scrollspy = new ScrollSpy({
min: function(){
var c = self.load_more.getCoordinates()
return c.top - window.document.getSize().y - 300
},
onEnter: self.loadMore.bind(self)
});
self.created = true;
},
@@ -96,7 +101,7 @@ var MovieList = new Class({
if(!self.created) self.create();
// do scrollspy
if(movies.length < self.options.limit){
if(movies.length < self.options.limit && self.scrollspy){
self.load_more.hide();
self.scrollspy.stop();
}
@@ -121,18 +126,14 @@ var MovieList = new Class({
createMovie: function(movie, inject_at){
var self = this;
// Attach proper actions
var a = self.options.actions,
status = Status.get(movie.status_id);
var actions = a[status.identifier.capitalize()] || a.Wanted || {};
var m = new Movie(self, {
'actions': actions,
'actions': self.options.actions,
'view': self.current_view,
'onSelect': self.calculateSelected.bind(self)
}, movie);
$(m).inject(self.movie_list, inject_at || 'bottom');
m.fireEvent('injected');
self.movies.include(m)
@@ -216,7 +217,7 @@ var MovieList = new Class({
});
// Actions
['mass_edit', 'thumbs', 'list'].each(function(view){
['mass_edit', 'details', 'list'].each(function(view){
self.navigation_actions.adopt(
new Element('li.'+view+(self.current_view == view ? '.active' : '')+'[data-view='+view+']', {
'events': {
@@ -398,11 +399,16 @@ var MovieList = new Class({
var self = this;
self.movies = []
self.calculateSelected()
self.navigation_alpha.getElements('.active').removeClass('active')
if(self.mass_edit_select)
self.calculateSelected()
if(self.navigation_alpha)
self.navigation_alpha.getElements('.active').removeClass('active')
self.offset = 0;
self.load_more.show();
self.scrollspy.start();
if(self.scrollspy){
self.load_more.show();
self.scrollspy.start();
}
},
activateLetter: function(letter){
@@ -418,10 +424,6 @@ var MovieList = new Class({
changeView: function(new_view){
var self = this;
self.movies.each(function(movie){
movie.changeView(new_view)
});
self.el
.removeClass(self.current_view+'_list')
.addClass(new_view+'_list')
@@ -432,7 +434,7 @@ var MovieList = new Class({
getSavedView: function(){
var self = this;
return Cookie.read(self.options.identifier+'_view') || 'thumbs';
return Cookie.read(self.options.identifier+'_view') || 'details';
},
search: function(){
@@ -468,9 +470,12 @@ var MovieList = new Class({
getMovies: function(){
var self = this;
if(self.scrollspy) self.scrollspy.stop();
self.load_more.set('text', 'loading...');
Api.request('movie.list', {
if(self.scrollspy){
self.scrollspy.stop();
self.load_more.set('text', 'loading...');
}
Api.request(self.options.api_call || 'movie.list', {
'data': Object.merge({
'status': self.options.status,
'limit_offset': self.options.limit + ',' + self.offset
@@ -478,8 +483,10 @@ var MovieList = new Class({
'onComplete': function(json){
self.store(json.movies);
self.addMovies(json.movies, json.total);
self.load_more.set('text', 'load more movies');
if(self.scrollspy) self.scrollspy.start();
if(self.scrollspy) {
self.load_more.set('text', 'load more movies');
self.scrollspy.start();
}
self.checkIfEmpty()
}
@@ -502,7 +509,13 @@ var MovieList = new Class({
checkIfEmpty: function(){
var self = this;
var is_empty = self.movies.length == 0 && self.total_movies == 0;
var is_empty = self.movies.length == 0 && (self.total_movies == 0 || self.total_movies === undefined);
if(self.title)
self.title[is_empty ? 'hide' : 'show']()
if(self.description)
self.description[is_empty ? 'hide' : 'show']()
if(is_empty && self.options.on_empty_element){
self.el.grab(self.options.on_empty_element);

View File

@@ -0,0 +1,699 @@
// Base class for all per-movie action buttons (IMDB, Release, Trailer, ...).
// Subclasses override create() to build self.el; when an element is built,
// the shared class_name is added so every action renders as an icon.
var MovieAction = new Class({

	class_name: 'action icon',

	initialize: function(movie){
		var self = this;
		self.movie = movie;

		self.create();
		if(self.el)
			self.el.addClass(self.class_name)
	},

	// Subclasses build their element (self.el) here; no-op by default.
	create: function(){},

	// Toggle the 'disable' CSS class on the action element.
	disable: function(){
		this.el.addClass('disable')
	},

	enable: function(){
		this.el.removeClass('disable')
	},

	// Overlay mask injected at the top of the movie element; starts hidden
	// (faded out) and is shown by subclasses when needed.
	createMask: function(){
		var self = this;
		self.mask = new Element('div.mask', {
			'styles': {
				'z-index': '1'
			}
		}).inject(self.movie, 'top').fade('hide');
		//self.positionMask();
	},

	// NOTE(review): deliberately disabled — the bare `return` makes the
	// sizing/positioning code below unreachable; kept for reference only.
	positionMask: function(){
		var self = this,
			movie = $(self.movie),
			s = movie.getSize()

		return;

		return self.mask.setStyles({
			'width': s.x,
			'height': s.y
		}).position({
			'relativeTo': movie
		})
	},

	// Lets $(action) resolve to the underlying element (MooTools protocol).
	toElement: function(){
		return this.el || null
	}

});
// Namespace holding all concrete movie actions (MA.IMDB, MA.Release, ...).
var MA = {};
// Action linking straight to the movie's IMDB page; disabled when the
// movie has no IMDB identifier.
MA.IMDB = new Class({

	Extends: MovieAction,
	id: null,

	create: function(){
		var action = this,
			imdb_id = action.movie.get('identifier');

		action.id = imdb_id;
		action.el = new Element('a.imdb', {
			'title': 'Go to the IMDB page of ' + action.movie.getTitle(),
			'href': 'http://www.imdb.com/title/' + imdb_id + '/',
			'target': '_blank'
		});

		// No identifier means nothing to link to.
		if(!imdb_id)
			action.disable();
	}

});
// Action that lists a movie's releases in a table and lets the user
// download, ignore, or retry them ("try next"/"try same" buttons).
MA.Release = new Class({

	Extends: MovieAction,

	// Build the download icon. Hidden when the movie has no releases;
	// auto-opens the release list for the highest-scored release that is
	// 'available', or 'ignored'/'failed' once a next_release exists.
	create: function(){
		var self = this;

		self.el = new Element('a.releases.icon.download', {
			'title': 'Show the releases that are available for ' + self.movie.getTitle(),
			'events': {
				'click': self.show.bind(self)
			}
		});

		if(self.movie.data.releases.length == 0){
			self.el.hide()
		}
		else {

			var buttons_done = false;

			self.movie.data.releases.sortBy('-info.score').each(function(release){
				if(buttons_done) return;

				var status = Status.get(release.status_id);

				// show() below populates self.next_release, so iterations
				// after the first open can match the first branch too.
				if((self.next_release && (status.identifier == 'ignored' || status.identifier == 'failed')) || (!self.next_release && status.identifier == 'available')){
					self.hide_on_click = false;
					self.show();
					buttons_done = true;
				}

			});

		}

	},

	// Build (once, lazily) the release table and slide it into view.
	// Also records self.last_release (last snatched/failed/ignored) and
	// self.next_release (best still-available) for the retry buttons.
	show: function(e){
		var self = this;
		if(e)
			(e).preventDefault();

		if(!self.options_container){
			self.options_container = new Element('div.options').adopt(
				self.release_container = new Element('div.releases.table').adopt(
					self.trynext_container = new Element('div.buttons.try_container')
				)
			).inject(self.movie, 'top');

			// Header
			new Element('div.item.head').adopt(
				new Element('span.name', {'text': 'Release name'}),
				new Element('span.status', {'text': 'Status'}),
				new Element('span.quality', {'text': 'Quality'}),
				new Element('span.size', {'text': 'Size'}),
				new Element('span.age', {'text': 'Age'}),
				new Element('span.score', {'text': 'Score'}),
				new Element('span.provider', {'text': 'Provider'})
			).inject(self.release_container)

			self.movie.data.releases.sortBy('-info.score').each(function(release){

				var status = Status.get(release.status_id),
					quality = Quality.getProfile(release.quality_id) || {},
					info = release.info,
					provider = self.get(release, 'provider') + (release.info['provider_extra'] ? self.get(release, 'provider_extra') : '');
				release.status = status;

				// Prefer the actual on-disk movie file name over the release
				// name when release files are known.
				var release_name = self.get(release, 'name');
				if(release.files && release.files.length > 0){
					try {
						var movie_file = release.files.filter(function(file){
							var type = File.Type.get(file.type_id);
							return type && type.identifier == 'movie'
						}).pick();
						release_name = movie_file.path.split(Api.getOption('path_sep')).getLast();
					}
					catch(e){}
				}

				// Create release row with download/ignore controls.
				new Element('div', {
					'class': 'item '+status.identifier,
					'id': 'release_'+release.id
				}).adopt(
					new Element('span.name', {'text': release_name, 'title': release_name}),
					new Element('span.status', {'text': status.identifier, 'class': 'release_status '+status.identifier}),
					new Element('span.quality', {'text': quality.get('label') || 'n/a'}),
					new Element('span.size', {'text': release.info['size'] ? Math.floor(self.get(release, 'size')) : 'n/a'}),
					new Element('span.age', {'text': self.get(release, 'age')}),
					new Element('span.score', {'text': self.get(release, 'score')}),
					new Element('span.provider', { 'text': provider, 'title': provider }),
					release.info['detail_url'] ? new Element('a.info.icon', {
						'href': release.info['detail_url'],
						'target': '_blank'
					}) : null,
					new Element('a.download.icon', {
						'events': {
							'click': function(e){
								(e).preventDefault();
								if(!this.hasClass('completed'))
									self.download(release);
							}
						}
					}),
					new Element('a.delete.icon', {
						'events': {
							'click': function(e){
								(e).preventDefault();
								self.ignore(release);
								this.getParent('.item').toggleClass('ignored')
							}
						}
					})
				).inject(self.release_container)

				// Track the release to retry (prefer a 'snatched' one) and
				// the best release still marked 'available'.
				if(status.identifier == 'ignored' || status.identifier == 'failed' || status.identifier == 'snatched'){
					if(!self.last_release || (self.last_release && self.last_release.status.identifier != 'snatched' && status.identifier == 'snatched'))
						self.last_release = release;
				}
				else if(!self.next_release && status.identifier == 'available'){
					self.next_release = release;
				}
			});

			if(self.last_release){
				self.release_container.getElement('#release_'+self.last_release.id).addClass('last_release');
			}

			if(self.next_release){
				self.release_container.getElement('#release_'+self.next_release.id).addClass('next_release');
			}

			// Offer "try the same release" / "try another release" shortcuts.
			if(self.next_release || self.last_release){
				self.trynext_container.adopt(
					new Element('span.or', {
						'text': 'This movie is snatched, if anything went wrong, download'
					}),
					self.last_release ? new Element('a.button.orange', {
						'text': 'the same release again',
						'events': {
							'click': self.trySameRelease.bind(self)
						}
					}) : null,
					self.next_release && self.last_release ? new Element('span.or', {
						'text': ','
					}) : null,
					self.next_release ? [new Element('a.button.green', {
						'text': self.last_release ? 'another release' : 'the best release',
						'events': {
							'click': self.tryNextRelease.bind(self)
						}
					}),
					new Element('span.or', {
						'text': 'or pick one below'
					})] : null
				)
			}
		}

		self.movie.slide('in', self.options_container);
	},

	// Read a field from release.info with 'n/a' fallback.
	get: function(release, type){
		return release.info[type] || 'n/a'
	},

	// Ask the backend to download a release; icon shows spinner/result.
	download: function(release){
		var self = this;

		var release_el = self.release_container.getElement('#release_'+release.id),
			icon = release_el.getElement('.download.icon');

		icon.addClass('spinner');

		Api.request('release.download', {
			'data': {
				'id': release.id
			},
			'onComplete': function(json){
				icon.removeClass('spinner')
				if(json.success)
					icon.addClass('completed');
				else
					icon.addClass('attention').set('title', 'Something went wrong when downloading, please check logs.');
			}
		});
	},

	// Mark a release ignored on the backend (fire-and-forget).
	ignore: function(release){
		var self = this;

		Api.request('release.ignore', {
			'data': {
				'id': release.id
			}
		})

	},

	// Ignore the previously snatched release and grab the next best one.
	tryNextRelease: function(movie_id){
		var self = this;

		if(self.last_release)
			self.ignore(self.last_release);

		if(self.next_release)
			self.download(self.next_release);

	},

	// Re-download the previously snatched release.
	trySameRelease: function(movie_id){
		var self = this;

		if(self.last_release)
			self.download(self.last_release);

	}

});
// Action that searches YouTube for the movie's trailer and plays it inline
// using the YouTube iframe player (YT.Player).
MA.Trailer = new Class({

	Extends: MovieAction,
	id: null,

	create: function(){
		var self = this;

		self.el = new Element('a.trailer', {
			'title': 'Watch the trailer of ' + self.movie.getTitle(),
			'events': {
				'click': self.watch.bind(self)
			}
		});

	},

	// Look up "<title> <year> trailer" on YouTube (JSONP) and embed the
	// first hit in a 16:9 container injected above the movie element.
	watch: function(offset){
		var self = this;

		var data_url = 'http://gdata.youtube.com/feeds/videos?vq="{title}" {year} trailer&max-results=1&alt=json-in-script&orderby=relevance&sortorder=descending&format=5&fmt=18'
		var url = data_url.substitute({
				'title': encodeURI(self.movie.getTitle()),
				'year': self.movie.get('year'),
				'offset': offset || 1
			}),
			size = $(self.movie).getSize(),
			height = (size.x/16)*9,       // keep a 16:9 aspect ratio
			id = 'trailer-'+randomString();

		self.player_container = new Element('div[id='+id+']');
		self.container = new Element('div.hide.trailer_container')
			.adopt(self.player_container)
			.inject($(self.movie), 'top');

		self.container.setStyle('height', 0);
		self.container.removeClass('hide');

		self.close_button = new Element('a.hide.hide_trailer', {
			'text': 'Hide trailer',
			'events': {
				'click': self.stop.bind(self)
			}
		}).inject(self.movie);

		self.container.setStyle('height', height);
		$(self.movie).setStyle('height', height);

		new Request.JSONP({
			'url': url,
			'onComplete': function(json){
				var video_url = json.feed.entry[0].id.$t.split('/'),
					video_id = video_url[video_url.length-1];

				self.player = new YT.Player(id, {
					'height': height,
					'width': size.x,
					'videoId': video_id,
					'playerVars': {
						'autoplay': 1,
						'showsearch': 0,
						'wmode': 'transparent',
						'iv_load_policy': 3
					}
				});

				self.close_button.removeClass('hide');

				// Bump playback quality to 720p once the player is actually
				// playing (1) or paused (2); only do it once.
				var quality_set = false;
				var change_quality = function(state){
					// FIX: was `state.data == 1 || state.data || 2`, which is
					// always truthy and made the state check a no-op.
					if(!quality_set && (state.data == 1 || state.data == 2)){
						try {
							self.player.setPlaybackQuality('hd720');
							quality_set = true;
						}
						catch(e){

						}

					}
				}
				self.player.addEventListener('onStateChange', change_quality);

			}
		}).send()

	},

	// Stop playback, collapse the container and clean up after the fade.
	stop: function(){
		var self = this;

		self.player.stopVideo();
		self.container.addClass('hide');
		self.close_button.addClass('hide');
		$(self.movie).setStyle('height', null);

		setTimeout(function(){
			self.container.destroy()
			self.close_button.destroy();
		}, 1800)
	}

});
// Action that opens an inline form to change the movie's default title
// and quality profile, then saves via the 'movie.edit' API.
MA.Edit = new Class({

	Extends: MovieAction,

	create: function(){
		var self = this;

		self.el = new Element('a.edit', {
			'title': 'Change movie information, like title and quality.',
			'events': {
				'click': self.editMovie.bind(self)
			}
		});

	},

	// Build (once, lazily) the title/profile selects and slide the form in.
	editMovie: function(e){
		var self = this;
		(e).preventDefault();

		if(!self.options_container){
			self.options_container = new Element('div.options').adopt(
				new Element('div.form').adopt(
					self.title_select = new Element('select', {
						'name': 'title'
					}),
					self.profile_select = new Element('select', {
						'name': 'profile'
					}),
					new Element('a.button.edit', {
						'text': 'Save & Search',
						'events': {
							'click': self.save.bind(self)
						}
					})
				)
			).inject(self.movie, 'top');

			// Populate with all alternative titles; pre-select the default.
			Array.each(self.movie.data.library.titles, function(alt){
				new Element('option', {
					'text': alt.title
				}).inject(self.title_select);

				if(alt['default'])
					self.title_select.set('value', alt.title);
			});

			// Populate with active quality profiles; pre-select current one.
			// Profiles may be raw objects or wrapped (hence the .data checks).
			Quality.getActiveProfiles().each(function(profile){

				var profile_id = profile.id ? profile.id : profile.data.id;

				new Element('option', {
					'value': profile_id,
					'text': profile.label ? profile.label : profile.data.label
				}).inject(self.profile_select);

				if(self.movie.profile && self.movie.profile.data && self.movie.profile.data.id == profile_id)
					self.profile_select.set('value', profile_id);

			});

		}

		self.movie.slide('in', self.options_container);
	},

	// Persist the selection, update the visible title/quality and slide out.
	save: function(e){
		(e).preventDefault();
		var self = this;

		Api.request('movie.edit', {
			'data': {
				'id': self.movie.get('id'),
				'default_title': self.title_select.get('value'),
				'profile_id': self.profile_select.get('value')
			},
			'useSpinner': true,
			'spinnerTarget': $(self.movie),
			'onComplete': function(){
				self.movie.quality.set('text', self.profile_select.getSelected()[0].get('text'));
				self.movie.title.set('text', self.title_select.getSelected()[0].get('text'));
			}
		});

		self.movie.slide('out');
	}

})
// Action that triggers a backend refresh of the movie's info plus a
// forced search.
MA.Refresh = new Class({

	Extends: MovieAction,

	create: function(){
		var action = this;

		action.el = new Element('a.refresh', {
			'title': 'Refresh the movie info and do a forced search',
			'events': {
				'click': action.doRefresh.bind(action)
			}
		});
	},

	// Fire-and-forget call to the 'movie.refresh' API endpoint.
	doRefresh: function(e){
		(e).preventDefault();

		Api.request('movie.refresh', {
			'data': {
				'id': this.movie.get('id')
			}
		});
	}

});
// Action that re-adds a finished (or previously snatched/downloaded) movie,
// marking earlier snatches as ignored so a fresh search can run.
MA.Readd = new Class({

	Extends: MovieAction,

	create: function(){
		var self = this;

		var movie_done = Status.get(self.movie.data.status_id).identifier == 'done';

		// Only count snatched/downloaded/done releases when the movie itself
		// isn't done; `snatched` stays undefined otherwise (short-circuited
		// by movie_done in the guard below).
		if(!movie_done)
			var snatched = self.movie.data.releases.filter(function(release){
				return release.status && (release.status.identifier == 'snatched' || release.status.identifier == 'downloaded' || release.status.identifier == 'done');
			}).length;

		// Note: no element (self.el) is created unless re-adding makes sense,
		// so the base class skips this action entirely in that case.
		if(movie_done || snatched && snatched > 0)
			self.el = new Element('a.readd', {
				'title': 'Readd the movie and mark all previous snatched/downloaded as ignored',
				'events': {
					'click': self.doReadd.bind(self)
				}
			});

	},

	// Re-add via 'movie.add' with ignore_previous so old snatches are ignored.
	doReadd: function(e){
		var self = this;
		(e).preventDefault();

		Api.request('movie.add', {
			'data': {
				'identifier': self.movie.get('identifier'),
				'ignore_previous': 1
			}
		});
	}

});
// Action that removes the movie from the current list, with an inline
// confirm step and an animated collapse on success.
MA.Delete = new Class({

	Extends: MovieAction,
	Implements: [Chain],

	create: function(){
		var self = this;

		self.el = new Element('a.delete', {
			'title': 'Remove the movie from this CP list',
			'events': {
				'click': self.showConfirm.bind(self)
			}
		});

	},

	// Build (once, lazily) the Cancel / Delete confirmation bar and slide in.
	showConfirm: function(e){
		var self = this;
		(e).preventDefault();

		if(!self.delete_container){
			self.delete_container = new Element('div.buttons.delete_container').adopt(
				new Element('a.cancel', {
					'text': 'Cancel',
					'events': {
						'click': self.hideConfirm.bind(self)
					}
				}),
				new Element('span.or', {
					'text': 'or'
				}),
				new Element('a.button.delete', {
					'text': 'Delete ' + self.movie.title.get('text'),
					'events': {
						'click': self.del.bind(self)
					}
				})
			).inject(self.movie, 'top');
		}

		self.movie.slide('in', self.delete_container);

	},

	hideConfirm: function(e){
		var self = this;
		(e).preventDefault();

		self.movie.slide('out');
	},

	// Delete via the API; delete_from tells the backend which list context
	// (e.g. 'wanted') the removal happens in. On success, tween the movie's
	// height to 0 and destroy it.
	del: function(e){
		(e).preventDefault();
		var self = this;

		var movie = $(self.movie);

		self.chain(
			function(){
				self.callChain();
			},
			function(){

				Api.request('movie.delete', {
					'data': {
						'id': self.movie.get('id'),
						'delete_from': self.movie.list.options.identifier
					},
					'onComplete': function(){
						movie.set('tween', {
							'duration': 300,
							'onComplete': function(){
								self.movie.destroy()
							}
						});
						movie.tween('height', 0);
					}
				});

			}
		);

		self.callChain();

	}

});
// Action that shows a table of every file known for each of the movie's
// releases, with its type and availability flag.
MA.Files = new Class({

	Extends: MovieAction,

	create: function(){
		var self = this;

		self.el = new Element('a.directory', {
			'title': 'Available files',
			'events': {
				'click': self.showFiles.bind(self)
			}
		});

	},

	// Build (once, lazily) the files table and slide it into view.
	showFiles: function(e){
		var self = this;
		(e).preventDefault();

		if(!self.options_container){
			self.options_container = new Element('div.options').adopt(
				self.files_container = new Element('div.files.table')
			).inject(self.movie, 'top');

			// Header
			new Element('div.item.head').adopt(
				new Element('span.name', {'text': 'File'}),
				new Element('span.type', {'text': 'Type'}),
				new Element('span.is_available', {'text': 'Available'})
			).inject(self.files_container)

			Array.each(self.movie.data.releases, function(release){

				var rel = new Element('div.release').inject(self.files_container);

				Array.each(release.files, function(file){
					new Element('div.file.item').adopt(
						new Element('span.name', {'text': file.path}),
						new Element('span.type', {'text': File.Type.get(file.type_id).name}),
						new Element('span.available', {'text': file.available})
					).inject(rel)
				});
			});

		}

		self.movie.slide('in', self.options_container);
	}
	// FIX: removed the trailing comma that followed showFiles — a trailing
	// comma in an object literal is a syntax error in older IE and was
	// inconsistent with every other class in this file.

});

View File

@@ -1,7 +1,33 @@
.movies {
padding: 60px 0 20px;
position: relative;
z-index: 3;
}
.movies h2 {
margin-bottom: 20px;
}
.movies > .description {
position: absolute;
top: 30px;
right: 0;
font-style: italic;
text-shadow: none;
opacity: 0.8;
}
.movies:hover > .description {
opacity: 1;
}
.movies.thumbs_list {
padding: 20px 0 20px;
}
.home .movies {
padding-top: 6px;
}
.movies.mass_edit_list {
padding-top: 90px;
}
@@ -12,33 +38,58 @@
margin: 10px 0;
overflow: hidden;
width: 100%;
height: 180px;
transition: all 0.2s linear;
}
.movies .movie.list_view, .movies .movie.mass_edit_view {
.movies.list_list .movie:not(.details_view),
.movies.mass_edit_list .movie {
height: 32px;
}
.movies.thumbs_list .movie {
width: 153px;
height: 230px;
display: inline-block;
margin: 0 8px 0 0;
}
.movies.thumbs_list .movie:nth-child(6n+6) {
margin: 0;
}
.movies .movie .mask {
position: absolute;
top: 0;
left: 0;
height: 100%;
width: 100%;
}
.movies.list_list .movie:not(.details_view),
.movies.mass_edit_list .movie {
margin: 1px 0;
border-radius: 0;
background: no-repeat;
box-shadow: none;
border-bottom: 1px solid rgba(255,255,255,0.05);
}
.movies .movie.list_view:hover, .movies .movie.mass_edit_view:hover {
background: rgba(255,255,255,0.03);
}
.movies .movie_container {
overflow: hidden;
.movies.list_list .movie:hover:not(.details_view),
.movies.mass_edit_list .movie {
background: rgba(255,255,255,0.03);
}
.movies .data {
padding: 20px;
height: 180px;
height: 100%;
width: 840px;
position: relative;
float: right;
position: absolute;
right: 0;
border-radius: 0;
transition: all 0.2s linear;
transition: all .6s cubic-bezier(0.9,0,0.1,1);
}
.movies .list_view .data, .movies .mass_edit_view .data {
.movies.list_list .movie:not(.details_view) .data,
.movies.mass_edit_list .movie .data {
height: 30px;
padding: 3px 0 3px 10px;
width: 938px;
@@ -46,79 +97,148 @@
border: 0;
background: none;
}
.movies.thumbs_list .data {
left: 0;
width: 100%;
padding: 10px;
height: 100%;
background: none;
transition: none;
}
.movies.thumbs_list .movie.no_thumbnail .data { background-image: linear-gradient(-30deg, rgba(255, 0, 85, .2) 0,rgba(125, 185, 235, .2) 100%);
}
.movies.thumbs_list .movie.no_thumbnail:nth-child(2n+6) .data { background-image: linear-gradient(-20deg, rgba(125, 0, 215, .2) 0, rgba(4, 55, 5, .7) 100%); }
.movies.thumbs_list .movie.no_thumbnail:nth-child(3n+6) .data { background-image: linear-gradient(-30deg, rgba(155, 0, 85, .2) 0,rgba(25, 185, 235, .7) 100%); }
.movies.thumbs_list .movie.no_thumbnail:nth-child(4n+6) .data { background-image: linear-gradient(-30deg, rgba(115, 5, 235, .2) 0, rgba(55, 180, 5, .7) 100%); }
.movies.thumbs_list .movie.no_thumbnail:nth-child(5n+6) .data { background-image: linear-gradient(-30deg, rgba(35, 15, 215, .2) 0, rgba(135, 215, 115, .7) 100%); }
.movies.thumbs_list .movie.no_thumbnail:nth-child(6n+6) .data { background-image: linear-gradient(-30deg, rgba(35, 15, 215, .2) 0, rgba(135, 15, 115, .7) 100%); }
.movies.thumbs_list .movie:hover .data {
background: rgba(0,0,0,0.9);
}
.movies .data.hide_right {
right: -100%;
}
.movies .movie .check {
display: none;
}
.movies.mass_edit_list .movie .check {
float: left;
position: absolute;
left: 0;
top: 0;
display: block;
margin: 7px 0 0 5px;
}
.movies .poster {
float: left;
position: absolute;
left: 0;
width: 120px;
line-height: 0;
overflow: hidden;
height: 180px;
height: 100%;
border-radius: 4px 0 0 4px;
transition: all 0.2s linear;
transition: all .6s cubic-bezier(0.9,0,0.1,1);
}
.movies .list_view .poster, .movies .mass_edit_view .poster {
.movies.list_list .movie:not(.details_view) .poster,
.movies.mass_edit_list .poster {
width: 20px;
height: 30px;
border-radius: 1px 0 0 1px;
}
.movies.mass_edit_list .poster {
display: none;
}
.movies.thumbs_list .poster {
width: 100%;
height: 100%;
}
.movies .poster img, .options .poster img {
.movies .poster img,
.options .poster img {
width: 101%;
height: 101%;
}
.movies .info {
position: relative;
height: 100%;
}
.movies .info .title {
font-size: 30px;
display: inline;
position: absolute;
font-size: 28px;
font-weight: bold;
margin-bottom: 10px;
float: left;
left: 0;
top: 0;
width: 90%;
transition: all 0.2s linear;
}
.movies .list_view .info .title, .movies .mass_edit_view .info .title {
.movies.list_list .movie:not(.details_view) .info .title,
.movies.mass_edit_list .info .title {
font-size: 16px;
font-weight: normal;
text-overflow: ellipsis;
width: auto;
overflow: hidden;
}
.movies.thumbs_list .movie:not(.no_thumbnail) .info {
display: none;
}
.movies.thumbs_list .movie:hover .info {
display: block;
}
.movies.thumbs_list .info .title {
font-size: 21px;
text-shadow: 0 0 10px #000;
word-wrap: break-word;
}
.movies .info .year {
position: absolute;
font-size: 30px;
margin-bottom: 10px;
float: right;
color: #bbb;
width: 10%;
right: 0;
top: 0;
text-align: right;
transition: all 0.2s linear;
}
.movies .list_view .info .year, .movies .mass_edit_view .info .year {
.movies.list_list .movie:not(.details_view) .info .year,
.movies.mass_edit_list .info .year {
font-size: 16px;
width: 6%;
right: 10px;
}
.movies.thumbs_list .info .year {
font-size: 23px;
margin: 0;
bottom: 0;
left: 0;
top: auto;
right: auto;
color: #FFF;
text-shadow: none;
text-shadow: 0 0 6px #000;
}
.movies .info .rating {
font-size: 30px;
margin-bottom: 10px;
color: #444;
float: left;
width: 5%;
padding: 0 0 0 3%;
}
.movies .info .description {
position: absolute;
top: 30px;
clear: both;
height: 80px;
overflow: hidden;
@@ -126,63 +246,82 @@
.movies .data:hover .description {
overflow: auto;
}
.movies .list_view .info .description, .movies .mass_edit_view .info .description {
.movies.list_list .movie:not(.details_view) .info .description,
.movies.mass_edit_list .info .description,
.movies.thumbs_list .info .description {
display: none;
}
.movies .data .quality {
position: absolute;
bottom: 0;
display: block;
min-height: 20px;
vertical-align: mid;
}
.movies .data .quality span {
padding: 2px 3px;
font-weight: bold;
opacity: 0.5;
font-size: 10px;
height: 16px;
line-height: 12px;
vertical-align: middle;
display: inline-block;
text-transform: uppercase;
text-shadow: none;
font-weight: normal;
margin: 0 2px;
border-radius: 2px;
background-color: rgba(255,255,255,0.1);
}
.movies .list_view .data .quality, .movies .mass_edit_view .data .quality {
text-align: right;
float: right;
.movies .status_suggest .data .quality,
.movies.thumbs_list .data .quality {
display: none;
}
.movies .data .quality .available, .movies .data .quality .snatched {
opacity: 1;
box-shadow: 1px 1px 0 rgba(0,0,0,0.2);
cursor: pointer;
}
.movies .data .quality .available { background-color: #578bc3; }
.movies .data .quality .snatched { background-color: #369545; }
.movies .data .quality .done {
background-color: #369545;
opacity: 1;
}
.movies .data .quality .finish {
background-image: url('../images/sprite.png');
background-repeat: no-repeat;
background-position: 0 2px;
padding-left: 14px;
background-size: 14px
}
.movies .data .quality span {
padding: 2px 3px;
font-weight: bold;
opacity: 0.5;
font-size: 10px;
height: 16px;
line-height: 12px;
vertical-align: middle;
display: inline-block;
text-transform: uppercase;
text-shadow: none;
font-weight: normal;
margin: 0 2px;
border-radius: 2px;
background-color: rgba(255,255,255,0.1);
}
.movies.list_list .data .quality,
.movies.mass_edit_list .data .quality {
text-align: right;
right: 0;
margin-right: 50px;
z-index: 1;
}
.movies .data .quality .available,
.movies .data .quality .snatched {
opacity: 1;
box-shadow: 1px 1px 0 rgba(0,0,0,0.2);
cursor: pointer;
}
.movies .data .quality .available { background-color: #578bc3; }
.movies .data .quality .snatched { background-color: #369545; }
.movies .data .quality .done {
background-color: #369545;
opacity: 1;
}
.movies .data .quality .finish {
background-image: url('../images/sprite.png');
background-repeat: no-repeat;
background-position: 0 2px;
padding-left: 14px;
background-size: 14px
}
.movies .data .actions {
position: absolute;
bottom: 20px;
right: 20px;
line-height: 0;
clear: both;
float: right;
margin-top: -25px;
}
.movies.thumbs_list .data .actions {
bottom: 8px;
right: 10px;
}
.movies .data:hover .action { opacity: 0.6; }
.movies .data:hover .action:hover { opacity: 1; }
.movies.mass_edit_list .data .actions {
@@ -199,10 +338,14 @@
opacity: 0;
}
.movies .list_view .data:hover .actions, .movies .mass_edit_view .data:hover .actions {
margin: -34px 2px 0 0;
.movies.list_list .movie:not(.details_view) .data:hover .actions,
.movies.mass_edit_list .data:hover .actions {
margin: 0;
background: #4e5969;
position: relative;
top: 2px;
bottom: 2px;
right: 5px;
z-index: 3;
}
.movies .delete_container {
@@ -284,6 +427,7 @@
.movies .options .table .provider {
width: 120px;
text-overflow: ellipsis;
overflow: hidden;
}
.movies .options .table .name {
width: 350px;
@@ -335,11 +479,11 @@
padding: 3px 10px;
background: #4e5969;
border-radius: 0 0 2px 2px;
transition: all .6s cubic-bezier(0.9,0,0.1,1) .2s;
}
.movies .movie .hide_trailer.hide {
top: -30px;
transition: all .2s cubic-bezier(0.9,0,0.1,1) .2s;
}
.movies .movie .hide_trailer.hide {
top: -30px;
}
.movies .movie .try_container {
padding: 5px 10px;
@@ -380,7 +524,7 @@
.movies .alph_nav {
transition: box-shadow .4s linear;
position: fixed;
z-index: 2;
z-index: 4;
top: 0;
padding: 100px 60px 7px;
width: 1080px;
@@ -409,7 +553,8 @@
text-align: center;
}
.movies .alph_nav .numbers li, .movies .alph_nav .actions li {
.movies .alph_nav .numbers li,
.movies .alph_nav .actions li {
display: inline-block;
vertical-align: top;
width: 20px;
@@ -472,7 +617,7 @@
background-position: 3px -95px;
}
.movies .alph_nav .actions li.thumbs span {
.movies .alph_nav .actions li.details span {
background-position: 3px -74px;
}

View File

@@ -8,7 +8,7 @@ var Movie = new Class({
var self = this;
self.data = data;
self.view = options.view || 'thumbs';
self.view = options.view || 'details';
self.list = list;
self.el = new Element('div.movie.inlay');
@@ -72,7 +72,6 @@ var Movie = new Class({
else if(!self.spinner) {
self.createMask();
self.spinner = createSpinner(self.mask);
self.positionMask();
self.mask.fade('in');
}
},
@@ -81,10 +80,9 @@ var Movie = new Class({
var self = this;
self.mask = new Element('div.mask', {
'styles': {
'z-index': '1'
'z-index': 4
}
}).inject(self.el, 'top').fade('hide');
self.positionMask();
},
positionMask: function(){
@@ -103,7 +101,7 @@ var Movie = new Class({
var self = this;
self.data = notification.data;
self.container.destroy();
self.el.empty();
self.profile = Quality.getProfile(self.data.profile_id) || {};
self.create();
@@ -114,52 +112,50 @@ var Movie = new Class({
create: function(){
var self = this;
var s = Status.get(self.get('status_id'));
self.el.addClass('status_'+s.identifier);
self.el.adopt(
self.container = new Element('div.movie_container').adopt(
self.select_checkbox = new Element('input[type=checkbox].inlay', {
'events': {
'change': function(){
self.fireEvent('select')
}
self.select_checkbox = new Element('input[type=checkbox].inlay', {
'events': {
'change': function(){
self.fireEvent('select')
}
}),
self.thumbnail = File.Select.single('poster', self.data.library.files),
self.data_container = new Element('div.data.inlay.light', {
'tween': {
duration: 400,
transition: 'quint:in:out',
onComplete: self.fireEvent.bind(self, 'slideEnd')
}
}).adopt(
self.info_container = new Element('div.info').adopt(
self.title = new Element('div.title', {
'text': self.getTitle() || 'n/a'
}),
self.year = new Element('div.year', {
'text': self.data.library.year || 'n/a'
}),
self.rating = new Element('div.rating.icon', {
'text': self.data.library.rating
}),
self.description = new Element('div.description', {
'text': self.data.library.plot
}),
self.quality = new Element('div.quality', {
'events': {
'click': function(e){
var releases = self.el.getElement('.actions .releases');
if(releases)
releases.fireEvent('click', [e])
}
}
}),
self.thumbnail = File.Select.single('poster', self.data.library.files),
self.data_container = new Element('div.data.inlay.light').adopt(
self.info_container = new Element('div.info').adopt(
self.title = new Element('div.title', {
'text': self.getTitle() || 'n/a'
}),
self.year = new Element('div.year', {
'text': self.data.library.year || 'n/a'
}),
self.rating = new Element('div.rating.icon', {
'text': self.data.library.rating
}),
self.description = new Element('div.description', {
'text': self.data.library.plot
}),
self.quality = new Element('div.quality', {
'events': {
'click': function(e){
var releases = self.el.getElement('.actions .releases');
if(releases)
releases.fireEvent('click', [e])
}
})
),
self.actions = new Element('div.actions')
)
}
})
),
self.actions = new Element('div.actions')
)
);
self.changeView(self.view);
if(self.thumbnail.empty)
self.el.addClass('no_thumbnail');
//self.changeView(self.view);
self.select_checkbox_class = new Form.Check(self.select_checkbox);
// Add profile
@@ -174,7 +170,7 @@ var Movie = new Class({
});
// Add done releases
// Add releases
self.data.releases.each(function(release){
var q = self.quality.getElement('.q_id'+ release.quality_id),
@@ -241,23 +237,23 @@ var Movie = new Class({
if(direction == 'in'){
self.temp_view = self.view;
self.changeView('thumbs')
self.changeView('details')
self.el.addEvent('outerClick', function(){
self.changeView(self.temp_view)
self.removeView()
self.slide('out')
})
el.show();
self.data_container.tween('right', 0, -840);
self.data_container.addClass('hide_right');
}
else {
self.el.removeEvents('outerClick')
self.addEvent('slideEnd:once', function(){
setTimeout(function(){
self.el.getElements('> :not(.data):not(.poster):not(.movie_container)').hide();
});
}, 600);
self.data_container.tween('right', -840, 0);
self.data_container.removeClass('hide_right');
}
},
@@ -271,6 +267,12 @@ var Movie = new Class({
self.view = new_view;
},
removeView: function(){
var self = this;
self.el.removeClass(self.view+'_view')
},
get: function(attr){
return this.data[attr] || this.data.library[attr]
},
@@ -288,388 +290,4 @@ var Movie = new Class({
return this.el;
}
});
var MovieAction = new Class({
class_name: 'action icon',
initialize: function(movie){
var self = this;
self.movie = movie;
self.create();
if(self.el)
self.el.addClass(self.class_name)
},
create: function(){},
disable: function(){
this.el.addClass('disable')
},
enable: function(){
this.el.removeClass('disable')
},
createMask: function(){
var self = this;
self.mask = new Element('div.mask', {
'styles': {
'z-index': '1'
}
}).inject(self.movie, 'top').fade('hide');
self.positionMask();
},
positionMask: function(){
var self = this,
movie = $(self.movie),
s = movie.getSize()
return;
return self.mask.setStyles({
'width': s.x,
'height': s.y
}).position({
'relativeTo': movie
})
},
toElement: function(){
return this.el || null
}
});
var IMDBAction = new Class({
Extends: MovieAction,
id: null,
create: function(){
var self = this;
self.id = self.movie.get('identifier');
self.el = new Element('a.imdb', {
'title': 'Go to the IMDB page of ' + self.movie.getTitle(),
'href': 'http://www.imdb.com/title/'+self.id+'/',
'target': '_blank'
});
if(!self.id) self.disable();
}
});
var ReleaseAction = new Class({
Extends: MovieAction,
create: function(){
var self = this;
self.el = new Element('a.releases.icon.download', {
'title': 'Show the releases that are available for ' + self.movie.getTitle(),
'events': {
'click': self.show.bind(self)
}
});
var buttons_done = false;
self.movie.data.releases.sortBy('-info.score').each(function(release){
if(buttons_done) return;
var status = Status.get(release.status_id);
if((self.next_release && (status.identifier == 'ignored' || status.identifier == 'failed')) || (!self.next_release && status.identifier == 'available')){
self.hide_on_click = false;
self.show();
buttons_done = true;
}
});
},
show: function(e){
var self = this;
if(e)
(e).preventDefault();
if(!self.options_container){
self.options_container = new Element('div.options').adopt(
self.release_container = new Element('div.releases.table').adopt(
self.trynext_container = new Element('div.buttons.try_container')
)
).inject(self.movie, 'top');
// Header
new Element('div.item.head').adopt(
new Element('span.name', {'text': 'Release name'}),
new Element('span.status', {'text': 'Status'}),
new Element('span.quality', {'text': 'Quality'}),
new Element('span.size', {'text': 'Size'}),
new Element('span.age', {'text': 'Age'}),
new Element('span.score', {'text': 'Score'}),
new Element('span.provider', {'text': 'Provider'})
).inject(self.release_container)
self.movie.data.releases.sortBy('-info.score').each(function(release){
var status = Status.get(release.status_id),
quality = Quality.getProfile(release.quality_id) || {},
info = release.info;
release.status = status;
// Create release
new Element('div', {
'class': 'item '+status.identifier,
'id': 'release_'+release.id
}).adopt(
new Element('span.name', {'text': self.get(release, 'name'), 'title': self.get(release, 'name')}),
new Element('span.status', {'text': status.identifier, 'class': 'release_status '+status.identifier}),
new Element('span.quality', {'text': quality.get('label') || 'n/a'}),
new Element('span.size', {'text': release.info['size'] ? Math.floor(self.get(release, 'size')) : 'n/a'}),
new Element('span.age', {'text': self.get(release, 'age')}),
new Element('span.score', {'text': self.get(release, 'score')}),
new Element('span.provider', {'text': self.get(release, 'provider')}),
release.info['detail_url'] ? new Element('a.info.icon', {
'href': release.info['detail_url'],
'target': '_blank'
}) : null,
new Element('a.download.icon', {
'events': {
'click': function(e){
(e).preventDefault();
if(!this.hasClass('completed'))
self.download(release);
}
}
}),
new Element('a.delete.icon', {
'events': {
'click': function(e){
(e).preventDefault();
self.ignore(release);
this.getParent('.item').toggleClass('ignored')
}
}
})
).inject(self.release_container)
if(status.identifier == 'ignored' || status.identifier == 'failed' || status.identifier == 'snatched'){
if(!self.last_release || (self.last_release && self.last_release.status.identifier != 'snatched' && status.identifier == 'snatched'))
self.last_release = release;
}
else if(!self.next_release && status.identifier == 'available'){
self.next_release = release;
}
});
if(self.last_release){
self.release_container.getElement('#release_'+self.last_release.id).addClass('last_release');
}
if(self.next_release){
self.release_container.getElement('#release_'+self.next_release.id).addClass('next_release');
}
if(self.next_release || self.last_release){
self.trynext_container.adopt(
new Element('span.or', {
'text': 'This movie is snatched, if anything went wrong, download'
}),
self.last_release ? new Element('a.button.orange', {
'text': 'the same release again',
'events': {
'click': self.trySameRelease.bind(self)
}
}) : null,
self.next_release && self.last_release ? new Element('span.or', {
'text': ','
}) : null,
self.next_release ? [new Element('a.button.green', {
'text': self.last_release ? 'another release' : 'the best release',
'events': {
'click': self.tryNextRelease.bind(self)
}
}),
new Element('span.or', {
'text': 'or pick one below'
})] : null
)
}
}
self.movie.slide('in', self.options_container);
},
get: function(release, type){
return release.info[type] || 'n/a'
},
download: function(release){
var self = this;
var release_el = self.release_container.getElement('#release_'+release.id),
icon = release_el.getElement('.download.icon');
icon.addClass('spinner');
Api.request('release.download', {
'data': {
'id': release.id
},
'onComplete': function(json){
icon.removeClass('spinner')
if(json.success)
icon.addClass('completed');
else
icon.addClass('attention').set('title', 'Something went wrong when downloading, please check logs.');
}
});
},
ignore: function(release){
var self = this;
Api.request('release.ignore', {
'data': {
'id': release.id
}
})
},
tryNextRelease: function(movie_id){
var self = this;
if(self.last_release)
self.ignore(self.last_release);
if(self.next_release)
self.download(self.next_release);
},
trySameRelease: function(movie_id){
var self = this;
if(self.last_release)
self.download(self.last_release);
}
});
var TrailerAction = new Class({
Extends: MovieAction,
id: null,
create: function(){
var self = this;
self.el = new Element('a.trailer', {
'title': 'Watch the trailer of ' + self.movie.getTitle(),
'events': {
'click': self.watch.bind(self)
}
});
},
watch: function(offset){
var self = this;
var data_url = 'http://gdata.youtube.com/feeds/videos?vq="{title}" {year} trailer&max-results=1&alt=json-in-script&orderby=relevance&sortorder=descending&format=5&fmt=18'
var url = data_url.substitute({
'title': encodeURI(self.movie.getTitle()),
'year': self.movie.get('year'),
'offset': offset || 1
}),
size = $(self.movie).getSize(),
height = (size.x/16)*9,
id = 'trailer-'+randomString();
self.player_container = new Element('div[id='+id+']');
self.container = new Element('div.hide.trailer_container')
.adopt(self.player_container)
.inject(self.movie.container, 'top');
self.container.setStyle('height', 0);
self.container.removeClass('hide');
self.close_button = new Element('a.hide.hide_trailer', {
'text': 'Hide trailer',
'events': {
'click': self.stop.bind(self)
}
}).inject(self.movie);
setTimeout(function(){
$(self.movie).setStyle('max-height', height);
self.container.setStyle('height', height);
}, 100)
new Request.JSONP({
'url': url,
'onComplete': function(json){
var video_url = json.feed.entry[0].id.$t.split('/'),
video_id = video_url[video_url.length-1];
self.player = new YT.Player(id, {
'height': height,
'width': size.x,
'videoId': video_id,
'playerVars': {
'autoplay': 1,
'showsearch': 0,
'wmode': 'transparent',
'iv_load_policy': 3
}
});
self.close_button.removeClass('hide');
var quality_set = false;
var change_quality = function(state){
if(!quality_set && (state.data == 1 || state.data || 2)){
try {
self.player.setPlaybackQuality('hd720');
quality_set = true;
}
catch(e){
}
}
}
self.player.addEventListener('onStateChange', change_quality);
}
}).send()
},
stop: function(){
var self = this;
self.player.stopVideo();
self.container.addClass('hide');
self.close_button.addClass('hide');
setTimeout(function(){
self.container.destroy()
self.close_button.destroy();
}, 1800)
}
});

View File

@@ -191,6 +191,8 @@
.movie_result .info h2 {
margin: 0;
font-size: 17px;
line-height: 20px;
}
.movie_result .info h2 span {
@@ -200,7 +202,8 @@
.movie_result .info h2 span:before { content: "("; }
.movie_result .info h2 span:after { content: ")"; }
.search_form .mask {
.search_form .mask,
.movie_result .mask {
border-radius: 3px;
position: absolute;
height: 100%;

View File

@@ -366,7 +366,7 @@ Block.Search.Item = new Class({
loadingMask: function(){
var self = this;
self.mask = new Element('span.mask').inject(self.el).fade('hide')
self.mask = new Element('div.mask').inject(self.el).fade('hide')
createSpinner(self.mask)
self.mask.fade('in')

View File

@@ -21,7 +21,7 @@ class QualityPlugin(Plugin):
{'identifier': 'bd50', 'hd': True, 'size': (15000, 60000), 'label': 'BR-Disk', 'alternative': ['bd25'], 'allow': ['1080p'], 'ext':[], 'tags': ['bdmv', 'certificate', ('complete', 'bluray')]},
{'identifier': '1080p', 'hd': True, 'size': (5000, 20000), 'label': '1080P', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts'], 'tags': ['m2ts']},
{'identifier': '720p', 'hd': True, 'size': (3500, 10000), 'label': '720P', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts']},
{'identifier': 'brrip', 'hd': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip'], 'allow': ['720p'], 'ext':['avi']},
{'identifier': 'brrip', 'hd': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip'], 'allow': ['720p', '1080p'], 'ext':['avi']},
{'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': [], 'allow': [], 'ext':['iso', 'img'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts']},
{'identifier': 'dvdrip', 'size': (600, 2400), 'label': 'DVD-Rip', 'width': 720, 'alternative': ['dvdrip'], 'allow': [], 'ext':['avi', 'mpg', 'mpeg'], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]},
{'identifier': 'scr', 'size': (600, 1600), 'label': 'Screener', 'alternative': ['screener', 'dvdscr', 'ppvrip'], 'allow': ['dvdr', 'dvd'], 'ext':['avi', 'mpg', 'mpeg']},

View File

@@ -1,4 +1,5 @@
from couchpotato.core.plugins.renamer.main import Renamer
import os
def start():
return Renamer()
@@ -111,6 +112,15 @@ config = [{
'label': 'Separator',
'description': 'Replace all the spaces with a character. Example: ".", "-" (without quotes). Leave empty to use spaces.',
},
{
'advanced': True,
'name': 'ntfs_permission',
'label': 'NTFS Permission',
'type': 'bool',
'hidden': os.name != 'nt',
'description': 'Set permission of moved files to that of destination folder (Windows NTFS only).',
'default': False,
},
],
}, {
'tab': 'renamer',

View File

@@ -13,6 +13,7 @@ import errno
import os
import re
import shutil
import time
import traceback
log = CPLog(__name__)
@@ -170,15 +171,15 @@ class Renamer(Plugin):
replacements['cd_nr'] = cd if multiple else ''
# Naming
final_folder_name = self.doReplace(folder_name, replacements)
final_file_name = self.doReplace(file_name, replacements)
final_folder_name = self.doReplace(folder_name, replacements).lstrip('. ')
final_file_name = self.doReplace(file_name, replacements).lstrip('. ')
replacements['filename'] = final_file_name[:-(len(getExt(final_file_name)) + 1)]
# Meta naming
if file_type is 'trailer':
final_file_name = self.doReplace(trailer_name, replacements, remove_multiple = True)
final_file_name = self.doReplace(trailer_name, replacements, remove_multiple = True).lstrip('. ')
elif file_type is 'nfo':
final_file_name = self.doReplace(nfo_name, replacements, remove_multiple = True)
final_file_name = self.doReplace(nfo_name, replacements, remove_multiple = True).lstrip('. ')
# Seperator replace
if separator:
@@ -275,6 +276,7 @@ class Renamer(Plugin):
for profile_type in movie.profile.types:
if profile_type.quality_id == group['meta_data']['quality']['id'] and profile_type.finish:
movie.status_id = done_status.get('id')
movie.last_edit = int(time.time())
db.commit()
except Exception, e:
log.error('Failed marking movie finished: %s %s', (e, traceback.format_exc()))
@@ -316,8 +318,10 @@ class Renamer(Plugin):
log.debug('Marking release as downloaded')
try:
release.status_id = downloaded_status.get('id')
release.last_edit = int(time.time())
except Exception, e:
log.error('Failed marking release as finished: %s %s', (e, traceback.format_exc()))
db.commit()
# Remove leftover files
@@ -455,6 +459,8 @@ class Renamer(Plugin):
try:
os.chmod(dest, Env.getPermission('file'))
if os.name == 'nt' and self.conf('ntfs_permission'):
os.popen('icacls "' + dest + '"* /reset /T')
except:
log.error('Failed setting permissions for file: %s, %s', (dest, traceback.format_exc(1)))
@@ -468,7 +474,7 @@ class Renamer(Plugin):
except:
log.error('Couldn\'t move file "%s" to "%s": %s', (old, dest, traceback.format_exc()))
raise Exception
raise
return True
@@ -554,6 +560,7 @@ class Renamer(Plugin):
if rel.movie.status_id == done_status.get('id'):
log.debug('Found a completed movie with a snatched release : %s. Setting release status to ignored...' , default_title)
rel.status_id = ignored_status.get('id')
rel.last_edit = int(time.time())
db.commit()
continue
@@ -564,7 +571,7 @@ class Renamer(Plugin):
found = False
for item in statuses:
if item['name'] == nzbname or getImdb(item['name']) == movie_dict['library']['identifier']:
if item['name'] == nzbname or rel_dict['info']['name'] in item['name'] or getImdb(item['name']) == movie_dict['library']['identifier']:
timeleft = 'N/A' if item['timeleft'] == -1 else item['timeleft']
log.debug('Found %s: %s, time to go: %s', (item['name'], item['status'].upper(), timeleft))
@@ -578,6 +585,7 @@ class Renamer(Plugin):
fireEvent('searcher.try_next_release', movie_id = rel.movie_id)
else:
rel.status_id = failed_status.get('id')
rel.last_edit = int(time.time())
db.commit()
elif item['status'] == 'completed':
log.info('Download of %s completed!', item['name'])

View File

@@ -23,7 +23,7 @@ class Scanner(Plugin):
'media': 314572800, # 300MB
'trailer': 1048576, # 1MB
}
ignored_in_path = ['extracting', '_unpack', '_failed_', '_unknown_', '_exists_', '_failed_remove_', '_failed_rename_', '.appledouble', '.appledb', '.appledesktop', os.path.sep + '._', '.ds_store', 'cp.cpnfo'] #unpacking, smb-crap, hidden files
ignored_in_path = [os.path.sep + 'extracted' + os.path.sep, 'extracting', '_unpack', '_failed_', '_unknown_', '_exists_', '_failed_remove_', '_failed_rename_', '.appledouble', '.appledb', '.appledesktop', os.path.sep + '._', '.ds_store', 'cp.cpnfo'] #unpacking, smb-crap, hidden files
ignore_names = ['extract', 'extracting', 'extracted', 'movie', 'movies', 'film', 'films', 'download', 'downloads', 'video_ts', 'audio_ts', 'bdmv', 'certificate']
extensions = {
'movie': ['mkv', 'wmv', 'avi', 'mpg', 'mpeg', 'mp4', 'm2ts', 'iso', 'img', 'mdf', 'ts', 'm4v'],

View File

@@ -30,6 +30,7 @@ class Searcher(Plugin):
addEvent('searcher.correct_movie', self.correctMovie)
addEvent('searcher.download', self.download)
addEvent('searcher.try_next_release', self.tryNextRelease)
addEvent('searcher.could_be_released', self.couldBeReleased)
addApiView('searcher.try_next', self.tryNextReleaseView, docs = {
'desc': 'Marks the snatched results as ignored and try the next best release',
@@ -156,7 +157,7 @@ class Searcher(Plugin):
ret = False
for quality_type in movie['profile']['types']:
if not self.couldBeReleased(quality_type['quality']['identifier'], release_dates, pre_releases):
if not self.couldBeReleased(quality_type['quality']['identifier'] in pre_releases, release_dates):
log.info('Too early to search for %s, %s', (quality_type['quality']['identifier'], default_title))
continue
@@ -164,7 +165,7 @@ class Searcher(Plugin):
# See if better quality is available
for release in movie['releases']:
if release['quality']['order'] <= quality_type['quality']['order'] and release['status_id'] not in [available_status.get('id'), ignored_status.get('id')]:
if release['quality']['order'] < quality_type['quality']['order'] and release['status_id'] not in [available_status.get('id'), ignored_status.get('id')]:
has_better_quality += 1
# Don't search for quality lower then already available.
@@ -208,6 +209,7 @@ class Searcher(Plugin):
db.add(rls)
else:
[db.delete(old_info) for old_info in rls.info]
rls.last_edit = int(time.time())
db.commit()
@@ -292,7 +294,10 @@ class Searcher(Plugin):
db = get_session()
rls = db.query(Release).filter_by(identifier = md5(data['url'])).first()
if rls:
rls.status_id = snatched_status.get('id')
renamer_enabled = Env.setting('enabled', 'renamer')
done_status = fireEvent('status.get', 'done', single = True)
rls.status_id = done_status.get('id') if not renamer_enabled else snatched_status.get('id')
db.commit()
log_movie = '%s (%s) in %s' % (getTitle(movie['library']), movie['library']['year'], rls.quality.label)
@@ -300,26 +305,28 @@ class Searcher(Plugin):
log.info(snatch_message)
fireEvent('movie.snatched', message = snatch_message, data = rls.to_dict())
# If renamer isn't used, mark movie done
if not Env.setting('enabled', 'renamer'):
active_status = fireEvent('status.get', 'active', single = True)
done_status = fireEvent('status.get', 'done', single = True)
try:
if movie['status_id'] == active_status.get('id'):
for profile_type in movie['profile']['types']:
if rls and profile_type['quality_id'] == rls.quality.id and profile_type['finish']:
log.info('Renamer disabled, marking movie as finished: %s', log_movie)
# If renamer isn't used, mark movie done
if not renamer_enabled:
active_status = fireEvent('status.get', 'active', single = True)
done_status = fireEvent('status.get', 'done', single = True)
try:
if movie['status_id'] == active_status.get('id'):
for profile_type in movie['profile']['types']:
if profile_type['quality_id'] == rls.quality.id and profile_type['finish']:
log.info('Renamer disabled, marking movie as finished: %s', log_movie)
# Mark release done
rls.status_id = done_status.get('id')
db.commit()
# Mark release done
rls.status_id = done_status.get('id')
rls.last_edit = int(time.time())
db.commit()
# Mark movie done
mvie = db.query(Movie).filter_by(id = movie['id']).first()
mvie.status_id = done_status.get('id')
db.commit()
except:
log.error('Failed marking movie finished, renamer disabled: %s', traceback.format_exc())
# Mark movie done
mvie = db.query(Movie).filter_by(id = movie['id']).first()
mvie.status_id = done_status.get('id')
mvie.last_edit = int(time.time())
db.commit()
except:
log.error('Failed marking movie finished, renamer disabled: %s', traceback.format_exc())
except:
log.error('Failed marking movie finished: %s', traceback.format_exc())
@@ -526,7 +533,7 @@ class Searcher(Plugin):
return False
def couldBeReleased(self, wanted_quality, dates, pre_releases):
def couldBeReleased(self, is_pre_release, dates):
now = int(time.time())
@@ -538,7 +545,7 @@ class Searcher(Plugin):
if dates.get('theater', 0) < 0 or dates.get('dvd', 0) < 0:
return True
if wanted_quality in pre_releases:
if is_pre_release:
# Prerelease 1 week before theaters
if dates.get('theater') - 604800 < now:
return True

View File

@@ -23,6 +23,7 @@ class StatusPlugin(Plugin):
'deleted': 'Deleted',
'ignored': 'Ignored',
'available': 'Available',
'suggest': 'Suggest',
}
def __init__(self):

View File

@@ -1,6 +1,22 @@
from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.request import jsonified, getParam
from couchpotato.core.plugins.base import Plugin
class Suggestion(Plugin):
pass
def __init__(self):
addApiView('suggestion.view', self.getView)
def getView(self):
limit_offset = getParam('limit_offset', None)
total_movies, movies = fireEvent('movie.list', status = 'suggest', limit_offset = limit_offset, single = True)
return jsonified({
'success': True,
'empty': len(movies) == 0,
'total': total_movies,
'movies': movies,
})

View File

@@ -82,7 +82,7 @@ Page.Wizard = new Class({
'target': self.el
},
'onComplete': function(){
window.location = App.createUrl();
window.location = App.createUrl('wanted');
}
});
}

View File

@@ -0,0 +1,28 @@
from .main import Goodfilms
def start():
return Goodfilms()
config = [{
'name': 'goodfilms',
'groups': [
{
'tab': 'automation',
'list': 'watchlist_providers',
'name': 'goodfilms_automation',
'label': 'Goodfilms',
'description': 'import movies from your <a href="http://goodfil.ms">Goodfilms</a> queue',
'options': [
{
'name': 'automation_enabled',
'default': False,
'type': 'enabler',
},
{
'name': 'automation_username',
'label': 'Username',
},
],
},
],
}]

View File

@@ -0,0 +1,36 @@
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.automation.base import Automation
from bs4 import BeautifulSoup
log = CPLog(__name__)
class Goodfilms(Automation):
url = 'http://goodfil.ms/%s/queue'
def getIMDBids(self):
if not self.conf('automation_username'):
log.error('Please fill in your username')
return []
movies = []
for movie in self.getWatchlist():
imdb_id = self.search(movie.get('title'), movie.get('year'), imdb_only = True)
movies.append(imdb_id)
return movies
def getWatchlist(self):
url = self.url % self.conf('automation_username')
soup = BeautifulSoup(self.getHTMLData(url))
movies = []
for movie in soup.find_all('div', attrs = { 'class': 'movie', 'data-film-title': True }):
movies.append({ 'title': movie['data-film-title'], 'year': movie['data-film-year'] })
return movies

View File

@@ -46,7 +46,8 @@ class Provider(Plugin):
def getJsonData(self, url, **kwargs):
data = self.getCache(md5(url), url, **kwargs)
cache_key = '%s%s' % (md5(url), md5('%s' % kwargs.get('params', {})))
data = self.getCache(cache_key, url, **kwargs)
if data:
try:
@@ -58,7 +59,8 @@ class Provider(Plugin):
def getRSSData(self, url, item_path = 'channel/item', **kwargs):
data = self.getCache(md5(url), url, **kwargs)
cache_key = '%s%s' % (md5(url), md5('%s' % kwargs.get('params', {})))
data = self.getCache(cache_key, url, **kwargs)
if data:
try:
@@ -70,7 +72,9 @@ class Provider(Plugin):
return []
def getHTMLData(self, url, **kwargs):
return self.getCache(md5(url), url, **kwargs)
cache_key = '%s%s' % (md5(url), md5('%s' % kwargs.get('params', {})))
return self.getCache(cache_key, url, **kwargs)
class YarrProvider(Provider):

View File

@@ -5,9 +5,7 @@ from couchpotato.core.helpers.request import jsonified, getParams
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.movie.base import MovieProvider
from couchpotato.core.settings.model import Movie
from flask.helpers import json
import time
import traceback
log = CPLog(__name__)
@@ -17,8 +15,9 @@ class CouchPotatoApi(MovieProvider):
urls = {
'search': 'https://couchpota.to/api/search/%s/',
'info': 'https://couchpota.to/api/info/%s/',
'is_movie': 'https://couchpota.to/api/ismovie/%s/',
'eta': 'https://couchpota.to/api/eta/%s/',
'suggest': 'https://couchpota.to/api/suggest/%s/%s/',
'suggest': 'https://couchpota.to/api/suggest/',
}
http_time_between_calls = 0
api_version = 1
@@ -29,58 +28,47 @@ class CouchPotatoApi(MovieProvider):
addEvent('movie.info', self.getInfo, priority = 1)
addEvent('movie.search', self.search, priority = 1)
addEvent('movie.release_date', self.getReleaseDate)
addEvent('movie.suggest', self.suggest)
addEvent('movie.is_movie', self.isMovie)
def search(self, q, limit = 12):
return self.getJsonData(self.urls['search'] % tryUrlencode(q), headers = self.getRequestHeaders())
cache_key = 'cpapi.cache.%s' % q
cached = self.getCache(cache_key, self.urls['search'] % tryUrlencode(q), headers = self.getRequestHeaders())
def isMovie(self, identifier = None):
if cached:
try:
movies = json.loads(cached)
return movies
except:
log.error('Failed parsing search results: %s', traceback.format_exc())
if not identifier:
return
return []
data = self.getJsonData(self.urls['is_movie'] % identifier, headers = self.getRequestHeaders())
if data:
return data.get('is_movie', True)
return True
def getInfo(self, identifier = None):
if not identifier:
return
cache_key = 'cpapi.cache.info.%s' % identifier
cached = self.getCache(cache_key, self.urls['info'] % identifier, headers = self.getRequestHeaders())
if cached:
try:
movie = json.loads(cached)
return movie
except:
log.error('Failed parsing info results: %s', traceback.format_exc())
result = self.getJsonData(self.urls['info'] % identifier, headers = self.getRequestHeaders())
if result: return result
return {}
def getReleaseDate(self, identifier = None):
if identifier is None: return {}
try:
data = self.urlopen(self.urls['eta'] % identifier, headers = self.getRequestHeaders())
dates = json.loads(data)
log.debug('Found ETA for %s: %s', (identifier, dates))
return dates
except Exception, e:
log.error('Error getting ETA for %s: %s', (identifier, e))
return {}
dates = self.getJsonData(self.urls['eta'] % identifier, headers = self.getRequestHeaders())
log.debug('Found ETA for %s: %s', (identifier, dates))
return dates
def suggest(self, movies = [], ignore = []):
try:
data = self.urlopen(self.urls['suggest'] % (','.join(movies), ','.join(ignore)))
suggestions = json.loads(data)
log.info('Found Suggestions for %s', (suggestions))
except Exception, e:
log.error('Error getting suggestions for %s: %s', (movies, e))
suggestions = self.getJsonData(self.urls['suggest'], params = {
'movies': ','.join(movies),
#'ignore': ','.join(ignore),
})
log.info('Found Suggestions for %s', (suggestions))
return suggestions

View File

@@ -69,7 +69,7 @@ class Newznab(NZBProvider, RSS):
results.append({
'id': nzb_id,
'provider_extra': host['host'],
'provider_extra': urlparse(host['host']).hostname or host['host'],
'name': self.getTextElement(nzb, 'title'),
'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
'size': int(self.getElement(nzb, 'enclosure').attrib['length']) / 1024 / 1024,

View File

@@ -0,0 +1,40 @@
from .main import IPTorrents
def start():
    """Plugin entry point: build and return the IPTorrents provider instance."""
    return IPTorrents()
# Settings definition, rendered under Searcher > Providers in the UI.
config = [{
    'name': 'iptorrents',
    'groups': [
        {
            'tab': 'searcher',
            'subtab': 'providers',
            'list': 'torrent_providers',
            'name': 'IPTorrents',
            'description': 'See <a href="http://www.iptorrents.com">IPTorrents</a>',
            # Also shown in the first-run wizard.
            'wizard': True,
            'options': [
                {
                    # Master on/off switch for this provider.
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    # Restrict searches to torrents that do not count
                    # against the account's download ratio.
                    'name': 'freeleech',
                    'default': 0,
                    'type': 'bool',
                    'description': 'Only search for [FreeLeech] torrents.',
                },
            ],
        },
    ],
}]

View File

@@ -0,0 +1,83 @@
from bs4 import BeautifulSoup
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.torrent.base import TorrentProvider
import traceback
log = CPLog(__name__)
class IPTorrents(TorrentProvider):
    """Torrent provider for the private IPTorrents tracker.

    Logs in with the configured username/password and scrapes the HTML
    results table (the site has no API) for matching movie torrents.
    """

    urls = {
        'test' : 'http://www.iptorrents.com/',
        'base_url' : 'http://www.iptorrents.com',
        'login' : 'http://www.iptorrents.com/torrents/',
        'search' : 'http://www.iptorrents.com/torrents/?l%d=1%s&q=%s&qf=ti',
    }

    # Site category ids mapped to the quality identifiers they cover.
    cat_ids = [
        ([48], ['720p', '1080p', 'bd50']),
        ([72], ['cam', 'ts', 'tc', 'r5', 'scr']),
        ([7], ['dvdrip', 'brrip']),
        ([6], ['dvdr']),
    ]

    http_time_between_calls = 1 #seconds
    cat_backup_id = None

    def _searchOnTitle(self, title, movie, quality, results):
        """Scrape the tracker's search results for `title` and append parsed hits to `results`."""

        freeleech = '' if not self.conf('freeleech') else '&free=on'

        # Colons break the site search, so strip them from the title.
        url = self.urls['search'] % (self.getCatId(quality['identifier'])[0], freeleech, tryUrlencode('%s %s' % (title.replace(':', ''), movie['library']['year'])))
        data = self.getHTMLData(url, opener = self.login_opener)

        if data:
            html = BeautifulSoup(data)

            try:
                result_table = html.find('table', attrs = {'class' : 'torrents'})
                if not result_table or 'nothing found!' in data.lower():
                    return

                entries = result_table.find_all('tr')

                # First row is the table header, so skip it.
                for result in entries[1:]:

                    torrent = result.find_all('td')[1].find('a')

                    torrent_id = torrent['href'].replace('/details.php?id=', '')
                    torrent_name = torrent.string
                    # Spaces in the download href break the request; dots are accepted.
                    torrent_download_url = self.urls['base_url'] + (result.find_all('td')[3].find('a'))['href'].replace(' ', '.')
                    torrent_details_url = self.urls['base_url'] + torrent['href']
                    torrent_size = self.parseSize(result.find_all('td')[5].string)
                    torrent_seeders = tryInt(result.find('td', attrs = {'class' : 'ac t_seeders'}).string)
                    torrent_leechers = tryInt(result.find('td', attrs = {'class' : 'ac t_leechers'}).string)

                    results.append({
                        'id': torrent_id,
                        'name': torrent_name,
                        'url': torrent_download_url,
                        'detail_url': torrent_details_url,
                        'download': self.loginDownload,
                        'size': torrent_size,
                        'seeders': torrent_seeders,
                        'leechers': torrent_leechers,
                    })

            except:
                # Was 'Failed to parsing %s: %s' -- fixed log-message grammar.
                log.error('Failed parsing %s: %s', (self.getName(), traceback.format_exc()))

    def loginSuccess(self, output):
        """A logged-in page never contains the guest 'don't have an account' notice."""
        return 'don\'t have an account' not in output.lower()

    def getLoginParams(self):
        """Return the url-encoded POST body for the login form."""
        return tryUrlencode({
            'username': self.conf('username'),
            'password': self.conf('password'),
            'login': 'submit',
        })

View File

@@ -68,8 +68,6 @@ class ThePirateBay(TorrentMagnetProvider):
except:
pass
print total_pages, page
entries = results_table.find_all('tr')
for result in entries[2:]:
link = result.find(href = re.compile('torrent\/\d+\/'))

View File

@@ -45,7 +45,7 @@ class Movie(Entity):
The files belonging to the movie object are global for the whole movie
such as trailers, nfo, thumbnails"""
last_edit = Field(Integer, default = lambda: int(time.time()))
last_edit = Field(Integer, default = lambda: int(time.time()), index = True)
library = ManyToOne('Library', cascade = 'delete, delete-orphan', single_parent = True)
status = ManyToOne('Status')
@@ -95,6 +95,7 @@ class Release(Entity):
"""Logically groups all files that belong to a certain release, such as
parts of a movie, subtitles."""
last_edit = Field(Integer, default = lambda: int(time.time()), index = True)
identifier = Field(String(100), index = True)
movie = ManyToOne('Movie')

View File

@@ -20,7 +20,7 @@ class Env(object):
_options = None
_args = None
_quiet = False
_deamonize = False
_daemonized = False
_desktop = None
_session = None

View File

@@ -118,6 +118,7 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
Env.set('console_log', options.console_log)
Env.set('quiet', options.quiet)
Env.set('desktop', desktop)
Env.set('daemonized', options.daemon)
Env.set('args', args)
Env.set('options', options)
@@ -208,11 +209,12 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
# Basic config
app.secret_key = api_key
host = Env.setting('host', default = '0.0.0.0')
# app.debug = development
config = {
'use_reloader': reloader,
'port': tryInt(Env.setting('port', default = 5000)),
'host': Env.setting('host', default = ''),
'host': host if host and len(host) > 0 else '0.0.0.0',
'ssl_cert': Env.setting('ssl_cert', default = None),
'ssl_key': Env.setting('ssl_key', default = None),
}
@@ -243,7 +245,8 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
(r'.*', FallbackHandler, dict(fallback = web_container)),
],
log_function = lambda x : None,
debug = config['use_reloader']
debug = config['use_reloader'],
gzip = True,
)
if config['ssl_cert'] and config['ssl_key']:
@@ -259,7 +262,7 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En
while try_restart:
try:
server.listen(config['port'])
server.listen(config['port'], config['host'])
loop.start()
except Exception, e:
try:

Binary file not shown.

After

Width:  |  Height:  |  Size: 778 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

After

Width:  |  Height:  |  Size: 8.2 KiB

View File

@@ -3,7 +3,7 @@ var CouchPotato = new Class({
Implements: [Events, Options],
defaults: {
page: 'wanted',
page: 'home',
action: 'index',
params: {}
},
@@ -24,8 +24,8 @@ var CouchPotato = new Class({
if(window.location.hash)
History.handleInitialState();
self.openPage(window.location.pathname);
else
self.openPage(window.location.pathname);
History.addEvent('change', self.openPage.bind(self));
self.c.addEvent('click:relay(a[href^=/]:not([target]))', self.pushState.bind(self));
@@ -135,7 +135,7 @@ var CouchPotato = new Class({
self.current_page.hide()
try {
var page = self.pages[page_name] || self.pages.Wanted;
var page = self.pages[page_name] || self.pages.Home;
page.open(action, params, current_url);
page.show();
}
@@ -342,7 +342,14 @@ var Route = new Class({
parse: function(){
var self = this;
var path = History.getPath().replace(Api.getOption('url'), '/').replace(App.getOption('base_url'), '/')
var rep = function(pa){
return pa.replace(Api.getOption('url'), '/').replace(App.getOption('base_url'), '/')
}
var path = rep(History.getPath())
if(path == '/' && location.hash){
path = rep(location.hash.replace('#', '/'))
}
self.current = path.replace(/^\/+|\/+$/g, '')
var url = self.current.split('/')

View File

@@ -24,6 +24,9 @@ var self = window.StyleFix = {
var url = link.href || link.getAttribute('data-href'),
base = url.replace(/[^\/]+$/, ''),
base_scheme = (/^[a-z]{3,10}:/.exec(base) || [''])[0],
base_domain = (/^[a-z]{3,10}:\/\/[^\/]+/.exec(base) || [''])[0],
base_query = /^([^?]*)\??/.exec(url)[1],
parent = link.parentNode,
xhr = new XMLHttpRequest(),
process;
@@ -43,12 +46,23 @@ var self = window.StyleFix = {
// Convert relative URLs to absolute, if needed
if(base) {
css = css.replace(/url\(\s*?((?:"|')?)(.+?)\1\s*?\)/gi, function($0, quote, url) {
if(!/^([a-z]{3,10}:|\/|#)/i.test(url)) { // If url not absolute & not a hash
if(/^([a-z]{3,10}:|#)/i.test(url)) { // Absolute & or hash-relative
return $0;
}
else if(/^\/\//.test(url)) { // Scheme-relative
// May contain sequences like /../ and /./ but those DO work
return 'url("' + base_scheme + url + '")';
}
else if(/^\//.test(url)) { // Domain-relative
return 'url("' + base_domain + url + '")';
}
else if(/^\?/.test(url)) { // Query-relative
return 'url("' + base_query + url + '")';
}
else {
// Path-relative
return 'url("' + base + url + '")';
}
return $0;
});
// behavior URLs shoudnt be converted (Issue #19)
@@ -470,4 +484,4 @@ root.className += ' ' + self.prefix;
StyleFix.register(self.prefixCSS);
})(document.documentElement);
})(document.documentElement);

View File

@@ -0,0 +1,104 @@
Page.Home = new Class({

	Extends: PageBase,

	name: 'home',
	title: 'Manage new stuff for things and such',

	indexAction: function(param){
		var self = this;

		if(self.soon_list){

			// Lists already exist: just refresh their contents.
			// NOTE(review): soon_list itself is not updated here -- confirm
			// that is intentional (it shows a random selection).
			self.available_list.update();
			self.late_list.update();
			return
		}

		// Snatched & available downloads
		self.available_list = new MovieList({
			'navigation': false,
			'identifier': 'snatched',
			'load_more': false,
			'view': 'list',
			'actions': [MA.IMDB, MA.Trailer, MA.Files, MA.Release, MA.Edit, MA.Readd, MA.Refresh, MA.Delete],
			'title': 'Snatched & Available',
			'on_empty_element': new Element('div'),
			'filter': {
				'release_status': 'snatched,available'
			}
		});

		// Coming soon
		// Fixed user-facing typo: 'being searches for' -> 'being searched for'.
		self.soon_list = new MovieList({
			'navigation': false,
			'identifier': 'soon',
			'limit': 18,
			'title': 'Available soon',
			'description': 'These are being searched for and should be available soon as they will be released on DVD in the next few weeks.',
			'on_empty_element': new Element('div').adopt(
				new Element('h1', {'text': 'Available soon'}),
				new Element('span', {'text': 'There are no movies available soon. Add some movies, so you have something to watch later.'})
			),
			'filter': {
				'random': true
			},
			'actions': [MA.IMDB, MA.Refresh],
			'load_more': false,
			'view': 'thumbs',
			'api_call': 'dashboard.soon'
		});

		// Still not available (released but never snatched)
		self.late_list = new MovieList({
			'navigation': false,
			'identifier': 'late',
			'limit': 50,
			'title': 'Still not available',
			'description': 'Try another quality profile or maybe add more providers in <a href="'+App.createUrl('settings/searcher/providers/')+'">Settings</a>.',
			'on_empty_element': new Element('div'),
			'filter': {
				'late': true
			},
			'load_more': false,
			'view': 'list',
			'actions': [MA.IMDB, MA.Trailer, MA.Edit, MA.Refresh, MA.Delete],
			'api_call': 'dashboard.soon'
		});

		self.el.adopt(
			$(self.available_list),
			$(self.soon_list),
			$(self.late_list)
		);

		// Suggest (disabled, staged feature)
		// self.suggestion_list = new MovieList({
		// 	'navigation': false,
		// 	'identifier': 'suggestions',
		// 	'limit': 6,
		// 	'load_more': false,
		// 	'view': 'thumbs',
		// 	'api_call': 'suggestion.suggest'
		// });
		//
		// self.el.adopt(
		// 	new Element('h2', {
		// 		'text': 'You might like'
		// 	}),
		// 	$(self.suggestion_list)
		// );

		// Recent
			// Snatched
			// Renamed
			// Added

		// Free space

		// Shortcuts

	}

})

View File

@@ -27,8 +27,10 @@ Page.Manage = new Class({
self.list = new MovieList({
'identifier': 'manage',
'status': 'done',
'actions': MovieActions,
'filter': {
'release_status': 'done'
},
'actions': [MA.IMDB, MA.Trailer, MA.Files, MA.Readd, MA.Edit, MA.Delete],
'menu': [self.refresh_button, self.refresh_quick],
'on_empty_element': new Element('div.empty_manage').adopt(
new Element('div', {
@@ -88,7 +90,7 @@ Page.Manage = new Class({
'onComplete': function(json){
self.update_in_progress = true;
if(!json.progress){
if(!json || !json.progress){
clearInterval(self.progress_interval);
self.update_in_progress = false;
if(self.progress_container){

View File

@@ -1,8 +0,0 @@
Page.Soon = new Class({
Extends: PageBase,
name: 'soon',
title: 'Which wanted movies are released soon?'
})

View File

@@ -22,7 +22,7 @@ Page.Wanted = new Class({
self.wanted = new MovieList({
'identifier': 'wanted',
'status': 'active',
'actions': MovieActions,
'actions': [MA.IMDB, MA.Trailer, MA.Release, MA.Edit, MA.Refresh, MA.Readd, MA.Delete],
'add_new': true,
'menu': [self.manual_search],
'on_empty_element': App.createUserscriptButtons().addClass('empty_wanted')
@@ -52,7 +52,8 @@ Page.Wanted = new Class({
var start_text = self.manual_search.get('text');
self.progress_interval = setInterval(function(){
Api.request('searcher.progress', {
if(self.search_progress && self.search_progress.running) return;
self.search_progress = Api.request('searcher.progress', {
'onComplete': function(json){
self.search_in_progress = true;
if(!json.progress){
@@ -65,288 +66,9 @@ Page.Wanted = new Class({
self.manual_search.set('text', 'Searching.. (' + (((progress.total-progress.to_go)/progress.total)*100).round() + '%)');
}
}
})
});
}, 1000);
}
});
var MovieActions = {};
window.addEvent('domready', function(){
MovieActions.Wanted = {
'IMDB': IMDBAction
,'Trailer': TrailerAction
,'Releases': ReleaseAction
,'Edit': new Class({
Extends: MovieAction,
create: function(){
var self = this;
self.el = new Element('a.edit', {
'title': 'Change movie information, like title and quality.',
'events': {
'click': self.editMovie.bind(self)
}
});
},
editMovie: function(e){
var self = this;
(e).preventDefault();
if(!self.options_container){
self.options_container = new Element('div.options').adopt(
new Element('div.form').adopt(
self.title_select = new Element('select', {
'name': 'title'
}),
self.profile_select = new Element('select', {
'name': 'profile'
}),
new Element('a.button.edit', {
'text': 'Save & Search',
'events': {
'click': self.save.bind(self)
}
})
)
).inject(self.movie, 'top');
Array.each(self.movie.data.library.titles, function(alt){
new Element('option', {
'text': alt.title
}).inject(self.title_select);
if(alt['default'])
self.title_select.set('value', alt.title);
});
Quality.getActiveProfiles().each(function(profile){
var profile_id = profile.id ? profile.id : profile.data.id;
new Element('option', {
'value': profile_id,
'text': profile.label ? profile.label : profile.data.label
}).inject(self.profile_select);
if(self.movie.profile && self.movie.profile.data && self.movie.profile.data.id == profile_id)
self.profile_select.set('value', profile_id);
});
}
self.movie.slide('in', self.options_container);
},
save: function(e){
(e).preventDefault();
var self = this;
Api.request('movie.edit', {
'data': {
'id': self.movie.get('id'),
'default_title': self.title_select.get('value'),
'profile_id': self.profile_select.get('value')
},
'useSpinner': true,
'spinnerTarget': $(self.movie),
'onComplete': function(){
self.movie.quality.set('text', self.profile_select.getSelected()[0].get('text'));
self.movie.title.set('text', self.title_select.getSelected()[0].get('text'));
}
});
self.movie.slide('out');
}
})
,'Refresh': new Class({
Extends: MovieAction,
create: function(){
var self = this;
self.el = new Element('a.refresh', {
'title': 'Refresh the movie info and do a forced search',
'events': {
'click': self.doRefresh.bind(self)
}
});
},
doRefresh: function(e){
var self = this;
(e).preventDefault();
Api.request('movie.refresh', {
'data': {
'id': self.movie.get('id')
}
});
}
})
,'Delete': new Class({
Extends: MovieAction,
Implements: [Chain],
create: function(){
var self = this;
self.el = new Element('a.delete', {
'title': 'Remove the movie from this CP list',
'events': {
'click': self.showConfirm.bind(self)
}
});
},
showConfirm: function(e){
var self = this;
(e).preventDefault();
if(!self.delete_container){
self.delete_container = new Element('div.buttons.delete_container').adopt(
new Element('a.cancel', {
'text': 'Cancel',
'events': {
'click': self.hideConfirm.bind(self)
}
}),
new Element('span.or', {
'text': 'or'
}),
new Element('a.button.delete', {
'text': 'Delete ' + self.movie.title.get('text'),
'events': {
'click': self.del.bind(self)
}
})
).inject(self.movie, 'top');
}
self.movie.slide('in', self.delete_container);
},
hideConfirm: function(e){
var self = this;
(e).preventDefault();
self.movie.slide('out');
},
del: function(e){
(e).preventDefault();
var self = this;
var movie = $(self.movie);
self.chain(
function(){
self.callChain();
},
function(){
Api.request('movie.delete', {
'data': {
'id': self.movie.get('id'),
'delete_from': self.movie.list.options.identifier
},
'onComplete': function(){
movie.set('tween', {
'duration': 300,
'onComplete': function(){
self.movie.destroy()
}
});
movie.tween('height', 0);
}
});
}
);
self.callChain();
}
})
};
MovieActions.Snatched = {
'IMDB': IMDBAction
,'Delete': MovieActions.Wanted.Delete
};
MovieActions.Done = {
'IMDB': IMDBAction
,'Edit': MovieActions.Wanted.Edit
,'Trailer': TrailerAction
,'Files': new Class({
Extends: MovieAction,
create: function(){
var self = this;
self.el = new Element('a.directory', {
'title': 'Available files',
'events': {
'click': self.showFiles.bind(self)
}
});
},
showFiles: function(e){
var self = this;
(e).preventDefault();
if(!self.options_container){
self.options_container = new Element('div.options').adopt(
self.files_container = new Element('div.files.table')
).inject(self.movie, 'top');
// Header
new Element('div.item.head').adopt(
new Element('span.name', {'text': 'File'}),
new Element('span.type', {'text': 'Type'}),
new Element('span.is_available', {'text': 'Available'})
).inject(self.files_container)
Array.each(self.movie.data.releases, function(release){
var rel = new Element('div.release').inject(self.files_container);
Array.each(release.files, function(file){
new Element('div.file.item').adopt(
new Element('span.name', {'text': file.path}),
new Element('span.type', {'text': File.Type.get(file.type_id).name}),
new Element('span.available', {'text': file.available})
).inject(rel)
});
});
}
self.movie.slide('in', self.options_container);
},
})
,'Delete': MovieActions.Wanted.Delete
};
})
});

View File

@@ -80,6 +80,12 @@ a:hover { color: #f3f3f3; }
padding: 80px 0 10px;
}
h2 {
font-size: 30px;
padding: 0;
margin: 20px 0 0 0;
}
.footer {
text-align:center;
padding: 50px 0 0 0;
@@ -151,6 +157,7 @@ body > .spinner, .mask{
.icon.folder { background-image: url('../images/icon.folder.png'); }
.icon.imdb { background-image: url('../images/icon.imdb.png'); }
.icon.refresh { background-image: url('../images/icon.refresh.png'); }
.icon.readd { background-image: url('../images/icon.readd.png'); }
.icon.rating { background-image: url('../images/icon.rating.png'); }
.icon.files { background-image: url('../images/icon.files.png'); }
.icon.info { background-image: url('../images/icon.info.png'); }
@@ -578,7 +585,7 @@ body > .spinner, .mask{
bottom: 0;
padding: 2px;
width: 240px;
z-index: 2;
z-index: 20;
overflow: hidden;
font-size: 14px;
font-weight: bold;

View File

@@ -118,7 +118,7 @@
border: 0;
}
.page .ctrlHolder.save_success:not(:first-child) {
background: url('../../images/icon.check.png') no-repeat 7px center;
background: url('../images/icon.check.png') no-repeat 7px center;
}
.page .ctrlHolder:last-child { border: none; }
.page .ctrlHolder:hover { background-color: rgba(255,255,255,0.05); }
@@ -250,7 +250,7 @@
padding: 0 4% 0 4px;
font-size: 13px;
width: 30%;
background-image: url('../../images/icon.folder.gif');
background-image: url('../images/icon.folder.gif');
background-repeat: no-repeat;
background-position: 97% center;
overflow: hidden;
@@ -298,7 +298,7 @@
cursor: pointer;
margin: 0 !important;
border-top: 1px solid rgba(255,255,255,0.1);
background: url('../../images/right.arrow.png') no-repeat 98% center;
background: url('../images/right.arrow.png') no-repeat 98% center;
}
.page .directory_list li:last-child {
border-bottom: 1px solid rgba(255,255,255,0.1);
@@ -484,7 +484,7 @@
margin: -9px 0 0 -16px;
border-radius: 30px 30px 0 0;
cursor: pointer;
background: url('../../images/icon.delete.png') no-repeat center 2px, -*-linear-gradient(
background: url('../images/icon.delete.png') no-repeat center 2px, -*-linear-gradient(
270deg,
#5b9bd1 0%,
#5b9bd1 100%
@@ -558,7 +558,7 @@
}
.page .tab_about .usenet li {
background: url('../../images/icon.check.png') no-repeat left center;
background: url('../images/icon.check.png') no-repeat left center;
padding: 0 0 0 25px;
}
@@ -646,6 +646,6 @@
}
.active .group_imdb_automation:not(.disabled) {
background: url('../../images/imdb_watchlist.png') no-repeat right 50px;
background: url('../images/imdb_watchlist.png') no-repeat right 50px;
min-height: 210px;
}

View File

@@ -1,43 +1,14 @@
<!doctype html>
<html>
<head>
<link rel="stylesheet" href="{{ url_for('web.static', filename='style/main.css') }}" type="text/css">
<link rel="stylesheet" href="{{ url_for('web.static', filename='style/uniform.generic.css') }}" type="text/css">
<link rel="stylesheet" href="{{ url_for('web.static', filename='style/uniform.css') }}" type="text/css">
<link rel="stylesheet" href="{{ url_for('web.static', filename='style/page/settings.css') }}" type="text/css">
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/mootools.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/mootools_more.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/prefix_free.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/uniform.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/form_replacement/form_check.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/form_replacement/form_radio.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/form_replacement/form_dropdown.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/form_replacement/form_selectoption.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/question.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/scrollspy.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/spin.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/couchpotato.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/api.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/library/history.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/page.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/block.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/block/navigation.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/block/footer.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/block/menu.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/page/wanted.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/page/settings.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/page/about.js') }}"></script>
<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/page/manage.js') }}"></script>
<!--<script type="text/javascript" src="{{ url_for('web.static', filename='scripts/page/soon.js') }}"></script>-->
{% for url in fireEvent('clientscript.get_scripts', as_html = True, single = True) %}
{% for url in fireEvent('clientscript.get_styles', as_html = True, location = 'front', single = True) %}
<link rel="stylesheet" href="{{ url_for('web.index') }}{{ url }}" type="text/css">{% endfor %}
{% for url in fireEvent('clientscript.get_scripts', as_html = True, location = 'front', single = True) %}
<script type="text/javascript" src="{{ url_for('web.index') }}{{ url }}"></script>{% endfor %}
{% for url in fireEvent('clientscript.get_styles', as_html = True, single = True) %}
{% for url in fireEvent('clientscript.get_scripts', as_html = True, location = 'head', single = True) %}
<script type="text/javascript" src="{{ url_for('web.index') }}{{ url }}"></script>{% endfor %}
{% for url in fireEvent('clientscript.get_styles', as_html = True, location = 'head', single = True) %}
<link rel="stylesheet" href="{{ url_for('web.index') }}{{ url }}" type="text/css">{% endfor %}
<link href="{{ url_for('web.static', filename='images/favicon.ico') }}" rel="icon" type="image/x-icon" />

View File

@@ -14,6 +14,11 @@
prog=couchpotato
lockfile=/var/lock/subsys/$prog
# Source couchpotato configuration
if [ -f /etc/sysconfig/couchpotato ]; then
. /etc/sysconfig/couchpotato
fi
## Edit user configuration in /etc/sysconfig/couchpotato to change
## the defaults
username=${CP_USER-couchpotato}
@@ -22,11 +27,6 @@ datadir=${CP_DATA-~/.couchpotato}
pidfile=${CP_PIDFILE-/var/run/couchpotato/couchpotato.pid}
##
# Source couchpotato configuration
if [ -f /etc/sysconfig/couchpotato ]; then
. /etc/sysconfig/couchpotato
fi
pidpath=`dirname ${pidfile}`
options=" --daemon --pid_file=${pidfile} --data_dir=${datadir}"
@@ -87,4 +87,4 @@ case "$1" in
*)
echo $"Usage: $0 {start|stop|status|restart|try-restart|force-reload}"
exit 2
esac
esac

View File

@@ -12,51 +12,56 @@
# Description: starts instance of CouchPotato using start-stop-daemon
### END INIT INFO
############### EDIT ME ##################
# path to app
APP_PATH=/usr/local/sbin/CouchPotatoServer/
# Check for existence of defaults file
# and utilize if available
if [ -f /etc/default/couchpotato ]; then
. /etc/default/couchpotato
else
echo "/etc/default/couchpotato not found using default settings.";
fi
# user
RUN_AS=YOUR_USERNAME_HERE
# path to python bin
DAEMON=/usr/bin/python
# Path to store PID file
PID_FILE=/var/run/couchpotato/server.pid
PID_PATH=$(dirname $PID_FILE)
# script name
# Script name
NAME=couchpotato
# app name
# App name
DESC=CouchPotato
# startup args
DAEMON_OPTS=" CouchPotato.py --daemon --pid_file=${PID_FILE}"
# Path to app root
CP_APP_PATH=${APP_PATH-/usr/local/sbin/CouchPotatoServer/}
############### END EDIT ME ##################
# User to run CP as
CP_RUN_AS=${RUN_AS-root}
test -x $DAEMON || exit 0
# Path to python bin
CP_DAEMON=${DAEMON_PATH-/usr/bin/python}
# Path to store PID file
CP_PID_FILE=${PID_FILE-/var/run/couchpotato.pid}
# Other startup args
CP_DAEMON_OPTS=" CouchPotato.py --daemon --pid_file=${CP_PID_FILE}"
test -x $CP_DAEMON || exit 0
set -e
case "$1" in
start)
echo "Starting $DESC"
rm -rf $PID_PATH || return 1
install -d --mode=0755 -o $RUN_AS $PID_PATH || return 1
start-stop-daemon -d $APP_PATH -c $RUN_AS --start --background --pidfile $PID_FILE --exec $DAEMON -- $DAEMON_OPTS
rm -rf $CP_PID_FILE || return 1
touch $CP_PID_FILE
chown $CP_RUN_AS $CP_PID_FILE
start-stop-daemon -d $CP_APP_PATH -c $CP_RUN_AS --start --background --pidfile $CP_PID_FILE --exec $CP_DAEMON -- $CP_DAEMON_OPTS
;;
stop)
echo "Stopping $DESC"
start-stop-daemon --stop --pidfile $PID_FILE --retry 15
start-stop-daemon --stop --pidfile $CP_PID_FILE --retry 15
;;
restart|force-reload)
echo "Restarting $DESC"
start-stop-daemon --stop --pidfile $PID_FILE --retry 15
start-stop-daemon -d $APP_PATH -c $RUN_AS --start --background --pidfile $PID_FILE --exec $DAEMON -- $DAEMON_OPTS
start-stop-daemon --stop --pidfile $CP_PID_FILE --retry 15
start-stop-daemon -d $CP_APP_PATH -c $CP_RUN_AS --start --background --pidfile $CP_PID_FILE --exec $CP_DAEMON -- $CP_DAEMON_OPTS
;;
*)
N=/etc/init.d/$NAME

5
init/ubuntu.default Normal file
View File

@@ -0,0 +1,5 @@
# COPY THIS FILE TO /etc/default/couchpotato
# OPTIONS: APP_PATH, RUN_AS, DAEMON_PATH, CP_PID_FILE
APP_PATH=
RUN_AS=root

View File

@@ -92,6 +92,7 @@ class Daemon():
"""
Stop the daemon
"""
# Get the pid from the pidfile
try:
pf = file(self.pidfile, 'r')
@@ -115,7 +116,6 @@ class Daemon():
if err.find("No such process") > 0:
self.delpid()
else:
print str(err)
sys.exit(1)
def restart(self):

0
libs/minify/__init__.py Normal file
View File

223
libs/minify/cssmin.py Normal file
View File

@@ -0,0 +1,223 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# `cssmin.py` - A Python port of the YUI CSS compressor.
from StringIO import StringIO # The pure-Python StringIO supports unicode.
import re
__version__ = '0.1.1'
def remove_comments(css):
    """Remove all CSS comment blocks.

    Comments that start with ``/*!`` are preserved (the YUI convention
    for license banners), and IE Mac comment hacks (a comment ending in
    ``\\*/`` plus the comment that follows it) are left intact as a pair.
    """
    iemac = False
    preserve = False
    comment_start = css.find("/*")
    while comment_start >= 0:
        # Preserve comments that look like `/*!...*/`.
        # Slicing is used to make sure we don't get an IndexError.
        preserve = css[comment_start + 2:comment_start + 3] == "!"
        comment_end = css.find("*/", comment_start + 2)
        if comment_end < 0:
            # Unterminated comment: everything from its start is dead text.
            if not preserve:
                css = css[:comment_start]
                break
        elif comment_end >= (comment_start + 2):
            if css[comment_end - 1] == "\\":
                # This is an IE Mac-specific comment; leave this one and the
                # following one alone.
                comment_start = comment_end + 2
                iemac = True
            elif iemac:
                # The closing half of the IE Mac pair -- keep it too.
                comment_start = comment_end + 2
                iemac = False
            elif not preserve:
                # Ordinary comment: splice it out.
                css = css[:comment_start] + css[comment_end + 2:]
            else:
                # Preserved `/*!` banner: skip past it.
                comment_start = comment_end + 2
        comment_start = css.find("/*", comment_start)
    return css
def remove_unnecessary_whitespace(css):
    """Strip whitespace that has no effect on how the CSS parses.

    Spaces around punctuation (``{ } : ; , > + ( ) [ ] !``) are removed.
    Descendant selectors such as ``p :link`` must keep their space, so
    those colons are temporarily masked before the generic stripping and
    restored afterwards.
    """

    def mask_descendant_colons(text):
        # Turn 'p :link' into 'p ___PSEUDOCLASSCOLON___link' so the space
        # before the colon survives the punctuation stripping below.
        selector_colon = re.compile(r"(^|\})(([^\{\:])+\:)+([^\{]*\{)")
        hit = selector_colon.search(text)
        while hit:
            masked = hit.group().replace(":", "___PSEUDOCLASSCOLON___")
            text = text[:hit.start()] + masked + text[hit.end():]
            hit = selector_colon.search(text)
        return text

    css = mask_descendant_colons(css)

    # Drop whitespace that precedes punctuation.
    css = re.sub(r"\s+([!{};:>+\(\)\],])", r"\1", css)

    # Allow only one `@charset`, moved to the very beginning.
    css = re.sub(r"^(.*)(@charset \"[^\"]*\";)", r"\2\1", css)
    css = re.sub(r"^(\s*@charset [^;]+;\s*)+", r"\1", css)

    # `@media screen and (...)` must keep the space after `and`.
    css = re.sub(r"\band\(", "and (", css)

    # Restore the masked descendant-selector colons.
    css = css.replace('___PSEUDOCLASSCOLON___', ':')

    # Drop whitespace that follows punctuation.
    css = re.sub(r"([!{}:;>+\(\[,])\s+", r"\1", css)

    return css
def remove_unnecessary_semicolons(css):
    """Drop the semicolon run that precedes a closing brace.

    The last declaration in a rule does not need a terminating `;`, so
    `;}`, `;;}`, etc. all collapse to just `}`.
    """
    trailing_semicolons = re.compile(r";+\}")
    return trailing_semicolons.sub("}", css)
def remove_empty_rules(css):
    """Delete rules whose declaration block is empty (`selector{}`)."""
    empty_rule = re.compile(r"[^\}\{]+\{\}")
    return empty_rule.sub("", css)
def normalize_rgb_colors_to_hex(css):
    """Convert `rgb(51,102,153)` to `#336699`."""

    def _rgb_to_hex(match):
        # int() tolerates the surrounding whitespace the pattern may capture.
        channels = tuple(int(value) for value in match.group(1).split(","))
        return '#%.2x%.2x%.2x' % channels

    return re.sub(r"rgb\s*\(\s*([0-9,\s]+)\s*\)", _rgb_to_hex, css)
def condense_zero_units(css):
    """Replace `0(px, em, %, etc)` with `0` — a zero needs no unit.

    The leading `[\\s:]` anchor ensures only a bare `0` value is matched,
    not the tail of a larger number like `10px`.
    """
    zero_with_unit = re.compile(r"([\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)")
    return zero_with_unit.sub(r"\1\2", css)
def condense_multidimensional_zeros(css):
    """Replace `:0 0 0 0;`, `:0 0 0;` etc. with `:0;`.

    Longest form first, so each shorthand collapses in a single pass.
    """
    for all_zero_shorthand in (":0 0 0 0;", ":0 0 0;", ":0 0;"):
        css = css.replace(all_zero_shorthand, ":0;")
    # `background-position` requires two values, so undo the collapse there.
    css = css.replace("background-position:0;", "background-position:0 0;")
    return css
def condense_floating_points(css):
    """Replace `0.6` with `.6` where possible.

    Only applies when the zeros start a value (after `:` or whitespace),
    so `10.5` is left alone.
    """
    leading_zeros = re.compile(r"(:|\s)0+\.(\d+)")
    return leading_zeros.sub(r"\1.\2", css)
def condense_hex_colors(css):
    """Shorten colors from #AABBCC to #ABC where possible.

    The leading `[^"'=\\s]` group keeps the match away from attribute
    selectors and string literals where a literal 6-digit value matters.
    """
    regex = re.compile(r"([^\"'=\s])(\s*)#([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])")
    match = regex.search(css)
    while match:
        # `first` holds digits 1, 3 and 5; `second` digits 2, 4 and 6.
        # The color is condensable only when each pair is doubled.
        first = match.group(3) + match.group(5) + match.group(7)
        second = match.group(4) + match.group(6) + match.group(8)
        if first.lower() == second.lower():
            css = css.replace(match.group(), match.group(1) + match.group(2) + '#' + first)
            # Resume 3 chars before the old end: the replacement is 3 chars
            # shorter, so this lands just past the rewritten color.
            match = regex.search(css, match.end() - 3)
        else:
            match = regex.search(css, match.end())
    return css
def condense_whitespace(css):
    """Condense multiple adjacent whitespace characters into one.

    Leading/trailing whitespace survives as a single space; only runs
    are collapsed.
    """
    whitespace_run = re.compile(r"\s+")
    return whitespace_run.sub(" ", css)
def condense_semicolons(css):
    """Condense multiple adjacent semicolon characters into one."""
    semicolon_run = re.compile(r";;+")
    return semicolon_run.sub(";", css)
def wrap_css_lines(css, line_length):
    """Wrap the lines of the given CSS to an approximate length.

    Breaks are inserted only after `}` (always a safe split point in
    CSS), and only once the current line has reached `line_length`.
    """
    wrapped = []
    start = 0
    brace = css.find('}')
    while brace != -1:
        if brace - start >= line_length:
            wrapped.append(css[start:brace + 1])
            start = brace + 1
        brace = css.find('}', brace + 1)
    if start < len(css):
        wrapped.append(css[start:])
    return '\n'.join(wrapped)
def cssmin(css, wrap = None):
    """Minify `css` and return the result.

    Runs the individual condensing passes in a fixed order; `wrap`, if
    given, is an approximate maximum line length for the output.
    """
    css = remove_comments(css)
    css = condense_whitespace(css)
    # A pseudo class for the Box Model Hack
    # (see http://tantek.com/CSS/Examples/boxmodelhack.html)
    css = css.replace('"\\"}\\""', "___PSEUDOCLASSBMH___")
    # NOTE(review): this pass is deliberately disabled in this copy of the
    # minifier; left here so the full pipeline is visible.
    #css = remove_unnecessary_whitespace(css)
    css = remove_unnecessary_semicolons(css)
    css = condense_zero_units(css)
    css = condense_multidimensional_zeros(css)
    css = condense_floating_points(css)
    css = normalize_rgb_colors_to_hex(css)
    css = condense_hex_colors(css)
    if wrap is not None:
        css = wrap_css_lines(css, wrap)
    # Restore the Box Model Hack literal masked above.
    css = css.replace("___PSEUDOCLASSBMH___", '"\\"}\\""')
    css = condense_semicolons(css)
    return css.strip()
def main():
    """Command-line entry point: read CSS on stdin, write minified CSS to stdout."""
    import optparse
    import sys

    parser = optparse.OptionParser(
        prog = "cssmin", version = __version__,
        usage = "%prog [--wrap N]",
        description = """Reads raw CSS from stdin, and writes compressed CSS to stdout.""")
    parser.add_option(
        '-w', '--wrap', type = 'int', default = None, metavar = 'N',
        help = "Wrap output to approximately N chars per line.")
    opts, _args = parser.parse_args()
    sys.stdout.write(cssmin(sys.stdin.read(), wrap = opts.wrap))


if __name__ == '__main__':
    main()

218
libs/minify/jsmin.py Normal file
View File

@@ -0,0 +1,218 @@
#!/usr/bin/python
# This code is original from jsmin by Douglas Crockford, it was translated to
# Python by Baruch Even. The original code had the following copyright and
# license.
#
# /* jsmin.c
# 2007-05-22
#
# Copyright (c) 2002 Douglas Crockford (www.crockford.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# The Software shall be used for Good, not Evil.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# */
from StringIO import StringIO
def jsmin(js):
    """Return the JavaScript source `js` minified by `JavascriptMinify`.

    Fix: the local result variable previously shadowed the builtin `str`;
    renamed to `result` with no behavior change.
    """
    ins = StringIO(js)
    outs = StringIO()
    JavascriptMinify().minify(ins, outs)
    result = outs.getvalue()
    # The state machine seeds itself with a newline, which typically shows
    # up at the front of the output; drop a single leading one.
    if len(result) > 0 and result[0] == '\n':
        result = result[1:]
    return result
def isAlphanum(c):
    """return true if the character is a letter, digit, underscore,
    dollar sign, or non-ASCII character.
    """
    if 'a' <= c <= 'z' or 'A' <= c <= 'Z' or '0' <= c <= '9':
        return True
    if c == '_' or c == '$' or c == '\\':
        return True
    # Anything above ASCII 126 is treated as an identifier character too.
    return c is not None and ord(c) > 126
class UnterminatedComment(Exception):
    """Raised when a `/* ... */` comment reaches end of input unclosed."""
class UnterminatedStringLiteral(Exception):
    """Raised when a quoted string ends at a control character or EOF."""
class UnterminatedRegularExpression(Exception):
    """Raised when a regex literal is never closed by a `/`."""
class JavascriptMinify(object):
    """Python port of Douglas Crockford's jsmin state machine.

    `minify` streams characters from `instream` to `outstream`, holding a
    two-character window (`theA`, `theB`) plus one lookahead character
    (`theLookahead`) as its only state.
    """

    def _outA(self):
        # Emit the leading character of the two-char window.
        self.outstream.write(self.theA)

    def _outB(self):
        # Emit the trailing character of the two-char window.
        self.outstream.write(self.theB)

    def _get(self):
        """return the next character from stdin. Watch out for lookahead. If
        the character is a control character, translate it to a space or
        linefeed.
        """
        c = self.theLookahead
        self.theLookahead = None
        if c == None:
            c = self.instream.read(1)
        if c >= ' ' or c == '\n':
            return c
        if c == '': # EOF
            return '\000'
        if c == '\r':
            return '\n'
        # Any other control character becomes a plain space.
        return ' '

    def _peek(self):
        """Look at the next character without consuming it."""
        self.theLookahead = self._get()
        return self.theLookahead

    def _next(self):
        """get the next character, excluding comments. peek() is used to see
        if a '/' is followed by a '/' or '*'.
        """
        c = self._get()
        if c == '/':
            p = self._peek()
            if p == '/':
                # Line comment: skip to end of line, return the newline.
                c = self._get()
                while c > '\n':
                    c = self._get()
                return c
            if p == '*':
                # Block comment: skip to the closing `*/`, return a space.
                c = self._get()
                while 1:
                    c = self._get()
                    if c == '*':
                        if self._peek() == '/':
                            self._get()
                            return ' '
                    if c == '\000':
                        raise UnterminatedComment()
        return c

    def _action(self, action):
        """do something! What you do is determined by the argument:
           1   Output A. Copy B to A. Get the next B.
           2   Copy B to A. Get the next B. (Delete A).
           3   Get the next B. (Delete B).
           action treats a string as a single character. Wow!
           action recognizes a regular expression if it is preceded by ( or , or =.
        """
        if action <= 1:
            self._outA()
        if action <= 2:
            self.theA = self.theB
            if self.theA == "'" or self.theA == '"':
                # Copy a quoted string verbatim, honoring backslash escapes.
                while 1:
                    self._outA()
                    self.theA = self._get()
                    if self.theA == self.theB:
                        break
                    if self.theA <= '\n':
                        raise UnterminatedStringLiteral()
                    if self.theA == '\\':
                        self._outA()
                        self.theA = self._get()
        if action <= 3:
            self.theB = self._next()
            # A '/' after one of these characters starts a regex literal,
            # not a division; copy it through verbatim.
            if self.theB == '/' and (self.theA == '(' or self.theA == ',' or
                                     self.theA == '=' or self.theA == ':' or
                                     self.theA == '[' or self.theA == '?' or
                                     self.theA == '!' or self.theA == '&' or
                                     self.theA == '|' or self.theA == ';' or
                                     self.theA == '{' or self.theA == '}' or
                                     self.theA == '\n'):
                self._outA()
                self._outB()
                while 1:
                    self.theA = self._get()
                    if self.theA == '/':
                        break
                    elif self.theA == '\\':
                        self._outA()
                        self.theA = self._get()
                    elif self.theA <= '\n':
                        raise UnterminatedRegularExpression()
                    self._outA()
                self.theB = self._next()

    def _jsmin(self):
        """Copy the input to the output, deleting the characters which are
        insignificant to JavaScript. Comments will be removed. Tabs will be
        replaced with spaces. Carriage returns will be replaced with linefeeds.
        Most spaces and linefeeds will be removed.
        """
        self.theA = '\n'
        self._action(3)
        while self.theA != '\000':
            if self.theA == ' ':
                # A space is kept only between two identifier characters.
                if isAlphanum(self.theB):
                    self._action(1)
                else:
                    self._action(2)
            elif self.theA == '\n':
                if self.theB in ['{', '[', '(', '+', '-']:
                    self._action(1)
                elif self.theB == ' ':
                    self._action(3)
                else:
                    # Keep the newline only before identifier characters.
                    if isAlphanum(self.theB):
                        self._action(1)
                    else:
                        self._action(2)
            else:
                if self.theB == ' ':
                    if isAlphanum(self.theA):
                        self._action(1)
                    else:
                        self._action(3)
                elif self.theB == '\n':
                    # Keep a newline after tokens that could end a statement.
                    if self.theA in ['}', ']', ')', '+', '-', '"', '\'']:
                        self._action(1)
                    else:
                        if isAlphanum(self.theA):
                            self._action(1)
                        else:
                            self._action(3)
                else:
                    self._action(1)

    def minify(self, instream, outstream):
        """Minify `instream` into `outstream`.

        Closes `instream` when done; `outstream` is left open so the
        caller can read its contents.
        """
        self.instream = instream
        self.outstream = outstream
        self.theA = '\n'
        self.theB = None
        self.theLookahead = None

        self._jsmin()
        self.instream.close()
if __name__ == '__main__':
    # CLI usage: minify JavaScript from stdin to stdout.
    import sys
    jsm = JavascriptMinify()
    jsm.minify(sys.stdin, sys.stdout)

View File

@@ -16,7 +16,7 @@
"""The Tornado web server and tools."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
# version is a human-readable version number.
@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, with_statement
# is zero for an official release, positive for a development branch,
# or negative for a release candidate (after the base version number
# has been incremented)
version = "2.4.post2"
version_info = (2, 4, 0, 2)
version = "2.4.post3"
version_info = (2, 4, 0, 3)

View File

@@ -44,23 +44,63 @@ Example usage for Google OpenID::
# Save the user with, e.g., set_secure_cookie()
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import functools
import hashlib
import hmac
import time
import urllib
import urlparse
import uuid
from tornado.concurrent import Future, chain_future, return_future
from tornado import gen
from tornado import httpclient
from tornado import escape
from tornado.httputil import url_concat
from tornado.log import gen_log
from tornado.util import bytes_type, b
from tornado.util import bytes_type, u, unicode_type, ArgReplacer
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
import urllib.parse as urllib_parse # py3
except ImportError:
import urllib as urllib_parse # py2
class AuthError(Exception):
pass
def _auth_future_to_callback(callback, future):
try:
result = future.result()
except AuthError as e:
gen_log.warning(str(e))
result = None
callback(result)
def _auth_return_future(f):
"""Similar to tornado.concurrent.return_future, but uses the auth
module's legacy callback interface.
Note that when using this decorator the ``callback`` parameter
inside the function will actually be a future.
"""
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = Future()
callback, args, kwargs = replacer.replace(future, args, kwargs)
if callback is not None:
future.add_done_callback(
functools.partial(_auth_future_to_callback, callback))
f(*args, **kwargs)
return future
return wrapper
class OpenIdMixin(object):
"""Abstract implementation of OpenID and Attribute Exchange.
@@ -81,8 +121,9 @@ class OpenIdMixin(object):
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
@_auth_return_future
def get_authenticated_user(self, callback, http_client=None):
"""Fetches the authenticated user data upon redirect.
@@ -91,23 +132,23 @@ class OpenIdMixin(object):
methods.
"""
# Verify the OpenID response via direct request to the OP
args = dict((k, v[-1]) for k, v in self.request.arguments.iteritems())
args["openid.mode"] = u"check_authentication"
args = dict((k, v[-1]) for k, v in self.request.arguments.items())
args["openid.mode"] = u("check_authentication")
url = self._OPENID_ENDPOINT
if http_client is None:
http_client = self.get_auth_http_client()
http_client.fetch(url, self.async_callback(
self._on_authentication_verified, callback),
method="POST", body=urllib.urlencode(args))
method="POST", body=urllib_parse.urlencode(args))
def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
url = urlparse.urljoin(self.request.full_url(), callback_uri)
args = {
"openid.ns": "http://specs.openid.net/auth/2.0",
"openid.claimed_id":
"http://specs.openid.net/auth/2.0/identifier_select",
"http://specs.openid.net/auth/2.0/identifier_select",
"openid.identity":
"http://specs.openid.net/auth/2.0/identifier_select",
"http://specs.openid.net/auth/2.0/identifier_select",
"openid.return_to": url,
"openid.realm": urlparse.urljoin(url, '/'),
"openid.mode": "checkid_setup",
@@ -124,11 +165,11 @@ class OpenIdMixin(object):
required += ["firstname", "fullname", "lastname"]
args.update({
"openid.ax.type.firstname":
"http://axschema.org/namePerson/first",
"http://axschema.org/namePerson/first",
"openid.ax.type.fullname":
"http://axschema.org/namePerson",
"http://axschema.org/namePerson",
"openid.ax.type.lastname":
"http://axschema.org/namePerson/last",
"http://axschema.org/namePerson/last",
})
known_attrs = {
"email": "http://axschema.org/contact/email",
@@ -142,40 +183,40 @@ class OpenIdMixin(object):
if oauth_scope:
args.update({
"openid.ns.oauth":
"http://specs.openid.net/extensions/oauth/1.0",
"http://specs.openid.net/extensions/oauth/1.0",
"openid.oauth.consumer": self.request.host.split(":")[0],
"openid.oauth.scope": oauth_scope,
})
return args
def _on_authentication_verified(self, callback, response):
if response.error or b("is_valid:true") not in response.body:
gen_log.warning("Invalid OpenID response: %s", response.error or
response.body)
callback(None)
def _on_authentication_verified(self, future, response):
if response.error or b"is_valid:true" not in response.body:
future.set_exception(AuthError(
"Invalid OpenID response: %s" % (response.error or
response.body)))
return
# Make sure we got back at least an email from attribute exchange
ax_ns = None
for name in self.request.arguments.iterkeys():
for name in self.request.arguments:
if name.startswith("openid.ns.") and \
self.get_argument(name) == u"http://openid.net/srv/ax/1.0":
self.get_argument(name) == u("http://openid.net/srv/ax/1.0"):
ax_ns = name[10:]
break
def get_ax_arg(uri):
if not ax_ns:
return u""
return u("")
prefix = "openid." + ax_ns + ".type."
ax_name = None
for name in self.request.arguments.iterkeys():
for name in self.request.arguments.keys():
if self.get_argument(name) == uri and name.startswith(prefix):
part = name[len(prefix):]
ax_name = "openid." + ax_ns + ".value." + part
break
if not ax_name:
return u""
return self.get_argument(ax_name, u"")
return u("")
return self.get_argument(ax_name, u(""))
email = get_ax_arg("http://axschema.org/contact/email")
name = get_ax_arg("http://axschema.org/namePerson")
@@ -194,7 +235,7 @@ class OpenIdMixin(object):
if name:
user["name"] = name
elif name_parts:
user["name"] = u" ".join(name_parts)
user["name"] = u(" ").join(name_parts)
elif email:
user["name"] = email.split("@")[0]
if email:
@@ -206,7 +247,7 @@ class OpenIdMixin(object):
claimed_id = self.get_argument("openid.claimed_id", None)
if claimed_id:
user["claimed_id"] = claimed_id
callback(user)
future.set_result(user)
def get_auth_http_client(self):
"""Returns the AsyncHTTPClient instance to be used for auth requests.
@@ -248,7 +289,7 @@ class OAuthMixin(object):
self.async_callback(
self._on_request_token,
self._OAUTH_AUTHORIZE_URL,
callback_uri))
callback_uri))
else:
http_client.fetch(
self._oauth_request_token_url(),
@@ -256,6 +297,7 @@ class OAuthMixin(object):
self._on_request_token, self._OAUTH_AUTHORIZE_URL,
callback_uri))
@_auth_return_future
def get_authenticated_user(self, callback, http_client=None):
"""Gets the OAuth authorized user and access token on callback.
@@ -267,19 +309,19 @@ class OAuthMixin(object):
to this service on behalf of the user.
"""
future = callback
request_key = escape.utf8(self.get_argument("oauth_token"))
oauth_verifier = self.get_argument("oauth_verifier", None)
request_cookie = self.get_cookie("_oauth_request_token")
if not request_cookie:
gen_log.warning("Missing OAuth request token cookie")
callback(None)
future.set_exception(AuthError(
"Missing OAuth request token cookie"))
return
self.clear_cookie("_oauth_request_token")
cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
if cookie_key != request_key:
gen_log.info((cookie_key, request_key, request_cookie))
gen_log.warning("Request token does not match cookie")
callback(None)
future.set_exception(AuthError(
"Request token does not match cookie"))
return
token = dict(key=cookie_key, secret=cookie_secret)
if oauth_verifier:
@@ -312,23 +354,23 @@ class OAuthMixin(object):
signature = _oauth_signature(consumer_token, "GET", url, args)
args["oauth_signature"] = signature
return url + "?" + urllib.urlencode(args)
return url + "?" + urllib_parse.urlencode(args)
def _on_request_token(self, authorize_url, callback_uri, response):
if response.error:
raise Exception("Could not get request token")
request_token = _oauth_parse_response(response.body)
data = (base64.b64encode(request_token["key"]) + b("|") +
data = (base64.b64encode(request_token["key"]) + b"|" +
base64.b64encode(request_token["secret"]))
self.set_cookie("_oauth_request_token", data)
args = dict(oauth_token=request_token["key"])
if callback_uri == "oob":
self.finish(authorize_url + "?" + urllib.urlencode(args))
self.finish(authorize_url + "?" + urllib_parse.urlencode(args))
return
elif callback_uri:
args["oauth_callback"] = urlparse.urljoin(
self.request.full_url(), callback_uri)
self.redirect(authorize_url + "?" + urllib.urlencode(args))
self.redirect(authorize_url + "?" + urllib_parse.urlencode(args))
def _oauth_access_token_url(self, request_token):
consumer_token = self._oauth_consumer_token()
@@ -352,27 +394,36 @@ class OAuthMixin(object):
request_token)
args["oauth_signature"] = signature
return url + "?" + urllib.urlencode(args)
return url + "?" + urllib_parse.urlencode(args)
def _on_access_token(self, callback, response):
def _on_access_token(self, future, response):
if response.error:
gen_log.warning("Could not fetch access token")
callback(None)
future.set_exception(AuthError("Could not fetch access token"))
return
access_token = _oauth_parse_response(response.body)
self._oauth_get_user(access_token, self.async_callback(
self._on_oauth_get_user, access_token, callback))
self._oauth_get_user_future(access_token).add_done_callback(
self.async_callback(self._on_oauth_get_user, access_token, future))
@return_future
def _oauth_get_user_future(self, access_token, callback):
# By default, call the old-style _oauth_get_user, but new code
# should override this method instead.
self._oauth_get_user(access_token, callback)
def _oauth_get_user(self, access_token, callback):
raise NotImplementedError()
def _on_oauth_get_user(self, access_token, callback, user):
def _on_oauth_get_user(self, access_token, future, user_future):
if user_future.exception() is not None:
future.set_exception(user_future.exception())
return
user = user_future.result()
if not user:
callback(None)
future.set_exception(AuthError("Error getting user"))
return
user["access_token"] = access_token
callback(user)
future.set_result(user)
def _oauth_request_parameters(self, url, access_token, parameters={},
method="GET"):
@@ -395,7 +446,7 @@ class OAuthMixin(object):
args.update(parameters)
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
signature = _oauth10a_signature(consumer_token, method, url, args,
access_token)
access_token)
else:
signature = _oauth_signature(consumer_token, method, url, args,
access_token)
@@ -425,13 +476,13 @@ class OAuth2Mixin(object):
process.
"""
args = {
"redirect_uri": redirect_uri,
"client_id": client_id
"redirect_uri": redirect_uri,
"client_id": client_id
}
if extra_params:
args.update(extra_params)
self.redirect(
url_concat(self._OAUTH_AUTHORIZE_URL, args))
url_concat(self._OAUTH_AUTHORIZE_URL, args))
def _oauth_request_token_url(self, redirect_uri=None, client_id=None,
client_secret=None, code=None,
@@ -442,7 +493,7 @@ class OAuth2Mixin(object):
code=code,
client_id=client_id,
client_secret=client_secret,
)
)
if extra_params:
args.update(extra_params)
return url_concat(url, args)
@@ -499,8 +550,9 @@ class TwitterMixin(OAuthMixin):
http.fetch(self._oauth_request_token_url(callback_uri=callback_uri), self.async_callback(
self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None))
def twitter_request(self, path, callback, access_token=None,
post_args=None, **args):
@_auth_return_future
def twitter_request(self, path, callback=None, access_token=None,
post_args=None, **args):
"""Fetches the given API path, e.g., "/statuses/user_timeline/btaylor"
The path should not include the format (we automatically append
@@ -553,22 +605,22 @@ class TwitterMixin(OAuthMixin):
url, access_token, all_args, method=method)
args.update(oauth)
if args:
url += "?" + urllib.urlencode(args)
callback = self.async_callback(self._on_twitter_request, callback)
url += "?" + urllib_parse.urlencode(args)
http = self.get_auth_http_client()
http_callback = self.async_callback(self._on_twitter_request, callback)
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
callback=http_callback)
else:
http.fetch(url, callback=callback)
http.fetch(url, callback=http_callback)
def _on_twitter_request(self, callback, response):
def _on_twitter_request(self, future, response):
if response.error:
gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
future.set_exception(AuthError(
"Error response %s fetching %s" % (response.error,
response.request.url)))
return
callback(escape.json_decode(response.body))
future.set_result(escape.json_decode(response.body))
def _oauth_consumer_token(self):
self.require_setting("twitter_consumer_key", "Twitter OAuth")
@@ -577,13 +629,12 @@ class TwitterMixin(OAuthMixin):
key=self.settings["twitter_consumer_key"],
secret=self.settings["twitter_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
callback = self.async_callback(self._parse_user_response, callback)
self.twitter_request(
"/users/show/" + escape.native_str(access_token[b("screen_name")]),
access_token=access_token, callback=callback)
def _parse_user_response(self, callback, user):
@return_future
@gen.engine
def _oauth_get_user_future(self, access_token, callback):
user = yield self.twitter_request(
"/users/show/" + escape.native_str(access_token[b"screen_name"]),
access_token=access_token)
if user:
user["username"] = user["screen_name"]
callback(user)
@@ -629,6 +680,7 @@ class FriendFeedMixin(OAuthMixin):
_OAUTH_NO_CALLBACKS = True
_OAUTH_VERSION = "1.0"
@_auth_return_future
def friendfeed_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/bret/friends"
@@ -675,22 +727,22 @@ class FriendFeedMixin(OAuthMixin):
url, access_token, all_args, method=method)
args.update(oauth)
if args:
url += "?" + urllib.urlencode(args)
url += "?" + urllib_parse.urlencode(args)
callback = self.async_callback(self._on_friendfeed_request, callback)
http = self.get_auth_http_client()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
def _on_friendfeed_request(self, callback, response):
def _on_friendfeed_request(self, future, response):
if response.error:
gen_log.warning("Error response %s fetching %s", response.error,
response.request.url)
callback(None)
future.set_exception(AuthError(
"Error response %s fetching %s" % (response.error,
response.request.url)))
return
callback(escape.json_decode(response.body))
future.set_result(escape.json_decode(response.body))
def _oauth_consumer_token(self):
self.require_setting("friendfeed_consumer_key", "FriendFeed OAuth")
@@ -699,12 +751,15 @@ class FriendFeedMixin(OAuthMixin):
key=self.settings["friendfeed_consumer_key"],
secret=self.settings["friendfeed_consumer_secret"])
@return_future
@gen.engine
def _oauth_get_user(self, access_token, callback):
callback = self.async_callback(self._parse_user_response, callback)
self.friendfeed_request(
user = yield self.friendfeed_request(
"/feedinfo/" + access_token["username"],
include="id,name,description", access_token=access_token,
callback=callback)
include="id,name,description", access_token=access_token)
if user:
user["username"] = user["id"]
callback(user)
def _parse_user_response(self, callback, user):
if user:
@@ -755,15 +810,16 @@ class GoogleMixin(OpenIdMixin, OAuthMixin):
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs,
oauth_scope=oauth_scope)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
@_auth_return_future
def get_authenticated_user(self, callback):
"""Fetches the authenticated user data upon redirect."""
# Look to see if we are doing combined OpenID/OAuth
oauth_ns = ""
for name, values in self.request.arguments.iteritems():
for name, values in self.request.arguments.items():
if name.startswith("openid.ns.") and \
values[-1] == u"http://specs.openid.net/extensions/oauth/1.0":
values[-1] == b"http://specs.openid.net/extensions/oauth/1.0":
oauth_ns = name[10:]
break
token = self.get_argument("openid." + oauth_ns + ".request_token", "")
@@ -773,7 +829,8 @@ class GoogleMixin(OpenIdMixin, OAuthMixin):
http.fetch(self._oauth_access_token_url(token),
self.async_callback(self._on_access_token, callback))
else:
OpenIdMixin.get_authenticated_user(self, callback)
chain_future(OpenIdMixin.get_authenticated_user(self),
callback)
def _oauth_consumer_token(self):
self.require_setting("google_consumer_key", "Google OAuth")
@@ -782,15 +839,16 @@ class GoogleMixin(OpenIdMixin, OAuthMixin):
key=self.settings["google_consumer_key"],
secret=self.settings["google_consumer_secret"])
def _oauth_get_user(self, access_token, callback):
OpenIdMixin.get_authenticated_user(self, callback)
def _oauth_get_user_future(self, access_token, callback):
return OpenIdMixin.get_authenticated_user(self)
class FacebookMixin(object):
"""Facebook Connect authentication.
New applications should consider using `FacebookGraphMixin` below instead
of this class.
*Deprecated:* New applications should use `FacebookGraphMixin`
below instead of this class. This class does not support the
Future-based interface seen on other classes in this module.
To authenticate with Facebook, register your application with
Facebook at http://www.facebook.com/developers/apps.php. Then
@@ -837,11 +895,11 @@ class FacebookMixin(object):
args["cancel_url"] = urlparse.urljoin(
self.request.full_url(), cancel_uri)
if extended_permissions:
if isinstance(extended_permissions, (unicode, bytes_type)):
if isinstance(extended_permissions, (unicode_type, bytes_type)):
extended_permissions = [extended_permissions]
args["req_perms"] = ",".join(extended_permissions)
self.redirect("http://www.facebook.com/login.php?" +
urllib.urlencode(args))
urllib_parse.urlencode(args))
def authorize_redirect(self, extended_permissions, callback_uri=None,
cancel_uri=None):
@@ -923,7 +981,7 @@ class FacebookMixin(object):
args["format"] = "json"
args["sig"] = self._signature(args)
url = "http://api.facebook.com/restserver.php?" + \
urllib.urlencode(args)
urllib_parse.urlencode(args)
http = self.get_auth_http_client()
http.fetch(url, callback=self.async_callback(
self._parse_response, callback))
@@ -966,7 +1024,7 @@ class FacebookMixin(object):
def _signature(self, args):
parts = ["%s=%s" % (n, args[n]) for n in sorted(args.keys())]
body = "".join(parts) + self.settings["facebook_secret"]
if isinstance(body, unicode):
if isinstance(body, unicode_type):
body = body.encode("utf-8")
return hashlib.md5(body).hexdigest()
@@ -986,7 +1044,7 @@ class FacebookGraphMixin(OAuth2Mixin):
_OAUTH_NO_CALLBACKS = False
def get_authenticated_user(self, redirect_uri, client_id, client_secret,
code, callback, extra_fields=None):
code, callback, extra_fields=None):
"""Handles the login for the Facebook user, returning a user object.
Example usage::
@@ -1014,10 +1072,10 @@ class FacebookGraphMixin(OAuth2Mixin):
"""
http = self.get_auth_http_client()
args = {
"redirect_uri": redirect_uri,
"code": code,
"client_id": client_id,
"client_secret": client_secret,
"redirect_uri": redirect_uri,
"code": code,
"client_id": client_id,
"client_secret": client_secret,
}
fields = set(['id', 'name', 'first_name', 'last_name',
@@ -1026,11 +1084,11 @@ class FacebookGraphMixin(OAuth2Mixin):
fields.update(extra_fields)
http.fetch(self._oauth_request_token_url(**args),
self.async_callback(self._on_access_token, redirect_uri, client_id,
client_secret, callback, fields))
self.async_callback(self._on_access_token, redirect_uri, client_id,
client_secret, callback, fields))
def _on_access_token(self, redirect_uri, client_id, client_secret,
callback, fields, response):
callback, fields, response):
if response.error:
gen_log.warning('Facebook auth error: %s' % str(response))
callback(None)
@@ -1048,7 +1106,7 @@ class FacebookGraphMixin(OAuth2Mixin):
self._on_get_user_info, callback, session, fields),
access_token=session["access_token"],
fields=",".join(fields)
)
)
def _on_get_user_info(self, callback, session, fields, user):
if user is None:
@@ -1063,7 +1121,7 @@ class FacebookGraphMixin(OAuth2Mixin):
callback(fieldmap)
def facebook_request(self, path, callback, access_token=None,
post_args=None, **args):
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/btaylor/picture"
If the request is a POST, post_args should be provided. Query
@@ -1104,11 +1162,11 @@ class FacebookGraphMixin(OAuth2Mixin):
all_args.update(args)
if all_args:
url += "?" + urllib.urlencode(all_args)
url += "?" + urllib_parse.urlencode(all_args)
callback = self.async_callback(self._on_facebook_request, callback)
http = self.get_auth_http_client()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback)
@@ -1148,7 +1206,7 @@ def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
key_elems = [escape.utf8(consumer_token["secret"])]
key_elems.append(escape.utf8(token["secret"] if token else ""))
key = b("&").join(key_elems)
key = b"&".join(key_elems)
hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
@@ -1170,25 +1228,25 @@ def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
for k, v in sorted(parameters.items())))
base_string = "&".join(_oauth_escape(e) for e in base_elems)
key_elems = [escape.utf8(urllib.quote(consumer_token["secret"], safe='~'))]
key_elems.append(escape.utf8(urllib.quote(token["secret"], safe='~') if token else ""))
key = b("&").join(key_elems)
key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))]
key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else ""))
key = b"&".join(key_elems)
hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
def _oauth_escape(val):
if isinstance(val, unicode):
if isinstance(val, unicode_type):
val = val.encode("utf-8")
return urllib.quote(val, safe="~")
return urllib_parse.quote(val, safe="~")
def _oauth_parse_response(body):
p = escape.parse_qs(body, keep_blank_values=False)
token = dict(key=p[b("oauth_token")][0], secret=p[b("oauth_token_secret")][0])
token = dict(key=p[b"oauth_token"][0], secret=p[b"oauth_token_secret"][0])
# Add the extra parameters the Provider included to the token
special = (b("oauth_token"), b("oauth_token_secret"))
special = (b"oauth_token", b"oauth_token_secret")
token.update((k, p[k][0]) for k in p if k not in special)
return token

View File

@@ -31,7 +31,7 @@ Additionally, modifying these variables will cause reloading to behave
incorrectly.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import os
import sys
@@ -79,6 +79,7 @@ import weakref
from tornado import ioloop
from tornado.log import gen_log
from tornado import process
from tornado.util import exec_in
try:
import signal
@@ -91,6 +92,7 @@ _reload_hooks = []
_reload_attempted = False
_io_loops = weakref.WeakKeyDictionary()
def start(io_loop=None, check_time=500):
"""Restarts the process automatically when a module is modified.
@@ -103,7 +105,7 @@ def start(io_loop=None, check_time=500):
_io_loops[io_loop] = True
if len(_io_loops) > 1:
gen_log.warning("tornado.autoreload started more than once in the same process")
add_reload_hook(functools.partial(_close_all_fds, io_loop))
add_reload_hook(functools.partial(io_loop.close, all_fds=True))
modify_times = {}
callback = functools.partial(_reload_on_update, modify_times)
scheduler = ioloop.PeriodicCallback(callback, check_time, io_loop=io_loop)
@@ -141,14 +143,6 @@ def add_reload_hook(fn):
_reload_hooks.append(fn)
def _close_all_fds(io_loop):
for fd in io_loop._handlers.keys():
try:
os.close(fd)
except Exception:
pass
def _reload_on_update(modify_times):
if _reload_attempted:
# We already tried to reload and it didn't work, so don't try again.
@@ -204,7 +198,7 @@ def _reload():
# to ensure that the new process sees the same path we did.
path_prefix = '.' + os.pathsep
if (sys.path[0] == '' and
not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
os.environ["PYTHONPATH"] = (path_prefix +
os.environ.get("PYTHONPATH", ""))
if sys.platform == 'win32':
@@ -263,7 +257,7 @@ def main():
script = sys.argv[1]
sys.argv = sys.argv[1:]
else:
print >>sys.stderr, _USAGE
print(_USAGE, file=sys.stderr)
sys.exit(1)
try:
@@ -277,11 +271,11 @@ def main():
# Use globals as our "locals" dictionary so that
# something that tries to import __main__ (e.g. the unittest
# module) will see the right things.
exec f.read() in globals(), globals()
except SystemExit, e:
exec_in(f.read(), globals(), globals())
except SystemExit as e:
logging.basicConfig()
gen_log.info("Script exited with status %s", e.code)
except Exception, e:
except Exception as e:
logging.basicConfig()
gen_log.warning("Script exited with uncaught exception", exc_info=True)
# If an exception occurred at import time, the file with the error

View File

@@ -13,19 +13,21 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import functools
import sys
from tornado.stack_context import ExceptionStackContext
from tornado.util import raise_exc_info
from tornado.util import raise_exc_info, ArgReplacer
try:
from concurrent import futures
except ImportError:
futures = None
class ReturnValueIgnoredError(Exception):
pass
class DummyFuture(object):
def __init__(self):
@@ -89,39 +91,99 @@ if futures is None:
else:
Future = futures.Future
class DummyExecutor(object):
def submit(self, fn, *args, **kwargs):
future = Future()
try:
future.set_result(fn(*args, **kwargs))
except Exception, e:
except Exception as e:
future.set_exception(e)
return future
dummy_executor = DummyExecutor()
def run_on_executor(fn):
@functools.wraps(fn)
def wrapper(self, *args, **kwargs):
callback = kwargs.pop("callback")
callback = kwargs.pop("callback", None)
future = self.executor.submit(fn, self, *args, **kwargs)
if callback:
self.io_loop.add_future(future, callback)
return future
return wrapper
# TODO: this needs a better name
def future_wrap(f):
def return_future(f):
"""Decorator to make a function that returns via callback return a `Future`.
The wrapped function should take a ``callback`` keyword argument
and invoke it with one argument when it has finished. To signal failure,
the function can simply raise an exception (which will be
captured by the `stack_context` and passed along to the `Future`).
From the caller's perspective, the callback argument is optional.
If one is given, it will be invoked when the function is complete
with the `Future` as an argument. If no callback is given, the caller
should use the `Future` to wait for the function to complete
(perhaps by yielding it in a `gen.engine` function, or passing it
to `IOLoop.add_future`).
Usage::
@return_future
def future_func(arg1, arg2, callback):
# Do stuff (possibly asynchronous)
callback(result)
@gen.engine
def caller(callback):
yield future_func(arg1, arg2)
callback()
Note that ``@return_future`` and ``@gen.engine`` can be applied to the
same function, provided ``@return_future`` appears first.
"""
replacer = ArgReplacer(f, 'callback')
@functools.wraps(f)
def wrapper(*args, **kwargs):
future = Future()
if kwargs.get('callback') is not None:
future.add_done_callback(kwargs.pop('callback'))
kwargs['callback'] = future.set_result
callback, args, kwargs = replacer.replace(future.set_result,
args, kwargs)
if callback is not None:
future.add_done_callback(callback)
def handle_error(typ, value, tb):
future.set_exception(value)
return True
exc_info = None
with ExceptionStackContext(handle_error):
f(*args, **kwargs)
try:
result = f(*args, **kwargs)
if result is not None:
raise ReturnValueIgnoredError(
"@return_future should not be used with functions "
"that return values")
except:
exc_info = sys.exc_info()
raise
if exc_info is not None:
# If the initial synchronous part of f() raised an exception,
# go ahead and raise it to the caller directly without waiting
# for them to inspect the Future.
raise_exc_info(exc_info)
return future
return wrapper
def chain_future(a, b):
"""Chain two futures together so that when one completes, so does the other.
The result (success or failure) of ``a`` will be copied to ``b``.
"""
def copy(future):
assert future is a
if a.exception() is not None:
b.set_exception(a.exception())
else:
b.set_result(a.result())
a.add_done_callback(copy)

View File

@@ -16,9 +16,8 @@
"""Blocking and non-blocking HTTP client implementations using pycurl."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import cStringIO
import collections
import logging
import pycurl
@@ -30,20 +29,22 @@ from tornado import ioloop
from tornado.log import gen_log
from tornado import stack_context
from tornado.escape import utf8
from tornado.escape import utf8, native_str
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy
try:
from io import BytesIO # py3
except ImportError:
from cStringIO import StringIO as BytesIO # py2
class CurlAsyncHTTPClient(AsyncHTTPClient):
def initialize(self, io_loop=None, max_clients=10, defaults=None):
self.io_loop = io_loop
self.defaults = dict(HTTPRequest._DEFAULTS)
if defaults is not None:
self.defaults.update(defaults)
def initialize(self, io_loop, max_clients=10, defaults=None):
super(CurlAsyncHTTPClient, self).initialize(io_loop, defaults=defaults)
self._multi = pycurl.CurlMulti()
self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
self._curls = [_curl_create() for i in xrange(max_clients)]
self._curls = [_curl_create() for i in range(max_clients)]
self._free_list = self._curls[:]
self._requests = collections.deque()
self._fds = {}
@@ -69,19 +70,27 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
self._handle_force_timeout, 1000, io_loop=io_loop)
self._force_timeout_callback.start()
# Work around a bug in libcurl 7.29.0: Some fields in the curl
# multi object are initialized lazily, and its destructor will
# segfault if it is destroyed without having been used. Add
# and remove a dummy handle to make sure everything is
# initialized.
dummy_curl_handle = pycurl.Curl()
self._multi.add_handle(dummy_curl_handle)
self._multi.remove_handle(dummy_curl_handle)
def close(self):
self._force_timeout_callback.stop()
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
for curl in self._curls:
curl.close()
self._multi.close()
self._closed = True
super(CurlAsyncHTTPClient, self).close()
def fetch(self, request, callback, **kwargs):
if not isinstance(request, HTTPRequest):
request = HTTPRequest(url=request, **kwargs)
request = _RequestProxy(request, self.defaults)
self._requests.append((request, stack_context.wrap(callback)))
def fetch_impl(self, request, callback):
self._requests.append((request, callback))
self._process_queue()
self._set_timeout(0)
@@ -128,7 +137,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
while True:
try:
ret, num_handles = self._socket_action(fd, action)
except pycurl.error, e:
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
@@ -142,7 +151,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
try:
ret, num_handles = self._socket_action(
pycurl.SOCKET_TIMEOUT, 0)
except pycurl.error, e:
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
@@ -173,7 +182,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
while True:
try:
ret, num_handles = self._multi.socket_all()
except pycurl.error, e:
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
@@ -203,7 +212,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
(request, callback) = self._requests.popleft()
curl.info = {
"headers": httputil.HTTPHeaders(),
"buffer": cStringIO.StringIO(),
"buffer": BytesIO(),
"request": request,
"callback": callback,
"curl_start_time": time.time(),
@@ -247,7 +256,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
total=curl.getinfo(pycurl.TOTAL_TIME),
redirect=curl.getinfo(pycurl.REDIRECT_TIME),
)
)
try:
info["callback"](HTTPResponse(
request=info["request"], code=code, headers=info["headers"],
@@ -276,7 +285,7 @@ def _curl_create():
def _curl_setup_request(curl, request, buffer, headers):
curl.setopt(pycurl.URL, utf8(request.url))
curl.setopt(pycurl.URL, native_str(request.url))
# libcurl's magic "Expect: 100-continue" behavior causes delays
# with servers that don't support it (which include, among others,
@@ -296,10 +305,10 @@ def _curl_setup_request(curl, request, buffer, headers):
# Request headers may be either a regular dict or HTTPHeaders object
if isinstance(request.headers, httputil.HTTPHeaders):
curl.setopt(pycurl.HTTPHEADER,
[utf8("%s: %s" % i) for i in request.headers.get_all()])
[native_str("%s: %s" % i) for i in request.headers.get_all()])
else:
curl.setopt(pycurl.HTTPHEADER,
[utf8("%s: %s" % i) for i in request.headers.iteritems()])
[native_str("%s: %s" % i) for i in request.headers.items()])
if request.header_callback:
curl.setopt(pycurl.HEADERFUNCTION, request.header_callback)
@@ -307,15 +316,26 @@ def _curl_setup_request(curl, request, buffer, headers):
curl.setopt(pycurl.HEADERFUNCTION,
lambda line: _curl_header_callback(headers, line))
if request.streaming_callback:
curl.setopt(pycurl.WRITEFUNCTION, request.streaming_callback)
write_function = request.streaming_callback
else:
curl.setopt(pycurl.WRITEFUNCTION, buffer.write)
write_function = buffer.write
if type(b'') is type(''): # py2
curl.setopt(pycurl.WRITEFUNCTION, write_function)
else: # py3
# Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
# a fork/port. That version has a bug in which it passes unicode
# strings instead of bytes to the WRITEFUNCTION. This means that
# if you use a WRITEFUNCTION (which tornado always does), you cannot
# download arbitrary binary data. This needs to be fixed in the
# ported pycurl package, but in the meantime this lambda will
# make it work for downloading (utf8) text.
curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
if request.user_agent:
curl.setopt(pycurl.USERAGENT, utf8(request.user_agent))
curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
else:
curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
if request.network_interface:
@@ -329,7 +349,7 @@ def _curl_setup_request(curl, request, buffer, headers):
curl.setopt(pycurl.PROXYPORT, request.proxy_port)
if request.proxy_username:
credentials = '%s:%s' % (request.proxy_username,
request.proxy_password)
request.proxy_password)
curl.setopt(pycurl.PROXYUSERPWD, credentials)
else:
curl.setopt(pycurl.PROXY, '')
@@ -377,7 +397,7 @@ def _curl_setup_request(curl, request, buffer, headers):
# Handle curl's cryptic options for every individual HTTP method
if request.method in ("POST", "PUT"):
request_buffer = cStringIO.StringIO(utf8(request.body))
request_buffer = BytesIO(utf8(request.body))
curl.setopt(pycurl.READFUNCTION, request_buffer.read)
if request.method == "POST":
def ioctl(cmd):
@@ -391,7 +411,7 @@ def _curl_setup_request(curl, request, buffer, headers):
if request.auth_username is not None:
userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
curl.setopt(pycurl.USERPWD, utf8(userpwd))
curl.setopt(pycurl.USERPWD, native_str(userpwd))
gen_log.debug("%s %s (username: %r)", request.method, request.url,
request.auth_username)
else:

View File

@@ -1,112 +0,0 @@
/*
* Copyright 2009 Facebook
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#include "Python.h"
#include <string.h>
#include <sys/epoll.h>
#define MAX_EVENTS 24
/*
* Simple wrapper around epoll_create.
*/
static PyObject* _epoll_create(void) {
int fd = epoll_create(MAX_EVENTS);
if (fd == -1) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
return PyInt_FromLong(fd);
}
/*
* Simple wrapper around epoll_ctl. We throw an exception if the call fails
* rather than returning the error code since it is an infrequent (and likely
* catastrophic) event when it does happen.
*/
static PyObject* _epoll_ctl(PyObject* self, PyObject* args) {
int epfd, op, fd, events;
struct epoll_event event;
if (!PyArg_ParseTuple(args, "iiiI", &epfd, &op, &fd, &events)) {
return NULL;
}
memset(&event, 0, sizeof(event));
event.events = events;
event.data.fd = fd;
if (epoll_ctl(epfd, op, fd, &event) == -1) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
}
Py_INCREF(Py_None);
return Py_None;
}
/*
* Simple wrapper around epoll_wait. We return None if the call times out and
* throw an exception if an error occurs. Otherwise, we return a list of
* (fd, event) tuples.
*/
static PyObject* _epoll_wait(PyObject* self, PyObject* args) {
struct epoll_event events[MAX_EVENTS];
int epfd, timeout, num_events, i;
PyObject* list;
PyObject* tuple;
if (!PyArg_ParseTuple(args, "ii", &epfd, &timeout)) {
return NULL;
}
Py_BEGIN_ALLOW_THREADS
num_events = epoll_wait(epfd, events, MAX_EVENTS, timeout);
Py_END_ALLOW_THREADS
if (num_events == -1) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
list = PyList_New(num_events);
for (i = 0; i < num_events; i++) {
tuple = PyTuple_New(2);
PyTuple_SET_ITEM(tuple, 0, PyInt_FromLong(events[i].data.fd));
PyTuple_SET_ITEM(tuple, 1, PyInt_FromLong(events[i].events));
PyList_SET_ITEM(list, i, tuple);
}
return list;
}
/*
* Our method declararations
*/
static PyMethodDef kEpollMethods[] = {
{"epoll_create", (PyCFunction)_epoll_create, METH_NOARGS,
"Create an epoll file descriptor"},
{"epoll_ctl", _epoll_ctl, METH_VARARGS,
"Control an epoll file descriptor"},
{"epoll_wait", _epoll_wait, METH_VARARGS,
"Wait for events on an epoll file descriptor"},
{NULL, NULL, 0, NULL}
};
/*
* Module initialization
*/
PyMODINIT_FUNC initepoll(void) {
Py_InitModule("epoll", kEpollMethods);
}

View File

@@ -20,49 +20,34 @@ Also includes a few other miscellaneous string manipulation functions that
have crept in over time.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import htmlentitydefs
import re
import sys
import urllib
# Python3 compatibility: On python2.5, introduce the bytes alias from 2.6
try:
bytes
except Exception:
bytes = str
from tornado.util import bytes_type, unicode_type, basestring_type, u
try:
from urlparse import parse_qs # Python 2.6+
from urllib.parse import parse_qs # py3
except ImportError:
from cgi import parse_qs
from urlparse import parse_qs # Python 2.6+
# json module is in the standard library as of python 2.6; fall back to
# simplejson if present for older versions.
try:
import json
assert hasattr(json, "loads") and hasattr(json, "dumps")
_json_decode = json.loads
_json_encode = json.dumps
except Exception:
try:
import simplejson
_json_decode = lambda s: simplejson.loads(_unicode(s))
_json_encode = lambda v: simplejson.dumps(v)
except ImportError:
try:
# For Google AppEngine
from django.utils import simplejson
_json_decode = lambda s: simplejson.loads(_unicode(s))
_json_encode = lambda v: simplejson.dumps(v)
except ImportError:
def _json_decode(s):
raise NotImplementedError(
"A JSON parser is required, e.g., simplejson at "
"http://pypi.python.org/pypi/simplejson/")
_json_encode = _json_decode
import htmlentitydefs # py2
except ImportError:
import html.entities as htmlentitydefs # py3
try:
import urllib.parse as urllib_parse # py3
except ImportError:
import urllib as urllib_parse # py2
import json
try:
unichr
except NameError:
unichr = chr
_XHTML_ESCAPE_RE = re.compile('[&<>"]')
_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;'}
@@ -87,12 +72,12 @@ def json_encode(value):
# the javscript. Some json libraries do this escaping by default,
# although python's standard library does not, so we do it here.
# http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
return _json_encode(recursive_unicode(value)).replace("</", "<\\/")
return json.dumps(recursive_unicode(value)).replace("</", "<\\/")
def json_decode(value):
"""Returns Python objects for the given JSON string."""
return _json_decode(to_basestring(value))
return json.loads(to_basestring(value))
def squeeze(value):
@@ -102,7 +87,7 @@ def squeeze(value):
def url_escape(value):
"""Returns a valid URL-encoded version of the given value."""
return urllib.quote_plus(utf8(value))
return urllib_parse.quote_plus(utf8(value))
# python 3 changed things around enough that we need two separate
# implementations of url_unescape. We also need our own implementation
@@ -117,9 +102,9 @@ if sys.version_info[0] < 3:
the result is a unicode string in the specified encoding.
"""
if encoding is None:
return urllib.unquote_plus(utf8(value))
return urllib_parse.unquote_plus(utf8(value))
else:
return unicode(urllib.unquote_plus(utf8(value)), encoding)
return unicode_type(urllib_parse.unquote_plus(utf8(value)), encoding)
parse_qs_bytes = parse_qs
else:
@@ -132,9 +117,9 @@ else:
the result is a unicode string in the specified encoding.
"""
if encoding is None:
return urllib.parse.unquote_to_bytes(value)
return urllib_parse.unquote_to_bytes(value)
else:
return urllib.unquote_plus(to_basestring(value), encoding=encoding)
return urllib_parse.unquote_plus(to_basestring(value), encoding=encoding)
def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False):
"""Parses a query string like urlparse.parse_qs, but returns the
@@ -149,12 +134,12 @@ else:
result = parse_qs(qs, keep_blank_values, strict_parsing,
encoding='latin1', errors='strict')
encoded = {}
for k, v in result.iteritems():
for k, v in result.items():
encoded[k] = [i.encode('latin1') for i in v]
return encoded
_UTF8_TYPES = (bytes, type(None))
_UTF8_TYPES = (bytes_type, type(None))
def utf8(value):
@@ -165,10 +150,10 @@ def utf8(value):
"""
if isinstance(value, _UTF8_TYPES):
return value
assert isinstance(value, unicode)
assert isinstance(value, unicode_type)
return value.encode("utf-8")
_TO_UNICODE_TYPES = (unicode, type(None))
_TO_UNICODE_TYPES = (unicode_type, type(None))
def to_unicode(value):
@@ -179,7 +164,7 @@ def to_unicode(value):
"""
if isinstance(value, _TO_UNICODE_TYPES):
return value
assert isinstance(value, bytes)
assert isinstance(value, bytes_type)
return value.decode("utf-8")
# to_unicode was previously named _unicode not because it was private,
@@ -188,12 +173,12 @@ _unicode = to_unicode
# When dealing with the standard library across python 2 and 3 it is
# sometimes useful to have a direct conversion to the native string type
if str is unicode:
if str is unicode_type:
native_str = to_unicode
else:
native_str = utf8
_BASESTRING_TYPES = (basestring, type(None))
_BASESTRING_TYPES = (basestring_type, type(None))
def to_basestring(value):
@@ -207,7 +192,7 @@ def to_basestring(value):
"""
if isinstance(value, _BASESTRING_TYPES):
return value
assert isinstance(value, bytes)
assert isinstance(value, bytes_type)
return value.decode("utf-8")
@@ -217,12 +202,12 @@ def recursive_unicode(obj):
Supports lists, tuples, and dictionaries.
"""
if isinstance(obj, dict):
return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.iteritems())
return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items())
elif isinstance(obj, list):
return list(recursive_unicode(i) for i in obj)
elif isinstance(obj, tuple):
return tuple(recursive_unicode(i) for i in obj)
elif isinstance(obj, bytes):
elif isinstance(obj, bytes_type):
return to_unicode(obj)
else:
return obj
@@ -232,7 +217,9 @@ def recursive_unicode(obj):
# but it gets all exponential on certain patterns (such as too many trailing
# dots), causing the regex matcher to never return.
# This regex should avoid those problems.
_URL_RE = re.compile(ur"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)""")
# Use to_unicode instead of tornado.util.u - we don't want backslashes getting
# processed as escapes.
_URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""))
def linkify(text, shorten=False, extra_params="",
@@ -302,7 +289,7 @@ def linkify(text, shorten=False, extra_params="",
# (no more slug, etc), so it really just provides a little
# extra indication of shortening.
url = url[:proto_len] + parts[0] + "/" + \
parts[1][:8].split('?')[0].split('.')[0]
parts[1][:8].split('?')[0].split('.')[0]
if len(url) > max_len * 1.5: # still too long
url = url[:max_len]
@@ -321,7 +308,7 @@ def linkify(text, shorten=False, extra_params="",
# have a status bar, such as Safari by default)
params += ' title="%s"' % href
return u'<a href="%s"%s>%s</a>' % (href, params, url)
return u('<a href="%s"%s>%s</a>') % (href, params, url)
# First HTML-escape so that our strings are all safe.
# The regex is modified to avoid character entites other than &amp; so
@@ -344,7 +331,7 @@ def _convert_entity(m):
def _build_unicode_map():
unicode_map = {}
for name, value in htmlentitydefs.name2codepoint.iteritems():
for name, value in htmlentitydefs.name2codepoint.items():
unicode_map[name] = unichr(value)
return unicode_map

View File

@@ -62,10 +62,11 @@ it was called with one argument, the result is that argument. If it was
called with more than one argument or any keyword arguments, the result
is an `Arguments` object, which is a named tuple ``(args, kwargs)``.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import collections
import functools
import operator
import itertools
import sys
import types
@@ -276,15 +277,23 @@ class Multi(YieldPoint):
a list of ``YieldPoints``.
"""
def __init__(self, children):
assert all(isinstance(i, YieldPoint) for i in children)
self.children = children
self.children = []
for i in children:
if isinstance(i, Future):
i = YieldFuture(i)
self.children.append(i)
assert all(isinstance(i, YieldPoint) for i in self.children)
self.unfinished_children = set(self.children)
def start(self, runner):
for i in self.children:
i.start(runner)
def is_ready(self):
return all(i.is_ready() for i in self.children)
finished = list(itertools.takewhile(
lambda i: i.is_ready(), self.unfinished_children))
self.unfinished_children.difference_update(finished)
return not self.unfinished_children
def get_result(self):
return [i.get_result() for i in self.children]
@@ -305,10 +314,12 @@ class Runner(object):
"""Internal implementation of `tornado.gen.engine`.
Maintains information about pending callbacks and their results.
``final_callback`` is run after the generator exits.
"""
def __init__(self, gen, deactivate_stack_context):
def __init__(self, gen, final_callback):
self.gen = gen
self.deactivate_stack_context = deactivate_stack_context
self.final_callback = final_callback
self.yield_point = _NullYieldPoint()
self.pending_callbacks = set()
self.results = {}
@@ -373,16 +384,15 @@ class Runner(object):
raise LeakedCallbackError(
"finished without waiting for callbacks %r" %
self.pending_callbacks)
self.deactivate_stack_context()
self.deactivate_stack_context = None
self.final_callback()
self.final_callback = None
return
except Exception:
self.finished = True
raise
if isinstance(yielded, list):
yielded = Multi(yielded)
if isinstance(yielded, Future):
# TODO: lists of futures
elif isinstance(yielded, Future):
yielded = YieldFuture(yielded)
if isinstance(yielded, YieldPoint):
self.yield_point = yielded
@@ -414,20 +424,4 @@ class Runner(object):
else:
return False
# in python 2.6+ this could be a collections.namedtuple
class Arguments(tuple):
"""The result of a yield expression whose callback had more than one
argument (or keyword arguments).
The `Arguments` object can be used as a tuple ``(args, kwargs)``
or an object with attributes ``args`` and ``kwargs``.
"""
__slots__ = ()
def __new__(cls, args, kwargs):
return tuple.__new__(cls, (args, kwargs))
args = property(operator.itemgetter(0))
kwargs = property(operator.itemgetter(1))
Arguments = collections.namedtuple('Arguments', ['args', 'kwargs'])

View File

@@ -29,14 +29,12 @@ you use a recent version of ``libcurl`` and ``pycurl``. Currently the minimum
supported version is 7.18.2, and the recommended version is 7.21.1 or newer.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import calendar
import email.utils
import httplib
import time
import weakref
from tornado.concurrent import Future
from tornado.escape import utf8
from tornado import httputil, stack_context
from tornado.ioloop import IOLoop
@@ -134,7 +132,7 @@ class AsyncHTTPClient(Configurable):
def _async_clients(cls):
attr_name = '_async_client_dict_' + cls.__name__
if not hasattr(cls, attr_name):
setattr(cls, attr_name, weakref.WeakKeyDictionary())
setattr(cls, attr_name, weakref.WeakKeyDictionary())
return getattr(cls, attr_name)
def __new__(cls, io_loop=None, force_instance=False, **kwargs):
@@ -147,6 +145,12 @@ class AsyncHTTPClient(Configurable):
cls._async_clients()[io_loop] = instance
return instance
def initialize(self, io_loop, defaults=None):
self.io_loop = io_loop
self.defaults = dict(HTTPRequest._DEFAULTS)
if defaults is not None:
self.defaults.update(defaults)
def close(self):
"""Destroys this http client, freeing any file descriptors used.
Not needed in normal use, but may be helpful in unittests that
@@ -156,7 +160,7 @@ class AsyncHTTPClient(Configurable):
if self._async_clients().get(self.io_loop) is self:
del self._async_clients()[self.io_loop]
def fetch(self, request, callback, **kwargs):
def fetch(self, request, callback=None, **kwargs):
"""Executes a request, calling callback with an `HTTPResponse`.
The request may be either a string URL or an `HTTPRequest` object.
@@ -168,6 +172,37 @@ class AsyncHTTPClient(Configurable):
encountered during the request. You can call response.rethrow() to
throw the exception (if any) in the callback.
"""
if not isinstance(request, HTTPRequest):
request = HTTPRequest(url=request, **kwargs)
# We may modify this (to add Host, Accept-Encoding, etc),
# so make sure we don't modify the caller's object. This is also
# where normal dicts get converted to HTTPHeaders objects.
request.headers = httputil.HTTPHeaders(request.headers)
request = _RequestProxy(request, self.defaults)
future = Future()
if callback is not None:
callback = stack_context.wrap(callback)
def handle_future(future):
exc = future.exception()
if isinstance(exc, HTTPError) and exc.response is not None:
response = exc.response
elif exc is not None:
response = HTTPResponse(
request, 599, error=exc,
request_time=time.time() - request.start_time)
else:
response = future.result()
self.io_loop.add_callback(callback, response)
future.add_done_callback(handle_future)
def handle_response(response):
if response.error:
future.set_exception(response.error)
else:
future.set_result(response)
self.fetch_impl(request, handle_response)
return future
def fetch_impl(self, request, callback):
raise NotImplementedError()
@classmethod
@@ -280,9 +315,8 @@ class HTTPRequest(object):
if headers is None:
headers = httputil.HTTPHeaders()
if if_modified_since:
timestamp = calendar.timegm(if_modified_since.utctimetuple())
headers["If-Modified-Since"] = email.utils.formatdate(
timestamp, localtime=False, usegmt=True)
headers["If-Modified-Since"] = httputil.format_timestamp(
if_modified_since)
self.proxy_host = proxy_host
self.proxy_port = proxy_port
self.proxy_username = proxy_username
@@ -346,7 +380,7 @@ class HTTPResponse(object):
time_info=None, reason=None):
self.request = request
self.code = code
self.reason = reason or httplib.responses.get(code, "Unknown")
self.reason = reason or httputil.responses.get(code, "Unknown")
if headers is not None:
self.headers = headers
else:
@@ -383,7 +417,7 @@ class HTTPResponse(object):
raise self.error
def __repr__(self):
args = ",".join("%s=%r" % i for i in self.__dict__.iteritems())
args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items()))
return "%s(%s)" % (self.__class__.__name__, args)
@@ -403,10 +437,11 @@ class HTTPError(Exception):
"""
def __init__(self, code, message=None, response=None):
self.code = code
message = message or httplib.responses.get(code, "Unknown")
message = message or httputil.responses.get(code, "Unknown")
self.response = response
Exception.__init__(self, "HTTP %d: %s" % (self.code, message))
class _RequestProxy(object):
"""Combines an object with a dictionary of defaults.
@@ -440,15 +475,15 @@ def main():
follow_redirects=options.follow_redirects,
validate_cert=options.validate_cert,
)
except HTTPError, e:
except HTTPError as e:
if e.response is not None:
response = e.response
else:
raise
if options.print_headers:
print response.headers
print(response.headers)
if options.print_body:
print response.body
print(response.body)
client.close()
if __name__ == "__main__":

View File

@@ -24,24 +24,24 @@ This module also defines the `HTTPRequest` class which is exposed via
`tornado.web.RequestHandler.request`.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import Cookie
import socket
import ssl
import time
from tornado.escape import native_str, parse_qs_bytes
from tornado import httputil
from tornado import iostream
from tornado.log import gen_log
from tornado.netutil import TCPServer
from tornado.tcpserver import TCPServer
from tornado import stack_context
from tornado.util import b, bytes_type
from tornado.util import bytes_type
try:
import ssl # Python 2.6+
import Cookie # py2
except ImportError:
ssl = None
import http.cookies as Cookie # py3
class HTTPServer(TCPServer):
@@ -95,7 +95,8 @@ class HTTPServer(TCPServer):
`HTTPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
To make this server serve SSL traffic, send the ssl_options dictionary
argument with the arguments required for the `ssl.wrap_socket` method,
including "certfile" and "keyfile"::
including "certfile" and "keyfile". In Python 3.2+ you can pass
an `ssl.SSLContext` object instead of a dict::
HTTPServer(applicaton, ssl_options={
"certfile": os.path.join(data_dir, "mydomain.crt"),
@@ -103,9 +104,9 @@ class HTTPServer(TCPServer):
})
`HTTPServer` initialization follows one of three patterns (the
initialization methods are defined on `tornado.netutil.TCPServer`):
initialization methods are defined on `tornado.tcpserver.TCPServer`):
1. `~tornado.netutil.TCPServer.listen`: simple single-process::
1. `~tornado.tcpserver.TCPServer.listen`: simple single-process::
server = HTTPServer(app)
server.listen(8888)
@@ -114,7 +115,7 @@ class HTTPServer(TCPServer):
In many cases, `tornado.web.Application.listen` can be used to avoid
the need to explicitly create the `HTTPServer`.
2. `~tornado.netutil.TCPServer.bind`/`~tornado.netutil.TCPServer.start`:
2. `~tornado.tcpserver.TCPServer.bind`/`~tornado.tcpserver.TCPServer.start`:
simple multi-process::
server = HTTPServer(app)
@@ -126,7 +127,7 @@ class HTTPServer(TCPServer):
to the `HTTPServer` constructor. `start` will always start
the server on the default singleton `IOLoop`.
3. `~tornado.netutil.TCPServer.add_sockets`: advanced multi-process::
3. `~tornado.tcpserver.TCPServer.add_sockets`: advanced multi-process::
sockets = tornado.netutil.bind_sockets(8888)
tornado.process.fork_processes(0)
@@ -171,6 +172,10 @@ class HTTPConnection(object):
xheaders=False, protocol=None):
self.stream = stream
self.address = address
# Save the socket's address family now so we know how to
# interpret self.address even after the stream is closed
# and its socket attribute replaced with None.
self.address_family = stream.socket.family
self.request_callback = request_callback
self.no_keep_alive = no_keep_alive
self.xheaders = xheaders
@@ -180,7 +185,19 @@ class HTTPConnection(object):
# Save stack context here, outside of any request. This keeps
# contexts from one request from leaking into the next.
self._header_callback = stack_context.wrap(self._on_headers)
self.stream.read_until(b("\r\n\r\n"), self._header_callback)
self.stream.read_until(b"\r\n\r\n", self._header_callback)
self._write_callback = None
self._close_callback = None
def set_close_callback(self, callback):
self._close_callback = stack_context.wrap(callback)
self.stream.set_close_callback(self._on_connection_close)
def _on_connection_close(self):
callback = self._close_callback
self._close_callback = None
callback()
# Delete any unfinished callbacks to break up reference cycles.
self._write_callback = None
def close(self):
@@ -241,7 +258,7 @@ class HTTPConnection(object):
# Use a try/except instead of checking stream.closed()
# directly, because in some cases the stream doesn't discover
# that it's closed until you try to read from it.
self.stream.read_until(b("\r\n\r\n"), self._header_callback)
self.stream.read_until(b"\r\n\r\n", self._header_callback)
except iostream.StreamClosedError:
self.close()
@@ -259,10 +276,7 @@ class HTTPConnection(object):
headers = httputil.HTTPHeaders.parse(data[eol:])
# HTTPRequest wants an IP, not a full socket address
if getattr(self.stream.socket, 'family', socket.AF_INET) in (
socket.AF_INET, socket.AF_INET6):
# Jython 2.5.2 doesn't have the socket.family attribute,
# so just assume IP in that case.
if self.address_family in (socket.AF_INET, socket.AF_INET6):
remote_ip = self.address[0]
else:
# Unix (or other) socket; fake the remote address
@@ -278,12 +292,12 @@ class HTTPConnection(object):
if content_length > self.stream.max_buffer_size:
raise _BadRequestException("Content-Length too long")
if headers.get("Expect") == "100-continue":
self.stream.write(b("HTTP/1.1 100 (Continue)\r\n\r\n"))
self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
self.stream.read_bytes(content_length, self._on_request_body)
return
self.request_callback(self._request)
except _BadRequestException, e:
except _BadRequestException as e:
gen_log.info("Malformed HTTP request from %s: %s",
self.address[0], e)
self.close()
@@ -484,7 +498,7 @@ class HTTPRequest(object):
socket.SOCK_STREAM,
0, socket.AI_NUMERICHOST)
return bool(res)
except socket.gaierror, e:
except socket.gaierror as e:
if e.args[0] == socket.EAI_NONAME:
return False
raise

View File

@@ -16,14 +16,26 @@
"""HTTP utility code shared by clients and servers."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import urllib
import datetime
import numbers
import re
import time
from tornado.escape import native_str, parse_qs_bytes, utf8
from tornado.log import gen_log
from tornado.util import b, ObjectDict
from tornado.util import ObjectDict
try:
from httplib import responses # py2
except ImportError:
from http.client import responses # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
class HTTPHeaders(dict):
@@ -34,7 +46,7 @@ class HTTPHeaders(dict):
value per key, with multiple values joined by a comma.
>>> h = HTTPHeaders({"content-type": "text/html"})
>>> h.keys()
>>> list(h.keys())
['Content-Type']
>>> h["Content-Type"]
'text/html'
@@ -47,7 +59,7 @@ class HTTPHeaders(dict):
['A=B', 'C=D']
>>> for (k,v) in sorted(h.get_all()):
... print '%s: %s' % (k,v)
... print('%s: %s' % (k,v))
...
Content-Type: text/html
Set-Cookie: A=B
@@ -60,7 +72,7 @@ class HTTPHeaders(dict):
self._as_list = {}
self._last_key = None
if (len(args) == 1 and len(kwargs) == 0 and
isinstance(args[0], HTTPHeaders)):
isinstance(args[0], HTTPHeaders)):
# Copy constructor
for k, v in args[0].get_all():
self.add(k, v)
@@ -76,7 +88,9 @@ class HTTPHeaders(dict):
self._last_key = norm_name
if norm_name in self:
# bypass our override of __setitem__ since it modifies _as_list
dict.__setitem__(self, norm_name, self[norm_name] + ',' + value)
dict.__setitem__(self, norm_name,
native_str(self[norm_name]) + ',' +
native_str(value))
self._as_list[norm_name].append(value)
else:
self[norm_name] = value
@@ -92,8 +106,8 @@ class HTTPHeaders(dict):
If a header has multiple values, multiple pairs will be
returned with the same name.
"""
for name, list in self._as_list.iteritems():
for value in list:
for name, values in self._as_list.items():
for value in values:
yield (name, value)
def parse_line(self, line):
@@ -119,7 +133,7 @@ class HTTPHeaders(dict):
"""Returns a dictionary from HTTP header text.
>>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n")
>>> sorted(h.iteritems())
>>> sorted(h.items())
[('Content-Length', '42'), ('Content-Type', 'text/html')]
"""
h = cls()
@@ -152,7 +166,7 @@ class HTTPHeaders(dict):
def update(self, *args, **kwargs):
# dict.update bypasses our __setitem__
for k, v in dict(*args, **kwargs).iteritems():
for k, v in dict(*args, **kwargs).items():
self[k] = v
def copy(self):
@@ -191,7 +205,7 @@ def url_concat(url, args):
return url
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
return url + urllib.urlencode(args)
return url + urlencode(args)
class HTTPFile(ObjectDict):
@@ -216,7 +230,7 @@ def parse_body_arguments(content_type, body, arguments, files):
"""
if content_type.startswith("application/x-www-form-urlencoded"):
uri_arguments = parse_qs_bytes(native_str(body))
for name, values in uri_arguments.iteritems():
for name, values in uri_arguments.items():
values = [v for v in values if v]
if values:
arguments.setdefault(name, []).extend(values)
@@ -243,24 +257,24 @@ def parse_multipart_form_data(boundary, data, arguments, files):
# xmpp). I think we're also supposed to handle backslash-escapes
# here but I'll save that until we see a client that uses them
# in the wild.
if boundary.startswith(b('"')) and boundary.endswith(b('"')):
if boundary.startswith(b'"') and boundary.endswith(b'"'):
boundary = boundary[1:-1]
final_boundary_index = data.rfind(b("--") + boundary + b("--"))
final_boundary_index = data.rfind(b"--" + boundary + b"--")
if final_boundary_index == -1:
gen_log.warning("Invalid multipart/form-data: no final boundary")
return
parts = data[:final_boundary_index].split(b("--") + boundary + b("\r\n"))
parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n")
for part in parts:
if not part:
continue
eoh = part.find(b("\r\n\r\n"))
eoh = part.find(b"\r\n\r\n")
if eoh == -1:
gen_log.warning("multipart/form-data missing headers")
continue
headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
disp_header = headers.get("Content-Disposition", "")
disposition, disp_params = _parse_header(disp_header)
if disposition != "form-data" or not part.endswith(b("\r\n")):
if disposition != "form-data" or not part.endswith(b"\r\n"):
gen_log.warning("Invalid multipart/form-data")
continue
value = part[eoh + 4:-2]
@@ -277,6 +291,26 @@ def parse_multipart_form_data(boundary, data, arguments, files):
arguments.setdefault(name, []).append(value)
def format_timestamp(ts):
"""Formats a timestamp in the format used by HTTP.
The argument may be a numeric timestamp as returned by `time.time()`,
a time tuple as returned by `time.gmtime()`, or a `datetime.datetime`
object.
>>> format_timestamp(1359312200)
'Sun, 27 Jan 2013 18:43:20 GMT'
"""
if isinstance(ts, (tuple, time.struct_time)):
pass
elif isinstance(ts, datetime.datetime):
ts = ts.utctimetuple()
elif isinstance(ts, numbers.Real):
ts = time.gmtime(ts)
else:
raise TypeError("unknown timestamp type: %r" % ts)
return time.strftime("%a, %d %b %Y %H:%M:%S GMT", ts)
# _parseparam and _parse_header are copied and modified from python2.7's cgi.py
# The original 2.7 version of this code did not correctly support some
# combinations of semicolons and double quotes.
@@ -300,7 +334,7 @@ def _parse_header(line):
"""
parts = _parseparam(';' + line)
key = parts.next()
key = next(parts)
pdict = {}
for p in parts:
i = p.find('=')

View File

@@ -26,17 +26,16 @@ In addition to I/O events, the `IOLoop` can also schedule time-based events.
`IOLoop.add_timeout` is a non-blocking alternative to `time.sleep`.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import datetime
import errno
import functools
import heapq
import logging
import numbers
import os
import select
import sys
import thread
import threading
import time
import traceback
@@ -56,6 +55,11 @@ try:
except ImportError:
futures = None
try:
import thread # py2
except ImportError:
import _thread as thread # py3
from tornado.platform.auto import set_close_exec, Waker
@@ -66,7 +70,7 @@ class IOLoop(Configurable):
2.6+) if they are available, or else we fall back on select(). If
you are implementing a system that needs to handle thousands of
simultaneous connections, you should use a system that supports either
epoll or queue.
epoll or kqueue.
Example usage for a simple TCP server::
@@ -177,13 +181,9 @@ class IOLoop(Configurable):
@classmethod
def configurable_default(cls):
if hasattr(select, "epoll") or sys.platform.startswith('linux'):
try:
from tornado.platform.epoll import EPollIOLoop
return EPollIOLoop
except ImportError:
gen_log.warning("unable to import EPollIOLoop, falling back to SelectIOLoop")
pass
if hasattr(select, "epoll"):
from tornado.platform.epoll import EPollIOLoop
return EPollIOLoop
if hasattr(select, "kqueue"):
# Python 2.6+ on BSD or Mac
from tornado.platform.kqueue import KQueueIOLoop
@@ -194,7 +194,7 @@ class IOLoop(Configurable):
def initialize(self):
pass
def close(self, all_fds = False):
def close(self, all_fds=False):
"""Closes the IOLoop, freeing any resources used.
If ``all_fds`` is true, all file descriptors registered on the
@@ -252,8 +252,8 @@ class IOLoop(Configurable):
For use with set_blocking_signal_threshold.
"""
gen_log.warning('IOLoop blocked for %f seconds in\n%s',
self._blocking_signal_threshold,
''.join(traceback.format_stack(frame)))
self._blocking_signal_threshold,
''.join(traceback.format_stack(frame)))
def start(self):
"""Starts the I/O loop.
@@ -264,7 +264,8 @@ class IOLoop(Configurable):
raise NotImplementedError()
def stop(self):
"""Stop the loop after the current event loop iteration is complete.
"""Stop the I/O loop.
If the event loop is not currently running, the next call to start()
will return immediately.
@@ -280,6 +281,8 @@ class IOLoop(Configurable):
Note that even after `stop` has been called, the IOLoop is not
completely stopped until `IOLoop.start` has also returned.
Some work that was scheduled before the call to `stop` may still
be run before the IOLoop shuts down.
"""
raise NotImplementedError()
@@ -316,7 +319,9 @@ class IOLoop(Configurable):
def remove_timeout(self, timeout):
"""Cancels a pending timeout.
The argument is a handle as returned by add_timeout.
The argument is a handle as returned by add_timeout. It is
safe to call `remove_timeout` even if the callback has already
been run.
"""
raise NotImplementedError()
@@ -351,6 +356,7 @@ class IOLoop(Configurable):
_FUTURE_TYPES = (futures.Future, DummyFuture)
else:
_FUTURE_TYPES = DummyFuture
def add_future(self, future, callback):
"""Schedules a callback on the IOLoop when the given future is finished.
@@ -381,8 +387,7 @@ class IOLoop(Configurable):
The exception itself is not passed explicitly, but is available
in sys.exc_info.
"""
app_log.error("Exception in callback %r", callback, exc_info = True)
app_log.error("Exception in callback %r", callback, exc_info=True)
class PollIOLoop(IOLoop):
@@ -392,7 +397,7 @@ class PollIOLoop(IOLoop):
(Linux), `tornado.platform.kqueue.KQueueIOLoop` (BSD and Mac), or
`tornado.platform.select.SelectIOLoop` (all platforms).
"""
def initialize(self, impl, time_func = None):
def initialize(self, impl, time_func=None):
super(PollIOLoop, self).initialize()
self._impl = impl
if hasattr(self._impl, 'fileno'):
@@ -416,16 +421,16 @@ class PollIOLoop(IOLoop):
lambda fd, events: self._waker.consume(),
self.READ)
def close(self, all_fds = False):
def close(self, all_fds=False):
with self._callback_lock:
self._closing = True
self.remove_handler(self._waker.fileno())
if all_fds:
for fd in self._handlers.keys()[:]:
for fd in self._handlers.keys():
try:
os.close(fd)
except Exception:
gen_log.debug("error closing fd %s", fd, exc_info = True)
gen_log.debug("error closing fd %s", fd, exc_info=True)
self._waker.close()
self._impl.close()
@@ -442,12 +447,12 @@ class PollIOLoop(IOLoop):
try:
self._impl.unregister(fd)
except Exception:
gen_log.debug("Error deleting fd from IOLoop", exc_info = True)
gen_log.debug("Error deleting fd from IOLoop", exc_info=True)
def set_blocking_signal_threshold(self, seconds, action):
if not hasattr(signal, "setitimer"):
gen_log.error("set_blocking_signal_threshold requires a signal module "
"with the setitimer method")
"with the setitimer method")
return
self._blocking_signal_threshold = seconds
if seconds is not None:
@@ -500,7 +505,7 @@ class PollIOLoop(IOLoop):
# IOLoop is just started once at the beginning.
signal.set_wakeup_fd(old_wakeup_fd)
old_wakeup_fd = None
except ValueError: # non-main thread
except ValueError: # non-main thread
pass
while True:
@@ -543,7 +548,7 @@ class PollIOLoop(IOLoop):
try:
event_pairs = self._impl.poll(poll_timeout)
except Exception, e:
except Exception as e:
# Depending on python version and IOLoop implementation,
# different exception types may be thrown and there are
# two ways EINTR might be signaled:
@@ -568,18 +573,17 @@ class PollIOLoop(IOLoop):
while self._events:
fd, events = self._events.popitem()
try:
hdlr = self._handlers.get(fd)
if hdlr: hdlr(fd, events)
except (OSError, IOError), e:
self._handlers[fd](fd, events)
except (OSError, IOError) as e:
if e.args[0] == errno.EPIPE:
# Happens when the client closes the connection
pass
else:
app_log.error("Exception in I/O handler for fd %s",
fd, exc_info = True)
fd, exc_info=True)
except Exception:
app_log.error("Exception in I/O handler for fd %s",
fd, exc_info = True)
fd, exc_info=True)
# reset the stopped flag so another start/stop pair can be issued
self._stopped = False
if self._blocking_signal_threshold is not None:
@@ -615,7 +619,7 @@ class PollIOLoop(IOLoop):
raise RuntimeError("IOLoop is closing")
list_empty = not self._callbacks
self._callbacks.append(functools.partial(
stack_context.wrap(callback), *args, **kwargs))
stack_context.wrap(callback), *args, **kwargs))
if list_empty and thread.get_ident() != self._thread_ident:
# If we're in the IOLoop's thread, we know it's not currently
# polling. If we're not, and we added the first callback to an
@@ -641,7 +645,7 @@ class PollIOLoop(IOLoop):
# either the old or new version of self._callbacks,
# but either way will work.
self._callbacks.append(functools.partial(
stack_context.wrap(callback), *args, **kwargs))
stack_context.wrap(callback), *args, **kwargs))
class _Timeout(object):
@@ -651,7 +655,7 @@ class _Timeout(object):
__slots__ = ['deadline', 'callback']
def __init__(self, deadline, callback, io_loop):
if isinstance(deadline, (int, long, float)):
if isinstance(deadline, numbers.Real):
self.deadline = deadline
elif isinstance(deadline, datetime.timedelta):
self.deadline = io_loop.time() + _Timeout.timedelta_to_seconds(deadline)
@@ -684,7 +688,7 @@ class PeriodicCallback(object):
`start` must be called after the PeriodicCallback is created.
"""
def __init__(self, callback, callback_time, io_loop = None):
def __init__(self, callback, callback_time, io_loop=None):
self.callback = callback
if callback_time <= 0:
raise ValueError("Periodic callback must have a positive callback_time")
@@ -712,7 +716,7 @@ class PeriodicCallback(object):
try:
self.callback()
except Exception:
app_log.error("Error in periodic callback", exc_info = True)
app_log.error("Error in periodic callback", exc_info=True)
self._schedule_next()
def _schedule_next(self):

View File

@@ -24,33 +24,33 @@ Contents:
* `PipeIOStream`: Pipe-based IOStream implementation.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import collections
import errno
import numbers
import os
import socket
import ssl
import sys
import re
from tornado import ioloop
from tornado.log import gen_log, app_log
from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError
from tornado import stack_context
from tornado.util import b, bytes_type
try:
import ssl # Python 2.6+
except ImportError:
ssl = None
from tornado.util import bytes_type
try:
from tornado.platform.posix import _set_nonblocking
except ImportError:
_set_nonblocking = None
class StreamClosedError(IOError):
pass
class BaseIOStream(object):
"""A utility class to write to and read from a non-blocking file or socket.
@@ -146,7 +146,7 @@ class BaseIOStream(object):
``callback`` will be empty.
"""
self._set_read_callback(callback)
assert isinstance(num_bytes, (int, long))
assert isinstance(num_bytes, numbers.Integral)
self._read_bytes = num_bytes
self._streaming_callback = stack_context.wrap(streaming_callback)
self._try_inline_read()
@@ -237,12 +237,14 @@ class BaseIOStream(object):
def _maybe_run_close_callback(self):
if (self.closed() and self._close_callback and
self._pending_callbacks == 0):
self._pending_callbacks == 0):
# if there are pending callbacks, don't run the close callback
# until they're done (see _maybe_add_error_handler)
cb = self._close_callback
self._close_callback = None
self._run_callback(cb)
# Delete any unfinished callbacks to break up reference cycles.
self._read_callback = self._write_callback = None
def reading(self):
"""Returns true if we are currently reading from the stream."""
@@ -399,7 +401,7 @@ class BaseIOStream(object):
"""
try:
chunk = self.read_from_fd()
except (socket.error, IOError, OSError), e:
except (socket.error, IOError, OSError) as e:
# ssl.SSLError is a subclass of socket.error
if e.args[0] == errno.ECONNRESET:
# Treat ECONNRESET as a connection close rather than
@@ -504,7 +506,7 @@ class BaseIOStream(object):
self._write_buffer_frozen = False
_merge_prefix(self._write_buffer, num_bytes)
self._write_buffer.popleft()
except socket.error, e:
except socket.error as e:
if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
self._write_buffer_frozen = True
break
@@ -520,7 +522,7 @@ class BaseIOStream(object):
def _consume(self, loc):
if loc == 0:
return b("")
return b""
_merge_prefix(self._read_buffer, loc)
self._read_buffer_size -= loc
return self._read_buffer.popleft()
@@ -630,7 +632,7 @@ class IOStream(BaseIOStream):
def read_from_fd(self):
try:
chunk = self.socket.recv(self.read_chunk_size)
except socket.error, e:
except socket.error as e:
if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
return None
else:
@@ -643,7 +645,7 @@ class IOStream(BaseIOStream):
def write_to_fd(self, data):
return self.socket.send(data)
def connect(self, address, callback=None):
def connect(self, address, callback=None, server_hostname=None):
"""Connects the socket to a remote address without blocking.
May only be called if the socket passed to the constructor was
@@ -652,6 +654,11 @@ class IOStream(BaseIOStream):
If callback is specified, it will be called when the
connection is completed.
If specified, the ``server_hostname`` parameter will be used
in SSL connections for certificate validation (if requested in
the ``ssl_options``) and SNI (if supported; requires
Python 3.2+).
Note that it is safe to call IOStream.write while the
connection is pending, in which case the data will be written
as soon as the connection is ready. Calling IOStream read
@@ -661,7 +668,7 @@ class IOStream(BaseIOStream):
self._connecting = True
try:
self.socket.connect(address)
except socket.error, e:
except socket.error as e:
# In non-blocking mode we expect connect() to raise an
# exception with EINPROGRESS or EWOULDBLOCK.
#
@@ -710,8 +717,9 @@ class SSLIOStream(IOStream):
def __init__(self, *args, **kwargs):
"""Creates an SSLIOStream.
If a dictionary is provided as keyword argument ssl_options,
it will be used as additional keyword arguments to ssl.wrap_socket.
The ``ssl_options`` keyword argument may either be a dictionary
of keywords arguments for `ssl.wrap_socket`, or an `ssl.SSLContext`
object.
"""
self._ssl_options = kwargs.pop('ssl_options', {})
super(SSLIOStream, self).__init__(*args, **kwargs)
@@ -719,6 +727,7 @@ class SSLIOStream(IOStream):
self._handshake_reading = False
self._handshake_writing = False
self._ssl_connect_callback = None
self._server_hostname = None
def reading(self):
return self._handshake_reading or super(SSLIOStream, self).reading()
@@ -732,7 +741,7 @@ class SSLIOStream(IOStream):
self._handshake_reading = False
self._handshake_writing = False
self.socket.do_handshake()
except ssl.SSLError, err:
except ssl.SSLError as err:
if err.args[0] == ssl.SSL_ERROR_WANT_READ:
self._handshake_reading = True
return
@@ -751,16 +760,46 @@ class SSLIOStream(IOStream):
self.socket.fileno(), peer, err)
return self.close(exc_info=True)
raise
except socket.error, err:
except socket.error as err:
if err.args[0] in (errno.ECONNABORTED, errno.ECONNRESET):
return self.close(exc_info=True)
else:
self._ssl_accepting = False
if not self._verify_cert(self.socket.getpeercert()):
self.close()
return
if self._ssl_connect_callback is not None:
callback = self._ssl_connect_callback
self._ssl_connect_callback = None
self._run_callback(callback)
def _verify_cert(self, peercert):
"""Returns True if peercert is valid according to the configured
validation mode and hostname.
The ssl handshake already tested the certificate for a valid
CA signature; the only thing that remains is to check
the hostname.
"""
if isinstance(self._ssl_options, dict):
verify_mode = self._ssl_options.get('cert_reqs', ssl.CERT_NONE)
elif isinstance(self._ssl_options, ssl.SSLContext):
verify_mode = self._ssl_options.verify_mode
assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL)
if verify_mode == ssl.CERT_NONE or self._server_hostname is None:
return True
cert = self.socket.getpeercert()
if cert is None and verify_mode == ssl.CERT_REQUIRED:
gen_log.warning("No SSL certificate given")
return False
try:
ssl_match_hostname(peercert, self._server_hostname)
except SSLCertificateError:
gen_log.warning("Invalid SSL certificate", exc_info=True)
return False
else:
return True
def _handle_read(self):
if self._ssl_accepting:
self._do_ssl_handshake()
@@ -773,10 +812,11 @@ class SSLIOStream(IOStream):
return
super(SSLIOStream, self)._handle_write()
def connect(self, address, callback=None):
def connect(self, address, callback=None, server_hostname=None):
# Save the user's callback and run it after the ssl handshake
# has completed.
self._ssl_connect_callback = callback
self._server_hostname = server_hostname
super(SSLIOStream, self).connect(address, callback=None)
def _handle_connect(self):
@@ -786,9 +826,9 @@ class SSLIOStream(IOStream):
# user callbacks are enqueued asynchronously on the IOLoop,
# but since _handle_events calls _handle_connect immediately
# followed by _handle_write we need this to be synchronous.
self.socket = ssl.wrap_socket(self.socket,
do_handshake_on_connect=False,
**self._ssl_options)
self.socket = ssl_wrap_socket(self.socket, self._ssl_options,
server_hostname=self._server_hostname,
do_handshake_on_connect=False)
super(SSLIOStream, self)._handle_connect()
def read_from_fd(self):
@@ -804,14 +844,14 @@ class SSLIOStream(IOStream):
# called when there is nothing to read, so we have to use
# read() instead.
chunk = self.socket.read(self.read_chunk_size)
except ssl.SSLError, e:
except ssl.SSLError as e:
# SSLError is a subclass of socket.error, so this except
# block must come first.
if e.args[0] == ssl.SSL_ERROR_WANT_READ:
return None
else:
raise
except socket.error, e:
except socket.error as e:
if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
return None
else:
@@ -821,6 +861,7 @@ class SSLIOStream(IOStream):
return None
return chunk
class PipeIOStream(BaseIOStream):
"""Pipe-based IOStream implementation.
@@ -844,7 +885,7 @@ class PipeIOStream(BaseIOStream):
def read_from_fd(self):
try:
chunk = os.read(self.fd, self.read_chunk_size)
except (IOError, OSError), e:
except (IOError, OSError) as e:
if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
return None
elif e.args[0] == errno.EBADF:
@@ -874,17 +915,17 @@ def _merge_prefix(deque, size):
string of up to size bytes.
>>> d = collections.deque(['abc', 'de', 'fghi', 'j'])
>>> _merge_prefix(d, 5); print d
>>> _merge_prefix(d, 5); print(d)
deque(['abcde', 'fghi', 'j'])
Strings will be split as necessary to reach the desired size.
>>> _merge_prefix(d, 7); print d
>>> _merge_prefix(d, 7); print(d)
deque(['abcdefg', 'hi', 'j'])
>>> _merge_prefix(d, 3); print d
>>> _merge_prefix(d, 3); print(d)
deque(['abc', 'defg', 'hi', 'j'])
>>> _merge_prefix(d, 100); print d
>>> _merge_prefix(d, 100); print(d)
deque(['abcdefghij'])
"""
if len(deque) == 1 and len(deque[0]) <= size:
@@ -904,7 +945,7 @@ def _merge_prefix(deque, size):
if prefix:
deque.appendleft(type(prefix[0])().join(prefix))
if not deque:
deque.appendleft(b(""))
deque.appendleft(b"")
def doctests():

View File

@@ -39,7 +39,7 @@ supported by gettext and related tools). If neither method is called,
the locale.translate method will simply return the original string.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import csv
import datetime
@@ -48,6 +48,7 @@ import re
from tornado import escape
from tornado.log import gen_log
from tornado.util import u
_default_locale = "en_US"
_translations = {}
@@ -80,11 +81,11 @@ def set_default_locale(code):
global _default_locale
global _supported_locales
_default_locale = code
_supported_locales = frozenset(_translations.keys() + [_default_locale])
_supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
def load_translations(directory):
u"""Loads translations from CSV files in a directory.
u("""Loads translations from CSV files in a directory.
Translations are strings with optional Python-style named placeholders
(e.g., "My name is %(name)s") and their associated translations.
@@ -109,7 +110,7 @@ def load_translations(directory):
"%(name)s liked this","A %(name)s les gust\u00f3 esto","plural"
"%(name)s liked this","A %(name)s le gust\u00f3 esto","singular"
"""
""")
global _translations
global _supported_locales
_translations = {}
@@ -128,8 +129,6 @@ def load_translations(directory):
f = open(full_path, "r", encoding="utf-8")
except TypeError:
# python 2: files return byte strings, which are decoded below.
# Once we drop python 2.5, this could use io.open instead
# on both 2 and 3.
f = open(full_path, "r")
_translations[locale] = {}
for i, row in enumerate(csv.reader(f)):
@@ -147,7 +146,7 @@ def load_translations(directory):
continue
_translations[locale].setdefault(plural, {})[english] = translation
f.close()
_supported_locales = frozenset(_translations.keys() + [_default_locale])
_supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
gen_log.debug("Supported locales: %s", sorted(_supported_locales))
@@ -183,10 +182,10 @@ def load_gettext_translations(directory, domain):
os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo"))
_translations[lang] = gettext.translation(domain, directory,
languages=[lang])
except Exception, e:
except Exception as e:
gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
continue
_supported_locales = frozenset(_translations.keys() + [_default_locale])
_supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
_use_gettext = True
gen_log.debug("Supported locales: %s", sorted(_supported_locales))
@@ -242,7 +241,7 @@ class Locale(object):
def __init__(self, code, translations):
self.code = code
self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown")
self.name = LOCALE_NAMES.get(code, {}).get("name", u("Unknown"))
self.rtl = False
for prefix in ["fa", "ar", "he"]:
if self.code.startswith(prefix):
@@ -323,26 +322,26 @@ class Locale(object):
if days == 0:
format = _("%(time)s")
elif days == 1 and local_date.day == local_yesterday.day and \
relative:
relative:
format = _("yesterday") if shorter else \
_("yesterday at %(time)s")
_("yesterday at %(time)s")
elif days < 5:
format = _("%(weekday)s") if shorter else \
_("%(weekday)s at %(time)s")
_("%(weekday)s at %(time)s")
elif days < 334: # 11mo, since confusing for same month last year
format = _("%(month_name)s %(day)s") if shorter else \
_("%(month_name)s %(day)s at %(time)s")
_("%(month_name)s %(day)s at %(time)s")
if format is None:
format = _("%(month_name)s %(day)s, %(year)s") if shorter else \
_("%(month_name)s %(day)s, %(year)s at %(time)s")
_("%(month_name)s %(day)s, %(year)s at %(time)s")
tfhour_clock = self.code not in ("en", "en_US", "zh_CN")
if tfhour_clock:
str_time = "%d:%02d" % (local_date.hour, local_date.minute)
elif self.code == "zh_CN":
str_time = "%s%d:%02d" % (
(u'\u4e0a\u5348', u'\u4e0b\u5348')[local_date.hour >= 12],
(u('\u4e0a\u5348'), u('\u4e0b\u5348'))[local_date.hour >= 12],
local_date.hour % 12 or 12, local_date.minute)
else:
str_time = "%d:%02d %s" % (
@@ -388,7 +387,7 @@ class Locale(object):
return ""
if len(parts) == 1:
return parts[0]
comma = u' \u0648 ' if self.code.startswith("fa") else u", "
comma = u(' \u0648 ') if self.code.startswith("fa") else u(", ")
return _("%(commas)s and %(last)s") % {
"commas": comma.join(parts[:-1]),
"last": parts[len(parts) - 1],
@@ -444,66 +443,66 @@ class GettextLocale(Locale):
return self.gettext(message)
LOCALE_NAMES = {
"af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"},
"am_ET": {"name_en": u"Amharic", "name": u'\u12a0\u121b\u122d\u129b'},
"ar_AR": {"name_en": u"Arabic", "name": u"\u0627\u0644\u0639\u0631\u0628\u064a\u0629"},
"bg_BG": {"name_en": u"Bulgarian", "name": u"\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438"},
"bn_IN": {"name_en": u"Bengali", "name": u"\u09ac\u09be\u0982\u09b2\u09be"},
"bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"},
"ca_ES": {"name_en": u"Catalan", "name": u"Catal\xe0"},
"cs_CZ": {"name_en": u"Czech", "name": u"\u010ce\u0161tina"},
"cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"},
"da_DK": {"name_en": u"Danish", "name": u"Dansk"},
"de_DE": {"name_en": u"German", "name": u"Deutsch"},
"el_GR": {"name_en": u"Greek", "name": u"\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac"},
"en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"},
"en_US": {"name_en": u"English (US)", "name": u"English (US)"},
"es_ES": {"name_en": u"Spanish (Spain)", "name": u"Espa\xf1ol (Espa\xf1a)"},
"es_LA": {"name_en": u"Spanish", "name": u"Espa\xf1ol"},
"et_EE": {"name_en": u"Estonian", "name": u"Eesti"},
"eu_ES": {"name_en": u"Basque", "name": u"Euskara"},
"fa_IR": {"name_en": u"Persian", "name": u"\u0641\u0627\u0631\u0633\u06cc"},
"fi_FI": {"name_en": u"Finnish", "name": u"Suomi"},
"fr_CA": {"name_en": u"French (Canada)", "name": u"Fran\xe7ais (Canada)"},
"fr_FR": {"name_en": u"French", "name": u"Fran\xe7ais"},
"ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"},
"gl_ES": {"name_en": u"Galician", "name": u"Galego"},
"he_IL": {"name_en": u"Hebrew", "name": u"\u05e2\u05d1\u05e8\u05d9\u05ea"},
"hi_IN": {"name_en": u"Hindi", "name": u"\u0939\u093f\u0928\u094d\u0926\u0940"},
"hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"},
"hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"},
"id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"},
"is_IS": {"name_en": u"Icelandic", "name": u"\xcdslenska"},
"it_IT": {"name_en": u"Italian", "name": u"Italiano"},
"ja_JP": {"name_en": u"Japanese", "name": u"\u65e5\u672c\u8a9e"},
"ko_KR": {"name_en": u"Korean", "name": u"\ud55c\uad6d\uc5b4"},
"lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvi\u0173"},
"lv_LV": {"name_en": u"Latvian", "name": u"Latvie\u0161u"},
"mk_MK": {"name_en": u"Macedonian", "name": u"\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0441\u043a\u0438"},
"ml_IN": {"name_en": u"Malayalam", "name": u"\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02"},
"ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"},
"nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokm\xe5l)"},
"nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"},
"nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"},
"pa_IN": {"name_en": u"Punjabi", "name": u"\u0a2a\u0a70\u0a1c\u0a3e\u0a2c\u0a40"},
"pl_PL": {"name_en": u"Polish", "name": u"Polski"},
"pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Portugu\xeas (Brasil)"},
"pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Portugu\xeas (Portugal)"},
"ro_RO": {"name_en": u"Romanian", "name": u"Rom\xe2n\u0103"},
"ru_RU": {"name_en": u"Russian", "name": u"\u0420\u0443\u0441\u0441\u043a\u0438\u0439"},
"sk_SK": {"name_en": u"Slovak", "name": u"Sloven\u010dina"},
"sl_SI": {"name_en": u"Slovenian", "name": u"Sloven\u0161\u010dina"},
"sq_AL": {"name_en": u"Albanian", "name": u"Shqip"},
"sr_RS": {"name_en": u"Serbian", "name": u"\u0421\u0440\u043f\u0441\u043a\u0438"},
"sv_SE": {"name_en": u"Swedish", "name": u"Svenska"},
"sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"},
"ta_IN": {"name_en": u"Tamil", "name": u"\u0ba4\u0bae\u0bbf\u0bb4\u0bcd"},
"te_IN": {"name_en": u"Telugu", "name": u"\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41"},
"th_TH": {"name_en": u"Thai", "name": u"\u0e20\u0e32\u0e29\u0e32\u0e44\u0e17\u0e22"},
"tl_PH": {"name_en": u"Filipino", "name": u"Filipino"},
"tr_TR": {"name_en": u"Turkish", "name": u"T\xfcrk\xe7e"},
"uk_UA": {"name_en": u"Ukraini ", "name": u"\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430"},
"vi_VN": {"name_en": u"Vietnamese", "name": u"Ti\u1ebfng Vi\u1ec7t"},
"zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"\u4e2d\u6587(\u7b80\u4f53)"},
"zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"\u4e2d\u6587(\u7e41\u9ad4)"},
"af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")},
"am_ET": {"name_en": u("Amharic"), "name": u('\u12a0\u121b\u122d\u129b')},
"ar_AR": {"name_en": u("Arabic"), "name": u("\u0627\u0644\u0639\u0631\u0628\u064a\u0629")},
"bg_BG": {"name_en": u("Bulgarian"), "name": u("\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438")},
"bn_IN": {"name_en": u("Bengali"), "name": u("\u09ac\u09be\u0982\u09b2\u09be")},
"bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")},
"ca_ES": {"name_en": u("Catalan"), "name": u("Catal\xe0")},
"cs_CZ": {"name_en": u("Czech"), "name": u("\u010ce\u0161tina")},
"cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")},
"da_DK": {"name_en": u("Danish"), "name": u("Dansk")},
"de_DE": {"name_en": u("German"), "name": u("Deutsch")},
"el_GR": {"name_en": u("Greek"), "name": u("\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac")},
"en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")},
"en_US": {"name_en": u("English (US)"), "name": u("English (US)")},
"es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Espa\xf1ol (Espa\xf1a)")},
"es_LA": {"name_en": u("Spanish"), "name": u("Espa\xf1ol")},
"et_EE": {"name_en": u("Estonian"), "name": u("Eesti")},
"eu_ES": {"name_en": u("Basque"), "name": u("Euskara")},
"fa_IR": {"name_en": u("Persian"), "name": u("\u0641\u0627\u0631\u0633\u06cc")},
"fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")},
"fr_CA": {"name_en": u("French (Canada)"), "name": u("Fran\xe7ais (Canada)")},
"fr_FR": {"name_en": u("French"), "name": u("Fran\xe7ais")},
"ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")},
"gl_ES": {"name_en": u("Galician"), "name": u("Galego")},
"he_IL": {"name_en": u("Hebrew"), "name": u("\u05e2\u05d1\u05e8\u05d9\u05ea")},
"hi_IN": {"name_en": u("Hindi"), "name": u("\u0939\u093f\u0928\u094d\u0926\u0940")},
"hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")},
"hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")},
"id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")},
"is_IS": {"name_en": u("Icelandic"), "name": u("\xcdslenska")},
"it_IT": {"name_en": u("Italian"), "name": u("Italiano")},
"ja_JP": {"name_en": u("Japanese"), "name": u("\u65e5\u672c\u8a9e")},
"ko_KR": {"name_en": u("Korean"), "name": u("\ud55c\uad6d\uc5b4")},
"lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvi\u0173")},
"lv_LV": {"name_en": u("Latvian"), "name": u("Latvie\u0161u")},
"mk_MK": {"name_en": u("Macedonian"), "name": u("\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0441\u043a\u0438")},
"ml_IN": {"name_en": u("Malayalam"), "name": u("\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02")},
"ms_MY": {"name_en": u("Malay"), "name": u("Bahasa Melayu")},
"nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokm\xe5l)")},
"nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")},
"nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")},
"pa_IN": {"name_en": u("Punjabi"), "name": u("\u0a2a\u0a70\u0a1c\u0a3e\u0a2c\u0a40")},
"pl_PL": {"name_en": u("Polish"), "name": u("Polski")},
"pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Portugu\xeas (Brasil)")},
"pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Portugu\xeas (Portugal)")},
"ro_RO": {"name_en": u("Romanian"), "name": u("Rom\xe2n\u0103")},
"ru_RU": {"name_en": u("Russian"), "name": u("\u0420\u0443\u0441\u0441\u043a\u0438\u0439")},
"sk_SK": {"name_en": u("Slovak"), "name": u("Sloven\u010dina")},
"sl_SI": {"name_en": u("Slovenian"), "name": u("Sloven\u0161\u010dina")},
"sq_AL": {"name_en": u("Albanian"), "name": u("Shqip")},
"sr_RS": {"name_en": u("Serbian"), "name": u("\u0421\u0440\u043f\u0441\u043a\u0438")},
"sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")},
"sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")},
"ta_IN": {"name_en": u("Tamil"), "name": u("\u0ba4\u0bae\u0bbf\u0bb4\u0bcd")},
"te_IN": {"name_en": u("Telugu"), "name": u("\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41")},
"th_TH": {"name_en": u("Thai"), "name": u("\u0e20\u0e32\u0e29\u0e32\u0e44\u0e17\u0e22")},
"tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")},
"tr_TR": {"name_en": u("Turkish"), "name": u("T\xfcrk\xe7e")},
"uk_UA": {"name_en": u("Ukraini "), "name": u("\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430")},
"vi_VN": {"name_en": u("Vietnamese"), "name": u("Ti\u1ebfng Vi\u1ec7t")},
"zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("\u4e2d\u6587(\u7b80\u4f53)")},
"zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("\u4e2d\u6587(\u7e41\u9ad4)")},
}

View File

@@ -28,13 +28,15 @@ These streams may be configured independently using the standard library's
`logging` module. For example, you may wish to send ``tornado.access`` logs
to a separate file for analysis.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import logging
import logging.handlers
import sys
import time
from tornado.escape import _unicode
from tornado.util import unicode_type, basestring_type
try:
import curses
@@ -46,6 +48,7 @@ access_log = logging.getLogger("tornado.access")
app_log = logging.getLogger("tornado.application")
gen_log = logging.getLogger("tornado.general")
def _stderr_supports_color():
color = False
if curses and sys.stderr.isatty():
@@ -85,25 +88,25 @@ class LogFormatter(logging.Formatter):
fg_color = (curses.tigetstr("setaf") or
curses.tigetstr("setf") or "")
if (3, 0) < sys.version_info < (3, 2, 3):
fg_color = unicode(fg_color, "ascii")
fg_color = unicode_type(fg_color, "ascii")
self._colors = {
logging.DEBUG: unicode(curses.tparm(fg_color, 4), # Blue
"ascii"),
logging.INFO: unicode(curses.tparm(fg_color, 2), # Green
"ascii"),
logging.WARNING: unicode(curses.tparm(fg_color, 3), # Yellow
"ascii"),
logging.ERROR: unicode(curses.tparm(fg_color, 1), # Red
"ascii"),
logging.DEBUG: unicode_type(curses.tparm(fg_color, 4), # Blue
"ascii"),
logging.INFO: unicode_type(curses.tparm(fg_color, 2), # Green
"ascii"),
logging.WARNING: unicode_type(curses.tparm(fg_color, 3), # Yellow
"ascii"),
logging.ERROR: unicode_type(curses.tparm(fg_color, 1), # Red
"ascii"),
}
self._normal = unicode(curses.tigetstr("sgr0"), "ascii")
self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii")
def format(self, record):
try:
record.message = record.getMessage()
except Exception, e:
except Exception as e:
record.message = "Bad message (%r): %r" % (e, record.__dict__)
assert isinstance(record.message, basestring) # guaranteed by logging
assert isinstance(record.message, basestring_type) # guaranteed by logging
record.asctime = time.strftime(
"%y%m%d %H:%M:%S", self.converter(record.created))
prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \
@@ -128,20 +131,27 @@ class LogFormatter(logging.Formatter):
# it's worth it since the encoding errors that would otherwise
# result are so useless (and tornado is fond of using utf8-encoded
# byte strings whereever possible).
try:
message = _unicode(record.message)
except UnicodeDecodeError:
message = repr(record.message)
def safe_unicode(s):
try:
return _unicode(s)
except UnicodeDecodeError:
return repr(s)
formatted = prefix + " " + message
formatted = prefix + " " + safe_unicode(record.message)
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
formatted = formatted.rstrip() + "\n" + record.exc_text
# exc_text contains multiple lines. We need to safe_unicode
# each line separately so that non-utf8 bytes don't cause
# all the newlines to turn into '\n'.
lines = [formatted.rstrip()]
lines.extend(safe_unicode(ln) for ln in record.exc_text.split('\n'))
formatted = '\n'.join(lines)
return formatted.replace("\n", "\n ")
def enable_pretty_logging(options=None):
def enable_pretty_logging(options=None, logger=None):
"""Turns on formatted logging output as configured.
This is called automaticaly by `tornado.options.parse_command_line`
@@ -151,22 +161,23 @@ def enable_pretty_logging(options=None):
from tornado.options import options
if options.logging == 'none':
return
root_logger = logging.getLogger()
root_logger.setLevel(getattr(logging, options.logging.upper()))
if logger is None:
logger = logging.getLogger()
logger.setLevel(getattr(logging, options.logging.upper()))
if options.log_file_prefix:
channel = logging.handlers.RotatingFileHandler(
filename=options.log_file_prefix,
maxBytes=options.log_file_max_size,
backupCount=options.log_file_num_backups)
channel.setFormatter(LogFormatter(color=False))
root_logger.addHandler(channel)
logger.addHandler(channel)
if (options.log_to_stderr or
(options.log_to_stderr is None and not root_logger.handlers)):
(options.log_to_stderr is None and not logger.handlers)):
# Set up color if we are in a tty and curses is installed
channel = logging.StreamHandler()
channel.setFormatter(LogFormatter())
root_logger.addHandler(channel)
logger.addHandler(channel)
def define_logging_options(options=None):
@@ -174,21 +185,21 @@ def define_logging_options(options=None):
# late import to prevent cycle
from tornado.options import options
options.define("logging", default="info",
help=("Set the Python log level. If 'none', tornado won't touch the "
"logging configuration."),
metavar="debug|info|warning|error|none")
help=("Set the Python log level. If 'none', tornado won't touch the "
"logging configuration."),
metavar="debug|info|warning|error|none")
options.define("log_to_stderr", type=bool, default=None,
help=("Send log output to stderr (colorized if possible). "
"By default use stderr if --log_file_prefix is not set and "
"no other logging is configured."))
help=("Send log output to stderr (colorized if possible). "
"By default use stderr if --log_file_prefix is not set and "
"no other logging is configured."))
options.define("log_file_prefix", type=str, default=None, metavar="PATH",
help=("Path prefix for log files. "
"Note that if you are running multiple tornado processes, "
"log_file_prefix must be different for each of them (e.g. "
"include the port number)"))
help=("Path prefix for log files. "
"Note that if you are running multiple tornado processes, "
"log_file_prefix must be different for each of them (e.g. "
"include the port number)"))
options.define("log_file_max_size", type=int, default=100 * 1000 * 1000,
help="max size of log files before rollover")
help="max size of log files before rollover")
options.define("log_file_num_backups", type=int, default=10,
help="number of log files to keep")
help="number of log files to keep")
options.add_parse_callback(enable_pretty_logging)

View File

@@ -16,226 +16,19 @@
"""Miscellaneous network utility code."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import re
import socket
import ssl
import stat
from tornado import process
from tornado.concurrent import dummy_executor, run_on_executor
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream, SSLIOStream
from tornado.log import app_log
from tornado.platform.auto import set_close_exec
try:
import ssl # Python 2.6+
except ImportError:
ssl = None
class TCPServer(object):
r"""A non-blocking, single-threaded TCP server.
To use `TCPServer`, define a subclass which overrides the `handle_stream`
method.
`TCPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
To make this server serve SSL traffic, send the ssl_options dictionary
argument with the arguments required for the `ssl.wrap_socket` method,
including "certfile" and "keyfile"::
TCPServer(ssl_options={
"certfile": os.path.join(data_dir, "mydomain.crt"),
"keyfile": os.path.join(data_dir, "mydomain.key"),
})
`TCPServer` initialization follows one of three patterns:
1. `listen`: simple single-process::
server = TCPServer()
server.listen(8888)
IOLoop.instance().start()
2. `bind`/`start`: simple multi-process::
server = TCPServer()
server.bind(8888)
server.start(0) # Forks multiple sub-processes
IOLoop.instance().start()
When using this interface, an `IOLoop` must *not* be passed
to the `TCPServer` constructor. `start` will always start
the server on the default singleton `IOLoop`.
3. `add_sockets`: advanced multi-process::
sockets = bind_sockets(8888)
tornado.process.fork_processes(0)
server = TCPServer()
server.add_sockets(sockets)
IOLoop.instance().start()
The `add_sockets` interface is more complicated, but it can be
used with `tornado.process.fork_processes` to give you more
flexibility in when the fork happens. `add_sockets` can
also be used in single-process servers if you want to create
your listening sockets in some way other than
`bind_sockets`.
"""
def __init__(self, io_loop=None, ssl_options=None):
self.io_loop = io_loop
self.ssl_options = ssl_options
self._sockets = {} # fd -> socket object
self._pending_sockets = []
self._started = False
# Verify the SSL options. Otherwise we don't get errors until clients
# connect. This doesn't verify that the keys are legitimate, but
# the SSL module doesn't do that until there is a connected socket
# which seems like too much work
if self.ssl_options is not None:
# Only certfile is required: it can contain both keys
if 'certfile' not in self.ssl_options:
raise KeyError('missing key "certfile" in ssl_options')
if not os.path.exists(self.ssl_options['certfile']):
raise ValueError('certfile "%s" does not exist' %
self.ssl_options['certfile'])
if ('keyfile' in self.ssl_options and
not os.path.exists(self.ssl_options['keyfile'])):
raise ValueError('keyfile "%s" does not exist' %
self.ssl_options['keyfile'])
def listen(self, port, address=""):
"""Starts accepting connections on the given port.
This method may be called more than once to listen on multiple ports.
`listen` takes effect immediately; it is not necessary to call
`TCPServer.start` afterwards. It is, however, necessary to start
the `IOLoop`.
"""
sockets = bind_sockets(port, address=address)
self.add_sockets(sockets)
def add_sockets(self, sockets):
"""Makes this server start accepting connections on the given sockets.
The ``sockets`` parameter is a list of socket objects such as
those returned by `bind_sockets`.
`add_sockets` is typically used in combination with that
method and `tornado.process.fork_processes` to provide greater
control over the initialization of a multi-process server.
"""
if self.io_loop is None:
self.io_loop = IOLoop.instance()
for sock in sockets:
self._sockets[sock.fileno()] = sock
add_accept_handler(sock, self._handle_connection,
io_loop=self.io_loop)
def add_socket(self, socket):
"""Singular version of `add_sockets`. Takes a single socket object."""
self.add_sockets([socket])
def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128):
"""Binds this server to the given port on the given address.
To start the server, call `start`. If you want to run this server
in a single process, you can call `listen` as a shortcut to the
sequence of `bind` and `start` calls.
Address may be either an IP address or hostname. If it's a hostname,
the server will listen on all IP addresses associated with the
name. Address may be an empty string or None to listen on all
available interfaces. Family may be set to either ``socket.AF_INET``
or ``socket.AF_INET6`` to restrict to ipv4 or ipv6 addresses, otherwise
both will be used if available.
The ``backlog`` argument has the same meaning as for
`socket.listen`.
This method may be called multiple times prior to `start` to listen
on multiple ports or interfaces.
"""
sockets = bind_sockets(port, address=address, family=family,
backlog=backlog)
if self._started:
self.add_sockets(sockets)
else:
self._pending_sockets.extend(sockets)
def start(self, num_processes=1):
"""Starts this server in the IOLoop.
By default, we run the server in this process and do not fork any
additional child process.
If num_processes is ``None`` or <= 0, we detect the number of cores
available on this machine and fork that number of child
processes. If num_processes is given and > 1, we fork that
specific number of sub-processes.
Since we use processes and not threads, there is no shared memory
between any server code.
Note that multiple processes are not compatible with the autoreload
module (or the ``debug=True`` option to `tornado.web.Application`).
When using multiple processes, no IOLoops can be created or
referenced until after the call to ``TCPServer.start(n)``.
"""
assert not self._started
self._started = True
if num_processes != 1:
process.fork_processes(num_processes)
sockets = self._pending_sockets
self._pending_sockets = []
self.add_sockets(sockets)
def stop(self):
"""Stops listening for new connections.
Requests currently in progress may still continue after the
server is stopped.
"""
for fd, sock in self._sockets.iteritems():
self.io_loop.remove_handler(fd)
sock.close()
def handle_stream(self, stream, address):
"""Override to handle a new `IOStream` from an incoming connection."""
raise NotImplementedError()
def _handle_connection(self, connection, address):
if self.ssl_options is not None:
assert ssl, "Python 2.6+ and OpenSSL required for SSL"
try:
connection = ssl.wrap_socket(connection,
server_side=True,
do_handshake_on_connect=False,
**self.ssl_options)
except ssl.SSLError, err:
if err.args[0] == ssl.SSL_ERROR_EOF:
return connection.close()
else:
raise
except socket.error, err:
if err.args[0] == errno.ECONNABORTED:
return connection.close()
else:
raise
try:
if self.ssl_options is not None:
stream = SSLIOStream(connection, io_loop=self.io_loop)
else:
stream = IOStream(connection, io_loop=self.io_loop)
self.handle_stream(stream, address)
except Exception:
app_log.error("Error in connection callback", exc_info=True)
from tornado.util import Configurable
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
@@ -261,10 +54,17 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags
sockets = []
if address == "":
address = None
if not socket.has_ipv6 and family == socket.AF_UNSPEC:
# Python can be compiled with --disable-ipv6, which causes
# operations on AF_INET6 sockets to fail, but does not
# automatically exclude those results from getaddrinfo
# results.
# http://bugs.python.org/issue16208
family = socket.AF_INET
if flags is None:
flags = socket.AI_PASSIVE
for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM,
0, flags)):
0, flags)):
af, socktype, proto, canonname, sockaddr = res
sock = socket.socket(af, socktype, proto)
set_close_exec(sock.fileno())
@@ -288,7 +88,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags
return sockets
if hasattr(socket, 'AF_UNIX'):
def bind_unix_socket(file, mode=0600, backlog=128):
def bind_unix_socket(file, mode=0o600, backlog=128):
"""Creates a listening unix socket.
If a socket with the given name already exists, it will be deleted.
@@ -304,7 +104,7 @@ if hasattr(socket, 'AF_UNIX'):
sock.setblocking(0)
try:
st = os.stat(file)
except OSError, err:
except OSError as err:
if err.errno != errno.ENOENT:
raise
else:
@@ -334,7 +134,7 @@ def add_accept_handler(sock, callback, io_loop=None):
while True:
try:
connection, address = sock.accept()
except socket.error, e:
except socket.error as e:
if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
return
raise
@@ -342,11 +142,186 @@ def add_accept_handler(sock, callback, io_loop=None):
io_loop.add_handler(sock.fileno(), accept_handler, IOLoop.READ)
class Resolver(object):
def __init__(self, io_loop=None, executor=None):
class Resolver(Configurable):
@classmethod
def configurable_base(cls):
return Resolver
@classmethod
def configurable_default(cls):
return BlockingResolver
def getaddrinfo(self, *args, **kwargs):
"""Resolves an address.
The arguments to this function are the same as to
`socket.getaddrinfo`, with the addition of an optional
keyword-only ``callback`` argument.
Returns a `Future` whose result is the same as the return
value of `socket.getaddrinfo`. If a callback is passed,
it will be run with the `Future` as an argument when it
is complete.
"""
raise NotImplementedError()
class ExecutorResolver(Resolver):
def initialize(self, io_loop=None, executor=None):
self.io_loop = io_loop or IOLoop.instance()
self.executor = executor or dummy_executor
@run_on_executor
def getaddrinfo(self, *args, **kwargs):
return socket.getaddrinfo(*args, **kwargs)
class BlockingResolver(ExecutorResolver):
def initialize(self, io_loop=None):
super(BlockingResolver, self).initialize(io_loop=io_loop)
class ThreadedResolver(ExecutorResolver):
def initialize(self, io_loop=None, num_threads=10):
from concurrent.futures import ThreadPoolExecutor
super(ThreadedResolver, self).initialize(
io_loop=io_loop, executor=ThreadPoolExecutor(num_threads))
class OverrideResolver(Resolver):
"""Wraps a resolver with a mapping of overrides.
This can be used to make local DNS changes (e.g. for testing)
without modifying system-wide settings.
The mapping can contain either host strings or host-port pairs.
"""
def initialize(self, resolver, mapping):
self.resolver = resolver
self.mapping = mapping
def getaddrinfo(self, host, port, *args, **kwargs):
if (host, port) in self.mapping:
host, port = self.mapping[(host, port)]
elif host in self.mapping:
host = self.mapping[host]
return self.resolver.getaddrinfo(host, port, *args, **kwargs)
# These are the keyword arguments to ssl.wrap_socket that must be translated
# to their SSLContext equivalents (the other arguments are still passed
# to SSLContext.wrap_socket).
_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile',
'cert_reqs', 'ca_certs', 'ciphers'])
def ssl_options_to_context(ssl_options):
"""Try to Convert an ssl_options dictionary to an SSLContext object.
The ``ssl_options`` dictionary contains keywords to be passed to
`ssl.wrap_sockets`. In Python 3.2+, `ssl.SSLContext` objects can
be used instead. This function converts the dict form to its
`SSLContext` equivalent, and may be used when a component which
accepts both forms needs to upgrade to the `SSLContext` version
to use features like SNI or NPN.
"""
if isinstance(ssl_options, dict):
assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options
if (not hasattr(ssl, 'SSLContext') or
isinstance(ssl_options, ssl.SSLContext)):
return ssl_options
context = ssl.SSLContext(
ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23))
if 'certfile' in ssl_options:
context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None))
if 'cert_reqs' in ssl_options:
context.verify_mode = ssl_options['cert_reqs']
if 'ca_certs' in ssl_options:
context.load_verify_locations(ssl_options['ca_certs'])
if 'ciphers' in ssl_options:
context.set_ciphers(ssl_options['ciphers'])
return context
def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs):
"""Returns an `ssl.SSLSocket` wrapping the given socket.
``ssl_options`` may be either a dictionary (as accepted by
`ssl_options_to_context) or an `ssl.SSLContext` object.
Additional keyword arguments are passed to `wrap_socket`
(either the `SSLContext` method or the `ssl` module function
as appropriate).
"""
context = ssl_options_to_context(ssl_options)
if hasattr(ssl, 'SSLContext') and isinstance(context, ssl.SSLContext):
if server_hostname is not None and getattr(ssl, 'HAS_SNI'):
# Python doesn't have server-side SNI support so we can't
# really unittest this, but it can be manually tested with
# python3.2 -m tornado.httpclient https://sni.velox.ch
return context.wrap_socket(socket, server_hostname=server_hostname,
**kwargs)
else:
return context.wrap_socket(socket, **kwargs)
else:
return ssl.wrap_socket(socket, **dict(context, **kwargs))
if hasattr(ssl, 'match_hostname'): # python 3.2+
ssl_match_hostname = ssl.match_hostname
SSLCertificateError = ssl.CertificateError
else:
# match_hostname was added to the standard library ssl module in python 3.2.
# The following code was backported for older releases and copied from
# https://bitbucket.org/brandon/backports.ssl_match_hostname
class SSLCertificateError(ValueError):
pass
def _dnsname_to_pat(dn):
pats = []
for frag in dn.split(r'.'):
if frag == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
else:
# Otherwise, '*' matches any dotless fragment.
frag = re.escape(frag)
pats.append(frag.replace(r'\*', '[^.]*'))
return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
def ssl_match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
are mostly followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if not san:
# The subject is only checked when subjectAltName is empty
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise SSLCertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise SSLCertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise SSLCertificateError("no appropriate commonName or "
"subjectAltName fields were found")

View File

@@ -57,7 +57,7 @@ simply call methods on it. You may create additional `OptionParser`
instances to define isolated sets of options, such as for subcommands.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import datetime
import re
@@ -68,6 +68,7 @@ import textwrap
from tornado.escape import _unicode
from tornado.log import define_logging_options
from tornado import stack_context
from tornado.util import basestring_type
class Error(Exception):
@@ -171,7 +172,7 @@ class OptionParser(object):
if args is None:
args = sys.argv
remaining = []
for i in xrange(1, len(args)):
for i in range(1, len(args)):
# All things after the last option are command line arguments
if not args[i].startswith("-"):
remaining = args[i:]
@@ -218,15 +219,15 @@ class OptionParser(object):
"""Prints all the command line options to stderr (or another file)."""
if file is None:
file = sys.stderr
print >> file, "Usage: %s [OPTIONS]" % sys.argv[0]
print >> file, "\nOptions:\n"
print("Usage: %s [OPTIONS]" % sys.argv[0], file=file)
print("\nOptions:\n", file=file)
by_group = {}
for option in self._options.itervalues():
for option in self._options.values():
by_group.setdefault(option.group_name, []).append(option)
for filename, o in sorted(by_group.items()):
if filename:
print >> file, "\n%s options:\n" % os.path.normpath(filename)
print("\n%s options:\n" % os.path.normpath(filename), file=file)
o.sort(key=lambda option: option.name)
for option in o:
prefix = option.name
@@ -238,10 +239,10 @@ class OptionParser(object):
lines = textwrap.wrap(description, 79 - 35)
if len(prefix) > 30 or len(lines) == 0:
lines.insert(0, '')
print >> file, " --%-30s %s" % (prefix, lines[0])
print(" --%-30s %s" % (prefix, lines[0]), file=file)
for line in lines[1:]:
print >> file, "%-34s %s" % (' ', line)
print >> file
print("%-34s %s" % (' ', line), file=file)
print(file=file)
def _help_callback(self, value):
if value:
@@ -272,6 +273,7 @@ class OptionParser(object):
"""
return _Mockable(self)
class _Mockable(object):
"""`mock.patch` compatible wrapper for `OptionParser`.
@@ -300,8 +302,9 @@ class _Mockable(object):
def __delattr__(self, name):
setattr(self._options, name, self._originals.pop(name))
class _Option(object):
def __init__(self, name, default=None, type=basestring, help=None,
def __init__(self, name, default=None, type=basestring_type, help=None,
metavar=None, multiple=False, file_name=None, group_name=None,
callback=None):
if default is None and multiple:
@@ -325,7 +328,7 @@ class _Option(object):
datetime.datetime: self._parse_datetime,
datetime.timedelta: self._parse_timedelta,
bool: self._parse_bool,
basestring: self._parse_string,
basestring_type: self._parse_string,
}.get(self.type, self.type)
if self.multiple:
self._value = []
@@ -467,6 +470,7 @@ def print_help(file=None):
"""
return options.print_help(file)
def add_parse_callback(callback):
"""Adds a parse callback, to be invoked when option parsing is done.

View File

@@ -23,7 +23,7 @@ Most code that needs access to this functionality should do e.g.::
from tornado.platform.auto import set_close_exec
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import os

View File

@@ -1,11 +1,10 @@
"""Lowest-common-denominator implementations of platform functionality."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import errno
import socket
from tornado.platform import interface
from tornado.util import b
class Waker(interface.Waker):
@@ -43,9 +42,9 @@ class Waker(interface.Waker):
try:
self.writer.connect(connect_address)
break # success
except socket.error, detail:
except socket.error as detail:
if (not hasattr(errno, 'WSAEADDRINUSE') or
detail[0] != errno.WSAEADDRINUSE):
detail[0] != errno.WSAEADDRINUSE):
# "Address already in use" is the only error
# I've seen on two WinXP Pro SP2 boxes, under
# Pythons 2.3.5 and 2.4.1.
@@ -74,7 +73,7 @@ class Waker(interface.Waker):
def wake(self):
try:
self.writer.send(b("x"))
self.writer.send(b"x")
except (IOError, socket.error):
pass

View File

@@ -13,56 +13,14 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""EPoll-based IOLoop implementation for Linux systems.
"""EPoll-based IOLoop implementation for Linux systems."""
from __future__ import absolute_import, division, print_function, with_statement
Supports the standard library's `select.epoll` function for Python 2.6+,
and our own C module for Python 2.5.
"""
from __future__ import absolute_import, division, with_statement
import os
import select
from tornado.ioloop import PollIOLoop
if hasattr(select, 'epoll'):
# Python 2.6+
class EPollIOLoop(PollIOLoop):
def initialize(self, **kwargs):
super(EPollIOLoop, self).initialize(impl=select.epoll(), **kwargs)
else:
# Python 2.5
from tornado import epoll
class _EPoll(object):
"""An epoll-based event loop using our C module for Python 2.5 systems"""
_EPOLL_CTL_ADD = 1
_EPOLL_CTL_DEL = 2
_EPOLL_CTL_MOD = 3
def __init__(self):
self._epoll_fd = epoll.epoll_create()
def fileno(self):
return self._epoll_fd
def close(self):
os.close(self._epoll_fd)
def register(self, fd, events):
epoll.epoll_ctl(self._epoll_fd, self._EPOLL_CTL_ADD, fd, events)
def modify(self, fd, events):
epoll.epoll_ctl(self._epoll_fd, self._EPOLL_CTL_MOD, fd, events)
def unregister(self, fd):
epoll.epoll_ctl(self._epoll_fd, self._EPOLL_CTL_DEL, fd, 0)
def poll(self, timeout):
return epoll.epoll_wait(self._epoll_fd, int(timeout * 1000))
class EPollIOLoop(PollIOLoop):
def initialize(self, **kwargs):
super(EPollIOLoop, self).initialize(impl=_EPoll(), **kwargs)
class EPollIOLoop(PollIOLoop):
def initialize(self, **kwargs):
super(EPollIOLoop, self).initialize(impl=select.epoll(), **kwargs)

View File

@@ -21,7 +21,7 @@ for other tornado.platform modules. Most code should import the appropriate
implementation from `tornado.platform.auto`.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
def set_close_exec(fd):

View File

@@ -14,7 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
"""KQueue-based IOLoop implementation for BSD/Mac systems."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import select
@@ -22,6 +22,7 @@ from tornado.ioloop import IOLoop, PollIOLoop
assert hasattr(select, 'kqueue'), 'kqueue not supported'
class _KQueue(object):
"""A kqueue-based event loop for BSD/Mac systems."""
def __init__(self):
@@ -52,11 +53,11 @@ class _KQueue(object):
kevents = []
if events & IOLoop.WRITE:
kevents.append(select.kevent(
fd, filter=select.KQ_FILTER_WRITE, flags=flags))
fd, filter=select.KQ_FILTER_WRITE, flags=flags))
if events & IOLoop.READ or not kevents:
# Always read when there is not a write
kevents.append(select.kevent(
fd, filter=select.KQ_FILTER_READ, flags=flags))
fd, filter=select.KQ_FILTER_READ, flags=flags))
# Even though control() takes a list, it seems to return EINVAL
# on Mac OS X (10.6) when there is more than one event in the list.
for kevent in kevents:

View File

@@ -16,13 +16,12 @@
"""Posix implementations of platform-specific functionality."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import fcntl
import os
from tornado.platform import interface
from tornado.util import b
def set_close_exec(fd):
@@ -53,7 +52,7 @@ class Waker(interface.Waker):
def wake(self):
try:
self.writer.write(b("x"))
self.writer.write(b"x")
except IOError:
pass

View File

@@ -17,12 +17,13 @@
Used as a fallback for systems that don't support epoll or kqueue.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import select
from tornado.ioloop import IOLoop, PollIOLoop
class _Select(object):
"""A simple, select()-based IOLoop implementation for non-Linux systems"""
def __init__(self):
@@ -69,7 +70,7 @@ class _Select(object):
events[fd] = events.get(fd, 0) | IOLoop.ERROR
return events.items()
class SelectIOLoop(PollIOLoop):
def initialize(self, **kwargs):
super(SelectIOLoop, self).initialize(impl=_Select(), **kwargs)

View File

@@ -64,11 +64,10 @@ reactor. Recommended usage::
This module has been tested with Twisted versions 11.0.0 and newer.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import functools
import datetime
import time
from twisted.internet.posixbase import PosixReactorBase
from twisted.internet.interfaces import \
@@ -85,6 +84,7 @@ from tornado.stack_context import NullContext, wrap
from tornado.ioloop import IOLoop
@implementer(IDelayedCall)
class TornadoDelayedCall(object):
"""DelayedCall object for Tornado."""
def __init__(self, reactor, seconds, f, *args, **kw):
@@ -125,10 +125,9 @@ class TornadoDelayedCall(object):
def active(self):
return self._active
# Fake class decorator for python 2.5 compatibility
TornadoDelayedCall = implementer(IDelayedCall)(TornadoDelayedCall)
@implementer(IReactorTime, IReactorFDSet)
class TornadoReactor(PosixReactorBase):
"""Twisted reactor built on the Tornado IOLoop.
@@ -235,7 +234,7 @@ class TornadoReactor(PosixReactorBase):
with NullContext():
self._fds[fd] = (reader, None)
self._io_loop.add_handler(fd, self._invoke_callback,
IOLoop.READ)
IOLoop.READ)
def addWriter(self, writer):
"""Add a FileDescriptor for notification of data available to write."""
@@ -254,7 +253,7 @@ class TornadoReactor(PosixReactorBase):
with NullContext():
self._fds[fd] = (None, writer)
self._io_loop.add_handler(fd, self._invoke_callback,
IOLoop.WRITE)
IOLoop.WRITE)
def removeReader(self, reader):
"""Remove a Selectable for notification of data available to read."""
@@ -316,7 +315,6 @@ class TornadoReactor(PosixReactorBase):
def mainLoop(self):
self._io_loop.start()
TornadoReactor = implementer(IReactorTime, IReactorFDSet)(TornadoReactor)
class _TestReactor(TornadoReactor):
@@ -352,6 +350,8 @@ def install(io_loop=None):
installReactor(reactor)
return reactor
@implementer(IReadDescriptor, IWriteDescriptor)
class _FD(object):
def __init__(self, fd, handler):
self.fd = fd
@@ -378,7 +378,7 @@ class _FD(object):
def logPrefix(self):
return ''
_FD = implementer(IReadDescriptor, IWriteDescriptor)(_FD)
class TwistedIOLoop(tornado.ioloop.IOLoop):
"""IOLoop implementation that runs on Twisted.
@@ -405,15 +405,15 @@ class TwistedIOLoop(tornado.ioloop.IOLoop):
if fd in self.fds:
raise ValueError('fd %d added twice' % fd)
self.fds[fd] = _FD(fd, wrap(handler))
if events | tornado.ioloop.IOLoop.READ:
if events & tornado.ioloop.IOLoop.READ:
self.fds[fd].reading = True
self.reactor.addReader(self.fds[fd])
if events | tornado.ioloop.IOLoop.WRITE:
if events & tornado.ioloop.IOLoop.WRITE:
self.fds[fd].writing = True
self.reactor.addWriter(self.fds[fd])
def update_handler(self, fd, events):
if events | tornado.ioloop.IOLoop.READ:
if events & tornado.ioloop.IOLoop.READ:
if not self.fds[fd].reading:
self.fds[fd].reading = True
self.reactor.addReader(self.fds[fd])
@@ -421,7 +421,7 @@ class TwistedIOLoop(tornado.ioloop.IOLoop):
if self.fds[fd].reading:
self.fds[fd].reading = False
self.reactor.removeReader(self.fds[fd])
if events | tornado.ioloop.IOLoop.WRITE:
if events & tornado.ioloop.IOLoop.WRITE:
if not self.fds[fd].writing:
self.fds[fd].writing = True
self.reactor.addWriter(self.fds[fd])

View File

@@ -2,7 +2,7 @@
# for production use.
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import ctypes
import ctypes.wintypes

View File

@@ -16,10 +16,10 @@
"""Utilities for working with multiple processes."""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import errno
import functools
import multiprocessing
import os
import signal
import subprocess
@@ -34,18 +34,17 @@ from tornado.log import gen_log
from tornado import stack_context
try:
import multiprocessing # Python 2.6+
except ImportError:
multiprocessing = None
long # py2
except NameError:
long = int # py3
def cpu_count():
"""Returns the number of processors on this machine."""
if multiprocessing is not None:
try:
return multiprocessing.cpu_count()
except NotImplementedError:
pass
try:
return multiprocessing.cpu_count()
except NotImplementedError:
pass
try:
return os.sysconf("SC_NPROCESSORS_CONF")
except ValueError:
@@ -125,7 +124,7 @@ def fork_processes(num_processes, max_restarts=100):
while children:
try:
pid, status = os.wait()
except OSError, e:
except OSError as e:
if e.errno == errno.EINTR:
continue
raise
@@ -162,6 +161,7 @@ def task_id():
global _task_id
return _task_id
class Subprocess(object):
"""Wraps ``subprocess.Popen`` with IOStream support.
@@ -195,7 +195,7 @@ class Subprocess(object):
err_r, err_w = os.pipe()
kwargs['stderr'] = err_w
to_close.append(err_w)
self.stdout = PipeIOStream(err_r, io_loop=self.io_loop)
self.stderr = PipeIOStream(err_r, io_loop=self.io_loop)
self.proc = subprocess.Popen(*args, **kwargs)
for fd in to_close:
os.close(fd)
@@ -253,14 +253,14 @@ class Subprocess(object):
@classmethod
def _cleanup(cls):
for pid in cls._waiting.keys():
for pid in list(cls._waiting.keys()): # make a copy
cls._try_cleanup_process(pid)
@classmethod
def _try_cleanup_process(cls, pid):
try:
ret_pid, status = os.waitpid(pid, os.WNOHANG)
except OSError, e:
except OSError as e:
if e.args[0] == errno.ECHILD:
return
if ret_pid == 0:

View File

@@ -1,14 +1,14 @@
#!/usr/bin/env python
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
from tornado.escape import utf8, _unicode, native_str
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy
from tornado.httputil import HTTPHeaders
from tornado.iostream import IOStream, SSLIOStream
from tornado.netutil import Resolver
from tornado.netutil import Resolver, OverrideResolver
from tornado.log import gen_log
from tornado import stack_context
from tornado.util import b, GzipDecompressor
from tornado.util import GzipDecompressor
import base64
import collections
@@ -17,9 +17,8 @@ import functools
import os.path
import re
import socket
import ssl
import sys
import time
import urlparse
try:
from io import BytesIO # python 3
@@ -27,9 +26,9 @@ except ImportError:
from cStringIO import StringIO as BytesIO # python 2
try:
import ssl # python 2.6+
import urlparse # py2
except ImportError:
ssl = None
import urllib.parse as urlparse # py3
_DEFAULT_CA_CERTS = os.path.dirname(__file__) + '/ca-certificates.crt'
@@ -45,12 +44,8 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
supported. In particular, proxies are not supported, connections
are not reused, and callers cannot select the network interface to be
used.
Python 2.6 or higher is required for HTTPS support. Users of Python 2.5
should use the curl-based AsyncHTTPClient if HTTPS support is required.
"""
def initialize(self, io_loop=None, max_clients=10,
def initialize(self, io_loop, max_clients=10,
hostname_mapping=None, max_buffer_size=104857600,
resolver=None, defaults=None):
"""Creates a AsyncHTTPClient.
@@ -72,32 +67,24 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
max_buffer_size is the number of bytes that can be read by IOStream. It
defaults to 100mb.
"""
self.io_loop = io_loop
super(SimpleAsyncHTTPClient, self).initialize(io_loop,
defaults=defaults)
self.max_clients = max_clients
self.queue = collections.deque()
self.active = {}
self.hostname_mapping = hostname_mapping
self.max_buffer_size = max_buffer_size
self.resolver = resolver or Resolver(io_loop=io_loop)
self.defaults = dict(HTTPRequest._DEFAULTS)
if defaults is not None:
self.defaults.update(defaults)
if hostname_mapping is not None:
self.resolver = OverrideResolver(resolver=self.resolver,
mapping=hostname_mapping)
def fetch(self, request, callback, **kwargs):
if not isinstance(request, HTTPRequest):
request = HTTPRequest(url=request, **kwargs)
# We're going to modify this (to add Host, Accept-Encoding, etc),
# so make sure we don't modify the caller's object. This is also
# where normal dicts get converted to HTTPHeaders objects.
request.headers = HTTPHeaders(request.headers)
request = _RequestProxy(request, self.defaults)
callback = stack_context.wrap(callback)
def fetch_impl(self, request, callback):
self.queue.append((request, callback))
self._process_queue()
if self.queue:
gen_log.debug("max_clients limit reached, request queued. "
"%d active, %d queued requests." % (
len(self.active), len(self.queue)))
len(self.active), len(self.queue)))
def _process_queue(self):
with stack_context.NullContext():
@@ -108,7 +95,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
_HTTPConnection(self.io_loop, self, request,
functools.partial(self._release_fetch, key),
callback,
self.max_buffer_size)
self.max_buffer_size, self.resolver)
def _release_fetch(self, key):
del self.active[key]
@@ -119,7 +106,7 @@ class _HTTPConnection(object):
_SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"])
def __init__(self, io_loop, client, request, release_callback,
final_callback, max_buffer_size):
final_callback, max_buffer_size, resolver):
self.start_time = io_loop.time()
self.io_loop = io_loop
self.client = client
@@ -127,6 +114,7 @@ class _HTTPConnection(object):
self.release_callback = release_callback
self.final_callback = final_callback
self.max_buffer_size = max_buffer_size
self.resolver = resolver
self.code = None
self.headers = None
self.chunks = None
@@ -135,9 +123,6 @@ class _HTTPConnection(object):
self._timeout = None
with stack_context.ExceptionStackContext(self._handle_exception):
self.parsed = urlparse.urlsplit(_unicode(self.request.url))
if ssl is None and self.parsed.scheme == "https":
raise ValueError("HTTPS requires either python2.6+ or "
"curl_httpclient")
if self.parsed.scheme not in ("http", "https"):
raise ValueError("Unsupported url scheme: %s" %
self.request.url)
@@ -157,8 +142,6 @@ class _HTTPConnection(object):
# raw ipv6 addresses in urls are enclosed in brackets
host = host[1:-1]
self.parsed_hostname = host # save final host for _on_connect
if self.client.hostname_mapping is not None:
host = self.client.hostname_mapping.get(host, host)
if request.allow_ipv6:
af = socket.AF_UNSPEC
@@ -167,7 +150,7 @@ class _HTTPConnection(object):
# so restrict to ipv4 by default.
af = socket.AF_INET
self.client.resolver.getaddrinfo(
self.resolver.getaddrinfo(
host, port, af, socket.SOCK_STREAM, 0, 0,
callback=self._on_resolve)
@@ -220,31 +203,29 @@ class _HTTPConnection(object):
self.start_time + timeout,
stack_context.wrap(self._on_timeout))
self.stream.set_close_callback(self._on_close)
self.stream.connect(sockaddr, self._on_connect)
# ipv6 addresses are broken (in self.parsed.hostname) until
# 2.7, here is correctly parsed value calculated in __init__
self.stream.connect(sockaddr, self._on_connect,
server_hostname=self.parsed_hostname)
def _on_timeout(self):
self._timeout = None
if self.final_callback is not None:
raise HTTPError(599, "Timeout")
def _on_connect(self):
def _remove_timeout(self):
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
def _on_connect(self):
self._remove_timeout()
if self.request.request_timeout:
self._timeout = self.io_loop.add_timeout(
self.start_time + self.request.request_timeout,
stack_context.wrap(self._on_timeout))
if (self.request.validate_cert and
isinstance(self.stream, SSLIOStream)):
match_hostname(self.stream.socket.getpeercert(),
# ipv6 addresses are broken (in
# self.parsed.hostname) until 2.7, here is
# correctly parsed value calculated in
# __init__
self.parsed_hostname)
if (self.request.method not in self._SUPPORTED_METHODS and
not self.request.allow_nonstandard_methods):
not self.request.allow_nonstandard_methods):
raise KeyError("unknown method %s" % self.request.method)
for key in ('network_interface',
'proxy_host', 'proxy_port',
@@ -265,8 +246,8 @@ class _HTTPConnection(object):
username = self.request.auth_username
password = self.request.auth_password or ''
if username is not None:
auth = utf8(username) + b(":") + utf8(password)
self.request.headers["Authorization"] = (b("Basic ") +
auth = utf8(username) + b":" + utf8(password)
self.request.headers["Authorization"] = (b"Basic " +
base64.b64encode(auth))
if self.request.user_agent:
self.request.headers["User-Agent"] = self.request.user_agent
@@ -277,25 +258,25 @@ class _HTTPConnection(object):
assert self.request.body is None
if self.request.body is not None:
self.request.headers["Content-Length"] = str(len(
self.request.body))
self.request.body))
if (self.request.method == "POST" and
"Content-Type" not in self.request.headers):
"Content-Type" not in self.request.headers):
self.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
if self.request.use_gzip:
self.request.headers["Accept-Encoding"] = "gzip"
req_path = ((self.parsed.path or '/') +
(('?' + self.parsed.query) if self.parsed.query else ''))
(('?' + self.parsed.query) if self.parsed.query else ''))
request_lines = [utf8("%s %s HTTP/1.1" % (self.request.method,
req_path))]
for k, v in self.request.headers.get_all():
line = utf8(k) + b(": ") + utf8(v)
if b('\n') in line:
line = utf8(k) + b": " + utf8(v)
if b'\n' in line:
raise ValueError('Newline in header: ' + repr(line))
request_lines.append(line)
self.stream.write(b("\r\n").join(request_lines) + b("\r\n\r\n"))
self.stream.write(b"\r\n".join(request_lines) + b"\r\n\r\n")
if self.request.body is not None:
self.stream.write(self.request.body)
self.stream.read_until_regex(b("\r?\n\r?\n"), self._on_headers)
self.stream.read_until_regex(b"\r?\n\r?\n", self._on_headers)
def _release(self):
if self.release_callback is not None:
@@ -312,10 +293,11 @@ class _HTTPConnection(object):
def _handle_exception(self, typ, value, tb):
if self.final_callback:
self._remove_timeout()
gen_log.warning("uncaught exception", exc_info=(typ, value, tb))
self._run_callback(HTTPResponse(self.request, 599, error=value,
request_time=self.io_loop.time() - self.start_time,
))
request_time=self.io_loop.time() - self.start_time,
))
if hasattr(self, "stream"):
self.stream.close()
@@ -334,19 +316,22 @@ class _HTTPConnection(object):
message = str(self.stream.error)
raise HTTPError(599, message)
def _handle_1xx(self, code):
self.stream.read_until_regex(b"\r?\n\r?\n", self._on_headers)
def _on_headers(self, data):
data = native_str(data.decode("latin1"))
first_line, _, header_data = data.partition("\n")
match = re.match("HTTP/1.[01] ([0-9]+) ([^\r]*)", first_line)
assert match
code = int(match.group(1))
self.headers = HTTPHeaders.parse(header_data)
if 100 <= code < 200:
self.stream.read_until_regex(b("\r?\n\r?\n"), self._on_headers)
self._handle_1xx(code)
return
else:
self.code = code
self.reason = match.group(2)
self.headers = HTTPHeaders.parse(header_data)
if "Content-Length" in self.headers:
if "," in self.headers["Content-Length"]:
@@ -372,38 +357,36 @@ class _HTTPConnection(object):
if self.request.method == "HEAD" or self.code == 304:
# HEAD requests and 304 responses never have content, even
# though they may have content-length headers
self._on_body(b(""))
self._on_body(b"")
return
if 100 <= self.code < 200 or self.code == 204:
# These response codes never have bodies
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
if ("Transfer-Encoding" in self.headers or
content_length not in (None, 0)):
content_length not in (None, 0)):
raise ValueError("Response with code %d should not have body" %
self.code)
self._on_body(b(""))
self._on_body(b"")
return
if (self.request.use_gzip and
self.headers.get("Content-Encoding") == "gzip"):
self.headers.get("Content-Encoding") == "gzip"):
self._decompressor = GzipDecompressor()
if self.headers.get("Transfer-Encoding") == "chunked":
self.chunks = []
self.stream.read_until(b("\r\n"), self._on_chunk_length)
self.stream.read_until(b"\r\n", self._on_chunk_length)
elif content_length is not None:
self.stream.read_bytes(content_length, self._on_body)
else:
self.stream.read_until_close(self._on_body)
def _on_body(self, data):
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
self._remove_timeout()
original_request = getattr(self.request, "original_request",
self.request)
if (self.request.follow_redirects and
self.request.max_redirects > 0 and
self.code in (301, 302, 303, 307)):
self.code in (301, 302, 303, 307)):
assert isinstance(self.request, _RequestProxy)
new_request = copy.copy(self.request.request)
new_request.url = urlparse.urljoin(self.request.url,
@@ -472,13 +455,13 @@ class _HTTPConnection(object):
# all the data has been decompressed, so we don't need to
# decompress again in _on_body
self._decompressor = None
self._on_body(b('').join(self.chunks))
self._on_body(b''.join(self.chunks))
else:
self.stream.read_bytes(length + 2, # chunk ends with \r\n
self._on_chunk_data)
self._on_chunk_data)
def _on_chunk_data(self, data):
assert data[-2:] == b("\r\n")
assert data[-2:] == b"\r\n"
chunk = data[:-2]
if self._decompressor:
chunk = self._decompressor.decompress(chunk)
@@ -486,69 +469,9 @@ class _HTTPConnection(object):
self.request.streaming_callback(chunk)
else:
self.chunks.append(chunk)
self.stream.read_until(b("\r\n"), self._on_chunk_length)
self.stream.read_until(b"\r\n", self._on_chunk_length)
# match_hostname was added to the standard library ssl module in python 3.2.
# The following code was backported for older releases and copied from
# https://bitbucket.org/brandon/backports.ssl_match_hostname
class CertificateError(ValueError):
pass
def _dnsname_to_pat(dn):
pats = []
for frag in dn.split(r'.'):
if frag == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
else:
# Otherwise, '*' matches any dotless fragment.
frag = re.escape(frag)
pats.append(frag.replace(r'\*', '[^.]*'))
return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
are mostly followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if not san:
# The subject is only checked when subjectAltName is empty
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
if __name__ == "__main__":
AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
main()

View File

@@ -14,7 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
'''StackContext allows applications to maintain threadlocal-like state
"""StackContext allows applications to maintain threadlocal-like state
that follows execution as it moves to other execution contexts.
The motivating examples are to eliminate the need for explicit
@@ -64,9 +64,9 @@ Here are a few rules of thumb for when it's necessary:
persist across asynchronous calls, create a new `StackContext` (or
`ExceptionStackContext`), and make your asynchronous calls in a ``with``
block that references your `StackContext`.
'''
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import contextlib
import functools
@@ -77,6 +77,10 @@ import threading
from tornado.util import raise_exc_info
class StackContextInconsistentError(Exception):
pass
class _State(threading.local):
def __init__(self):
self.contexts = ()
@@ -84,7 +88,7 @@ _state = _State()
class StackContext(object):
'''Establishes the given context as a StackContext that will be transferred.
"""Establishes the given context as a StackContext that will be transferred.
Note that the parameter is a callable that returns a context
manager, not the context itself. That is, where for a
@@ -102,7 +106,7 @@ class StackContext(object):
deactivating a context does not affect any instances of that
context that are currently pending). This is an advanced feature
and not necessary in most applications.
'''
"""
def __init__(self, context_factory, _active_cell=None):
self.context_factory = context_factory
self.active_cell = _active_cell or [True]
@@ -113,8 +117,10 @@ class StackContext(object):
def __enter__(self):
self.old_contexts = _state.contexts
# _state.contexts is a tuple of (class, arg, active_cell) tuples
_state.contexts = (self.old_contexts +
((StackContext, self.context_factory, self.active_cell),))
self.new_contexts = (self.old_contexts +
((StackContext, self.context_factory,
self.active_cell),))
_state.contexts = self.new_contexts
try:
self.context = self.context_factory()
self.context.__enter__()
@@ -127,11 +133,23 @@ class StackContext(object):
try:
return self.context.__exit__(type, value, traceback)
finally:
final_contexts = _state.contexts
_state.contexts = self.old_contexts
# Generator coroutines and with-statements with non-local
# effects interact badly. Check here for signs of
# the stack getting out of sync.
# Note that this check comes after restoring _state.context
# so that if it fails things are left in a (relatively)
# consistent state.
if final_contexts is not self.new_contexts:
raise StackContextInconsistentError(
'stack_context inconsistency (may be caused by yield '
'within a "with StackContext" block)')
self.old_contexts = self.new_contexts = None
class ExceptionStackContext(object):
'''Specialization of StackContext for exception handling.
"""Specialization of StackContext for exception handling.
The supplied exception_handler function will be called in the
event of an uncaught exception in this context. The semantics are
@@ -142,16 +160,17 @@ class ExceptionStackContext(object):
If the exception handler returns true, the exception will be
consumed and will not be propagated to other exception handlers.
'''
"""
def __init__(self, exception_handler, _active_cell=None):
self.exception_handler = exception_handler
self.active_cell = _active_cell or [True]
def __enter__(self):
self.old_contexts = _state.contexts
_state.contexts = (self.old_contexts +
((ExceptionStackContext, self.exception_handler,
self.active_cell),))
self.new_contexts = (self.old_contexts +
((ExceptionStackContext, self.exception_handler,
self.active_cell),))
_state.contexts = self.new_contexts
return lambda: operator.setitem(self.active_cell, 0, False)
def __exit__(self, type, value, traceback):
@@ -159,17 +178,22 @@ class ExceptionStackContext(object):
if type is not None:
return self.exception_handler(type, value, traceback)
finally:
final_contexts = _state.contexts
_state.contexts = self.old_contexts
self.old_contexts = None
if final_contexts is not self.new_contexts:
raise StackContextInconsistentError(
'stack_context inconsistency (may be caused by yield '
'within a "with StackContext" block)')
self.old_contexts = self.new_contexts = None
class NullContext(object):
'''Resets the StackContext.
"""Resets the StackContext.
Useful when creating a shared resource on demand (e.g. an AsyncHTTPClient)
where the stack that caused the creating is not relevant to future
operations.
'''
"""
def __enter__(self):
self.old_contexts = _state.contexts
_state.contexts = ()
@@ -183,13 +207,13 @@ class _StackContextWrapper(functools.partial):
def wrap(fn):
'''Returns a callable object that will restore the current StackContext
"""Returns a callable object that will restore the current StackContext
when executed.
Use this whenever saving a callback to be executed later in a
different execution context (either in a different thread or
asynchronously in the same thread).
'''
"""
if fn is None or fn.__class__ is _StackContextWrapper:
return fn
# functools.wraps doesn't appear to work on functools.partial objects

230
libs/tornado/tcpserver.py Executable file
View File

@@ -0,0 +1,230 @@
#!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A non-blocking, single-threaded TCP server."""
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import socket
import ssl
from tornado.log import app_log
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream, SSLIOStream
from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket
from tornado import process
class TCPServer(object):
    r"""A non-blocking, single-threaded TCP server.

    To use `TCPServer`, define a subclass which overrides the `handle_stream`
    method.

    `TCPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
    To make this server serve SSL traffic, send the ssl_options dictionary
    argument with the arguments required for the `ssl.wrap_socket` method,
    including "certfile" and "keyfile"::

       TCPServer(ssl_options={
           "certfile": os.path.join(data_dir, "mydomain.crt"),
           "keyfile": os.path.join(data_dir, "mydomain.key"),
       })

    `TCPServer` initialization follows one of three patterns:

    1. `listen`: simple single-process::

            server = TCPServer()
            server.listen(8888)
            IOLoop.instance().start()

    2. `bind`/`start`: simple multi-process::

            server = TCPServer()
            server.bind(8888)
            server.start(0)  # Forks multiple sub-processes
            IOLoop.instance().start()

       When using this interface, an `IOLoop` must *not* be passed
       to the `TCPServer` constructor.  `start` will always start
       the server on the default singleton `IOLoop`.

    3. `add_sockets`: advanced multi-process::

            sockets = bind_sockets(8888)
            tornado.process.fork_processes(0)
            server = TCPServer()
            server.add_sockets(sockets)
            IOLoop.instance().start()

       The `add_sockets` interface is more complicated, but it can be
       used with `tornado.process.fork_processes` to give you more
       flexibility in when the fork happens.  `add_sockets` can
       also be used in single-process servers if you want to create
       your listening sockets in some way other than
       `bind_sockets`.
    """
    def __init__(self, io_loop=None, ssl_options=None):
        """Create the server.

        :param io_loop: `IOLoop` to run on; if ``None``, the singleton
            instance is looked up lazily in `add_sockets` (required for
            the multi-process `bind`/`start` pattern).
        :param ssl_options: dict of keyword arguments for
            `ssl.wrap_socket`; if given, all accepted connections are
            wrapped in SSL.
        :raises KeyError: if ``ssl_options`` lacks ``"certfile"``.
        :raises ValueError: if the cert or key file does not exist.
        """
        self.io_loop = io_loop
        self.ssl_options = ssl_options
        self._sockets = {}  # fd -> socket object
        self._pending_sockets = []  # sockets bound before start() is called
        self._started = False

        # Verify the SSL options. Otherwise we don't get errors until clients
        # connect. This doesn't verify that the keys are legitimate, but
        # the SSL module doesn't do that until there is a connected socket
        # which seems like too much work
        if self.ssl_options is not None and isinstance(self.ssl_options, dict):
            # Only certfile is required: it can contain both keys
            if 'certfile' not in self.ssl_options:
                raise KeyError('missing key "certfile" in ssl_options')

            if not os.path.exists(self.ssl_options['certfile']):
                raise ValueError('certfile "%s" does not exist' %
                                 self.ssl_options['certfile'])
            if ('keyfile' in self.ssl_options and
                    not os.path.exists(self.ssl_options['keyfile'])):
                raise ValueError('keyfile "%s" does not exist' %
                                 self.ssl_options['keyfile'])

    def listen(self, port, address=""):
        """Starts accepting connections on the given port.

        This method may be called more than once to listen on multiple ports.
        `listen` takes effect immediately; it is not necessary to call
        `TCPServer.start` afterwards.  It is, however, necessary to start
        the `IOLoop`.
        """
        sockets = bind_sockets(port, address=address)
        self.add_sockets(sockets)

    def add_sockets(self, sockets):
        """Makes this server start accepting connections on the given sockets.

        The ``sockets`` parameter is a list of socket objects such as
        those returned by `bind_sockets`.
        `add_sockets` is typically used in combination with that
        method and `tornado.process.fork_processes` to provide greater
        control over the initialization of a multi-process server.
        """
        if self.io_loop is None:
            # Resolved here (not in __init__) so that, in the fork-based
            # patterns, each child process picks up its own singleton loop.
            self.io_loop = IOLoop.instance()

        for sock in sockets:
            self._sockets[sock.fileno()] = sock
            add_accept_handler(sock, self._handle_connection,
                               io_loop=self.io_loop)

    def add_socket(self, socket):
        """Singular version of `add_sockets`.  Takes a single socket object."""
        self.add_sockets([socket])

    def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128):
        """Binds this server to the given port on the given address.

        To start the server, call `start`. If you want to run this server
        in a single process, you can call `listen` as a shortcut to the
        sequence of `bind` and `start` calls.

        Address may be either an IP address or hostname.  If it's a hostname,
        the server will listen on all IP addresses associated with the
        name.  Address may be an empty string or None to listen on all
        available interfaces.  Family may be set to either ``socket.AF_INET``
        or ``socket.AF_INET6`` to restrict to ipv4 or ipv6 addresses, otherwise
        both will be used if available.

        The ``backlog`` argument has the same meaning as for
        `socket.listen`.

        This method may be called multiple times prior to `start` to listen
        on multiple ports or interfaces.
        """
        sockets = bind_sockets(port, address=address, family=family,
                               backlog=backlog)
        if self._started:
            self.add_sockets(sockets)
        else:
            # Defer registration until start(), so the process can fork
            # before any IOLoop handlers exist.
            self._pending_sockets.extend(sockets)

    def start(self, num_processes=1):
        """Starts this server in the IOLoop.

        By default, we run the server in this process and do not fork any
        additional child process.

        If num_processes is ``None`` or <= 0, we detect the number of cores
        available on this machine and fork that number of child
        processes. If num_processes is given and > 1, we fork that
        specific number of sub-processes.

        Since we use processes and not threads, there is no shared memory
        between any server code.

        Note that multiple processes are not compatible with the autoreload
        module (or the ``debug=True`` option to `tornado.web.Application`).
        When using multiple processes, no IOLoops can be created or
        referenced until after the call to ``TCPServer.start(n)``.
        """
        assert not self._started
        self._started = True
        if num_processes != 1:
            # Fork first so each child registers accept handlers on its
            # own IOLoop below.
            process.fork_processes(num_processes)
        sockets = self._pending_sockets
        self._pending_sockets = []
        self.add_sockets(sockets)

    def stop(self):
        """Stops listening for new connections.

        Requests currently in progress may still continue after the
        server is stopped.
        """
        for fd, sock in self._sockets.items():
            self.io_loop.remove_handler(fd)
            sock.close()

    def handle_stream(self, stream, address):
        """Override to handle a new `IOStream` from an incoming connection."""
        raise NotImplementedError()

    def _handle_connection(self, connection, address):
        # Accept-callback: wrap the raw socket (optionally in SSL) and hand
        # it to the subclass via handle_stream.
        if self.ssl_options is not None:
            assert ssl, "Python 2.6+ and OpenSSL required for SSL"
            try:
                # Handshake is deferred (do_handshake_on_connect=False) so
                # it proceeds non-blocking on the IOLoop via SSLIOStream.
                connection = ssl_wrap_socket(connection,
                                             self.ssl_options,
                                             server_side=True,
                                             do_handshake_on_connect=False)
            except ssl.SSLError as err:
                if err.args[0] == ssl.SSL_ERROR_EOF:
                    # Peer hung up mid-handshake; treat as a clean close.
                    return connection.close()
                else:
                    raise
            except socket.error as err:
                if err.args[0] == errno.ECONNABORTED:
                    # Client disconnected between accept() and wrap; drop it.
                    return connection.close()
                else:
                    raise
        try:
            if self.ssl_options is not None:
                stream = SSLIOStream(connection, io_loop=self.io_loop)
            else:
                stream = IOStream(connection, io_loop=self.io_loop)
            self.handle_stream(stream, address)
        except Exception:
            # A failing connection callback must not take down the accept
            # loop; log and keep serving.
            app_log.error("Error in connection callback", exc_info=True)

View File

@@ -179,9 +179,8 @@ with ``{# ... #}``.
``{% continue %}`` may be used inside the loop.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
import cStringIO
import datetime
import linecache
import os.path
@@ -191,7 +190,12 @@ import threading
from tornado import escape
from tornado.log import app_log
from tornado.util import bytes_type, ObjectDict
from tornado.util import bytes_type, ObjectDict, exec_in, unicode_type
try:
from cStringIO import StringIO # py2
except ImportError:
from io import StringIO # py3
_DEFAULT_AUTOESCAPE = "xhtml_escape"
_UNSET = object()
@@ -226,10 +230,12 @@ class Template(object):
try:
# Under python2.5, the fake filename used here must match
# the module name used in __name__ below.
# The dont_inherit flag prevents template.py's future imports
# from being applied to the generated code.
self.compiled = compile(
escape.to_unicode(self.code),
"%s.generated.py" % self.name.replace('.', '_'),
"exec")
"exec", dont_inherit=True)
except Exception:
formatted_code = _format_code(self.code).rstrip()
app_log.error("%s code:\n%s", self.name, formatted_code)
@@ -246,7 +252,7 @@ class Template(object):
"linkify": escape.linkify,
"datetime": datetime,
"_utf8": escape.utf8, # for internal use
"_string_types": (unicode, bytes_type),
"_string_types": (unicode_type, bytes_type),
# __name__ and __loader__ allow the traceback mechanism to find
# the generated source code.
"__name__": self.name.replace('.', '_'),
@@ -254,7 +260,7 @@ class Template(object):
}
namespace.update(self.namespace)
namespace.update(kwargs)
exec self.compiled in namespace
exec_in(self.compiled, namespace)
execute = namespace["_execute"]
# Clear the traceback module's cache of source data now that
# we've generated a new template (mainly for this module's
@@ -263,7 +269,7 @@ class Template(object):
return execute()
def _generate_python(self, loader, compress_whitespace):
buffer = cStringIO.StringIO()
buffer = StringIO()
try:
# named_blocks maps from names to _NamedBlock objects
named_blocks = {}
@@ -346,8 +352,8 @@ class Loader(BaseLoader):
def resolve_path(self, name, parent_path=None):
if parent_path and not parent_path.startswith("<") and \
not parent_path.startswith("/") and \
not name.startswith("/"):
not parent_path.startswith("/") and \
not name.startswith("/"):
current_path = os.path.join(self.root, parent_path)
file_dir = os.path.dirname(os.path.abspath(current_path))
relative_path = os.path.abspath(os.path.join(file_dir, name))
@@ -371,8 +377,8 @@ class DictLoader(BaseLoader):
def resolve_path(self, name, parent_path=None):
if parent_path and not parent_path.startswith("<") and \
not parent_path.startswith("/") and \
not name.startswith("/"):
not parent_path.startswith("/") and \
not name.startswith("/"):
file_dir = posixpath.dirname(parent_path)
name = posixpath.normpath(posixpath.join(file_dir, name))
return name
@@ -620,7 +626,7 @@ class _CodeWriter(object):
ancestors = ["%s:%d" % (tmpl.name, lineno)
for (tmpl, lineno) in self.include_stack]
line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
print >> self.file, " " * indent + line + line_comment
print(" " * indent + line + line_comment, file=self.file)
class _TemplateReader(object):
@@ -708,7 +714,7 @@ def _parse(reader, template, in_block=None, in_loop=None):
# innermost ones. This is useful when generating languages
# like latex where curlies are also meaningful
if (curly + 2 < reader.remaining() and
reader[curly + 1] == '{' and reader[curly + 2] == '{'):
reader[curly + 1] == '{' and reader[curly + 2] == '{'):
curly += 1
continue
break
@@ -775,7 +781,7 @@ def _parse(reader, template, in_block=None, in_loop=None):
if allowed_parents is not None:
if not in_block:
raise ParseError("%s outside %s block" %
(operator, allowed_parents))
(operator, allowed_parents))
if in_block not in allowed_parents:
raise ParseError("%s block cannot be attached to %s block" % (operator, in_block))
body.chunks.append(_IntermediateControlBlock(contents, line))

View File

@@ -18,10 +18,10 @@ inheritance. See the docstrings for each class/function below for more
information.
"""
from __future__ import absolute_import, division, with_statement
from __future__ import absolute_import, division, print_function, with_statement
from cStringIO import StringIO
try:
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from tornado.httpserver import HTTPServer
from tornado.simple_httpclient import SimpleAsyncHTTPClient
@@ -31,20 +31,27 @@ except ImportError:
# These modules are not importable on app engine. Parts of this module
# won't work, but e.g. LogTrapTestCase and main() will.
AsyncHTTPClient = None
gen = None
HTTPServer = None
IOLoop = None
netutil = None
SimpleAsyncHTTPClient = None
from tornado.log import gen_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import raise_exc_info
from tornado.util import raise_exc_info, basestring_type
import functools
import logging
import os
import re
import signal
import socket
import sys
import time
import types
try:
from io import StringIO # py3
except ImportError:
from cStringIO import StringIO # py2
# Tornado's own test suite requires the updated unittest module
# (either py27+ or unittest2) so tornado.test.util enforces
@@ -151,7 +158,7 @@ class AsyncTestCase(unittest.TestCase):
def tearDown(self):
self.io_loop.clear_current()
if (not IOLoop.initialized() or
self.io_loop is not IOLoop.instance()):
self.io_loop is not IOLoop.instance()):
# Try to clean up any file descriptors left open in the ioloop.
# This avoids leaks, especially when tests are run repeatedly
# in the same process with autoreload (because curl does not
@@ -160,10 +167,10 @@ class AsyncTestCase(unittest.TestCase):
super(AsyncTestCase, self).tearDown()
def get_new_ioloop(self):
'''Creates a new IOLoop for this test. May be overridden in
"""Creates a new IOLoop for this test. May be overridden in
subclasses for tests that require a specific IOLoop (usually
the singleton).
'''
"""
return IOLoop()
def _handle_exception(self, typ, value, tb):
@@ -185,12 +192,12 @@ class AsyncTestCase(unittest.TestCase):
self.__rethrow()
def stop(self, _arg=None, **kwargs):
'''Stops the ioloop, causing one pending (or future) call to wait()
"""Stops the ioloop, causing one pending (or future) call to wait()
to return.
Keyword arguments or a single positional argument passed to stop() are
saved and will be returned by wait().
'''
"""
assert _arg is None or not kwargs
self.__stop_args = kwargs or _arg
if self.__running:
@@ -211,20 +218,21 @@ class AsyncTestCase(unittest.TestCase):
def timeout_func():
try:
raise self.failureException(
'Async operation timed out after %s seconds' %
timeout)
'Async operation timed out after %s seconds' %
timeout)
except Exception:
self.__failure = sys.exc_info()
self.stop()
if self.__timeout is not None:
self.io_loop.remove_timeout(self.__timeout)
self.__timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, timeout_func)
while True:
self.__running = True
self.io_loop.start()
if (self.__failure is not None or
condition is None or condition()):
condition is None or condition()):
break
if self.__timeout is not None:
self.io_loop.remove_timeout(self.__timeout)
self.__timeout = None
assert self.__stopped
self.__stopped = False
self.__rethrow()
@@ -234,7 +242,7 @@ class AsyncTestCase(unittest.TestCase):
class AsyncHTTPTestCase(AsyncTestCase):
'''A test case that starts up an HTTP server.
"""A test case that starts up an HTTP server.
Subclasses must override get_app(), which returns the
tornado.web.Application (or other HTTPServer callback) to be tested.
@@ -255,7 +263,7 @@ class AsyncHTTPTestCase(AsyncTestCase):
self.http_client.fetch(self.get_url('/'), self.stop)
response = self.wait()
# test contents of response
'''
"""
def setUp(self):
super(AsyncHTTPTestCase, self).setUp()
sock, port = bind_unused_port()
@@ -314,7 +322,7 @@ class AsyncHTTPTestCase(AsyncTestCase):
def tearDown(self):
self.http_server.stop()
if (not IOLoop.initialized() or
self.http_client.io_loop is not IOLoop.instance()):
self.http_client.io_loop is not IOLoop.instance()):
self.http_client.close()
super(AsyncHTTPTestCase, self).tearDown()
@@ -342,13 +350,43 @@ class AsyncHTTPSTestCase(AsyncHTTPTestCase):
# openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
module_dir = os.path.dirname(__file__)
return dict(
certfile=os.path.join(module_dir, 'test', 'test.crt'),
keyfile=os.path.join(module_dir, 'test', 'test.key'))
certfile=os.path.join(module_dir, 'test', 'test.crt'),
keyfile=os.path.join(module_dir, 'test', 'test.key'))
def get_protocol(self):
return 'https'
def gen_test(f):
"""Testing equivalent of ``@gen.engine``, to be applied to test methods.
``@gen.engine`` cannot be used on tests because the `IOLoop` is not
already running. ``@gen_test`` should be applied to test methods
on subclasses of `AsyncTestCase`.
Note that unlike most uses of ``@gen.engine``, ``@gen_test`` can
detect automatically when the function finishes cleanly so there
is no need to run a callback to signal completion.
Example::
class MyTest(AsyncHTTPTestCase):
@gen_test
def test_something(self):
response = yield gen.Task(self.fetch('/'))
"""
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
result = f(self, *args, **kwargs)
if result is None:
return
assert isinstance(result, types.GeneratorType)
runner = gen.Runner(result, self.stop)
runner.run()
self.wait()
return wrapper
class LogTrapTestCase(unittest.TestCase):
"""A test case that captures and discards all logging output
if the test passes.
@@ -361,21 +399,21 @@ class LogTrapTestCase(unittest.TestCase):
This class assumes that only one log handler is configured and that
it is a StreamHandler. This is true for both logging.basicConfig
and the "pretty logging" configured by tornado.options.
and the "pretty logging" configured by tornado.options. It is not
compatible with other log buffering mechanisms, such as those provided
by some test runners.
"""
def run(self, result=None):
logger = logging.getLogger()
if len(logger.handlers) > 1:
# Multiple handlers have been defined. It gets messy to handle
# this, especially since the handlers may have different
# formatters. Just leave the logging alone in this case.
super(LogTrapTestCase, self).run(result)
return
if not logger.handlers:
logging.basicConfig()
self.assertEqual(len(logger.handlers), 1)
handler = logger.handlers[0]
assert isinstance(handler, logging.StreamHandler)
if (len(logger.handlers) > 1 or
not isinstance(handler, logging.StreamHandler)):
# Logging has been configured in a way we don't recognize,
# so just leave it alone.
super(LogTrapTestCase, self).run(result)
return
old_stream = handler.stream
try:
handler.stream = StringIO()
@@ -410,7 +448,7 @@ class ExpectLog(logging.Filter):
:param required: If true, an exception will be raised if the end of
the ``with`` statement is reached without matching any log entries.
"""
if isinstance(logger, basestring):
if isinstance(logger, basestring_type):
logger = logging.getLogger(logger)
self.logger = logger
self.regex = re.compile(regex)
@@ -496,7 +534,7 @@ def main(**kwargs):
kwargs['buffer'] = True
if __name__ == '__main__' and len(argv) == 1:
print >> sys.stderr, "No tests specified"
print("No tests specified", file=sys.stderr)
sys.exit(1)
try:
# In order to be able to run tests by their fully-qualified name
@@ -509,7 +547,7 @@ def main(**kwargs):
unittest.main(module=None, argv=argv, **kwargs)
else:
unittest.main(defaultTest="all", argv=argv, **kwargs)
except SystemExit, e:
except SystemExit as e:
if e.code == 0:
gen_log.info('PASS')
else:

Some files were not shown because too many files have changed in this diff Show More