diff --git a/CouchPotato.py b/CouchPotato.py index 364d77a4..7049cda5 100755 --- a/CouchPotato.py +++ b/CouchPotato.py @@ -19,7 +19,12 @@ base_path = dirname(os.path.abspath(__file__)) sys.path.insert(0, os.path.join(base_path, 'libs')) from couchpotato.environment import Env -from couchpotato.core.helpers.variable import getDataDir +from couchpotato.core.helpers.variable import getDataDir, removePyc + + +# Remove pyc files before dynamic load (sees .pyc files regular .py modules) +removePyc(base_path) + class Loader(object): @@ -50,7 +55,7 @@ class Loader(object): # Create logging dir self.log_dir = os.path.join(self.data_dir, 'logs'); if not os.path.isdir(self.log_dir): - os.mkdir(self.log_dir) + os.makedirs(self.log_dir) # Logging from couchpotato.core.logger import CPLog @@ -67,10 +72,11 @@ class Loader(object): signal.signal(signal.SIGTERM, lambda signum, stack_frame: sys.exit(1)) from couchpotato.core.event import addEvent - addEvent('app.after_shutdown', self.afterShutdown) + addEvent('app.do_shutdown', self.setRestart) - def afterShutdown(self, restart): + def setRestart(self, restart): self.do_restart = restart + return True def onExit(self, signal, frame): from couchpotato.core.event import fireEvent @@ -98,7 +104,6 @@ class Loader(object): # Release log files and shutdown logger logging.shutdown() - time.sleep(3) args = [sys.executable] + [os.path.join(base_path, os.path.basename(__file__))] + sys.argv[1:] subprocess.Popen(args) diff --git a/README.md b/README.md index 10d3b773..4dbe75bb 100644 --- a/README.md +++ b/README.md @@ -17,9 +17,9 @@ Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for * Open up `Git Bash` (or CMD) and go to the folder you want to install CP. Something like Program Files. * Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`. 
* You can now start CP via `CouchPotatoServer\CouchPotato.py` to start -* Your browser should open up, but if it doesn't go to: `http://localhost:5050/` +* Your browser should open up, but if it doesn't go to `http://localhost:5050/` -OSx: +OS X: * If you're on Leopard (10.5) install Python 2.6+: [Python 2.6.5](http://www.python.org/download/releases/2.6.5/) * Install [GIT](http://git-scm.com/) @@ -27,19 +27,20 @@ OSx: * Go to your App folder `cd /Applications` * Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git` * Then do `python CouchPotatoServer/CouchPotato.py` -* Your browser should open up, but if it doesn't go to: `http://localhost:5050/` +* Your browser should open up, but if it doesn't go to `http://localhost:5050/` -Linux (ubuntu / debian): +Linux (Ubuntu / Debian): * Install [GIT](http://git-scm.com/) with `apt-get install git-core` * 'cd' to the folder of your choosing. * Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git` * Then do `python CouchPotatoServer/CouchPotato.py` to start -* To run on boot copy the init script. `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato` -* Change the paths inside the init script. `sudo nano /etc/init.d/couchpotato` -* Make it executable. `sudo chmod +x /etc/init.d/couchpotato` -* Add it to defaults. 
`sudo update-rc.d couchpotato defaults` -* Open your browser and go to: `http://localhost:5050/` +* To run on boot copy the init script `sudo cp CouchPotatoServer/init/ubuntu /etc/init.d/couchpotato` +* Copy the default paths file `sudo cp CouchPotatoServer/init/ubuntu.default /etc/default/couchpotato` +* Change the paths inside the default file `sudo nano /etc/default/couchpotato` +* Make it executable `sudo chmod +x /etc/init.d/couchpotato` +* Add it to defaults `sudo update-rc.d couchpotato defaults` +* Open your browser and go to `http://localhost:5050/` FreeBSD : diff --git a/contributing.md b/contributing.md index 0d82d71c..821212c8 100644 --- a/contributing.md +++ b/contributing.md @@ -22,11 +22,11 @@ Before you submit an issue, please go through the following checklist: * What providers are you using? (While your logs include these, scanning through hundreds of lines of logs isn't our hobby) * Post the logs from the *config* directory, please do not copy paste the UI. Use pastebin to store these logs! * Give a short step by step of how to reproduce the error. - * What hardware / OS are you using and what are its limitations? For example: NAS can be slow and maybe have a different version of python installed then when you use CP on OSX or Windows. + * What hardware / OS are you using and what are its limitations? For example: NAS can be slow and maybe have a different version of python installed than when you use CP on OS X or Windows. * Your issue might be marked with the "can't reproduce" tag. Don't ask why your issue was closed if it says so in the tag. - * If you're running on a NAS (QNAP, Austor etc..) with pre-made packages, make sure these are set up to use our source repository (RuudBurger/CouchPotatoServer) and nothing else!! - -The more relevant information you can provide, the more likely it is the issue will be resolved rather than closed. + * If you're running on a NAS (QNAP, Austor, Synology etc.) 
with pre-made packages, make sure these are set up to use our source repository (RuudBurger/CouchPotatoServer) and nothing else! + +The more relevant information you provide, the more likely that your issue will be resolved. ## Pull Requests Pull requests are intended for contributing code or documentation to the project. Before you submit a pull request, consider the following: diff --git a/couchpotato/__init__.py b/couchpotato/__init__.py index 092bd406..daa93bcc 100644 --- a/couchpotato/__init__.py +++ b/couchpotato/__init__.py @@ -1,3 +1,7 @@ +import os +import time +import traceback + from couchpotato.api import api_docs, api_docs_missing, api from couchpotato.core.event import fireEvent from couchpotato.core.helpers.variable import md5, tryInt @@ -5,9 +9,6 @@ from couchpotato.core.logger import CPLog from couchpotato.environment import Env from tornado import template from tornado.web import RequestHandler, authenticated -import os -import time -import traceback log = CPLog(__name__) @@ -45,7 +46,7 @@ class WebHandler(BaseHandler): self.write({'success': False, 'error': 'Failed returning results'}) -def addView(route, func, static = False): +def addView(route, func): views[route] = func diff --git a/couchpotato/api.py b/couchpotato/api.py index 5fdee986..b21cfeb0 100644 --- a/couchpotato/api.py +++ b/couchpotato/api.py @@ -7,9 +7,7 @@ import urllib from couchpotato.core.helpers.request import getParams from couchpotato.core.logger import CPLog -from tornado.gen import coroutine from tornado.web import RequestHandler, asynchronous -import tornado log = CPLog(__name__) @@ -28,10 +26,18 @@ def run_async(func): def async_func(*args, **kwargs): func_hl = Thread(target = func, args = args, kwargs = kwargs) func_hl.start() - return func_hl return async_func +@run_async +def run_handler(route, kwargs, callback = None): + try: + res = api[route](**kwargs) + callback(res, route) + except: + log.error('Failed doing api request "%s": %s', (route, traceback.format_exc())) 
+ callback({'success': False, 'error': 'Failed returning results'}, route) + # NonBlock API handler class NonBlockHandler(RequestHandler): @@ -78,13 +84,18 @@ def addNonBlockApiView(route, func_tuple, docs = None, **kwargs): # Blocking API handler class ApiHandler(RequestHandler): - @coroutine + @asynchronous def get(self, route, *args, **kwargs): route = route.strip('/') if not api.get(route): self.write('API call doesn\'t seem to exist') + self.finish() return + # Create lock if it doesn't exist + if route in api_locks and not api_locks.get(route): + api_locks[route] = threading.Lock() + api_locks[route].acquire() try: @@ -102,36 +113,43 @@ class ApiHandler(RequestHandler): except: pass # Add async callback handler - @run_async - def run_handler(callback): - try: - res = api[route](**kwargs) - callback(res) - except: - log.error('Failed doing api request "%s": %s', (route, traceback.format_exc())) - callback({'success': False, 'error': 'Failed returning results'}) - - result = yield tornado.gen.Task(run_handler) - - # Check JSONP callback - jsonp_callback = self.get_argument('callback_func', default = None) - - if jsonp_callback: - self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')') - self.set_header("Content-Type", "text/javascript") - elif isinstance(result, tuple) and result[0] == 'redirect': - self.redirect(result[1]) - else: - self.write(result) + run_handler(route, kwargs, callback = self.taskFinished) except: log.error('Failed doing api request "%s": %s', (route, traceback.format_exc())) - self.write({'success': False, 'error': 'Failed returning results'}) + try: + self.write({'success': False, 'error': 'Failed returning results'}) + self.finish() + except: + log.error('Failed write error "%s": %s', (route, traceback.format_exc())) - api_locks[route].release() + api_locks[route].release() post = get + def taskFinished(self, result, route): + + if not self.request.connection.stream.closed(): + try: + # Check JSONP callback + jsonp_callback = 
self.get_argument('callback_func', default = None) + + if jsonp_callback: + self.write(str(jsonp_callback) + '(' + json.dumps(result) + ')') + self.set_header("Content-Type", "text/javascript") + self.finish() + elif isinstance(result, tuple) and result[0] == 'redirect': + self.redirect(result[1]) + else: + self.write(result) + self.finish() + except: + log.debug('Failed doing request, probably already closed: %s', (traceback.format_exc())) + try: self.finish({'success': False, 'error': 'Failed returning results'}) + except: pass + + api_locks[route].release() + def addApiView(route, func, static = False, docs = None, **kwargs): diff --git a/couchpotato/core/_base/_core.py b/couchpotato/core/_base/_core.py index ca45ceb0..852c42c2 100644 --- a/couchpotato/core/_base/_core.py +++ b/couchpotato/core/_base/_core.py @@ -8,7 +8,7 @@ import webbrowser from couchpotato.api import addApiView from couchpotato.core.event import fireEvent, addEvent -from couchpotato.core.helpers.variable import cleanHost, md5 +from couchpotato.core.helpers.variable import cleanHost, md5, isSubFolder from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from couchpotato.environment import Env @@ -71,13 +71,14 @@ class Core(Plugin): return value if value and len(value) > 3 else uuid4().hex def checkDataDir(self): - if Env.get('app_dir') in Env.get('data_dir'): + if isSubFolder(Env.get('data_dir'), Env.get('app_dir')): log.error('You should NOT use your CouchPotato directory to save your settings in. 
Files will get overwritten or be deleted.') return True def cleanUpFolders(self): - self.deleteEmptyFolder(Env.get('app_dir'), show_error = False) + only_clean = ['couchpotato', 'libs', 'init'] + self.deleteEmptyFolder(Env.get('app_dir'), show_error = False, only_clean = only_clean) def available(self, **kwargs): return { @@ -117,7 +118,7 @@ class Core(Plugin): self.shutdown_started = True - fireEvent('app.do_shutdown') + fireEvent('app.do_shutdown', restart = restart) log.debug('Every plugin got shutdown event') loop = True @@ -142,9 +143,11 @@ class Core(Plugin): log.debug('Safe to shutdown/restart') + loop = IOLoop.current() + try: - if not IOLoop.current()._closing: - IOLoop.current().stop() + if not loop._closing: + loop.stop() except RuntimeError: pass except: diff --git a/couchpotato/core/_base/downloader/main.py b/couchpotato/core/_base/downloader/main.py index 80890e4a..7ef98af2 100644 --- a/couchpotato/core/_base/downloader/main.py +++ b/couchpotato/core/_base/downloader/main.py @@ -25,6 +25,7 @@ class DownloaderBase(Provider): status_support = True torrent_sources = [ + 'https://zoink.it/torrent/%s.torrent', 'http://torrage.com/torrent/%s.torrent', 'https://torcache.net/torrent/%s.torrent', ] @@ -72,6 +73,9 @@ class DownloaderBase(Provider): return return self.download(data = data, media = media, filedata = filedata) + def download(self, *args, **kwargs): + return False + def _getAllDownloadStatus(self, download_ids): if self.isDisabled(manual = True, data = {}): return diff --git a/couchpotato/core/_base/downloader/static/downloaders.js b/couchpotato/core/_base/downloader/static/downloaders.js index ec85ce3d..45215158 100644 --- a/couchpotato/core/_base/downloader/static/downloaders.js +++ b/couchpotato/core/_base/downloader/static/downloaders.js @@ -40,15 +40,16 @@ var DownloadersBase = new Class({ button.set('text', button_name); + var message; if(json.success){ - var message = new Element('span.success', { + message = new Element('span.success', { 
'text': 'Connection successful' }).inject(button, 'after') } else { var msg_text = 'Connection failed. Check logs for details.'; if(json.hasOwnProperty('msg')) msg_text = json.msg; - var message = new Element('span.failed', { + message = new Element('span.failed', { 'text': msg_text }).inject(button, 'after') } diff --git a/couchpotato/core/_base/scheduler.py b/couchpotato/core/_base/scheduler.py index 16a3a4f2..271a2d81 100644 --- a/couchpotato/core/_base/scheduler.py +++ b/couchpotato/core/_base/scheduler.py @@ -33,9 +33,9 @@ class Scheduler(Plugin): except: pass - def doShutdown(self): + def doShutdown(self, *args, **kwargs): self.stop() - return super(Scheduler, self).doShutdown() + return super(Scheduler, self).doShutdown(*args, **kwargs) def stop(self): if self.started: diff --git a/couchpotato/core/_base/updater/main.py b/couchpotato/core/_base/updater/main.py index 9ebecce2..7730d3bd 100644 --- a/couchpotato/core/_base/updater/main.py +++ b/couchpotato/core/_base/updater/main.py @@ -10,13 +10,13 @@ from threading import RLock from couchpotato.api import addApiView from couchpotato.core.event import addEvent, fireEvent, fireEventAsync -from couchpotato.core.helpers.encoding import ss, sp +from couchpotato.core.helpers.encoding import sp +from couchpotato.core.helpers.variable import removePyc from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from couchpotato.environment import Env from dateutil.parser import parse from git.repository import LocalRepository -from scandir import scandir import version from six.moves import filter @@ -142,9 +142,11 @@ class Updater(Plugin): 'success': success } - def doShutdown(self): - self.updater.deletePyc(show_logs = False) - return super(Updater, self).doShutdown() + def doShutdown(self, *args, **kwargs): + if not Env.get('dev') and not Env.get('desktop'): + removePyc(Env.get('app_dir'), show_logs = False) + + return super(Updater, self).doShutdown(*args, **kwargs) class 
BaseUpdater(Plugin): @@ -180,30 +182,6 @@ class BaseUpdater(Plugin): def check(self): pass - def deletePyc(self, only_excess = True, show_logs = True): - - for root, dirs, files in scandir.walk(Env.get('app_dir')): - - pyc_files = filter(lambda filename: filename.endswith('.pyc'), files) - py_files = set(filter(lambda filename: filename.endswith('.py'), files)) - excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files - - for excess_pyc_file in excess_pyc_files: - full_path = os.path.join(root, excess_pyc_file) - if show_logs: log.debug('Removing old PYC file: %s', full_path) - try: - os.remove(full_path) - except: - log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc())) - - for dir_name in dirs: - full_path = os.path.join(root, dir_name) - if len(os.listdir(full_path)) == 0: - try: - os.rmdir(full_path) - except: - log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc())) - class GitUpdater(BaseUpdater): @@ -327,13 +305,13 @@ class SourceUpdater(BaseUpdater): data_dir = Env.get('data_dir') # Get list of files we want to overwrite - self.deletePyc() + removePyc(app_dir) existing_files = [] - for root, subfiles, filenames in scandir.walk(app_dir): + for root, subfiles, filenames in os.walk(app_dir): for filename in filenames: existing_files.append(os.path.join(root, filename)) - for root, subfiles, filenames in scandir.walk(path): + for root, subfiles, filenames in os.walk(path): for filename in filenames: fromfile = os.path.join(root, filename) tofile = os.path.join(app_dir, fromfile.replace(path + os.path.sep, '')) diff --git a/couchpotato/core/database.py b/couchpotato/core/database.py index f9607137..c7051f5c 100644 --- a/couchpotato/core/database.py +++ b/couchpotato/core/database.py @@ -3,10 +3,11 @@ import os import time import traceback +from CodernityDB.index import IndexException, IndexNotFoundException, IndexConflict from couchpotato 
import CPLog from couchpotato.api import addApiView from couchpotato.core.event import addEvent, fireEvent -from couchpotato.core.helpers.encoding import toUnicode +from couchpotato.core.helpers.encoding import toUnicode, sp from couchpotato.core.helpers.variable import getImdb, tryInt @@ -15,18 +16,22 @@ log = CPLog(__name__) class Database(object): - indexes = [] + indexes = None db = None def __init__(self): + self.indexes = {} + addApiView('database.list_documents', self.listDocuments) addApiView('database.reindex', self.reindex) addApiView('database.compact', self.compact) addApiView('database.document.update', self.updateDocument) addApiView('database.document.delete', self.deleteDocument) + addEvent('database.setup.after', self.startup_compact) addEvent('database.setup_index', self.setupIndex) + addEvent('app.migrate', self.migrate) addEvent('app.after_shutdown', self.close) @@ -43,26 +48,45 @@ class Database(object): def setupIndex(self, index_name, klass): - self.indexes.append(index_name) + self.indexes[index_name] = klass db = self.getDB() # Category index index_instance = klass(db.path, index_name) try: - db.add_index(index_instance) - db.reindex_index(index_name) - except: - previous = db.indexes_names[index_name] - previous_version = previous._version - current_version = klass._version - # Only edit index if versions are different - if previous_version < current_version: - log.debug('Index "%s" already exists, updating and reindexing', index_name) - db.destroy_index(previous) + # Make sure store and bucket don't exist + exists = [] + for x in ['buck', 'stor']: + full_path = os.path.join(db.path, '%s_%s' % (index_name, x)) + if os.path.exists(full_path): + exists.append(full_path) + + if index_name not in db.indexes_names: + + # Remove existing buckets if index isn't there + for x in exists: + os.unlink(x) + + # Add index (will restore buckets) db.add_index(index_instance) db.reindex_index(index_name) + else: + # Previous info + previous = 
db.indexes_names[index_name] + previous_version = previous._version + current_version = klass._version + + # Only edit index if versions are different + if previous_version < current_version: + log.debug('Index "%s" already exists, updating and reindexing', index_name) + db.destroy_index(previous) + db.add_index(index_instance) + db.reindex_index(index_name) + + except: + log.error('Failed adding index %s: %s', (index_name, traceback.format_exc())) def deleteDocument(self, **kwargs): @@ -136,20 +160,80 @@ class Database(object): 'success': success } - def compact(self, **kwargs): + def compact(self, try_repair = True, **kwargs): + + success = False + db = self.getDB() + + # Removing left over compact files + db_path = sp(db.path) + for f in os.listdir(sp(db.path)): + for x in ['_compact_buck', '_compact_stor']: + if f[-len(x):] == x: + os.unlink(os.path.join(db_path, f)) - success = True try: - db = self.getDB() + start = time.time() + size = float(db.get_db_details().get('size', 0)) + log.debug('Compacting database, current size: %sMB', round(size/1048576, 2)) + db.compact() + new_size = float(db.get_db_details().get('size', 0)) + log.debug('Done compacting database in %ss, new size: %sMB, saved: %sMB', (round(time.time()-start, 2), round(new_size/1048576, 2), round((size-new_size)/1048576, 2))) + success = True + except (IndexException, AttributeError): + if try_repair: + log.error('Something wrong with indexes, trying repair') + + # Remove all indexes + old_indexes = self.indexes.keys() + for index_name in old_indexes: + try: + db.destroy_index(index_name) + except IndexNotFoundException: + pass + except: + log.error('Failed removing old index %s', index_name) + + # Add them again + for index_name in self.indexes: + klass = self.indexes[index_name] + + # Category index + index_instance = klass(db.path, index_name) + try: + db.add_index(index_instance) + db.reindex_index(index_name) + except IndexConflict: + pass + except: + log.error('Failed adding index %s', 
index_name) + raise + + self.compact(try_repair = False) + else: + log.error('Failed compact: %s', traceback.format_exc()) + except: log.error('Failed compact: %s', traceback.format_exc()) - success = False return { 'success': success } + # Compact on start + def startup_compact(self): + from couchpotato import Env + + db = self.getDB() + size = db.get_db_details().get('size') + prop_name = 'last_db_compact' + last_check = int(Env.prop(prop_name, default = 0)) + + if size > 26214400 and last_check < time.time()-604800: # 25MB / 7 days + self.compact() + Env.prop(prop_name, value = int(time.time())) + def migrate(self): from couchpotato import Env @@ -289,13 +373,16 @@ class Database(object): for profile_type in types: p_type = types[profile_type] if types[profile_type]['profile_id'] == p['id']: - new_profile['finish'].append(p_type['finish']) - new_profile['wait_for'].append(p_type['wait_for']) - new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier']) + if p_type['quality_id']: + new_profile['finish'].append(p_type['finish']) + new_profile['wait_for'].append(p_type['wait_for']) + new_profile['qualities'].append(migrate_data['quality'][p_type['quality_id']]['identifier']) - new_profile.update(db.insert(new_profile)) - - profile_link[x] = new_profile.get('_id') + if len(new_profile['qualities']) > 0: + new_profile.update(db.insert(new_profile)) + profile_link[x] = new_profile.get('_id') + else: + log.error('Corrupt profile list for "%s", using default.', p.get('label')) # Qualities log.info('Importing quality sizes') @@ -369,10 +456,10 @@ class Database(object): m = medias[x] status = statuses.get(m['status_id']).get('identifier') - l = libraries[m['library_id']] + l = libraries.get(m['library_id']) # Only migrate wanted movies, Skip if no identifier present - if not getImdb(l.get('identifier')): continue + if not l or not getImdb(l.get('identifier')): continue profile_id = profile_link.get(m['profile_id']) category_id = 
category_link.get(m['category_id']) diff --git a/couchpotato/core/downloaders/rtorrent_.py b/couchpotato/core/downloaders/rtorrent_.py index 5ea3a4de..7474697f 100644 --- a/couchpotato/core/downloaders/rtorrent_.py +++ b/couchpotato/core/downloaders/rtorrent_.py @@ -5,14 +5,12 @@ from urlparse import urlparse import os from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList - from couchpotato.core.event import addEvent from couchpotato.core.helpers.encoding import sp from couchpotato.core.helpers.variable import cleanHost, splitString from couchpotato.core.logger import CPLog from bencode import bencode, bdecode from rtorrent import RTorrent -from scandir import scandir log = CPLog(__name__) @@ -238,7 +236,7 @@ class rTorrent(DownloaderBase): if torrent.is_multi_file() and torrent.directory.endswith(torrent.name): # Remove empty directories bottom up try: - for path, _, _ in scandir.walk(sp(torrent.directory), topdown = False): + for path, _, _ in os.walk(sp(torrent.directory), topdown = False): os.rmdir(path) except OSError: log.info('Directory "%s" contains extra files, unable to remove', torrent.directory) diff --git a/couchpotato/core/downloaders/synology.py b/couchpotato/core/downloaders/synology.py index 125d750a..2c12536f 100644 --- a/couchpotato/core/downloaders/synology.py +++ b/couchpotato/core/downloaders/synology.py @@ -90,6 +90,7 @@ class SynologyRPC(object): self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port) self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port) + self.sid = None self.username = username self.password = password self.destination = destination diff --git a/couchpotato/core/downloaders/transmission.py b/couchpotato/core/downloaders/transmission.py index 49c8bc58..d941cca6 100644 --- a/couchpotato/core/downloaders/transmission.py +++ b/couchpotato/core/downloaders/transmission.py @@ -174,8 +174,8 @@ class TransmissionRPC(object): self.session = {} if username and 
password: password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm() - password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager), urllib2.HTTPDigestAuthHandler(password_manager)) + password_manager.add_password(realm = 'Transmission', uri = self.url, user = username, passwd = password) + opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_manager)) opener.addheaders = [('User-agent', 'couchpotato-transmission-client/1.0')] urllib2.install_opener(opener) elif username or password: diff --git a/couchpotato/core/downloaders/utorrent.py b/couchpotato/core/downloaders/utorrent.py index 08b8d5a5..3164681c 100644 --- a/couchpotato/core/downloaders/utorrent.py +++ b/couchpotato/core/downloaders/utorrent.py @@ -168,7 +168,7 @@ class uTorrent(DownloaderBase): status = 'busy' if (torrent[1] & self.status_flags['STARTED'] or torrent[1] & self.status_flags['QUEUED']) and torrent[4] == 1000: status = 'seeding' - elif (torrent[1] & self.status_flags['ERROR']): + elif torrent[1] & self.status_flags['ERROR']: status = 'failed' elif torrent[4] == 1000: status = 'completed' @@ -229,7 +229,6 @@ class uTorrentAPI(object): password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm() password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password) self.opener.add_handler(urllib2.HTTPBasicAuthHandler(password_manager)) - self.opener.add_handler(urllib2.HTTPDigestAuthHandler(password_manager)) elif username or password: log.debug('User or password missing, not using authentication.') self.token = self.get_token() diff --git a/couchpotato/core/helpers/variable.py b/couchpotato/core/helpers/variable.py index 459d0e9b..fc844aaf 100644 --- a/couchpotato/core/helpers/variable.py +++ b/couchpotato/core/helpers/variable.py @@ -1,4 +1,5 @@ import collections +import ctypes import hashlib import os import platform @@ -6,8 +7,9 @@ 
import random import re import string import sys +import traceback -from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss +from couchpotato.core.helpers.encoding import simplifyString, toSafeString, ss, sp from couchpotato.core.logger import CPLog import six from six.moves import map, zip, filter @@ -290,9 +292,14 @@ def dictIsSubset(a, b): return all([k in b and b[k] == v for k, v in a.items()]) +# Returns True if sub_folder is the same as or inside base_folder def isSubFolder(sub_folder, base_folder): - # Returns True if sub_folder is the same as or inside base_folder - return base_folder and sub_folder and ss(os.path.normpath(base_folder).rstrip(os.path.sep) + os.path.sep) in ss(os.path.normpath(sub_folder).rstrip(os.path.sep) + os.path.sep) + if base_folder and sub_folder: + base = sp(os.path.realpath(base_folder)) + os.path.sep + subfolder = sp(os.path.realpath(sub_folder)) + os.path.sep + return os.path.commonprefix([subfolder, base]) == base + + return False # From SABNZBD @@ -307,3 +314,69 @@ def scanForPassword(name): if m: return m.group(1).strip('. 
'), m.group(2).strip() + + +under_pat = re.compile(r'_([a-z])') + +def underscoreToCamel(name): + return under_pat.sub(lambda x: x.group(1).upper(), name) + + +def removePyc(folder, only_excess = True, show_logs = True): + + folder = sp(folder) + + for root, dirs, files in os.walk(folder): + + pyc_files = filter(lambda filename: filename.endswith('.pyc'), files) + py_files = set(filter(lambda filename: filename.endswith('.py'), files)) + excess_pyc_files = filter(lambda pyc_filename: pyc_filename[:-1] not in py_files, pyc_files) if only_excess else pyc_files + + for excess_pyc_file in excess_pyc_files: + full_path = os.path.join(root, excess_pyc_file) + if show_logs: log.debug('Removing old PYC file: %s', full_path) + try: + os.remove(full_path) + except: + log.error('Couldn\'t remove %s: %s', (full_path, traceback.format_exc())) + + for dir_name in dirs: + full_path = os.path.join(root, dir_name) + if len(os.listdir(full_path)) == 0: + try: + os.rmdir(full_path) + except: + log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc())) + + +def getFreeSpace(directories): + + single = not isinstance(directories, (tuple, list)) + if single: + directories = [directories] + + free_space = {} + for folder in directories: + + size = None + if os.path.isdir(folder): + if os.name == 'nt': + _, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), \ + ctypes.c_ulonglong() + if sys.version_info >= (3,) or isinstance(folder, unicode): + fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW #@UndefinedVariable + else: + fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA #@UndefinedVariable + ret = fun(folder, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free)) + if ret == 0: + raise ctypes.WinError() + return [total.value, free.value] + else: + s = os.statvfs(folder) + size = [s.f_blocks * s.f_frsize / (1024 * 1024), (s.f_bavail * s.f_frsize) / (1024 * 1024)] + + if single: return size + + free_space[folder] = size + + return free_space diff 
--git a/couchpotato/core/logger.py b/couchpotato/core/logger.py index 6bbe1953..fba8d623 100644 --- a/couchpotato/core/logger.py +++ b/couchpotato/core/logger.py @@ -25,6 +25,12 @@ class CPLog(object): self.Env = Env self.is_develop = Env.get('dev') + from couchpotato.core.event import addEvent + addEvent('app.after_shutdown', self.close) + + def close(self, *args, **kwargs): + logging.shutdown() + def info(self, msg, replace_tuple = ()): self.logger.info(self.addContext(msg, replace_tuple)) diff --git a/couchpotato/core/media/__init__.py b/couchpotato/core/media/__init__.py index f0abd28c..4a3eb684 100644 --- a/couchpotato/core/media/__init__.py +++ b/couchpotato/core/media/__init__.py @@ -1,7 +1,7 @@ import os import traceback -from couchpotato import get_db, CPLog +from couchpotato import CPLog from couchpotato.core.event import addEvent, fireEvent, fireEventAsync from couchpotato.core.helpers.encoding import toUnicode from couchpotato.core.plugins.base import Plugin @@ -28,7 +28,7 @@ class MediaBase(Plugin): media = fireEvent('media.get', media_id, single = True) event_name = '%s.searcher.single' % media.get('type') - fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id)) + fireEventAsync(event_name, media, on_complete = self.createNotifyFront(media_id), manual = True) except: log.error('Failed creating onComplete: %s', traceback.format_exc()) diff --git a/couchpotato/core/media/_base/media/index.py b/couchpotato/core/media/_base/media/index.py index fd1affca..b40e162b 100644 --- a/couchpotato/core/media/_base/media/index.py +++ b/couchpotato/core/media/_base/media/index.py @@ -99,7 +99,7 @@ from couchpotato.core.helpers.encoding import simplifyString""" class TitleIndex(TreeBasedIndex): - _version = 3 + _version = 4 custom_header = """from CodernityDB.tree_index import TreeBasedIndex from string import ascii_letters @@ -128,7 +128,7 @@ from couchpotato.core.helpers.encoding import toUnicode, simplifyString""" title = 
title[len(prefix):] break - return str(nr_prefix + title).ljust(32, '_')[:32] + return str(nr_prefix + title).ljust(32, ' ')[:32] class StartsWithIndex(TreeBasedIndex): @@ -176,3 +176,24 @@ class MediaChildrenIndex(TreeBasedIndex): if data.get('_t') == 'media' and data.get('parent_id'): return data.get('parent_id'), None + +class MediaTagIndex(MultiTreeBasedIndex): + _version = 2 + + custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex""" + + def __init__(self, *args, **kwargs): + kwargs['key_format'] = '32s' + super(MediaTagIndex, self).__init__(*args, **kwargs) + + def make_key_value(self, data): + if data.get('_t') == 'media' and data.get('tags') and len(data.get('tags', [])) > 0: + + tags = set() + for tag in data.get('tags', []): + tags.add(self.make_key(tag)) + + return list(tags), None + + def make_key(self, key): + return md5(key).hexdigest() diff --git a/couchpotato/core/media/_base/media/main.py b/couchpotato/core/media/_base/media/main.py index beb9dd02..1d3e1530 100644 --- a/couchpotato/core/media/_base/media/main.py +++ b/couchpotato/core/media/_base/media/main.py @@ -1,6 +1,10 @@ +from datetime import timedelta +from operator import itemgetter +import time import traceback from string import ascii_lowercase +from CodernityDB.database import RecordNotFound from couchpotato import tryInt, get_db from couchpotato.api import addApiView from couchpotato.core.event import fireEvent, fireEventAsync, addEvent @@ -8,7 +12,7 @@ from couchpotato.core.helpers.encoding import toUnicode from couchpotato.core.helpers.variable import splitString, getImdb, getTitle from couchpotato.core.logger import CPLog from couchpotato.core.media import MediaBase -from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex +from .index import MediaIndex, MediaStatusIndex, MediaTypeIndex, TitleSearchIndex, TitleIndex, StartsWithIndex, MediaChildrenIndex, MediaTagIndex log = CPLog(__name__) 
@@ -20,6 +24,7 @@ class MediaPlugin(MediaBase): 'media': MediaIndex, 'media_search_title': TitleSearchIndex, 'media_status': MediaStatusIndex, + 'media_tag': MediaTagIndex, 'media_by_type': MediaTypeIndex, 'media_title': TitleIndex, 'media_startswith': StartsWithIndex, @@ -80,6 +85,8 @@ class MediaPlugin(MediaBase): addEvent('media.list', self.list) addEvent('media.delete', self.delete) addEvent('media.restatus', self.restatus) + addEvent('media.tag', self.tag) + addEvent('media.untag', self.unTag) def refresh(self, id = '', **kwargs): handlers = [] @@ -119,25 +126,30 @@ class MediaPlugin(MediaBase): def get(self, media_id): - db = get_db() + try: + db = get_db() - imdb_id = getImdb(str(media_id)) + imdb_id = getImdb(str(media_id)) - media = None - if imdb_id: - media = db.get('media', 'imdb-%s' % imdb_id, with_doc = True)['doc'] - else: - media = db.get('id', media_id) + if imdb_id: + media = db.get('media', 'imdb-%s' % imdb_id, with_doc = True)['doc'] + else: + media = db.get('id', media_id) - if media: + if media: - # Attach category - try: media['category'] = db.get('id', media.get('category_id')) - except: pass + # Attach category + try: media['category'] = db.get('id', media.get('category_id')) + except: pass - media['releases'] = fireEvent('release.for_media', media['_id'], single = True) + media['releases'] = fireEvent('release.for_media', media['_id'], single = True) - return media + return media + + except RecordNotFound: + log.error('Media with id "%s" not found', media_id) + except: + raise def getView(self, id = None, **kwargs): @@ -155,8 +167,15 @@ class MediaPlugin(MediaBase): status = list(status if isinstance(status, (list, tuple)) else [status]) for s in status: - for ms in db.get_many('media_status', s, with_doc = with_doc): - yield ms['doc'] if with_doc else ms + for ms in db.get_many('media_status', s): + if with_doc: + try: + doc = db.get('id', ms['_id']) + yield doc + except RecordNotFound: + log.debug('Record not found, skipping: %s', 
ms['_id']) + else: + yield ms def withIdentifiers(self, identifiers, with_doc = False): @@ -171,7 +190,7 @@ class MediaPlugin(MediaBase): log.debug('No media found with identifiers: %s', identifiers) - def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, starts_with = None, search = None): + def list(self, types = None, status = None, release_status = None, status_or = False, limit_offset = None, with_tags = None, starts_with = None, search = None): db = get_db() @@ -182,6 +201,8 @@ class MediaPlugin(MediaBase): release_status = [release_status] if types and not isinstance(types, (list, tuple)): types = [types] + if with_tags and not isinstance(with_tags, (list, tuple)): + with_tags = [with_tags] # query media ids if types: @@ -208,11 +229,17 @@ class MediaPlugin(MediaBase): # Add search filters if starts_with: - filter_by['starts_with'] = set() starts_with = toUnicode(starts_with.lower())[0] starts_with = starts_with if starts_with in ascii_lowercase else '#' filter_by['starts_with'] = [x['_id'] for x in db.get_many('media_startswith', starts_with)] + # Add tag filter + if with_tags: + filter_by['with_tags'] = set() + for tag in with_tags: + for x in db.get_many('media_tag', tag): + filter_by['with_tags'].add(x['_id']) + # Filter with search query if search: filter_by['search'] = [x['_id'] for x in db.get_many('media_search_title', search)] @@ -265,6 +292,7 @@ class MediaPlugin(MediaBase): release_status = splitString(kwargs.get('release_status')), status_or = kwargs.get('status_or') is not None, limit_offset = kwargs.get('limit_offset'), + with_tags = splitString(kwargs.get('with_tags')), starts_with = kwargs.get('starts_with'), search = kwargs.get('search') ) @@ -383,16 +411,18 @@ class MediaPlugin(MediaBase): total_deleted += 1 new_media_status = 'done' elif delete_from == 'manage': - if release.get('status') == 'done': + if release.get('status') == 'done' or media.get('status') == 'done': db.delete(release) 
total_deleted += 1 - if (total_releases == total_deleted and media['status'] != 'active') or (delete_from == 'wanted' and media['status'] == 'active'): + if (total_releases == total_deleted and media['status'] != 'active') or (total_releases == 0 and not new_media_status) or (not new_media_status and delete_from == 'late'): db.delete(media) deleted = True elif new_media_status: media['status'] = new_media_status db.update(media) + + fireEvent('media.untag', media['_id'], 'recent', single = True) else: fireEvent('media.restatus', media.get('_id'), single = True) @@ -432,24 +462,72 @@ class MediaPlugin(MediaBase): if not m['profile_id']: m['status'] = 'done' else: - move_to_wanted = True + m['status'] = 'active' - profile = db.get('id', m['profile_id']) - media_releases = fireEvent('release.for_media', m['_id'], single = True) + try: + profile = db.get('id', m['profile_id']) + media_releases = fireEvent('release.for_media', m['_id'], single = True) + done_releases = [release for release in media_releases if release.get('status') == 'done'] - for q_identifier in profile['qualities']: - index = profile['qualities'].index(q_identifier) + if done_releases: + # Only look at latest added release + release = sorted(done_releases, key = itemgetter('last_edit'), reverse = True)[0] - for release in media_releases: - if q_identifier == release['quality'] and (release.get('status') == 'done' and profile['finish'][index]): - move_to_wanted = False + # Check if we are finished with the media + if fireEvent('quality.isfinish', {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, timedelta(seconds = time.time() - release['last_edit']).days, single = True): + m['status'] = 'done' + elif previous_status == 'done': + m['status'] = 'done' - m['status'] = 'active' if move_to_wanted else 'done' + except RecordNotFound: + log.debug('Failed restatus, keeping previous: %s', traceback.format_exc()) + m['status'] = previous_status # Only update when status has 
changed if previous_status != m['status']: db.update(m) - return True + # Tag media as recent + self.tag(media_id, 'recent') + + return m['status'] except: log.error('Failed restatus: %s', traceback.format_exc()) + + def tag(self, media_id, tag): + + try: + db = get_db() + m = db.get('id', media_id) + + tags = m.get('tags') or [] + if tag not in tags: + tags.append(tag) + m['tags'] = tags + db.update(m) + + return True + except: + log.error('Failed tagging: %s', traceback.format_exc()) + + return False + + def unTag(self, media_id, tag): + + try: + db = get_db() + m = db.get('id', media_id) + + tags = m.get('tags') or [] + if tag in tags: + new_tags = list(set(tags)) + new_tags.remove(tag) + + m['tags'] = new_tags + db.update(m) + + return True + except: + log.error('Failed untagging: %s', traceback.format_exc()) + + return False diff --git a/couchpotato/core/media/_base/providers/base.py b/couchpotato/core/media/_base/providers/base.py index b13b279d..587545c8 100644 --- a/couchpotato/core/media/_base/providers/base.py +++ b/couchpotato/core/media/_base/providers/base.py @@ -129,6 +129,9 @@ class YarrProvider(Provider): else: return [] + def buildUrl(self, *args, **kwargs): + pass + def login(self): # Check if we are still logged in every hour @@ -181,7 +184,7 @@ class YarrProvider(Provider): try: return self.urlopen(url, headers = {'User-Agent': Env.getIdentifier()}, show_error = False) except: - log.error('Failed getting nzb from %s: %s', (self.getName(), traceback.format_exc())) + log.error('Failed getting release from %s: %s', (self.getName(), traceback.format_exc())) return 'try_next' diff --git a/couchpotato/core/media/_base/providers/nzb/binsearch.py b/couchpotato/core/media/_base/providers/nzb/binsearch.py index 84c7b314..c61b72d3 100644 --- a/couchpotato/core/media/_base/providers/nzb/binsearch.py +++ b/couchpotato/core/media/_base/providers/nzb/binsearch.py @@ -100,6 +100,7 @@ config = [{ 'name': 'binsearch', 'description': 'Free provider, less accurate. 
See BinSearch', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAATklEQVQY02NwQAMMWAXOnz+PKvD//3/CAvM//z+fgiwAAs+RBab4PP//vwbFjPlAffgEChzOo2r5fBuIfRAC5w8D+QUofkkp8MHjOWQAAM3Sbogztg2wAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/nzb/newznab.py b/couchpotato/core/media/_base/providers/nzb/newznab.py index 04915776..495463bf 100644 --- a/couchpotato/core/media/_base/providers/nzb/newznab.py +++ b/couchpotato/core/media/_base/providers/nzb/newznab.py @@ -1,9 +1,7 @@ -from urllib2 import HTTPError from urlparse import urlparse import time import traceback import re -import urllib2 from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode from couchpotato.core.helpers.rss import RSS @@ -13,6 +11,7 @@ from couchpotato.core.media._base.providers.base import ResultList from couchpotato.core.media._base.providers.nzb.base import NZBProvider from couchpotato.environment import Env from dateutil.parser import parse +from requests import HTTPError log = CPLog(__name__) @@ -92,7 +91,7 @@ class Base(NZBProvider, RSS): # Extract a password from the description password = re.search('(?:' + self.passwords_regex + ')(?: *)(?:\:|\=)(?: *)(.*?)\|\n|$', description, flags = re.I).group(1) if password: - name = name + ' {{%s}}' % password.strip() + name += ' {{%s}}' % password.strip() except: log.debug('Error getting details of "%s": %s', (name, traceback.format_exc())) @@ -184,16 +183,7 @@ class Base(NZBProvider, RSS): return 'try_next' try: - # Get final redirected url - log.debug('Checking %s for redirects.', url) - req = urllib2.Request(url) - req.add_header('User-Agent', self.user_agent) - res = urllib2.urlopen(req) - finalurl = res.geturl() - if finalurl != url: - log.debug('Redirect url used: %s', finalurl) - - data = self.urlopen(finalurl, show_error = False) + data = self.urlopen(url, show_error = False) self.limits_reached[host] = False return data except HTTPError as e: @@ -230,8 
+220,9 @@ config = [{ 'description': 'Enable NewzNab such as NZB.su, \ NZBs.org, DOGnzb.cr, \ Spotweb, NZBGeek, \ - SmackDown, NZBFinder', + NZBFinder', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAgMAAABinRfyAAAACVBMVEVjhwD///86aRovd/sBAAAAMklEQVQI12NgAIPQUCCRmQkjssDEShiRuRIqwZqZGcDAGBrqANUhGgIkWAOABKMDxCAA24UK50b26SAAAAAASUVORK5CYII=', 'options': [ { 'name': 'enabled', @@ -240,30 +231,30 @@ config = [{ }, { 'name': 'use', - 'default': '0,0,0,0,0,0' + 'default': '0,0,0,0,0' }, { 'name': 'host', - 'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://index.nzbgeek.info, https://smackdownonyou.com, https://www.nzbfinder.ws', + 'default': 'api.nzb.su,api.dognzb.cr,nzbs.org,https://index.nzbgeek.info,https://www.nzbfinder.ws', 'description': 'The hostname of your newznab provider', }, { 'name': 'extra_score', 'advanced': True, 'label': 'Extra Score', - 'default': '0,0,0,0,0,0', + 'default': '0,0,0,0,0', 'description': 'Starting score for each release found via this provider.', }, { 'name': 'custom_tag', 'advanced': True, 'label': 'Custom tag', - 'default': ',,,,,', + 'default': ',,,,', 'description': 'Add custom tags, for example add rls=1 to get only scene releases from nzbs.org', }, { 'name': 'api_key', - 'default': ',,,,,', + 'default': ',,,,', 'label': 'Api Key', 'description': 'Can be found on your profile page', 'type': 'combined', diff --git a/couchpotato/core/media/_base/providers/nzb/nzbclub.py b/couchpotato/core/media/_base/providers/nzb/nzbclub.py index 06ae7fec..a2660899 100644 --- a/couchpotato/core/media/_base/providers/nzb/nzbclub.py +++ b/couchpotato/core/media/_base/providers/nzb/nzbclub.py @@ -80,6 +80,7 @@ config = [{ 'name': 'NZBClub', 'description': 'Free provider, less accurate. 
See NZBClub', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACEUlEQVQ4y3VSMWgUQRR9/8/s7OzeJSdnTsVGghLEYBNQjBpQiRBFhIB2EcHG1kbs0murhZAmVocExEZQ0c7CxkLINYcJJpoYj9wZcnu72fF21uJSXMzuhyne58/j/fcf4b+KokgBIOSU53lxP5b9oNVqDT36dH+5UjoiKvIwPFEEgWBshGZ3E7/NOupL9fMjx0e+ZhKsrq+c/FPZKJi0w4FsQXMBDEJsd7BNW9h2tuyP9vfTALIJkMIu1hYRtINM+dpzcWc0sbkreK4fUEogyraAmKGF3+7vcT/wtR9QwkCabSAzQQuvk0uglAo5YaQ5DASGYjfMXcHVOqKu6NmR7iehlKAdHWUqWPv1c3i+9uwVdRlEBGaGEAJCCrDo9ShhvF6qPq8tL57bp+DbRn2sHtUuCY9YphLMu5921VhrwYJ5tbt0tt6sjQP4vEfB2Ikz7/ytwbeR6ljHkXCUA6UcOLtPOg4MYhtH8ZcLw5er+xQMDAwEURRNl96X596Y6oxFwsw9fmtTOAr2Ik19nL365FZpsLSdnQPPM8aYewc+lDcX4rkHqbQMAGTJXulOLzycmr1bKBTi3DOGYagajcahiaOT89fbM0/dxEsUu3aidfPljWO3HzebzYNBELi5Z5RSJlrrHd/3w8lT114MrVTWOn875fHRiYVisRhorWMpZXdvNnLKGCOstb0AMlulVJI19w/+nceU4D0aCwAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/nzb/nzbindex.py b/couchpotato/core/media/_base/providers/nzb/nzbindex.py index ddcce8ad..58f4b23f 100644 --- a/couchpotato/core/media/_base/providers/nzb/nzbindex.py +++ b/couchpotato/core/media/_base/providers/nzb/nzbindex.py @@ -105,6 +105,7 @@ config = [{ 'name': 'nzbindex', 'description': 'Free provider, less accurate. 
See NZBIndex', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAo0lEQVR42t2SQQ2AMBAEcUCwUAv94QMLfHliAQtYqIVawEItYAG6yZFMLkUANNlk79Kbbtp2P1j9uKxVV9VWFeStl+Wh3fWK9hNwEoADZkJtMD49AqS5AUjWGx6A+m+ARICGrM5W+wSTB0gETKzdHZwCEZAJ8PGZQN4AiQAmkR9s06EBAugJiBoAAPFfAQcBgZcIHzwA6TYP4JsXeSg3P9L31w3eksbH3zMb/wAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py b/couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py index acd12cf9..bac0614d 100644 --- a/couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py +++ b/couchpotato/core/media/_base/providers/nzb/omgwtfnzbs.py @@ -74,6 +74,7 @@ config = [{ 'name': 'OMGWTFNZBs', 'description': 'See OMGWTFNZBs', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQEAIAAADAAbR1AAADbElEQVR4AZ2UW0ybZRiAy/OvdHaLYvB0YTRIFi7GkM44zRLmIfNixkWdiRMyYoxRE8/TC7MYvXCGEBmr3mxLwVMwY0wYA7e6Wso4lB6h/U9taSlMGIfBXLYlJMyo0S///2dJI5lxN8/F2/f9nu9737e/jYmXr6KTbN9BGG9HE/NotQ76UWziNzrXFiETk/5ARUNH+7+0kW7fSgTl0VKGOLZzidOkmuuIo7q2oTArNLPIzhdIkqXkerFOm2CaD/5bcKrjIL2c3fkhPxOq93Kcb91v46fV9TQKF4TgV/TbUsQtzfCaK6jMOd5DJrguSIIhexmqqVxN0FXbRR8/ND/LYTTj6J7nl2gnL47OkDW4KJhnQHCa6JpKVNJGA3OC58nwBJoZ//ebbIyKpBxjrr0o1q1FMRkrKXZnHWF85VvxMrJxibwhGyd0f5bLnKzqJs1k0Sfo+EU8hdAUvkbcwKEgs2D0OiV4jmmD1zb+Tp6er0JMMvDxPo5xev9zTBF683NS+N56n1YiB95B5crr93KRuKhKI0tb0Kw2mgLLqTjLEWO8424i9IvURaYeOckwf3+/yCC9e3bQQ/MuD+Monk0k+XFXMUfx7z5EEP+XlXi5tLlMxH8zLppw7idJrugcus30kC86gc7UrQqjLIukM8zWHOACeU+TiMxXN6ExVOkgz4lvPEzice1GIVhxhG4CrZvpl6TH55giKWqXGLy9hZh5aUtgDSew/msSyCKpl+DDNfxJc8NBIsxUxUnz14O/oONu+IIIvso9TLBQ1SY5rUhuSzUhAqJ2mRXBLDOCeUtgUZXsaObT8BffhUJPqWgiV+3zKKzYH0ClvTRLhD77HIqVkyh5jThnivehoG+qJctIRSPn6bxvO4FCgTl9c1DmbpjLajbQFE8aW5SU3rg+zOPGUjTUF9NFpLEbH2c/KmGYlY69/GQJVtGMSUcEp9eCbB1nctbxHTLRdTUkGDf+B02uGWRG3OvpJ/zSMwzif+oxVBID3cQKBavLCiPmB2PM2UuSCUPgrX4VDb97AwEG67bh4+KTOlncvu3M31BwA5rLHbCfEjwkNDky9e/SSbSxnD46Pg0RJtpXRvhmBSZHpRjWtKwFybjuQeXaKxto4WjLZZZvVmC17pZLJFkwxm5++PS2Mrwc7nyIMYZe/IzoP5d6QgEybq
TXAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/awesomehd.py b/couchpotato/core/media/_base/providers/torrent/awesomehd.py index 1372f6cf..78c46488 100644 --- a/couchpotato/core/media/_base/providers/torrent/awesomehd.py +++ b/couchpotato/core/media/_base/providers/torrent/awesomehd.py @@ -78,8 +78,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'Awesome-HD', - 'description': 'See AHD', + 'description': 'AHD', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC+UlEQVR4AV1SO0y6dxQ9H4g8CoIoohZ5NA0aR2UgkYpNB5uocTSaLlrDblMH09Gt8d90r3YpJkanxjA4GGkbO7RNxSABq8jDGnkpD+UD5NV7Bxvbk9wvv+/3uPece66A/yEWi42FQqHVfD7/cbPZtIEglUpjOp3uZHR0dBvAn3gDIRqNgjE4OKj0+Xzf3NzcfD4wMCCjf5TLZbTbbajVatzf3+Pu7q5uNpt35ufnvwBQAScQRREEldfr9RWLxan+/n5YrVa+jFarhVfQQyQSCU4EhULhX15engEgSrjC0dHRVqlUmjQYDBgaGgKtuTqz4mTgIoVCASaTCX19fajVapOHh4dbFJBks9mxcDi8qtFoJEajkfVyJWi1WkxMTMDhcIAT8x6D7/Dd6+vr1fHx8TGp2+3+iqo5+YCzBwIBToK5ubl/mQwPDyMSibAs2Gw2UHNRrValz8/PDUk8Hv9EqVRCr9fj4uICTNflcqFer+Pg4AB7e3uoVCq8x9Rxfn6O7u5uqFQq8FspZXxHTekggByA3W4Hr9PpNDeRL3I1cMhkMrBrnZ2dyGQyvNYIs7OzVbJNPjIyAraLwYdcjR8wXl5eIJfLwRIFQQDLYkm3t7c1CdGPPT4+cpOImp4PODMeaK+n10As2jBbrHifHOjS6qAguVFimkqlwAMmIQnHV1dX4NDQhVwuhyZTV6pgIktzDzkkk0lEwhEEzs7ASQr5Ai4vL1nuccfCwsLO/v6+p9FoyJhF6ekJro/cPCzIZLNQa7rQoK77/SdgWWpKkCaJ5EB9aWnpe6nH40nRMBnJV4f5gw+FX3/5GX/8/htXRZdOzzqhJWn6nl6YbTZqqhrhULD16fT0d8FgcFtYW1vD5uamfGVl5cd4IjldKhZACdkJvKfWUANrxEaJV4hiGVaL1b+7653hXzwRZQr2X76xsfG1xWIRaZzbNPv/CdrjEL9cX/+WXFBSgEPgzxuwG3Yans9OT0+naBZMIJDNfzudzp8WFxd/APAX3uAf9WOTxOPLdosAAAAASUVORK5CYII=', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/bithdtv.py b/couchpotato/core/media/_base/providers/torrent/bithdtv.py index bb12c66a..57bc2218 100644 --- a/couchpotato/core/media/_base/providers/torrent/bithdtv.py +++ b/couchpotato/core/media/_base/providers/torrent/bithdtv.py @@ -93,8 +93,9 @@ config = [{ 'tab': 
'searcher', 'list': 'torrent_providers', 'name': 'BiT-HDTV', - 'description': 'See BiT-HDTV', + 'description': 'BiT-HDTV', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABMklEQVR4AZ3Qu0ojcQCF8W9MJcQbJNgEEQUbQVIqWgnaWfkIvoCgggixEAmIhRtY2GV3w7KwU61B0EYIxmiw0YCik84ipaCuc0nmP5dcjIUgOjqDvxf4OAdf9mnMLcUJyPyGSCP+YRdC+Kp8iagJKhuS+InYRhTGgDbeV2uEMand4ZRxizjXHQEimxhraAnUr73BNqQxMiNeV2SwcjTLEVtb4Zl10mXutvOWm2otw5Sxz6TGTbdd6ncuYvVLXAXrvM+ruyBpy1S3JLGDfUQ1O6jn5vTsrJXvqSt4UNfj6vxTRPxBHER5QeSirhLGk/5rWN+ffB1XZuxjnDy1q87m7TS+xOGA+Iv4gfkbaw+nOMXHDHnITGEk0VfRFnn4Po4vNYm6RGukmggR0L08+l+e4HMeASo/i6AJUjLgAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/bitsoup.py b/couchpotato/core/media/_base/providers/torrent/bitsoup.py index f32e79e7..9519e58b 100644 --- a/couchpotato/core/media/_base/providers/torrent/bitsoup.py +++ b/couchpotato/core/media/_base/providers/torrent/bitsoup.py @@ -1,6 +1,6 @@ import traceback -from bs4 import BeautifulSoup +from bs4 import BeautifulSoup, SoupStrainer from couchpotato.core.helpers.variable import tryInt from couchpotato.core.logger import CPLog from couchpotato.core.media._base.providers.torrent.base import TorrentProvider @@ -20,6 +20,7 @@ class Base(TorrentProvider): } http_time_between_calls = 1 # Seconds + only_tables_tags = SoupStrainer('table') def _searchOnTitle(self, title, movie, quality, results): @@ -27,7 +28,7 @@ class Base(TorrentProvider): data = self.getHTMLData(url) if data: - html = BeautifulSoup(data) + html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags) try: result_table = html.find('table', attrs = {'class': 'koptekst'}) @@ -87,8 +88,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'Bitsoup', - 'description': 'See Bitsoup', + 'description': 'Bitsoup', 'wizard': True, + 'icon': 
'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAB8ElEQVR4AbWSS2sTURiGz3euk0mswaE37HhNhIrajQheFgF3rgR/lAt/gOBCXNZlo6AbqfUWRVCxi04wqUnTRibpJLaJzdzOOZ6WUumyC5/VHOb9eN/FA91uFx0FjI4IPfgiGLTWH73tn348GKmN7ijD0d2b41fO5qJEaX24AWNIUrVQCTTJ3Llx6vbV6Vtzk7Gi9+ebi996guFDDYAQAVj4FExP5qdOZB49W62t/zH3hECcwsPnbWeMXz6Xi2K1f0ApeK3hMCHHbP5gvvoriBgFAAQJEAxhjJ4u+YWTNsVI6b1JgtPWZkoIefKy4fcii2OTw2BABs7wj3bYDlLL4rvjGWOdTser1j5Xf7c3Q/MbHQYApxItvnm31mhQQ71eX2vUB76/vsWB2hg0QuogrMwLIG8P3InM2/eVGXeDViqVwWB79vRU2lgJYmdHcgXCTAXQFJTN5HguvDCR2Hxsxe8EvT54nlcul5vNpqDIEgwRQanAhAAABgRIyiQcjpIkkTOuWyqVoN/vSylX67XXH74uV1vHRUyxxFqbLBCSmBpiXSq6xcL5QrGYzWZ3XQIAwdlOJB+/aL764ucdmncYs0WsCI7kvTnn+qyDMEnTVCn1Tz5KsBFg6fvWcmsUAcnYNC/g2hnromvvqbHvxv+39S+MX+bWkFXwAgAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/hdbits.py b/couchpotato/core/media/_base/providers/torrent/hdbits.py index 6cf8d57d..ebf2899b 100644 --- a/couchpotato/core/media/_base/providers/torrent/hdbits.py +++ b/couchpotato/core/media/_base/providers/torrent/hdbits.py @@ -71,7 +71,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'HDBits', - 'description': 'See HDBits', + 'wizard': True, + 'description': 'HDBits', + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABi0lEQVR4AZWSzUsbQRjGdyabTcvSNPTSHlpQQeMHJApC8CJRvHgQQU969+LJP8G7f4N3DwpeFRQvRr0EKaUl0ATSpkigUNFsMl/r9NmZLCEHA/nNO5PfvMPDm0DI6fV3ZxiolEICe1oZCBVCCmBPKwOh2ErKBHGE4KYEXBpSLkUlqO4LcM7f+6nVhRnOhSkOz/hexk+tL+YL0yPF2YmN4tynD++4gTLGkNNac9YFLoREBR1+cnF3dFY6v/m6PD+FaXiNJtgA4xYbABxiGrz6+6HWaI5/+Qh37YS0/3Znc8UxwNGBIIBX22z+/ZdJ+4wzyjpR4PEpODg8tgUXBv2iWUzSpa12B0IR6n6lvt8Aek2lZHb084+fdRNgrwY8z81PjhVy2d2ttUrtV/lbBa+JXGEpDMPnoF2tN1QYRqVUtf6nFbThb7wk7le395elcqhASLb39okDiHY00VCtCTEHwSiH4AI0lkOiT1dwMeSfT3SRxiQWNO7Zwj1egkoVIQFMKvSiC3bcjXq9Jf8DcDIRT3hh10kAAAAASUVORK5CYII=', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/ilovetorrents.py 
b/couchpotato/core/media/_base/providers/torrent/ilovetorrents.py index 56b7dac5..b6ccecd1 100644 --- a/couchpotato/core/media/_base/providers/torrent/ilovetorrents.py +++ b/couchpotato/core/media/_base/providers/torrent/ilovetorrents.py @@ -3,7 +3,7 @@ import traceback from bs4 import BeautifulSoup from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode -from couchpotato.core.helpers.variable import tryInt +from couchpotato.core.helpers.variable import tryInt, splitString from couchpotato.core.logger import CPLog from couchpotato.core.media._base.providers.torrent.base import TorrentProvider @@ -15,7 +15,7 @@ class Base(TorrentProvider): urls = { 'download': 'https://www.ilovetorrents.me/%s', - 'detail': 'https//www.ilovetorrents.me/%s', + 'detail': 'https://www.ilovetorrents.me/%s', 'search': 'https://www.ilovetorrents.me/browse.php?search=%s&page=%s&cat=%s', 'test': 'https://www.ilovetorrents.me/', 'login': 'https://www.ilovetorrents.me/takelogin.php', @@ -47,17 +47,24 @@ class Base(TorrentProvider): data = self.getHTMLData(search_url) if data: try: - soup = BeautifulSoup(data) - results_table = soup.find('table', attrs = {'class': 'koptekst'}) + results_table = None + + data_split = splitString(data, '.+'')', i['href']).group('pageNumber')) for i in pagelinks] - total_pages = max(pageNumbers) - + page_numbers = [int(re.search('page=(?P.+'')', i['href']).group('page_number')) for i in pagelinks] + total_pages = max(page_numbers) except: pass @@ -139,6 +146,7 @@ config = [{ 'name': 'ILoveTorrents', 'description': 'Where the Love of Torrents is Born', 'wizard': True, + 'icon': 
'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACPUlEQVR4AYWM0U9SbxjH3+v266I/oNvWZTfd2J1d0ZqbZEFwWrUImOKs4YwtumFKZvvlJJADR2TCQQlMPKg5NmpREgaekAPnBATKgmK1LqQlx6awHnZWF1Tr2Xfvvs+7z+dB0mlO7StpAh+M4S/2jbo3w8+xvJvlnSneEt+10zwer5ujNUOoChjALWFw5XOwdCAk/P57cGvPl+Oht0W7VJHN5NC1uW1BON4hGjXbwpVWMZhsy9v7sEIXAsDNYBXgdkEoIKyWD2CF8ut/aOXTZc/fBSgLWw1BgA4BDHOV0GkT90cBQpXahU5TFomsb38XhJC5/Tbh1P8c6rJlBeGfAeyMhUFwNVcs9lxV9Ot0dwmyd+mrNvRtbJ2fSPC6Z3Vsvub2z3sDFACAAYzk0+kUyxEkyfN7PopqNBro55A+P6yPKIrL5zF1HwjdeBJJCObIsZO79bo3sHhWhglo5WMV3mazuVPb4fLvSL8/FAkB1hK6rXQPwYhMyROK8VK5LAiH/jsMt0HQjxiN4/ePdoilllcqDyt3Mkg8mRBNbIhMb8RERkowQA/p76g0/UDDdCoNmDminM0qSK5vlpE5kugCHhNPxntwWmJPYTMZtYcFR6ABHQsVRlYLukVORaaULvqKI46keFSCv77kSPS6kxrPptLNDHgz16fWBtyxe6v5h08LUy+KI8ushqTPWWIX8Sg6b45IrGtyW6zXFb/hpQf9m3oqfWuB0fpSw0uZ4WB69En69uOk2rmO2V52PXj+A/mI4ESKpb2HAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/iptorrents.py b/couchpotato/core/media/_base/providers/torrent/iptorrents.py index 0433d3ef..0915ca31 100644 --- a/couchpotato/core/media/_base/providers/torrent/iptorrents.py +++ b/couchpotato/core/media/_base/providers/torrent/iptorrents.py @@ -120,8 +120,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'IPTorrents', - 'description': 'See IPTorrents', + 'description': 'IPTorrents', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABRklEQVR42qWQO0vDUBiG8zeKY3EqQUtNO7g0J6ZJ1+ifKIIFQXAqDYKCyaaYxM3udrZLHdRFhXrZ6liCW6mubfk874EESgqaeOCF7/Y8hEh41aq6yZi2nyZgBGya9XKtZs4No05pAkZV2YbEmyMMsoSxLQeC46wCTdPPY4HruPQyGIhF97qLWsS78Miydn4XdK46NJ9OsQAYBzMIMf8MQ9wtCnTdWCaIDx/u7uljOIQEe0hiIWPamSTLay3+RxOCSPI9+RJAo7Er9r2bnqjBFAqyK+VyK4f5/Cr5ni8OFKVCz49PFI5GdNvvU7ttE1M1zMU+8AMqFksEhrMnQsBDzqmDAwzx2ehRLwT7yyCI+vSC99c3mozH1NxrJgWWtR1BOECfEJSVCm6WCzJGCA7+IWhBsM4zywDPwEp4vCjx2DzBH2ODAfsDb33Ps6dQwJgAAAAASUVORK5CYII=', 'options': [ { 'name': 'enabled', diff --git 
a/couchpotato/core/media/_base/providers/torrent/kickasstorrents.py b/couchpotato/core/media/_base/providers/torrent/kickasstorrents.py index a711502a..730bb608 100644 --- a/couchpotato/core/media/_base/providers/torrent/kickasstorrents.py +++ b/couchpotato/core/media/_base/providers/torrent/kickasstorrents.py @@ -32,8 +32,12 @@ class Base(TorrentMagnetProvider): proxy_list = [ 'https://kickass.to', 'http://kickass.pw', + 'http://kickassto.come.in', + 'http://katproxy.ws', 'http://www.kickassunblock.info', 'http://www.kickassproxy.info', + 'http://katph.eu', + 'http://kickassto.come.in', ] def _search(self, media, quality, results): @@ -128,8 +132,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'KickAssTorrents', - 'description': 'See KickAssTorrents', + 'description': 'KickAssTorrents', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACD0lEQVR42pXK20uTcRjA8d/fsJsuap0orBuFlm3hir3JJvQOVmuwllN20Lb2isI2nVHKjBqrCWYaNnNuBrkSWxglhDVJOkBdSWUOq5FgoiOrMdRJ2xPPxW+8OUf1ge/FcyCUSVe2qedK5U/OxNTTXRNXEQ52Glb4O6dNEfK1auJkvRY7+/zxnQbA/D596laXcY3OWOiaIX2393SGznUmxkUo/YkDgqHemuzobQ7+NV+reo5Q1mqp68GABdY3+/EloO+JeN4tEqiFU8f3CwhyWo9E7wfMgI0ELTDx0AvjIxcgvZoC9P7NMN7yMmrFeoKa68rfDfmrARsNN0Ihr55cx59ctZWSiwS5bLKpwW4dYJH+M/B6/CYszE0BFZ+egG+Ln+HRoBN/cpl1pV6COIMkOnBVA/w+fXgGKJVM4LxhumMleoL06hJ3wKcCfl+/TAKKx17gnFePRwkqxR4BQSpFkbCrrQJueI7mWpyfATQ9OQY43+uv/+PutBycJ3y2qn2x7jY50GJvnwLKZjOwspyE5I8F4N+1yr1uwqcs3ym63Hwo29EiAyzUWQVr6WVAS4lZCPutQG/2GtES2YiW3d3XflYKtL72kzAcdEDHeSa3czeIMyyz/TApRKvcFfE0isHbJMnrHCf6xTLb1ORvWNlWo91cvHrJUQo0o6ZoRi7dIiT/g2WEDi27Iyov21xMCvgNfXvtwIACfHwAAAAASUVORK5CYII=', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py b/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py index 80e23488..609ef2d4 100644 --- a/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py +++ b/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py @@ -187,8 
+187,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'PassThePopcorn', - 'description': 'See PassThePopcorn.me', + 'description': 'PassThePopcorn.me', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAARklEQVQoz2NgIAP8BwMiGWRpIN1JNWn/t6T9f532+W8GkNt7vzz9UkfarZVpb68BuWlbnqW1nU7L2DMx7eCoBlpqGOppCQB83zIgIg+wWQAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/publichd.py b/couchpotato/core/media/_base/providers/torrent/publichd.py deleted file mode 100644 index 1090be2b..00000000 --- a/couchpotato/core/media/_base/providers/torrent/publichd.py +++ /dev/null @@ -1,136 +0,0 @@ -from urlparse import parse_qs -import re -import traceback - -from bs4 import BeautifulSoup -from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode -from couchpotato.core.helpers.variable import tryInt -from couchpotato.core.logger import CPLog -from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider -import six - - -log = CPLog(__name__) - - -class Base(TorrentMagnetProvider): - - urls = { - 'test': 'https://publichd.se', - 'detail': 'https://publichd.se/index.php?page=torrent-details&id=%s', - 'search': 'https://publichd.se/index.php', - } - http_time_between_calls = 0 - - def search(self, movie, quality): - - if not quality.get('hd', False): - return [] - - return super(Base, self).search(movie, quality) - - def _search(self, media, quality, results): - - query = self.buildUrl(media) - - params = tryUrlencode({ - 'page': 'torrents', - 'search': query, - 'active': 1, - }) - - data = self.getHTMLData('%s?%s' % (self.urls['search'], params)) - - if data: - - try: - soup = BeautifulSoup(data) - - results_table = soup.find('table', attrs = {'id': 'bgtorrlist2'}) - entries = results_table.find_all('tr') - - for result in entries[2:len(entries) - 1]: - info_url = result.find(href = re.compile('torrent-details')) - download = result.find(href = 
re.compile('magnet:')) - - if info_url and download: - - url = parse_qs(info_url['href']) - - results.append({ - 'id': url['id'][0], - 'name': six.text_type(info_url.string), - 'url': download['href'], - 'detail_url': self.urls['detail'] % url['id'][0], - 'size': self.parseSize(result.find_all('td')[7].string), - 'seeders': tryInt(result.find_all('td')[4].string), - 'leechers': tryInt(result.find_all('td')[5].string), - 'get_more_info': self.getMoreInfo - }) - - except: - log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc())) - - def getMoreInfo(self, item): - - cache_key = 'publichd.%s' % item['id'] - description = self.getCache(cache_key) - - if not description: - - try: - full_description = self.urlopen(item['detail_url']) - html = BeautifulSoup(full_description) - nfo_pre = html.find('div', attrs = {'id': 'torrmain'}) - description = toUnicode(nfo_pre.text) if nfo_pre else '' - except: - log.error('Failed getting more info for %s', item['name']) - description = '' - - self.setCache(cache_key, description, timeout = 25920000) - - item['description'] = description - return item - - -config = [{ - 'name': 'publichd', - 'groups': [ - { - 'tab': 'searcher', - 'list': 'torrent_providers', - 'name': 'PublicHD', - 'description': 'Public Torrent site with only HD content. 
See PublicHD', - 'wizard': True, - 'options': [ - { - 'name': 'enabled', - 'type': 'enabler', - 'default': True, - }, - { - 'name': 'seed_ratio', - 'label': 'Seed ratio', - 'type': 'float', - 'default': 1, - 'description': 'Will not be (re)moved until this seed ratio is met.', - }, - { - 'name': 'seed_time', - 'label': 'Seed time', - 'type': 'int', - 'default': 40, - 'description': 'Will not be (re)moved until this seed time (in hours) is met.', - }, - { - 'name': 'extra_score', - 'advanced': True, - 'label': 'Extra Score', - 'type': 'int', - 'default': 0, - 'description': 'Starting score for each release found via this provider.', - } - ], - }, - ], -}] diff --git a/couchpotato/core/media/_base/providers/torrent/sceneaccess.py b/couchpotato/core/media/_base/providers/torrent/sceneaccess.py index e488b63b..e172f6ad 100644 --- a/couchpotato/core/media/_base/providers/torrent/sceneaccess.py +++ b/couchpotato/core/media/_base/providers/torrent/sceneaccess.py @@ -89,8 +89,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'SceneAccess', - 'description': 'See SceneAccess', + 'description': 'SceneAccess', 'wizard': True, + 'icon': 
'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAACT0lEQVR4AYVQS0sbURidO3OTmajJ5FElTTOkPmZ01GhHrIq0aoWAj1Vc+A/cuRMXbl24V9SlCGqrLhVFCrooEhCp2BAx0mobTY2kaR7qmOm87EXL1EWxh29xL+c7nPMdgGHYO5bF/gdbefnr6WlbWRnxluMwAB4Z0uEgXa7nwaDL7+/RNPzxbYvb/XJ0FBYVfd/ayh0fQ4qCGEHcm0KLRZUk7Pb2YRJPRwcsKMidnKD3t9VVT3s7BDh+z5FOZ3Vfn3h+Hltfx00mRRSRWFcUmmVNhYVqPn8dj3va2oh+txvcQRVF9ebm1fi4k+dRFbosY5rm4Hk7xxULQnJnx93S4g0EIEEQRoDLo6PrWEw8Pc0eHLwYGopMTDirqlJ7eyhYYGHhfgfHCcKYksZGVB/NcXI2mw6HhZERqrjYTNPHi4tFPh8aJIYIhgPlcCRDoZLW1s75+Z/7+59nZ/OJhLWigqAoKZX6Mjf3dXkZ3pydGYLc4aEoCCkInzQ1fRobS2xuvllaonkedfArnY5OTdGVldBkOADgqq2Nr6z8CIWaJietDHOhKB+HhwFKC6Gnq4ukKJvP9zcSbjYDXbeVlkKzuZBhnnV3e3t6UOmaJO0ODibW1hB1GYkg8R/gup7Z3TVZLJ5AILW9LcZiVpYtYBhw16O3t7cauckyeF9Tgz0ATpL2+nopmWycmbnY2LiKRjFk6/d7+/vRJfl4HGzV1T0UIM43MGBvaIBWK/YvwM5w+IMgGH8tkyEgvIpE7M3Nt6qqZrNyOq1kMmouh455Ggz+BhKY4GEc2CfwAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/thepiratebay.py b/couchpotato/core/media/_base/providers/torrent/thepiratebay.py index b840fc71..57bcfbdc 100644 --- a/couchpotato/core/media/_base/providers/torrent/thepiratebay.py +++ b/couchpotato/core/media/_base/providers/torrent/thepiratebay.py @@ -24,15 +24,18 @@ class Base(TorrentMagnetProvider): http_time_between_calls = 0 proxy_list = [ - 'https://tpb.ipredator.se', + 'https://nobay.net', + 'https://thebay.al', 'https://thepiratebay.se', - 'http://pirateproxy.ca', - 'http://tpb.al', + 'http://thepiratebay.cd', + 'http://thebootlegbay.com', 'http://www.tpb.gr', - 'http://bayproxy.me', - 'http://proxybay.eu', + 'http://tpbproxy.co.uk', + 'http://pirateproxy.in', 'http://www.getpirate.com', 'http://piratebay.io', + 'http://bayproxy.li', + 'http://proxybay.pw', ] def _search(self, media, quality, results): @@ -110,7 +113,7 @@ class Base(TorrentMagnetProvider): html = BeautifulSoup(full_description) nfo_pre = html.find('div', attrs = {'class': 'nfo'}) description = '' - try: + try: description 
= toUnicode(nfo_pre.text) except: pass @@ -126,8 +129,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'ThePirateBay', - 'description': 'The world\'s largest bittorrent tracker. See ThePirateBay', + 'description': 'The world\'s largest bittorrent tracker. ThePirateBay', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAA3UlEQVQY02P4DwT/YADIZvj//7qnozMYODmtAAusZoCDELDAegYGViZhAWZmRoYoqIDupfhNN1M3dTBEggXWMZg9jZRXV77YxhAOFpjDwMAPMoCXmcHsF1SAQZ6bQY2VgUEbKHClcAYzg3mINEO8jSCD478/DPsZmvqWblu1bOmStes3Pp0ezVDF4Gif0Hfx9///74/ObRZ2YNiZ47C8XIRBxFJR0jbSSUud4f9zAQWn8NTuziAt2zy5xIMM/z8LFX0E+fD/x0MRDCeA1v7Z++Y/FDzyvAtyBxIA+h8A8ZKLeT+lJroAAAAASUVORK5CYII=', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/torrentbytes.py b/couchpotato/core/media/_base/providers/torrent/torrentbytes.py index 85a4fda1..156243eb 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentbytes.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentbytes.py @@ -90,8 +90,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'TorrentBytes', - 'description': 'See TorrentBytes', + 'description': 'TorrentBytes', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAeFBMVEUAAAAAAEQAA1QAEmEAKnQALHYAMoEAOokAQpIASYsASZgAS5UATZwATosATpgAVJ0AWZwAYZ4AZKAAaZ8Ab7IAcbMAfccAgcQAgcsAhM4AiscAjMkAmt0AoOIApecAp/EAqvQAs+kAt+wA3P8A4f8A//8VAAAfDbiaAl08AAAAjUlEQVQYGQXBO04DQRAFwHqz7Z8sECIl5f73ISRD5GBs7UxTlWfg9vYXnvJRQJqOL88D6BAwJtMMumHUVCl60aa6H93IrIv0b+157f1lpk+fm87lMWrZH0vncKbXdRUQrRmrh9C6Iwkq6rg4PXZcyXmbizzeV/g+rDra0rGve8jPKLSOJNi2AQAwAGjwD7ApPkEHdtPQAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/torrentday.py b/couchpotato/core/media/_base/providers/torrent/torrentday.py index a4eb3899..a3e9b78c 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentday.py +++ 
b/couchpotato/core/media/_base/providers/torrent/torrentday.py @@ -1,4 +1,3 @@ -from couchpotato.core.helpers.encoding import tryUrlencode from couchpotato.core.helpers.variable import tryInt from couchpotato.core.logger import CPLog from couchpotato.core.media._base.providers.torrent.base import TorrentProvider @@ -69,8 +68,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'TorrentDay', - 'description': 'See TorrentDay', + 'description': 'TorrentDay', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAC5ElEQVQ4y12TXUgUURTH//fO7Di7foeQJH6gEEEIZZllVohfSG/6UA+RSFAQQj74VA8+Bj30lmAlRVSEvZRfhNhaka5ZUG1paKaW39tq5O6Ou+PM3M4o6m6X+XPPzD3zm/+dcy574r515WfIW8CZBM4YAA5Gc/aQC3yd7oXYEONcsISE5dTDh91HS0t7FEWhBUAeN9ynV/d9qJAgE4AECURAcVsGlCCnly26LMA0IQwTa52dje3d3e3hcPi8qqrrMjcVYI3EHCQZlkFOHBwR2QHh2ASAAIJxWGAQEDxjePhs3527XjJwnb37OHBq0T+Tyyjh+9KnEzNJ7nouc1Q/3A3HGsOvnJy+PSUlj81w2Lny9WuJ6+3AmTjD4HOcrdR2dWXLRQePvyaSLfQOPMPC8mC9iHCsOxSyzJCelzdSXlNzD5ujpb25Wbfc/XXJemTXF4+nnCNq+AMLe50uFfEJTiw4GXSFtiHL0SnIq66+p0kSArqO+eH3RdsAv9+f5vW7L7GICq6rmM8XBCAXlBw90rOyxibn5yzfkg/L09M52/jxqdESaIrBXHYZZbB1GX8cEpySxKIB8S5XcOnvqpli1zuwmrTtoLjw5LOK/eeuWsE4JH5IRPaPZKiKigmPp+5pa+u1aEjIMhEgrRkmi9mgxGUhM7LNJSzOzsE3+cOeExovXOjdytE0LV4zqNZUtV0uZzAGoGkhDH/2YHZiErmv4uyWQnZZWc+hoqL3WzlTExN5hhA8IEwkZWZOxwB++30YG/9GkYCPvqAaHAW5uWPROW86OmqCprUR7z1yZDAGQNuCvkoB/baIKUBWMTYymv+gra3eJNvjXu+B562tFyXqTJ6YuHK8rKwvBmC3vR7cOCPQLWFz8LnfXWUrJo9U19BwMyUlJRjTSMJ2ENxUiGxq9KXQfwqYlnWstvbR5aamG9g0uzM8Q4OFt++3NNixQ2NgYmeN03FOTUv7XVpV9aKisvLl1vN/WVhNc/Fi1NEAAAAASUVORK5CYII=', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/torrentleech.py b/couchpotato/core/media/_base/providers/torrent/torrentleech.py index fe427985..5f59dab7 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentleech.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentleech.py @@ -24,9 +24,9 @@ class Base(TorrentProvider): http_time_between_calls = 1 # 
Seconds cat_backup_id = None - def _search(self, media, quality, results): + def _searchOnTitle(self, title, media, quality, results): - url = self.urls['search'] % self.buildUrl(media, quality) + url = self.urls['search'] % self.buildUrl(title, media, quality) data = self.getHTMLData(url) @@ -80,8 +80,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'TorrentLeech', - 'description': 'See TorrentLeech', + 'description': 'TorrentLeech', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACHUlEQVR4AZVSO48SYRSdGTCBEMKzILLAWiybkKAGMZRUUJEoDZX7B9zsbuQPYEEjNLTQkYgJDwsoSaxspEBsCITXjjNAIKi8AkzceXgmbHQ1NJ5iMufmO9/9zrmXlCSJ+B8o75J8Pp/NZj0eTzweBy0Wi4PBYD6f12o1r9ebTCZx+22HcrnMsuxms7m6urTZ7LPZDMVYLBZ8ZV3yo8aq9Pq0wzCMTqe77dDv9y8uLyAWBH6xWOyL0K/56fcb+rrPgPZ6PZfLRe1fsl6vCUmGKIqoqNXqdDr9Dbjps9znUV0uTqdTjuPkDoVCIfcuJ4gizjMMm8u9vW+1nr04czqdK56c37CbKY9j2+1WEARZ0Gq1RFHAz2q1qlQqXxoN69HRcDjUarW8ZD6QUigUOnY8uKYH8N1sNkul9yiGw+F6vS4Rxn8EsodEIqHRaOSnq9T7ajQazWQycEIR1AEBYDabSZJyHDucJyegwWBQr9ebTCaKvHd4cCQANUU9evwQ1Ofz4YvUKUI43GE8HouSiFiNRhOowWBIpVLyHITJkuW3PwgAEf3pgIwxF5r+OplMEsk3CPT5szCMnY7EwUdhwUh/CXiej0Qi3idPz89fdrpdbsfBzH7S3Q9K5pP4c0sAKpVKoVAQGO1ut+t0OoFAQHkH2Da/3/+but3uarWK0ZMQoNdyucRutdttmqZxMTzY7XaYxsrgtUjEZrNhkSwWyy/0NCatZumrNQAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/torrentpotato.py b/couchpotato/core/media/_base/providers/torrent/torrentpotato.py index 84718f04..d1426765 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentpotato.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentpotato.py @@ -134,6 +134,7 @@ config = [{ 'order': 10, 'description': 'CouchPotato torrent provider. 
Checkout the wiki page about this provider for more info.', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABSElEQVR4AZ2Nz0oCURTGv8t1YMpqUxt9ARFxoQ/gQtppgvUKcu/sxB5iBJkogspaBC6iVUplEC6kv+oiiKDNhAtt16roP0HQgdsMLgaxfvy4nHP4Pi48qE2g4v91JOqT1CH/UnA7w7icUlLawyEdj+ZI/7h6YluWbRiddHonHh9M70aj7VTKzuXuikUMci/EO/ACnAI15599oAk8AR/AgxBQNCzreD7bmpl+FOIVuAHqQDUcJo+AK+CZFKLt95/MpSmMt0TiW9POxse6UvYZ6zB2wFgjFiNpOGesR0rZ0PVPXf8KhUCl22CwClz4eN8weoZBb9c0bdPsOWvHx/cYu9Y0CoNoZTJrwAbn5DrnZc6XOV+igVbnsgo0IxEomlJuA1vUIYGyq3PZBChwmExCUSmVZgMBDIUCK4UCFIv5vHIhm/XUDeAf/ADbcpd5+aXSWQAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/torrentshack.py b/couchpotato/core/media/_base/providers/torrent/torrentshack.py index 2a285b2a..1af7e552 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentshack.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentshack.py @@ -48,9 +48,9 @@ class Base(TorrentProvider): 'name': six.text_type(link.span.string).translate({ord(six.u('\xad')): None}), 'url': self.urls['download'] % url['href'], 'detail_url': self.urls['download'] % link['href'], - 'size': self.parseSize(result.find_all('td')[4].string), - 'seeders': tryInt(result.find_all('td')[6].string), - 'leechers': tryInt(result.find_all('td')[7].string), + 'size': self.parseSize(result.find_all('td')[5].string), + 'seeders': tryInt(result.find_all('td')[7].string), + 'leechers': tryInt(result.find_all('td')[8].string), }) except: @@ -80,7 +80,9 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'TorrentShack', - 'description': 'See TorrentShack', + 'description': 'TorrentShack', + 'wizard': True, + 'icon': 
'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAABmElEQVQoFQXBzY2cVRiE0afqvd84CQiAnxWWtyxsS6ThINBYg2Dc7mZBMEjE4mzs6e9WcY5+ePNuVFJJodQAoLo+SaWCy9rcV8cmjah3CI6iYu7oRU30kE5xxELRfamklY3k1NL19sSm7vPzP/ZdNZzKVDaY2sPZJBh9fv5ITrmG2+Vp4e1sPchVqTCQZJnVXi+/L4uuAJGly1+Pw8CprLbi8Om7tbT19/XRqJUk11JP9uHj9ulxhXbvJbI9qJvr5YkGXFG2IBT8tXczt+sfzDZCp3765f3t9tHEHGEDACma77+8o4oATKk+/PfW9YmHruRFjWoVSFsVsGu1YSKq6Oc37+n98unPZSRlY7vsKDqN+92X3yR9+PdXee3iJNKMStqdcZqoTJbUSi5JOkpfRlhSI0mSpEmCFKoU7FqSNOLAk54uGwCStMUCgLrVic62g7oDoFmmdI+P3S0pDe1xvDqb6XrZqbtzShWNoh9fv/XQHaDdM9OqrZi2M7M3UrB2vlkPS1IbdEBk7UiSoD6VlZ6aKWer4aH4f/AvKoHUTjuyAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/providers/torrent/torrentz.py b/couchpotato/core/media/_base/providers/torrent/torrentz.py index 3937f64e..8a5455c9 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentz.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentz.py @@ -80,11 +80,12 @@ config = [{ 'name': 'Torrentz', 'description': 'Torrentz is a free, fast and powerful meta-search engine. 
Torrentz', 'wizard': True, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAQklEQVQ4y2NgAALjtJn/ycEMlGiGG0IVAxiwAKzOxaKGARcgxgC8YNSAwWoAzuRMjgsIugqfAUR5CZcBRIcHsWEAADSA96Ig020yAAAAAElFTkSuQmCC', 'options': [ { 'name': 'enabled', 'type': 'enabler', - 'default': False + 'default': True }, { 'name': 'verified_only', diff --git a/couchpotato/core/media/_base/providers/torrent/yify.py b/couchpotato/core/media/_base/providers/torrent/yify.py index 82413fc5..0daf20a0 100644 --- a/couchpotato/core/media/_base/providers/torrent/yify.py +++ b/couchpotato/core/media/_base/providers/torrent/yify.py @@ -2,13 +2,13 @@ import traceback from couchpotato.core.helpers.variable import tryInt, getIdentifier from couchpotato.core.logger import CPLog -from couchpotato.core.media._base.providers.torrent.base import TorrentProvider +from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider log = CPLog(__name__) -class Base(TorrentProvider): +class Base(TorrentMagnetProvider): urls = { 'test': '%s/api', @@ -49,7 +49,7 @@ class Base(TorrentProvider): if result['Quality'] and result['Quality'] not in result['MovieTitle']: title = result['MovieTitle'] + ' BrRip ' + result['Quality'] - else: + else: title = result['MovieTitle'] + ' BrRip' results.append({ @@ -79,6 +79,7 @@ config = [{ 'name': 'Yify', 'description': 'Free provider, less accurate. 
Small HD movies, encoded by Yify.', 'wizard': False, + 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACL0lEQVR4AS1SPW/UQBAd23fxne/Ld2dvzvHuzPocEBAKokCBqGiQ6IgACYmvUKRBFEQgKKGg4BAlUoggggYUEQpSHOI7CIEoQs/fYcbLaU/efTvvvZlnA1qydoxU5kcxX0CkgmQZtPy0hCUjvK+WgEByOZ5dns1O5bzna8fRVkgsxH8B0YouIvBhdD5T11NiVOoKrsttyUcpRW0InUrFnwe9HzuP2uaQZYhF2LQ76TTXw2RVMTK8mYYbjfh+zNquMVCrqn93aArLSixPxnafdGDLaz1tjY5rmNa8z5BczEQOxQfCl1GyoqoWxYRN1bkh7ELw3q/vhP6HIL4TG9KumpjgvwuyM7OsjSj98E/vszMfZ7xvPtMaWxGO5crwIumKCR5HxDtJ0AWKGG204RfUd/3smJYqwem/Q7BTS1ZGfM4LNpVwuKAz6cMeROst0S2EwNE7GjTehO2H3dxqIpdkydat15G3F8SXBi4GlpBNlSz012L/k2+W0CLLk/jbcf13rf41yJeMQ8QWUZiHCfCA9ad+81nEKPtoS9mJOf9v0NmMJHgUT6xayheK9EIK7JJeU/AF4scDF7Y5SPlJrRcxJ+um4ibNEdObxLiIwJim+eT2AL5D9CIcnZ5zvSJi9eIlNHVVtZ831dk5svPgvjPWTq+ktWkd/kD0qtm71x+sDQe3kt6DXnM7Ct+GajmTxKlkAokWljyAKSm5oWa2w+BH4P2UuVub7eTyiGOQYapY/wEztHduSDYz5gAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', diff --git a/couchpotato/core/media/_base/search/main.py b/couchpotato/core/media/_base/search/main.py index 81897b5f..1d0603cb 100644 --- a/couchpotato/core/media/_base/search/main.py +++ b/couchpotato/core/media/_base/search/main.py @@ -1,6 +1,6 @@ from couchpotato.api import addApiView from couchpotato.core.event import fireEvent, addEvent -from couchpotato.core.helpers.variable import mergeDicts +from couchpotato.core.helpers.variable import mergeDicts, getImdb from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin @@ -35,12 +35,21 @@ class Search(Plugin): elif isinstance(types, (list, tuple, set)): types = list(types) + imdb_identifier = getImdb(q) + if not types: - result = fireEvent('info.search', q = q, merge = True) + if imdb_identifier: + result = fireEvent('movie.info', identifier = imdb_identifier, merge = True) + result = {result['type']: [result]} + else: + result = fireEvent('info.search', q = q, merge = True) else: result = {} for media_type in types: - result[media_type] = fireEvent('%s.search' % 
media_type) + if imdb_identifier: + result[media_type] = fireEvent('%s.info' % media_type, identifier = imdb_identifier) + else: + result[media_type] = fireEvent('%s.search' % media_type, q = q) return mergeDicts({ 'success': True, diff --git a/couchpotato/core/media/_base/search/static/search.css b/couchpotato/core/media/_base/search/static/search.css index eb8b66cc..def985fe 100644 --- a/couchpotato/core/media/_base/search/static/search.css +++ b/couchpotato/core/media/_base/search/static/search.css @@ -3,7 +3,6 @@ vertical-align: middle; text-align: right; transition: all .4s cubic-bezier(0.9,0,0.1,1); - position: absolute; z-index: 20; border: 0 solid transparent; border-bottom-width: 4px; diff --git a/couchpotato/core/media/_base/search/static/search.js b/couchpotato/core/media/_base/search/static/search.js index 8b8f2a58..12227641 100644 --- a/couchpotato/core/media/_base/search/static/search.js +++ b/couchpotato/core/media/_base/search/static/search.js @@ -13,6 +13,9 @@ var BlockSearch = new Class({ self.input = new Element('input', { 'placeholder': 'Search & add a new media', 'events': { + 'input': self.keyup.bind(self), + 'paste': self.keyup.bind(self), + 'change': self.keyup.bind(self), 'keyup': self.keyup.bind(self), 'focus': function(){ if(focus_timer) clearTimeout(focus_timer); diff --git a/couchpotato/core/media/movie/_base/main.py b/couchpotato/core/media/movie/_base/main.py index 860ec965..336d8033 100644 --- a/couchpotato/core/media/movie/_base/main.py +++ b/couchpotato/core/media/movie/_base/main.py @@ -2,6 +2,7 @@ import os import traceback import time +from CodernityDB.database import RecordNotFound from couchpotato import get_db from couchpotato.api import addApiView from couchpotato.core.event import fireEvent, fireEventAsync, addEvent @@ -90,7 +91,7 @@ class MovieBase(MovieTypeBase): # Default profile and category default_profile = {} - if not params.get('profile_id'): + if (not params.get('profile_id') and status != 'done') or 
params.get('ignore_previous', False): default_profile = fireEvent('profile.default', single = True) cat_id = params.get('category_id') @@ -105,7 +106,7 @@ class MovieBase(MovieTypeBase): 'imdb': params.get('identifier') }, 'status': status if status else 'active', - 'profile_id': params.get('profile_id', default_profile.get('_id')), + 'profile_id': params.get('profile_id') or default_profile.get('_id'), 'category_id': cat_id if cat_id is not None and len(cat_id) > 0 and cat_id != '-1' else None, } @@ -117,8 +118,17 @@ class MovieBase(MovieTypeBase): media['info'] = info new = False + previous_profile = None try: m = db.get('media', 'imdb-%s' % params.get('identifier'), with_doc = True)['doc'] + + try: + db.get('id', m.get('profile_id')) + previous_profile = m.get('profile_id') + except RecordNotFound: + pass + except: + log.error('Failed getting previous profile: %s', traceback.format_exc()) except: new = True m = db.insert(media) @@ -146,9 +156,10 @@ class MovieBase(MovieTypeBase): else: fireEvent('release.delete', release['_id'], single = True) - m['profile_id'] = params.get('profile_id', default_profile.get('id')) + m['profile_id'] = (params.get('profile_id') or default_profile.get('_id')) if not previous_profile else previous_profile m['category_id'] = cat_id if cat_id is not None and len(cat_id) > 0 else (m.get('category_id') or None) m['last_edit'] = int(time.time()) + m['tags'] = [] do_search = True db.update(m) @@ -225,7 +236,7 @@ class MovieBase(MovieTypeBase): db.update(m) - fireEvent('media.restatus', m['_id']) + fireEvent('media.restatus', m['_id'], single = True) m = db.get('id', media_id) diff --git a/couchpotato/core/media/movie/_base/static/movie.actions.js b/couchpotato/core/media/movie/_base/static/movie.actions.js index 26ee2421..ff71f31d 100644 --- a/couchpotato/core/media/movie/_base/static/movie.actions.js +++ b/couchpotato/core/media/movie/_base/static/movie.actions.js @@ -252,7 +252,7 @@ MA.Release = new Class({ self.trynext_container.adopt( 
new Element('span.or', { - 'text': 'This movie is snatched, if anything went wrong, download' + 'text': 'If anything went wrong, download' }), lr ? new Element('a.button.orange', { 'text': 'the same release again', @@ -302,7 +302,7 @@ MA.Release = new Class({ self.movie.data.releases.each(function(release){ if(has_available && has_snatched) return; - if(['snatched', 'downloaded', 'seeding'].contains(release.status)) + if(['snatched', 'downloaded', 'seeding', 'done'].contains(release.status)) has_snatched = true; if(['available'].contains(release.status)) diff --git a/couchpotato/core/media/movie/_base/static/movie.css b/couchpotato/core/media/movie/_base/static/movie.css index 05fcddf4..311b111d 100644 --- a/couchpotato/core/media/movie/_base/static/movie.css +++ b/couchpotato/core/media/movie/_base/static/movie.css @@ -357,18 +357,40 @@ top: 30px; clear: both; bottom: 30px; - overflow: hidden; position: absolute; } - .movies .data:hover .description { - overflow: auto; - } .movies.list_list .movie:not(.details_view) .info .description, .movies.mass_edit_list .info .description, .movies.thumbs_list .info .description { display: none; } + .movies .data .eta { + display: none; + } + + .movies.details_list .data .eta { + position: absolute; + bottom: 0; + right: 0; + display: block; + min-height: 20px; + text-align: right; + font-style: italic; + opacity: .8; + font-size: 11px; + } + + .movies.details_list .movie:hover .data .eta { + display: none; + } + + .movies.thumbs_list .data .eta { + display: block; + position: absolute; + bottom: 40px; + } + .movies .data .quality { position: absolute; bottom: 2px; diff --git a/couchpotato/core/media/movie/_base/static/movie.js b/couchpotato/core/media/movie/_base/static/movie.js index 1a547f2f..47880089 100644 --- a/couchpotato/core/media/movie/_base/static/movie.js +++ b/couchpotato/core/media/movie/_base/static/movie.js @@ -136,6 +136,21 @@ var Movie = new Class({ self.el.addClass('status_'+self.get('status')); + var eta = 
null, + eta_date = null, + now = Math.round(+new Date()/1000); + + if(self.data.info.release_date) + [self.data.info.release_date.dvd, self.data.info.release_date.theater].each(function(timestamp){ + if (timestamp > 0 && (eta == null || Math.abs(timestamp - now) < Math.abs(eta - now))) + eta = timestamp; + }); + + if(eta){ + eta_date = new Date(eta * 1000); + eta_date = eta_date.toLocaleString('en-us', { month: "long" }) + ' ' + eta_date.getFullYear(); + } + self.el.adopt( self.select_checkbox = new Element('input[type=checkbox].inlay', { 'events': { @@ -158,9 +173,13 @@ var Movie = new Class({ 'text': self.data.info.year || 'n/a' }) ), - self.description = new Element('div.description', { + self.description = new Element('div.description.tiny_scroll', { 'text': self.data.info.plot }), + self.eta = eta_date && (now+8035200 > eta) ? new Element('div.eta', { + 'text': eta_date, + 'title': 'ETA' + }) : null, self.quality = new Element('div.quality', { 'events': { 'click': function(e){ diff --git a/couchpotato/core/media/movie/charts/main.py b/couchpotato/core/media/movie/charts/main.py index 58133d83..fe6ddc0f 100644 --- a/couchpotato/core/media/movie/charts/main.py +++ b/couchpotato/core/media/movie/charts/main.py @@ -45,6 +45,7 @@ class Charts(Plugin): if catched_charts: return catched_charts + charts = [] try: self.update_in_progress = True charts = fireEvent('automation.get_chart_list', merge = True) diff --git a/couchpotato/core/media/movie/charts/static/charts.css b/couchpotato/core/media/movie/charts/static/charts.css index ce8ff0ab..261169f6 100644 --- a/couchpotato/core/media/movie/charts/static/charts.css +++ b/couchpotato/core/media/movie/charts/static/charts.css @@ -3,26 +3,21 @@ margin-bottom: 30px; } -.charts > h2 { - height: 40px; -} + .charts > h2 { + height: 40px; + } -.charts .chart { - display: inline-block; - width: 50%; - vertical-align: top; - max-height: 510px; - overflow: hidden; - scrollbar-base-color: #4e5969; -} + .charts .chart { + display: 
inline-block; + width: 50%; + vertical-align: top; + max-height: 510px; + scrollbar-base-color: #4e5969; + } -.charts .chart:hover { - overflow-y: auto; -} - -.charts .chart .media_result.hidden { - display: none; -} + .charts .chart .media_result.hidden { + display: none; + } .charts .refresh { clear:both; @@ -36,30 +31,30 @@ text-align:center; } - .charts .refresh a { - text-align: center; - padding: 0; - display: none; - width: 30px; - height: 30px; - position: absolute; - right: 10px; - top: -40px; - opacity: .7; - } + .charts .refresh a { + text-align: center; + padding: 0; + display: none; + width: 30px; + height: 30px; + position: absolute; + right: 10px; + top: -40px; + opacity: .7; + } - .charts .refresh a:hover { - opacity: 1; - } + .charts .refresh a:hover { + opacity: 1; + } -.charts p.no_charts_enabled { - padding: 0.7em 1em; - display: none; -} + .charts p.no_charts_enabled { + padding: 0.7em 1em; + display: none; + } -.charts .chart h3 a { - color: #fff; -} + .charts .chart h3 a { + color: #fff; + } .charts .chart .media_result { @@ -148,7 +143,6 @@ padding: 0 3px 10px 0; } .charts .chart .media_result .data:before { - bottom: 0; content: ''; display: block; height: 10px; diff --git a/couchpotato/core/media/movie/charts/static/charts.js b/couchpotato/core/media/movie/charts/static/charts.js index 559046f0..79f0eda9 100644 --- a/couchpotato/core/media/movie/charts/static/charts.js +++ b/couchpotato/core/media/movie/charts/static/charts.js @@ -22,9 +22,11 @@ var Charts = new Class({ 'events': { 'click': function(e) { e.preventDefault(); - self.el.getChildren('div.chart').destroy(); + + self.el.getElements('.chart').destroy(); self.el_refreshing_text.show(); self.el_refresh_link.hide(); + self.api_request = Api.request('charts.view', { 'data': { 'force_update': 1 }, 'onComplete': self.fill.bind(self) @@ -72,7 +74,7 @@ var Charts = new Class({ Object.each(json.charts, function(chart){ - var c = new Element('div.chart').grab( + var c = new 
Element('div.chart.tiny_scroll').grab( new Element('h3').grab( new Element('a', { 'text': chart.name, 'href': chart.url diff --git a/couchpotato/core/media/movie/library.py b/couchpotato/core/media/movie/library.py index a6e29f32..28cb1b46 100644 --- a/couchpotato/core/media/movie/library.py +++ b/couchpotato/core/media/movie/library.py @@ -1,4 +1,5 @@ from couchpotato.core.event import addEvent +from couchpotato.core.helpers.variable import getTitle from couchpotato.core.logger import CPLog from couchpotato.core.media._base.library.base import LibraryBase @@ -17,7 +18,9 @@ class MovieLibraryPlugin(LibraryBase): if media.get('type') != 'movie': return + default_title = getTitle(media) titles = media['info'].get('titles', []) + titles.insert(0, default_title) # Add year identifier to titles if include_year: diff --git a/couchpotato/core/media/movie/providers/automation/imdb.py b/couchpotato/core/media/movie/providers/automation/imdb.py index b52816a8..783e8f50 100644 --- a/couchpotato/core/media/movie/providers/automation/imdb.py +++ b/couchpotato/core/media/movie/providers/automation/imdb.py @@ -3,6 +3,7 @@ import re from bs4 import BeautifulSoup from couchpotato import fireEvent +from couchpotato.core.helpers.encoding import ss from couchpotato.core.helpers.rss import RSS from couchpotato.core.helpers.variable import getImdb, splitString, tryInt from couchpotato.core.logger import CPLog @@ -28,6 +29,39 @@ class IMDBBase(Automation, RSS): def getInfo(self, imdb_id): return fireEvent('movie.info', identifier = imdb_id, extended = False, merge = True) + def getFromURL(self, url): + log.debug('Getting IMDBs from: %s', url) + html = self.getHTMLData(url) + + try: + split = splitString(html, split_on = "
")[1] + html = splitString(split, split_on = "
")[0] + except: + try: + split = splitString(html, split_on = "
") + + if len(split) < 2: + log.error('Failed parsing IMDB page "%s", unexpected html.', url) + return [] + + html = BeautifulSoup(split[1]) + for x in ['list compact', 'lister', 'list detail sub-list']: + html2 = html.find('div', attrs = { + 'class': x + }) + + if html2: + html = html2.contents + html = ''.join([str(x) for x in html]) + break + except: + log.error('Failed parsing IMDB page "%s": %s', (url, traceback.format_exc())) + + html = ss(html) + imdbs = getImdb(html, multiple = True) if html else [] + + return imdbs + class IMDBWatchlist(IMDBBase): @@ -65,16 +99,7 @@ class IMDBWatchlist(IMDBBase): try: w_url = '%s&start=%s' % (watchlist_url, start) - log.debug('Started IMDB watchlists: %s', w_url) - html = self.getHTMLData(w_url) - - try: - split = splitString(html, split_on="
")[1] - html = splitString(split, split_on="
")[0] - except: - pass - - imdbs = getImdb(html, multiple = True) if html else [] + imdbs = self.getFromURL(w_url) for imdb in imdbs: if imdb not in movies: @@ -85,13 +110,14 @@ class IMDBWatchlist(IMDBBase): log.debug('Found %s movies on %s', (len(imdbs), w_url)) - if len(imdbs) < 250: + if len(imdbs) < 225: break - start += 250 + start = len(movies) except: log.error('Failed loading IMDB watchlist: %s %s', (watchlist_url, traceback.format_exc())) + break return movies @@ -109,12 +135,12 @@ class IMDBAutomation(IMDBBase): 'boxoffice': { 'order': 2, 'name': 'IMDB - Box Office', - 'url': 'http://www.imdb.com/chart/', + 'url': 'http://www.imdb.com/boxoffice/', }, 'rentals': { 'order': 3, 'name': 'IMDB - Top DVD rentals', - 'url': 'http://m.imdb.com/boxoffice_json', + 'url': 'http://www.imdb.com/boxoffice/rentals', 'type': 'json', }, 'top250': { @@ -124,8 +150,6 @@ class IMDBAutomation(IMDBBase): }, } - first_table = ['boxoffice'] - def getIMDBids(self): movies = [] @@ -135,36 +159,19 @@ class IMDBAutomation(IMDBBase): url = chart.get('url') if self.conf('automation_charts_%s' % name): - data = self.getHTMLData(url) + imdb_ids = self.getFromURL(url) - if data: - try: - html = BeautifulSoup(data) + try: + for imdb_id in imdb_ids: + info = self.getInfo(imdb_id) + if info and self.isMinimalMovie(info): + movies.append(imdb_id) - if chart.get('type', 'html') == 'html': - result_div = html.find('div', attrs = {'id': 'main'}) + if self.shuttingDown(): + break - try: - if url in self.first_table: - table = result_div.find('table') - result_div = table if table else result_div - except: - pass - - imdb_ids = getImdb(str(result_div), multiple = True) - else: - imdb_ids = getImdb(str(data), multiple = True) - - for imdb_id in imdb_ids: - info = self.getInfo(imdb_id) - if info and self.isMinimalMovie(info): - movies.append(imdb_id) - - if self.shuttingDown(): - break - - except: - log.error('Failed loading IMDB chart results from %s: %s', (url, traceback.format_exc())) + except: 
+ log.error('Failed loading IMDB chart results from %s: %s', (url, traceback.format_exc())) return movies @@ -182,42 +189,25 @@ class IMDBAutomation(IMDBBase): chart['list'] = [] - data = self.getHTMLData(url) - if data: - html = BeautifulSoup(data) + imdb_ids = self.getFromURL(url) - try: + try: + for imdb_id in imdb_ids[0:max_items]: - if chart.get('type', 'html') == 'html': - result_div = html.find('div', attrs = {'id': 'main'}) + is_movie = fireEvent('movie.is_movie', identifier = imdb_id, single = True) + if not is_movie: + continue - try: - if url in self.first_table: - table = result_div.find('table') - result_div = table if table else result_div - except: - pass + info = self.getInfo(imdb_id) + chart['list'].append(info) - imdb_ids = getImdb(str(result_div), multiple = True) - else: - imdb_ids = getImdb(str(data), multiple = True) + if self.shuttingDown(): + break + except: + log.error('Failed loading IMDB chart results from %s: %s', (url, traceback.format_exc())) - for imdb_id in imdb_ids[0:max_items]: - - is_movie = fireEvent('movie.is_movie', identifier = imdb_id, single = True) - if not is_movie: - continue - - info = self.getInfo(imdb_id) - chart['list'].append(info) - - if self.shuttingDown(): - break - except: - log.error('Failed loading IMDB chart results from %s: %s', (url, traceback.format_exc())) - - if chart['list']: - movie_lists.append(chart) + if chart['list']: + movie_lists.append(chart) return movie_lists @@ -273,7 +263,7 @@ config = [{ 'name': 'automation_charts_rentals', 'type': 'bool', 'label': 'DVD Rentals', - 'description': 'Top DVD rentals chart', + 'description': 'Top DVD rentals chart', 'default': True, }, { @@ -322,7 +312,7 @@ config = [{ 'name': 'chart_display_rentals', 'type': 'bool', 'label': 'DVD Rentals', - 'description': 'Top DVD rentals chart', + 'description': 'Top DVD rentals chart', 'default': True, }, { diff --git a/couchpotato/core/media/movie/providers/automation/moviemeter.py 
b/couchpotato/core/media/movie/providers/automation/moviemeter.py index 883fcfa1..b06046fb 100644 --- a/couchpotato/core/media/movie/providers/automation/moviemeter.py +++ b/couchpotato/core/media/movie/providers/automation/moviemeter.py @@ -21,11 +21,15 @@ class Moviemeter(Automation, RSS): for movie in rss_movies: - name_year = fireEvent('scanner.name_year', self.getTextElement(movie, 'title'), single = True) - imdb = self.search(name_year.get('name'), name_year.get('year')) + title = self.getTextElement(movie, 'title') + name_year = fireEvent('scanner.name_year', title, single = True) + if name_year.get('name') and name_year.get('year'): + imdb = self.search(name_year.get('name'), name_year.get('year')) - if imdb and self.isMinimalMovie(imdb): - movies.append(imdb['imdb']) + if imdb and self.isMinimalMovie(imdb): + movies.append(imdb['imdb']) + else: + log.error('Failed getting name and year from: %s', title) return movies diff --git a/couchpotato/core/media/movie/providers/automation/popularmovies.py b/couchpotato/core/media/movie/providers/automation/popularmovies.py new file mode 100644 index 00000000..eb46ecef --- /dev/null +++ b/couchpotato/core/media/movie/providers/automation/popularmovies.py @@ -0,0 +1,47 @@ +from couchpotato import fireEvent +from couchpotato.core.logger import CPLog +from couchpotato.core.media.movie.providers.automation.base import Automation + +log = CPLog(__name__) + +autoload = 'PopularMovies' + + +class PopularMovies(Automation): + + interval = 1800 + url = 'https://s3.amazonaws.com/popular-movies/movies.json' + + def getIMDBids(self): + + movies = [] + retrieved_movies = self.getJsonData(self.url) + + for movie in retrieved_movies.get('movies'): + imdb_id = movie.get('imdb_id') + info = fireEvent('movie.info', identifier = imdb_id, extended = False, merge = True) + if self.isMinimalMovie(info): + movies.append(imdb_id) + + return movies + + +config = [{ + 'name': 'popularmovies', + 'groups': [ + { + 'tab': 'automation', + 'list': 
'automation_providers', + 'name': 'popularmovies_automation', + 'label': 'Popular Movies', + 'description': 'Imports the top titles of movies that have been in theaters. Script provided by Steven Lu', + 'options': [ + { + 'name': 'automation_enabled', + 'default': False, + 'type': 'enabler', + }, + ], + }, + ], +}] diff --git a/couchpotato/core/media/movie/providers/info/_modifier.py b/couchpotato/core/media/movie/providers/info/_modifier.py index bf257272..f6a3089b 100644 --- a/couchpotato/core/media/movie/providers/info/_modifier.py +++ b/couchpotato/core/media/movie/providers/info/_modifier.py @@ -26,7 +26,14 @@ class MovieResultModifier(Plugin): 'backdrop': [], 'poster_original': [], 'backdrop_original': [], - 'actors': {} + 'actors': {}, + 'landscape': [], + 'logo': [], + 'clear_art': [], + 'disc_art': [], + 'banner': [], + 'extra_thumbs': [], + 'extra_fanart': [] }, 'runtime': 0, 'plot': '', diff --git a/couchpotato/core/media/movie/providers/info/couchpotatoapi.py b/couchpotato/core/media/movie/providers/info/couchpotatoapi.py index 272fab9b..4c65bf8c 100644 --- a/couchpotato/core/media/movie/providers/info/couchpotatoapi.py +++ b/couchpotato/core/media/movie/providers/info/couchpotatoapi.py @@ -29,7 +29,7 @@ class CouchPotatoApi(MovieProvider): api_version = 1 def __init__(self): - addEvent('movie.info', self.getInfo, priority = 1) + addEvent('movie.info', self.getInfo, priority = 2) addEvent('movie.info.release_date', self.getReleaseDate) addEvent('info.search', self.search, priority = 1) diff --git a/couchpotato/core/media/movie/providers/info/fanarttv.py b/couchpotato/core/media/movie/providers/info/fanarttv.py new file mode 100644 index 00000000..8bfa92c8 --- /dev/null +++ b/couchpotato/core/media/movie/providers/info/fanarttv.py @@ -0,0 +1,130 @@ +import traceback + +from couchpotato import tryInt +from couchpotato.core.event import addEvent +from couchpotato.core.logger import CPLog +from couchpotato.core.media.movie.providers.base import 
MovieProvider + + +log = CPLog(__name__) + +autoload = 'FanartTV' + + +class FanartTV(MovieProvider): + + urls = { + 'api': 'http://api.fanart.tv/webservice/movie/b28b14e9be662e027cfbc7c3dd600405/%s/JSON/all/1/2' + } + + MAX_EXTRAFANART = 20 + http_time_between_calls = 0 + + def __init__(self): + addEvent('movie.info', self.getArt, priority = 1) + + def getArt(self, identifier = None, **kwargs): + + log.debug("Getting Extra Artwork from Fanart.tv...") + if not identifier: + return {} + + images = {} + + try: + url = self.urls['api'] % identifier + fanart_data = self.getJsonData(url) + + if fanart_data: + name, resource = fanart_data.items()[0] + log.debug('Found images for %s', name) + images = self._parseMovie(resource) + + except: + log.error('Failed getting extra art for %s: %s', + (identifier, traceback.format_exc())) + return {} + + return { + 'images': images + } + + def _parseMovie(self, movie): + images = { + 'landscape': self._getMultImages(movie.get('moviethumb', []), 1), + 'logo': [], + 'disc_art': self._getMultImages(self._trimDiscs(movie.get('moviedisc', [])), 1), + 'clear_art': self._getMultImages(movie.get('hdmovieart', []), 1), + 'banner': self._getMultImages(movie.get('moviebanner', []), 1), + 'extra_fanart': [], + } + + if len(images['clear_art']) == 0: + images['clear_art'] = self._getMultImages(movie.get('movieart', []), 1) + + images['logo'] = self._getMultImages(movie.get('hdmovielogo', []), 1) + if len(images['logo']) == 0: + images['logo'] = self._getMultImages(movie.get('movielogo', []), 1) + + fanarts = self._getMultImages(movie.get('moviebackground', []), self.MAX_EXTRAFANART + 1) + + if fanarts: + images['backdrop_original'] = [fanarts[0]] + images['extra_fanart'] = fanarts[1:] + + return images + + def _trimDiscs(self, disc_images): + """ + Return a subset of discImages. Only bluray disc images will be returned. 
+ """ + + trimmed = [] + for disc in disc_images: + if disc.get('disc_type') == 'bluray': + trimmed.append(disc) + + if len(trimmed) == 0: + return disc_images + + return trimmed + + def _getImage(self, images): + image_url = None + highscore = -1 + for image in images: + if tryInt(image.get('likes')) > highscore: + highscore = tryInt(image.get('likes')) + image_url = image.get('url') + + return image_url + + def _getMultImages(self, images, n): + """ + Chooses the best n images and returns them as a list. + If n<0, all images will be returned. + """ + image_urls = [] + pool = [] + for image in images: + if image.get('lang') == 'en': + pool.append(image) + orig_pool_size = len(pool) + + while len(pool) > 0 and (n < 0 or orig_pool_size - len(pool) < n): + best = None + highscore = -1 + for image in pool: + if tryInt(image.get('likes')) > highscore: + highscore = tryInt(image.get('likes')) + best = image + image_urls.append(best.get('url')) + pool.remove(best) + + return image_urls + + def isDisabled(self): + if self.conf('api_key') == '': + log.error('No API key provided.') + return True + return False diff --git a/couchpotato/core/media/movie/providers/info/themoviedb.py b/couchpotato/core/media/movie/providers/info/themoviedb.py index b48822cc..ac1daecd 100644 --- a/couchpotato/core/media/movie/providers/info/themoviedb.py +++ b/couchpotato/core/media/movie/providers/info/themoviedb.py @@ -13,9 +13,10 @@ autoload = 'TheMovieDb' class TheMovieDb(MovieProvider): + MAX_EXTRATHUMBS = 4 def __init__(self): - addEvent('movie.info', self.getInfo, priority = 2) + addEvent('movie.info', self.getInfo, priority = 3) addEvent('movie.info_by_tmdb', self.getInfo) # Configure TMDB settings @@ -97,16 +98,18 @@ class TheMovieDb(MovieProvider): if not movie_data: # Images - poster = self.getImage(movie, type = 'poster', size = 'poster') + poster = self.getImage(movie, type = 'poster', size = 'w154') poster_original = self.getImage(movie, type = 'poster', size = 'original') 
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original') + extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original', n = self.MAX_EXTRATHUMBS, skipfirst = True) images = { 'poster': [poster] if poster else [], #'backdrop': [backdrop] if backdrop else [], 'poster_original': [poster_original] if poster_original else [], 'backdrop_original': [backdrop_original] if backdrop_original else [], - 'actors': {} + 'actors': {}, + 'extra_thumbs': extra_thumbs } # Genres @@ -150,8 +153,10 @@ class TheMovieDb(MovieProvider): movie_data = dict((k, v) for k, v in movie_data.items() if v) # Add alternative names + if movie_data['original_title'] and movie_data['original_title'] not in movie_data['titles']: + movie_data['titles'].append(movie_data['original_title']) + if extended: - movie_data['titles'].append(movie.originaltitle) for alt in movie.alternate_titles: alt_name = alt.title if alt_name and alt_name not in movie_data['titles'] and alt_name.lower() != 'none' and alt_name is not None: @@ -172,6 +177,30 @@ class TheMovieDb(MovieProvider): return image_url + def getMultImages(self, movie, type = 'backdrops', size = 'original', n = -1, skipfirst = False): + """ + If n < 0, return all images. Otherwise return n images. + If n > len(getattr(movie, type)), then return all images. + If skipfirst is True, then it will skip getattr(movie, type)[0]. This + is because backdrops[0] is typically backdrop. 
+ """ + + image_urls = [] + try: + images = getattr(movie, type) + if n < 0 or n > len(images): + num_images = len(images) + else: + num_images = n + + for i in range(int(skipfirst), num_images + int(skipfirst)): + image_urls.append(images[i].geturl(size = size)) + + except: + log.debug('Failed getting %i %s.%s for "%s"', (n, type, size, ss(str(movie)))) + + return image_urls + def isDisabled(self): if self.conf('api_key') == '': log.error('No API key provided.') diff --git a/couchpotato/core/media/movie/providers/metadata/base.py b/couchpotato/core/media/movie/providers/metadata/base.py index b9fc5d71..7968000b 100644 --- a/couchpotato/core/media/movie/providers/metadata/base.py +++ b/couchpotato/core/media/movie/providers/metadata/base.py @@ -4,7 +4,7 @@ import traceback from couchpotato.core.event import addEvent, fireEvent from couchpotato.core.helpers.encoding import sp -from couchpotato.core.helpers.variable import getIdentifier +from couchpotato.core.helpers.variable import getIdentifier, underscoreToCamel from couchpotato.core.logger import CPLog from couchpotato.core.media._base.providers.metadata.base import MetaDataBase from couchpotato.environment import Env @@ -38,75 +38,150 @@ class MovieMetaData(MetaDataBase): movie_info = group['media'].get('info') - for file_type in ['nfo', 'thumbnail', 'fanart']: + for file_type in ['nfo']: try: - # Get file path - name = getattr(self, 'get' + file_type.capitalize() + 'Name')(meta_name, root) + self._createType(meta_name, root, movie_info, group, file_type, 0) + except: + log.error('Unable to create %s file: %s', ('nfo', traceback.format_exc())) - if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None): - - # Get file content - content = getattr(self, 'get' + file_type.capitalize())(movie_info = movie_info, data = group) - if content: - log.debug('Creating %s file: %s', (file_type, name)) - if os.path.isfile(content): - content = sp(content) - name = sp(name) - - 
shutil.copy2(content, name) - shutil.copyfile(content, name) - - # Try and copy stats seperately - try: shutil.copystat(content, name) - except: pass - else: - self.createFile(name, content) - group['renamed_files'].append(name) - - try: - os.chmod(sp(name), Env.getPermission('file')) - except: - log.debug('Failed setting permissions for %s: %s', (name, traceback.format_exc())) + for file_type in ['thumbnail', 'fanart', 'banner', 'disc_art', 'logo', 'clear_art', 'landscape', 'extra_thumbs', 'extra_fanart']: + try: + if file_type == 'thumbnail': + num_images = len(movie_info['images']['poster_original']) + elif file_type == 'fanart': + num_images = len(movie_info['images']['backdrop_original']) + else: + num_images = len(movie_info['images'][file_type]) + for i in range(num_images): + self._createType(meta_name, root, movie_info, group, file_type, i) except: log.error('Unable to create %s file: %s', (file_type, traceback.format_exc())) + def _createType(self, meta_name, root, movie_info, group, file_type, i): # Get file path + camelcase_method = underscoreToCamel(file_type.capitalize()) + name = getattr(self, 'get' + camelcase_method + 'Name')(meta_name, root, i) + + if name and (self.conf('meta_' + file_type) or self.conf('meta_' + file_type) is None): + + # Get file content + content = getattr(self, 'get' + camelcase_method)(movie_info = movie_info, data = group, i = i) + if content: + log.debug('Creating %s file: %s', (file_type, name)) + if os.path.isfile(content): + content = sp(content) + name = sp(name) + + if not os.path.exists(os.path.dirname(name)): + os.makedirs(os.path.dirname(name)) + + shutil.copy2(content, name) + shutil.copyfile(content, name) + + # Try and copy stats seperately + try: shutil.copystat(content, name) + except: pass + else: + self.createFile(name, content) + group['renamed_files'].append(name) + + try: + os.chmod(sp(name), Env.getPermission('file')) + except: + log.debug('Failed setting permissions for %s: %s', (name, 
traceback.format_exc())) + def getRootName(self, data = None): if not data: data = {} return os.path.join(data['destination_dir'], data['filename']) - def getFanartName(self, name, root): + def getFanartName(self, name, root, i): return - def getThumbnailName(self, name, root): + def getThumbnailName(self, name, root, i): return - def getNfoName(self, name, root): + def getBannerName(self, name, root, i): return - def getNfo(self, movie_info = None, data = None): + def getClearArtName(self, name, root, i): + return + + def getLogoName(self, name, root, i): + return + + def getDiscArtName(self, name, root, i): + return + + def getLandscapeName(self, name, root, i): + return + + def getExtraThumbsName(self, name, root, i): + return + + def getExtraFanartName(self, name, root, i): + return + + def getNfoName(self, name, root, i): + return + + def getNfo(self, movie_info = None, data = None, i = 0): if not data: data = {} if not movie_info: movie_info = {} - def getThumbnail(self, movie_info = None, data = None, wanted_file_type = 'poster_original'): + def getThumbnail(self, movie_info = None, data = None, wanted_file_type = 'poster_original', i = 0): if not data: data = {} if not movie_info: movie_info = {} # See if it is in current files files = data['media'].get('files') if files.get('image_' + wanted_file_type): - if os.path.isfile(files['image_' + wanted_file_type][0]): - return files['image_' + wanted_file_type][0] + if os.path.isfile(files['image_' + wanted_file_type][i]): + return files['image_' + wanted_file_type][i] # Download using existing info try: images = movie_info['images'][wanted_file_type] - file_path = fireEvent('file.download', url = images[0], single = True) + file_path = fireEvent('file.download', url = images[i], single = True) return file_path except: pass - def getFanart(self, movie_info = None, data = None): + def getFanart(self, movie_info = None, data = None, i = 0): if not data: data = {} if not movie_info: movie_info = {} - return 
self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'backdrop_original') + return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'backdrop_original', i = i) + + def getBanner(self, movie_info = None, data = None, i = 0): + if not data: data = {} + if not movie_info: movie_info = {} + return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'banner', i = i) + + def getClearArt(self, movie_info = None, data = None, i = 0): + if not data: data = {} + if not movie_info: movie_info = {} + return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'clear_art', i = i) + + def getLogo(self, movie_info = None, data = None, i = 0): + if not data: data = {} + if not movie_info: movie_info = {} + return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'logo', i = i) + + def getDiscArt(self, movie_info = None, data = None, i = 0): + if not data: data = {} + if not movie_info: movie_info = {} + return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'disc_art', i = i) + + def getLandscape(self, movie_info = None, data = None, i = 0): + if not data: data = {} + if not movie_info: movie_info = {} + return self.getThumbnail(movie_info = movie_info, data= data, wanted_file_type = 'landscape', i = i) + + def getExtraThumbs(self, movie_info = None, data = None, i = 0): + if not data: data = {} + if not movie_info: movie_info = {} + return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'extra_thumbs', i = i) + + def getExtraFanart(self, movie_info = None, data = None, i = 0): + if not data: data = {} + if not movie_info: movie_info = {} + return self.getThumbnail(movie_info = movie_info, data = data, wanted_file_type = 'extra_fanart', i = i) diff --git a/couchpotato/core/media/movie/providers/metadata/mediabrowser.py b/couchpotato/core/media/movie/providers/metadata/mediabrowser.py new file mode 100644 index 
00000000..6e40e4c1 --- /dev/null +++ b/couchpotato/core/media/movie/providers/metadata/mediabrowser.py @@ -0,0 +1,36 @@ +import os + +from couchpotato.core.media.movie.providers.metadata.base import MovieMetaData + + +autoload = 'MediaBrowser' + + +class MediaBrowser(MovieMetaData): + + def getThumbnailName(self, name, root, i): + return os.path.join(root, 'folder.jpg') + + def getFanartName(self, name, root, i): + return os.path.join(root, 'backdrop.jpg') + + +config = [{ + 'name': 'mediabrowser', + 'groups': [ + { + 'tab': 'renamer', + 'subtab': 'metadata', + 'name': 'mediabrowser_metadata', + 'label': 'MediaBrowser', + 'description': 'Generate folder.jpg and backdrop.jpg', + 'options': [ + { + 'name': 'meta_enabled', + 'default': False, + 'type': 'enabler', + }, + ], + }, + ], +}] diff --git a/couchpotato/core/media/movie/providers/metadata/ps3.py b/couchpotato/core/media/movie/providers/metadata/ps3.py new file mode 100644 index 00000000..05df0a53 --- /dev/null +++ b/couchpotato/core/media/movie/providers/metadata/ps3.py @@ -0,0 +1,33 @@ +import os + +from couchpotato.core.media.movie.providers.metadata.base import MovieMetaData + + +autoload = 'SonyPS3' + + +class SonyPS3(MovieMetaData): + + def getThumbnailName(self, name, root, i): + return os.path.join(root, 'cover.jpg') + + +config = [{ + 'name': 'sonyps3', + 'groups': [ + { + 'tab': 'renamer', + 'subtab': 'metadata', + 'name': 'sonyps3_metadata', + 'label': 'Sony PS3', + 'description': 'Generate cover.jpg', + 'options': [ + { + 'name': 'meta_enabled', + 'default': False, + 'type': 'enabler', + }, + ], + }, + ], +}] diff --git a/couchpotato/core/media/movie/providers/metadata/wmc.py b/couchpotato/core/media/movie/providers/metadata/wmc.py index 8faa6d2a..3cb9e3c7 100644 --- a/couchpotato/core/media/movie/providers/metadata/wmc.py +++ b/couchpotato/core/media/movie/providers/metadata/wmc.py @@ -8,7 +8,7 @@ autoload = 'WindowsMediaCenter' class WindowsMediaCenter(MovieMetaData): - def getThumbnailName(self, 
name, root): + def getThumbnailName(self, name, root, i): return os.path.join(root, 'folder.jpg') diff --git a/couchpotato/core/media/movie/providers/metadata/xbmc.py b/couchpotato/core/media/movie/providers/metadata/xbmc.py index 9f6bb7dc..ff0c119f 100644 --- a/couchpotato/core/media/movie/providers/metadata/xbmc.py +++ b/couchpotato/core/media/movie/providers/metadata/xbmc.py @@ -17,19 +17,43 @@ autoload = 'XBMC' class XBMC(MovieMetaData): - def getFanartName(self, name, root): + def getFanartName(self, name, root, i): return self.createMetaName(self.conf('meta_fanart_name'), name, root) - def getThumbnailName(self, name, root): + def getThumbnailName(self, name, root, i): return self.createMetaName(self.conf('meta_thumbnail_name'), name, root) - def getNfoName(self, name, root): + def getNfoName(self, name, root, i): return self.createMetaName(self.conf('meta_nfo_name'), name, root) + def getBannerName(self, name, root, i): + return self.createMetaName(self.conf('meta_banner_name'), name, root) + + def getClearArtName(self, name, root, i): + return self.createMetaName(self.conf('meta_clear_art_name'), name, root) + + def getLogoName(self, name, root, i): + return self.createMetaName(self.conf('meta_logo_name'), name, root) + + def getDiscArtName(self, name, root, i): + return self.createMetaName(self.conf('meta_disc_art_name'), name, root) + + def getLandscapeName(self, name, root, i): + return self.createMetaName(self.conf('meta_landscape_name'), name, root) + + def getExtraThumbsName(self, name, root, i): + return self.createMetaNameMult(self.conf('meta_extra_thumbs_name'), name, root, i) + + def getExtraFanartName(self, name, root, i): + return self.createMetaNameMult(self.conf('meta_extra_fanart_name'), name, root, i) + def createMetaName(self, basename, name, root): return os.path.join(root, basename.replace('%s', name)) - def getNfo(self, movie_info = None, data = None): + def createMetaNameMult(self, basename, name, root, i): + return os.path.join(root, 
basename.replace('%s', name).replace('', str(i + 1))) + + def getNfo(self, movie_info=None, data=None, i=0): if not data: data = {} if not movie_info: movie_info = {} @@ -129,10 +153,25 @@ class XBMC(MovieMetaData): for image_url in movie_info['images']['poster_original']: image = SubElement(nfoxml, 'thumb') image.text = toUnicode(image_url) - fanart = SubElement(nfoxml, 'fanart') - for image_url in movie_info['images']['backdrop_original']: - image = SubElement(fanart, 'thumb') - image.text = toUnicode(image_url) + + image_types = [ + ('fanart', 'backdrop_original'), + ('banner', 'banner'), + ('discart', 'disc_art'), + ('logo', 'logo'), + ('clearart', 'clear_art'), + ('landscape', 'landscape'), + ('extrathumb', 'extra_thumbs'), + ('extrafanart', 'extra_fanart'), + ] + + for image_type in image_types: + sub, type = image_type + + sub_element = SubElement(nfoxml, sub) + for image_url in movie_info['images'][type]: + image = SubElement(sub_element, 'thumb') + image.text = toUnicode(image_url) # Add trailer if found trailer_found = False @@ -239,6 +278,92 @@ config = [{ 'default': '%s.tbn', 'advanced': True, }, + { + 'name': 'meta_banner', + 'label': 'Banner', + 'default': False, + 'type': 'bool' + }, + { + 'name': 'meta_banner_name', + 'label': 'Banner filename', + 'default': 'banner.jpg', + 'advanced': True, + }, + { + 'name': 'meta_clear_art', + 'label': 'ClearArt', + 'default': False, + 'type': 'bool' + }, + { + 'name': 'meta_clear_art_name', + 'label': 'ClearArt filename', + 'default': 'clearart.png', + 'advanced': True, + }, + { + 'name': 'meta_disc_art', + 'label': 'DiscArt', + 'default': False, + 'type': 'bool' + }, + { + 'name': 'meta_disc_art_name', + 'label': 'DiscArt filename', + 'default': 'disc.png', + 'advanced': True, + }, + { + 'name': 'meta_landscape', + 'label': 'Landscape', + 'default': False, + 'type': 'bool' + }, + { + 'name': 'meta_landscape_name', + 'label': 'Landscape filename', + 'default': 'landscape.jpg', + 'advanced': True, + }, + { + 
'name': 'meta_logo', + 'label': 'ClearLogo', + 'default': False, + 'type': 'bool' + }, + { + 'name': 'meta_logo_name', + 'label': 'ClearLogo filename', + 'default': 'logo.png', + 'advanced': True, + }, + { + 'name': 'meta_extra_thumbs', + 'label': 'Extrathumbs', + 'default': False, + 'type': 'bool' + }, + { + 'name': 'meta_extra_thumbs_name', + 'label': 'Extrathumbs filename', + 'description': '<i> is the image number, and must be included to have multiple images', + 'default': 'extrathumbs/thumb.jpg', + 'advanced': True + }, + { + 'name': 'meta_extra_fanart', + 'label': 'Extrafanart', + 'default': False, + 'type': 'bool' + }, + { + 'name': 'meta_extra_fanart_name', + 'label': 'Extrafanart filename', + 'default': 'extrafanart/extrafanart.jpg', + 'description': '<i> is the image number, and must be included to have multiple images', + 'advanced': True + } ], }, ], diff --git a/couchpotato/core/media/movie/providers/torrent/publichd.py b/couchpotato/core/media/movie/providers/torrent/publichd.py deleted file mode 100644 index 9fed4619..00000000 --- a/couchpotato/core/media/movie/providers/torrent/publichd.py +++ /dev/null @@ -1,14 +0,0 @@ -from couchpotato.core.logger import CPLog -from couchpotato.core.event import fireEvent -from couchpotato.core.media._base.providers.torrent.publichd import Base -from couchpotato.core.media.movie.providers.base import MovieProvider - -log = CPLog(__name__) - -autoload = 'PublicHD' - - -class PublicHD(MovieProvider, Base): - - def buildUrl(self, media): - return fireEvent('library.query', media, single = True).replace(':', '') diff --git a/couchpotato/core/media/movie/providers/torrent/sceneaccess.py b/couchpotato/core/media/movie/providers/torrent/sceneaccess.py index 99157d73..579103af 100644 --- a/couchpotato/core/media/movie/providers/torrent/sceneaccess.py +++ b/couchpotato/core/media/movie/providers/torrent/sceneaccess.py @@ -1,5 +1,4 @@ from couchpotato.core.helpers.encoding import tryUrlencode -from couchpotato.core.event 
import fireEvent from couchpotato.core.logger import CPLog from couchpotato.core.media._base.providers.torrent.sceneaccess import Base from couchpotato.core.media.movie.providers.base import MovieProvider diff --git a/couchpotato/core/media/movie/providers/torrent/torrentleech.py b/couchpotato/core/media/movie/providers/torrent/torrentleech.py index 9ab1580e..191ceba8 100644 --- a/couchpotato/core/media/movie/providers/torrent/torrentleech.py +++ b/couchpotato/core/media/movie/providers/torrent/torrentleech.py @@ -1,4 +1,3 @@ -from couchpotato import fireEvent from couchpotato.core.helpers.encoding import tryUrlencode from couchpotato.core.logger import CPLog from couchpotato.core.media._base.providers.torrent.torrentleech import Base @@ -21,8 +20,8 @@ class TorrentLeech(MovieProvider, Base): ([12], ['dvdr']), ] - def buildUrl(self, media, quality): + def buildUrl(self, title, media, quality): return ( - tryUrlencode(fireEvent('library.query', media, single = True)), + tryUrlencode(title.replace(':', '')), self.getCatId(quality)[0] ) diff --git a/couchpotato/core/media/movie/providers/trailer/base.py b/couchpotato/core/media/movie/providers/trailer/base.py index c1a6f71d..9cd1dbe6 100644 --- a/couchpotato/core/media/movie/providers/trailer/base.py +++ b/couchpotato/core/media/movie/providers/trailer/base.py @@ -11,3 +11,6 @@ class TrailerProvider(Provider): def __init__(self): addEvent('trailer.search', self.search) + + def search(self, *args, **kwargs): + pass diff --git a/couchpotato/core/media/movie/providers/trailer/hdtrailers.py b/couchpotato/core/media/movie/providers/trailer/hdtrailers.py index ad040a3b..828f017f 100644 --- a/couchpotato/core/media/movie/providers/trailer/hdtrailers.py +++ b/couchpotato/core/media/movie/providers/trailer/hdtrailers.py @@ -1,5 +1,4 @@ from string import digits, ascii_letters -from urllib2 import HTTPError import re from bs4 import SoupStrainer, BeautifulSoup @@ -7,6 +6,7 @@ from couchpotato.core.helpers.encoding import 
tryUrlencode from couchpotato.core.helpers.variable import mergeDicts, getTitle from couchpotato.core.logger import CPLog from couchpotato.core.media.movie.providers.trailer.base import TrailerProvider +from requests import HTTPError log = CPLog(__name__) @@ -21,6 +21,7 @@ class HDTrailers(TrailerProvider): 'backup': 'http://www.hd-trailers.net/blog/', } providers = ['apple.ico', 'yahoo.ico', 'moviefone.ico', 'myspace.ico', 'favicon.ico'] + only_tables_tags = SoupStrainer('table') def search(self, group): @@ -67,8 +68,7 @@ class HDTrailers(TrailerProvider): return results try: - tables = SoupStrainer('div') - html = BeautifulSoup(data, parse_only = tables) + html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags) result_table = html.find_all('h2', text = re.compile(movie_name)) for h2 in result_table: @@ -90,8 +90,7 @@ class HDTrailers(TrailerProvider): results = {'480p':[], '720p':[], '1080p':[]} try: - tables = SoupStrainer('table') - html = BeautifulSoup(data, parse_only = tables) + html = BeautifulSoup(data, 'html.parser', parse_only = self.only_tables_tags) result_table = html.find('table', attrs = {'class':'bottomTable'}) for tr in result_table.find_all('tr'): diff --git a/couchpotato/core/media/movie/searcher.py b/couchpotato/core/media/movie/searcher.py index a5b1f477..7d92c57e 100644 --- a/couchpotato/core/media/movie/searcher.py +++ b/couchpotato/core/media/movie/searcher.py @@ -58,13 +58,13 @@ class MovieSearcher(SearcherBase, MovieTypeBase): def searchAllView(self, **kwargs): - fireEventAsync('movie.searcher.all') + fireEventAsync('movie.searcher.all', manual = True) return { 'success': not self.in_progress } - def searchAll(self): + def searchAll(self, manual = False): if self.in_progress: log.info('Search already in progress') @@ -91,7 +91,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase): media = fireEvent('media.get', media_id, single = True) try: - self.single(media, search_protocols) + self.single(media, search_protocols, 
manual = manual) except IndexError: log.error('Forcing library update for %s, if you see this often, please report: %s', (getIdentifier(media), traceback.format_exc())) fireEvent('movie.update_info', media_id) @@ -109,7 +109,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase): self.in_progress = False - def single(self, movie, search_protocols = None, manual = False): + def single(self, movie, search_protocols = None, manual = False, force_download = False): # Find out search type try: @@ -120,30 +120,46 @@ class MovieSearcher(SearcherBase, MovieTypeBase): if not movie['profile_id'] or (movie['status'] == 'done' and not manual): log.debug('Movie doesn\'t have a profile or already done, assuming in manage tab.') + fireEvent('media.restatus', movie['_id'], single = True) return + default_title = getTitle(movie) + if not default_title: + log.error('No proper info found for movie, removing it from library to stop it from causing more issues.') + fireEvent('media.delete', movie['_id'], single = True) + return + + # Update media status and check if it is still not done (due to the stop searching after feature + if fireEvent('media.restatus', movie['_id'], single = True) == 'done': + log.debug('No better quality found, marking movie %s as done.', default_title) + pre_releases = fireEvent('quality.pre_releases', single = True) release_dates = fireEvent('movie.update_release_dates', movie['_id'], merge = True) found_releases = [] previous_releases = movie.get('releases', []) too_early_to_search = [] - - default_title = getTitle(movie) - if not default_title: - log.error('No proper info found for movie, removing it from library to cause it from having more issues.') - fireEvent('media.delete', movie['_id'], single = True) - return + outside_eta_results = 0 + alway_search = self.conf('always_search') + ignore_eta = manual + total_result_count = 0 fireEvent('notify.frontend', type = 'movie.searcher.started', data = {'_id': movie['_id']}, message = 'Searching for "%s"' % 
default_title) + # Ignore eta once every 7 days + if not alway_search: + prop_name = 'last_ignored_eta.%s' % movie['_id'] + last_ignored_eta = float(Env.prop(prop_name, default = 0)) + if last_ignored_eta > time.time() - 604800: + ignore_eta = True + Env.prop(prop_name, value = time.time()) + db = get_db() profile = db.get('id', movie['profile_id']) ret = False - index = 0 - for q_identifier in profile.get('qualities'): + for index, q_identifier in enumerate(profile.get('qualities', [])): quality_custom = { 'index': index, 'quality': q_identifier, @@ -152,11 +168,13 @@ class MovieSearcher(SearcherBase, MovieTypeBase): '3d': profile['3d'][index] if profile.get('3d') else False } - index += 1 - - if not self.conf('always_search') and not self.couldBeReleased(q_identifier in pre_releases, release_dates, movie['info']['year']): + could_not_be_released = not self.couldBeReleased(q_identifier in pre_releases, release_dates, movie['info']['year']) + if not alway_search and could_not_be_released: too_early_to_search.append(q_identifier) - continue + + # Skip release, if ETA isn't ignored + if not ignore_eta: + continue has_better_quality = 0 @@ -173,19 +191,24 @@ class MovieSearcher(SearcherBase, MovieTypeBase): # Don't search for quality lower then already available. 
if has_better_quality > 0: log.info('Better quality (%s) already available or snatched for %s', (q_identifier, default_title)) - fireEvent('media.restatus', movie['_id']) + fireEvent('media.restatus', movie['_id'], single = True) break quality = fireEvent('quality.single', identifier = q_identifier, single = True) - log.info('Search for %s in %s', (default_title, quality['label'])) + log.info('Search for %s in %s%s', (default_title, quality['label'], ' ignoring ETA' if alway_search or ignore_eta else '')) # Extend quality with profile customs quality['custom'] = quality_custom results = fireEvent('searcher.search', search_protocols, movie, quality, single = True) or [] - if len(results) == 0: + results_count = len(results) + total_result_count += results_count + if results_count == 0: log.debug('Nothing found for %s in %s', (default_title, quality['label'])) + # Keep track of releases found outside ETA window + outside_eta_results += results_count if could_not_be_released else 0 + # Check if movie isn't deleted while searching if not fireEvent('media.get', movie.get('_id'), single = True): break @@ -193,8 +216,13 @@ class MovieSearcher(SearcherBase, MovieTypeBase): # Add them to this movie releases list found_releases += fireEvent('release.create_from_search', results, movie, quality, single = True) + # Don't trigger download, but notify user of available releases + if could_not_be_released: + if results_count > 0: + log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title)) + # Try find a valid result and download it - if fireEvent('release.try_download_result', results, movie, quality_custom, manual, single = True): + if (force_download or not could_not_be_released or alway_search) and fireEvent('release.try_download_result', results, movie, quality_custom, single = True): ret = True # Remove releases that aren't found anymore @@ -211,9 +239,19 @@ class MovieSearcher(SearcherBase, MovieTypeBase): if self.shuttingDown() or 
ret: break + if total_result_count > 0: + fireEvent('media.tag', movie['_id'], 'recent', single = True) + if len(too_early_to_search) > 0: log.info2('Too early to search for %s, %s', (too_early_to_search, default_title)) + if outside_eta_results > 0: + message = 'Found %s releases for "%s" before ETA. Select and download via the dashboard.' % (outside_eta_results, default_title) + log.info(message) + + if not manual: + fireEvent('media.available', message = message, data = {}) + fireEvent('notify.frontend', type = 'movie.searcher.ended', data = {'_id': movie['_id']}) return ret @@ -334,13 +372,13 @@ class MovieSearcher(SearcherBase, MovieTypeBase): def tryNextReleaseView(self, media_id = None, **kwargs): - trynext = self.tryNextRelease(media_id, manual = True) + trynext = self.tryNextRelease(media_id, manual = True, force_download = True) return { 'success': trynext } - def tryNextRelease(self, media_id, manual = False): + def tryNextRelease(self, media_id, manual = False, force_download = False): try: db = get_db() @@ -352,7 +390,7 @@ class MovieSearcher(SearcherBase, MovieTypeBase): movie_dict = fireEvent('media.get', media_id, single = True) log.info('Trying next release for: %s', getTitle(movie_dict)) - self.single(movie_dict, manual = manual) + self.single(movie_dict, manual = manual, force_download = force_download) return True diff --git a/couchpotato/core/media/movie/suggestion/main.py b/couchpotato/core/media/movie/suggestion/main.py index 5db403f6..146a6a06 100644 --- a/couchpotato/core/media/movie/suggestion/main.py +++ b/couchpotato/core/media/movie/suggestion/main.py @@ -1,4 +1,3 @@ -from couchpotato import get_db from couchpotato.api import addApiView from couchpotato.core.event import fireEvent from couchpotato.core.helpers.variable import splitString, removeDuplicate, getIdentifier @@ -84,7 +83,6 @@ class Suggestion(Plugin): # Get new results and add them if len(new_suggestions) - 1 < limit: - db = get_db() active_movies = 
fireEvent('media.with_status', ['active', 'done'], single = True) movies = [getIdentifier(x) for x in active_movies] movies.extend(seen) diff --git a/couchpotato/core/notifications/base.py b/couchpotato/core/notifications/base.py index 0bfb337c..725704e0 100644 --- a/couchpotato/core/notifications/base.py +++ b/couchpotato/core/notifications/base.py @@ -15,6 +15,7 @@ class Notification(Provider): test_message = 'ZOMG Lazors Pewpewpew!' listen_to = [ + 'media.available', 'renamer.after', 'movie.snatched', 'updater.available', 'updater.updated', 'core.message.important', diff --git a/couchpotato/core/notifications/boxcar.py b/couchpotato/core/notifications/boxcar.py deleted file mode 100644 index a7acc882..00000000 --- a/couchpotato/core/notifications/boxcar.py +++ /dev/null @@ -1,69 +0,0 @@ -import time - -from couchpotato.core.helpers.encoding import toUnicode -from couchpotato.core.logger import CPLog -from couchpotato.core.notifications.base import Notification - - -log = CPLog(__name__) - -autoload = 'Boxcar' - - -class Boxcar(Notification): - - url = 'https://boxcar.io/devices/providers/7MNNXY3UIzVBwvzkKwkC/notifications' - - def notify(self, message = '', data = None, listener = None): - if not data: data = {} - - try: - message = message.strip() - - data = { - 'email': self.conf('email'), - 'notification[from_screen_name]': self.default_title, - 'notification[message]': toUnicode(message), - 'notification[from_remote_service_id]': int(time.time()), - } - - self.urlopen(self.url, data = data) - except: - log.error('Check your email and added services on boxcar.io') - return False - - log.info('Boxcar notification successful.') - return True - - def isEnabled(self): - return super(Boxcar, self).isEnabled() and self.conf('email') - - -config = [{ - 'name': 'boxcar', - 'groups': [ - { - 'tab': 'notifications', - 'list': 'notification_providers', - 'name': 'boxcar', - 'options': [ - { - 'name': 'enabled', - 'default': 0, - 'type': 'enabler', - }, - { - 'name': 
'email', - 'description': 'Your Boxcar registration emailaddress.' - }, - { - 'name': 'on_snatch', - 'default': 0, - 'type': 'bool', - 'advanced': True, - 'description': 'Also send message when movie is snatched.', - }, - ], - } - ], -}] diff --git a/couchpotato/core/notifications/core/main.py b/couchpotato/core/notifications/core/main.py index 0cd8e579..5190218e 100644 --- a/couchpotato/core/notifications/core/main.py +++ b/couchpotato/core/notifications/core/main.py @@ -28,6 +28,7 @@ class CoreNotifier(Notification): m_lock = None listen_to = [ + 'media.available', 'renamer.after', 'movie.snatched', 'updater.available', 'updater.updated', 'core.message', 'core.message.important', @@ -258,14 +259,14 @@ class CoreNotifier(Notification): messages = [] - # Get unread + # Get last message if init: db = get_db() - notifications = db.all('notification_unread', with_doc = True) + notifications = db.all('notification', with_doc = True) for n in notifications: - if n['doc'].get('time') > (time.time() - 259200): + if n['doc'].get('time') > (time.time() - 604800): messages.append(n['doc']) return { diff --git a/couchpotato/core/notifications/core/static/notification.js b/couchpotato/core/notifications/core/static/notification.js index 3b2bc618..a77f19f9 100644 --- a/couchpotato/core/notifications/core/static/notification.js +++ b/couchpotato/core/notifications/core/static/notification.js @@ -122,9 +122,12 @@ var NotificationBase = new Class({ startPoll: function(){ var self = this; - if(self.stopped || (self.request && self.request.isRunning())) + if(self.stopped) return; + if(self.request && self.request.isRunning()) + self.request.cancel(); + self.request = Api.request('nonblock/notification.listener', { 'onSuccess': function(json){ self.processData(json, false) @@ -149,7 +152,7 @@ var NotificationBase = new Class({ var self = this; // Process data - if(json){ + if(json && json.result){ Array.each(json.result, function(result){ App.trigger(result._t || result.type, 
[result]); if(result.message && result.read === undefined && !init) diff --git a/couchpotato/core/notifications/nmj.py b/couchpotato/core/notifications/nmj.py index 7579cafa..665837f1 100644 --- a/couchpotato/core/notifications/nmj.py +++ b/couchpotato/core/notifications/nmj.py @@ -22,10 +22,11 @@ class NMJ(Notification): # noinspection PyMissingConstructor def __init__(self): - addEvent('renamer.after', self.addToLibrary) addApiView(self.testNotifyName(), self.test) addApiView('notify.nmj.auto_config', self.autoConfig) + addEvent('renamer.after', self.addToLibrary) + def autoConfig(self, host = 'localhost', **kwargs): mount = '' diff --git a/couchpotato/core/notifications/pushbullet.py b/couchpotato/core/notifications/pushbullet.py index 56cf2288..e9d4605c 100644 --- a/couchpotato/core/notifications/pushbullet.py +++ b/couchpotato/core/notifications/pushbullet.py @@ -14,7 +14,7 @@ autoload = 'Pushbullet' class Pushbullet(Notification): - url = 'https://api.pushbullet.com/api/%s' + url = 'https://api.pushbullet.com/v2/%s' def notify(self, message = '', data = None, listener = None): if not data: data = {} @@ -25,11 +25,7 @@ class Pushbullet(Notification): # Get all the device IDs linked to this user if not len(devices): - response = self.request('devices') - if not response: - return False - - devices += [device.get('id') for device in response['devices']] + devices = [None] successful = 0 for device in devices: @@ -88,7 +84,8 @@ config = [{ }, { 'name': 'api_key', - 'label': 'User API Key' + 'label': 'Access Token', + 'description': 'Can be found on Account Settings', }, { 'name': 'devices', diff --git a/couchpotato/core/notifications/pushover.py b/couchpotato/core/notifications/pushover.py index ea4b00a3..46dc0ad8 100644 --- a/couchpotato/core/notifications/pushover.py +++ b/couchpotato/core/notifications/pushover.py @@ -1,7 +1,7 @@ from httplib import HTTPSConnection from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode -from 
couchpotato.core.helpers.variable import getTitle +from couchpotato.core.helpers.variable import getTitle, getIdentifier from couchpotato.core.logger import CPLog from couchpotato.core.notifications.base import Notification @@ -13,7 +13,6 @@ autoload = 'Pushover' class Pushover(Notification): - app_token = 'YkxHMYDZp285L265L3IwH3LmzkTaCy' def notify(self, message = '', data = None, listener = None): if not data: data = {} @@ -22,15 +21,15 @@ class Pushover(Notification): api_data = { 'user': self.conf('user_key'), - 'token': self.app_token, + 'token': self.conf('api_token'), 'message': toUnicode(message), 'priority': self.conf('priority'), 'sound': self.conf('sound'), } - if data and data.get('identifier'): + if data and getIdentifier(data): api_data.update({ - 'url': toUnicode('http://www.imdb.com/title/%s/' % data['identifier']), + 'url': toUnicode('http://www.imdb.com/title/%s/' % getIdentifier(data)), 'url_title': toUnicode('%s on IMDb' % getTitle(data)), }) @@ -49,7 +48,7 @@ class Pushover(Notification): log.error('Pushover auth failed: %s', response.reason) return False else: - log.error('Pushover notification failed.') + log.error('Pushover notification failed: %s', request_status) return False @@ -70,6 +69,12 @@ config = [{ 'name': 'user_key', 'description': 'Register on pushover.net to get one.' 
}, + { + 'name': 'api_token', + 'description': 'Register on pushover.net to get one.', + 'advanced': True, + 'default': 'YkxHMYDZp285L265L3IwH3LmzkTaCy', + }, { 'name': 'priority', 'default': 0, diff --git a/couchpotato/core/notifications/synoindex.py b/couchpotato/core/notifications/synoindex.py index cd4ff8f9..b14e1a03 100644 --- a/couchpotato/core/notifications/synoindex.py +++ b/couchpotato/core/notifications/synoindex.py @@ -1,6 +1,7 @@ import os import subprocess +from couchpotato.api import addApiView from couchpotato.core.event import addEvent from couchpotato.core.logger import CPLog from couchpotato.core.notifications.base import Notification @@ -16,7 +17,8 @@ class Synoindex(Notification): index_path = '/usr/syno/bin/synoindex' def __init__(self): - super(Synoindex, self).__init__() + addApiView(self.testNotifyName(), self.test) + addEvent('renamer.after', self.addToLibrary) def addToLibrary(self, message = None, group = None): diff --git a/couchpotato/core/notifications/xbmc.py b/couchpotato/core/notifications/xbmc.py index 28439a75..eb0b6996 100644 --- a/couchpotato/core/notifications/xbmc.py +++ b/couchpotato/core/notifications/xbmc.py @@ -8,7 +8,7 @@ from couchpotato.core.helpers.variable import splitString, getTitle from couchpotato.core.logger import CPLog from couchpotato.core.notifications.base import Notification import requests -from requests.packages.urllib3.exceptions import MaxRetryError +from requests.packages.urllib3.exceptions import MaxRetryError, ConnectionError log = CPLog(__name__) @@ -36,7 +36,7 @@ class XBMC(Notification): if self.use_json_notifications.get(host): calls = [ - ('GUI.ShowNotification', {'title': self.default_title, 'message': message, 'image': self.getNotificationImage('small')}), + ('GUI.ShowNotification', None, {'title': self.default_title, 'message': message, 'image': self.getNotificationImage('small')}), ] if data and data.get('destination_dir') and (not self.conf('only_first') or hosts.index(host) == 0): @@ -44,7 
+44,7 @@ class XBMC(Notification): if not self.conf('force_full_scan') and (self.conf('remote_dir_scan') or socket.getfqdn('localhost') == socket.getfqdn(host.split(':')[0])): param = {'directory': data['destination_dir']} - calls.append(('VideoLibrary.Scan', param)) + calls.append(('VideoLibrary.Scan', None, param)) max_successful += len(calls) response = self.request(host, calls) @@ -52,7 +52,7 @@ class XBMC(Notification): response = self.notifyXBMCnoJSON(host, {'title': self.default_title, 'message': message}) if data and data.get('destination_dir') and (not self.conf('only_first') or hosts.index(host) == 0): - response += self.request(host, [('VideoLibrary.Scan', {})]) + response += self.request(host, [('VideoLibrary.Scan', None, {})]) max_successful += 1 max_successful += 1 @@ -75,7 +75,7 @@ class XBMC(Notification): # XBMC JSON-RPC version request response = self.request(host, [ - ('JSONRPC.Version', {}) + ('JSONRPC.Version', None, {}) ]) for result in response: if result.get('result') and type(result['result']['version']).__name__ == 'int': @@ -112,7 +112,7 @@ class XBMC(Notification): self.use_json_notifications[host] = True # send the text message - resp = self.request(host, [('GUI.ShowNotification', {'title':self.default_title, 'message':message, 'image': self.getNotificationImage('small')})]) + resp = self.request(host, [('GUI.ShowNotification', None, {'title':self.default_title, 'message':message, 'image': self.getNotificationImage('small')})]) for r in resp: if r.get('result') and r['result'] == 'OK': log.debug('Message delivered successfully!') @@ -172,7 +172,7 @@ class XBMC(Notification): # manually fake expected response array return [{'result': 'Error'}] - except (MaxRetryError, requests.exceptions.Timeout): + except (MaxRetryError, requests.exceptions.Timeout, ConnectionError): log.info2('Couldn\'t send request to XBMC, assuming it\'s turned off') return [{'result': 'Error'}] except: @@ -184,12 +184,13 @@ class XBMC(Notification): data = [] for 
req in do_requests: - method, kwargs = req + method, id, kwargs = req + data.append({ 'method': method, 'params': kwargs, 'jsonrpc': '2.0', - 'id': method, + 'id': id if id else method, }) data = json.dumps(data) @@ -223,7 +224,7 @@ config = [{ 'list': 'notification_providers', 'name': 'xbmc', 'label': 'XBMC', - 'description': 'v11 (Eden) and v12 (Frodo)', + 'description': 'v11 (Eden), v12 (Frodo), v13 (Gotham)', 'options': [ { 'name': 'enabled', @@ -256,7 +257,7 @@ config = [{ 'default': 0, 'type': 'bool', 'advanced': True, - 'description': 'Only scan new movie folder at remote XBMC servers. Works if movie location is the same.', + 'description': ('Only scan new movie folder at remote XBMC servers.', 'Useful if the XBMC path is different from the path CPS uses.'), }, { 'name': 'force_full_scan', @@ -264,11 +265,11 @@ config = [{ 'default': 0, 'type': 'bool', 'advanced': True, - 'description': 'Do a full scan instead of only the new movie. Useful if the XBMC path is different from the path CPS uses.', + 'description': ('Do a full scan instead of only the new movie.', 'Useful if the XBMC path is different from the path CPS uses.'), }, { 'name': 'on_snatch', - 'default': 0, + 'default': False, 'type': 'bool', 'advanced': True, 'description': 'Also send message when movie is snatched.', diff --git a/couchpotato/core/notifications/xmpp.py b/couchpotato/core/notifications/xmpp_.py similarity index 100% rename from couchpotato/core/notifications/xmpp.py rename to couchpotato/core/notifications/xmpp_.py diff --git a/couchpotato/core/plugins/base.py b/couchpotato/core/plugins/base.py index 7cc4d1c6..bc66123f 100644 --- a/couchpotato/core/plugins/base.py +++ b/couchpotato/core/plugins/base.py @@ -1,3 +1,4 @@ +from urllib import quote from urlparse import urlparse import glob import inspect @@ -5,7 +6,6 @@ import os.path import re import time import traceback -import urllib2 from couchpotato.core.event import fireEvent, addEvent from couchpotato.core.helpers.encoding import 
ss, toSafeString, \ @@ -16,7 +16,6 @@ from couchpotato.environment import Env import requests from requests.packages.urllib3 import Timeout from requests.packages.urllib3.exceptions import MaxRetryError -from scandir import scandir from tornado import template from tornado.web import StaticFileHandler @@ -41,7 +40,6 @@ class Plugin(object): http_time_between_calls = 0 http_failed_request = {} http_failed_disabled = {} - http_opener = requests.Session() def __new__(cls, *args, **kwargs): new_plugin = super(Plugin, cls).__new__(cls) @@ -141,19 +139,25 @@ class Plugin(object): return False - def deleteEmptyFolder(self, folder, show_error = True): + def deleteEmptyFolder(self, folder, show_error = True, only_clean = None): folder = sp(folder) - for root, dirs, files in scandir.walk(folder): + for item in os.listdir(folder): + full_folder = os.path.join(folder, item) - for dir_name in dirs: - full_path = os.path.join(root, dir_name) - if len(os.listdir(full_path)) == 0: - try: - os.rmdir(full_path) - except: - if show_error: - log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc())) + if not only_clean or (item in only_clean and os.path.isdir(full_folder)): + + for root, dirs, files in os.walk(full_folder): + + for dir_name in dirs: + full_path = os.path.join(root, dir_name) + + if len(os.listdir(full_path)) == 0: + try: + os.rmdir(full_path) + except: + if show_error: + log.error('Couldn\'t remove empty directory %s: %s', (full_path, traceback.format_exc())) try: os.rmdir(folder) @@ -163,7 +167,7 @@ class Plugin(object): # http request def urlopen(self, url, timeout = 30, data = None, headers = None, files = None, show_error = True): - url = urllib2.quote(ss(url), safe = "%/:=&?~#+!$,;'@()*[]") + url = quote(ss(url), safe = "%/:=&?~#+!$,;'@()*[]") if not headers: headers = {} if not data: data = {} @@ -179,7 +183,7 @@ class Plugin(object): headers['Connection'] = headers.get('Connection', 'keep-alive') headers['Cache-Control'] = 
headers.get('Cache-Control', 'max-age=0') - r = self.http_opener + r = Env.get('http_opener') # Don't try for failed requests if self.http_failed_disabled.get(host, 0) > 0: @@ -201,11 +205,12 @@ class Plugin(object): 'data': data if len(data) > 0 else None, 'timeout': timeout, 'files': files, + 'verify': False, #verify_ssl, Disable for now as to many wrongly implemented certificates.. } method = 'post' if len(data) > 0 or files else 'get' log.info('Opening url: %s %s, data: %s', (method, url, [x for x in data.keys()] if isinstance(data, dict) else 'with data')) - response = r.request(method, url, verify = False, **kwargs) + response = r.request(method, url, **kwargs) if response.status_code == requests.codes.ok: data = response.content @@ -258,7 +263,7 @@ class Plugin(object): def afterCall(self, handler): self.isRunning('%s.%s' % (self.getName(), handler.__name__), False) - def doShutdown(self): + def doShutdown(self, *args, **kwargs): self.shuttingDown(True) return True diff --git a/couchpotato/core/plugins/browser.py b/couchpotato/core/plugins/browser.py index 6880f3b4..013a4823 100644 --- a/couchpotato/core/plugins/browser.py +++ b/couchpotato/core/plugins/browser.py @@ -3,6 +3,7 @@ import os import string from couchpotato.api import addApiView +from couchpotato.core.helpers.encoding import sp from couchpotato.core.helpers.variable import getUserDir from couchpotato.core.plugins.base import Plugin import six @@ -50,6 +51,7 @@ class FileBrowser(Plugin): path = '/' dirs = [] + path = sp(path) for f in os.listdir(path): p = os.path.join(path, f) if os.path.isdir(p) and ((self.is_hidden(p) and bool(int(show_hidden))) or not self.is_hidden(p)): diff --git a/couchpotato/core/plugins/file.py b/couchpotato/core/plugins/file.py index 92d85c50..80c073fc 100644 --- a/couchpotato/core/plugins/file.py +++ b/couchpotato/core/plugins/file.py @@ -5,11 +5,10 @@ from couchpotato import get_db from couchpotato.api import addApiView from couchpotato.core.event import addEvent, 
fireEvent from couchpotato.core.helpers.encoding import toUnicode -from couchpotato.core.helpers.variable import md5, getExt +from couchpotato.core.helpers.variable import md5, getExt, isSubFolder from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from couchpotato.environment import Env -from scandir import scandir from tornado.web import StaticFileHandler @@ -33,6 +32,8 @@ class FileManager(Plugin): fireEvent('schedule.interval', 'file.cleanup', self.cleanup, hours = 24) + addEvent('app.test', self.doSubfolderTest) + def cleanup(self): # Wait a bit after starting before cleanup @@ -49,9 +50,9 @@ class FileManager(Plugin): for x in file_dict.keys(): files.extend(file_dict[x]) - for f in scandir.scandir(cache_dir): - if os.path.splitext(f.name)[1] in ['.png', '.jpg', '.jpeg']: - file_path = os.path.join(cache_dir, f.name) + for f in os.listdir(cache_dir): + if os.path.splitext(f)[1] in ['.png', '.jpg', '.jpeg']: + file_path = os.path.join(cache_dir, f) if toUnicode(file_path) not in files: os.remove(file_path) except: @@ -77,3 +78,33 @@ class FileManager(Plugin): self.createFile(dest, filedata, binary = True) return dest + + def doSubfolderTest(self): + + tests = { + ('/test/subfolder', '/test/sub'): False, + ('/test/sub/folder', '/test/sub'): True, + ('/test/sub/folder', '/test/sub2'): False, + ('/sub/fold', '/test/sub/fold'): False, + ('/sub/fold', '/test/sub/folder'): False, + ('/opt/couchpotato', '/var/opt/couchpotato'): False, + ('/var/opt', '/var/opt/couchpotato'): False, + ('/CapItaLs/Are/OK', '/CapItaLs/Are/OK'): True, + ('/CapItaLs/Are/OK', '/CapItaLs/Are/OK2'): False, + ('/capitals/are/not/OK', '/capitals/are/NOT'): False, + ('\\\\Mounted\\Volume\\Test', '\\\\Mounted\\Volume'): True, + ('C:\\\\test\\path', 'C:\\\\test2'): False + } + + failed = 0 + for x in tests: + if isSubFolder(x[0], x[1]) is not tests[x]: + log.error('Failed subfolder test %s %s', x) + failed += 1 + + if failed > 0: + log.error('Subfolder test 
failed %s tests', failed) + else: + log.info('Subfolder test succeeded') + + return failed == 0 \ No newline at end of file diff --git a/couchpotato/core/plugins/log/main.py b/couchpotato/core/plugins/log/main.py index 5206acd6..003529b1 100644 --- a/couchpotato/core/plugins/log/main.py +++ b/couchpotato/core/plugins/log/main.py @@ -134,8 +134,8 @@ class Logging(Plugin): logs_raw = toUnicode(log_content).split('[0m\n') logs = [] - for log in logs_raw: - split = splitString(log, '\x1b') + for log_line in logs_raw: + split = splitString(log_line, '\x1b') if split: try: date, time, log_type = splitString(split[0], ' ') diff --git a/couchpotato/core/plugins/log/static/log.css b/couchpotato/core/plugins/log/static/log.css index 8a809e32..c7aace61 100644 --- a/couchpotato/core/plugins/log/static/log.css +++ b/couchpotato/core/plugins/log/static/log.css @@ -23,7 +23,7 @@ cursor: pointer; } - .page.log .nav li:hover:not(.active, .filter) { + .page.log .nav li:hover:not(.active):not(.filter) { background: rgba(255, 255, 255, 0.1); } diff --git a/couchpotato/core/plugins/log/static/log.js b/couchpotato/core/plugins/log/static/log.js index d39a20a9..11acb5ca 100644 --- a/couchpotato/core/plugins/log/static/log.js +++ b/couchpotato/core/plugins/log/static/log.js @@ -17,7 +17,7 @@ Page.Log = new Class({ Movie(s) I have this with: ...\n\ Quality of the movie being searched: ...\n\ Providers I use: ...\n\ -Version of CouchPotato: ...\n\ +Version of CouchPotato: {version}\n\ Running on: ...\n\ \n\ ### Logs:\n\ @@ -207,7 +207,11 @@ Running on: ...\n\ showReport: function(text){ var self = this, - body = self.report_text.replace('{issue}', text); + version = Updater.getInfo(), + body = self.report_text + .replace('{issue}', text) + .replace('{version}', version ? 
version.version.repr : '...'), + textarea; var overlay = new Element('div.report', { 'method': 'post', @@ -240,7 +244,7 @@ Running on: ...\n\ 'text': ' before posting, then copy the text below' }) ), - new Element('textarea', { + textarea = new Element('textarea', { 'text': body, 'events': { 'click': function(){ @@ -251,7 +255,17 @@ Running on: ...\n\ new Element('a.button', { 'target': '_blank', 'text': 'Create a new issue on GitHub with the text above', - 'href': 'https://github.com/RuudBurger/CouchPotatoServer/issues/new?body=' + (body.length < 2000 ? encodeURIComponent(body) : 'Paste the text here') + 'href': 'https://github.com/RuudBurger/CouchPotatoServer/issues/new', + 'events': { + 'click': function(e){ + (e).stop(); + + var body = textarea.get('value'), + bdy = '?body=' + (body.length < 2000 ? encodeURIComponent(body) : 'Paste the text here'), + win = window.open(e.target.get('href') + bdy, '_blank'); + win.focus(); + } + } }) ) ); diff --git a/couchpotato/core/plugins/manage.py b/couchpotato/core/plugins/manage.py index ab29e72d..c8d53ea0 100644 --- a/couchpotato/core/plugins/manage.py +++ b/couchpotato/core/plugins/manage.py @@ -1,6 +1,4 @@ -import ctypes import os -import sys import time import traceback @@ -8,7 +6,7 @@ from couchpotato import get_db from couchpotato.api import addApiView from couchpotato.core.event import fireEvent, addEvent, fireEventAsync from couchpotato.core.helpers.encoding import sp -from couchpotato.core.helpers.variable import splitString, getTitle, tryInt, getIdentifier +from couchpotato.core.helpers.variable import splitString, getTitle, tryInt, getIdentifier, getFreeSpace from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from couchpotato.environment import Env @@ -33,7 +31,7 @@ class Manage(Plugin): # Add files after renaming def after_rename(message = None, group = None): if not group: group = {} - return self.scanFilesToLibrary(folder = group['destination_dir'], files = 
group['renamed_files']) + return self.scanFilesToLibrary(folder = group['destination_dir'], files = group['renamed_files'], release_download = group['release_download']) addEvent('renamer.after', after_rename, priority = 110) addApiView('manage.update', self.updateLibraryView, docs = { @@ -53,6 +51,20 @@ class Manage(Plugin): if not Env.get('dev') and self.conf('startup_scan'): addEvent('app.load', self.updateLibraryQuick) + addEvent('app.load', self.setCrons) + + # Enable / disable interval + addEvent('setting.save.manage.library_refresh_interval.after', self.setCrons) + + def setCrons(self): + + fireEvent('schedule.remove', 'manage.update_library') + refresh = tryInt(self.conf('library_refresh_interval')) + if refresh > 0: + fireEvent('schedule.interval', 'manage.update_library', self.updateLibrary, hours = refresh, single = True) + + return True + def getProgress(self, **kwargs): return { 'progress': self.in_progress @@ -71,7 +83,8 @@ class Manage(Plugin): return self.updateLibrary(full = False) def updateLibrary(self, full = True): - last_update = float(Env.prop('manage.last_update', default = 0)) + last_update_key = 'manage.last_update%s' % ('_full' if full else '') + last_update = float(Env.prop(last_update_key, default = 0)) if self.in_progress: log.info('Already updating library: %s', self.in_progress) @@ -122,6 +135,7 @@ class Manage(Plugin): # Get movies with done status total_movies, done_movies = fireEvent('media.list', types = 'movie', status = 'done', release_status = 'done', status_or = True, single = True) + deleted_releases = [] for done_movie in done_movies: if getIdentifier(done_movie) not in added_identifiers: fireEvent('media.delete', media_id = done_movie['_id'], delete_from = 'all') @@ -151,18 +165,24 @@ class Manage(Plugin): already_used = used_files.get(release_file) if already_used: - # delete current one - if already_used.get('last_edit', 0) < release.get('last_edit', 0): - fireEvent('release.delete', release['_id'], single = True) - # 
delete previous one - else: - fireEvent('release.delete', already_used['_id'], single = True) + release_id = release['_id'] if already_used.get('last_edit', 0) < release.get('last_edit', 0) else already_used['_id'] + if release_id not in deleted_releases: + fireEvent('release.delete', release_id, single = True) + deleted_releases.append(release_id) break else: used_files[release_file] = release del used_files - Env.prop('manage.last_update', time.time()) + # Break if CP wants to shut down + if self.shuttingDown(): + break + + if not self.shuttingDown(): + db = get_db() + db.reindex() + + Env.prop(last_update_key, time.time()) except: log.error('Failed updating library: %s', (traceback.format_exc())) @@ -192,14 +212,14 @@ class Manage(Plugin): 'to_go': total_found, }) + self.updateProgress(folder, to_go) + if group['media'] and group['identifier']: added_identifiers.append(group['identifier']) # Add it to release and update the info fireEvent('release.add', group = group, update_info = False) fireEvent('movie.update_info', identifier = group['identifier'], on_complete = self.createAfterUpdate(folder, group['identifier'])) - else: - self.updateProgress(folder) return addToLibrary @@ -210,7 +230,6 @@ class Manage(Plugin): if not self.in_progress or self.shuttingDown(): return - self.updateProgress(folder) total = self.in_progress[folder]['total'] movie_dict = fireEvent('media.get', identifier, single = True) @@ -218,10 +237,11 @@ class Manage(Plugin): return afterUpdate - def updateProgress(self, folder): + def updateProgress(self, folder, to_go): pr = self.in_progress[folder] - pr['to_go'] -= 1 + if to_go < pr['to_go']: + pr['to_go'] = to_go avg = (time.time() - pr['started']) / (pr['total'] - pr['to_go']) pr['eta'] = tryInt(avg * pr['to_go']) @@ -236,7 +256,7 @@ class Manage(Plugin): return [] - def scanFilesToLibrary(self, folder = None, files = None): + def scanFilesToLibrary(self, folder = None, files = None, release_download = None): folder = 
os.path.normpath(folder) @@ -245,34 +265,13 @@ class Manage(Plugin): if groups: for group in groups.values(): if group.get('media'): - fireEvent('release.add', group = group) + if release_download and release_download.get('release_id'): + fireEvent('release.add', group = group, update_id = release_download.get('release_id')) + else: + fireEvent('release.add', group = group) def getDiskSpace(self): - - free_space = {} - for folder in self.directories(): - - size = None - if os.path.isdir(folder): - if os.name == 'nt': - _, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), \ - ctypes.c_ulonglong() - if sys.version_info >= (3,) or isinstance(folder, unicode): - fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW #@UndefinedVariable - else: - fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA #@UndefinedVariable - ret = fun(folder, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free)) - if ret == 0: - raise ctypes.WinError() - used = total.value - free.value - return [total.value, used, free.value] - else: - s = os.statvfs(folder) - size = [s.f_blocks * s.f_frsize / (1024 * 1024), (s.f_bavail * s.f_frsize) / (1024 * 1024)] - - free_space[folder] = size - - return free_space + return getFreeSpace(self.directories()) config = [{ @@ -308,6 +307,14 @@ config = [{ 'advanced': True, 'description': 'Do a quick scan on startup. On slow systems better disable this.', }, + { + 'label': 'Full library refresh', + 'name': 'library_refresh_interval', + 'type': 'int', + 'default': 0, + 'advanced': True, + 'description': 'Do a full scan every X hours. (0 is disabled)', + }, ], }, ], diff --git a/couchpotato/core/plugins/profile/main.py b/couchpotato/core/plugins/profile/main.py index 8dc59336..489c34dd 100644 --- a/couchpotato/core/plugins/profile/main.py +++ b/couchpotato/core/plugins/profile/main.py @@ -38,9 +38,18 @@ class ProfilePlugin(Plugin): def forceDefaults(self): + db = get_db() + + # Fill qualities and profiles if they are empty somehow.. 
+ if db.count(db.all, 'profile') == 0: + + if db.count(db.all, 'quality') == 0: + fireEvent('quality.fill', single = True) + + self.fill() + # Get all active movies without profile try: - db = get_db() medias = fireEvent('media.with_status', 'active', single = True) profile_ids = [x.get('_id') for x in self.all()] @@ -79,6 +88,7 @@ class ProfilePlugin(Plugin): 'core': kwargs.get('core', False), 'qualities': [], 'wait_for': [], + 'stop_after': [], 'finish': [], '3d': [] } @@ -88,6 +98,7 @@ class ProfilePlugin(Plugin): for type in kwargs.get('types', []): profile['qualities'].append(type.get('quality')) profile['wait_for'].append(tryInt(kwargs.get('wait_for', 0))) + profile['stop_after'].append(tryInt(kwargs.get('stop_after', 0))) profile['finish'].append((tryInt(type.get('finish')) == 1) if order > 0 else True) profile['3d'].append(tryInt(type.get('3d'))) order += 1 @@ -208,6 +219,7 @@ class ProfilePlugin(Plugin): 'qualities': profile.get('qualities'), 'finish': [], 'wait_for': [], + 'stop_after': [], '3d': [] } @@ -215,6 +227,7 @@ class ProfilePlugin(Plugin): for q in profile.get('qualities'): pro['finish'].append(True) pro['wait_for'].append(0) + pro['stop_after'].append(0) pro['3d'].append(threed.pop() if threed else False) db.insert(pro) diff --git a/couchpotato/core/plugins/profile/static/profile.css b/couchpotato/core/plugins/profile/static/profile.css index 073fa573..edab8312 100644 --- a/couchpotato/core/plugins/profile/static/profile.css +++ b/couchpotato/core/plugins/profile/static/profile.css @@ -43,9 +43,8 @@ } .profile .wait_for { - position: absolute; - right: 60px; - top: 0; + padding-top: 0; + padding-bottom: 20px; } .profile .wait_for input { @@ -159,9 +158,6 @@ } #profile_ordering li { - cursor: -webkit-grab; - cursor: -moz-grab; - cursor: grab; border-bottom: 1px solid rgba(255,255,255,0.2); padding: 0 5px; } @@ -183,6 +179,9 @@ background: url('../../images/handle.png') center; width: 20px; float: right; + cursor: -webkit-grab; + cursor: 
-moz-grab; + cursor: grab; } #profile_ordering .formHint { diff --git a/couchpotato/core/plugins/profile/static/profile.js b/couchpotato/core/plugins/profile/static/profile.js index c62b137c..89f1a697 100644 --- a/couchpotato/core/plugins/profile/static/profile.js +++ b/couchpotato/core/plugins/profile/static/profile.js @@ -37,20 +37,28 @@ var Profile = new Class({ 'placeholder': 'Profile name' }) ), - new Element('div.wait_for.ctrlHolder').adopt( - new Element('span', {'text':'Wait'}), - new Element('input.inlay.xsmall', { - 'type':'text', - 'value': data.wait_for && data.wait_for.length > 0 ? data.wait_for[0] : 0 - }), - new Element('span', {'text':'day(s) for a better quality.'}) - ), new Element('div.qualities.ctrlHolder').adopt( new Element('label', {'text': 'Search for'}), self.type_container = new Element('ol.types'), new Element('div.formHint', { 'html': "Search these qualities (2 minimum), from top to bottom. Use the checkbox, to stop searching after it found this quality." }) + ), + new Element('div.wait_for.ctrlHolder').adopt( + // "Wait the entered number of days for a checked quality, before downloading a lower quality release." + new Element('span', {'text':'Wait'}), + new Element('input.inlay.wait_for_input.xsmall', { + 'type':'text', + 'value': data.wait_for && data.wait_for.length > 0 ? data.wait_for[0] : 0 + }), + new Element('span', {'text':'day(s) for a better quality '}), + new Element('span.advanced', {'text':'and keep searching'}), + // "After a checked quality is found and downloaded, continue searching for even better quality releases for the entered number of days." + new Element('input.inlay.xsmall.stop_after_input.advanced', { + 'type':'text', + 'value': data.stop_after && data.stop_after.length > 0 ? 
data.stop_after[0] : 0 + }), + new Element('span.advanced', {'text':'day(s) for a better (checked) quality.'}) ) ); @@ -116,7 +124,8 @@ var Profile = new Class({ var data = { 'id' : self.data._id, 'label' : self.el.getElement('.quality_label input').get('value'), - 'wait_for' : self.el.getElement('.wait_for input').get('value'), + 'wait_for' : self.el.getElement('.wait_for_input').get('value'), + 'stop_after' : self.el.getElement('.stop_after_input').get('value'), 'types': [] }; diff --git a/couchpotato/core/plugins/quality/main.py b/couchpotato/core/plugins/quality/main.py index e52e3d57..2687cb14 100644 --- a/couchpotato/core/plugins/quality/main.py +++ b/couchpotato/core/plugins/quality/main.py @@ -1,11 +1,12 @@ import traceback import re +from CodernityDB.database import RecordNotFound from couchpotato import get_db from couchpotato.api import addApiView -from couchpotato.core.event import addEvent +from couchpotato.core.event import addEvent, fireEvent from couchpotato.core.helpers.encoding import toUnicode, ss -from couchpotato.core.helpers.variable import mergeDicts, getExt, tryInt +from couchpotato.core.helpers.variable import mergeDicts, getExt, tryInt, splitString from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from couchpotato.core.plugins.quality.index import QualityIndex @@ -21,23 +22,23 @@ class QualityPlugin(Plugin): } qualities = [ - {'identifier': 'bd50', 'hd': True, 'allow_3d': True, 'size': (20000, 60000), 'label': 'BR-Disk', 'alternative': ['bd25'], 'allow': ['1080p'], 'ext':['iso', 'img'], 'tags': ['bdmv', 'certificate', ('complete', 'bluray'), 'avc', 'mvc']}, - {'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts'], 'tags': ['m2ts', 'x264', 'h264']}, + {'identifier': 'bd50', 'hd': True, 'allow_3d': True, 'size': (20000, 60000), 'label': 'BR-Disk', 'alternative': ['bd25', ('br', 
'disk')], 'allow': ['1080p'], 'ext':['iso', 'img'], 'tags': ['bdmv', 'certificate', ('complete', 'bluray'), 'avc', 'mvc']}, + {'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts', 'ts'], 'tags': ['m2ts', 'x264', 'h264']}, {'identifier': '720p', 'hd': True, 'allow_3d': True, 'size': (3000, 10000), 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts'], 'tags': ['x264', 'h264']}, - {'identifier': 'brrip', 'hd': True, 'allow_3d': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip'], 'allow': ['720p', '1080p'], 'ext':[], 'tags': ['hdtv', 'hdrip', 'webdl', ('web', 'dl')]}, - {'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': ['br2dvd'], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r'), 'dvd9']}, - {'identifier': 'dvdrip', 'size': (600, 2400), 'label': 'DVD-Rip', 'width': 720, 'alternative': [], 'allow': [], 'ext':[], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]}, + {'identifier': 'brrip', 'hd': True, 'allow_3d': True, 'size': (700, 7000), 'label': 'BR-Rip', 'alternative': ['bdrip', ('br', 'rip')], 'allow': ['720p', '1080p'], 'ext':['mp4', 'avi'], 'tags': ['hdtv', 'hdrip', 'webdl', ('web', 'dl')]}, + {'identifier': 'dvdr', 'size': (3000, 10000), 'label': 'DVD-R', 'alternative': ['br2dvd', ('dvd', 'r')], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r'), 'dvd9']}, + {'identifier': 'dvdrip', 'size': (600, 2400), 'label': 'DVD-Rip', 'width': 720, 'alternative': [('dvd', 'rip')], 'allow': [], 'ext':['avi'], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]}, {'identifier': 'scr', 'size': (600, 1600), 'label': 'Screener', 'alternative': ['screener', 'dvdscr', 'ppvrip', 'dvdscreener', 'hdscr'], 'allow': ['dvdr', 'dvdrip', '720p', 
'1080p'], 'ext':[], 'tags': ['webrip', ('web', 'rip')]}, - {'identifier': 'r5', 'size': (600, 1000), 'label': 'R5', 'alternative': ['r6'], 'allow': ['dvdr'], 'ext':[]}, - {'identifier': 'tc', 'size': (600, 1000), 'label': 'TeleCine', 'alternative': ['telecine'], 'allow': [], 'ext':[]}, - {'identifier': 'ts', 'size': (600, 1000), 'label': 'TeleSync', 'alternative': ['telesync', 'hdts'], 'allow': [], 'ext':[]}, - {'identifier': 'cam', 'size': (600, 1000), 'label': 'Cam', 'alternative': ['camrip', 'hdcam'], 'allow': [], 'ext':[]} + {'identifier': 'r5', 'size': (600, 1000), 'label': 'R5', 'alternative': ['r6'], 'allow': ['dvdr', '720p'], 'ext':[]}, + {'identifier': 'tc', 'size': (600, 1000), 'label': 'TeleCine', 'alternative': ['telecine'], 'allow': ['720p'], 'ext':[]}, + {'identifier': 'ts', 'size': (600, 1000), 'label': 'TeleSync', 'alternative': ['telesync', 'hdts'], 'allow': ['720p'], 'ext':[]}, + {'identifier': 'cam', 'size': (600, 1000), 'label': 'Cam', 'alternative': ['camrip', 'hdcam'], 'allow': ['720p'], 'ext':[]} ] pre_releases = ['cam', 'ts', 'tc', 'r5', 'scr'] threed_tags = { 'sbs': [('half', 'sbs'), 'hsbs', ('full', 'sbs'), 'fsbs'], 'ou': [('half', 'ou'), 'hou', ('full', 'ou'), 'fou'], - '3d': ['2d3d', '3d2d'], + '3d': ['2d3d', '3d2d', '3d'], } cached_qualities = None @@ -51,6 +52,7 @@ class QualityPlugin(Plugin): addEvent('quality.order', self.getOrder) addEvent('quality.ishigher', self.isHigher) addEvent('quality.isfinish', self.isFinish) + addEvent('quality.fill', self.fill) addApiView('quality.size.save', self.saveSize) addApiView('quality.list', self.allView, docs = { @@ -93,15 +95,14 @@ class QualityPlugin(Plugin): db = get_db() - qualities = db.all('quality', with_doc = True) - temp = [] - for quality in qualities: - quality = quality['doc'] - q = mergeDicts(self.getQuality(quality.get('identifier')), quality) + for quality in self.qualities: + quality_doc = db.get('quality', quality.get('identifier'), with_doc = True)['doc'] + q = 
mergeDicts(quality, quality_doc) temp.append(q) - self.cached_qualities = temp + if len(temp) == len(self.qualities): + self.cached_qualities = temp return temp @@ -152,24 +153,31 @@ class QualityPlugin(Plugin): order = 0 for q in self.qualities: - db.insert({ - '_t': 'quality', - 'order': order, - 'identifier': q.get('identifier'), - 'size_min': tryInt(q.get('size')[0]), - 'size_max': tryInt(q.get('size')[1]), - }) + existing = None + try: + existing = db.get('quality', q.get('identifier')) + except RecordNotFound: + pass - log.info('Creating profile: %s', q.get('label')) - db.insert({ - '_t': 'profile', - 'order': order + 20, # Make sure it goes behind other profiles - 'core': True, - 'qualities': [q.get('identifier')], - 'label': toUnicode(q.get('label')), - 'finish': [True], - 'wait_for': [0], - }) + if not existing: + db.insert({ + '_t': 'quality', + 'order': order, + 'identifier': q.get('identifier'), + 'size_min': tryInt(q.get('size')[0]), + 'size_max': tryInt(q.get('size')[1]), + }) + + log.info('Creating profile: %s', q.get('label')) + db.insert({ + '_t': 'profile', + 'order': order + 20, # Make sure it goes behind other profiles + 'core': True, + 'qualities': [q.get('identifier')], + 'label': toUnicode(q.get('label')), + 'finish': [True], + 'wait_for': [0], + }) order += 1 @@ -200,22 +208,34 @@ class QualityPlugin(Plugin): for cur_file in files: words = re.split('\W+', cur_file.lower()) + name_year = fireEvent('scanner.name_year', cur_file, file_name = cur_file, single = True) + threed_words = words + if name_year and name_year.get('name'): + split_name = splitString(name_year.get('name'), ' ') + threed_words = [x for x in words if x not in split_name] for quality in qualities: contains_score = self.containsTagScore(quality, words, cur_file) - threedscore = self.contains3D(quality, words, cur_file) if quality.get('allow_3d') else (0, None) + threedscore = self.contains3D(quality, threed_words, cur_file) if quality.get('allow_3d') else (0, None) 
self.calcScore(score, quality, contains_score, threedscore) - # Evaluate score based on size + size_scores = [] for quality in qualities: - size_score = self.guessSizeScore(quality, size = size) - self.calcScore(score, quality, size_score, penalty = False) - # Try again with loose testing - for quality in qualities: + # Evaluate score based on size + size_score = self.guessSizeScore(quality, size = size) loose_score = self.guessLooseScore(quality, extra = extra) - self.calcScore(score, quality, loose_score, penalty = False) + + if size_score > 0: + size_scores.append(quality) + + self.calcScore(score, quality, size_score + loose_score, penalty = False) + + # Add additional size score if only 1 size validated + if len(size_scores) == 1: + self.calcScore(score, size_scores[0], 10, penalty = False) + del size_scores # Return nothing if all scores are <= 0 has_non_zero = 0 @@ -241,6 +261,9 @@ class QualityPlugin(Plugin): cur_file = ss(cur_file) score = 0 + extension = words[-1] + words = words[:-1] + points = { 'identifier': 10, 'label': 10, @@ -260,7 +283,7 @@ class QualityPlugin(Plugin): log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file)) score += points.get(tag_type) - if isinstance(alt, (str, unicode)) and ss(alt.lower()) in cur_file.lower(): + if isinstance(alt, (str, unicode)) and ss(alt.lower()) in words: log.debug('Found %s via %s %s in %s', (quality['identifier'], tag_type, quality.get(tag_type), cur_file)) score += points.get(tag_type) / 2 @@ -270,8 +293,8 @@ class QualityPlugin(Plugin): # Check extention for ext in quality.get('ext', []): - if ext == words[-1]: - log.debug('Found %s extension in %s', (ext, cur_file)) + if ext == extension: + log.debug('Found %s with .%s extension in %s', (quality['identifier'], ext, cur_file)) score += points['ext'] return score @@ -283,14 +306,14 @@ class QualityPlugin(Plugin): tags = self.threed_tags.get(key, []) for tag in tags: - if (isinstance(tag, tuple) and 
'.'.join(tag) in '.'.join(words)) or (isinstance(tag, (str, unicode)) and ss(tag.lower()) in cur_file.lower()): + if isinstance(tag, tuple): + if len(set(words) & set(tag)) == len(tag): + log.debug('Found %s in %s', (tag, cur_file)) + return 1, key + elif tag in words: log.debug('Found %s in %s', (tag, cur_file)) return 1, key - if list(set([key]) & set(words)): - log.debug('Found %s in %s', (key, cur_file)) - return 1, key - return 0, None def guessLooseScore(self, quality, extra = None): @@ -325,6 +348,8 @@ class QualityPlugin(Plugin): if tryInt(quality['size_min']) <= tryInt(size) <= tryInt(quality['size_max']): log.debug('Found %s via release size: %s MB < %s MB < %s MB', (quality['identifier'], quality['size_min'], size, quality['size_max'])) score += 5 + else: + score -= 5 return score @@ -351,28 +376,34 @@ class QualityPlugin(Plugin): # Give panelty for all lower qualities for q in self.qualities[self.order.index(quality.get('identifier'))+1:]: - score[q.get('identifier')]['score'] -= 1 + if score.get(q.get('identifier')): + score[q.get('identifier')]['score'] -= 1 - def isFinish(self, quality, profile): + def isFinish(self, quality, profile, release_age = 0): if not isinstance(profile, dict) or not profile.get('qualities'): - return False + # No profile so anything (scanned) is good enough + return True try: - quality_order = [i for i, identifier in enumerate(profile['qualities']) if identifier == quality['identifier'] and bool(profile['3d'][i] if profile.get('3d') else 0) == bool(quality.get('is_3d', 0))][0] - return profile['finish'][quality_order] + index = [i for i, identifier in enumerate(profile['qualities']) if identifier == quality['identifier'] and bool(profile['3d'][i] if profile.get('3d') else False) == bool(quality.get('is_3d', False))][0] + + if index == 0 or (profile['finish'][index] and int(release_age) >= int(profile.get('stop_after', [0])[0])): + return True + + return False except: return False def isHigher(self, quality, compare_with, 
profile = None): if not isinstance(profile, dict) or not profile.get('qualities'): - profile = {'qualities': self.order} + profile = fireEvent('profile.default', single = True) # Try to find quality in profile, if not found: a quality we do not want is lower than anything else try: quality_order = [i for i, identifier in enumerate(profile['qualities']) if identifier == quality['identifier'] and bool(profile['3d'][i] if profile.get('3d') else 0) == bool(quality.get('is_3d', 0))][0] except: log.debug('Quality %s not found in profile identifiers %s', (quality['identifier'] + (' 3D' if quality.get('is_3d', 0) else ''), \ - [identifier + ('3D' if (profile['3d'][i] if profile.get('3d') else 0) else '') for i, identifier in enumerate(profile['qualities'])])) + [identifier + (' 3D' if (profile['3d'][i] if profile.get('3d') else 0) else '') for i, identifier in enumerate(profile['qualities'])])) return 'lower' # Try to find compare quality in profile, if not found: anything is higher than a not wanted quality @@ -412,6 +443,20 @@ class QualityPlugin(Plugin): 'Movie Name (2013) 2D + 3D': {'size': 49000, 'quality': 'bd50', 'is_3d': True}, 'Movie Monuments 2013 BrRip 1080p': {'size': 1800, 'quality': 'brrip'}, 'Movie Monuments 2013 BrRip 720p': {'size': 1300, 'quality': 'brrip'}, + 'The.Movie.2014.3D.1080p.BluRay.AVC.DTS-HD.MA.5.1-GroupName': {'size': 30000, 'quality': 'bd50', 'is_3d': True}, + '/home/namehou/Movie Monuments (2013)/Movie Monuments.mkv': {'size': 4500, 'quality': '1080p', 'is_3d': False}, + '/home/namehou/Movie Monuments (2013)/Movie Monuments Full-OU.mkv': {'size': 4500, 'quality': '1080p', 'is_3d': True}, + '/volume1/Public/3D/Moviename/Moviename (2009).3D.SBS.ts': {'size': 7500, 'quality': '1080p', 'is_3d': True}, + '/volume1/Public/Moviename/Moviename (2009).ts': {'size': 5500, 'quality': '1080p'}, + '/movies/BluRay HDDVD H.264 MKV 720p EngSub/QuiQui le fou (criterion collection #123, 1915)/QuiQui le fou (1915) 720p x264 BluRay.mkv': {'size': 5500, 
'quality': '720p'}, + 'C:\\movies\QuiQui le fou (collection #123, 1915)\QuiQui le fou (1915) 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p'}, + 'C:\\movies\QuiQui le fou (collection #123, 1915)\QuiQui le fou (1915) half-sbs 720p x264 BluRay.mkv': {'size': 5500, 'quality': '720p', 'is_3d': True}, + 'Moviename 2014 720p HDCAM XviD DualAudio': {'size': 4000, 'quality': 'cam'}, + 'Moviename (2014) - 720p CAM x264': {'size': 2250, 'quality': 'cam'}, + 'Movie Name (2014).mp4': {'size': 750, 'quality': 'brrip'}, + 'Moviename.2014.720p.R6.WEB-DL.x264.AC3-xyz': {'size': 750, 'quality': 'r5'}, + 'Movie name 2014 New Source 720p HDCAM x264 AC3 xyz': {'size': 750, 'quality': 'cam'}, + 'Movie.Name.2014.720p.HD.TS.AC3.x264': {'size': 750, 'quality': 'ts'} } correct = 0 @@ -419,7 +464,10 @@ class QualityPlugin(Plugin): test_quality = self.guess(files = [name], extra = tests[name].get('extra', None), size = tests[name].get('size', None)) or {} success = test_quality.get('identifier') == tests[name]['quality'] and test_quality.get('is_3d') == tests[name].get('is_3d', False) if not success: - log.error('%s failed check, thinks it\'s %s', (name, self.guess([name]).get('identifier'))) + log.error('%s failed check, thinks it\'s "%s" expecting "%s"', (name, + test_quality.get('identifier') + (' 3D' if test_quality.get('is_3d') else ''), + tests[name]['quality'] + (' 3D' if tests[name].get('is_3d') else '') + )) correct += success diff --git a/couchpotato/core/plugins/quality/static/quality.js b/couchpotato/core/plugins/quality/static/quality.js index 285ed1d9..d233b1ce 100644 --- a/couchpotato/core/plugins/quality/static/quality.js +++ b/couchpotato/core/plugins/quality/static/quality.js @@ -8,6 +8,7 @@ var QualityBase = new Class({ self.qualities = data.qualities; + self.profiles_list = null; self.profiles = []; Array.each(data.profiles, self.createProfilesClass.bind(self)); @@ -29,9 +30,14 @@ var QualityBase = new Class({ }, getQuality: function(identifier){ - return 
this.qualities.filter(function(q){ - return q.identifier == identifier; - }).pick(); + try { + return this.qualities.filter(function(q){ + return q.identifier == identifier; + }).pick(); + } + catch(e){} + + return {} }, addSettings: function(){ @@ -101,14 +107,13 @@ var QualityBase = new Class({ createProfileOrdering: function(){ var self = this; - var profile_list; self.settings.createGroup({ 'label': 'Profile Defaults', - 'description': '(Needs refresh \'' +(App.isMac() ? 'CMD+R' : 'F5')+ '\' after editing)' + 'description': '(Needs refresh \'' +(App.isMac() ? 'CMD+R' : 'F5')+ '\' after editing)' }).adopt( new Element('.ctrlHolder#profile_ordering').adopt( new Element('label[text=Order]'), - profile_list = new Element('ul'), + self.profiles_list = new Element('ul'), new Element('p.formHint', { 'html': 'Change the order the profiles are in the dropdown list. Uncheck to hide it completely.
First one will be default.' }) @@ -128,29 +133,37 @@ var QualityBase = new Class({ 'text': profile.data.label }), new Element('span.handle') - ).inject(profile_list); + ).inject(self.profiles_list); new Form.Check(check); }); // Sortable - self.profile_sortable = new Sortables(profile_list, { + var sorted_changed = false; + self.profile_sortable = new Sortables(self.profiles_list, { 'revert': true, - 'handle': '', + 'handle': '.handle', 'opacity': 0.5, - 'onComplete': self.saveProfileOrdering.bind(self) + 'onSort': function(){ + sorted_changed = true; + }, + 'onComplete': function(){ + if(sorted_changed){ + self.saveProfileOrdering(); + sorted_changed = false; + } + } }); }, saveProfileOrdering: function(){ - var self = this; + var self = this, + ids = [], + hidden = []; - var ids = []; - var hidden = []; - - self.profile_sortable.list.getElements('li').each(function(el, nr){ + self.profiles_list.getElements('li').each(function(el, nr){ ids.include(el.get('data-id')); hidden[nr] = +!el.getElement('input[type=checkbox]').get('checked'); }); diff --git a/couchpotato/core/plugins/release/main.py b/couchpotato/core/plugins/release/main.py index 8af26fd2..7b47aa96 100644 --- a/couchpotato/core/plugins/release/main.py +++ b/couchpotato/core/plugins/release/main.py @@ -3,11 +3,11 @@ import os import time import traceback -from CodernityDB.database import RecordDeleted +from CodernityDB.database import RecordDeleted, RecordNotFound from couchpotato import md5, get_db from couchpotato.api import addApiView from couchpotato.core.event import fireEvent, addEvent -from couchpotato.core.helpers.encoding import ss, toUnicode, sp +from couchpotato.core.helpers.encoding import toUnicode, sp from couchpotato.core.helpers.variable import getTitle from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin @@ -58,7 +58,7 @@ class Release(Plugin): addEvent('release.for_media', self.forMedia) # Clean releases that didn't have activity in the last week - 
addEvent('app.load', self.cleanDone) + addEvent('app.load', self.cleanDone, priority = 1000) fireEvent('schedule.interval', 'movie.clean_releases', self.cleanDone, hours = 12) def cleanDone(self): @@ -79,6 +79,13 @@ class Release(Plugin): try: db.get('id', release.get('key')) media_exist.append(release.get('key')) + + try: + if release['doc'].get('status') == 'ignore': + release['doc']['status'] = 'ignored' + db.update(release['doc']) + except: + log.error('Failed fixing mis-status tag: %s', traceback.format_exc()) except RecordDeleted: db.delete(release['doc']) log.debug('Deleted orphaned release: %s', release['doc']) @@ -87,9 +94,6 @@ class Release(Plugin): del media_exist - # Reindex statuses - db.reindex_index('media_status') - # get movies last_edit more than a week ago medias = fireEvent('media.with_status', 'done', single = True) @@ -103,11 +107,13 @@ class Release(Plugin): if rel['status'] in ['available']: self.delete(rel['_id']) - # Set all snatched and downloaded releases to ignored to make sure they are ignored when re-adding the move + # Set all snatched and downloaded releases to ignored to make sure they are ignored when re-adding the media elif rel['status'] in ['snatched', 'downloaded']: - self.updateStatus(rel['_id'], status = 'ignore') + self.updateStatus(rel['_id'], status = 'ignored') - def add(self, group, update_info = True): + fireEvent('media.untag', media.get('_id'), 'recent', single = True) + + def add(self, group, update_info = True, update_id = None): try: db = get_db() @@ -123,33 +129,49 @@ class Release(Plugin): 'profile_id': None, }, search_after = False, update_after = update_info, notify_after = False, status = 'done', single = True) - # Add Release - release = { - '_t': 'release', - 'media_id': media['_id'], - 'identifier': release_identifier, - 'quality': group['meta_data']['quality'].get('identifier'), - 'is_3d': group['meta_data']['quality'].get('is_3d', 0), - 'last_edit': int(time.time()), - 'status': 'done' - } - try: - r = 
db.get('release_identifier', release_identifier, with_doc = True)['doc'] - r['media_id'] = media['_id'] - except: - r = db.insert(release) + release = None + if update_id: + try: + release = db.get('id', update_id) + release.update({ + 'identifier': release_identifier, + 'last_edit': int(time.time()), + 'status': 'done', + }) + except: + log.error('Failed updating existing release: %s', traceback.format_exc()) + else: - # Update with ref and _id - release.update({ - '_id': r['_id'], - '_rev': r['_rev'], - }) + # Add Release + if not release: + release = { + '_t': 'release', + 'media_id': media['_id'], + 'identifier': release_identifier, + 'quality': group['meta_data']['quality'].get('identifier'), + 'is_3d': group['meta_data']['quality'].get('is_3d', 0), + 'last_edit': int(time.time()), + 'status': 'done' + } + + try: + r = db.get('release_identifier', release_identifier, with_doc = True)['doc'] + r['media_id'] = media['_id'] + except: + log.debug('Failed updating release by identifier "%s". 
Inserting new.', release_identifier) + r = db.insert(release) + + # Update with ref and _id + release.update({ + '_id': r['_id'], + '_rev': r['_rev'], + }) # Empty out empty file groups - release['files'] = dict((k, v) for k, v in group['files'].items() if v) + release['files'] = dict((k, [toUnicode(x) for x in v]) for k, v in group['files'].items() if v) db.update(release) - fireEvent('media.restatus', media['_id']) + fireEvent('media.restatus', media['_id'], single = True) return True except: @@ -170,6 +192,9 @@ class Release(Plugin): rel = db.get('id', release_id) db.delete(rel) return True + except RecordDeleted: + log.debug('Already deleted: %s', release_id) + return True except: log.error('Failed: %s', traceback.format_exc()) @@ -302,7 +327,7 @@ class Release(Plugin): log_movie = '%s (%s) in %s' % (getTitle(media), media['info']['year'], rls['quality']) snatch_message = 'Snatched "%s": %s' % (data.get('name'), log_movie) log.info(snatch_message) - fireEvent('%s.snatched' % data['type'], message = snatch_message, data = rls) + fireEvent('%s.snatched' % data['type'], message = snatch_message, data = media) # Mark release as snatched if renamer_enabled: @@ -313,22 +338,14 @@ class Release(Plugin): if media['status'] == 'active': profile = db.get('id', media['profile_id']) - finished = False - if rls['quality'] in profile['qualities']: - nr = profile['qualities'].index(rls['quality']) - finished = profile['finish'][nr] - - if finished: + if fireEvent('quality.isfinish', {'identifier': rls['quality'], 'is_3d': rls.get('is_3d', False)}, profile, single = True): log.info('Renamer disabled, marking media as finished: %s', log_movie) # Mark release done self.updateStatus(rls['_id'], status = 'done') # Mark media done - mdia = db.get('id', media['_id']) - mdia['status'] = 'done' - mdia['last_edit'] = int(time.time()) - db.update(mdia) + fireEvent('media.restatus', media['_id'], single = True) return True @@ -341,7 +358,7 @@ class Release(Plugin): return True - def 
tryDownloadResult(self, results, media, quality_custom, manual = False): + def tryDownloadResult(self, results, media, quality_custom): wait_for = False let_through = False @@ -355,7 +372,11 @@ class Release(Plugin): continue if rel['score'] <= 0: - log.info('Ignored, score to low: %s', rel['name']) + log.info('Ignored, score "%s" to low: %s', (rel['score'], rel['name'])) + continue + + if rel['size'] <= 50: + log.info('Ignored, size "%sMB" to low: %s', (rel['size'], rel['name'])) continue rel['wait_for'] = False @@ -375,7 +396,7 @@ class Release(Plugin): wait_for = True continue - downloaded = fireEvent('release.download', data = rel, media = media, manual = manual, single = True) + downloaded = fireEvent('release.download', data = rel, media = media, single = True) if downloaded is True: return True elif downloaded != 'try_next': @@ -453,7 +474,7 @@ class Release(Plugin): rel = db.get('id', release_id) if rel and rel.get('status') != status: - release_name = rel['info'].get('name') + release_name = None if rel.get('files'): for file_type in rel.get('files', {}): if file_type == 'movie': @@ -461,6 +482,9 @@ class Release(Plugin): release_name = os.path.basename(release_file) break + if not release_name and rel.get('info'): + release_name = rel['info'].get('name') + #update status in Db log.debug('Marking release %s as %s', (release_name, status)) rel['status'] = status @@ -484,8 +508,15 @@ class Release(Plugin): status = list(status if isinstance(status, (list, tuple)) else [status]) for s in status: - for ms in db.get_many('release_status', s, with_doc = with_doc): - yield ms['doc'] if with_doc else ms + for ms in db.get_many('release_status', s): + if with_doc: + try: + doc = db.get('id', ms['_id']) + yield doc + except RecordNotFound: + log.debug('Record not found, skipping: %s', ms['_id']) + else: + yield ms def forMedia(self, media_id): diff --git a/couchpotato/core/plugins/renamer.py b/couchpotato/core/plugins/renamer.py index 0b56ca33..2bc47876 100644 --- 
a/couchpotato/core/plugins/renamer.py +++ b/couchpotato/core/plugins/renamer.py @@ -14,7 +14,6 @@ from couchpotato.core.helpers.variable import getExt, mergeDicts, getTitle, \ from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from couchpotato.environment import Env -from scandir import scandir from unrar2 import RarFile import six from six.moves import filter @@ -124,11 +123,6 @@ class Renamer(Plugin): no_process = [to_folder] cat_list = fireEvent('category.all', single = True) or [] no_process.extend([item['destination'] for item in cat_list]) - try: - if Env.setting('library', section = 'manage').strip(): - no_process.extend([sp(manage_folder) for manage_folder in splitString(Env.setting('library', section = 'manage'), '::')]) - except: - pass # Check to see if the no_process folders are inside the "from" folder. if not os.path.isdir(base_folder) or not os.path.isdir(to_folder): @@ -137,7 +131,7 @@ class Renamer(Plugin): else: for item in no_process: if isSubFolder(item, base_folder): - log.error('To protect your data, the media libraries can\'t be inside of or the same as the "from" folder.') + log.error('To protect your data, the media libraries can\'t be inside of or the same as the "from" folder. "%s" in "%s"', (item, base_folder)) return # Check to see if the no_process folders are inside the provided media_folder @@ -169,7 +163,7 @@ class Renamer(Plugin): if media_folder: for item in no_process: if isSubFolder(item, media_folder): - log.error('To protect your data, the media libraries can\'t be inside of or the same as the provided media folder.') + log.error('To protect your data, the media libraries can\'t be inside of or the same as the provided media folder. 
"%s" in "%s"', (item, media_folder)) return # Make sure a checkSnatched marked all downloads/seeds as such @@ -195,7 +189,7 @@ class Renamer(Plugin): else: # Get all files from the specified folder try: - for root, folders, names in scandir.walk(media_folder): + for root, folders, names in os.walk(media_folder): files.extend([sp(os.path.join(root, name)) for name in names]) except: log.error('Failed getting files from %s: %s', (media_folder, traceback.format_exc())) @@ -203,14 +197,18 @@ class Renamer(Plugin): db = get_db() # Extend the download info with info stored in the downloaded release + keep_original = self.moveTypeIsLinked() + is_torrent = False if release_download: release_download = self.extendReleaseDownload(release_download) + is_torrent = self.downloadIsTorrent(release_download) + keep_original = True if is_torrent and self.conf('file_action') not in ['move'] else keep_original # Unpack any archives extr_files = None if self.conf('unrar'): folder, media_folder, files, extr_files = self.extractFiles(folder = folder, media_folder = media_folder, files = files, - cleanup = self.conf('cleanup') and not self.downloadIsTorrent(release_download)) + cleanup = self.conf('cleanup') and not keep_original) groups = fireEvent('scanner.scan', folder = folder if folder else base_folder, files = files, release_download = release_download, return_ignored = False, single = True) or [] @@ -228,6 +226,7 @@ class Renamer(Plugin): for group_identifier in groups: group = groups[group_identifier] + group['release_download'] = None rename_files = {} remove_files = [] remove_releases = [] @@ -326,7 +325,7 @@ class Renamer(Plugin): if file_type is 'nfo' and not self.conf('rename_nfo'): log.debug('Skipping, renaming of %s disabled', file_type) for current_file in group['files'][file_type]: - if self.conf('cleanup') and (not self.downloadIsTorrent(release_download) or self.fileIsAdded(current_file, group)): + if self.conf('cleanup') and (not keep_original or 
self.fileIsAdded(current_file, group)): remove_files.append(current_file) continue @@ -446,19 +445,22 @@ class Renamer(Plugin): # Before renaming, remove the lower quality files remove_leftovers = True - # Mark movie "done" once it's found the quality with the finish check + # Get media quality profile profile = None - try: - if media.get('status') == 'active' and media.get('profile_id'): + if media.get('profile_id'): + try: profile = db.get('id', media['profile_id']) - if fireEvent('quality.isfinish', group['meta_data']['quality'], profile, single = True): - mdia = db.get('id', media['_id']) - mdia['status'] = 'done' - mdia['last_edit'] = int(time.time()) - db.update(mdia) + except: + # Set profile to None as it does not exist anymore + mdia = db.get('id', media['_id']) + mdia['profile_id'] = None + db.update(mdia) + log.error('Error getting quality profile for %s: %s', (media_title, traceback.format_exc())) + else: + log.debug('Media has no quality profile: %s', media_title) - except Exception as e: - log.error('Failed marking movie finished: %s', (traceback.format_exc())) + # Mark media for dashboard + mark_as_recent = False # Go over current movie releases for release in fireEvent('release.for_media', media['_id'], single = True): @@ -468,7 +470,7 @@ class Renamer(Plugin): # This is where CP removes older, lesser quality releases or releases that are not wanted anymore is_higher = fireEvent('quality.ishigher', \ - group['meta_data']['quality'], {'identifier': release['quality'], 'is_3d': release.get('is_3d', 0)}, profile, single = True) + group['meta_data']['quality'], {'identifier': release['quality'], 'is_3d': release.get('is_3d', False)}, profile, single = True) if is_higher == 'higher': log.info('Removing lesser or not wanted quality %s for %s.', (media_title, release.get('quality'))) @@ -493,7 +495,7 @@ class Renamer(Plugin): self.tagRelease(group = group, tag = 'exists') # Notify on rename fail - download_message = 'Renaming of %s (%s) cancelled, exists 
in %s already.' % (media_title, group['meta_data']['quality']['label'], release.get('identifier')) + download_message = 'Renaming of %s (%s) cancelled, exists in %s already.' % (media_title, group['meta_data']['quality']['label'], release.get('quality')) fireEvent('movie.renaming.canceled', message = download_message, data = group) remove_leftovers = False @@ -505,13 +507,22 @@ class Renamer(Plugin): if release_download['status'] == 'completed': # Set the release to downloaded fireEvent('release.update_status', release['_id'], status = 'downloaded', single = True) + group['release_download'] = release_download + mark_as_recent = True elif release_download['status'] == 'seeding': # Set the release to seeding fireEvent('release.update_status', release['_id'], status = 'seeding', single = True) + mark_as_recent = True - elif release.get('identifier') == group['meta_data']['quality']['identifier']: - # Set the release to downloaded - fireEvent('release.update_status', release['_id'], status = 'downloaded', single = True) + elif release.get('quality') == group['meta_data']['quality']['identifier']: + # Set the release to downloaded + fireEvent('release.update_status', release['_id'], status = 'downloaded', single = True) + group['release_download'] = release_download + mark_as_recent = True + + # Mark media for dashboard + if mark_as_recent: + fireEvent('media.tag', group['media'].get('_id'), 'recent', single = True) # Remove leftover files if not remove_leftovers: # Don't remove anything @@ -520,7 +531,7 @@ class Renamer(Plugin): log.debug('Removing leftover files') for current_file in group['files']['leftover']: if self.conf('cleanup') and not self.conf('move_leftover') and \ - (not self.downloadIsTorrent(release_download) or self.fileIsAdded(current_file, group)): + (not keep_original or self.fileIsAdded(current_file, group)): remove_files.append(current_file) # Remove files @@ -567,7 +578,7 @@ class Renamer(Plugin): self.makeDir(os.path.dirname(dst)) try: - 
self.moveFile(src, dst, forcemove = not self.downloadIsTorrent(release_download) or self.fileIsAdded(src, group)) + self.moveFile(src, dst, use_default = not is_torrent or self.fileIsAdded(src, group)) group['renamed_files'].append(dst) except: log.error('Failed renaming the file "%s" : %s', (os.path.basename(src), traceback.format_exc())) @@ -583,7 +594,7 @@ class Renamer(Plugin): self.untagRelease(group = group, tag = 'failed_rename') # Tag folder if it is in the 'from' folder and it will not be removed because it is a torrent - if self.movieInFromFolder(media_folder) and self.downloadIsTorrent(release_download): + if self.movieInFromFolder(media_folder) and keep_original: self.tagRelease(group = group, tag = 'renamed_already') # Remove matching releases @@ -594,7 +605,7 @@ class Renamer(Plugin): except: log.error('Failed removing %s: %s', (release, traceback.format_exc())) - if group['dirname'] and group['parentdir'] and not self.downloadIsTorrent(release_download): + if group['dirname'] and group['parentdir'] and not keep_original: if media_folder: # Delete the movie folder group_folder = media_folder @@ -609,7 +620,7 @@ class Renamer(Plugin): log.error('Failed removing %s: %s', (group_folder, traceback.format_exc())) # Notify on download, search for trailers etc - download_message = 'Downloaded %s (%s)' % (media_title, replacements['quality']) + download_message = 'Downloaded %s (%s%s)' % (media_title, replacements['quality'], (' ' + replacements['3d']) if replacements['3d'] else '') try: fireEvent('renamer.after', message = download_message, group = group, in_order = True) except: @@ -664,7 +675,7 @@ Remove it if you want it to be renamed (again, or at least let it try again) # Tag all files in release folder elif release_download['folder']: - for root, folders, names in scandir.walk(sp(release_download['folder'])): + for root, folders, names in os.walk(sp(release_download['folder'])): tag_files.extend([os.path.join(root, name) for name in names]) for 
filename in tag_files: @@ -704,7 +715,7 @@ Remove it if you want it to be renamed (again, or at least let it try again) # Untag all files in release folder else: - for root, folders, names in scandir.walk(folder): + for root, folders, names in os.walk(folder): tag_files.extend([sp(os.path.join(root, name)) for name in names if not os.path.splitext(name)[1] == '.ignore']) if not folder: @@ -712,7 +723,7 @@ Remove it if you want it to be renamed (again, or at least let it try again) # Find all .ignore files in folder ignore_files = [] - for root, dirnames, filenames in scandir.walk(folder): + for root, dirnames, filenames in os.walk(folder): ignore_files.extend(fnmatch.filter([sp(os.path.join(root, filename)) for filename in filenames], '*%s.ignore' % tag)) # Match all found ignore files with the tag_files and delete if found @@ -741,11 +752,11 @@ Remove it if you want it to be renamed (again, or at least let it try again) # Find tag on all files in release folder else: - for root, folders, names in scandir.walk(folder): + for root, folders, names in os.walk(folder): tag_files.extend([sp(os.path.join(root, name)) for name in names if not os.path.splitext(name)[1] == '.ignore']) # Find all .ignore files in folder - for root, dirnames, filenames in scandir.walk(folder): + for root, dirnames, filenames in os.walk(folder): ignore_files.extend(fnmatch.filter([sp(os.path.join(root, filename)) for filename in filenames], '*%s.ignore' % tag)) # Match all found ignore files with the tag_files and return True found @@ -756,10 +767,15 @@ Remove it if you want it to be renamed (again, or at least let it try again) return False - def moveFile(self, old, dest, forcemove = False): + def moveFile(self, old, dest, use_default = False): dest = sp(dest) try: - if forcemove or self.conf('file_action') not in ['copy', 'link']: + + move_type = self.conf('file_action') + if use_default: + move_type = self.conf('default_file_action') + + if move_type not in ['copy', 'link']: try: 
shutil.move(old, dest) except: @@ -768,16 +784,16 @@ Remove it if you want it to be renamed (again, or at least let it try again) os.unlink(old) else: raise - elif self.conf('file_action') == 'copy': + elif move_type == 'copy': shutil.copy(old, dest) - elif self.conf('file_action') == 'link': + else: # First try to hardlink try: log.debug('Hardlinking file "%s" to "%s"...', (old, dest)) link(old, dest) except: # Try to simlink next - log.debug('Couldn\'t hardlink file "%s" to "%s". Simlinking instead. Error: %s.', (old, dest, traceback.format_exc())) + log.debug('Couldn\'t hardlink file "%s" to "%s". Symlinking instead. Error: %s.', (old, dest, traceback.format_exc())) shutil.copy(old, dest) try: symlink(dest, old + '.link') @@ -1077,6 +1093,9 @@ Remove it if you want it to be renamed (again, or at least let it try again) return False return src in group['before_rename'] + def moveTypeIsLinked(self): + return self.conf('default_file_action') in ['copy', 'link'] + def statusInfoComplete(self, release_download): return release_download.get('id') and release_download.get('downloader') and release_download.get('folder') @@ -1102,7 +1121,7 @@ Remove it if you want it to be renamed (again, or at least let it try again) check_file_date = False if not files: - for root, folders, names in scandir.walk(folder): + for root, folders, names in os.walk(folder): files.extend([sp(os.path.join(root, name)) for name in names]) # Find all archive files @@ -1128,14 +1147,20 @@ Remove it if you want it to be renamed (again, or at least let it try again) log.info('Archive %s found. 
Extracting...', os.path.basename(archive['file'])) try: - rar_handle = RarFile(archive['file']) + rar_handle = RarFile(archive['file'], custom_path = self.conf('unrar_path')) extr_path = os.path.join(from_folder, os.path.relpath(os.path.dirname(archive['file']), folder)) self.makeDir(extr_path) for packedinfo in rar_handle.infolist(): - if not packedinfo.isdir and not os.path.isfile(sp(os.path.join(extr_path, os.path.basename(packedinfo.filename)))): + extr_file_path = sp(os.path.join(extr_path, os.path.basename(packedinfo.filename))) + if not packedinfo.isdir and not os.path.isfile(extr_file_path): log.debug('Extracting %s...', packedinfo.filename) rar_handle.extract(condition = [packedinfo.index], path = extr_path, withSubpath = False, overwrite = False) - extr_files.append(sp(os.path.join(extr_path, os.path.basename(packedinfo.filename)))) + if self.conf('unrar_modify_date'): + try: + os.utime(extr_file_path, (os.path.getatime(archive['file']), os.path.getmtime(archive['file']))) + except: + log.error('Rar modify date enabled, but failed: %s', traceback.format_exc()) + extr_files.append(extr_file_path) del rar_handle except Exception as e: log.error('Failed to extract %s: %s %s', (archive['file'], e, traceback.format_exc())) @@ -1270,6 +1295,18 @@ config = [{ 'description': 'Extract rar files if found.', 'default': False, }, + { + 'advanced': True, + 'name': 'unrar_path', + 'description': 'Custom path to unrar bin', + }, + { + 'advanced': True, + 'name': 'unrar_modify_date', + 'type': 'bool', + 'description': ('Set modify date of unrar-ed files to the rar-file\'s date.', 'This will allow XBMC to recognize extracted files as recently added even if the movie was released some time ago.'), + 'default': False, + }, { 'name': 'cleanup', 'type': 'bool', @@ -1320,14 +1357,23 @@ config = [{ 'label': 'Folder-Separator', 'description': ('Replace all the spaces with a character.', 'Example: ".", "-" (without quotes). 
Leave empty to use spaces.'), }, + { + 'name': 'default_file_action', + 'label': 'Default File Action', + 'default': 'move', + 'type': 'dropdown', + 'values': [('Link', 'link'), ('Copy', 'copy'), ('Move', 'move')], + 'description': ('Link, Copy or Move after download completed.', + 'Link first tries hard link, then sym link and falls back to Copy.'), + 'advanced': True, + }, { 'name': 'file_action', 'label': 'Torrent File Action', 'default': 'link', 'type': 'dropdown', 'values': [('Link', 'link'), ('Copy', 'copy'), ('Move', 'move')], - 'description': ('Link, Copy or Move after download completed.', - 'Link first tries hard link, then sym link and falls back to Copy. It is perfered to use link when downloading torrents as it will save you space, while still beeing able to seed.'), + 'description': 'See above. It is preferred to use link when downloading torrents as it will save you space, while still being able to seed.', 'advanced': True, }, { diff --git a/couchpotato/core/plugins/scanner.py b/couchpotato/core/plugins/scanner.py index b8eca4c4..6a4d537d 100644 --- a/couchpotato/core/plugins/scanner.py +++ b/couchpotato/core/plugins/scanner.py @@ -13,7 +13,6 @@ from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from enzyme.exceptions import NoParserError, ParseError from guessit import guess_movie_info -from scandir import scandir from subliminal.videos import Video import enzyme from six.moves import filter, map, zip @@ -106,7 +105,7 @@ class Scanner(Plugin): 'HDTV': ['hdtv'] } - clean = '([ _\,\.\(\)\[\]\-]|^)(3d|hsbs|sbs|ou|extended.cut|directors.cut|french|fr|swedisch|sw|danish|dutch|nl|swesub|subs|spanish|german|ac3|dts|custom|dc|divx|divx5|dsr|dsrip|dutch|dvd|dvdr|dvdrip|dvdscr|dvdscreener|screener|dvdivx|cam|fragment|fs|hdtv|hdrip' \ + clean = '([ 
_\,\.\(\)\[\]\-]|^)(3d|hsbs|sbs|half.sbs|full.sbs|ou|half.ou|full.ou|extended|extended.cut|directors.cut|french|fr|swedisch|sw|danish|dutch|nl|swesub|subs|spanish|german|ac3|dts|custom|dc|divx|divx5|dsr|dsrip|dutch|dvd|dvdr|dvdrip|dvdscr|dvdscreener|screener|dvdivx|cam|fragment|fs|hdtv|hdrip' \ '|hdtvrip|webdl|web.dl|webrip|web.rip|internal|limited|multisubs|ntsc|ogg|ogm|pal|pdtv|proper|repack|rerip|retail|r3|r5|bd5|se|svcd|swedish|german|read.nfo|nfofix|unrated|ws|telesync|ts|telecine|tc|brrip|bdrip|video_ts|audio_ts|480p|480i|576p|576i|720p|720i|1080p|1080i|hrhd|hrhdtv|hddvd|bluray|x264|h264|xvid|xvidvd|xxx|www.www|hc|\[.*\])(?=[ _\,\.\(\)\[\]\-]|$)' multipart_regex = [ '[ _\.-]+cd[ _\.-]*([0-9a-d]+)', #*cd1 @@ -150,7 +149,7 @@ class Scanner(Plugin): check_file_date = True try: files = [] - for root, dirs, walk_files in scandir.walk(folder, followlinks=True): + for root, dirs, walk_files in os.walk(folder, followlinks=True): files.extend([sp(os.path.join(sp(root), ss(filename))) for filename in walk_files]) # Break if CP wants to shut down @@ -365,6 +364,7 @@ class Scanner(Plugin): if return_ignored is False and identifier in ignored_identifiers: log.debug('Ignore file found, ignoring release: %s', identifier) + total_found -= 1 continue # Group extra (and easy) files first @@ -385,6 +385,7 @@ class Scanner(Plugin): if len(group['files']['movie']) == 0: log.error('Couldn\'t find any movie files for %s', identifier) + total_found -= 1 continue log.debug('Getting metadata for %s', identifier) @@ -430,7 +431,7 @@ class Scanner(Plugin): # Notify parent & progress on something found if on_found: - on_found(group, total_found, total_found - len(processed_movies)) + on_found(group, total_found, len(valid_files)) # Wait for all the async events calm down a bit while threading.activeCount() > 100 and not self.shuttingDown(): @@ -472,7 +473,7 @@ class Scanner(Plugin): data['size'] = data.get('size', 0) + self.getFileSize(cur_file) data['quality'] = None - quality = 
fireEvent('quality.guess', size = data['size'], files = files, extra = data, single = True) + quality = fireEvent('quality.guess', size = data.get('size'), files = files, extra = data, single = True) # Use the quality that we snatched but check if it matches our guess if release_download and release_download.get('quality'): @@ -633,7 +634,14 @@ class Scanner(Plugin): name_year = self.getReleaseNameYear(identifier, file_name = filename if not group['is_dvd'] else None) if name_year.get('name') and name_year.get('year'): - movie = fireEvent('movie.search', q = '%(name)s %(year)s' % name_year, merge = True, limit = 1) + search_q = '%(name)s %(year)s' % name_year + movie = fireEvent('movie.search', q = search_q, merge = True, limit = 1) + + # Try with other + if len(movie) == 0 and name_year.get('other') and name_year['other'].get('name') and name_year['other'].get('year'): + search_q2 = '%(name)s %(year)s' % name_year.get('other') + if search_q2 != search_q: + movie = fireEvent('movie.search', q = search_q2, merge = True, limit = 1) if len(movie) > 0: imdb_id = movie[0].get('imdb') @@ -851,7 +859,9 @@ class Scanner(Plugin): if key in filename.lower() and key != 'default': return self.resolutions[key] except: - return self.resolutions['default'] + pass + + return self.resolutions['default'] def getGroup(self, file): try: @@ -900,6 +910,7 @@ class Scanner(Plugin): log.debug('Could not detect via guessit "%s": %s', (file_name, traceback.format_exc())) # Backup to simple + release_name = os.path.basename(release_name.replace('\\', '/')) cleaned = ' '.join(re.split('\W+', simplifyString(release_name))) cleaned = re.sub(self.clean, ' ', cleaned) @@ -934,8 +945,11 @@ class Scanner(Plugin): pass if cp_guess.get('year') == guess.get('year') and len(cp_guess.get('name', '')) > len(guess.get('name', '')): + cp_guess['other'] = guess return cp_guess elif guess == {}: + cp_guess['other'] = guess return cp_guess + guess['other'] = cp_guess return guess diff --git 
a/couchpotato/core/plugins/trailer.py b/couchpotato/core/plugins/trailer.py index ae525862..82216b8e 100644 --- a/couchpotato/core/plugins/trailer.py +++ b/couchpotato/core/plugins/trailer.py @@ -32,7 +32,7 @@ class Trailer(Plugin): destination = os.path.join(group['destination_dir'], filename) if not os.path.isfile(destination): trailer_file = fireEvent('file.download', url = trailer, dest = destination, urlopen_kwargs = {'headers': {'User-Agent': 'Quicktime'}}, single = True) - if os.path.getsize(trailer_file) < (1024 * 1024): # Don't trust small trailers (1MB), try next one + if trailer_file and os.path.getsize(trailer_file) < (1024 * 1024): # Don't trust small trailers (1MB), try next one os.unlink(trailer_file) continue else: diff --git a/couchpotato/core/plugins/wizard/static/wizard.js b/couchpotato/core/plugins/wizard/static/wizard.js index fdd7b743..f215dbf4 100644 --- a/couchpotato/core/plugins/wizard/static/wizard.js +++ b/couchpotato/core/plugins/wizard/static/wizard.js @@ -90,7 +90,7 @@ Page.Wizard = new Class({ self.parent(action, params); self.addEvent('create', function(){ - self.order(); + self.orderGroups(); }); self.initialized = true; @@ -106,7 +106,7 @@ Page.Wizard = new Class({ }).delay(1) }, - order: function(){ + orderGroups: function(){ var self = this; var form = self.el.getElement('.uniForm'); diff --git a/couchpotato/core/settings.py b/couchpotato/core/settings.py index a0bcd743..4315ec18 100644 --- a/couchpotato/core/settings.py +++ b/couchpotato/core/settings.py @@ -1,5 +1,4 @@ from __future__ import with_statement -import traceback import ConfigParser from hashlib import md5 @@ -49,7 +48,7 @@ class Settings(object): 'desc': 'Save setting to config file (settings.conf)', 'params': { 'section': {'desc': 'The section name in settings.conf'}, - 'option': {'desc': 'The option name'}, + 'name': {'desc': 'The option name'}, 'value': {'desc': 'The value you want to save'}, } }) @@ -72,15 +71,7 @@ class Settings(object): self.connectEvents() 
def databaseSetup(self): - from couchpotato import get_db - - db = get_db() - - try: - db.add_index(PropertyIndex(db.path, 'property')) - except: - self.log.debug('Index for properties already exists') - db.edit_index(PropertyIndex(db.path, 'property')) + fireEvent('database.setup_index', 'property', PropertyIndex) def parser(self): return self.p diff --git a/couchpotato/environment.py b/couchpotato/environment.py index 8c1ee17c..1000d489 100644 --- a/couchpotato/environment.py +++ b/couchpotato/environment.py @@ -2,6 +2,7 @@ import os from couchpotato.core.database import Database from couchpotato.core.event import fireEvent, addEvent +from couchpotato.core.helpers.encoding import toUnicode from couchpotato.core.loader import Loader from couchpotato.core.settings import Settings @@ -24,8 +25,7 @@ class Env(object): _quiet = False _daemonized = False _desktop = None - _engine = None - _session = None + _http_opener = None ''' Data paths and directories ''' _app_dir = "" @@ -39,8 +39,11 @@ class Env(object): return Env._debug @staticmethod - def get(attr): - return getattr(Env, '_' + attr) + def get(attr, unicode = False): + if unicode: + return toUnicode(getattr(Env, '_' + attr)) + else: + return getattr(Env, '_' + attr) @staticmethod def all(): diff --git a/couchpotato/runner.py b/couchpotato/runner.py index d22f8fc8..e5f9bcab 100644 --- a/couchpotato/runner.py +++ b/couchpotato/runner.py @@ -17,8 +17,8 @@ from couchpotato import KeyHandler, LoginHandler, LogoutHandler from couchpotato.api import NonBlockHandler, ApiHandler from couchpotato.core.event import fireEventAsync, fireEvent from couchpotato.core.helpers.encoding import sp -from couchpotato.core.helpers.variable import getDataDir, tryInt -from scandir import scandir +from couchpotato.core.helpers.variable import getDataDir, tryInt, getFreeSpace +import requests from tornado.httpserver import HTTPServer from tornado.web import Application, StaticFileHandler, RedirectHandler @@ -99,8 +99,8 @@ def 
runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En existing_backups = [] if not os.path.isdir(backup_path): os.makedirs(backup_path) - for root, dirs, files in scandir.walk(backup_path): - for backup_file in files: + for root, dirs, files in os.walk(backup_path): + for backup_file in sorted(files): ints = re.findall('\d+', backup_file) # Delete non zip files @@ -116,7 +116,7 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En # Create new backup new_backup = sp(os.path.join(backup_path, '%s.tar.gz' % int(time.time()))) zipf = tarfile.open(new_backup, 'w:gz') - for root, dirs, files in scandir.walk(db_path): + for root, dirs, files in os.walk(db_path): for zfilename in files: zipf.add(os.path.join(root, zfilename), arcname = 'database/%s' % os.path.join(root[len(db_path) + 1:], zfilename)) zipf.close() @@ -127,13 +127,24 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En else: db.create() + # Force creation of cachedir + log_dir = sp(log_dir) + cache_dir = sp(os.path.join(data_dir, 'cache')) + python_cache = sp(os.path.join(cache_dir, 'python')) + + if not os.path.exists(cache_dir): + os.mkdir(cache_dir) + if not os.path.exists(python_cache): + os.mkdir(python_cache) + # Register environment settings Env.set('app_dir', sp(base_path)) Env.set('data_dir', sp(data_dir)) Env.set('log_path', sp(os.path.join(log_dir, 'CouchPotato.log'))) Env.set('db', db) - Env.set('cache_dir', sp(os.path.join(data_dir, 'cache'))) - Env.set('cache', FileSystemCache(sp(os.path.join(Env.get('cache_dir'), 'python')))) + Env.set('http_opener', requests.Session()) + Env.set('cache_dir', cache_dir) + Env.set('cache', FileSystemCache(python_cache)) Env.set('console_log', options.console_log) Env.set('quiet', options.quiet) Env.set('desktop', desktop) @@ -184,6 +195,15 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En log = CPLog(__name__) log.debug('Started with options 
%s', options) + # Check available space + try: + total_space, available_space = getFreeSpace(data_dir) + if available_space < 100: + log.error('Shutting down as CP needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left', available_space) + return + except: + log.error('Failed getting diskspace: %s', traceback.format_exc()) + def customwarn(message, category, filename, lineno, file = None, line = None): log.warning('%s %s %s line:%s', (category, message, filename, lineno)) warnings.showwarning = customwarn @@ -266,22 +286,23 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En loop = IOLoop.current() # Reload hook - def test(): + def reload_hook(): fireEvent('app.shutdown') - add_reload_hook(test) + add_reload_hook(reload_hook) # Some logging and fire load event try: log.info('Starting server on port %(port)s', config) except: pass fireEventAsync('app.load') + ssl_options = None if config['ssl_cert'] and config['ssl_key']: - server = HTTPServer(application, no_keep_alive = True, ssl_options = { + ssl_options = { 'certfile': config['ssl_cert'], 'keyfile': config['ssl_key'], - }) - else: - server = HTTPServer(application, no_keep_alive = True) + } + + server = HTTPServer(application, no_keep_alive = True, ssl_options = ssl_options) try_restart = True restart_tries = 5 @@ -290,6 +311,9 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En try: server.listen(config['port'], config['host']) loop.start() + server.close_all_connections() + server.stop() + loop.close(all_fds = True) except Exception as e: log.error('Failed starting: %s', traceback.format_exc()) try: @@ -303,6 +327,8 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En continue else: return + except ValueError: + return except: pass diff --git a/couchpotato/static/scripts/api.js b/couchpotato/static/scripts/api.js index 38d18740..410b4886 100644 --- a/couchpotato/static/scripts/api.js +++ 
b/couchpotato/static/scripts/api.js @@ -7,9 +7,9 @@ var ApiClass = new Class({ }, request: function(type, options){ - var self = this; + var self = this, + r_type = self.options.is_remote ? 'JSONP' : 'JSON'; - var r_type = self.options.is_remote ? 'JSONP' : 'JSON'; return new Request[r_type](Object.merge({ 'callbackKey': 'callback_func', 'method': 'get', diff --git a/couchpotato/static/scripts/couchpotato.js b/couchpotato/static/scripts/couchpotato.js index 57f1e077..391c25c0 100644 --- a/couchpotato/static/scripts/couchpotato.js +++ b/couchpotato/static/scripts/couchpotato.js @@ -54,7 +54,7 @@ }, pushState: function(e){ - if((!e.meta && Browser.Platform.mac) || (!e.control && !Browser.Platform.mac)){ + if((!e.meta && Browser.platform.mac) || (!e.control && !Browser.platform.mac)){ (e).preventDefault(); var url = e.target.get('href'); if(History.getPath() != url) @@ -63,7 +63,7 @@ }, isMac: function(){ - return Browser.Platform.mac + return Browser.platform.mac }, createLayout: function(){ @@ -272,11 +272,18 @@ (function(){ - Api.request('app.available', { - 'onFailure': function(){ - self.checkAvailable.delay(1000, self, [delay, onAvailable]); - self.fireEvent('unload'); + var onFailure = function(){ + self.checkAvailable.delay(1000, self, [delay, onAvailable]); + self.fireEvent('unload'); + } + + var request = Api.request('app.available', { + 'timeout': 2000, + 'onTimeout': function(){ + request.cancel(); + onFailure(); }, + 'onFailure': onFailure, 'onSuccess': function(){ if(onAvailable) onAvailable(); @@ -322,7 +329,7 @@ var url = 'http://www.dereferer.org/?' 
+ el.get('href'); - if(el.get('target') == '_blank' || (e.meta && Browser.Platform.mac) || (e.control && !Browser.Platform.mac)) + if(el.get('target') == '_blank' || (e.meta && Browser.platform.mac) || (e.control && !Browser.platform.mac)) window.open(url); else window.location = url; diff --git a/couchpotato/static/scripts/library/mootools.js b/couchpotato/static/scripts/library/mootools.js index 9917ad32..838edee6 100644 --- a/couchpotato/static/scripts/library/mootools.js +++ b/couchpotato/static/scripts/library/mootools.js @@ -20,7 +20,7 @@ description: The heart of MooTools. license: MIT-style license. -copyright: Copyright (c) 2006-2012 [Valerio Proietti](http://mad4milk.net/). +copyright: Copyright (c) 2006-2014 [Valerio Proietti](http://mad4milk.net/). authors: The MooTools production team (http://mootools.net/developers/) @@ -36,8 +36,8 @@ provides: [Core, MooTools, Type, typeOf, instanceOf, Native] (function(){ this.MooTools = { - version: '1.4.5', - build: 'ab8ea8824dc3b24b6666867a2c4ed58ebb762cf0' + version: '1.5.0', + build: '0f7b690afee9349b15909f33016a25d2e4d9f4e3' }; // typeOf, instanceOf @@ -50,7 +50,7 @@ var typeOf = this.typeOf = function(item){ if (item.nodeType == 1) return 'element'; if (item.nodeType == 3) return (/\S/).test(item.nodeValue) ? 
'textnode' : 'whitespace'; } else if (typeof item.length == 'number'){ - if (item.callee) return 'arguments'; + if ('callee' in item) return 'arguments'; if ('item' in item) return 'collection'; } @@ -267,7 +267,7 @@ var force = function(name, object, methods){ if (!methodsEnumerable) for (var i = 0, l = methods.length; i < l; i++){ fn.call(prototype, prototype[methods[i]], methods[i]); } - for (var key in prototype) fn.call(prototype, prototype[key], key) + for (var key in prototype) fn.call(prototype, prototype[key], key); }; } @@ -275,7 +275,7 @@ var force = function(name, object, methods){ }; force('String', String, [ - 'charAt', 'charCodeAt', 'concat', 'indexOf', 'lastIndexOf', 'match', 'quote', 'replace', 'search', + 'charAt', 'charCodeAt', 'concat', 'contains', 'indexOf', 'lastIndexOf', 'match', 'quote', 'replace', 'search', 'slice', 'split', 'substr', 'substring', 'trim', 'toLowerCase', 'toUpperCase' ])('Array', Array, [ 'pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift', 'concat', 'join', 'slice', @@ -325,11 +325,13 @@ Object.each = Object.forEach; Array.implement({ + /**/ forEach: function(fn, bind){ for (var i = 0, l = this.length; i < l; i++){ if (i in this) fn.call(bind, this[i], i, this); } }, + /**/ each: function(fn, bind){ Array.forEach(this, fn, bind); @@ -421,7 +423,7 @@ description: Contains Array Prototypes like each, contains, and erase. license: MIT-style license. -requires: Type +requires: [Type] provides: Array @@ -564,7 +566,7 @@ Array.implement({ if (this.length != 3) return null; var rgb = this.map(function(value){ if (value.length == 1) value += value; - return value.toInt(16); + return parseInt(value, 16); }); return (array) ? rgb : 'rgb(' + rgb + ')'; }, @@ -594,7 +596,7 @@ description: Contains String Prototypes like camelCase, capitalize, test, and to license: MIT-style license. 
-requires: Type +requires: [Type, Array] provides: String @@ -603,14 +605,16 @@ provides: String String.implement({ + // + contains: function(string, index){ + return (index ? String(this).slice(index) : String(this)).indexOf(string) > -1; + }, + // + test: function(regex, params){ return ((typeOf(regex) == 'regexp') ? regex : new RegExp('' + regex, params)).test(this); }, - contains: function(string, separator){ - return (separator) ? (separator + this + separator).indexOf(separator + string + separator) > -1 : String(this).indexOf(string) > -1; - }, - trim: function(){ return String(this).replace(/^\s+|\s+$/g, ''); }, @@ -669,6 +673,8 @@ String.implement({ }); + + /* --- @@ -1063,37 +1069,47 @@ provides: [Browser, Window, Document] var document = this.document; var window = document.window = this; -var ua = navigator.userAgent.toLowerCase(), - platform = navigator.platform.toLowerCase(), - UA = ua.match(/(opera|ie|firefox|chrome|version)[\s\/:]([\w\d\.]+)?.*?(safari|version[\s\/:]([\w\d\.]+)|$)/) || [null, 'unknown', 0], - mode = UA[1] == 'ie' && document.documentMode; +var parse = function(ua, platform){ + ua = ua.toLowerCase(); + platform = (platform ? platform.toLowerCase() : ''); -var Browser = this.Browser = { + var UA = ua.match(/(opera|ie|firefox|chrome|trident|crios|version)[\s\/:]([\w\d\.]+)?.*?(safari|(?:rv[\s\/:]|version[\s\/:])([\w\d\.]+)|$)/) || [null, 'unknown', 0]; - extend: Function.prototype.extend, + if (UA[1] == 'trident'){ + UA[1] = 'ie'; + if (UA[4]) UA[2] = UA[4]; + } else if (UA[1] == 'crios') { + UA[1] = 'chrome'; + } - name: (UA[1] == 'version') ? UA[3] : UA[1], + var platform = ua.match(/ip(?:ad|od|hone)/) ? 'ios' : (ua.match(/(?:webos|android)/) || platform.match(/mac|win|linux/) || ['other'])[0]; + if (platform == 'win') platform = 'windows'; - version: mode || parseFloat((UA[1] == 'opera' && UA[4]) ? UA[4] : UA[2]), + return { + extend: Function.prototype.extend, + name: (UA[1] == 'version') ? 
UA[3] : UA[1], + version: parseFloat((UA[1] == 'opera' && UA[4]) ? UA[4] : UA[2]), + platform: platform + }; +}; - Platform: { - name: ua.match(/ip(?:ad|od|hone)/) ? 'ios' : (ua.match(/(?:webos|android)/) || platform.match(/mac|win|linux/) || ['other'])[0] - }, +var Browser = this.Browser = parse(navigator.userAgent, navigator.platform); +if (Browser.ie){ + Browser.version = document.documentMode; +} + +Browser.extend({ Features: { xpath: !!(document.evaluate), air: !!(window.runtime), query: !!(document.querySelector), json: !!(window.JSON) }, + parseUA: parse +}); - Plugins: {} -}; - -Browser[Browser.name] = true; -Browser[Browser.name + parseInt(Browser.version, 10)] = true; -Browser.Platform[Browser.Platform.name] = true; // Request @@ -1126,18 +1142,7 @@ Browser.Request = (function(){ Browser.Features.xhr = !!(Browser.Request); -// Flash detection -var version = (Function.attempt(function(){ - return navigator.plugins['Shockwave Flash'].description; -}, function(){ - return new ActiveXObject('ShockwaveFlash.ShockwaveFlash').GetVariable('$version'); -}) || '0 r0').match(/\d+/g); - -Browser.Plugins.Flash = { - version: Number(version[0] || '0.' + version[1]) || 0, - build: Number(version[2]) || 0 -}; // String scripts @@ -1756,7 +1761,7 @@ local.setDocument = function(document){ // native matchesSelector function - features.nativeMatchesSelector = root.matchesSelector || /*root.msMatchesSelector ||*/ root.mozMatchesSelector || root.webkitMatchesSelector; + features.nativeMatchesSelector = root.matches || /*root.msMatchesSelector ||*/ root.mozMatchesSelector || root.webkitMatchesSelector; if (features.nativeMatchesSelector) try { // if matchesSelector trows errors on incorrect sintaxes we can use it features.nativeMatchesSelector.call(root, ':slick'); @@ -2590,12 +2595,12 @@ license: MIT-style license. 
requires: [Window, Document, Array, String, Function, Object, Number, Slick.Parser, Slick.Finder] -provides: [Element, Elements, $, $$, Iframe, Selectors] +provides: [Element, Elements, $, $$, IFrame, Selectors] ... */ -var Element = function(tag, props){ +var Element = this.Element = function(tag, props){ var konstructor = Element.Constructors[tag]; if (konstructor) return konstructor(props); if (typeof tag != 'string') return document.id(tag).set(props); @@ -2779,7 +2784,7 @@ Array.mirror(Elements); /**/ var createElementAcceptsHTML; try { - createElementAcceptsHTML = (document.createElement('').name == 'x'); + createElementAcceptsHTML = (document.createElement('').name == 'x'); } catch (e){} var escapeQuotes = function(html){ @@ -3112,7 +3117,28 @@ var pollutesGetAttribute = (function(div){ return (div.getAttribute('random') == 'attribute'); })(document.createElement('div')); -/* */ +var hasCloneBug = (function(test){ + test.innerHTML = ''; + return test.cloneNode(true).firstChild.childNodes.length != 1; +})(document.createElement('div')); +/* */ + +var hasClassList = !!document.createElement('div').classList; + +var classes = function(className){ + var classNames = (className || '').clean().split(" "), uniques = {}; + return classNames.filter(function(className){ + if (className !== "" && !uniques[className]) return uniques[className] = className; + }); +}; + +var addToClassList = function(name){ + this.classList.add(name); +}; + +var removeFromClassList = function(name){ + this.classList.remove(name); +}; Element.implement({ @@ -3122,7 +3148,8 @@ Element.implement({ setter(this, value); } else { /* */ - if (pollutesGetAttribute) var attributeWhiteList = this.retrieve('$attributeWhiteList', {}); + var attributeWhiteList; + if (pollutesGetAttribute) attributeWhiteList = this.retrieve('$attributeWhiteList', {}); /* */ if (value == null){ @@ -3194,17 +3221,27 @@ Element.implement({ return this; }, - hasClass: function(className){ + hasClass: hasClassList ? 
function(className){ + return this.classList.contains(className); + } : function(className){ return this.className.clean().contains(className, ' '); }, - addClass: function(className){ - if (!this.hasClass(className)) this.className = (this.className + ' ' + className).clean(); + addClass: hasClassList ? function(className){ + classes(className).forEach(addToClassList, this); + return this; + } : function(className){ + this.className = classes(className + ' ' + this.className).join(' '); return this; }, - removeClass: function(className){ - this.className = this.className.replace(new RegExp('(^|\\s)' + className + '(?:\\s|$)'), '$1'); + removeClass: hasClassList ? function(className){ + classes(className).forEach(removeFromClassList, this); + return this; + } : function(className){ + var classNames = classes(this.className); + classes(className).forEach(classNames.erase, classNames); + this.className = classNames.join(' '); return this; }, @@ -3279,6 +3316,37 @@ Element.implement({ }); + +// appendHTML + +var appendInserters = { + before: 'beforeBegin', + after: 'afterEnd', + bottom: 'beforeEnd', + top: 'afterBegin', + inside: 'beforeEnd' +}; + +Element.implement('appendHTML', ('insertAdjacentHTML' in document.createElement('div')) ? 
function(html, where){ + this.insertAdjacentHTML(appendInserters[where || 'bottom'], html); + return this; +} : function(html, where){ + var temp = new Element('div', {html: html}), + children = temp.childNodes, + fragment = temp.firstChild; + + if (!fragment) return this; + if (children.length > 1){ + fragment = document.createDocumentFragment(); + for (var i = 0, l = children.length; i < l; i++){ + fragment.appendChild(children[i]); + } + } + + inserters[where || 'bottom'](fragment, this); + return this; +}); + var collected = {}, storage = {}; var get = function(uid){ @@ -3344,7 +3412,7 @@ Element.implement({ } /**/ - if (Browser.ie){ + if (hasCloneBug){ var co = clone.getElementsByTagName('object'), to = this.getElementsByTagName('object'); for (i = co.length; i--;) co[i].outerHTML = to[i].outerHTML; } @@ -3357,13 +3425,7 @@ Element.implement({ [Element, Window, Document].invoke('implement', { addListener: function(type, fn){ - if (type == 'unload'){ - var old = fn, self = this; - fn = function(){ - self.removeListener('unload', fn); - old(); - }; - } else { + if (window.attachEvent && !window.addEventListener){ collected[Slick.uidOf(this)] = this; } if (this.addEventListener) this.addEventListener(type, fn, !!arguments[2]); @@ -3398,10 +3460,14 @@ Element.implement({ }); /**/ -if (window.attachEvent && !window.addEventListener) window.addListener('unload', function(){ - Object.each(collected, clean); - if (window.CollectGarbage) CollectGarbage(); -}); +if (window.attachEvent && !window.addEventListener){ + var gc = function(){ + Object.each(collected, clean); + if (window.CollectGarbage) CollectGarbage(); + window.removeListener('unload', gc); + } + window.addListener('unload', gc); +} /**/ Element.Properties = {}; @@ -3446,11 +3512,13 @@ Element.Properties.html = { }; +var supportsHTML5Elements = true, supportsTableInnerHTML = true, supportsTRInnerHTML = true; + /**/ // technique by jdbarlett - http://jdbartlett.com/innershiv/ var div = 
document.createElement('div'); div.innerHTML = ''; -var supportsHTML5Elements = (div.childNodes.length == 1); +supportsHTML5Elements = (div.childNodes.length == 1); if (!supportsHTML5Elements){ var tags = 'abbr article aside audio canvas datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video'.split(' '), fragment = document.createDocumentFragment(), l = tags.length; @@ -3460,7 +3528,7 @@ div = null; /**/ /**/ -var supportsTableInnerHTML = Function.attempt(function(){ +supportsTableInnerHTML = Function.attempt(function(){ var table = document.createElement('table'); table.innerHTML = ''; return true; @@ -3469,7 +3537,7 @@ var supportsTableInnerHTML = Function.attempt(function(){ /**/ var tr = document.createElement('tr'), html = ''; tr.innerHTML = html; -var supportsTRInnerHTML = (tr.innerHTML == html); +supportsTRInnerHTML = (tr.innerHTML == html); tr = null; /**/ @@ -3514,11 +3582,12 @@ if (testForm.firstChild.value != 's') Element.Properties.value = { var tag = this.get('tag'); if (tag != 'select') return this.setProperty('value', value); var options = this.getElements('option'); + value = String(value); for (var i = 0; i < options.length; i++){ var option = options[i], attr = option.getAttributeNode('value'), optionValue = (attr && attr.specified) ? 
option.value : option.get('text'); - if (optionValue == value) return option.selected = true; + if (optionValue === value) return option.selected = true; } }, @@ -3572,17 +3641,24 @@ provides: Element.Style (function(){ -var html = document.html; +var html = document.html, el; // // Check for oldIE, which does not remove styles when they're set to null -var el = document.createElement('div'); +el = document.createElement('div'); el.style.color = 'red'; el.style.color = null; var doesNotRemoveStyles = el.style.color == 'red'; + +// check for oldIE, which returns border* shorthand styles in the wrong order (color-width-style instead of width-style-color) +var border = '1px solid #123abc'; +el.style.border = border; +var returnsBordersInWrongOrder = el.style.border != border; el = null; // +var hasGetComputedStyle = !!window.getComputedStyle; + Element.Properties.styles = {set: function(styles){ this.setStyles(styles); }}; @@ -3596,16 +3672,25 @@ var setVisibility = function(element, opacity){ element.style.visibility = opacity > 0 || opacity == null ? 'visible' : 'hidden'; }; +// +var setFilter = function(element, regexp, value){ + var style = element.style, + filter = style.filter || element.getComputedStyle('filter') || ''; + style.filter = (regexp.test(filter) ? filter.replace(regexp, value) : filter + ' ' + value).trim(); + if (!style.filter) style.removeAttribute('filter'); +}; +// + var setOpacity = (hasOpacity ? function(element, opacity){ element.style.opacity = opacity; } : (hasFilter ? function(element, opacity){ - var style = element.style; - if (!element.currentStyle || !element.currentStyle.hasLayout) style.zoom = 1; - if (opacity == null || opacity == 1) opacity = ''; - else opacity = 'alpha(opacity=' + (opacity * 100).limit(0, 100).round() + ')'; - var filter = style.filter || element.getComputedStyle('filter') || ''; - style.filter = reAlpha.test(filter) ? 
filter.replace(reAlpha, opacity) : filter + opacity; - if (!style.filter) style.removeAttribute('filter'); + if (!element.currentStyle || !element.currentStyle.hasLayout) element.style.zoom = 1; + if (opacity == null || opacity == 1){ + setFilter(element, reAlpha, ''); + if (opacity == 1 && getOpacity(element) != 1) setFilter(element, reAlpha, 'alpha(opacity=100)'); + } else { + setFilter(element, reAlpha, 'alpha(opacity=' + (opacity * 100).limit(0, 100).round() + ')'); + } } : setVisibility)); var getOpacity = (hasOpacity ? function(element){ @@ -3622,15 +3707,27 @@ var getOpacity = (hasOpacity ? function(element){ return opacity; })); -var floatName = (html.style.cssFloat == null) ? 'styleFloat' : 'cssFloat'; +var floatName = (html.style.cssFloat == null) ? 'styleFloat' : 'cssFloat', + namedPositions = {left: '0%', top: '0%', center: '50%', right: '100%', bottom: '100%'}, + hasBackgroundPositionXY = (html.style.backgroundPositionX != null); + +// +var removeStyle = function(style, property){ + if (property == 'backgroundPosition'){ + style.removeAttribute(property + 'X'); + property += 'Y'; + } + style.removeAttribute(property); +}; +// Element.implement({ getComputedStyle: function(property){ - if (this.currentStyle) return this.currentStyle[property.camelCase()]; + if (!hasGetComputedStyle && this.currentStyle) return this.currentStyle[property.camelCase()]; var defaultView = Element.getDocument(this).defaultView, computed = defaultView ? defaultView.getComputedStyle(this, null) : null; - return (computed) ? computed.getPropertyValue((property == floatName) ? 'float' : property.hyphenate()) : null; + return (computed) ? computed.getPropertyValue((property == floatName) ? 
'float' : property.hyphenate()) : ''; }, setStyle: function(property, value){ @@ -3652,7 +3749,7 @@ Element.implement({ this.style[property] = value; // if ((value == '' || value == null) && doesNotRemoveStyles && this.style.removeAttribute){ - this.style.removeAttribute(property); + removeStyle(this.style, property); } // return this; @@ -3663,20 +3760,25 @@ Element.implement({ property = (property == 'float' ? floatName : property).camelCase(); var result = this.style[property]; if (!result || property == 'zIndex'){ - result = []; - for (var style in Element.ShortStyles){ - if (property != style) continue; - for (var s in Element.ShortStyles[style]) result.push(this.getStyle(s)); + if (Element.ShortStyles.hasOwnProperty(property)){ + result = []; + for (var s in Element.ShortStyles[property]) result.push(this.getStyle(s)); return result.join(' '); } result = this.getComputedStyle(property); } + if (hasBackgroundPositionXY && /^backgroundPosition[XY]?$/.test(property)){ + return result.replace(/(top|right|bottom|left)/g, function(position){ + return namedPositions[position]; + }) || '0px'; + } + if (!result && property == 'backgroundPosition') return '0px 0px'; if (result){ result = String(result); var color = result.match(/rgba?\([\d\s,]+\)/); if (color) result = result.replace(color[0], color[0].rgbToHex()); } - if (Browser.opera || Browser.ie){ + if (!hasGetComputedStyle && !this.style[property]){ if ((/^(height|width)$/).test(property) && !(/px$/.test(result))){ var values = (property == 'width') ? 
['left', 'right'] : ['top', 'bottom'], size = 0; values.each(function(value){ @@ -3684,10 +3786,15 @@ Element.implement({ }, this); return this['offset' + property.capitalize()] - size + 'px'; } - if (Browser.ie && (/^border(.+)Width|margin|padding/).test(property) && isNaN(parseFloat(result))){ + if ((/^border(.+)Width|margin|padding/).test(property) && isNaN(parseFloat(result))){ return '0px'; } } + // + if (returnsBordersInWrongOrder && /^border(Top|Right|Bottom|Left)?$/.test(property) && /^#/.test(result)){ + return result.replace(/^(.+)\s(.+)\s(.+)$/, '$2 $3 $1'); + } + // return result; }, @@ -3709,7 +3816,7 @@ Element.implement({ Element.Styles = { left: '@px', top: '@px', bottom: '@px', right: '@px', width: '@px', height: '@px', maxWidth: '@px', maxHeight: '@px', minWidth: '@px', minHeight: '@px', - backgroundColor: 'rgb(@, @, @)', backgroundPosition: '@px @px', color: 'rgb(@, @, @)', + backgroundColor: 'rgb(@, @, @)', backgroundSize: '@px', backgroundPosition: '@px @px', color: 'rgb(@, @, @)', fontSize: '@px', letterSpacing: '@px', lineHeight: '@px', clip: 'rect(@px @px @px @px)', margin: '@px @px @px @px', padding: '@px @px @px @px', border: '@px @ rgb(@, @, @) @px @ rgb(@, @, @) @px @ rgb(@, @, @)', borderWidth: '@px @px @px @px', borderStyle: '@ @ @ @', borderColor: 'rgb(@, @, @) rgb(@, @, @) rgb(@, @, @) rgb(@, @, @)', @@ -3738,6 +3845,7 @@ Element.ShortStyles = {margin: {}, padding: {}, border: {}, borderWidth: {}, bor Short.borderColor[bdc] = Short[bd][bdc] = All[bdc] = 'rgb(@, @, @)'; }); +if (hasBackgroundPositionXY) Element.ShortStyles.backgroundPosition = {backgroundPositionX: '@', backgroundPositionY: '@'}; })(); @@ -3779,7 +3887,7 @@ var DOMEvent = this.DOMEvent = new Type('DOMEvent', function(event, win){ if (type.indexOf('key') == 0){ var code = this.code = (event.which || event.keyCode); this.key = _keys[code]; - if (type == 'keydown'){ + if (type == 'keydown' || type == 'keyup'){ if (code > 111 && code < 124) this.key = 'f' + (code - 111); 
else if (code > 95 && code < 106) this.key = code - 96; } @@ -4001,23 +4109,27 @@ Element.NativeEvents = { gesturestart: 2, gesturechange: 2, gestureend: 2, // gesture focus: 2, blur: 2, change: 2, reset: 2, select: 2, submit: 2, paste: 2, input: 2, //form elements load: 2, unload: 1, beforeunload: 2, resize: 1, move: 1, DOMContentLoaded: 1, readystatechange: 1, //window + hashchange: 1, popstate: 2, // history error: 1, abort: 1, scroll: 1 //misc }; -Element.Events = {mousewheel: { - base: (Browser.firefox) ? 'DOMMouseScroll' : 'mousewheel' -}}; +Element.Events = { + mousewheel: { + base: 'onwheel' in document ? 'wheel' : 'onmousewheel' in document ? 'mousewheel' : 'DOMMouseScroll' + } +}; + +var check = function(event){ + var related = event.relatedTarget; + if (related == null) return true; + if (!related) return false; + return (related != this && related.prefix != 'xul' && typeOf(this) != 'document' && !this.contains(related)); +}; if ('onmouseenter' in document.documentElement){ Element.NativeEvents.mouseenter = Element.NativeEvents.mouseleave = 2; + Element.MouseenterCheck = check; } else { - var check = function(event){ - var related = event.relatedTarget; - if (related == null) return true; - if (!related) return false; - return (related != this && related.prefix != 'xul' && typeOf(this) != 'document' && !this.contains(related)); - }; - Element.Events.mouseenter = { base: 'mouseover', condition: check @@ -4035,12 +4147,12 @@ if (!window.addEventListener){ Element.Events.change = { base: function(){ var type = this.type; - return (this.get('tag') == 'input' && (type == 'radio' || type == 'checkbox')) ? 'propertychange' : 'change' + return (this.get('tag') == 'input' && (type == 'radio' || type == 'checkbox')) ? 
'propertychange' : 'change'; }, condition: function(event){ - return this.type != 'radio' || (event.event.propertyName == 'checked' && this.checked); + return event.type != 'propertychange' || event.event.propertyName == 'checked'; } - } + }; } /**/ @@ -4080,10 +4192,12 @@ var bubbleUp = function(self, match, fn, event, target){ var map = { mouseenter: { - base: 'mouseover' + base: 'mouseover', + condition: Element.MouseenterCheck }, mouseleave: { - base: 'mouseout' + base: 'mouseout', + condition: Element.MouseenterCheck }, focus: { base: 'focus' + (eventListenerSupport ? '' : 'in'), @@ -4190,8 +4304,8 @@ var delegation = { }; var elementEvent = Element.Events[_type]; - if (elementEvent && elementEvent.condition){ - var __match = match, condition = elementEvent.condition; + if (_map.condition || elementEvent && elementEvent.condition){ + var __match = match, condition = _map.condition || elementEvent.condition; match = function(target, event){ return __match(target, event) && condition.call(target, event, type); }; @@ -4226,7 +4340,7 @@ var delegation = { if (_map.remove) _map.remove(this, _uid); delete stored[_uid]; storage[_type] = stored; - return removeEvent.call(this, type, delegator); + return removeEvent.call(this, type, delegator, _map.capture); } var __uid, s; @@ -4344,7 +4458,9 @@ Element.implement({ }, getOffsets: function(){ - if (this.getBoundingClientRect && !Browser.Platform.ios){ + var hasGetBoundingClientRect = this.getBoundingClientRect; + + if (hasGetBoundingClientRect){ var bound = this.getBoundingClientRect(), html = document.id(this.getDocument().documentElement), htmlScroll = html.getScroll(), @@ -4364,27 +4480,9 @@ Element.implement({ position.x += element.offsetLeft; position.y += element.offsetTop; - if (Browser.firefox){ - if (!borderBox(element)){ - position.x += leftBorder(element); - position.y += topBorder(element); - } - var parent = element.parentNode; - if (parent && styleString(parent, 'overflow') != 'visible'){ - position.x += 
leftBorder(parent); - position.y += topBorder(parent); - } - } else if (element != this && Browser.safari){ - position.x += leftBorder(element); - position.y += topBorder(element); - } - element = element.offsetParent; } - if (Browser.firefox && !borderBox(this)){ - position.x -= leftBorder(this); - position.y -= topBorder(this); - } + return position; }, @@ -4666,13 +4764,17 @@ var Fx = this.Fx = new Class({ }, resume: function(){ - if ((this.frame < this.frames) && !this.isRunning()) pushInstance.call(this, this.options.fps); + if (this.isPaused()) pushInstance.call(this, this.options.fps); return this; }, isRunning: function(){ var list = instances[this.options.fps]; return list && list.contains(this); + }, + + isPaused: function(){ + return (this.frame < this.frames) && !this.isRunning(); } }); @@ -4745,7 +4847,7 @@ Fx.CSS = new Class({ from = element.getStyle(property); var unit = this.options.unit; // adapted from: https://github.com/ryanmorr/fx/blob/master/fx.js#L299 - if (unit && from.slice(-unit.length) != unit && parseFloat(from) != 0){ + if (unit && from && typeof from == 'string' && from.slice(-unit.length) != unit && parseFloat(from) != 0){ element.setStyle(property, to + unit); var value = element.getComputedStyle(property); // IE and Opera support pixelLeft or pixelWidth @@ -4817,11 +4919,13 @@ Fx.CSS = new Class({ search: function(selector){ if (Fx.CSS.Cache[selector]) return Fx.CSS.Cache[selector]; var to = {}, selectorTest = new RegExp('^' + selector.escapeRegExp() + '$'); - Array.each(document.styleSheets, function(sheet, j){ - var href = sheet.href; - if (href && href.contains('://') && !href.contains(document.domain)) return; - var rules = sheet.rules || sheet.cssRules; + + var searchStyles = function(rules){ Array.each(rules, function(rule, i){ + if (rule.media){ + searchStyles(rule.rules || rule.cssRules); + return; + } if (!rule.style) return; var selectorText = (rule.selectorText) ? 
rule.selectorText.replace(/^\w+/, function(m){ return m.toLowerCase(); @@ -4833,6 +4937,13 @@ Fx.CSS = new Class({ to[style] = ((/^rgb/).test(value)) ? value.rgbToHex() : value; }); }); + }; + + Array.each(document.styleSheets, function(sheet, j){ + var href = sheet.href; + if (href && href.indexOf('://') > -1 && href.indexOf(document.domain) == -1) return; + var rules = sheet.rules || sheet.cssRules; + searchStyles(rules); }); return Fx.CSS.Cache[selector] = to; } @@ -5369,10 +5480,10 @@ var Request = this.Request = new Class({ if (trimPosition > -1 && (trimPosition = url.indexOf('#')) > -1) url = url.substr(0, trimPosition); if (this.options.noCache) - url += (url.contains('?') ? '&' : '?') + String.uniqueID(); + url += (url.indexOf('?') > -1 ? '&' : '?') + String.uniqueID(); - if (data && method == 'get'){ - url += (url.contains('?') ? '&' : '?') + data; + if (data && (method == 'get' || method == 'delete')){ + url += (url.indexOf('?') > -1 ? '&' : '?') + data; data = null; } @@ -5526,10 +5637,14 @@ JSON.encode = JSON.stringify ? function(obj){ return null; }; +JSON.secure = true; + + JSON.decode = function(string, secure){ if (!string || typeOf(string) != 'string') return null; - - if (secure || JSON.secure){ + + if (secure == null) secure = JSON.secure; + if (secure){ if (JSON.parse) return JSON.parse(string); if (!JSON.validate(string)) throw new Error('JSON could not decode the input; security is enabled and the value is not secure.'); } diff --git a/couchpotato/static/scripts/library/mootools_more.js b/couchpotato/static/scripts/library/mootools_more.js index 77b3a7a1..2ff49cb3 100644 --- a/couchpotato/static/scripts/library/mootools_more.js +++ b/couchpotato/static/scripts/library/mootools_more.js @@ -1,6 +1,16 @@ -// MooTools: the javascript framework. 
-// Load this file's selection again by visiting: http://mootools.net/more/0f75cfbac1aabbedaba7630beef8d10c -// Or build this file again with packager using: packager build More/Events.Pseudos More/Date More/Date.Extras More/Element.Forms More/Element.Position More/Element.Shortcuts More/Fx.Scroll More/Fx.Slide More/Sortables More/Request.JSONP More/Request.Periodical +/* +--- +MooTools: the javascript framework + +web build: + - http://mootools.net/more/0f75cfbac1aabbedaba7630beef8d10c + +packager build: + - packager build More/Events.Pseudos More/Date More/Date.Extras More/Element.Forms More/Element.Position More/Element.Shortcuts More/Fx.Scroll More/Fx.Slide More/Sortables More/Request.JSONP More/Request.Periodical + +... +*/ + /* --- @@ -31,8 +41,8 @@ provides: [MooTools.More] */ MooTools.More = { - 'version': '1.4.0.1', - 'build': 'a4244edf2aa97ac8a196fc96082dd35af1abab87' + version: '1.5.0', + build: '73db5e24e6e9c5c87b3a27aebef2248053f7db37' }; @@ -48,7 +58,7 @@ license: MIT-style license authors: - Arian Stolwijk -requires: [Core/Class.Extras, Core/Slick.Parser, More/MooTools.More] +requires: [Core/Class.Extras, Core/Slick.Parser, MooTools.More] provides: [Events.Pseudos] @@ -211,7 +221,7 @@ authors: requires: - Core/Object - - /MooTools.More + - MooTools.More provides: [Object.Extras] @@ -280,8 +290,8 @@ authors: requires: - Core/Events - - /Object.Extras - - /MooTools.More + - Object.Extras + - MooTools.More provides: [Locale, Lang] @@ -444,7 +454,7 @@ authors: - Aaron Newton requires: - - /Locale + - Locale provides: [Locale.en-US.Date] @@ -1079,7 +1089,7 @@ authors: - Scott Kyle requires: - - /Date + - Date provides: [Date.Extras] @@ -1235,10 +1245,8 @@ var special = { 'S': /[ŠŞŚ]/g, 't': /[ťţ]/g, 'T': /[ŤŢ]/g, - 'ue': /[ü]/g, - 'UE': /[Ü]/g, - 'u': /[ùúûůµ]/g, - 'U': /[ÙÚÛŮ]/g, + 'u': /[ùúûůüµ]/g, + 'U': /[ÙÚÛŮÜ]/g, 'y': /[ÿý]/g, 'Y': /[ŸÝ]/g, 'z': /[žźż]/g, @@ -1263,7 +1271,16 @@ tidy = { '-': /[\u2013]/g, // '--': /[\u2014]/g, '»': /[\uFFFD]/g -}; 
+}, + +conversions = { + ms: 1, + s: 1000, + m: 6e4, + h: 36e5 +}, + +findUnits = /(\d*.?\d+)([msh]+)/; var walk = function(string, replacements){ var result = string, key; @@ -1325,6 +1342,13 @@ String.implement({ if (trail) string += trail; } return string; + }, + + ms: function(){ + // "Borrowed" from https://gist.github.com/1503944 + var units = findUnits.exec(this); + if (units == null) return Number(this); + return Number(units[1]) * conversions[units[2]]; } }); @@ -1348,8 +1372,8 @@ authors: requires: - Core/Element - - /String.Extras - - /MooTools.More + - String.Extras + - MooTools.More provides: [Element.Forms] @@ -1493,7 +1517,7 @@ authors: requires: - Core/Element.Style - Core/Element.Dimensions - - /MooTools.More + - MooTools.More provides: [Element.Measure] @@ -1710,13 +1734,15 @@ var local = Element.Position = { }, setOffsetOption: function(element, options){ - var parentOffset = {x: 0, y: 0}, - offsetParent = element.measure(function(){ - return document.id(this.getOffsetParent()); - }), - parentScroll = offsetParent.getScroll(); + var parentOffset = {x: 0, y: 0}; + var parentScroll = {x: 0, y: 0}; + var offsetParent = element.measure(function(){ + return document.id(this.getOffsetParent()); + }); if (!offsetParent || offsetParent == element.getDocument().body) return; + + parentScroll = offsetParent.getScroll(); parentOffset = offsetParent.measure(function(){ var position = this.getPosition(); if (this.getStyle('position') == 'fixed'){ @@ -1896,7 +1922,7 @@ authors: requires: - Core/Element.Style - - /MooTools.More + - MooTools.More provides: [Element.Shortcuts] @@ -1976,7 +2002,7 @@ requires: - Core/Fx - Core/Element.Event - Core/Element.Dimensions - - /MooTools.More + - MooTools.More provides: [Fx.Scroll] @@ -2014,7 +2040,6 @@ Fx.Scroll = new Class({ set: function(){ var now = Array.flatten(arguments); - if (Browser.firefox) now = [Math.round(now[0]), Math.round(now[1])]; // not needed anymore in newer firefox versions 
this.element.scrollTo(now[0], now[1]); return this; }, @@ -2148,7 +2173,7 @@ authors: requires: - Core/Fx - Core/Element.Style - - /MooTools.More + - MooTools.More provides: [Fx.Slide] @@ -2325,7 +2350,7 @@ requires: - Core/Element.Event - Core/Element.Style - Core/Element.Dimensions - - /MooTools.More + - MooTools.More provides: [Drag] ... @@ -2371,10 +2396,10 @@ var Drag = new Class({ this.mouse = {'now': {}, 'pos': {}}; this.value = {'start': {}, 'now': {}}; - this.selection = (Browser.ie) ? 'selectstart' : 'mousedown'; + this.selection = 'selectstart' in document ? 'selectstart' : 'mousedown'; - if (Browser.ie && !Drag.ondragstartFixed){ + if ('ondragstart' in document && !('FileReader' in window) && !Drag.ondragstartFixed){ document.ondragstart = Function.from(false); Drag.ondragstartFixed = true; } @@ -2559,7 +2584,7 @@ authors: requires: - Core/Element.Dimensions - - /Drag + - Drag provides: [Drag.Move] @@ -2586,10 +2611,7 @@ Drag.Move = new Class({ element = this.element; this.droppables = $$(this.options.droppables); - this.container = document.id(this.options.container); - - if (this.container && typeOf(this.container) != 'element') - this.container = document.id(this.container.getDocument().body); + this.setContainer(this.options.container); if (this.options.style){ if (this.options.modifiers.x == 'left' && this.options.modifiers.y == 'top'){ @@ -2606,6 +2628,13 @@ Drag.Move = new Class({ this.addEvent('start', this.checkDroppables, true); this.overed = null; }, + + setContainer: function(container) { + this.container = document.id(container); + if (this.container && typeOf(this.container) != 'element'){ + this.container = document.id(this.container.getDocument().body); + } + }, start: function(event){ if (this.container) this.options.limit = this.calculateLimit(); @@ -2670,7 +2699,9 @@ Drag.Move = new Class({ if (container != offsetParent){ left += containerMargin.left + offsetParentPadding.left; - top += ((Browser.ie6 || Browser.ie7) ? 
0 : containerMargin.top) + offsetParentPadding.top; + if (!offsetParentPadding.left && left < 0) left = 0; + top += offsetParent == document.body ? 0 : containerMargin.top + offsetParentPadding.top; + if (!offsetParentPadding.top && top < 0) top = 0; } } else { left -= elementMargin.left; @@ -2754,7 +2785,7 @@ authors: requires: - Core/Fx.Morph - - /Drag.Move + - Drag.Move provides: [Sortables] @@ -2773,7 +2804,8 @@ var Sortables = new Class({ clone: false, revert: false, handle: false, - dragOptions: {} + dragOptions: {}, + unDraggableTags: ['button', 'input', 'a', 'textarea', 'select', 'option'] }, initialize: function(lists, options){ @@ -2839,6 +2871,24 @@ var Sortables = new Class({ return list; }, this)); }, + + getDroppableCoordinates: function (element){ + var offsetParent = element.getOffsetParent(); + var position = element.getPosition(offsetParent); + var scroll = { + w: window.getScroll(), + offsetParent: offsetParent.getScroll() + }; + position.x += scroll.offsetParent.x; + position.y += scroll.offsetParent.y; + + if (offsetParent.getStyle('position') == 'fixed'){ + position.x -= scroll.w.x; + position.y -= scroll.w.y; + } + + return position; + }, getClone: function(event, element){ if (!this.options.clone) return new Element(element.tagName).inject(document.body); @@ -2859,7 +2909,7 @@ var Sortables = new Class({ }); } - return clone.inject(this.list).setPosition(element.getPosition(element.getOffsetParent())); + return clone.inject(this.list).setPosition(this.getDroppableCoordinates(this.element)); }, getDroppables: function(){ @@ -2884,7 +2934,7 @@ var Sortables = new Class({ if ( !this.idle || event.rightClick || - ['button', 'input', 'a', 'textarea'].contains(event.target.get('tag')) + (!this.options.handle && this.options.unDraggableTags.contains(event.target.get('tag'))) ) return; this.idle = false; @@ -2915,14 +2965,16 @@ var Sortables = new Class({ end: function(){ this.drag.detach(); this.element.setStyle('opacity', this.opacity); + var self 
= this; if (this.effect){ var dim = this.element.getStyles('width', 'height'), clone = this.clone, - pos = clone.computePosition(this.element.getPosition(this.clone.getOffsetParent())); + pos = clone.computePosition(this.getDroppableCoordinates(clone)); var destroy = function(){ this.removeEvent('cancel', destroy); clone.destroy(); + self.reset(); }; this.effect.element = clone; @@ -2935,8 +2987,9 @@ var Sortables = new Class({ }).addEvent('cancel', destroy).chain(destroy); } else { this.clone.destroy(); + self.reset(); } - this.reset(); + }, reset: function(){ @@ -3125,7 +3178,7 @@ authors: requires: - Core/Request - - /MooTools.More + - MooTools.More provides: [Request.Periodical] diff --git a/couchpotato/static/scripts/library/question.js b/couchpotato/static/scripts/library/question.js index ed53b391..64feadbf 100644 --- a/couchpotato/static/scripts/library/question.js +++ b/couchpotato/static/scripts/library/question.js @@ -1,15 +1,15 @@ var Question = new Class( { initialize : function(question, hint, answers) { - var self = this + var self = this; - self.question = question - self.hint = hint - self.answers = answers + self.question = question; + self.hint = hint; + self.answers = answers; self.createQuestion(); self.answers.each(function(answer) { - self.createAnswer(answer) + self.createAnswer(answer); }) }, @@ -29,14 +29,14 @@ var Question = new Class( { }, createAnswer : function(options) { - var self = this + var self = this; var answer = new Element('a', Object.merge(options, { 'class' : 'answer button '+(options['class'] || '')+(options['cancel'] ? 
' cancel' : '') - })).inject(this.container) + })).inject(this.container); if (options.cancel) { - answer.addEvent('click', self.close.bind(self)) + answer.addEvent('click', self.close.bind(self)); } else if (options.request) { answer.addEvent('click', function(e){ @@ -44,7 +44,7 @@ var Question = new Class( { new Request(Object.merge(options, { 'url': options.href, 'onComplete': function() { - (options.onComplete || function(){})() + (options.onComplete || function(){})(); self.close(); } })).send(); @@ -59,7 +59,7 @@ var Question = new Class( { }, toElement : function() { - return this.container + return this.container; } -}) +}); diff --git a/couchpotato/static/scripts/page/home.js b/couchpotato/static/scripts/page/home.js index 7cde7d2a..792b4a07 100644 --- a/couchpotato/static/scripts/page/home.js +++ b/couchpotato/static/scripts/page/home.js @@ -54,7 +54,8 @@ Page.Home = new Class({ }) ), 'filter': { - 'release_status': 'snatched,seeding,missing,available,downloaded' + 'release_status': 'snatched,missing,available,downloaded,done,seeding', + 'with_tags': 'recent' }, 'limit': null, 'onLoaded': function(){ diff --git a/couchpotato/static/scripts/page/settings.js b/couchpotato/static/scripts/page/settings.js index de5ffcbc..b9f72aba 100644 --- a/couchpotato/static/scripts/page/settings.js +++ b/couchpotato/static/scripts/page/settings.js @@ -27,8 +27,8 @@ Page.Settings = new Class({ }, openTab: function(action){ - var self = this; - var action = (action == 'index' ? 'about' : action) || self.action; + var self = this, + action = (action == 'index' ? 
'about' : action) || self.action; if(self.current) self.toggleTab(self.current, true); @@ -120,7 +120,13 @@ Page.Settings = new Class({ var self = this; self.tabs_container = new Element('ul.tabs'); - self.containers = new Element('form.uniForm.containers').adopt( + self.containers = new Element('form.uniForm.containers', { + 'events': { + 'click:relay(.enabler.disabled h2)': function(e, el){ + el.getPrevious().getElements('.check').fireEvent('click'); + } + } + }).adopt( new Element('label.advanced_toggle').adopt( new Element('span', { 'text': 'Show advanced settings' @@ -285,14 +291,23 @@ Page.Settings = new Class({ }) } + var icon; + if(group.icon){ + icon = new Element('span.icon').grab(new Element('img', { + 'src': 'data:image/png;base64,' + group.icon + })); + } + + var label = new Element('span.group_label', { + 'text': group.label || (group.name).capitalize() + }) return new Element('fieldset', { 'class': (group.advanced ? 'inlineLabels advanced' : 'inlineLabels') + ' group_' + (group.name || '') + ' subtab_' + (group.subtab || '') }).grab( - new Element('h2', { - 'text': group.label || (group.name).capitalize() - }).grab(hint) - ); + new Element('h2').adopt(icon, label, hint) + ); + }, createList: function(content_container){ diff --git a/couchpotato/static/style/main.css b/couchpotato/static/style/main.css index 5d2ed6b0..c7850b22 100644 --- a/couchpotato/static/style/main.css +++ b/couchpotato/static/style/main.css @@ -8,6 +8,7 @@ body, html { padding: 0; background: #4e5969; -webkit-font-smoothing: subpixel-antialiased; + -moz-osx-font-smoothing: grayscale; } body { overflow-y: scroll; } body.noscroll { overflow: hidden; } @@ -52,6 +53,30 @@ input:-moz-placeholder { font-style: italic; } +.tiny_scroll { + overflow: hidden; +} + + .tiny_scroll:hover { + overflow-y: auto; + } + + .tiny_scroll::-webkit-scrollbar { + width: 5px; + } + + .tiny_scroll::-webkit-scrollbar-track { + -webkit-box-shadow: inset 0 0 6px rgba(0,0,0,0.4); + -webkit-border-radius: 5px; 
+ border-radius: 5px; + } + + .tiny_scroll::-webkit-scrollbar-thumb { + -webkit-border-radius: 5px; + border-radius: 5px; + background: rgba(255,255,255,0.3); + } + a img { border:none; } @@ -166,6 +191,7 @@ body > .spinner, .mask{ text-transform: none; line-height: 1; -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; font-size: 15px; color: #FFF; } diff --git a/couchpotato/static/style/settings.css b/couchpotato/static/style/settings.css index 1f77b8e1..7fb1df29 100644 --- a/couchpotato/static/style/settings.css +++ b/couchpotato/static/style/settings.css @@ -75,6 +75,8 @@ color: #edc07f; } .page.show_advanced .advanced { display: block; } + .page.show_advanced span.advanced, + .page.show_advanced input.advanced { display: inline; } .page.settings .tab_content { display: none; @@ -92,6 +94,22 @@ border-bottom: 1px solid #333; box-shadow: 0 1px 0 rgba(255,255,255, 0.15); } + + .page fieldset h2 .icon { + vertical-align: bottom; + position: absolute; + left: -25px; + top: 3px; + background: #FFF; + border-radius: 2.5px; + line-height: 0; + overflow: hidden; + } + + .page fieldset.enabler:hover h2 .icon { + display: none; + } + .page fieldset h2 .hint { font-size: 12px; margin-left: 10px; @@ -160,7 +178,7 @@ padding: 6px 0 0; } - .page .xsmall { width: 20px !important; text-align: center; } + .page .xsmall { width: 25px !important; text-align: center; } .page .enabler { display: block; @@ -200,17 +218,23 @@ .page .option_list .enabler.disabled { display: inline-block; - margin: 3px 3px 3px 20px; - padding: 4px 0; - width: 173px; + padding: 4px 0 5px; + width: 24%; vertical-align: top; } + .page .option_list .enabler:not(.disabled) .icon { + display: none; + } .page .option_list .enabler.disabled h2 { + cursor: pointer; border: none; box-shadow: none; - padding: 0 10px 0 25px; + padding: 0 10px 0 0; font-size: 16px; + position: relative; + left: 25px; + margin-right: 25px; } .page .option_list .enabler:not(.disabled) h2 { diff --git 
a/couchpotato/templates/index.html b/couchpotato/templates/index.html index 211bcf64..4d685fb2 100644 --- a/couchpotato/templates/index.html +++ b/couchpotato/templates/index.html @@ -73,10 +73,10 @@ App.setup({ 'base_url': {{ json_encode(Env.get('web_base')) }}, - 'args': {{ json_encode(Env.get('args')) }}, + 'args': {{ json_encode(Env.get('args', unicode = True)) }}, 'options': {{ json_encode(('%s' % Env.get('options'))) }}, - 'app_dir': {{ json_encode(Env.get('app_dir')) }}, - 'data_dir': {{ json_encode(Env.get('data_dir')) }}, + 'app_dir': {{ json_encode(Env.get('app_dir', unicode = True)) }}, + 'data_dir': {{ json_encode(Env.get('data_dir', unicode = True)) }}, 'pid': {{ json_encode(Env.getPid()) }}, 'userscript_version': {{ json_encode(fireEvent('userscript.get_version', single = True)) }} }); diff --git a/init/couchpotato.fedora.service b/init/couchpotato.fedora.service index 7df166bc..d3b52ba5 100644 --- a/init/couchpotato.fedora.service +++ b/init/couchpotato.fedora.service @@ -1,12 +1,13 @@ [Unit] Description=CouchPotato application instance +After=network.target [Service] -ExecStart=/usr/lib/CouchPotatoServer/CouchPotato.py --daemon +ExecStart=/var/lib/CouchPotatoServer/CouchPotato.py --daemon GuessMainPID=no Type=forking User=couchpotato Group=couchpotato [Install] -WantedBy=multi-user.target \ No newline at end of file +WantedBy=multi-user.target diff --git a/libs/axl/axel.py b/libs/axl/axel.py index d0f069ab..64d29779 100644 --- a/libs/axl/axel.py +++ b/libs/axl/axel.py @@ -1,6 +1,7 @@ # axel.py # # Copyright (C) 2010 Adrian Cristea adrian dot cristea at gmail dotcom +# Edits by Ruud Burger # # Based on an idea by Peter Thatcher, found on # http://www.valuedlessons.com/2008/04/events-in-python.html @@ -11,12 +12,14 @@ # Source: http://pypi.python.org/pypi/axel # Docs: http://packages.python.org/axel -from couchpotato.core.helpers.variable import natsortKey -import Queue +from Queue import Empty, Queue import hashlib import sys import threading +from 
couchpotato.core.helpers.variable import natsortKey + + class Event(object): """ Event object inspired by C# events. Handlers can be registered and @@ -140,7 +143,7 @@ class Event(object): def fire(self, *args, **kwargs): """ Stores all registered handlers in a queue for processing """ - self.queue = Queue.Queue() + self.queue = Queue() result = {} if self.handlers: @@ -239,9 +242,9 @@ class Event(object): order_lock.release() if self.queue.empty(): - raise Queue.Empty + raise Empty - except Queue.Empty: + except Empty: break def _extract(self, queue_item): diff --git a/libs/requests/__init__.py b/libs/requests/__init__.py index d5d258e8..bba19002 100644 --- a/libs/requests/__init__.py +++ b/libs/requests/__init__.py @@ -36,17 +36,17 @@ usage: The other HTTP methods are supported - see `requests.api`. Full documentation is at . -:copyright: (c) 2013 by Kenneth Reitz. +:copyright: (c) 2014 by Kenneth Reitz. :license: Apache 2.0, see LICENSE for more details. """ __title__ = 'requests' -__version__ = '2.1.0' -__build__ = 0x020100 +__version__ = '2.3.0' +__build__ = 0x020300 __author__ = 'Kenneth Reitz' __license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2013 Kenneth Reitz' +__copyright__ = 'Copyright 2014 Kenneth Reitz' # Attempt to enable urllib3's SNI support, if possible try: diff --git a/libs/requests/adapters.py b/libs/requests/adapters.py index b62f64c8..eb7a2d28 100644 --- a/libs/requests/adapters.py +++ b/libs/requests/adapters.py @@ -16,7 +16,7 @@ from .packages.urllib3.response import HTTPResponse from .packages.urllib3.util import Timeout as TimeoutSauce from .compat import urlparse, basestring, urldefrag, unquote from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, - except_on_missing_scheme, get_auth_from_url) + prepend_scheme_if_needed, get_auth_from_url) from .structures import CaseInsensitiveDict from .packages.urllib3.exceptions import MaxRetryError from .packages.urllib3.exceptions import TimeoutError @@ -55,14 +55,16 @@ class 
HTTPAdapter(BaseAdapter): :param pool_connections: The number of urllib3 connection pools to cache. :param pool_maxsize: The maximum number of connections to save in the pool. - :param max_retries: The maximum number of retries each connection should attempt. + :param int max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed connections and + timeouts, never to requests where the server returns a response. :param pool_block: Whether the connection pool should block for connections. Usage:: >>> import requests >>> s = requests.Session() - >>> a = requests.adapters.HTTPAdapter() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) >>> s.mount('http://', a) """ __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', @@ -88,6 +90,11 @@ class HTTPAdapter(BaseAdapter): self.__attrs__) def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # because self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + for attr, value in state.items(): setattr(self, attr, value) @@ -196,13 +203,16 @@ class HTTPAdapter(BaseAdapter): proxy = proxies.get(urlparse(url.lower()).scheme) if proxy: - except_on_missing_scheme(proxy) + proxy = prepend_scheme_if_needed(proxy, 'http') proxy_headers = self.proxy_headers(proxy) if not proxy in self.proxy_manager: self.proxy_manager[proxy] = proxy_from_url( proxy, - proxy_headers=proxy_headers) + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block) conn = self.proxy_manager[proxy].connection_from_url(url) else: @@ -276,10 +286,6 @@ class HTTPAdapter(BaseAdapter): username, password = get_auth_from_url(proxy) if username and password: - # Proxy auth usernames and passwords will be urlencoded, we need - # to decode them. 
- username = unquote(username) - password = unquote(password) headers['Proxy-Authorization'] = _basic_auth_str(username, password) @@ -304,10 +310,7 @@ class HTTPAdapter(BaseAdapter): chunked = not (request.body is None or 'Content-Length' in request.headers) - if stream: - timeout = TimeoutSauce(connect=timeout) - else: - timeout = TimeoutSauce(connect=timeout, read=timeout) + timeout = TimeoutSauce(connect=timeout, read=timeout) try: if not chunked: @@ -366,25 +369,20 @@ class HTTPAdapter(BaseAdapter): conn._put_conn(low_conn) except socket.error as sockerr: - raise ConnectionError(sockerr) + raise ConnectionError(sockerr, request=request) except MaxRetryError as e: - raise ConnectionError(e) + raise ConnectionError(e, request=request) except _ProxyError as e: raise ProxyError(e) except (_SSLError, _HTTPError) as e: if isinstance(e, _SSLError): - raise SSLError(e) + raise SSLError(e, request=request) elif isinstance(e, TimeoutError): - raise Timeout(e) + raise Timeout(e, request=request) else: raise - r = self.build_response(request, resp) - - if not stream: - r.content - - return r + return self.build_response(request, resp) diff --git a/libs/requests/api.py b/libs/requests/api.py index baf43dd6..01d853d5 100644 --- a/libs/requests/api.py +++ b/libs/requests/api.py @@ -26,7 +26,7 @@ def request(method, url, **kwargs): :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) Float describing the timeout of the request. + :param timeout: (optional) Float describing the timeout of the request in seconds. :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. diff --git a/libs/requests/auth.py b/libs/requests/auth.py index 6664cd80..9f831b7a 100644 --- a/libs/requests/auth.py +++ b/libs/requests/auth.py @@ -11,7 +11,6 @@ import os import re import time import hashlib -import logging from base64 import b64encode @@ -19,8 +18,6 @@ from .compat import urlparse, str from .cookies import extract_cookies_to_jar from .utils import parse_dict_header -log = logging.getLogger(__name__) - CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' CONTENT_TYPE_MULTI_PART = 'multipart/form-data' diff --git a/libs/requests/compat.py b/libs/requests/compat.py index 0d61a572..84d703b6 100644 --- a/libs/requests/compat.py +++ b/libs/requests/compat.py @@ -4,7 +4,7 @@ pythoncompat """ -from .packages import charade as chardet +from .packages import chardet import sys @@ -75,7 +75,9 @@ is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. try: import simplejson as json -except ImportError: +except (ImportError, SyntaxError): + # simplejson does not support Python 3.2, it thows a SyntaxError + # because of u'...' Unicode literals. import json # --------- diff --git a/libs/requests/cookies.py b/libs/requests/cookies.py index c465f552..831c49c6 100644 --- a/libs/requests/cookies.py +++ b/libs/requests/cookies.py @@ -198,30 +198,39 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): self.set_cookie(c) return c + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies from the jar. + See itervalues() and iteritems().""" + for cookie in iter(self): + yield cookie.name + def keys(self): """Dict-like keys() that returns a list of names of cookies from the jar. See values() and items().""" - keys = [] + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies from the jar. 
+ See iterkeys() and iteritems().""" for cookie in iter(self): - keys.append(cookie.name) - return keys + yield cookie.value def values(self): """Dict-like values() that returns a list of values of cookies from the jar. See keys() and items().""" - values = [] + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples from the jar. + See iterkeys() and itervalues().""" for cookie in iter(self): - values.append(cookie.value) - return values + yield cookie.name, cookie.value def items(self): """Dict-like items() that returns a list of name-value tuples from the jar. See keys() and values(). Allows client-code to call "dict(RequestsCookieJar) and get a vanilla python dict of key value pairs.""" - items = [] - for cookie in iter(self): - items.append((cookie.name, cookie.value)) - return items + return list(self.iteritems()) def list_domains(self): """Utility method to list all the domains in the jar.""" @@ -378,29 +387,29 @@ def create_cookie(name, value, **kwargs): def morsel_to_cookie(morsel): """Convert a Morsel object into a Cookie containing the one k/v pair.""" + expires = None - if morsel["max-age"]: - expires = time.time() + morsel["max-age"] + if morsel['max-age']: + expires = time.time() + morsel['max-age'] elif morsel['expires']: - expires = morsel['expires'] - if type(expires) == type(""): - time_template = "%a, %d-%b-%Y %H:%M:%S GMT" - expires = time.mktime(time.strptime(expires, time_template)) - c = create_cookie( - name=morsel.key, - value=morsel.value, - version=morsel['version'] or 0, - port=None, - domain=morsel['domain'], - path=morsel['path'], - secure=bool(morsel['secure']), - expires=expires, - discard=False, + time_template = '%a, %d-%b-%Y %H:%M:%S GMT' + expires = time.mktime( + time.strptime(morsel['expires'], time_template)) - time.timezone + return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), + discard=False, + domain=morsel['domain'], 
+ expires=expires, + name=morsel.key, + path=morsel['path'], + port=None, rest={'HttpOnly': morsel['httponly']}, - rfc2109=False,) - return c + rfc2109=False, + secure=bool(morsel['secure']), + value=morsel.value, + version=morsel['version'] or 0, + ) def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): diff --git a/libs/requests/exceptions.py b/libs/requests/exceptions.py index bc42b5ff..a4ee9d63 100644 --- a/libs/requests/exceptions.py +++ b/libs/requests/exceptions.py @@ -7,21 +7,29 @@ requests.exceptions This module contains the set of Requests' exceptions. """ +from .packages.urllib3.exceptions import HTTPError as BaseHTTPError class RequestException(IOError): """There was an ambiguous exception that occurred while handling your request.""" + def __init__(self, *args, **kwargs): + """ + Initialize RequestException with `request` and `response` objects. + """ + response = kwargs.pop('response', None) + self.response = response + self.request = kwargs.pop('request', None) + if (response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super(RequestException, self).__init__(*args, **kwargs) + class HTTPError(RequestException): """An HTTP error occurred.""" - def __init__(self, *args, **kwargs): - """ Initializes HTTPError with optional `response` object. 
""" - self.response = kwargs.pop('response', None) - super(HTTPError, self).__init__(*args, **kwargs) - class ConnectionError(RequestException): """A Connection error occurred.""" @@ -61,3 +69,7 @@ class InvalidURL(RequestException, ValueError): class ChunkedEncodingError(RequestException): """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content""" diff --git a/libs/requests/models.py b/libs/requests/models.py index 34dce181..5aad8ce4 100644 --- a/libs/requests/models.py +++ b/libs/requests/models.py @@ -8,7 +8,6 @@ This module contains the primary objects that power Requests. """ import collections -import logging import datetime from io import BytesIO, UnsupportedOperation @@ -20,9 +19,10 @@ from .cookies import cookiejar_from_dict, get_cookie_header from .packages.urllib3.fields import RequestField from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.util import parse_url +from .packages.urllib3.exceptions import DecodeError from .exceptions import ( HTTPError, RequestException, MissingSchema, InvalidURL, - ChunkedEncodingError) + ChunkedEncodingError, ContentDecodingError) from .utils import ( guess_filename, get_auth_from_url, requote_uri, stream_decode_response_unicode, to_key_val_list, parse_header_links, @@ -30,12 +30,20 @@ from .utils import ( from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, is_py2, chardet, json, builtin_str, basestring, IncompleteRead) +from .status_codes import codes +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. 
+REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_moved, # 307 +) +DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 -log = logging.getLogger(__name__) - class RequestEncodingMixin(object): @property @@ -301,8 +309,8 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): p = PreparedRequest() p.method = self.method p.url = self.url - p.headers = self.headers.copy() - p._cookies = self._cookies.copy() + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = self._cookies.copy() if self._cookies is not None else None p.body = self.body p.hooks = self.hooks return p @@ -400,9 +408,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): is_stream = all([ hasattr(data, '__iter__'), - not isinstance(data, basestring), - not isinstance(data, list), - not isinstance(data, dict) + not isinstance(data, (basestring, list, tuple, dict)) ]) try: @@ -427,7 +433,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): else: if data: body = self._encode_params(data) - if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'): + if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None else: content_type = 'application/x-www-form-urlencoded' @@ -516,7 +522,7 @@ class Response(object): self._content = False self._content_consumed = False - #: Integer Code of responded HTTP Status. + #: Integer Code of responded HTTP Status, e.g. 404 or 200. self.status_code = None #: Case-insensitive Dictionary of Response Headers. @@ -525,7 +531,7 @@ class Response(object): self.headers = CaseInsensitiveDict() #: File-like object representation of response (for advanced usage). - #: Requires that ``stream=True` on the request. + #: Use of ``raw`` requires that ``stream=True`` be set on the request. # This requirement does not apply for use internally to Requests. 
self.raw = None @@ -540,6 +546,7 @@ class Response(object): #: up here. The list is sorted from the oldest to the most recent request. self.history = [] + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". self.reason = None #: A CookieJar of Cookies the server sent back. @@ -549,6 +556,10 @@ class Response(object): #: and the arrival of the response (as a timedelta) self.elapsed = datetime.timedelta(0) + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + def __getstate__(self): # Consume everything; accessing the content attribute makes # sure the content has been fully read. @@ -566,6 +577,7 @@ class Response(object): # pickled objects do not have .raw setattr(self, '_content_consumed', True) + setattr(self, 'raw', None) def __repr__(self): return '' % (self.status_code) @@ -590,10 +602,16 @@ class Response(object): return False return True + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return ('location' in self.headers and self.status_code in REDIRECT_STATI) + @property def apparent_encoding(self): - """The apparent encoding, provided by the lovely Charade library - (Thanks, Ian!).""" + """The apparent encoding, provided by the chardet library""" return chardet.detect(self.content)['encoding'] def iter_content(self, chunk_size=1, decode_unicode=False): @@ -602,20 +620,20 @@ class Response(object): large responses. The chunk size is the number of bytes it should read into memory. This is not necessarily the length of each item returned as decoding can take place. - """ - if self._content_consumed: - # simulate reading small chunks of the content - return iter_slices(self._content, chunk_size) + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ def generate(): try: # Special case for urllib3. 
try: - for chunk in self.raw.stream(chunk_size, - decode_content=True): + for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk except IncompleteRead as e: raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) except AttributeError: # Standard file-like object. while True: @@ -626,12 +644,17 @@ class Response(object): self._content_consumed = True - gen = generate() + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks if decode_unicode: - gen = stream_decode_response_unicode(gen, self) + chunks = stream_decode_response_unicode(chunks, self) - return gen + return chunks def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None): """Iterates over the response data, one line at a time. When @@ -641,8 +664,7 @@ class Response(object): pending = None - for chunk in self.iter_content(chunk_size=chunk_size, - decode_unicode=decode_unicode): + for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): if pending is not None: chunk = pending + chunk @@ -688,7 +710,12 @@ class Response(object): """Content of the response, in unicode. If Response.encoding is None, encoding will be guessed using - ``charade``. + ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. """ # Try charset from content-type @@ -729,7 +756,14 @@ class Response(object): # a best guess). 
encoding = guess_json_utf(self.content) if encoding is not None: - return json.loads(self.content.decode(encoding), **kwargs) + try: + return json.loads(self.content.decode(encoding), **kwargs) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass return json.loads(self.text, **kwargs) @property @@ -765,8 +799,8 @@ class Response(object): raise HTTPError(http_error_msg, response=self) def close(self): - """Closes the underlying file descriptor and releases the connection - back to the pool. + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. *Note: Should not normally need to be called explicitly.* """ diff --git a/libs/requests/packages/README.rst b/libs/requests/packages/README.rst new file mode 100644 index 00000000..c42f376b --- /dev/null +++ b/libs/requests/packages/README.rst @@ -0,0 +1,8 @@ +If you are planning to submit a pull request to requests with any changes in +this library do not go any further. These are independent libraries which we +vendor into requests. Any changes necessary to these libraries must be made in +them and submitted as separate pull requests to those libraries. + +urllib3 pull requests go here: https://github.com/shazow/urllib3 + +chardet pull requests go here: https://github.com/chardet/chardet diff --git a/libs/requests/packages/charade/__main__.py b/libs/requests/packages/charade/__main__.py deleted file mode 100644 index 0fc37c34..00000000 --- a/libs/requests/packages/charade/__main__.py +++ /dev/null @@ -1,7 +0,0 @@ -''' -support ';python -m charade [file2] ...' 
package execution syntax (2.7+) -''' - -from charade import charade_cli - -charade_cli() diff --git a/libs/requests/packages/charade/__init__.py b/libs/requests/packages/chardet/__init__.py similarity index 57% rename from libs/requests/packages/charade/__init__.py rename to libs/requests/packages/chardet/__init__.py index 743c874d..e4f0799d 100644 --- a/libs/requests/packages/charade/__init__.py +++ b/libs/requests/packages/chardet/__init__.py @@ -15,7 +15,7 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### -__version__ = "1.0.3" +__version__ = "2.2.1" from sys import version_info @@ -30,37 +30,3 @@ def detect(aBuf): u.feed(aBuf) u.close() return u.result - -def _description_of(path): - """Return a string describing the probable encoding of a file.""" - from charade.universaldetector import UniversalDetector - - u = UniversalDetector() - for line in open(path, 'rb'): - u.feed(line) - u.close() - result = u.result - if result['encoding']: - return '%s: %s with confidence %s' % (path, - result['encoding'], - result['confidence']) - else: - return '%s: no result' % path - - -def charade_cli(): - """ - Script which takes one or more file paths and reports on their detected - encodings - - Example:: - - % chardetect.py somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - - """ - from sys import argv - for path in argv[1:]: - print(_description_of(path)) - \ No newline at end of file diff --git a/libs/requests/packages/charade/big5freq.py b/libs/requests/packages/chardet/big5freq.py similarity index 100% rename from libs/requests/packages/charade/big5freq.py rename to libs/requests/packages/chardet/big5freq.py diff --git a/libs/requests/packages/charade/big5prober.py b/libs/requests/packages/chardet/big5prober.py similarity index 100% rename from libs/requests/packages/charade/big5prober.py rename to libs/requests/packages/chardet/big5prober.py diff --git 
a/libs/requests/packages/chardet/chardetect.py b/libs/requests/packages/chardet/chardetect.py new file mode 100644 index 00000000..ecd0163b --- /dev/null +++ b/libs/requests/packages/chardet/chardetect.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" +from io import open +from sys import argv, stdin + +from chardet.universaldetector import UniversalDetector + + +def description_of(file, name='stdin'): + """Return a string describing the probable encoding of a file.""" + u = UniversalDetector() + for line in file: + u.feed(line) + u.close() + result = u.result + if result['encoding']: + return '%s: %s with confidence %s' % (name, + result['encoding'], + result['confidence']) + else: + return '%s: no result' % name + + +def main(): + if len(argv) <= 1: + print(description_of(stdin)) + else: + for path in argv[1:]: + with open(path, 'rb') as f: + print(description_of(f, path)) + + +if __name__ == '__main__': + main() diff --git a/libs/requests/packages/charade/chardistribution.py b/libs/requests/packages/chardet/chardistribution.py similarity index 100% rename from libs/requests/packages/charade/chardistribution.py rename to libs/requests/packages/chardet/chardistribution.py diff --git a/libs/requests/packages/charade/charsetgroupprober.py b/libs/requests/packages/chardet/charsetgroupprober.py similarity index 100% rename from libs/requests/packages/charade/charsetgroupprober.py rename to libs/requests/packages/chardet/charsetgroupprober.py diff --git a/libs/requests/packages/charade/charsetprober.py b/libs/requests/packages/chardet/charsetprober.py similarity index 100% rename from libs/requests/packages/charade/charsetprober.py rename to 
libs/requests/packages/chardet/charsetprober.py diff --git a/libs/requests/packages/charade/codingstatemachine.py b/libs/requests/packages/chardet/codingstatemachine.py similarity index 100% rename from libs/requests/packages/charade/codingstatemachine.py rename to libs/requests/packages/chardet/codingstatemachine.py diff --git a/libs/requests/packages/charade/compat.py b/libs/requests/packages/chardet/compat.py similarity index 100% rename from libs/requests/packages/charade/compat.py rename to libs/requests/packages/chardet/compat.py diff --git a/libs/requests/packages/charade/constants.py b/libs/requests/packages/chardet/constants.py similarity index 100% rename from libs/requests/packages/charade/constants.py rename to libs/requests/packages/chardet/constants.py diff --git a/libs/requests/packages/charade/cp949prober.py b/libs/requests/packages/chardet/cp949prober.py similarity index 100% rename from libs/requests/packages/charade/cp949prober.py rename to libs/requests/packages/chardet/cp949prober.py diff --git a/libs/requests/packages/charade/escprober.py b/libs/requests/packages/chardet/escprober.py similarity index 100% rename from libs/requests/packages/charade/escprober.py rename to libs/requests/packages/chardet/escprober.py diff --git a/libs/requests/packages/charade/escsm.py b/libs/requests/packages/chardet/escsm.py similarity index 100% rename from libs/requests/packages/charade/escsm.py rename to libs/requests/packages/chardet/escsm.py diff --git a/libs/requests/packages/charade/eucjpprober.py b/libs/requests/packages/chardet/eucjpprober.py similarity index 100% rename from libs/requests/packages/charade/eucjpprober.py rename to libs/requests/packages/chardet/eucjpprober.py diff --git a/libs/requests/packages/charade/euckrfreq.py b/libs/requests/packages/chardet/euckrfreq.py similarity index 100% rename from libs/requests/packages/charade/euckrfreq.py rename to libs/requests/packages/chardet/euckrfreq.py diff --git 
a/libs/requests/packages/charade/euckrprober.py b/libs/requests/packages/chardet/euckrprober.py similarity index 100% rename from libs/requests/packages/charade/euckrprober.py rename to libs/requests/packages/chardet/euckrprober.py diff --git a/libs/requests/packages/charade/euctwfreq.py b/libs/requests/packages/chardet/euctwfreq.py similarity index 100% rename from libs/requests/packages/charade/euctwfreq.py rename to libs/requests/packages/chardet/euctwfreq.py diff --git a/libs/requests/packages/charade/euctwprober.py b/libs/requests/packages/chardet/euctwprober.py similarity index 100% rename from libs/requests/packages/charade/euctwprober.py rename to libs/requests/packages/chardet/euctwprober.py diff --git a/libs/requests/packages/charade/gb2312freq.py b/libs/requests/packages/chardet/gb2312freq.py similarity index 100% rename from libs/requests/packages/charade/gb2312freq.py rename to libs/requests/packages/chardet/gb2312freq.py diff --git a/libs/requests/packages/charade/gb2312prober.py b/libs/requests/packages/chardet/gb2312prober.py similarity index 100% rename from libs/requests/packages/charade/gb2312prober.py rename to libs/requests/packages/chardet/gb2312prober.py diff --git a/libs/requests/packages/charade/hebrewprober.py b/libs/requests/packages/chardet/hebrewprober.py similarity index 100% rename from libs/requests/packages/charade/hebrewprober.py rename to libs/requests/packages/chardet/hebrewprober.py diff --git a/libs/requests/packages/charade/jisfreq.py b/libs/requests/packages/chardet/jisfreq.py similarity index 100% rename from libs/requests/packages/charade/jisfreq.py rename to libs/requests/packages/chardet/jisfreq.py diff --git a/libs/requests/packages/charade/jpcntx.py b/libs/requests/packages/chardet/jpcntx.py similarity index 99% rename from libs/requests/packages/charade/jpcntx.py rename to libs/requests/packages/chardet/jpcntx.py index 04634422..f7f69ba4 100644 --- a/libs/requests/packages/charade/jpcntx.py +++ 
b/libs/requests/packages/chardet/jpcntx.py @@ -169,7 +169,7 @@ class JapaneseContextAnalysis: def get_confidence(self): # This is just one way to calculate confidence. It works well for me. if self._mTotalRel > MINIMUM_DATA_THRESHOLD: - return float(self._mTotalRel - self._mRelSample[0]) / self._mTotalRel + return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel else: return DONT_KNOW diff --git a/libs/requests/packages/charade/langbulgarianmodel.py b/libs/requests/packages/chardet/langbulgarianmodel.py similarity index 100% rename from libs/requests/packages/charade/langbulgarianmodel.py rename to libs/requests/packages/chardet/langbulgarianmodel.py diff --git a/libs/requests/packages/charade/langcyrillicmodel.py b/libs/requests/packages/chardet/langcyrillicmodel.py similarity index 100% rename from libs/requests/packages/charade/langcyrillicmodel.py rename to libs/requests/packages/chardet/langcyrillicmodel.py diff --git a/libs/requests/packages/charade/langgreekmodel.py b/libs/requests/packages/chardet/langgreekmodel.py similarity index 100% rename from libs/requests/packages/charade/langgreekmodel.py rename to libs/requests/packages/chardet/langgreekmodel.py diff --git a/libs/requests/packages/charade/langhebrewmodel.py b/libs/requests/packages/chardet/langhebrewmodel.py similarity index 100% rename from libs/requests/packages/charade/langhebrewmodel.py rename to libs/requests/packages/chardet/langhebrewmodel.py diff --git a/libs/requests/packages/charade/langhungarianmodel.py b/libs/requests/packages/chardet/langhungarianmodel.py similarity index 100% rename from libs/requests/packages/charade/langhungarianmodel.py rename to libs/requests/packages/chardet/langhungarianmodel.py diff --git a/libs/requests/packages/charade/langthaimodel.py b/libs/requests/packages/chardet/langthaimodel.py similarity index 100% rename from libs/requests/packages/charade/langthaimodel.py rename to libs/requests/packages/chardet/langthaimodel.py diff --git 
a/libs/requests/packages/charade/latin1prober.py b/libs/requests/packages/chardet/latin1prober.py similarity index 98% rename from libs/requests/packages/charade/latin1prober.py rename to libs/requests/packages/chardet/latin1prober.py index 5e2c9f90..ad695f57 100644 --- a/libs/requests/packages/charade/latin1prober.py +++ b/libs/requests/packages/chardet/latin1prober.py @@ -129,7 +129,7 @@ class Latin1Prober(CharSetProber): if total < 0.01: confidence = 0.0 else: - confidence = ((float(self._mFreqCounter[3]) / total) + confidence = ((self._mFreqCounter[3] / total) - (self._mFreqCounter[1] * 20.0 / total)) if confidence < 0.0: confidence = 0.0 diff --git a/libs/requests/packages/charade/mbcharsetprober.py b/libs/requests/packages/chardet/mbcharsetprober.py similarity index 100% rename from libs/requests/packages/charade/mbcharsetprober.py rename to libs/requests/packages/chardet/mbcharsetprober.py diff --git a/libs/requests/packages/charade/mbcsgroupprober.py b/libs/requests/packages/chardet/mbcsgroupprober.py similarity index 100% rename from libs/requests/packages/charade/mbcsgroupprober.py rename to libs/requests/packages/chardet/mbcsgroupprober.py diff --git a/libs/requests/packages/charade/mbcssm.py b/libs/requests/packages/chardet/mbcssm.py similarity index 100% rename from libs/requests/packages/charade/mbcssm.py rename to libs/requests/packages/chardet/mbcssm.py diff --git a/libs/requests/packages/charade/sbcharsetprober.py b/libs/requests/packages/chardet/sbcharsetprober.py similarity index 100% rename from libs/requests/packages/charade/sbcharsetprober.py rename to libs/requests/packages/chardet/sbcharsetprober.py diff --git a/libs/requests/packages/charade/sbcsgroupprober.py b/libs/requests/packages/chardet/sbcsgroupprober.py similarity index 100% rename from libs/requests/packages/charade/sbcsgroupprober.py rename to libs/requests/packages/chardet/sbcsgroupprober.py diff --git a/libs/requests/packages/charade/sjisprober.py 
b/libs/requests/packages/chardet/sjisprober.py similarity index 100% rename from libs/requests/packages/charade/sjisprober.py rename to libs/requests/packages/chardet/sjisprober.py diff --git a/libs/requests/packages/charade/universaldetector.py b/libs/requests/packages/chardet/universaldetector.py similarity index 90% rename from libs/requests/packages/charade/universaldetector.py rename to libs/requests/packages/chardet/universaldetector.py index 7ccea84f..9a03ad3d 100644 --- a/libs/requests/packages/charade/universaldetector.py +++ b/libs/requests/packages/chardet/universaldetector.py @@ -74,10 +74,12 @@ class UniversalDetector: if aBuf[:3] == codecs.BOM: # EF BB BF UTF-8 with BOM self.result = {'encoding': "UTF-8", 'confidence': 1.0} - elif aBuf[:4] in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + elif aBuf[:4] == codecs.BOM_UTF32_LE: # FF FE 00 00 UTF-32, little-endian BOM + self.result = {'encoding': "UTF-32LE", 'confidence': 1.0} + elif aBuf[:4] == codecs.BOM_UTF32_BE: # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32", 'confidence': 1.0} + self.result = {'encoding': "UTF-32BE", 'confidence': 1.0} elif aBuf[:4] == b'\xFE\xFF\x00\x00': # FE FF 00 00 UCS-4, unusual octet order BOM (3412) self.result = { @@ -90,10 +92,12 @@ class UniversalDetector: 'encoding': "X-ISO-10646-UCS-4-2143", 'confidence': 1.0 } - elif aBuf[:2] == codecs.BOM_LE or aBuf[:2] == codecs.BOM_BE: + elif aBuf[:2] == codecs.BOM_LE: # FF FE UTF-16, little endian BOM + self.result = {'encoding': "UTF-16LE", 'confidence': 1.0} + elif aBuf[:2] == codecs.BOM_BE: # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16", 'confidence': 1.0} + self.result = {'encoding': "UTF-16BE", 'confidence': 1.0} self._mGotData = True if self.result['encoding'] and (self.result['confidence'] > 0.0): @@ -113,10 +117,8 @@ class UniversalDetector: if not self._mEscCharSetProber: self._mEscCharSetProber = EscCharSetProber() if self._mEscCharSetProber.feed(aBuf) == 
constants.eFoundIt: - self.result = { - 'encoding': self._mEscCharSetProber.get_charset_name(), - 'confidence': self._mEscCharSetProber.get_confidence() - } + self.result = {'encoding': self._mEscCharSetProber.get_charset_name(), + 'confidence': self._mEscCharSetProber.get_confidence()} self.done = True elif self._mInputState == eHighbyte: if not self._mCharSetProbers: diff --git a/libs/requests/packages/charade/utf8prober.py b/libs/requests/packages/chardet/utf8prober.py similarity index 100% rename from libs/requests/packages/charade/utf8prober.py rename to libs/requests/packages/chardet/utf8prober.py diff --git a/libs/requests/packages/urllib3/_collections.py b/libs/requests/packages/urllib3/_collections.py index 5907b0dc..9cea3a44 100644 --- a/libs/requests/packages/urllib3/_collections.py +++ b/libs/requests/packages/urllib3/_collections.py @@ -4,7 +4,7 @@ # This module is part of urllib3 and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from collections import MutableMapping +from collections import Mapping, MutableMapping try: from threading import RLock except ImportError: # Platform-specific: No threads available @@ -20,9 +20,10 @@ try: # Python 2.7+ from collections import OrderedDict except ImportError: from .packages.ordered_dict import OrderedDict +from .packages.six import itervalues -__all__ = ['RecentlyUsedContainer'] +__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] _Null = object() @@ -101,3 +102,104 @@ class RecentlyUsedContainer(MutableMapping): def keys(self): with self.lock: return self._container.keys() + + +class HTTPHeaderDict(MutableMapping): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. 
+ + Field names are stored and compared case-insensitively in compliance with + RFC 2616. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. + + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. + + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + + If you want to access the raw headers with their original casing + for debugging purposes you can access the private ``._data`` attribute + which is a normal python ``dict`` that maps the case-insensitive key to a + list of tuples stored as (case-sensitive-original-name, value). Using the + structure from above as our example: + + >>> headers._data + {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')], + 'content-length': [('content-length', '7')]} + """ + + def __init__(self, headers=None, **kwargs): + self._data = {} + if headers is None: + headers = {} + self.update(headers, **kwargs) + + def add(self, key, value): + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + """ + self._data.setdefault(key.lower(), []).append((key, value)) + + def getlist(self, key): + """Returns a list of all the values for the named field. 
Returns an + empty list if the key doesn't exist.""" + return self[key].split(', ') if key in self else [] + + def copy(self): + h = HTTPHeaderDict() + for key in self._data: + for rawkey, value in self._data[key]: + h.add(rawkey, value) + return h + + def __eq__(self, other): + if not isinstance(other, Mapping): + return False + other = HTTPHeaderDict(other) + return dict((k1, self[k1]) for k1 in self._data) == \ + dict((k2, other[k2]) for k2 in other._data) + + def __getitem__(self, key): + values = self._data[key.lower()] + return ', '.join(value[1] for value in values) + + def __setitem__(self, key, value): + self._data[key.lower()] = [(key, value)] + + def __delitem__(self, key): + del self._data[key.lower()] + + def __len__(self): + return len(self._data) + + def __iter__(self): + for headers in itervalues(self._data): + yield headers[0][0] + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, dict(self.items())) diff --git a/libs/requests/packages/urllib3/connection.py b/libs/requests/packages/urllib3/connection.py index e240786a..5feb3322 100644 --- a/libs/requests/packages/urllib3/connection.py +++ b/libs/requests/packages/urllib3/connection.py @@ -4,13 +4,14 @@ # This module is part of urllib3 and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php +import sys import socket from socket import timeout as SocketTimeout try: # Python 3 - from http.client import HTTPConnection, HTTPException + from http.client import HTTPConnection as _HTTPConnection, HTTPException except ImportError: - from httplib import HTTPConnection, HTTPException + from httplib import HTTPConnection as _HTTPConnection, HTTPException class DummyConnection(object): "Used to detect a failed ConnectionCls import." @@ -24,9 +25,9 @@ try: # Compiled with SSL? 
pass try: # Python 3 - from http.client import HTTPSConnection + from http.client import HTTPSConnection as _HTTPSConnection except ImportError: - from httplib import HTTPSConnection + from httplib import HTTPSConnection as _HTTPSConnection import ssl BaseSSLError = ssl.SSLError @@ -38,6 +39,7 @@ from .exceptions import ( ConnectTimeoutError, ) from .packages.ssl_match_hostname import match_hostname +from .packages import six from .util import ( assert_fingerprint, resolve_cert_reqs, @@ -45,6 +47,88 @@ from .util import ( ssl_wrap_socket, ) + +port_by_scheme = { + 'http': 80, + 'https': 443, +} + + +class HTTPConnection(_HTTPConnection, object): + """ + Based on httplib.HTTPConnection but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + """ + + default_port = port_by_scheme['http'] + + # By default, disable Nagle's Algorithm. + tcp_nodelay = 1 + + def __init__(self, *args, **kw): + if six.PY3: # Python 3 + kw.pop('strict', None) + if sys.version_info < (2, 7): # Python 2.6 and older + kw.pop('source_address', None) + + # Pre-set source_address in case we have an older Python like 2.6. + self.source_address = kw.get('source_address') + + # Superclass also sets self.source_address in Python 2.7+. + _HTTPConnection.__init__(self, *args, **kw) + + def _new_conn(self): + """ Establish a socket connection and set nodelay settings on it. + + :return: a new socket connection + """ + extra_args = [] + if self.source_address: # Python 2.7+ + extra_args.append(self.source_address) + + conn = socket.create_connection( + (self.host, self.port), self.timeout, *extra_args) + conn.setsockopt( + socket.IPPROTO_TCP, socket.TCP_NODELAY, self.tcp_nodelay) + + return conn + + def _prepare_conn(self, conn): + self.sock = conn + # the _tunnel_host attribute was added in python 2.6.3 (via + # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do + # not have them. 
+ if getattr(self, '_tunnel_host', None): + # TODO: Fix tunnel so it doesn't depend on self.sock state. + self._tunnel() + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + +class HTTPSConnection(HTTPConnection): + default_port = port_by_scheme['https'] + + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw): + + HTTPConnection.__init__(self, host, port, strict=strict, + timeout=timeout, **kw) + + self.key_file = key_file + self.cert_file = cert_file + + # Required property for Google AppEngine 1.9.0 which otherwise causes + # HTTPS requests to go out as HTTP. (See Issue #356) + self._protocol = 'https' + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file) + + class VerifiedHTTPSConnection(HTTPSConnection): """ Based on httplib.HTTPSConnection but wraps the socket with @@ -53,6 +137,7 @@ class VerifiedHTTPSConnection(HTTPSConnection): cert_reqs = None ca_certs = None ssl_version = None + conn_kw = {} def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, @@ -67,31 +152,41 @@ class VerifiedHTTPSConnection(HTTPSConnection): def connect(self): # Add certificate verification + try: sock = socket.create_connection( - address=(self.host, self.port), - timeout=self.timeout, - ) + address=(self.host, self.port), timeout=self.timeout, + **self.conn_kw) except SocketTimeout: - raise ConnectTimeoutError( - self, "Connection to %s timed out. (connect timeout=%s)" % - (self.host, self.timeout)) + raise ConnectTimeoutError( + self, "Connection to %s timed out. 
(connect timeout=%s)" % + (self.host, self.timeout)) + + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, + self.tcp_nodelay) resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs) resolved_ssl_version = resolve_ssl_version(self.ssl_version) - if self._tunnel_host: + hostname = self.host + if getattr(self, '_tunnel_host', None): + # _tunnel_host was added in Python 2.6.3 + # (See: http://hg.python.org/cpython/rev/0f57b30a152f) + self.sock = sock # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() + # Override the host with the one we're requesting data from. + hostname = self._tunnel_host + # Wrap socket using verification with the root certs in # trusted_root_certs self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file, cert_reqs=resolved_cert_reqs, ca_certs=self.ca_certs, - server_hostname=self.host, + server_hostname=hostname, ssl_version=resolved_ssl_version) if resolved_cert_reqs != ssl.CERT_NONE: @@ -100,8 +195,10 @@ class VerifiedHTTPSConnection(HTTPSConnection): self.assert_fingerprint) elif self.assert_hostname is not False: match_hostname(self.sock.getpeercert(), - self.assert_hostname or self.host) + self.assert_hostname or hostname) if ssl: + # Make a copy for testing. 
+ UnverifiedHTTPSConnection = HTTPSConnection HTTPSConnection = VerifiedHTTPSConnection diff --git a/libs/requests/packages/urllib3/connectionpool.py b/libs/requests/packages/urllib3/connectionpool.py index 72011b5a..95a53a7d 100644 --- a/libs/requests/packages/urllib3/connectionpool.py +++ b/libs/requests/packages/urllib3/connectionpool.py @@ -4,6 +4,7 @@ # This module is part of urllib3 and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php +import sys import errno import logging @@ -19,9 +20,11 @@ except ImportError: from .exceptions import ( ClosedPoolError, + ConnectionError, ConnectTimeoutError, EmptyPoolError, HostChangedError, + LocationParseError, MaxRetryError, SSLError, TimeoutError, @@ -31,6 +34,7 @@ from .exceptions import ( from .packages.ssl_match_hostname import CertificateError from .packages import six from .connection import ( + port_by_scheme, DummyConnection, HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, HTTPException, BaseSSLError, @@ -38,7 +42,6 @@ from .connection import ( from .request import RequestMethods from .response import HTTPResponse from .util import ( - assert_fingerprint, get_host, is_connection_dropped, Timeout, @@ -51,12 +54,6 @@ log = logging.getLogger(__name__) _Default = object() -port_by_scheme = { - 'http': 80, - 'https': 443, -} - - ## Pool objects class ConnectionPool(object): @@ -69,6 +66,9 @@ class ConnectionPool(object): QueueCls = LifoQueue def __init__(self, host, port=None): + if host is None: + raise LocationParseError(host) + # httplib doesn't like it when we include brackets in ipv6 addresses host = host.strip('[]') @@ -140,7 +140,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): def __init__(self, host, port=None, strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, - headers=None, _proxy=None, _proxy_headers=None): + headers=None, _proxy=None, _proxy_headers=None, **conn_kw): ConnectionPool.__init__(self, host, port) 
RequestMethods.__init__(self, headers) @@ -167,21 +167,26 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): self.num_connections = 0 self.num_requests = 0 + if sys.version_info < (2, 7): # Python 2.6 and older + conn_kw.pop('source_address', None) + self.conn_kw = conn_kw + def _new_conn(self): """ - Return a fresh :class:`httplib.HTTPConnection`. + Return a fresh :class:`HTTPConnection`. """ self.num_connections += 1 log.info("Starting new HTTP connection (%d): %s" % (self.num_connections, self.host)) - extra_params = {} - if not six.PY3: # Python 2 - extra_params['strict'] = self.strict - - return self.ConnectionCls(host=self.host, port=self.port, + conn = self.ConnectionCls(host=self.host, port=self.port, timeout=self.timeout.connect_timeout, - **extra_params) + strict=self.strict, **self.conn_kw) + if self.proxy is not None: + # Enable Nagle's algorithm for proxies, to avoid packet + # fragmentation. + conn.tcp_nodelay = 0 + return conn def _get_conn(self, timeout=None): """ @@ -238,8 +243,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): pass except Full: # This should never happen if self.block == True - log.warning("HttpConnectionPool is full, discarding connection: %s" - % self.host) + log.warning( + "Connection pool is full, discarding connection: %s" % + self.host) # Connection never got put back into the pool, close it. if conn: @@ -260,7 +266,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): def _make_request(self, conn, method, url, timeout=_Default, **httplib_request_kw): """ - Perform a request on a given httplib connection object taken from our + Perform a request on a given urllib connection object taken from our pool. :param conn: @@ -371,9 +377,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # TODO: Add optional support for socket.gethostbyname checking. 
scheme, host, port = get_host(url) + # Use explicit default port for comparison when none is given if self.port and not port: - # Use explicit default port for comparison when none is given. port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None return (scheme, host, port) == (self.scheme, self.host, self.port) @@ -412,10 +420,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param retries: Number of retries to allow before raising a MaxRetryError exception. + If `False`, then retries are disabled and any exception is raised + immediately. :param redirect: If True, automatically handle redirects (status codes 301, 302, - 303, 307, 308). Each redirect counts as a retry. + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. :param assert_same_host: If ``True``, will make sure that the host of the pool requests is @@ -449,7 +460,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if headers is None: headers = self.headers - if retries < 0: + if retries < 0 and retries is not False: raise MaxRetryError(self, url) if release_conn is None: @@ -468,6 +479,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): headers = headers.copy() headers.update(self.proxy_headers) + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + try: # Request a connection from the queue conn = self._get_conn(timeout=pool_timeout) @@ -495,38 +510,41 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # ``response.read()``) except Empty: - # Timed out by queue + # Timed out by queue. raise EmptyPoolError(self, "No pool connections are available.") - except BaseSSLError as e: + except (BaseSSLError, CertificateError) as e: + # Release connection unconditionally because there is no way to + # close it externally in case of exception. 
+ release_conn = True raise SSLError(e) - except CertificateError as e: - # Name mismatch - raise SSLError(e) + except (TimeoutError, HTTPException, SocketError) as e: + if conn: + # Discard the connection for these exceptions. It will be + # be replaced during the next _get_conn() call. + conn.close() + conn = None - except TimeoutError as e: - # Connection broken, discard. - conn = None - # Save the error off for retry logic. - err = e + if not retries: + if isinstance(e, TimeoutError): + # TimeoutError is exempt from MaxRetryError-wrapping. + # FIXME: ... Not sure why. Add a reason here. + raise - if retries == 0: - raise + # Wrap unexpected exceptions with the most appropriate + # module-level exception and re-raise. + if isinstance(e, SocketError) and self.proxy: + raise ProxyError('Cannot connect to proxy.', e) - except (HTTPException, SocketError) as e: - if isinstance(e, SocketError) and self.proxy is not None: - raise ProxyError('Cannot connect to proxy. ' - 'Socket error: %s.' % e) + if retries is False: + raise ConnectionError('Connection failed.', e) - # Connection broken, discard. It will be replaced next _get_conn(). - conn = None - # This is necessary so we can access e below - err = e - - if retries == 0: raise MaxRetryError(self, url, e) + # Keep track of the error for the retry warning. + err = e + finally: if release_conn: # Put the connection back to be reused. If the connection is @@ -536,8 +554,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): if not conn: # Try again - log.warn("Retrying (%d attempts remain) after connection " - "broken by '%r': %s" % (retries, err, url)) + log.warning("Retrying (%d attempts remain) after connection " + "broken by '%r': %s" % (retries, err, url)) return self.urlopen(method, url, body, headers, retries - 1, redirect, assert_same_host, timeout=timeout, pool_timeout=pool_timeout, @@ -545,7 +563,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): # Handle redirect? 
redirect_location = redirect and response.get_redirect_location() - if redirect_location: + if redirect_location and retries is not False: if response.status == 303: method = 'GET' log.info("Redirecting %s -> %s" % (url, redirect_location)) @@ -563,7 +581,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): When Python is compiled with the :mod:`ssl` module, then :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, - instead of :class:`httplib.HTTPSConnection`. + instead of :class:`.HTTPSConnection`. :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, ``assert_hostname`` and ``host`` in this order to verify connections. @@ -584,10 +602,14 @@ class HTTPSConnectionPool(HTTPConnectionPool): _proxy=None, _proxy_headers=None, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, ssl_version=None, - assert_hostname=None, assert_fingerprint=None): + assert_hostname=None, assert_fingerprint=None, + **conn_kw): + + if sys.version_info < (2, 7): # Python 2.6 or older + conn_kw.pop('source_address', None) HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, - block, headers, _proxy, _proxy_headers) + block, headers, _proxy, _proxy_headers, **conn_kw) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs @@ -595,6 +617,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): self.ssl_version = ssl_version self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint + self.conn_kw = conn_kw def _prepare_conn(self, conn): """ @@ -610,6 +633,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): assert_hostname=self.assert_hostname, assert_fingerprint=self.assert_fingerprint) conn.ssl_version = self.ssl_version + conn.conn_kw = self.conn_kw if self.proxy is not None: # Python 2.7+ @@ -646,10 +670,15 @@ class HTTPSConnectionPool(HTTPConnectionPool): extra_params = {} if not six.PY3: # Python 2 extra_params['strict'] = self.strict + extra_params.update(self.conn_kw) conn = 
self.ConnectionCls(host=actual_host, port=actual_port, timeout=self.timeout.connect_timeout, **extra_params) + if self.proxy is not None: + # Enable Nagle's algorithm for proxies, to avoid packet + # fragmentation. + conn.tcp_nodelay = 0 return self._prepare_conn(conn) diff --git a/libs/requests/packages/urllib3/contrib/pyopenssl.py b/libs/requests/packages/urllib3/contrib/pyopenssl.py index f78e7170..21a12c68 100644 --- a/libs/requests/packages/urllib3/contrib/pyopenssl.py +++ b/libs/requests/packages/urllib3/contrib/pyopenssl.py @@ -1,4 +1,7 @@ -'''SSL with SNI-support for Python 2. +'''SSL with SNI_-support for Python 2. Follow these instructions if you would +like to verify SSL certificates in Python 2. Note, the default libraries do +*not* do certificate checking; you need to do additional work to validate +certificates yourself. This needs the following packages installed: @@ -6,9 +9,15 @@ This needs the following packages installed: * ndg-httpsclient (tested with 0.3.2) * pyasn1 (tested with 0.1.6) -To activate it call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3`. -This can be done in a ``sitecustomize`` module, or at any other time before -your application begins using ``urllib3``, like this:: +You can install them with the following command: + + pip install pyopenssl ndg-httpsclient pyasn1 + +To activate certificate checking, call +:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code +before you begin making HTTP requests. This can be done in a ``sitecustomize`` +module, or at any other time before your application begins using ``urllib3``, +like this:: try: import urllib3.contrib.pyopenssl @@ -18,13 +27,31 @@ your application begins using ``urllib3``, like this:: Now you can use :mod:`urllib3` as you normally would, and it will support SNI when the required modules are installed. + +Activating this module also has the positive side effect of disabling SSL/TLS +encryption in Python 2 (see `CRIME attack`_). 
+ +If you want to configure the default list of supported cipher suites, you can +set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. + +Module Variables +---------------- + +:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites. + Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES: + ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS`` + +.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication +.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) + ''' from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT -from ndg.httpsclient.subj_alt_name import SubjectAltName +from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName import OpenSSL.SSL from pyasn1.codec.der import decoder as der_decoder -from socket import _fileobject +from pyasn1.type import univ, constraint +from socket import _fileobject, timeout import ssl import select from cStringIO import StringIO @@ -50,6 +77,23 @@ _openssl_verify = { + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } +# A secure default. +# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM over any AES-CBC for better performance and security, +# - use 3DES as fallback which is secure but slow, +# - disable NULL authentication, MD5 MACs and DSS for security reasons. 
+DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \ + "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \ + "!aNULL:!MD5:!DSS" + orig_util_HAS_SNI = util.HAS_SNI orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket @@ -69,6 +113,17 @@ def extract_from_urllib3(): util.HAS_SNI = orig_util_HAS_SNI +### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. +class SubjectAltName(BaseSubjectAltName): + '''ASN.1 implementation for subjectAltNames support''' + + # There is no limit to how many SAN certificates a certificate may have, + # however this needs to have some limit so we'll set an arbitrarily high + # limit. + sizeSpec = univ.SequenceOf.sizeSpec + \ + constraint.ValueSizeConstraint(1, 1024) + + ### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. def get_subj_alt_name(peer_cert): # Search through extensions @@ -102,6 +157,13 @@ def get_subj_alt_name(peer_cert): class fileobject(_fileobject): + def _wait_for_sock(self): + rd, wd, ed = select.select([self._sock], [], [], + self._sock.gettimeout()) + if not rd: + raise timeout() + + def read(self, size=-1): # Use max, disallow tiny reads in a loop as they are very inefficient. 
# We never leave read() with any leftover data from a new recv() call @@ -119,6 +181,7 @@ class fileobject(_fileobject): try: data = self._sock.recv(rbufsize) except OpenSSL.SSL.WantReadError: + self._wait_for_sock() continue if not data: break @@ -146,6 +209,7 @@ class fileobject(_fileobject): try: data = self._sock.recv(left) except OpenSSL.SSL.WantReadError: + self._wait_for_sock() continue if not data: break @@ -197,6 +261,7 @@ class fileobject(_fileobject): break buffers.append(data) except OpenSSL.SSL.WantReadError: + self._wait_for_sock() continue break return "".join(buffers) @@ -207,6 +272,7 @@ class fileobject(_fileobject): try: data = self._sock.recv(self._rbufsize) except OpenSSL.SSL.WantReadError: + self._wait_for_sock() continue if not data: break @@ -234,7 +300,8 @@ class fileobject(_fileobject): try: data = self._sock.recv(self._rbufsize) except OpenSSL.SSL.WantReadError: - continue + self._wait_for_sock() + continue if not data: break left = size - buf_len @@ -329,6 +396,15 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ctx.load_verify_locations(ca_certs, None) except OpenSSL.SSL.Error as e: raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) + else: + ctx.set_default_verify_paths() + + # Disable TLS compression to migitate CRIME attack (issue #309) + OP_NO_COMPRESSION = 0x20000 + ctx.set_options(OP_NO_COMPRESSION) + + # Set list of supported ciphersuites. + ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST) cnx = OpenSSL.SSL.Connection(ctx, sock) cnx.set_tlsext_host_name(server_hostname) diff --git a/libs/requests/packages/urllib3/exceptions.py b/libs/requests/packages/urllib3/exceptions.py index 98ef9abc..b4df831f 100644 --- a/libs/requests/packages/urllib3/exceptions.py +++ b/libs/requests/packages/urllib3/exceptions.py @@ -44,6 +44,11 @@ class ProxyError(HTTPError): pass +class ConnectionError(HTTPError): + "Raised when a normal connection fails." 
+ pass + + class DecodeError(HTTPError): "Raised when automatic decoding based on Content-Type fails." pass diff --git a/libs/requests/packages/urllib3/fields.py b/libs/requests/packages/urllib3/fields.py index ed017657..da79e929 100644 --- a/libs/requests/packages/urllib3/fields.py +++ b/libs/requests/packages/urllib3/fields.py @@ -15,7 +15,7 @@ def guess_content_type(filename, default='application/octet-stream'): Guess the "Content-Type" of a file. :param filename: - The filename to guess the "Content-Type" of using :mod:`mimetimes`. + The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. """ diff --git a/libs/requests/packages/urllib3/filepost.py b/libs/requests/packages/urllib3/filepost.py index 4575582e..e8b30bdd 100644 --- a/libs/requests/packages/urllib3/filepost.py +++ b/libs/requests/packages/urllib3/filepost.py @@ -46,16 +46,15 @@ def iter_field_objects(fields): def iter_fields(fields): """ + .. deprecated:: 1.6 + Iterate over fields. - .. deprecated :: - - The addition of `~urllib3.fields.RequestField` makes this function - obsolete. Instead, use :func:`iter_field_objects`, which returns - `~urllib3.fields.RequestField` objects, instead. + The addition of :class:`~urllib3.fields.RequestField` makes this function + obsolete. Instead, use :func:`iter_field_objects`, which returns + :class:`~urllib3.fields.RequestField` objects. Supports list of (k, v) tuples and dicts. 
- """ if isinstance(fields, dict): return ((k, v) for k, v in six.iteritems(fields)) diff --git a/libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py index 3aa5b2e1..dd59a75f 100644 --- a/libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py +++ b/libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py @@ -7,7 +7,7 @@ except ImportError: from backports.ssl_match_hostname import CertificateError, match_hostname except ImportError: # Our vendored copy - from _implementation import CertificateError, match_hostname + from ._implementation import CertificateError, match_hostname # Not needed, but documenting what we provide. __all__ = ('CertificateError', 'match_hostname') diff --git a/libs/requests/packages/urllib3/poolmanager.py b/libs/requests/packages/urllib3/poolmanager.py index c16519f8..f18ff2bb 100644 --- a/libs/requests/packages/urllib3/poolmanager.py +++ b/libs/requests/packages/urllib3/poolmanager.py @@ -1,5 +1,5 @@ # urllib3/poolmanager.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt) # # This module is part of urllib3 and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php @@ -176,7 +176,7 @@ class ProxyManager(PoolManager): Behaves just like :class:`PoolManager`, but sends all requests through the defined proxy, using the CONNECT method for HTTPS URLs. - :param poxy_url: + :param proxy_url: The URL of the proxy to be used. 
:param proxy_headers: diff --git a/libs/requests/packages/urllib3/request.py b/libs/requests/packages/urllib3/request.py index 66a9a0e6..2a92cc20 100644 --- a/libs/requests/packages/urllib3/request.py +++ b/libs/requests/packages/urllib3/request.py @@ -45,7 +45,6 @@ class RequestMethods(object): """ _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) - _encode_body_methods = set(['PATCH', 'POST', 'PUT', 'TRACE']) def __init__(self, headers=None): self.headers = headers or {} diff --git a/libs/requests/packages/urllib3/response.py b/libs/requests/packages/urllib3/response.py index 6a1fe1a7..db441828 100644 --- a/libs/requests/packages/urllib3/response.py +++ b/libs/requests/packages/urllib3/response.py @@ -9,6 +9,7 @@ import logging import zlib import io +from ._collections import HTTPHeaderDict from .exceptions import DecodeError from .packages.six import string_types as basestring, binary_type from .util import is_fp_closed @@ -79,7 +80,10 @@ class HTTPResponse(io.IOBase): def __init__(self, body='', headers=None, status=0, version=0, reason=None, strict=0, preload_content=True, decode_content=True, original_response=None, pool=None, connection=None): - self.headers = headers or {} + + self.headers = HTTPHeaderDict() + if headers: + self.headers.update(headers) self.status = status self.version = version self.reason = reason @@ -249,17 +253,9 @@ class HTTPResponse(io.IOBase): with ``original_response=r``. """ - # Normalize headers between different versions of Python - headers = {} + headers = HTTPHeaderDict() for k, v in r.getheaders(): - # Python 3: Header keys are returned capitalised - k = k.lower() - - has_value = headers.get(k) - if has_value: # Python 3: Repeating header keys are unmerged. 
- v = ', '.join([has_value, v]) - - headers[k] = v + headers.add(k, v) # HTTPResponse objects in Python 3 don't have a .strict attribute strict = getattr(r, 'strict', 0) diff --git a/libs/requests/packages/urllib3/util.py b/libs/requests/packages/urllib3/util.py deleted file mode 100644 index 46a0c48d..00000000 --- a/libs/requests/packages/urllib3/util.py +++ /dev/null @@ -1,643 +0,0 @@ -# urllib3/util.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - - -from base64 import b64encode -from binascii import hexlify, unhexlify -from collections import namedtuple -from hashlib import md5, sha1 -from socket import error as SocketError, _GLOBAL_DEFAULT_TIMEOUT -import time - -try: - from select import poll, POLLIN -except ImportError: # `poll` doesn't exist on OSX and other platforms - poll = False - try: - from select import select - except ImportError: # `select` doesn't exist on AppEngine. - select = False - -try: # Test for SSL features - SSLContext = None - HAS_SNI = False - - import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 - from ssl import SSLContext # Modern SSL? - from ssl import HAS_SNI # Has SNI? -except ImportError: - pass - -from .packages import six -from .exceptions import LocationParseError, SSLError, TimeoutStateError - - -_Default = object() -# The default timeout to use for socket connections. This is the attribute used -# by httplib to define the default timeout - - -def current_time(): - """ - Retrieve the current time, this function is mocked out in unit testing. - """ - return time.time() - - -class Timeout(object): - """ - Utility object for storing timeout values. - - Example usage: - - .. code-block:: python - - timeout = urllib3.util.Timeout(connect=2.0, read=7.0) - pool = HTTPConnectionPool('www.google.com', 80, timeout=timeout) - pool.request(...) 
# Etc, etc - - :param connect: - The maximum amount of time to wait for a connection attempt to a server - to succeed. Omitting the parameter will default the connect timeout to - the system default, probably `the global default timeout in socket.py - `_. - None will set an infinite timeout for connection attempts. - - :type connect: integer, float, or None - - :param read: - The maximum amount of time to wait between consecutive - read operations for a response from the server. Omitting - the parameter will default the read timeout to the system - default, probably `the global default timeout in socket.py - `_. - None will set an infinite timeout. - - :type read: integer, float, or None - - :param total: - This combines the connect and read timeouts into one; the read timeout - will be set to the time leftover from the connect attempt. In the - event that both a connect timeout and a total are specified, or a read - timeout and a total are specified, the shorter timeout will be applied. - - Defaults to None. - - :type total: integer, float, or None - - .. note:: - - Many factors can affect the total amount of time for urllib3 to return - an HTTP response. Specifically, Python's DNS resolver does not obey the - timeout specified on the socket. Other factors that can affect total - request time include high CPU load, high swap, the program running at a - low priority level, or other behaviors. The observed running time for - urllib3 to return a response may be greater than the value passed to - `total`. - - In addition, the read and total timeouts only measure the time between - read operations on the socket connecting the client and the server, - not the total amount of time for the request to return a complete - response. For most requests, the timeout is raised because the server - has not sent the first byte in the specified time. 
This is not always - the case; if a server streams one byte every fifteen seconds, a timeout - of 20 seconds will not ever trigger, even though the request will - take several minutes to complete. - - If your goal is to cut off any request after a set amount of wall clock - time, consider having a second "watcher" thread to cut off a slow - request. - """ - - #: A sentinel object representing the default timeout value - DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT - - def __init__(self, total=None, connect=_Default, read=_Default): - self._connect = self._validate_timeout(connect, 'connect') - self._read = self._validate_timeout(read, 'read') - self.total = self._validate_timeout(total, 'total') - self._start_connect = None - - def __str__(self): - return '%s(connect=%r, read=%r, total=%r)' % ( - type(self).__name__, self._connect, self._read, self.total) - - - @classmethod - def _validate_timeout(cls, value, name): - """ Check that a timeout attribute is valid - - :param value: The timeout value to validate - :param name: The name of the timeout attribute to validate. This is used - for clear error messages - :return: the value - :raises ValueError: if the type is not an integer or a float, or if it - is a numeric value less than zero - """ - if value is _Default: - return cls.DEFAULT_TIMEOUT - - if value is None or value is cls.DEFAULT_TIMEOUT: - return value - - try: - float(value) - except (TypeError, ValueError): - raise ValueError("Timeout value %s was %s, but it must be an " - "int or float." % (name, value)) - - try: - if value < 0: - raise ValueError("Attempted to set %s timeout to %s, but the " - "timeout cannot be set to a value less " - "than 0." % (name, value)) - except TypeError: # Python 3 - raise ValueError("Timeout value %s was %s, but it must be an " - "int or float." % (name, value)) - - return value - - @classmethod - def from_float(cls, timeout): - """ Create a new Timeout from a legacy timeout value. 
- - The timeout value used by httplib.py sets the same timeout on the - connect(), and recv() socket requests. This creates a :class:`Timeout` - object that sets the individual timeouts to the ``timeout`` value passed - to this function. - - :param timeout: The legacy timeout value - :type timeout: integer, float, sentinel default object, or None - :return: a Timeout object - :rtype: :class:`Timeout` - """ - return Timeout(read=timeout, connect=timeout) - - def clone(self): - """ Create a copy of the timeout object - - Timeout properties are stored per-pool but each request needs a fresh - Timeout object to ensure each one has its own start/stop configured. - - :return: a copy of the timeout object - :rtype: :class:`Timeout` - """ - # We can't use copy.deepcopy because that will also create a new object - # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to - # detect the user default. - return Timeout(connect=self._connect, read=self._read, - total=self.total) - - def start_connect(self): - """ Start the timeout clock, used during a connect() attempt - - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to start a timer that has been started already. - """ - if self._start_connect is not None: - raise TimeoutStateError("Timeout timer has already been started.") - self._start_connect = current_time() - return self._start_connect - - def get_connect_duration(self): - """ Gets the time elapsed since the call to :meth:`start_connect`. - - :return: the elapsed time - :rtype: float - :raises urllib3.exceptions.TimeoutStateError: if you attempt - to get duration for a timer that hasn't been started. - """ - if self._start_connect is None: - raise TimeoutStateError("Can't get connect duration for timer " - "that has not started.") - return current_time() - self._start_connect - - @property - def connect_timeout(self): - """ Get the value to use when setting a connection timeout. 
- - This will be a positive float or integer, the value None - (never timeout), or the default system timeout. - - :return: the connect timeout - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - """ - if self.total is None: - return self._connect - - if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: - return self.total - - return min(self._connect, self.total) - - @property - def read_timeout(self): - """ Get the value for the read timeout. - - This assumes some time has elapsed in the connection timeout and - computes the read timeout appropriately. - - If self.total is set, the read timeout is dependent on the amount of - time taken by the connect timeout. If the connection time has not been - established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be - raised. - - :return: the value to use for the read timeout - :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None - :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` - has not yet been called on this object. - """ - if (self.total is not None and - self.total is not self.DEFAULT_TIMEOUT and - self._read is not None and - self._read is not self.DEFAULT_TIMEOUT): - # in case the connect timeout has not yet been established. - if self._start_connect is None: - return self._read - return max(0, min(self.total - self.get_connect_duration(), - self._read)) - elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: - return max(0, self.total - self.get_connect_duration()) - else: - return self._read - - -class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])): - """ - Datastructure for representing an HTTP URL. Used as a return value for - :func:`parse_url`. 
- """ - slots = () - - def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): - return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) - - @property - def hostname(self): - """For backwards-compatibility with urlparse. We're nice like that.""" - return self.host - - @property - def request_uri(self): - """Absolute path including the query string.""" - uri = self.path or '/' - - if self.query is not None: - uri += '?' + self.query - - return uri - - @property - def netloc(self): - """Network location including host and port""" - if self.port: - return '%s:%d' % (self.host, self.port) - return self.host - - -def split_first(s, delims): - """ - Given a string and an iterable of delimiters, split on the first found - delimiter. Return two split parts and the matched delimiter. - - If not found, then the first part is the full input string. - - Example: :: - - >>> split_first('foo/bar?baz', '?/=') - ('foo', 'bar?baz', '/') - >>> split_first('foo/bar?baz', '123') - ('foo/bar?baz', '', None) - - Scales linearly with number of delims. Not ideal for large number of delims. - """ - min_idx = None - min_delim = None - for d in delims: - idx = s.find(d) - if idx < 0: - continue - - if min_idx is None or idx < min_idx: - min_idx = idx - min_delim = d - - if min_idx is None or min_idx < 0: - return s, '', None - - return s[:min_idx], s[min_idx+1:], min_delim - - -def parse_url(url): - """ - Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is - performed to parse incomplete urls. Fields not provided will be None. - - Partly backwards-compatible with :mod:`urlparse`. - - Example: :: - - >>> parse_url('http://google.com/mail/') - Url(scheme='http', host='google.com', port=None, path='/', ...) - >>> parse_url('google.com:80') - Url(scheme=None, host='google.com', port=80, path=None, ...) - >>> parse_url('/foo?bar') - Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
- """ - - # While this code has overlap with stdlib's urlparse, it is much - # simplified for our needs and less annoying. - # Additionally, this implementations does silly things to be optimal - # on CPython. - - scheme = None - auth = None - host = None - port = None - path = None - fragment = None - query = None - - # Scheme - if '://' in url: - scheme, url = url.split('://', 1) - - # Find the earliest Authority Terminator - # (http://tools.ietf.org/html/rfc3986#section-3.2) - url, path_, delim = split_first(url, ['/', '?', '#']) - - if delim: - # Reassemble the path - path = delim + path_ - - # Auth - if '@' in url: - # Last '@' denotes end of auth part - auth, url = url.rsplit('@', 1) - - # IPv6 - if url and url[0] == '[': - host, url = url.split(']', 1) - host += ']' - - # Port - if ':' in url: - _host, port = url.split(':', 1) - - if not host: - host = _host - - if port: - # If given, ports must be integers. - if not port.isdigit(): - raise LocationParseError("Failed to parse: %s" % url) - port = int(port) - else: - # Blank ports are cool, too. (rfc3986#section-3.2.3) - port = None - - elif not host and url: - host = url - - if not path: - return Url(scheme, auth, host, port, path, query, fragment) - - # Fragment - if '#' in path: - path, fragment = path.split('#', 1) - - # Query - if '?' in path: - path, query = path.split('?', 1) - - return Url(scheme, auth, host, port, path, query, fragment) - - -def get_host(url): - """ - Deprecated. Use :func:`.parse_url` instead. - """ - p = parse_url(url) - return p.scheme or 'http', p.hostname, p.port - - -def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, - basic_auth=None, proxy_basic_auth=None): - """ - Shortcuts for generating request headers. - - :param keep_alive: - If ``True``, adds 'connection: keep-alive' header. - - :param accept_encoding: - Can be a boolean, list, or string. - ``True`` translates to 'gzip,deflate'. - List will get joined by comma. - String will be used as provided. 
- - :param user_agent: - String representing the user-agent you want, such as - "python-urllib3/0.6" - - :param basic_auth: - Colon-separated username:password string for 'authorization: basic ...' - auth header. - - :param proxy_basic_auth: - Colon-separated username:password string for 'proxy-authorization: basic ...' - auth header. - - Example: :: - - >>> make_headers(keep_alive=True, user_agent="Batman/1.0") - {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} - >>> make_headers(accept_encoding=True) - {'accept-encoding': 'gzip,deflate'} - """ - headers = {} - if accept_encoding: - if isinstance(accept_encoding, str): - pass - elif isinstance(accept_encoding, list): - accept_encoding = ','.join(accept_encoding) - else: - accept_encoding = 'gzip,deflate' - headers['accept-encoding'] = accept_encoding - - if user_agent: - headers['user-agent'] = user_agent - - if keep_alive: - headers['connection'] = 'keep-alive' - - if basic_auth: - headers['authorization'] = 'Basic ' + \ - b64encode(six.b(basic_auth)).decode('utf-8') - - if proxy_basic_auth: - headers['proxy-authorization'] = 'Basic ' + \ - b64encode(six.b(proxy_basic_auth)).decode('utf-8') - - return headers - - -def is_connection_dropped(conn): # Platform-specific - """ - Returns True if the connection is dropped and should be closed. - - :param conn: - :class:`httplib.HTTPConnection` object. - - Note: For platforms like AppEngine, this will always return ``False`` to - let the platform handle connection recycling transparently for us. - """ - sock = getattr(conn, 'sock', False) - if not sock: # Platform-specific: AppEngine - return False - - if not poll: - if not select: # Platform-specific: AppEngine - return False - - try: - return select([sock], [], [], 0.0)[0] - except SocketError: - return True - - # This version is better on platforms that support it. 
- p = poll() - p.register(sock, POLLIN) - for (fno, ev) in p.poll(0.0): - if fno == sock.fileno(): - # Either data is buffered (bad), or the connection is dropped. - return True - - -def resolve_cert_reqs(candidate): - """ - Resolves the argument to a numeric constant, which can be passed to - the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. - If given a string it is assumed to be the name of the constant in the - :mod:`ssl` module or its abbrevation. - (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. - If it's neither `None` nor a string we assume it is already the numeric - constant which can directly be passed to wrap_socket. - """ - if candidate is None: - return CERT_NONE - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'CERT_' + candidate) - return res - - return candidate - - -def resolve_ssl_version(candidate): - """ - like resolve_cert_reqs - """ - if candidate is None: - return PROTOCOL_SSLv23 - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) - return res - - return candidate - - -def assert_fingerprint(cert, fingerprint): - """ - Checks if given fingerprint matches the supplied certificate. - - :param cert: - Certificate as bytes object. - :param fingerprint: - Fingerprint as string of hexdigits, can be interspersed by colons. - """ - - # Maps the length of a digest to a possible hash function producing - # this digest. - hashfunc_map = { - 16: md5, - 20: sha1 - } - - fingerprint = fingerprint.replace(':', '').lower() - - digest_length, rest = divmod(len(fingerprint), 2) - - if rest or digest_length not in hashfunc_map: - raise SSLError('Fingerprint is of invalid length.') - - # We need encode() here for py32; works on py2 and p33. 
- fingerprint_bytes = unhexlify(fingerprint.encode()) - - hashfunc = hashfunc_map[digest_length] - - cert_digest = hashfunc(cert).digest() - - if not cert_digest == fingerprint_bytes: - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(hexlify(fingerprint_bytes), - hexlify(cert_digest))) - -def is_fp_closed(obj): - """ - Checks whether a given file-like object is closed. - - :param obj: - The file-like object to check. - """ - if hasattr(obj, 'fp'): - # Object is a container for another file-like object that gets released - # on exhaustion (e.g. HTTPResponse) - return obj.fp is None - - return obj.closed - - -if SSLContext is not None: # Python 3.2+ - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - """ - All arguments except `server_hostname` have the same meaning as for - :func:`ssl.wrap_socket` - - :param server_hostname: - Hostname of the expected certificate - """ - context = SSLContext(ssl_version) - context.verify_mode = cert_reqs - if ca_certs: - try: - context.load_verify_locations(ca_certs) - # Py32 raises IOError - # Py33 raises FileNotFoundError - except Exception as e: # Reraise as SSLError - raise SSLError(e) - if certfile: - # FIXME: This block needs a test. 
- context.load_cert_chain(certfile, keyfile) - if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI - return context.wrap_socket(sock, server_hostname=server_hostname) - return context.wrap_socket(sock) - -else: # Python 3.1 and earlier - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - return wrap_socket(sock, keyfile=keyfile, certfile=certfile, - ca_certs=ca_certs, cert_reqs=cert_reqs, - ssl_version=ssl_version) diff --git a/libs/requests/packages/urllib3/util/__init__.py b/libs/requests/packages/urllib3/util/__init__.py new file mode 100644 index 00000000..a40185ee --- /dev/null +++ b/libs/requests/packages/urllib3/util/__init__.py @@ -0,0 +1,27 @@ +# urllib3/util/__init__.py +# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt) +# +# This module is part of urllib3 and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from .connection import is_connection_dropped +from .request import make_headers +from .response import is_fp_closed +from .ssl_ import ( + SSLContext, + HAS_SNI, + assert_fingerprint, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .timeout import ( + current_time, + Timeout, +) +from .url import ( + get_host, + parse_url, + split_first, + Url, +) diff --git a/libs/requests/packages/urllib3/util/connection.py b/libs/requests/packages/urllib3/util/connection.py new file mode 100644 index 00000000..8deeab5c --- /dev/null +++ b/libs/requests/packages/urllib3/util/connection.py @@ -0,0 +1,45 @@ +from socket import error as SocketError +try: + from select import poll, POLLIN +except ImportError: # `poll` doesn't exist on OSX and other platforms + poll = False + try: + from select import select + except ImportError: # `select` doesn't exist on AppEngine. 
+ select = False + +def is_connection_dropped(conn): # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + + :param conn: + :class:`httplib.HTTPConnection` object. + + Note: For platforms like AppEngine, this will always return ``False`` to + let the platform handle connection recycling transparently for us. + """ + sock = getattr(conn, 'sock', False) + if sock is False: # Platform-specific: AppEngine + return False + if sock is None: # Connection already closed (such as by httplib). + return False + + if not poll: + if not select: # Platform-specific: AppEngine + return False + + try: + return select([sock], [], [], 0.0)[0] + except SocketError: + return True + + # This version is better on platforms that support it. + p = poll() + p.register(sock, POLLIN) + for (fno, ev) in p.poll(0.0): + if fno == sock.fileno(): + # Either data is buffered (bad), or the connection is dropped. + return True + + + diff --git a/libs/requests/packages/urllib3/util/request.py b/libs/requests/packages/urllib3/util/request.py new file mode 100644 index 00000000..d48d6513 --- /dev/null +++ b/libs/requests/packages/urllib3/util/request.py @@ -0,0 +1,68 @@ +from base64 import b64encode + +from ..packages import six + + +ACCEPT_ENCODING = 'gzip,deflate' + + +def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, + basic_auth=None, proxy_basic_auth=None): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. 
+ + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. + + Example: :: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ','.join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers['accept-encoding'] = accept_encoding + + if user_agent: + headers['user-agent'] = user_agent + + if keep_alive: + headers['connection'] = 'keep-alive' + + if basic_auth: + headers['authorization'] = 'Basic ' + \ + b64encode(six.b(basic_auth)).decode('utf-8') + + if proxy_basic_auth: + headers['proxy-authorization'] = 'Basic ' + \ + b64encode(six.b(proxy_basic_auth)).decode('utf-8') + + return headers + + diff --git a/libs/requests/packages/urllib3/util/response.py b/libs/requests/packages/urllib3/util/response.py new file mode 100644 index 00000000..d0325bc6 --- /dev/null +++ b/libs/requests/packages/urllib3/util/response.py @@ -0,0 +1,13 @@ +def is_fp_closed(obj): + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + if hasattr(obj, 'fp'): + # Object is a container for another file-like object that gets released + # on exhaustion (e.g. 
HTTPResponse) + return obj.fp is None + + return obj.closed diff --git a/libs/requests/packages/urllib3/util/ssl_.py b/libs/requests/packages/urllib3/util/ssl_.py new file mode 100644 index 00000000..dee4b876 --- /dev/null +++ b/libs/requests/packages/urllib3/util/ssl_.py @@ -0,0 +1,133 @@ +from binascii import hexlify, unhexlify +from hashlib import md5, sha1 + +from ..exceptions import SSLError + + +try: # Test for SSL features + SSLContext = None + HAS_SNI = False + + import ssl + from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 + from ssl import SSLContext # Modern SSL? + from ssl import HAS_SNI # Has SNI? +except ImportError: + pass + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + # Maps the length of a digest to a possible hash function producing + # this digest. + hashfunc_map = { + 16: md5, + 20: sha1 + } + + fingerprint = fingerprint.replace(':', '').lower() + + digest_length, rest = divmod(len(fingerprint), 2) + + if rest or digest_length not in hashfunc_map: + raise SSLError('Fingerprint is of invalid length.') + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + hashfunc = hashfunc_map[digest_length] + + cert_digest = hashfunc(cert).digest() + + if not cert_digest == fingerprint_bytes: + raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' + .format(hexlify(fingerprint_bytes), + hexlify(cert_digest))) + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_NONE`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbrevation. 
+ (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_NONE + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'CERT_' + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_SSLv23 + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, 'PROTOCOL_' + candidate) + return res + + return candidate + + +if SSLContext is not None: # Python 3.2+ + def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + """ + All arguments except `server_hostname` have the same meaning as for + :func:`ssl.wrap_socket` + + :param server_hostname: + Hostname of the expected certificate + """ + context = SSLContext(ssl_version) + context.verify_mode = cert_reqs + + # Disable TLS compression to migitate CRIME attack (issue #309) + OP_NO_COMPRESSION = 0x20000 + context.options |= OP_NO_COMPRESSION + + if ca_certs: + try: + context.load_verify_locations(ca_certs) + # Py32 raises IOError + # Py33 raises FileNotFoundError + except Exception as e: # Reraise as SSLError + raise SSLError(e) + if certfile: + # FIXME: This block needs a test. 
+ context.load_cert_chain(certfile, keyfile) + if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI + return context.wrap_socket(sock, server_hostname=server_hostname) + return context.wrap_socket(sock) + +else: # Python 3.1 and earlier + def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + return wrap_socket(sock, keyfile=keyfile, certfile=certfile, + ca_certs=ca_certs, cert_reqs=cert_reqs, + ssl_version=ssl_version) diff --git a/libs/requests/packages/urllib3/util/timeout.py b/libs/requests/packages/urllib3/util/timeout.py new file mode 100644 index 00000000..4f947cb2 --- /dev/null +++ b/libs/requests/packages/urllib3/util/timeout.py @@ -0,0 +1,234 @@ +from socket import _GLOBAL_DEFAULT_TIMEOUT +import time + +from ..exceptions import TimeoutStateError + + +def current_time(): + """ + Retrieve the current time, this function is mocked out in unit testing. + """ + return time.time() + + +_Default = object() +# The default timeout to use for socket connections. This is the attribute used +# by httplib to define the default timeout + + +class Timeout(object): + """ + Utility object for storing timeout values. + + Example usage: + + .. code-block:: python + + timeout = urllib3.util.Timeout(connect=2.0, read=7.0) + pool = HTTPConnectionPool('www.google.com', 80, timeout=timeout) + pool.request(...) # Etc, etc + + :param connect: + The maximum amount of time to wait for a connection attempt to a server + to succeed. Omitting the parameter will default the connect timeout to + the system default, probably `the global default timeout in socket.py + `_. + None will set an infinite timeout for connection attempts. + + :type connect: integer, float, or None + + :param read: + The maximum amount of time to wait between consecutive + read operations for a response from the server. 
Omitting + the parameter will default the read timeout to the system + default, probably `the global default timeout in socket.py + `_. + None will set an infinite timeout. + + :type read: integer, float, or None + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: integer, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. Specifically, Python's DNS resolver does not obey the + timeout specified on the socket. Other factors that can affect total + request time include high CPU load, high swap, the program running at a + low priority level, or other behaviors. The observed running time for + urllib3 to return a response may be greater than the value passed to + `total`. + + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not ever trigger, even though the request will + take several minutes to complete. + + If your goal is to cut off any request after a set amount of wall clock + time, consider having a second "watcher" thread to cut off a slow + request. 
+ """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT + + def __init__(self, total=None, connect=_Default, read=_Default): + self._connect = self._validate_timeout(connect, 'connect') + self._read = self._validate_timeout(read, 'read') + self.total = self._validate_timeout(total, 'total') + self._start_connect = None + + def __str__(self): + return '%s(connect=%r, read=%r, total=%r)' % ( + type(self).__name__, self._connect, self._read, self.total) + + + @classmethod + def _validate_timeout(cls, value, name): + """ Check that a timeout attribute is valid + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is used + for clear error messages + :return: the value + :raises ValueError: if the type is not an integer or a float, or if it + is a numeric value less than zero + """ + if value is _Default: + return cls.DEFAULT_TIMEOUT + + if value is None or value is cls.DEFAULT_TIMEOUT: + return value + + try: + float(value) + except (TypeError, ValueError): + raise ValueError("Timeout value %s was %s, but it must be an " + "int or float." % (name, value)) + + try: + if value < 0: + raise ValueError("Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than 0." % (name, value)) + except TypeError: # Python 3 + raise ValueError("Timeout value %s was %s, but it must be an " + "int or float." % (name, value)) + + return value + + @classmethod + def from_float(cls, timeout): + """ Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value passed + to this function. 
+ + :param timeout: The legacy timeout value + :type timeout: integer, float, sentinel default object, or None + :return: a Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self): + """ Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. + + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. + return Timeout(connect=self._connect, read=self._read, + total=self.total) + + def start_connect(self): + """ Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. + """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = current_time() + return self._start_connect + + def get_connect_duration(self): + """ Gets the time elapsed since the call to :meth:`start_connect`. + + :return: the elapsed time + :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError("Can't get connect duration for timer " + "that has not started.") + return current_time() - self._start_connect + + @property + def connect_timeout(self): + """ Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. 
+ + :return: the connect timeout + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) + + @property + def read_timeout(self): + """ Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: the value to use for the read timeout + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if (self.total is not None and + self.total is not self.DEFAULT_TIMEOUT and + self._read is not None and + self._read is not self.DEFAULT_TIMEOUT): + # in case the connect timeout has not yet been established. + if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), + self._read)) + elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self._read diff --git a/libs/requests/packages/urllib3/util/url.py b/libs/requests/packages/urllib3/util/url.py new file mode 100644 index 00000000..362d2160 --- /dev/null +++ b/libs/requests/packages/urllib3/util/url.py @@ -0,0 +1,162 @@ +from collections import namedtuple + +from ..exceptions import LocationParseError + + +class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])): + """ + Datastructure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. 
+ """ + slots = () + + def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): + return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or '/' + + if self.query is not None: + uri += '?' + self.query + + return uri + + @property + def netloc(self): + """Network location including host and port""" + if self.port: + return '%s:%d' % (self.host, self.port) + return self.host + + +def split_first(s, delims): + """ + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example: :: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, '', None + + return s[:min_idx], s[min_idx+1:], min_delim + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + + Partly backwards-compatible with :mod:`urlparse`. + + Example: :: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
+ """ + + # While this code has overlap with stdlib's urlparse, it is much + # simplified for our needs and less annoying. + # Additionally, this implementations does silly things to be optimal + # on CPython. + + scheme = None + auth = None + host = None + port = None + path = None + fragment = None + query = None + + # Scheme + if '://' in url: + scheme, url = url.split('://', 1) + + # Find the earliest Authority Terminator + # (http://tools.ietf.org/html/rfc3986#section-3.2) + url, path_, delim = split_first(url, ['/', '?', '#']) + + if delim: + # Reassemble the path + path = delim + path_ + + # Auth + if '@' in url: + # Last '@' denotes end of auth part + auth, url = url.rsplit('@', 1) + + # IPv6 + if url and url[0] == '[': + host, url = url.split(']', 1) + host += ']' + + # Port + if ':' in url: + _host, port = url.split(':', 1) + + if not host: + host = _host + + if port: + # If given, ports must be integers. + if not port.isdigit(): + raise LocationParseError(url) + port = int(port) + else: + # Blank ports are cool, too. (rfc3986#section-3.2.3) + port = None + + elif not host and url: + host = url + + if not path: + return Url(scheme, auth, host, port, path, query, fragment) + + # Fragment + if '#' in path: + path, fragment = path.split('#', 1) + + # Query + if '?' in path: + path, query = path.split('?', 1) + + return Url(scheme, auth, host, port, path, query, fragment) + + +def get_host(url): + """ + Deprecated. Use :func:`.parse_url` instead. 
+ """ + p = parse_url(url) + return p.scheme or 'http', p.hostname, p.port diff --git a/libs/requests/sessions.py b/libs/requests/sessions.py index 06e17d4b..df85a25c 100644 --- a/libs/requests/sessions.py +++ b/libs/requests/sessions.py @@ -12,27 +12,28 @@ import os from collections import Mapping from datetime import datetime +from .auth import _basic_auth_str from .compat import cookielib, OrderedDict, urljoin, urlparse, builtin_str from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) -from .models import Request, PreparedRequest +from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook -from .utils import to_key_val_list, default_headers -from .exceptions import TooManyRedirects, InvalidSchema +from .utils import to_key_val_list, default_headers, to_native_string +from .exceptions import ( + TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter -from .utils import requote_uri, get_environ_proxies, get_netrc_auth +from .utils import ( + requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, + get_auth_from_url +) from .status_codes import codes -REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 - codes.temporary_moved, # 307 -) -DEFAULT_REDIRECT_LIMIT = 30 + +# formerly defined here, reexposed here for backward compatibility +from .models import REDIRECT_STATI def merge_setting(request_setting, session_setting, dict_class=OrderedDict): @@ -63,6 +64,8 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict): if v is None: del merged_setting[k] + merged_setting = dict((k, v) for (k, v) in merged_setting.items() if v is not None) + return merged_setting @@ -89,11 +92,13 @@ class SessionRedirectMixin(object): i = 0 - # ((resp.status_code is codes.see_other)) - while ('location' in 
resp.headers and resp.status_code in REDIRECT_STATI): + while resp.is_redirect: prepared_request = req.copy() - resp.content # Consume socket so it can be released + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) if i >= self.max_redirects: raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects) @@ -121,7 +126,7 @@ class SessionRedirectMixin(object): else: url = requote_uri(url) - prepared_request.url = url + prepared_request.url = to_native_string(url) # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4 if (resp.status_code == codes.see_other and @@ -153,12 +158,19 @@ class SessionRedirectMixin(object): except KeyError: pass - extract_cookies_to_jar(prepared_request._cookies, - prepared_request, resp.raw) + extract_cookies_to_jar(prepared_request._cookies, prepared_request, resp.raw) + prepared_request._cookies.update(self.cookies) prepared_request.prepare_cookies(prepared_request._cookies) + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # Override the original request. + req = prepared_request + resp = self.send( - prepared_request, + req, stream=stream, timeout=timeout, verify=verify, @@ -172,6 +184,68 @@ class SessionRedirectMixin(object): i += 1 yield resp + def rebuild_auth(self, prepared_request, response): + """ + When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if 'Authorization' in headers: + # If we get redirected to a new host, we should strip out any + # authentication headers. 
+ original_parsed = urlparse(response.request.url) + redirect_parsed = urlparse(url) + + if (original_parsed.hostname != redirect_parsed.hostname): + del headers['Authorization'] + + # .netrc might have more auth for us on our new host. + new_auth = get_netrc_auth(url) if self.trust_env else None + if new_auth is not None: + prepared_request.prepare_auth(new_auth) + + return + + def rebuild_proxies(self, prepared_request, proxies): + """ + This method re-evaluates the proxy configuration by considering the + environment variables. If we are redirected to a URL covered by + NO_PROXY, we strip the proxy configuration. Otherwise, we set missing + proxy keys for this URL (in case they were stripped by a previous + redirect). + + This method also replaces the Proxy-Authorization header where + necessary. + """ + headers = prepared_request.headers + url = prepared_request.url + scheme = urlparse(url).scheme + new_proxies = proxies.copy() if proxies is not None else {} + + if self.trust_env and not should_bypass_proxies(url): + environ_proxies = get_environ_proxies(url) + + proxy = environ_proxies.get(scheme) + + if proxy: + new_proxies.setdefault(scheme, environ_proxies[scheme]) + + if 'Proxy-Authorization' in headers: + del headers['Proxy-Authorization'] + + try: + username, password = get_auth_from_url(new_proxies[scheme]) + except KeyError: + username, password = None, None + + if username and password: + headers['Proxy-Authorization'] = _basic_auth_str(username, password) + + return new_proxies + class Session(SessionRedirectMixin): """A Requests session. @@ -255,7 +329,7 @@ class Session(SessionRedirectMixin): :class:`Session`. :param request: :class:`Request` instance to prepare with this - session's settings. + session's settings. """ cookies = request.cookies or {} @@ -319,7 +393,7 @@ class Session(SessionRedirectMixin): :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. 
:param timeout: (optional) Float describing the timeout of the - request. + request in seconds. :param allow_redirects: (optional) Boolean. Set to True by default. :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. @@ -466,8 +540,7 @@ class Session(SessionRedirectMixin): if not isinstance(request, PreparedRequest): raise ValueError('You can only send PreparedRequests.') - # Set up variables needed for resolve_redirects and dispatching of - # hooks + # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') timeout = kwargs.get('timeout') @@ -481,8 +554,10 @@ class Session(SessionRedirectMixin): # Start time (approximately) of the request start = datetime.utcnow() + # Send the request r = adapter.send(request, **kwargs) + # Total elapsed time of the request (approximately) r.elapsed = datetime.utcnow() - start @@ -491,15 +566,20 @@ class Session(SessionRedirectMixin): # Persist cookies if r.history: + # If the hooks create history then we want those cookies too for resp in r.history: extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + extract_cookies_to_jar(self.cookies, request, r.raw) # Redirect resolving generator. - gen = self.resolve_redirects(r, request, stream=stream, - timeout=timeout, verify=verify, cert=cert, - proxies=proxies) + gen = self.resolve_redirects(r, request, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies) # Resolve redirects if allowed. history = [resp for resp in gen] if allow_redirects else [] @@ -510,7 +590,10 @@ class Session(SessionRedirectMixin): history.insert(0, r) # Get the last request made r = history.pop() - r.history = tuple(history) + r.history = history + + if not stream: + r.content return r @@ -533,8 +616,10 @@ class Session(SessionRedirectMixin): """Registers a connection adapter to a prefix. 
Adapters are sorted in descending order by key length.""" + self.adapters[prefix] = adapter keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + for key in keys_to_move: self.adapters[key] = self.adapters.pop(key) diff --git a/libs/requests/structures.py b/libs/requests/structures.py index a1759137..66cdad86 100644 --- a/libs/requests/structures.py +++ b/libs/requests/structures.py @@ -8,30 +8,7 @@ Data structures that power Requests. """ -import os import collections -from itertools import islice - - -class IteratorProxy(object): - """docstring for IteratorProxy""" - def __init__(self, i): - self.i = i - # self.i = chain.from_iterable(i) - - def __iter__(self): - return self.i - - def __len__(self): - if hasattr(self.i, '__len__'): - return len(self.i) - if hasattr(self.i, 'len'): - return self.i.len - if hasattr(self.i, 'fileno'): - return os.fstat(self.i.fileno()).st_size - - def read(self, n): - return "".join(islice(self.i, None, n)) class CaseInsensitiveDict(collections.MutableMapping): @@ -106,8 +83,7 @@ class CaseInsensitiveDict(collections.MutableMapping): return CaseInsensitiveDict(self._store.values()) def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, dict(self.items())) - + return str(dict(self.items())) class LookupDict(dict): """Dictionary lookup object.""" diff --git a/libs/requests/utils.py b/libs/requests/utils.py index c7e2b089..68e50cf0 100644 --- a/libs/requests/utils.py +++ b/libs/requests/utils.py @@ -24,10 +24,10 @@ from . import __version__ from . 
import certs from .compat import parse_http_list as _parse_list_header from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2, - builtin_str, getproxies, proxy_bypass) + builtin_str, getproxies, proxy_bypass, urlunparse) from .cookies import RequestsCookieJar, cookiejar_from_dict from .structures import CaseInsensitiveDict -from .exceptions import MissingSchema, InvalidURL +from .exceptions import InvalidURL _hush_pyflakes = (RequestsCookieJar,) @@ -61,25 +61,34 @@ def super_len(o): return os.fstat(fileno).st_size if hasattr(o, 'getvalue'): - # e.g. BytesIO, cStringIO.StringI + # e.g. BytesIO, cStringIO.StringIO return len(o.getvalue()) + def get_netrc_auth(url): """Returns the Requests tuple auth for a given url from netrc.""" try: from netrc import netrc, NetrcParseError - locations = (os.path.expanduser('~/{0}'.format(f)) for f in NETRC_FILES) netrc_path = None - for loc in locations: - if os.path.exists(loc) and not netrc_path: + for f in NETRC_FILES: + try: + loc = os.path.expanduser('~/{0}'.format(f)) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See http://bugs.python.org/issue20164 & + # https://github.com/kennethreitz/requests/issues/1846 + return + + if os.path.exists(loc): netrc_path = loc + break # Abort early if there isn't one. if netrc_path is None: - return netrc_path + return ri = urlparse(url) @@ -457,9 +466,10 @@ def is_valid_cidr(string_network): return True -def get_environ_proxies(url): - """Return a dict of environment proxies.""" - +def should_bypass_proxies(url): + """ + Returns whether we should bypass proxies or not. + """ get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) # First check whether no_proxy is defined. 
If it is, check that the URL @@ -477,23 +487,36 @@ def get_environ_proxies(url): for proxy_ip in no_proxy: if is_valid_cidr(proxy_ip): if address_in_network(ip, proxy_ip): - return {} + return True else: for host in no_proxy: if netloc.endswith(host) or netloc.split(':')[0].endswith(host): # The URL does match something in no_proxy, so we don't want # to apply the proxies on this URL. - return {} + return True # If the system proxy settings indicate that this URL should be bypassed, # don't proxy. - if proxy_bypass(netloc): - return {} + # The proxy_bypass function is incredibly buggy on OS X in early versions + # of Python 2.6, so allow this call to fail. Only catch the specific + # exceptions we've seen, though: this call failing in other ways can reveal + # legitimate problems. + try: + bypass = proxy_bypass(netloc) + except (TypeError, socket.gaierror): + bypass = False - # If we get here, we either didn't have no_proxy set or we're not going - # anywhere that no_proxy applies to, and the system settings don't require - # bypassing the proxy for the current URL. - return getproxies() + if bypass: + return True + + return False + +def get_environ_proxies(url): + """Return a dict of environment proxies.""" + if should_bypass_proxies(url): + return {} + else: + return getproxies() def default_user_agent(name="python-requests"): @@ -530,7 +553,7 @@ def default_user_agent(name="python-requests"): def default_headers(): return CaseInsensitiveDict({ 'User-Agent': default_user_agent(), - 'Accept-Encoding': ', '.join(('gzip', 'deflate', 'compress')), + 'Accept-Encoding': ', '.join(('gzip', 'deflate')), 'Accept': '*/*' }) @@ -604,24 +627,31 @@ def guess_json_utf(data): return None -def except_on_missing_scheme(url): - """Given a URL, raise a MissingSchema exception if the scheme is missing. 
- """ - scheme, netloc, path, params, query, fragment = urlparse(url) +def prepend_scheme_if_needed(url, new_scheme): + '''Given a URL that may or may not have a scheme, prepend the given scheme. + Does not replace a present scheme with the one provided as an argument.''' + scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) - if not scheme: - raise MissingSchema('Proxy URLs must have explicit schemes.') + # urlparse is a finicky beast, and sometimes decides that there isn't a + # netloc present. Assume that it's being over-cautious, and switch netloc + # and path if urlparse decided there was no netloc. + if not netloc: + netloc, path = path, netloc + + return urlunparse((scheme, netloc, path, params, query, fragment)) def get_auth_from_url(url): """Given a url with authentication components, extract them into a tuple of username,password.""" - if url: - url = unquote(url) - parsed = urlparse(url) - return (parsed.username, parsed.password) - else: - return ('', '') + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ('', '') + + return auth def to_native_string(string, encoding='ascii'): diff --git a/libs/scandir/.gitattributes b/libs/scandir/.gitattributes deleted file mode 100644 index 176a458f..00000000 --- a/libs/scandir/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -* text=auto diff --git a/libs/scandir/.gitignore b/libs/scandir/.gitignore deleted file mode 100644 index 48878c73..00000000 --- a/libs/scandir/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*.pyc -*.pyd -benchtree -build diff --git a/libs/scandir/__init__.py b/libs/scandir/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/libs/scandir/_scandir.c b/libs/scandir/_scandir.c deleted file mode 100644 index 26c81eda..00000000 --- a/libs/scandir/_scandir.c +++ /dev/null @@ -1,373 +0,0 @@ -// scandir C speedups -// -// TODO: this is a work in progress! 
-// -// There's a fair bit of PY_MAJOR_VERSION boilerplate to support both Python 2 -// and Python 3 -- the structure of this is taken from here: -// http://docs.python.org/3.3/howto/cporting.html - -#include -#include - -#ifdef MS_WINDOWS -#include -#endif - -#if PY_MAJOR_VERSION >= 3 -#define INITERROR return NULL -#define FROM_LONG PyLong_FromLong -#define FROM_STRING PyUnicode_FromStringAndSize -#else -#define INITERROR return -#define FROM_LONG PyInt_FromLong -#define FROM_STRING PyString_FromStringAndSize -#endif - -#ifdef MS_WINDOWS - -static PyObject * -win32_error_unicode(char* function, Py_UNICODE* filename) -{ - errno = GetLastError(); - if (filename) - return PyErr_SetFromWindowsErrWithUnicodeFilename(errno, filename); - else - return PyErr_SetFromWindowsErr(errno); -} - -/* Below, we *know* that ugo+r is 0444 */ -#if _S_IREAD != 0400 -#error Unsupported C library -#endif -static int -attributes_to_mode(DWORD attr) -{ - int m = 0; - if (attr & FILE_ATTRIBUTE_DIRECTORY) - m |= _S_IFDIR | 0111; /* IFEXEC for user,group,other */ - else - m |= _S_IFREG; - if (attr & FILE_ATTRIBUTE_READONLY) - m |= 0444; - else - m |= 0666; - if (attr & FILE_ATTRIBUTE_REPARSE_POINT) - m |= 0120000; // S_IFLNK - return m; -} - -double -filetime_to_time(FILETIME *filetime) -{ - const double SECONDS_BETWEEN_EPOCHS = 11644473600.0; - - unsigned long long total = (unsigned long long)filetime->dwHighDateTime << 32 | - (unsigned long long)filetime->dwLowDateTime; - return (double)total / 10000000.0 - SECONDS_BETWEEN_EPOCHS; -} - -static PyTypeObject StatResultType; - -static PyObject * -find_data_to_statresult(WIN32_FIND_DATAW *data) -{ - PY_LONG_LONG size; - PyObject *v = PyStructSequence_New(&StatResultType); - if (v == NULL) - return NULL; - - size = (PY_LONG_LONG)data->nFileSizeHigh << 32 | - (PY_LONG_LONG)data->nFileSizeLow; - - PyStructSequence_SET_ITEM(v, 0, FROM_LONG(attributes_to_mode(data->dwFileAttributes))); - PyStructSequence_SET_ITEM(v, 1, FROM_LONG(0)); - 
PyStructSequence_SET_ITEM(v, 2, FROM_LONG(0)); - PyStructSequence_SET_ITEM(v, 3, FROM_LONG(0)); - PyStructSequence_SET_ITEM(v, 4, FROM_LONG(0)); - PyStructSequence_SET_ITEM(v, 5, FROM_LONG(0)); - PyStructSequence_SET_ITEM(v, 6, PyLong_FromLongLong((PY_LONG_LONG)size)); - PyStructSequence_SET_ITEM(v, 7, PyFloat_FromDouble(filetime_to_time(&data->ftLastAccessTime))); - PyStructSequence_SET_ITEM(v, 8, PyFloat_FromDouble(filetime_to_time(&data->ftLastWriteTime))); - PyStructSequence_SET_ITEM(v, 9, PyFloat_FromDouble(filetime_to_time(&data->ftCreationTime))); - - if (PyErr_Occurred()) { - Py_DECREF(v); - return NULL; - } - - return v; -} - -static PyStructSequence_Field stat_result_fields[] = { - {"st_mode", "protection bits"}, - {"st_ino", "inode"}, - {"st_dev", "device"}, - {"st_nlink", "number of hard links"}, - {"st_uid", "user ID of owner"}, - {"st_gid", "group ID of owner"}, - {"st_size", "total size, in bytes"}, - {"st_atime", "time of last access"}, - {"st_mtime", "time of last modification"}, - {"st_ctime", "time of last change"}, - {0} -}; - -static PyStructSequence_Desc stat_result_desc = { - "stat_result", /* name */ - NULL, /* doc */ - stat_result_fields, - 10 -}; - -static PyObject * -scandir_helper(PyObject *self, PyObject *args) -{ - PyObject *d, *v; - HANDLE hFindFile; - BOOL result; - WIN32_FIND_DATAW wFileData; - Py_UNICODE *wnamebuf; - Py_ssize_t len; - PyObject *po; - PyObject *name_stat; - - if (!PyArg_ParseTuple(args, "U:scandir_helper", &po)) - return NULL; - - /* Overallocate for \\*.*\0 */ - len = PyUnicode_GET_SIZE(po); - wnamebuf = malloc((len + 5) * sizeof(wchar_t)); - if (!wnamebuf) { - PyErr_NoMemory(); - return NULL; - } - wcscpy(wnamebuf, PyUnicode_AS_UNICODE(po)); - if (len > 0) { - Py_UNICODE wch = wnamebuf[len-1]; - if (wch != L'/' && wch != L'\\' && wch != L':') - wnamebuf[len++] = L'\\'; - wcscpy(wnamebuf + len, L"*.*"); - } - if ((d = PyList_New(0)) == NULL) { - free(wnamebuf); - return NULL; - } - Py_BEGIN_ALLOW_THREADS - 
hFindFile = FindFirstFileW(wnamebuf, &wFileData); - Py_END_ALLOW_THREADS - if (hFindFile == INVALID_HANDLE_VALUE) { - int error = GetLastError(); - if (error == ERROR_FILE_NOT_FOUND) { - free(wnamebuf); - return d; - } - Py_DECREF(d); - win32_error_unicode("FindFirstFileW", wnamebuf); - free(wnamebuf); - return NULL; - } - do { - /* Skip over . and .. */ - if (wcscmp(wFileData.cFileName, L".") != 0 && - wcscmp(wFileData.cFileName, L"..") != 0) { - v = PyUnicode_FromUnicode(wFileData.cFileName, wcslen(wFileData.cFileName)); - if (v == NULL) { - Py_DECREF(d); - d = NULL; - break; - } - name_stat = Py_BuildValue("ON", v, find_data_to_statresult(&wFileData)); - if (name_stat == NULL) { - Py_DECREF(v); - Py_DECREF(d); - d = NULL; - break; - } - if (PyList_Append(d, name_stat) != 0) { - Py_DECREF(v); - Py_DECREF(d); - Py_DECREF(name_stat); - d = NULL; - break; - } - Py_DECREF(name_stat); - Py_DECREF(v); - } - Py_BEGIN_ALLOW_THREADS - result = FindNextFileW(hFindFile, &wFileData); - Py_END_ALLOW_THREADS - /* FindNextFile sets error to ERROR_NO_MORE_FILES if - it got to the end of the directory. 
*/ - if (!result && GetLastError() != ERROR_NO_MORE_FILES) { - Py_DECREF(d); - win32_error_unicode("FindNextFileW", wnamebuf); - FindClose(hFindFile); - free(wnamebuf); - return NULL; - } - } while (result == TRUE); - - if (FindClose(hFindFile) == FALSE) { - Py_DECREF(d); - win32_error_unicode("FindClose", wnamebuf); - free(wnamebuf); - return NULL; - } - free(wnamebuf); - return d; -} - -#else // Linux / OS X - -#include -#define NAMLEN(dirent) strlen((dirent)->d_name) - -static PyObject * -posix_error_with_allocated_filename(char* name) -{ - PyObject *rc = PyErr_SetFromErrnoWithFilename(PyExc_OSError, name); - PyMem_Free(name); - return rc; -} - -static PyObject * -scandir_helper(PyObject *self, PyObject *args) -{ - char *name = NULL; - PyObject *d, *v, *name_type; - DIR *dirp; - struct dirent *ep; - int arg_is_unicode = 1; - - errno = 0; - if (!PyArg_ParseTuple(args, "U:scandir_helper", &v)) { - arg_is_unicode = 0; - PyErr_Clear(); - } - if (!PyArg_ParseTuple(args, "et:scandir_helper", Py_FileSystemDefaultEncoding, &name)) - return NULL; - Py_BEGIN_ALLOW_THREADS - dirp = opendir(name); - Py_END_ALLOW_THREADS - if (dirp == NULL) { - return posix_error_with_allocated_filename(name); - } - if ((d = PyList_New(0)) == NULL) { - Py_BEGIN_ALLOW_THREADS - closedir(dirp); - Py_END_ALLOW_THREADS - PyMem_Free(name); - return NULL; - } - for (;;) { - errno = 0; - Py_BEGIN_ALLOW_THREADS - ep = readdir(dirp); - Py_END_ALLOW_THREADS - if (ep == NULL) { - if (errno == 0) { - break; - } else { - Py_BEGIN_ALLOW_THREADS - closedir(dirp); - Py_END_ALLOW_THREADS - Py_DECREF(d); - return posix_error_with_allocated_filename(name); - } - } - if (ep->d_name[0] == '.' && - (NAMLEN(ep) == 1 || - (ep->d_name[1] == '.' 
&& NAMLEN(ep) == 2))) - continue; - v = FROM_STRING(ep->d_name, NAMLEN(ep)); - if (v == NULL) { - Py_DECREF(d); - d = NULL; - break; - } - if (arg_is_unicode) { - PyObject *w; - - w = PyUnicode_FromEncodedObject(v, - Py_FileSystemDefaultEncoding, - "strict"); - if (w != NULL) { - Py_DECREF(v); - v = w; - } - else { - /* fall back to the original byte string, as - discussed in patch #683592 */ - PyErr_Clear(); - } - } - name_type = Py_BuildValue("ON", v, FROM_LONG(ep->d_type)); - if (name_type == NULL) { - Py_DECREF(v); - Py_DECREF(d); - d = NULL; - break; - } - if (PyList_Append(d, name_type) != 0) { - Py_DECREF(v); - Py_DECREF(d); - Py_DECREF(name_type); - d = NULL; - break; - } - Py_DECREF(name_type); - Py_DECREF(v); - } - Py_BEGIN_ALLOW_THREADS - closedir(dirp); - Py_END_ALLOW_THREADS - PyMem_Free(name); - - return d; -} - -#endif - -static PyMethodDef scandir_methods[] = { - {"scandir_helper", (PyCFunction)scandir_helper, METH_VARARGS, NULL}, - {NULL, NULL}, -}; - -#if PY_MAJOR_VERSION >= 3 -static struct PyModuleDef moduledef = { - PyModuleDef_HEAD_INIT, - "_scandir", - NULL, - 0, - scandir_methods, - NULL, - NULL, - NULL, - NULL, -}; -#endif - -#if PY_MAJOR_VERSION >= 3 -PyObject * -PyInit__scandir(void) -{ - PyObject *module = PyModule_Create(&moduledef); -#else -void -init_scandir(void) -{ - PyObject *module = Py_InitModule("_scandir", scandir_methods); -#endif - if (module == NULL) { - INITERROR; - } - -#ifdef MS_WINDOWS - stat_result_desc.name = "scandir.stat_result"; - PyStructSequence_InitType(&StatResultType, &stat_result_desc); -#endif - -#if PY_MAJOR_VERSION >= 3 - return module; -#endif -} diff --git a/libs/scandir/scandir.py b/libs/scandir/scandir.py deleted file mode 100644 index 1e34f8e5..00000000 --- a/libs/scandir/scandir.py +++ /dev/null @@ -1,456 +0,0 @@ -"""scandir, a better directory iterator that exposes all file info OS provides - -scandir is a generator version of os.listdir() that returns an iterator over -files in a directory, and also 
exposes the extra information most OSes provide -while iterating files in a directory. - -See README.md or https://github.com/benhoyt/scandir for rationale and docs. - -scandir is released under the new BSD 3-clause license. See LICENSE.txt for -the full license text. -""" - -from __future__ import division - -import ctypes -import os -import stat -import sys - -__version__ = '0.3' -__all__ = ['scandir', 'walk'] - -# Shortcuts to these functions for speed and ease -join = os.path.join -lstat = os.lstat - -S_IFDIR = stat.S_IFDIR -S_IFREG = stat.S_IFREG -S_IFLNK = stat.S_IFLNK - -# 'unicode' isn't defined on Python 3 -try: - unicode -except NameError: - unicode = str - -_scandir = None - - -class GenericDirEntry(object): - __slots__ = ('name', '_lstat', '_path') - - def __init__(self, path, name): - self._path = path - self.name = name - self._lstat = None - - def lstat(self): - if self._lstat is None: - self._lstat = lstat(join(self._path, self.name)) - return self._lstat - - def is_dir(self): - try: - self.lstat() - except OSError: - return False - return self._lstat.st_mode & 0o170000 == S_IFDIR - - def is_file(self): - try: - self.lstat() - except OSError: - return False - return self._lstat.st_mode & 0o170000 == S_IFREG - - def is_symlink(self): - try: - self.lstat() - except OSError: - return False - return self._lstat.st_mode & 0o170000 == S_IFLNK - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - -if sys.platform == 'win32': - from ctypes import wintypes - - # Various constants from windows.h - INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value - ERROR_FILE_NOT_FOUND = 2 - ERROR_NO_MORE_FILES = 18 - FILE_ATTRIBUTE_READONLY = 1 - FILE_ATTRIBUTE_DIRECTORY = 16 - FILE_ATTRIBUTE_REPARSE_POINT = 1024 - - # Numer of seconds between 1601-01-01 and 1970-01-01 - SECONDS_BETWEEN_EPOCHS = 11644473600 - - kernel32 = ctypes.windll.kernel32 - - # ctypes wrappers for (wide string versions of) FindFirstFile, - # 
FindNextFile, and FindClose - FindFirstFile = kernel32.FindFirstFileW - FindFirstFile.argtypes = [ - wintypes.LPCWSTR, - ctypes.POINTER(wintypes.WIN32_FIND_DATAW), - ] - FindFirstFile.restype = wintypes.HANDLE - - FindNextFile = kernel32.FindNextFileW - FindNextFile.argtypes = [ - wintypes.HANDLE, - ctypes.POINTER(wintypes.WIN32_FIND_DATAW), - ] - FindNextFile.restype = wintypes.BOOL - - FindClose = kernel32.FindClose - FindClose.argtypes = [wintypes.HANDLE] - FindClose.restype = wintypes.BOOL - - def filetime_to_time(filetime): - """Convert Win32 FILETIME to time since Unix epoch in seconds.""" - total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime - return total / 10000000 - SECONDS_BETWEEN_EPOCHS - - def find_data_to_stat(data): - """Convert Win32 FIND_DATA struct to stat_result.""" - # First convert Win32 dwFileAttributes to st_mode - attributes = data.dwFileAttributes - st_mode = 0 - if attributes & FILE_ATTRIBUTE_DIRECTORY: - st_mode |= S_IFDIR | 0o111 - else: - st_mode |= S_IFREG - if attributes & FILE_ATTRIBUTE_READONLY: - st_mode |= 0o444 - else: - st_mode |= 0o666 - if attributes & FILE_ATTRIBUTE_REPARSE_POINT: - st_mode |= S_IFLNK - - st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow - st_atime = filetime_to_time(data.ftLastAccessTime) - st_mtime = filetime_to_time(data.ftLastWriteTime) - st_ctime = filetime_to_time(data.ftCreationTime) - - # Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev, - # st_nlink, st_uid, st_gid - return os.stat_result((st_mode, 0, 0, 0, 0, 0, st_size, st_atime, - st_mtime, st_ctime)) - - class Win32DirEntry(object): - __slots__ = ('name', '_lstat', '_find_data') - - def __init__(self, name, find_data): - self.name = name - self._lstat = None - self._find_data = find_data - - def lstat(self): - if self._lstat is None: - # Lazily convert to stat object, because it's slow, and often - # we only need is_dir() etc - self._lstat = find_data_to_stat(self._find_data) - return self._lstat - - def 
is_dir(self): - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_DIRECTORY != 0) - - def is_file(self): - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_DIRECTORY == 0) - - def is_symlink(self): - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_REPARSE_POINT != 0) - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - def win_error(error, filename): - exc = WindowsError(error, ctypes.FormatError(error)) - exc.filename = filename - return exc - - def scandir(path='.', windows_wildcard='*.*'): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. - """ - # Call FindFirstFile and handle errors - data = wintypes.WIN32_FIND_DATAW() - data_p = ctypes.byref(data) - filename = join(path, windows_wildcard) - handle = FindFirstFile(filename, data_p) - if handle == INVALID_HANDLE_VALUE: - error = ctypes.GetLastError() - if error == ERROR_FILE_NOT_FOUND: - # No files, don't yield anything - return - raise win_error(error, path) - - # Call FindNextFile in a loop, stopping when no more files - try: - while True: - # Skip '.' and '..' 
(current and parent directory), but - # otherwise yield (filename, stat_result) tuple - name = data.cFileName - if name not in ('.', '..'): - yield Win32DirEntry(name, data) - - data = wintypes.WIN32_FIND_DATAW() - data_p = ctypes.byref(data) - success = FindNextFile(handle, data_p) - if not success: - error = ctypes.GetLastError() - if error == ERROR_NO_MORE_FILES: - break - raise win_error(error, path) - finally: - if not FindClose(handle): - raise win_error(ctypes.GetLastError(), path) - - try: - import _scandir - - scandir_helper = _scandir.scandir_helper - - class Win32DirEntry(object): - __slots__ = ('name', '_lstat') - - def __init__(self, name, lstat): - self.name = name - self._lstat = lstat - - def lstat(self): - return self._lstat - - def is_dir(self): - return self._lstat.st_mode & 0o170000 == S_IFDIR - - def is_file(self): - return self._lstat.st_mode & 0o170000 == S_IFREG - - def is_symlink(self): - return self._lstat.st_mode & 0o170000 == S_IFLNK - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - def scandir(path='.'): - for name, stat in scandir_helper(unicode(path)): - yield Win32DirEntry(name, stat) - - except ImportError: - pass - - -# Linux, OS X, and BSD implementation -elif sys.platform.startswith(('linux', 'darwin')) or 'bsd' in sys.platform: - import ctypes.util - - DIR_p = ctypes.c_void_p - - # Rather annoying how the dirent struct is slightly different on each - # platform. The only fields we care about are d_name and d_type. 
- class Dirent(ctypes.Structure): - if sys.platform.startswith('linux'): - _fields_ = ( - ('d_ino', ctypes.c_ulong), - ('d_off', ctypes.c_long), - ('d_reclen', ctypes.c_ushort), - ('d_type', ctypes.c_byte), - ('d_name', ctypes.c_char * 256), - ) - else: - _fields_ = ( - ('d_ino', ctypes.c_uint32), # must be uint32, not ulong - ('d_reclen', ctypes.c_ushort), - ('d_type', ctypes.c_byte), - ('d_namlen', ctypes.c_byte), - ('d_name', ctypes.c_char * 256), - ) - - DT_UNKNOWN = 0 - DT_DIR = 4 - DT_REG = 8 - DT_LNK = 10 - - Dirent_p = ctypes.POINTER(Dirent) - Dirent_pp = ctypes.POINTER(Dirent_p) - - libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True) - opendir = libc.opendir - opendir.argtypes = [ctypes.c_char_p] - opendir.restype = DIR_p - - readdir_r = libc.readdir_r - readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp] - readdir_r.restype = ctypes.c_int - - closedir = libc.closedir - closedir.argtypes = [DIR_p] - closedir.restype = ctypes.c_int - - file_system_encoding = sys.getfilesystemencoding() - - class PosixDirEntry(object): - __slots__ = ('name', '_d_type', '_lstat', '_path') - - def __init__(self, path, name, d_type): - self._path = path - self.name = name - self._d_type = d_type - self._lstat = None - - def lstat(self): - if self._lstat is None: - self._lstat = lstat(join(self._path, self.name)) - return self._lstat - - # Ridiculous duplication between these is* functions -- helps a little - # bit with os.walk() performance compared to calling another function. 
- def is_dir(self): - d_type = self._d_type - if d_type != DT_UNKNOWN: - return d_type == DT_DIR - try: - self.lstat() - except OSError: - return False - return self._lstat.st_mode & 0o170000 == S_IFDIR - - def is_file(self): - d_type = self._d_type - if d_type != DT_UNKNOWN: - return d_type == DT_REG - try: - self.lstat() - except OSError: - return False - return self._lstat.st_mode & 0o170000 == S_IFREG - - def is_symlink(self): - d_type = self._d_type - if d_type != DT_UNKNOWN: - return d_type == DT_LNK - try: - self.lstat() - except OSError: - return False - return self._lstat.st_mode & 0o170000 == S_IFLNK - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - def posix_error(filename): - errno = ctypes.get_errno() - exc = OSError(errno, os.strerror(errno)) - exc.filename = filename - return exc - - def scandir(path='.'): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. - """ - dir_p = opendir(path.encode(file_system_encoding)) - if not dir_p: - raise posix_error(path) - try: - result = Dirent_p() - while True: - entry = Dirent() - if readdir_r(dir_p, entry, result): - raise posix_error(path) - if not result: - break - name = entry.d_name.decode(file_system_encoding) - if name not in ('.', '..'): - yield PosixDirEntry(path, name, entry.d_type) - finally: - if closedir(dir_p): - raise posix_error(path) - - try: - import _scandir - - scandir_helper = _scandir.scandir_helper - - def scandir(path='.'): - for name, d_type in scandir_helper(unicode(path)): - yield PosixDirEntry(path, name, d_type) - - except ImportError: - pass - - -# Some other system -- no d_type or stat information -else: - def scandir(path='.'): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. 
- """ - for name in os.listdir(path): - yield GenericDirEntry(path, name) - - -def walk(top, topdown=True, onerror=None, followlinks=False): - """Like os.walk(), but faster, as it uses scandir() internally.""" - # Determine which are files and which are directories - dirs = [] - nondirs = [] - try: - for entry in scandir(top): - if entry.is_dir(): - dirs.append(entry) - else: - nondirs.append(entry) - except OSError as error: - if onerror is not None: - onerror(error) - return - - # Yield before recursion if going top down - if topdown: - # Need to do some fancy footwork here as caller is allowed to modify - # dir_names, and we really want them to modify dirs (list of DirEntry - # objects) instead. Keep a mapping of entries keyed by name. - dir_names = [] - entries_by_name = {} - for entry in dirs: - dir_names.append(entry.name) - entries_by_name[entry.name] = entry - - yield top, dir_names, [e.name for e in nondirs] - - dirs = [] - for dir_name in dir_names: - entry = entries_by_name.get(dir_name) - if entry is None: - # Only happens when caller creates a new directory and adds it - # to dir_names - entry = GenericDirEntry(top, dir_name) - dirs.append(entry) - - # Recurse into sub-directories, following symbolic links if "followlinks" - for entry in dirs: - if followlinks or not entry.is_symlink(): - new_path = join(top, entry.name) - for x in walk(new_path, topdown, onerror, followlinks): - yield x - - # Yield before recursion if going bottom up - if not topdown: - yield top, [e.name for e in dirs], [e.name for e in nondirs] diff --git a/libs/tornado/__init__.py b/libs/tornado/__init__.py index c41ec97b..05174084 100755 --- a/libs/tornado/__init__.py +++ b/libs/tornado/__init__.py @@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement # is zero for an official release, positive for a development branch, # or negative for a release candidate or beta (after the base version # number has been incremented) -version = "3.2" 
-version_info = (3, 2, 0, 0) +version = "3.3.dev1" +version_info = (3, 3, 0, -100) diff --git a/libs/tornado/auth.py b/libs/tornado/auth.py index 9baac9ba..f15413e5 100755 --- a/libs/tornado/auth.py +++ b/libs/tornado/auth.py @@ -34,15 +34,29 @@ See the individual service classes below for complete documentation. Example usage for Google OpenID:: - class GoogleLoginHandler(tornado.web.RequestHandler, - tornado.auth.GoogleMixin): + class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, + tornado.auth.GoogleOAuth2Mixin): @tornado.gen.coroutine def get(self): - if self.get_argument("openid.mode", None): - user = yield self.get_authenticated_user() - # Save the user with e.g. set_secure_cookie() + if self.get_argument('code', False): + user = yield self.get_authenticated_user( + redirect_uri='http://your.site.com/auth/google', + code=self.get_argument('code')) + # Save the user with e.g. set_secure_cookie else: - yield self.authenticate_redirect() + yield self.authorize_redirect( + redirect_uri='http://your.site.com/auth/google', + client_id=self.settings['google_oauth']['key'], + scope=['profile', 'email'], + response_type='code', + extra_params={'approval_prompt': 'auto'}) + +.. versionchanged:: 3.3 + All of the callback interfaces in this module are now guaranteed + to run their callback with an argument of ``None`` on error. + Previously some functions would do this while others would simply + terminate the request on their own. This change also ensures that + errors are more consistently reported through the ``Future`` interfaces. 
""" from __future__ import absolute_import, division, print_function, with_statement @@ -61,6 +75,7 @@ from tornado import httpclient from tornado import escape from tornado.httputil import url_concat from tornado.log import gen_log +from tornado.stack_context import ExceptionStackContext from tornado.util import bytes_type, u, unicode_type, ArgReplacer try: @@ -73,6 +88,11 @@ try: except ImportError: import urllib as urllib_parse # py2 +try: + long # py2 +except NameError: + long = int # py3 + class AuthError(Exception): pass @@ -103,7 +123,14 @@ def _auth_return_future(f): if callback is not None: future.add_done_callback( functools.partial(_auth_future_to_callback, callback)) - f(*args, **kwargs) + def handle_exception(typ, value, tb): + if future.done(): + return False + else: + future.set_exc_info((typ, value, tb)) + return True + with ExceptionStackContext(handle_exception): + f(*args, **kwargs) return future return wrapper @@ -161,7 +188,7 @@ class OpenIdMixin(object): url = self._OPENID_ENDPOINT if http_client is None: http_client = self.get_auth_http_client() - http_client.fetch(url, self.async_callback( + http_client.fetch(url, functools.partial( self._on_authentication_verified, callback), method="POST", body=urllib_parse.urlencode(args)) @@ -333,7 +360,7 @@ class OAuthMixin(object): http_client.fetch( self._oauth_request_token_url(callback_uri=callback_uri, extra_params=extra_params), - self.async_callback( + functools.partial( self._on_request_token, self._OAUTH_AUTHORIZE_URL, callback_uri, @@ -341,7 +368,7 @@ class OAuthMixin(object): else: http_client.fetch( self._oauth_request_token_url(), - self.async_callback( + functools.partial( self._on_request_token, self._OAUTH_AUTHORIZE_URL, callback_uri, callback)) @@ -378,7 +405,7 @@ class OAuthMixin(object): if http_client is None: http_client = self.get_auth_http_client() http_client.fetch(self._oauth_access_token_url(token), - self.async_callback(self._on_access_token, callback)) + 
functools.partial(self._on_access_token, callback)) def _oauth_request_token_url(self, callback_uri=None, extra_params=None): consumer_token = self._oauth_consumer_token() @@ -455,7 +482,7 @@ class OAuthMixin(object): access_token = _oauth_parse_response(response.body) self._oauth_get_user_future(access_token).add_done_callback( - self.async_callback(self._on_oauth_get_user, access_token, future)) + functools.partial(self._on_oauth_get_user, access_token, future)) def _oauth_consumer_token(self): """Subclasses must override this to return their OAuth consumer keys. @@ -640,7 +667,7 @@ class TwitterMixin(OAuthMixin): """ http = self.get_auth_http_client() http.fetch(self._oauth_request_token_url(callback_uri=callback_uri), - self.async_callback( + functools.partial( self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None, callback)) @@ -698,7 +725,7 @@ class TwitterMixin(OAuthMixin): if args: url += "?" + urllib_parse.urlencode(args) http = self.get_auth_http_client() - http_callback = self.async_callback(self._on_twitter_request, callback) + http_callback = functools.partial(self._on_twitter_request, callback) if post_args is not None: http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), callback=http_callback) @@ -815,7 +842,7 @@ class FriendFeedMixin(OAuthMixin): args.update(oauth) if args: url += "?" + urllib_parse.urlencode(args) - callback = self.async_callback(self._on_friendfeed_request, callback) + callback = functools.partial(self._on_friendfeed_request, callback) http = self.get_auth_http_client() if post_args is not None: http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), @@ -856,6 +883,10 @@ class FriendFeedMixin(OAuthMixin): class GoogleMixin(OpenIdMixin, OAuthMixin): """Google Open ID / OAuth authentication. + *Deprecated:* New applications should use `GoogleOAuth2Mixin` + below instead of this class. As of May 19, 2014, Google has stopped + supporting registration-free authentication. 
+ No application registration is necessary to use Google for authentication or to access Google resources on behalf of a user. @@ -926,7 +957,7 @@ class GoogleMixin(OpenIdMixin, OAuthMixin): http = self.get_auth_http_client() token = dict(key=token, secret="") http.fetch(self._oauth_access_token_url(token), - self.async_callback(self._on_access_token, callback)) + functools.partial(self._on_access_token, callback)) else: chain_future(OpenIdMixin.get_authenticated_user(self), callback) @@ -945,6 +976,19 @@ class GoogleMixin(OpenIdMixin, OAuthMixin): class GoogleOAuth2Mixin(OAuth2Mixin): """Google authentication using OAuth2. + In order to use, register your application with Google and copy the + relevant parameters to your application settings. + + * Go to the Google Dev Console at http://console.developers.google.com + * Select a project, or create a new one. + * In the sidebar on the left, select APIs & Auth. + * In the list of APIs, find the Google+ API service and set it to ON. + * In the sidebar on the left, select Credentials. + * In the OAuth section of the page, select Create New Client ID. + * Set the Redirect URI to point to your auth handler + * Copy the "Client secret" and "Client ID" to the application settings as + {"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}} + .. 
versionadded:: 3.2 """ _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/auth" @@ -958,7 +1002,7 @@ class GoogleOAuth2Mixin(OAuth2Mixin): Example usage:: - class GoogleOAuth2LoginHandler(LoginHandler, + class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, tornado.auth.GoogleOAuth2Mixin): @tornado.gen.coroutine def get(self): @@ -985,7 +1029,7 @@ class GoogleOAuth2Mixin(OAuth2Mixin): }) http.fetch(self._OAUTH_ACCESS_TOKEN_URL, - self.async_callback(self._on_access_token, callback), + functools.partial(self._on_access_token, callback), method="POST", headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=body) def _on_access_token(self, future, response): @@ -1026,7 +1070,7 @@ class FacebookMixin(object): @tornado.web.asynchronous def get(self): if self.get_argument("session", None): - self.get_authenticated_user(self.async_callback(self._on_auth)) + self.get_authenticated_user(self._on_auth) return yield self.authenticate_redirect() @@ -1112,7 +1156,7 @@ class FacebookMixin(object): session = escape.json_decode(self.get_argument("session")) self.facebook_request( method="facebook.users.getInfo", - callback=self.async_callback( + callback=functools.partial( self._on_get_user_info, callback, session), session_key=session["session_key"], uids=session["uid"], @@ -1138,7 +1182,7 @@ class FacebookMixin(object): def get(self): self.facebook_request( method="stream.get", - callback=self.async_callback(self._on_stream), + callback=self._on_stream, session_key=self.current_user["session_key"]) def _on_stream(self, stream): @@ -1162,7 +1206,7 @@ class FacebookMixin(object): url = "http://api.facebook.com/restserver.php?" 
+ \ urllib_parse.urlencode(args) http = self.get_auth_http_client() - http.fetch(url, callback=self.async_callback( + http.fetch(url, callback=functools.partial( self._parse_response, callback)) def _on_get_user_info(self, callback, session, users): @@ -1260,7 +1304,7 @@ class FacebookGraphMixin(OAuth2Mixin): fields.update(extra_fields) http.fetch(self._oauth_request_token_url(**args), - self.async_callback(self._on_access_token, redirect_uri, client_id, + functools.partial(self._on_access_token, redirect_uri, client_id, client_secret, callback, fields)) def _on_access_token(self, redirect_uri, client_id, client_secret, @@ -1277,7 +1321,7 @@ class FacebookGraphMixin(OAuth2Mixin): self.facebook_request( path="/me", - callback=self.async_callback( + callback=functools.partial( self._on_get_user_info, future, session, fields), access_token=session["access_token"], fields=",".join(fields) @@ -1344,7 +1388,7 @@ class FacebookGraphMixin(OAuth2Mixin): if all_args: url += "?" + urllib_parse.urlencode(all_args) - callback = self.async_callback(self._on_facebook_request, callback) + callback = functools.partial(self._on_facebook_request, callback) http = self.get_auth_http_client() if post_args is not None: http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), diff --git a/libs/tornado/autoreload.py b/libs/tornado/autoreload.py index 79cccb49..3982579a 100755 --- a/libs/tornado/autoreload.py +++ b/libs/tornado/autoreload.py @@ -14,7 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. -"""xAutomatically restart the server when a source file is modified. +"""Automatically restart the server when a source file is modified. Most applications should not access this module directly. 
Instead, pass the keyword argument ``autoreload=True`` to the diff --git a/libs/tornado/ca-certificates.crt b/libs/tornado/ca-certificates.crt deleted file mode 100755 index a1ede895..00000000 --- a/libs/tornado/ca-certificates.crt +++ /dev/null @@ -1,3562 +0,0 @@ -# This file contains certificates of known certificate authorities -# for use with SimpleAsyncHTTPClient. -# -# It was extracted from the Mozilla source tree using libcurl's mk-ca-bundle -# script on Aug 13, 2013. -# -# This data file is licenced under the MPL/GPL. - -## -## ca-bundle.crt -- Bundle of CA Root Certificates -## -## Certificate data from Mozilla as of: Tue Aug 13 03:28:51 2013 -## -## This is a bundle of X.509 certificates of public Certificate Authorities -## (CA). These were automatically extracted from Mozilla's root certificates -## file (certdata.txt). This file can be found in the mozilla source tree: -## http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1 -## -## It contains the certificates in PEM format and therefore -## can be directly used with curl / libcurl / php_curl, or with -## an Apache+mod_ssl webserver for SSL client authentication. -## Just configure this file as the SSLCACertificateFile. 
-## - -# @(#) $RCSfile: certdata.txt,v $ $Revision: 1.87 $ $Date: 2012/12/29 16:32:45 $ - -GTE CyberTrust Global Root -========================== ------BEGIN CERTIFICATE----- -MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYDVQQKEw9HVEUg -Q29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNvbHV0aW9ucywgSW5jLjEjMCEG -A1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJvb3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEz -MjM1OTAwWjB1MQswCQYDVQQGEwJVUzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQL -Ex5HVEUgQ3liZXJUcnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0 -IEdsb2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrHiM3dFw4u -sJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTSr41tiGeA5u2ylc9yMcql -HHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X404Wqk2kmhXBIgD8SFcd5tB8FLztimQID -AQABMA0GCSqGSIb3DQEBBAUAA4GBAG3rGwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMW -M4ETCJ57NE7fQMh017l93PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OF -NMQkpw0PlZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ ------END CERTIFICATE----- - -Thawte Server CA -================ ------BEGIN CERTIFICATE----- -MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3Vs -dGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UE -AxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5j -b20wHhcNOTYwODAxMDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNV -BAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29u -c3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcG -A1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0 -ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl -/Kj0R1HahbUgdJSGHg91yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg7 -1CcEJRCXL+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGjEzAR 
-MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG7oWDTSEwjsrZqG9J -GubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6eQNuozDJ0uW8NxuOzRAvZim+aKZuZ -GCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZqdq5snUb9kLy78fyGPmJvKP/iiMucEc= ------END CERTIFICATE----- - -Thawte Premium Server CA -======================== ------BEGIN CERTIFICATE----- -MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3Vs -dGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UE -AxMYVGhhd3RlIFByZW1pdW0gU2VydmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZl -ckB0aGF3dGUuY29tMB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYT -AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsGA1UEChMU -VGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2VydmljZXMgRGl2 -aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNlcnZlciBDQTEoMCYGCSqGSIb3DQEJARYZ -cHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2 -aovXwlue2oFBYo847kkEVdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIh -Udib0GfQug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMRuHM/ -qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQFAAOBgQAm -SCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUIhfzJATj/Tb7yFkJD57taRvvBxhEf -8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JMpAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7t -UCemDaYj+bvLpgcUQg== ------END CERTIFICATE----- - -Equifax Secure CA -================= ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJVUzEQMA4GA1UE -ChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 -MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoT -B0VxdWlmYXgxLTArBgNVBAsTJEVxdWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCB -nzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPR 
-fM6fBeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+AcJkVV5MW -8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kCAwEAAaOCAQkwggEFMHAG -A1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UE -CxMkRXF1aWZheCBTZWN1cmUgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoG -A1UdEAQTMBGBDzIwMTgwODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvS -spXXR9gjIBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQFMAMB -Af8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUAA4GBAFjOKer89961 -zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y7qj/WsjTVbJmcVfewCHrPSqnI0kB -BIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee95 -70+sB3c4 ------END CERTIFICATE----- - -Digital Signature Trust Co. Global CA 1 -======================================= ------BEGIN CERTIFICATE----- -MIIDKTCCApKgAwIBAgIENnAVljANBgkqhkiG9w0BAQUFADBGMQswCQYDVQQGEwJVUzEkMCIGA1UE -ChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMREwDwYDVQQLEwhEU1RDQSBFMTAeFw05ODEy -MTAxODEwMjNaFw0xODEyMTAxODQwMjNaMEYxCzAJBgNVBAYTAlVTMSQwIgYDVQQKExtEaWdpdGFs -IFNpZ25hdHVyZSBUcnVzdCBDby4xETAPBgNVBAsTCERTVENBIEUxMIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQCgbIGpzzQeJN3+hijM3oMv+V7UQtLodGBmE5gGHKlREmlvMVW5SXIACH7TpWJE -NySZj9mDSI+ZbZUTu0M7LklOiDfBu1h//uG9+LthzfNHwJmm8fOR6Hh8AMthyUQncWlVSn5JTe2i -o74CTADKAqjuAQIxZA9SLRN0dja1erQtcQIBA6OCASQwggEgMBEGCWCGSAGG+EIBAQQEAwIABzBo -BgNVHR8EYTBfMF2gW6BZpFcwVTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG0RpZ2l0YWwgU2lnbmF0 -dXJlIFRydXN0IENvLjERMA8GA1UECxMIRFNUQ0EgRTExDTALBgNVBAMTBENSTDEwKwYDVR0QBCQw -IoAPMTk5ODEyMTAxODEwMjNagQ8yMDE4MTIxMDE4MTAyM1owCwYDVR0PBAQDAgEGMB8GA1UdIwQY -MBaAFGp5fpFpRhgTCgJ3pVlbYJglDqL4MB0GA1UdDgQWBBRqeX6RaUYYEwoCd6VZW2CYJQ6i+DAM -BgNVHRMEBTADAQH/MBkGCSqGSIb2fQdBAAQMMAobBFY0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4GB -ACIS2Hod3IEGtgllsofIH160L+nEHvI8wbsEkBFKg05+k7lNQseSJqBcNJo4cvj9axY+IO6CizEq -kzaFI4iKPANo08kJD038bKTaKHKTDomAsH3+gG9lbRgzl4vCa4nuYD3Im+9/KzJic5PLPON74nZ4 -RbyhkwS7hp86W0N6w4pl ------END CERTIFICATE----- - 
-Digital Signature Trust Co. Global CA 3 -======================================= ------BEGIN CERTIFICATE----- -MIIDKTCCApKgAwIBAgIENm7TzjANBgkqhkiG9w0BAQUFADBGMQswCQYDVQQGEwJVUzEkMCIGA1UE -ChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMREwDwYDVQQLEwhEU1RDQSBFMjAeFw05ODEy -MDkxOTE3MjZaFw0xODEyMDkxOTQ3MjZaMEYxCzAJBgNVBAYTAlVTMSQwIgYDVQQKExtEaWdpdGFs -IFNpZ25hdHVyZSBUcnVzdCBDby4xETAPBgNVBAsTCERTVENBIEUyMIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQC/k48Xku8zExjrEH9OFr//Bo8qhbxe+SSmJIi2A7fBw18DW9Fvrn5C6mYjuGOD -VvsoLeE4i7TuqAHhzhy2iCoiRoX7n6dwqUcUP87eZfCocfdPJmyMvMa1795JJ/9IKn3oTQPMx7JS -xhcxEzu1TdvIxPbDDyQq2gyd55FbgM2UnQIBA6OCASQwggEgMBEGCWCGSAGG+EIBAQQEAwIABzBo -BgNVHR8EYTBfMF2gW6BZpFcwVTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG0RpZ2l0YWwgU2lnbmF0 -dXJlIFRydXN0IENvLjERMA8GA1UECxMIRFNUQ0EgRTIxDTALBgNVBAMTBENSTDEwKwYDVR0QBCQw -IoAPMTk5ODEyMDkxOTE3MjZagQ8yMDE4MTIwOTE5MTcyNlowCwYDVR0PBAQDAgEGMB8GA1UdIwQY -MBaAFB6CTShlgDzJQW6sNS5ay97u+DlbMB0GA1UdDgQWBBQegk0oZYA8yUFurDUuWsve7vg5WzAM -BgNVHRMEBTADAQH/MBkGCSqGSIb2fQdBAAQMMAobBFY0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4GB -AEeNg61i8tuwnkUiBbmi1gMOOHLnnvx75pO2mqWilMg0HZHRxdf0CiUPPXiBng+xZ8SQTGPdXqfi -up/1902lMXucKS1M/mQ+7LZT/uqb7YLbdHVLB3luHtgZg3Pe9T7Qtd7nS2h9Qy4qIOF+oHhEngj1 -mPnHfxsb1gYgAlihw6ID ------END CERTIFICATE----- - -Verisign Class 3 Public Primary Certification Authority -======================================================= ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkGA1UEBhMCVVMx -FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5 -IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVow -XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz -IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA -A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94 -f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol 
-hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBAgUAA4GBALtMEivPLCYA -TxQT3ab7/AoRhIzzKBxnki98tsX63/Dolbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59Ah -WM1pF+NEHJwZRDmJXNycAA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2Omuf -Tqj/ZA1k ------END CERTIFICATE----- - -Verisign Class 3 Public Primary Certification Authority - G2 -============================================================ ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJBgNVBAYTAlVT -MRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMgUHJpbWFy -eSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVz -dCBOZXR3b3JrMB4XDTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVT -MRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMgUHJpbWFy -eSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVz -dCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCO -FoUgRm1HP9SFIIThbbP4pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71 -lSk8UOg013gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwIDAQAB -MA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSkU01UbSuvDV1Ai2TT -1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7iF6YM40AIOw7n60RzKprxaZLvcRTD -Oaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpYoJ2daZH9 ------END CERTIFICATE----- - -GlobalSign Root CA -================== ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQkUx -GTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jvb3QgQ0ExGzAZBgNVBAMTEkds -b2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAwMDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNV -BAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYD -VQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDa 
-DuaZjc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavpxy0Sy6sc -THAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp1Wrjsok6Vjk4bwY8iGlb -Kk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdGsnUOhugZitVtbNV4FpWi6cgKOOvyJBNP -c1STE4U6G7weNLWLBYy5d4ux2x8gkasJU26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrX -gzT/LCrBbBlDSgeF59N89iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0BAQUF -AAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOzyj1hTdNGCbM+w6Dj -Y1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE38NflNUVyRRBnMRddWQVDf9VMOyG -j/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymPAbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhH -hm4qxFYxldBniYUr+WymXUadDKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveC -X4XSQRjbgbMEHMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -GlobalSign Root CA - R2 -======================= ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4GA1UECxMXR2xv -YmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh -bFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT -aWduIFJvb3QgQ0EgLSBSMjETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln -bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6 -ErPLv4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8eoLrvozp -s6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklqtTleiDTsvHgMCJiEbKjN -S7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzdC9XZzPnqJworc5HGnRusyMvo4KD0L5CL -TfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pazq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6C -ygPCm48CAwEAAaOBnDCBmTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQUm+IHV2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5nbG9i -YWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG3lm0mi3f3BmGLjAN -BgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4GsJ0/WwbgcQ3izDJr86iw8bmEbTUsp 
-9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu -01yiPqFbQfXf5WRDLenVOavSot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG7 -9G+dwfCMNYxdAfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -ValiCert Class 1 VA -=================== ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRp -b24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZh -bGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIy -MjM0OFoXDTE5MDYyNTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0 -d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEg -UG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0 -LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUA -A4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9YLqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIi -GQj4/xEjm84H9b9pGib+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCm -DuJWBQ8YTfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0LBwG -lN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLWI8sogTLDAHkY7FkX -icnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPwnXS3qT6gpf+2SQMT2iLM7XGCK5nP -Orf1LXLI ------END CERTIFICATE----- - -ValiCert Class 2 VA -=================== ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRp -b24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZh -bGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAw -MTk1NFoXDTE5MDYyNjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0 -d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIg 
-UG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0 -LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUA -A4GNADCBiQKBgQDOOnHK5avIWZJV16vYdA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVC -CSRrCl6zfN1SLUzm1NZ9WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7Rf -ZHM047QSv4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9vUJSZ -SWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTuIYEZoDJJKPTEjlbV -UjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwCW/POuZ6lcg5Ktz885hZo+L7tdEy8 -W9ViH0Pd ------END CERTIFICATE----- - -RSA Root Certificate 1 -====================== ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRp -b24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZh -bGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAw -MjIzM1oXDTE5MDYyNjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0 -d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMg -UG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0 -LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMIGfMA0GCSqGSIb3DQEBAQUA -A4GNADCBiQKBgQDjmFGWHOjVsQaBalfDcnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td -3zZxFJmP3MKS8edgkpfs2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89H -BFx1cQqYJJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliEZwgs -3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJn0WuPIqpsHEzXcjF -V9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/APhmcGcwTTYJBtYze4D1gCCAPRX5r -on+jjBXu ------END CERTIFICATE----- - -Verisign Class 3 Public Primary Certification Authority - G3 -============================================================ ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQswCQYDVQQGEwJV 
-UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv -cmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRy -dXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhv -cml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDMgUHVibGljIFByaW1hcnkg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAMu6nFL8eB8aHm8bN3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1 -EUGO+i2tKmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGukxUc -cLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBmCC+Vk7+qRy+oRpfw -EuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJXwzw3sJ2zq/3avL6QaaiMxTJ5Xpj -055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWuimi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEA -ERSWwauSCPc/L8my/uRan2Te2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5f -j267Cz3qWhMeDGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565pF4ErWjfJXir0 -xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGtTxzhT5yvDwyd93gN2PQ1VoDa -t20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -Verisign Class 4 Public Primary Certification Authority - G3 -============================================================ ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQswCQYDVQQGEwJV -UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv -cmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQsw 
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRy -dXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhv -cml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWduIENsYXNzIDQgUHVibGljIFByaW1hcnkg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAK3LpRFpxlmr8Y+1GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaS -tBO3IFsJ+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0GbdU6LM -8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLmNxdLMEYH5IBtptiW -Lugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XYufTsgsbSPZUd5cBPhMnZo0QoBmrX -Razwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEA -j/ola09b5KROJ1WrIhVZPMq1CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXtt -mhwwjIDLk5Mqg6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm -fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c2NU8Qh0XwRJd -RTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/bLvSHgCwIe34QWKCudiyxLtG -UPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== ------END CERTIFICATE----- - -Entrust.net Secure Server CA -============================ ------BEGIN CERTIFICATE----- -MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMCVVMxFDASBgNV -BAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5uZXQvQ1BTIGluY29ycC4gYnkg -cmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRl -ZDE6MDgGA1UEAxMxRW50cnVzdC5uZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eTAeFw05OTA1MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIG -A1UEChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBi -eSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1p -dGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUAA4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQ -aO2f55M28Qpku0f1BBc/I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5 
-gXpa0zf3wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OCAdcw -ggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHboIHYpIHVMIHSMQsw -CQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5l -dC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF -bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu -dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0MFqBDzIwMTkw -NTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8BdiE1U9s/8KAGv7UISX8+1i0Bow -HQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAaMAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EA -BAwwChsEVjQuMAMCBJAwDQYJKoZIhvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyN -Ewr75Ji174z4xRAN95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9 -n9cd2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= ------END CERTIFICATE----- - -Entrust.net Premium 2048 Secure Server CA -========================================= ------BEGIN CERTIFICATE----- -MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChMLRW50cnVzdC5u -ZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBpbmNvcnAuIGJ5IHJlZi4gKGxp -bWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNV -BAMTKkVudHJ1c3QubmV0IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQx -NzUwNTFaFw0xOTEyMjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3 -d3d3LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTEl -MCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEGA1UEAxMqRW50cnVzdC5u -ZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgpMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEArU1LqRKGsuqjIAcVFmQqK0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOL -Gp18EzoOH1u3Hs/lJBQesYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSr -hRSGlVuXMlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVTXTzW 
-nLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/HoZdenoVve8AjhUi -VBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH4QIDAQABo3QwcjARBglghkgBhvhC -AQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGAvtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdER -gL7YibkIozH5oSQJFrlwMB0GCSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0B -AQUFAAOCAQEAWUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo -oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQh7A6tcOdBTcS -o8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18f3v/rxzP5tsHrV7bhZ3QKw0z -2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfNB/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjX -OP/swNlQ8C5LWK5Gb9Auw2DaclVyvUxFnmG6v4SBkgPR0ml8xQ== ------END CERTIFICATE----- - -Baltimore CyberTrust Root -========================= ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJRTESMBAGA1UE -ChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYDVQQDExlCYWx0aW1vcmUgQ3li -ZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoXDTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMC -SUUxEjAQBgNVBAoTCUJhbHRpbW9yZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFs -dGltb3JlIEN5YmVyVHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKME -uyKrmD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjrIZ3AQSsB -UnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeKmpYcqWe4PwzV9/lSEy/C -G9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSuXmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9 -XbIGevOF6uvUA65ehD5f/xXtabz5OTZydc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjpr -l3RjM71oGDHweI12v/yejl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoI -VDaGezq1BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEB -BQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT929hkTI7gQCvlYpNRh -cL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3WgxjkzSswF07r51XgdIGn9w/xZchMB5 -hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsa -Y71k5h+3zvDyny67G7fyUIhzksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9H 
-RCwBXbsdtTLSR9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -Equifax Secure Global eBusiness CA -================================== ------BEGIN CERTIFICATE----- -MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT -RXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBTZWN1cmUgR2xvYmFsIGVCdXNp -bmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIwMDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMx -HDAaBgNVBAoTE0VxdWlmYXggU2VjdXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEds -b2JhbCBlQnVzaW5lc3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRV -PEnCUdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc58O/gGzN -qfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/o5brhTMhHD4ePmBudpxn -hcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAHMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j -BBgwFoAUvqigdHJQa0S3ySPY+6j/s1draGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hs -MA0GCSqGSIb3DQEBBAUAA4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okEN -I7SS+RkAZ70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv8qIY -NMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV ------END CERTIFICATE----- - -Equifax Secure eBusiness CA 1 -============================= ------BEGIN CERTIFICATE----- -MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT -RXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENB -LTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQwMDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UE -ChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNz -IENBLTEwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ -1MRoRvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBuWqDZQu4a -IZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKwEnv+j6YDAgMBAAGjZjBk -MBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEp4MlIR21kW -Nl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRKeDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQF -AAOBgQB1W6ibAxHm6VZMzfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5 
-lSE/9dR+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN/Bf+ -KpYrtWKmpj29f5JZzVoqgrI3eQ== ------END CERTIFICATE----- - -Equifax Secure eBusiness CA 2 -============================= ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJVUzEXMBUGA1UE -ChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJlIGVCdXNpbmVzcyBDQS0y -MB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoT -DkVxdWlmYXggU2VjdXJlMSYwJAYDVQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCB -nzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn -2Z0GvxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/BPO3QSQ5 -BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0CAwEAAaOCAQkwggEFMHAG -A1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUx -JjAkBgNVBAsTHUVxdWlmYXggU2VjdXJlIGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoG -A1UdEAQTMBGBDzIwMTkwNjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9e -uSBIplBqy/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQFMAMB -Af8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUAA4GBAAyGgq3oThr1 -jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia -78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUm -V+GRMOrN ------END CERTIFICATE----- - -AddTrust Low-Value Services Root -================================ ------BEGIN CERTIFICATE----- -MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRU -cnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMwMTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQsw -CQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBO -ZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ulCDtbKRY6 
-54eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6ntGO0/7Gcrjyvd7ZWxbWr -oulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyldI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1 -Zmne3yzxbrww2ywkEtvrNTVokMsAsJchPXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJui -GMx1I4S+6+JNM3GOGvDC+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8w -HQYDVR0OBBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBlMQswCQYDVQQGEwJT -RTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEw -HwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxt -ZBsfzQ3duQH6lmM0MkhHma6X7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0Ph -iVYrqW9yTkkz43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY -eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJlpz/+0WatC7xr -mYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOAWiFeIc9TVPC6b4nbqKqVz4vj -ccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= ------END CERTIFICATE----- - -AddTrust External Root -====================== ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYD -VQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEw -NDgzOFowbzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRU -cnVzdCBFeHRlcm5hbCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0Eg -Um9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvtH7xsD821 -+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9uMq/NzgtHj6RQa1wVsfw -Tz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzXmk6vBbOmcZSccbNQYArHE504B4YCqOmo -aSYYkKtMsE8jqzpPhNjfzp/haW+710LXa0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy -2xSoRcRdKn23tNbE7qzNE0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv7 -7+ldU9U0WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYDVR0P 
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0Jvf6xCZU7wO94CTL -VBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRk -VHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB -IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZl -j7DYd7usQWxHYINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvCNr4TDea9Y355 -e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEXc4g/VhsxOBi0cQ+azcgOno4u -G+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5amnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -AddTrust Public Services Root -============================= ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSAwHgYDVQQDExdBZGRU -cnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAxMDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJ -BgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5l -dHdvcmsxIDAeBgNVBAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV6tsfSlbu -nyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nXGCwwfQ56HmIexkvA/X1i -d9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnPdzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSG -Aa2Il+tmzV7R/9x98oTaunet3IAIx6eH1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAw -HM+A+WD+eeSI8t0A65RF62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0G -A1UdDgQWBBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRUcnVzdCBUVFAgTmV0d29yazEgMB4G -A1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4 -JNojVhaTdt02KLmuG7jD8WS6IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL -+YPoRNWyQSW/iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao 
-GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh4SINhwBk/ox9 -Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQmXiLsks3/QppEIW1cxeMiHV9H -EufOX1362KqxMy3ZdvJOOjMMK7MtkAY= ------END CERTIFICATE----- - -AddTrust Qualified Certificates Root -==================================== ------BEGIN CERTIFICATE----- -MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSMwIQYDVQQDExpBZGRU -cnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcx -CzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQ -IE5ldHdvcmsxIzAhBgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwqxBb/4Oxx -64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G87B4pfYOQnrjfxvM0PC3 -KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i2O+tCBGaKZnhqkRFmhJePp1tUvznoD1o -L/BLcHwTOK28FSXx1s6rosAx1i+f4P8UWfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GR -wVY18BTcZTYJbqukB8c10cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HU -MIHRMB0GA1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6FrpGkwZzELMAkGA1UE -BhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRUcnVzdCBUVFAgTmV0d29y -azEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlmaWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBABmrder4i2VhlRO6aQTvhsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxG -GuoYQ992zPlmhpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X -dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3P6CxB9bpT9ze -RXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9YiQBCYz95OdBEsIJuQRno3eDB -iFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5noxqE= ------END CERTIFICATE----- - -Entrust Root Certification Authority -==================================== ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMCVVMxFjAUBgNV 
-BAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0Lm5ldC9DUFMgaXMgaW5jb3Jw -b3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMWKGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsG -A1UEAxMkRW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0 -MloXDTI2MTEyNzIwNTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMu -MTkwNwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSByZWZlcmVu -Y2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNVBAMTJEVudHJ1c3QgUm9v -dCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -ALaVtkNC+sZtKm9I35RMOVcF7sN5EUFoNu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYsz -A9u3g3s+IIRe7bJWKKf44LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOww -Cj0Yzfv9KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGIrb68 -j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi94DkZfs0Nw4pgHBN -rziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOBsDCBrTAOBgNVHQ8BAf8EBAMCAQYw -DwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAigA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1 -MzQyWjAfBgNVHSMEGDAWgBRokORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DH -hmak8fdLQ/uEvW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9tO1KzKtvn1ISM -Y/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6ZuaAGAT/3B+XxFNSRuzFVJ7yVTa -v52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTS -W3iDVuycNsMm4hH2Z0kdkquM++v/eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0 -tHuu2guQOHXvgR1m0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -RSA Security 2048 v3 -==================== ------BEGIN CERTIFICATE----- -MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6MRkwFwYDVQQK -ExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJpdHkgMjA0OCBWMzAeFw0wMTAy -MjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAXBgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAb -BgNVBAsTFFJTQSBTZWN1cml0eSAyMDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC 
-AQEAt49VcdKA3XtpeafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7 -Jylg/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGlwSMiuLgb -WhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnhAMFRD0xS+ARaqn1y07iH -KrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP -+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpuAWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4E -FgQUB8NRMKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYcHnmY -v/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/Zb5gEydxiKRz44Rj -0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+f00/FGj1EVDVwfSQpQgdMWD/YIwj -VAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVOrSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395 -nzIlQnQFgCi/vcEkllgVsRch6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kA -pKnXwiJPZ9d37CAFYd4= ------END CERTIFICATE----- - -GeoTrust Global CA -================== ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9iYWwgQ0EwHhcNMDIwNTIxMDQw -MDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5j -LjEbMBkGA1UEAxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjo -BbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDviS2Aelet -8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU1XupGc1V3sjs0l44U+Vc -T4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagU -vTLrGAMoUgRx5aszPeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVk -DBF9qn1luMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKInZ57Q -zxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfStQWVYrmm3ok9Nns4 -d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcFPseKUgzbFbS9bZvlxrFUaKnjaZC2 
-mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Unhw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6p -XE0zX5IJL4hmXXeXxx12E6nV5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvm -Mw== ------END CERTIFICATE----- - -GeoTrust Global CA 2 -==================== ------BEGIN CERTIFICATE----- -MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN -R2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFsIENBIDIwHhcNMDQwMzA0MDUw -MDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5j -LjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw -ggEKAoIBAQDvPE1APRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/ -NTL8Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hLTytCOb1k -LUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL5mkWRxHCJ1kDs6ZgwiFA -Vvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7S4wMcoKK+xfNAGw6EzywhIdLFnopsk/b -HdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNH -K266ZUapEBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6tdEPx7 -srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv/NgdRN3ggX+d6Yvh -ZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywNA0ZF66D0f0hExghAzN4bcLUprbqL -OzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkC -x1YAzUm5s2x7UwQa4qjJqhIFI8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqF -H4z1Ir+rzoPz4iIprn2DQKi6bA== ------END CERTIFICATE----- - -GeoTrust Universal CA -===================== ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN -R2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVyc2FsIENBMB4XDTA0MDMwNDA1 -MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IElu -Yy4xHjAcBgNVBAMTFUdlb1RydXN0IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBAKYVVaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9t 
-JPi8cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTTQjOgNB0e -RXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFhF7em6fgemdtzbvQKoiFs -7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2vc7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d -8Lsrlh/eezJS/R27tQahsiFepdaVaH/wmZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7V -qnJNk22CDtucvc+081xdVHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3Cga -Rr0BHdCXteGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZf9hB -Z3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfReBi9Fi1jUIxaS5BZu -KGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+nhutxx9z3SxPGWX9f5NAEC7S8O08 -ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0 -XG0D08DYj3rWMB8GA1UdIwQYMBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIB -hjANBgkqhkiG9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fXIwjhmF7DWgh2 -qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzynANXH/KttgCJwpQzgXQQpAvvL -oJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0zuzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsK -xr2EoyNB3tZ3b4XUhRxQ4K5RirqNPnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxF -KyDuSN/n3QmOGKjaQI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2 -DFKWkoRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9ER/frslK -xfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQtDF4JbAiXfKM9fJP/P6EU -p8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/SfuvmbJxPgWp6ZKy7PtXny3YuxadIwVyQD8vI -P/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -GeoTrust Universal CA 2 -======================= ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMN -R2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwHhcNMDQwMzA0 -MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3Qg -SW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUA 
-A4ICDwAwggIKAoICAQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0 -DE81WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUGFF+3Qs17 -j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdqXbboW0W63MOhBW9Wjo8Q -JqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxLse4YuU6W3Nx2/zu+z18DwPw76L5GG//a -QMJS9/7jOvdqdzXQ2o3rXhhqMcceujwbKNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2 -WP0+GfPtDCapkzj4T8FdIgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP -20gaXT73y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRthAAn -ZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgocQIgfksILAAX/8sgC -SqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4Lt1ZrtmhN79UNdxzMk+MBB4zsslG -8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2 -+/CfXGJx7Tz0RzgQKzAfBgNVHSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8E -BAMCAYYwDQYJKoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQL1EuxBRa3ugZ -4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgrFg5fNuH8KrUwJM/gYwx7WBr+ -mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSoag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpq -A1Ihn0CoZ1Dy81of398j9tx4TuaYT1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpg -Y+RdM4kX2TGq2tbzGDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiP -pm8m1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJVOCiNUW7d -FGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH6aLcr34YEoP9VhdBLtUp -gn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwXQMAJKOSLakhT2+zNVVXxxvjpoixMptEm -X36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -America Online Root Certification Authority 1 -============================================= ------BEGIN CERTIFICATE----- -MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT -QW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBPbmxpbmUgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkG 
-A1UEBhMCVVMxHDAaBgNVBAoTE0FtZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2Eg -T25saW5lIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lkhsmj76CG -v2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym1BW32J/X3HGrfpq/m44z -DyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsWOqMFf6Dch9Wc/HKpoH145LcxVR5lu9Rh -sCFg7RAycsWSJR74kEoYeEfffjA3PlAb2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP -8c9GsEsPPt2IYriMqQkoO3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAUAK3Z -o/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBBQUAA4IBAQB8itEf -GDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkFZu90821fnZmv9ov761KyBZiibyrF -VL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAbLjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft -3OJvx8Fi8eNy1gTIdGcL+oiroQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43g -Kd8hdIaC2y+CMMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds -sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 ------END CERTIFICATE----- - -America Online Root Certification Authority 2 -============================================= ------BEGIN CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEcMBoGA1UEChMT -QW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBPbmxpbmUgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkG -A1UEBhMCVVMxHDAaBgNVBAoTE0FtZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2Eg -T25saW5lIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC206B89en -fHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFciKtZHgVdEglZTvYYUAQv8 -f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2JxhP7JsowtS013wMPgwr38oE18aO6lhO -qKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JN -RvCAOVIyD+OEsnpD8l7eXz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0 
-gBe4lL8BPeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67Xnfn -6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEqZ8A9W6Wa6897Gqid -FEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZo2C7HK2JNDJiuEMhBnIMoVxtRsX6 -Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnj -B453cMor9H124HhnAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3Op -aaEg5+31IqEjFNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmnxPBUlgtk87FY -T15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2LHo1YGwRgJfMqZJS5ivmae2p -+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzcccobGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXg -JXUjhx5c3LqdsKyzadsXg8n33gy8CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//Zoy -zH1kUQ7rVyZ2OuMeIjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgO -ZtMADjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2FAjgQ5ANh -1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUXOm/9riW99XJZZLF0Kjhf -GEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPbAZO1XB4Y3WRayhgoPmMEEf0cjQAPuDff -Z4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQlZvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuP -cX/9XhmgD0uRuMRUvAawRY8mkaKO/qk= ------END CERTIFICATE----- - -Visa eCommerce Root -=================== ------BEGIN CERTIFICATE----- -MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBrMQswCQYDVQQG -EwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2Ug -QXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2 -WhcNMjIwNjI0MDAxNjEyWjBrMQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMm -VmlzYSBJbnRlcm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv -bW1lcmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h2mCxlCfL -F9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4ElpF7sDPwsRROEW+1QK8b -RaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdVZqW1LS7YgFmypw23RuwhY/81q6UCzyr0 
-TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI -/k4+oKsGGelT84ATB+0tvz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzs -GHxBvfaLdXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEG -MB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUFAAOCAQEAX/FBfXxc -CLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcRzCSs00Rsca4BIGsDoo8Ytyk6feUW -YFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pz -zkWKsKZJ/0x9nXGIxHYdkFsd7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBu -YQa7FkKMcPcw++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt -398znM/jra6O1I7mT1GvFpLgXPYHDw== ------END CERTIFICATE----- - -Certum Root CA -============== ------BEGIN CERTIFICATE----- -MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBMMRswGQYDVQQK -ExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBDQTAeFw0wMjA2MTExMDQ2Mzla -Fw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBMMRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8u -by4xEjAQBgNVBAMTCUNlcnR1bSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6x -wS7TT3zNJc4YPk/EjG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdL -kKWoePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GIULdtlkIJ -89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapuOb7kky/ZR6By6/qmW6/K -Uz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUgAKpoC6EahQGcxEZjgoi2IrHu/qpGWX7P -NSzVttpd90gzFFS269lvzs2I1qsb2pY7HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkq -hkiG9w0BAQUFAAOCAQEAuI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+ -GXYkHAQaTOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTgxSvg -GrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1qCjqTE5s7FCMTY5w/ -0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5xO/fIR/RpbxXyEV6DHpx8Uq79AtoS -qFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs6GAqm4VKQPNriiTsBhYscw== ------END CERTIFICATE----- - -Comodo AAA Services root -======================== ------BEGIN CERTIFICATE----- 
-MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEbMBkGA1UECAwS -R3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0Eg -TGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAw -MFoXDTI4MTIzMTIzNTk1OVowezELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hl -c3RlcjEQMA4GA1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNV -BAMMGEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQuaBtDFcCLNSS1UY8y2bmhG -C1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe3M/vg4aijJRPn2jymJBGhCfHdr/jzDUs -i14HZGWCwEiwqJH5YZ92IFCokcdmtet4YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszW -Y19zjNoFmag4qMsXeDZRrOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjH -Ypy+g8cmez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQUoBEK -Iz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wewYDVR0f -BHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20vQUFBQ2VydGlmaWNhdGVTZXJ2aWNl -cy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29tb2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2Vz -LmNybDANBgkqhkiG9w0BAQUFAAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm -7l3sAg9g1o1QGE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2G9w84FoVxp7Z -8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsil2D4kF501KKaU73yqWjgom7C -12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -Comodo Secure Services root -=========================== ------BEGIN CERTIFICATE----- -MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEbMBkGA1UECAwS -R3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0Eg -TGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRpZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAw -MDAwMFoXDTI4MTIzMTIzNTk1OVowfjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFu -Y2hlc3RlcjEQMA4GA1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAi 
-BgNVBAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPMcm3ye5drswfxdySRXyWP -9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3SHpR7LZQdqnXXs5jLrLxkU0C8j6ysNstc -rbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rC -oznl2yY4rYsK7hljxxwk3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3V -p6ea5EQz6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNVHQ4E -FgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8w -gYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL1NlY3VyZUNlcnRpZmlj -YXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRwOi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlm -aWNhdGVTZXJ2aWNlcy5jcmwwDQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm -4J4oqF7Tt/Q05qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj -Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtIgKvcnDe4IRRL -DXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJaD61JlfutuC23bkpgHl9j6Pw -pCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDlizeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1H -RR3B7Hzs/Sk= ------END CERTIFICATE----- - -Comodo Trusted Services root -============================ ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEbMBkGA1UECAwS -R3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRowGAYDVQQKDBFDb21vZG8gQ0Eg -TGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEw -MDAwMDBaFw0yODEyMzEyMzU5NTlaMH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1h -bmNoZXN0ZXIxEDAOBgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUw -IwYDVQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWWfnJSoBVC21ndZHoa0Lh7 -3TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMtTGo87IvDktJTdyR0nAducPy9C1t2ul/y -/9c3S0pgePfw+spwtOpZqqPOSC+pw7ILfhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6 
-juljatEPmsbS9Is6FARW1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsS -ivnkBbA7kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0GA1Ud -DgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21vZG9jYS5jb20vVHJ1c3RlZENlcnRp -ZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRodHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENl -cnRpZmljYXRlU2VydmljZXMuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8Ntw -uleGFTQQuS9/HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 -pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxISjBc/lDb+XbDA -BHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+xqFx7D+gIIxmOom0jtTYsU0l -R+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/AtyjcndBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O -9y5Xt5hwXsjEeLBi ------END CERTIFICATE----- - -QuoVadis Root CA -================ ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJCTTEZMBcGA1UE -ChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAz -MTkxODMzMzNaFw0yMTAzMTcxODMzMzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRp -cyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQD -EyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Ypli4kVEAkOPcahdxYTMuk -J0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2DrOpm2RgbaIr1VxqYuvXtdj182d6UajtL -F8HVj71lODqV0D1VNk7feVcxKh7YWWVJWCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeL -YzcS19Dsw3sgQUSj7cugF+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWen -AScOospUxbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCCAk4w -PQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVvdmFkaXNvZmZzaG9y -ZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREwggENMIIBCQYJKwYBBAG+WAABMIH7 -MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNlIG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmlj 
-YXRlIGJ5IGFueSBwYXJ0eSBhc3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJs -ZSBzdGFuZGFyZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYIKwYBBQUHAgEW -Fmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3TKbkGGew5Oanwl4Rqy+/fMIGu -BgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rqy+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkw -FwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MS4wLAYDVQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6 -tlCLMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSkfnIYj9lo -fFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf87C9TqnN7Az10buYWnuul -LsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1RcHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2x -gI4JVrmcGmD+XcHXetwReNDWXcG31a0ymQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi -5upZIof4l/UO/erMkqQWxFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi -5nrQNiOKSnQ2+Q== ------END CERTIFICATE----- - -QuoVadis Root CA 2 -================== ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoT -EFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMjAeFw0wNjExMjQx -ODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4IC -DwAwggIKAoICAQCaGMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6 -XJxgFyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55JWpzmM+Yk -lvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bBrrcCaoF6qUWD4gXmuVbB -lDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp+ARz8un+XJiM9XOva7R+zdRcAitMOeGy -lZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt -66/3FsvbzSUr5R/7mp/iUcw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1Jdxn -wQ5hYIizPtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og/zOh -D7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UHoycR7hYQe7xFSkyy 
-BNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuIyV77zGHcizN300QyNQliBJIWENie -J0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1Ud -DgQWBBQahGK8SEwzJQTU7tD2A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGU -a6FJpEcwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2fBluornFdLwUv -Z+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzng/iN/Ae42l9NLmeyhP3ZRPx3 -UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2BlfF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodm -VjB3pjd4M1IQWK4/YY7yarHvGH5KWWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK -+JDSV6IZUaUtl0HaB0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrW -IozchLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPRTUIZ3Ph1 -WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWDmbA4CD/pXvk1B+TJYm5X -f6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0ZohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II -4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8 -VCLAAVBpQ570su9t+Oza8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -QuoVadis Root CA 3 -================== ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0xGTAXBgNVBAoT -EFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJvb3QgQ0EgMzAeFw0wNjExMjQx -OTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMRswGQYDVQQDExJRdW9WYWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4IC -DwAwggIKAoICAQDMV0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNgg -DhoB4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUrH556VOij -KTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd8lyyBTNvijbO0BNO/79K -DDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9CabwvvWhDFlaJKjdhkf2mrk7AyxRllDdLkgbv -BNDInIjbC3uBr7E9KsRlOni27tyAsdLTmZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwp -p5ijJUMv7/FfJuGITfhebtfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8 
-nT8KKdjcT5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDtWAEX -MJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZc6tsgLjoC2SToJyM -Gf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A4iLItLRkT9a6fUg+qGkM17uGcclz -uD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYDVR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHT -BgkrBgEEAb5YAAMwgcUwgZMGCCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmlj -YXRlIGNvbnN0aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVudC4wLQYIKwYB -BQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2NwczALBgNVHQ8EBAMCAQYwHQYD -VR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4GA1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4 -ywLQoUmkRzBFMQswCQYDVQQGEwJCTTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UE -AxMSUXVvVmFkaXMgUm9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZV -qyM07ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSemd1o417+s -hvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd+LJ2w/w4E6oM3kJpK27z -POuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2 -Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadNt54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp -8kokUvd0/bpO5qgdAm6xDYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBC -bjPsMZ57k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6szHXu -g/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0jWy10QJLZYxkNc91p -vGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeTmJlglFwjz1onl14LBQaTNx47aTbr -qZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -Security Communication Root CA -============================== ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMP -U0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEw -HhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMP -U0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEw 
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw -8yl89f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJDKaVv0uM -DPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9Ms+k2Y7CI9eNqPPYJayX -5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/NQV3Is00qVUarH9oe4kA92819uZKAnDfd -DJZkndwi92SL32HeFZRSFaB9UslLqCHJxrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2 -JChzAgMBAAGjPzA9MB0GA1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYw -DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vGkl3g -0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfrUj94nK9NrvjVT8+a -mCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5Bw+SUEmK3TGXX8npN6o7WWWXlDLJ -s58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJUJRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ -6rBK+1YWc26sTfcioU+tHXotRSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAi -FL39vmwLAw== ------END CERTIFICATE----- - -Sonera Class 2 Root CA -====================== ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEPMA0GA1UEChMG -U29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAxMDQwNjA3Mjk0MFoXDTIxMDQw -NjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNVBAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJh -IENsYXNzMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3 -/Ei9vX+ALTU74W+oZ6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybT -dXnt5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s3TmVToMG -f+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2EjvOr7nQKV0ba5cTppCD8P -tOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu8nYybieDwnPz3BjotJPqdURrBGAgcVeH -nfO+oJAjPYok4doh28MCAwEAAaMzMDEwDwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITT -XjwwCwYDVR0PBAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt -0jSv9zilzqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/3DEI -cbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvDFNr450kkkdAdavph -Oe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6Tk6ezAyNlNzZRZxe7EJQY670XcSx 
-EtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLH -llpwrN9M ------END CERTIFICATE----- - -Staat der Nederlanden Root CA -============================= ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJOTDEeMBwGA1UE -ChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFhdCBkZXIgTmVkZXJsYW5kZW4g -Um9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEyMTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4w -HAYDVQQKExVTdGFhdCBkZXIgTmVkZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxh -bmRlbiBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFt -vsznExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw719tV2U02P -jLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MOhXeiD+EwR+4A5zN9RGca -C1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+UtFE5A3+y3qcym7RHjm+0Sq7lr7HcsBth -vJly3uSJt3omXdozSVtSnA71iq3DuD3oBmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn6 -22r+I/q85Ej0ZytqERAhSQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRV -HSAAMDwwOgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMvcm9v -dC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA7Jbg0zTBLL9s+DAN -BgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k/rvuFbQvBgwp8qiSpGEN/KtcCFtR -EytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzmeafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbw -MVcoEoJz6TMvplW0C5GUR5z6u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3y -nGQI0DvDKcWy7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR -iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw== ------END CERTIFICATE----- - -TDC Internet Root CA -==================== ------BEGIN CERTIFICATE----- -MIIEKzCCAxOgAwIBAgIEOsylTDANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJESzEVMBMGA1UE -ChMMVERDIEludGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQgUm9vdCBDQTAeFw0wMTA0MDUx -NjMzMTdaFw0yMTA0MDUxNzAzMTdaMEMxCzAJBgNVBAYTAkRLMRUwEwYDVQQKEwxUREMgSW50ZXJu -ZXQxHTAbBgNVBAsTFFREQyBJbnRlcm5ldCBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A 
-MIIBCgKCAQEAxLhAvJHVYx/XmaCLDEAedLdInUaMArLgJF/wGROnN4NrXceO+YQwzho7+vvOi20j -xsNuZp+Jpd/gQlBn+h9sHvTQBda/ytZO5GhgbEaqHF1j4QeGDmUApy6mcca8uYGoOn0a0vnRrEvL -znWv3Hv6gXPU/Lq9QYjUdLP5Xjg6PEOo0pVOd20TDJ2PeAG3WiAfAzc14izbSysseLlJ28TQx5yc -5IogCSEWVmb/Bexb4/DPqyQkXsN/cHoSxNK1EKC2IeGNeGlVRGn1ypYcNIUXJXfi9i8nmHj9eQY6 -otZaQ8H/7AQ77hPv01ha/5Lr7K7a8jcDR0G2l8ktCkEiu7vmpwIDAQABo4IBJTCCASEwEQYJYIZI -AYb4QgEBBAQDAgAHMGUGA1UdHwReMFwwWqBYoFakVDBSMQswCQYDVQQGEwJESzEVMBMGA1UEChMM -VERDIEludGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQgUm9vdCBDQTENMAsGA1UEAxMEQ1JM -MTArBgNVHRAEJDAigA8yMDAxMDQwNTE2MzMxN1qBDzIwMjEwNDA1MTcwMzE3WjALBgNVHQ8EBAMC -AQYwHwYDVR0jBBgwFoAUbGQBx/2FbazI2p5QCIUItTxWqFAwHQYDVR0OBBYEFGxkAcf9hW2syNqe -UAiFCLU8VqhQMAwGA1UdEwQFMAMBAf8wHQYJKoZIhvZ9B0EABBAwDhsIVjUuMDo0LjADAgSQMA0G -CSqGSIb3DQEBBQUAA4IBAQBOQ8zR3R0QGwZ/t6T609lN+yOfI1Rb5osvBCiLtSdtiaHsmGnc540m -gwV5dOy0uaOXwTUA/RXaOYE6lTGQ3pfphqiZdwzlWqCE/xIWrG64jcN7ksKsLtB9KOy282A4aW8+ -2ARVPp7MVdK6/rtHBNcK2RYKNCn1WBPVT8+PVkuzHu7TmHnaCB4Mb7j4Fifvwm899qNLPg7kbWzb -O0ESm70NRyN/PErQr8Cv9u8btRXE64PECV90i9kR+8JWsTz4cMo0jUNAE4z9mQNUecYu6oah9jrU -Cbz0vGbMPVjQV0kK7iXiQe4T+Zs4NNEA9X7nlB38aQNiuJkFBT1reBK9sG9l ------END CERTIFICATE----- - -UTN DATACorp SGC Root CA -======================== ------BEGIN CERTIFICATE----- -MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCBkzELMAkGA1UE -BhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEeMBwGA1UEChMVVGhl -IFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZ -BgNVBAMTElVUTiAtIERBVEFDb3JwIFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBa -MIGTMQswCQYDVQQGEwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4w -HAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cudXNlcnRy -dXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ys -raP6LnD43m77VkIVni5c7yPeIbkFdicZD0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlo 
-wHDyUwDAXlCCpVZvNvlK4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA -9P4yPykqlXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulWbfXv -33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQABo4GrMIGoMAsGA1Ud -DwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRTMtGzz3/64PGgXYVOktKeRR20TzA9 -BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3JsLnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dD -LmNybDAqBgNVHSUEIzAhBggrBgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3 -DQEBBQUAA4IBAQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft -Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyjj98C5OBxOvG0 -I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVHKWss5nbZqSl9Mt3JNjy9rjXx -EZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwP -DPafepE39peC4N1xaf92P2BNPM/3mfnGV/TJVTl4uix5yaaIK/QI ------END CERTIFICATE----- - -UTN USERFirst Hardware Root CA -============================== ------BEGIN CERTIFICATE----- -MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCBlzELMAkGA1UE -BhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEeMBwGA1UEChMVVGhl -IFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAd -BgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgx -OTIyWjCBlzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0 -eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8vd3d3LnVz -ZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdhcmUwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlI -wrthdBKWHTxqctU8EGc6Oe0rE81m65UJM6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFd -tqdt++BxF2uiiPsA3/4aMXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8 -i4fDidNdoI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqIDsjf -Pe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9KsyoUhbAgMBAAGjgbkw -gbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFKFyXyYbKJhDlV0HN9WF 
-lp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNF -UkZpcnN0LUhhcmR3YXJlLmNybDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUF -BwMGBggrBgEFBQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM -//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28GpgoiskliCE7/yMgUsogW -XecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gECJChicsZUN/KHAG8HQQZexB2 -lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kn -iCrVWFCVH/A7HFe7fRQ5YiuayZSSKqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67 -nfhmqA== ------END CERTIFICATE----- - -Camerfirma Chambers of Commerce Root -==================================== ------BEGIN CERTIFICATE----- -MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEnMCUGA1UEChMe -QUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1i -ZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAx -NjEzNDNaFw0zNzA5MzAxNjEzNDRaMH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZp -cm1hIFNBIENJRiBBODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3Jn -MSIwIAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0BAQEFAAOC -AQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtbunXF/KGIJPov7coISjlU -xFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0dBmpAPrMMhe5cG3nCYsS4No41XQEMIwRH -NaqbYE6gZj3LJgqcQKH0XZi/caulAGgq7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jW -DA+wWFjbw2Y3npuRVDM30pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFV -d9oKDMyXroDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIGA1Ud -EwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5jaGFtYmVyc2lnbi5v -cmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p26EpW1eLTXYGduHRooowDgYDVR0P -AQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hh -bWJlcnNpZ24ub3JnMCcGA1UdEgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYD -VR0gBFEwTzBNBgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz 
-aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEBAAxBl8IahsAi -fJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZdp0AJPaxJRUXcLo0waLIJuvvD -L8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wN -UPf6s+xCX6ndbcj0dc97wXImsQEcXCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/n -ADydb47kMgkdTXg0eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1 -erfutGWaIZDgqtCYvDi1czyL+Nw= ------END CERTIFICATE----- - -Camerfirma Global Chambersign Root -================================== ------BEGIN CERTIFICATE----- -MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEnMCUGA1UEChMe -QUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1i -ZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYx -NDE4WhcNMzcwOTMwMTYxNDE4WjB9MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJt -YSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEg -MB4GA1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUAA4IBDQAw -ggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0Mi+ITaFgCPS3CU6gSS9J -1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/sQJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8O -by4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpVeAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl -6DJWk0aJqCWKZQbua795B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c -8lCrEqWhz0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0TAQH/ -BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1iZXJzaWduLm9yZy9j -aGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4wTcbOX60Qq+UDpfqpFDAOBgNVHQ8B -Af8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAHMCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBj -aGFtYmVyc2lnbi5vcmcwKgYDVR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9y -ZzBbBgNVHSAEVDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh -bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0BAQUFAAOCAQEA -PDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUMbKGKfKX0j//U2K0X1S0E0T9Y 
-gOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXiryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJ -PJ7oKXqJ1/6v/2j1pReQvayZzKWGVwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4 -IBHNfTIzSJRUTN3cecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREes -t2d/AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A== ------END CERTIFICATE----- - -NetLock Notary (Class A) Root -============================= ------BEGIN CERTIFICATE----- -MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQI -EwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6 -dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9j -ayBLb3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oX -DTE5MDIxOTIzMTQ0N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQH -EwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYD -VQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFz -cyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSM -D7tM9DceqQWC2ObhbHDqeLVu0ThEDaiDzl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZ -z+qMkjvN9wfcZnSX9EUi3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC -/tmwqcm8WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LYOph7 -tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2EsiNCubMvJIH5+hCoR6 -4sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCCApswDgYDVR0PAQH/BAQDAgAGMBIG -A1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaC -Ak1GSUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pv -bGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu -IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2Vn -LWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0 -ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFz -IGxlaXJhc2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBh 
-IGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVu -b3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1YW5jZSBh -bmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sg -Q1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFp -bCBhdCBjcHNAbmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5 -ayZrU3/b39/zcT0mwBQOxmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjP -ytoUMaFP0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQQeJB -CWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxkf1qbFFgBJ34TUMdr -KuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK8CtmdWOMovsEPoMOmzbwGOQmIMOM -8CgHrTwXZoi1/baI ------END CERTIFICATE----- - -NetLock Business (Class B) Root -=============================== ------BEGIN CERTIFICATE----- -MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUxETAPBgNVBAcT -CEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0b25zYWdpIEtmdC4xGjAYBgNV -BAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQDEylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikg -VGFudXNpdHZhbnlraWFkbzAeFw05OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYD -VQQGEwJIVTERMA8GA1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRv -bnNhZ2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5ldExvY2sg -VXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB -iQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xKgZjupNTKihe5In+DCnVMm8Bp2GQ5o+2S -o/1bXHQawEfKOml2mrriRBf8TKPV/riXiK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr -1nGTLbO/CVRY7QbrqHvcQ7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNV -HQ8BAf8EBAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZ -RUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRh -dGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQuIEEgaGl0 -ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRv 
-c2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUg -YXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh -c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBz -Oi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6ZXNA -bmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhl -IHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2 -YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBj -cHNAbmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06sPgzTEdM -43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXan3BukxowOR0w2y7jfLKR -stE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKSNitjrFgBazMpUIaD8QFI ------END CERTIFICATE----- - -NetLock Express (Class C) Root -============================== ------BEGIN CERTIFICATE----- -MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUxETAPBgNVBAcT -CEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0b25zYWdpIEtmdC4xGjAYBgNV -BAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQDEytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBD -KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJ -BgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6 -dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMrTmV0TG9j -ayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzANBgkqhkiG9w0BAQEFAAOB -jQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNAOoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3Z -W3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63 -euyucYT2BDMIJTLrdKwWRMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQw -DgYDVR0PAQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEWggJN -RklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0YWxhbm9zIFN6b2xn -YWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFzb2sgYWxhcGphbiBrZXN6dWx0LiBB -IGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBOZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1i 
-aXp0b3NpdGFzYSB2ZWRpLiBBIGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0 -ZWxlIGF6IGVsb2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs -ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25sYXBqYW4gYSBo -dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kga2VyaGV0byBheiBlbGxlbm9y -emVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4gSU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5k -IHRoZSB1c2Ugb2YgdGhpcyBjZXJ0aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQ -UyBhdmFpbGFibGUgYXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwg -YXQgY3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmYta3UzbM2 -xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2gpO0u9f38vf5NNwgMvOOW -gyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4Fp1hBWeAyNDYpQcCNJgEjTME1A== ------END CERTIFICATE----- - -XRamp Global CA Root -==================== ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UE -BhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2Vj -dXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwHhcNMDQxMTAxMTcxNDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMx -HjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkg -U2VydmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS638eMpSe2OAtp87ZOqCwu -IR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCPKZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMx -foArtYzAQDsRhtDLooY2YKTVMIJt2W7QDxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FE -zG+gSqmUsE3a56k0enI4qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqs -AxcZZPRaJSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNViPvry -xS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASsjVy16bYbMDYGA1UdHwQvMC0wK6Ap -oCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMC 
-AQEwDQYJKoZIhvcNAQEFBQADggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc -/Kh4ZzXxHfARvbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLaIR9NmXmd4c8n -nxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSyi6mx5O+aGtA9aZnuqCij4Tyz -8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQO+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -Go Daddy Class 2 CA -=================== ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMY -VGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkG -A1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g -RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQAD -ggENADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv -2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+qN1j3hybX2C32 -qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiOr18SPaAIBQi2XKVlOARFmR6j -YGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmY -vLEHZ6IVDd2gWMZEewo+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0O -BBYEFNLEsNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h/t2o -atTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMu -MTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwG -A1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wim -PQoZ+YeAEW5p5JYXMP80kWNyOO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKt -I3lpjbi2Tc7PTMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mERdEr/VxqHD3VI -Ls9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5CufReYNnyicsbkqWletNw+vHX/b -vZ8= ------END CERTIFICATE----- - -Starfield Class 2 CA -==================== ------BEGIN CERTIFICATE----- 
-MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzElMCMGA1UEChMc -U3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZpZWxkIENsYXNzIDIg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQwNjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBo -MQswCQYDVQQGEwJVUzElMCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAG -A1UECxMpU3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqG -SIb3DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf8MOh2tTY -bitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN+lq2cwQlZut3f+dZxkqZ -JRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVm -epsZGD3/cVE8MC5fvj13c7JdBmzDI1aaK4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSN -F4Azbl5KXZnJHoe0nRrA1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HF -MIHCMB0GA1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fRzt0f -hvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNo -bm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBDbGFzcyAyIENlcnRpZmljYXRpb24g -QXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGs -afPzWdqbAYcaT1epoXkJKtv3L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLM -PUxA2IGvd56Deruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynpVSJYACPq4xJD -KVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEYWQPJIrSPnNVeKtelttQKbfi3 -QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -StartCom Certification Authority -================================ ------BEGIN CERTIFICATE----- -MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMN -U3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmlu -ZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0 -NjM2WhcNMzYwOTE3MTk0NjM2WjB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRk -LjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMg 
-U3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZkpMyONvg45iPwbm2xPN1y -o4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rfOQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/ -Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/CJi/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/d -eMotHweXMAEtcnn6RtYTKqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt -2PZE4XNiHzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMMAv+Z -6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w+2OqqGwaVLRcJXrJ -osmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/ -untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVc -UjyJthkqcwEKDwOzEmDyei+B26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT -37uMdBNSSwIDAQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE -FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9jZXJ0LnN0YXJ0 -Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3JsLnN0YXJ0Y29tLm9yZy9zZnNj -YS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFMBgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUH -AgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRw -Oi8vY2VydC5zdGFydGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYg -U3RhcnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlhYmlsaXR5 -LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2YgdGhlIFN0YXJ0Q29tIENl -cnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFpbGFibGUgYXQgaHR0cDovL2NlcnQuc3Rh -cnRjb20ub3JnL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilT -dGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOC -AgEAFmyZ9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8jhvh -3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUWFjgKXlf2Ysd6AgXm -vB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJzewT4F+irsfMuXGRuczE6Eri8sxHk -fY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3 
-fsNrarnDy0RLrHiQi+fHLB5LEUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZ -EoalHmdkrQYuL6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq -yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuCO3NJo2pXh5Tl -1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6Vum0ABj6y6koQOdjQK/W/7HW/ -lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkyShNOsF/5oirpt9P/FlUQqmMGqz9IgcgA38coro -g14= ------END CERTIFICATE----- - -Taiwan GRCA -=========== ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/MQswCQYDVQQG -EwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4X -DTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1owPzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dv -dmVybm1lbnQgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qN -w8XRIePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1qgQdW8or5 -BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKyyhwOeYHWtXBiCAEuTk8O -1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAtsF/tnyMKtsc2AtJfcdgEWFelq16TheEfO -htX7MfP6Mb40qij7cEwdScevLJ1tZqa2jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wov -J5pGfaENda1UhhXcSTvxls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7 -Q3hub/FCVGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHKYS1t -B6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoHEgKXTiCQ8P8NHuJB -O9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThNXo+EHWbNxWCWtFJaBYmOlXqYwZE8 -lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1UdDgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNV -HRMEBTADAQH/MDkGBGcqBwAEMTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg2 -09yewDL7MTqKUWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyfqzvS/3WXy6Tj -Zwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaKZEk9GhiHkASfQlK3T8v+R0F2 -Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFEJPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlU 
-D7gsL0u8qV1bYH+Mh6XgUmMqvtg7hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6Qz -DxARvBMB1uUO07+1EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+Hbk -Z6MmnD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WXudpVBrkk -7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44VbnzssQwmSNOXfJIoRIM3BKQ -CZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDeLMDDav7v3Aun+kbfYNucpllQdSNpc5Oy -+fwC00fmcc4QAu4njIT/rEUNE1yDMuAlpYYsfPQS ------END CERTIFICATE----- - -Firmaprofesional Root CA -======================== ------BEGIN CERTIFICATE----- -MIIEVzCCAz+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBnTELMAkGA1UEBhMCRVMxIjAgBgNVBAcT -GUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1dG9yaWRhZCBkZSBDZXJ0aWZp -Y2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FA -ZmlybWFwcm9mZXNpb25hbC5jb20wHhcNMDExMDI0MjIwMDAwWhcNMTMxMDI0MjIwMDAwWjCBnTEL -MAkGA1UEBhMCRVMxIjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMT -OUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2 -ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20wggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDnIwNvbyOlXnjOlSztlB5uCp4Bx+ow0Syd3Tfom5h5VtP8c9/Qit5V -j1H5WuretXDE7aTt/6MNbg9kUDGvASdYrv5sp0ovFy3Tc9UTHI9ZpTQsHVQERc1ouKDAA6XPhUJH -lShbz++AbOCQl4oBPB3zhxAwJkh91/zpnZFx/0GaqUC1N5wpIE8fUuOgfRNtVLcK3ulqTgesrBlf -3H5idPayBQC6haD9HThuy1q7hryUZzM1gywfI834yJFxzJeL764P3CkDG8A563DtwW4O2GcLiam8 -NeTvtjS0pbbELaW+0MOUJEjb35bTALVmGotmBQ/dPz/LP6pemkr4tErvlTcbAgMBAAGjgZ8wgZww -KgYDVR0RBCMwIYYfaHR0cDovL3d3dy5maXJtYXByb2Zlc2lvbmFsLmNvbTASBgNVHRMBAf8ECDAG -AQH/AgEBMCsGA1UdEAQkMCKADzIwMDExMDI0MjIwMDAwWoEPMjAxMzEwMjQyMjAwMDBaMA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUMwugZtHq2s7eYpMEKFK1FH84aLcwDQYJKoZIhvcNAQEFBQAD -ggEBAEdz/o0nVPD11HecJ3lXV7cVVuzH2Fi3AQL0M+2TUIiefEaxvT8Ub/GzR0iLjJcG1+p+o1wq -u00vR+L4OQbJnC4xGgN49Lw4xiKLMzHwFgQEffl25EvXwOaD7FnMP97/T2u3Z36mhoEyIwOdyPdf -wUpgpZKpsaSgYMN4h7Mi8yrrW6ntBas3D7Hi05V2Y1Z0jFhyGzflZKG+TQyTmAyX9odtsz/ny4Cm 
-7YjHX1BiAuiZdBbQ5rQ58SfLyEDW44YQqSMSkuBpQWOnryULwMWSyx6Yo1q6xTMPoJcB3X/ge9YG -VM+h4k0460tQtcsm9MracEpqoeJ5quGnM/b9Sh/22WA= ------END CERTIFICATE----- - -Wells Fargo Root CA -=================== ------BEGIN CERTIFICATE----- -MIID5TCCAs2gAwIBAgIEOeSXnjANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UEBhMCVVMxFDASBgNV -BAoTC1dlbGxzIEZhcmdvMSwwKgYDVQQLEyNXZWxscyBGYXJnbyBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eTEvMC0GA1UEAxMmV2VsbHMgRmFyZ28gUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcN -MDAxMDExMTY0MTI4WhcNMjEwMTE0MTY0MTI4WjCBgjELMAkGA1UEBhMCVVMxFDASBgNVBAoTC1dl -bGxzIEZhcmdvMSwwKgYDVQQLEyNXZWxscyBGYXJnbyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEv -MC0GA1UEAxMmV2VsbHMgRmFyZ28gUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVqDM7Jvk0/82bfuUER84A4n135zHCLielTWi5MbqNQ1mX -x3Oqfz1cQJ4F5aHiidlMuD+b+Qy0yGIZLEWukR5zcUHESxP9cMIlrCL1dQu3U+SlK93OvRw6esP3 -E48mVJwWa2uv+9iWsWCaSOAlIiR5NM4OJgALTqv9i86C1y8IcGjBqAr5dE8Hq6T54oN+J3N0Prj5 -OEL8pahbSCOz6+MlsoCultQKnMJ4msZoGK43YjdeUXWoWGPAUe5AeH6orxqg4bB4nVCMe+ez/I4j -sNtlAHCEAQgAFG5Uhpq6zPk3EPbg3oQtnaSFN9OH4xXQwReQfhkhahKpdv0SAulPIV4XAgMBAAGj -YTBfMA8GA1UdEwEB/wQFMAMBAf8wTAYDVR0gBEUwQzBBBgtghkgBhvt7hwcBCzAyMDAGCCsGAQUF -BwIBFiRodHRwOi8vd3d3LndlbGxzZmFyZ28uY29tL2NlcnRwb2xpY3kwDQYJKoZIhvcNAQEFBQAD -ggEBANIn3ZwKdyu7IvICtUpKkfnRLb7kuxpo7w6kAOnu5+/u9vnldKTC2FJYxHT7zmu1Oyl5GFrv -m+0fazbuSCUlFLZWohDo7qd/0D+j0MNdJu4HzMPBJCGHHt8qElNvQRbn7a6U+oxy+hNH8Dx+rn0R -OhPs7fpvcmR7nX1/Jv16+yWt6j4pf0zjAFcysLPp7VMX2YuyFA4w6OXVE8Zkr8QA1dhYJPz1j+zx -x32l2w8n0cbyQIjmH/ZhqPRCyLk306m+LFZ4wnKbWV01QIroTmMatukgalHizqSQ33ZwmVxwQ023 -tqcZZE6St8WRPH9IFmV7Fv3L/PvZ1dZPIWU7Sn9Ho/s= ------END CERTIFICATE----- - -Swisscom Root CA 1 -================== ------BEGIN CERTIFICATE----- -MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQG -EwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2VydGlmaWNhdGUgU2Vy -dmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3QgQ0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4 
-MTgyMjA2MjBaMGQxCzAJBgNVBAYTAmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGln -aXRhbCBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIIC -IjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9m2BtRsiM -MW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdihFvkcxC7mlSpnzNApbjyF -NDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/TilftKaNXXsLmREDA/7n29uj/x2lzZAe -AR81sH8A25Bvxn570e56eqeqDFdvpG3FEzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkC -b6dJtDZd0KTeByy2dbcokdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn -7uHbHaBuHYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNFvJbN -cA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo19AOeCMgkckkKmUp -WyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjCL3UcPX7ape8eYIVpQtPM+GP+HkM5 -haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJWbjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNY -MUJDLXT5xp6mig/p/r+D5kNXJLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYw -HQYDVR0hBBYwFDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j -BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzcK6FptWfUjNP9 -MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzfky9NfEBWMXrrpA9gzXrzvsMn -jgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7IkVh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQ -MbFamIp1TpBcahQq4FJHgmDmHtqBsfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4H -VtA4oJVwIHaM190e3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtl -vrsRls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ipmXeascCl -OS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HHb6D0jqTsNFFbjCYDcKF3 -1QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksfrK/7DZBaZmBwXarNeNQk7shBoJMBkpxq -nvy5JMWzFYJ+vq6VK+uxwNrjAWALXmmshFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCy -x/yP2FS1k2Kdzs9Z+z0YzirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMW -NY6E0F/6MBr1mmz0DlP5OlvRHA== ------END CERTIFICATE----- - -DigiCert Assured ID Root CA -=========================== ------BEGIN CERTIFICATE----- 
-MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQw -IgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzEx -MTEwMDAwMDAwWjBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQL -ExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0Ew -ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7cJpSIqvTO -9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYPmDI2dsze3Tyoou9q+yHy -UmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW -/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpy -oeb6pNnVFzF1roV9Iq4/AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whf -GHdPAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRF -66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYunpyGd823IDzANBgkq -hkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRCdWKuh+vy1dneVrOfzM4UKLkNl2Bc -EkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTffwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38Fn -SbNd67IJKusm7Xi+fT8r87cmNW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i -8b5QZ7dsvfPxH2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -DigiCert Global Root CA -======================= ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBhMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSAw -HgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBDQTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAw -MDAwMDBaMGExCzAJBgNVBAYTAlVTMRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3 -dy5kaWdpY2VydC5jb20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkq -hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsBCSDMAZOn -TjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97nh6Vfe63SKMI2tavegw5 
-BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt43C/dxC//AH2hdmoRBBYMql1GNXRor5H -4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7PT19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y -7vrTC0LUq7dBMtoM1O/4gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQAB -o2MwYTAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbRTLtm -8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUwDQYJKoZIhvcNAQEF -BQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/EsrhMAtudXH/vTBH1jLuG2cenTnmCmr -EbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIt -tep3Sp+dWOIrWcBAI+0tKIJFPnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886 -UAb3LujEV0lsYSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -DigiCert High Assurance EV Root CA -================================== ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBsMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSsw -KQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5jZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAw -MFoXDTMxMTExMDAwMDAwMFowbDELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZ -MBcGA1UECxMQd3d3LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFu -Y2UgRVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm+9S75S0t -Mqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTWPNt0OKRKzE0lgvdKpVMS -OO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEMxChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3 -MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFBIk5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQ -NAQTXKFx01p8VdteZOE3hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUe -h10aUAsgEsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMB -Af8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaAFLE+w2kD+L9HAdSY -JhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3NecnzyIZgYIVyHbIUf4KmeqvxgydkAQ -V8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6zeM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFp 
-myPInngiK3BD41VHMWEZ71jFhS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkK -mNEVX58Svnw2Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep+OkuE6N36B9K ------END CERTIFICATE----- - -Certplus Class 2 Primary CA -=========================== ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAwPTELMAkGA1UE -BhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFzcyAyIFByaW1hcnkgQ0EwHhcN -OTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2Vy -dHBsdXMxGzAZBgNVBAMTEkNsYXNzIDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBANxQltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR -5aiRVhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyLkcAbmXuZ -Vg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCdEgETjdyAYveVqUSISnFO -YFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yasH7WLO7dDWWuwJKZtkIvEcupdM5i3y95e -e++U8Rs+yskhwcWYAqqi9lt3m/V+llU0HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRME -CDAGAQH/AgEKMAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJ -YIZIAYb4QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMuY29t -L0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/AN9WM2K191EBkOvD -P9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8yfFC82x/xXp8HVGIutIKPidd3i1R -TtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMRFcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+ -7UCmnYR0ObncHoUW2ikbhiMAybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW -//1IMwrh3KWBkJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - -DST Root CA X3 -============== ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/MSQwIgYDVQQK -ExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMTDkRTVCBSb290IENBIFgzMB4X -DTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVowPzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1 
-cmUgVHJ1c3QgQ28uMRcwFQYDVQQDEw5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmT -rE4Orz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEqOLl5CjH9 -UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9bxiqKqy69cK3FCxolkHRy -xXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40d -utolucbY38EVAjqr2m7xPi71XAicPNaDaeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQ -MA0GCSqGSIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69ikug -dB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXrAvHRAosZy5Q6XkjE -GB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZzR8srzJmwN0jP41ZL9c8PDHIyh8bw -RLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubS -fZGL+T0yjWW06XyxV3bqxbYoOb8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -DST ACES CA X6 -============== ------BEGIN CERTIFICATE----- -MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBbMQswCQYDVQQG -EwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QxETAPBgNVBAsTCERTVCBBQ0VT -MRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0wMzExMjAyMTE5NThaFw0xNzExMjAyMTE5NTha -MFsxCzAJBgNVBAYTAlVTMSAwHgYDVQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UE -CxMIRFNUIEFDRVMxFzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPuktKe1jzI -DZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7gLFViYsx+tC3dr5BPTCa -pCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZHfAjIgrrep4c9oW24MFbCswKBXy314pow -GCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4aahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPy -MjwmR/onJALJfh1biEITajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rkc3Qu -Y29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjtodHRwOi8vd3d3LnRy 
-dXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMtaW5kZXguaHRtbDAdBgNVHQ4EFgQU -CXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZIhvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V2 -5FYrnJmQ6AgwbN99Pe7lv7UkQIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6t -Fr8hlxCBPeP/h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq -nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpRrscL9yuwNwXs -vFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf29w4LTJxoeHtxMcfrHuBnQfO3 -oKfN5XozNmr6mis= ------END CERTIFICATE----- - -TURKTRUST Certificate Services Provider Root 1 -============================================== ------BEGIN CERTIFICATE----- -MIID+zCCAuOgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBtzE/MD0GA1UEAww2VMOcUktUUlVTVCBF -bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGDAJUUjEP -MA0GA1UEBwwGQU5LQVJBMVYwVAYDVQQKDE0oYykgMjAwNSBUw5xSS1RSVVNUIEJpbGdpIMSwbGV0 -acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjAeFw0wNTA1MTMx -MDI3MTdaFw0xNTAzMjIxMDI3MTdaMIG3MT8wPQYDVQQDDDZUw5xSS1RSVVNUIEVsZWt0cm9uaWsg -U2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLExCzAJBgNVBAYMAlRSMQ8wDQYDVQQHDAZB -TktBUkExVjBUBgNVBAoMTShjKSAyMDA1IFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBC -aWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEAylIF1mMD2Bxf3dJ7XfIMYGFbazt0K3gNfUW9InTojAPBxhEqPZW8qZSwu5GX -yGl8hMW0kWxsE2qkVa2kheiVfrMArwDCBRj1cJ02i67L5BuBf5OI+2pVu32Fks66WJ/bMsW9Xe8i -Si9BB35JYbOG7E6mQW6EvAPs9TscyB/C7qju6hJKjRTP8wrgUDn5CDX4EVmt5yLqS8oUBt5CurKZ -8y1UiBAG6uEaPj1nH/vO+3yC6BFdSsG5FOpU2WabfIl9BJpiyelSPJ6c79L1JuTm5Rh8i27fbMx4 -W09ysstcP4wFjdFMjK2Sx+F4f2VsSQZQLJ4ywtdKxnWKWU51b0dewQIDAQABoxAwDjAMBgNVHRME -BTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAV9VX/N5aAWSGk/KEVTCD21F/aAyT8z5Aa9CEKmu46 -sWrv7/hg0Uw2ZkUd82YCdAR7kjCo3gp2D++Vbr3JN+YaDayJSFvMgzbC9UZcWYJWtNX+I7TYVBxE -q8Sn5RTOPEFhfEPmzcSBCYsk+1Ql1haolgxnB2+zUEfjHCQo3SqYpGH+2+oSN7wBGjSFvW5P55Fy -B0SFHljKVETd96y5y4khctuPwGkplyqjrhgjlxxBKot8KsF8kOipKMDTkcatKIdAaLX/7KfS0zgY 
-nNN9aV3wxqUeJBujR/xpB2jn5Jq07Q+hh4cCzofSSE7hvP/L8XKSRGQDJereW26fyfJOrN3H ------END CERTIFICATE----- - -TURKTRUST Certificate Services Provider Root 2 -============================================== ------BEGIN CERTIFICATE----- -MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBF -bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJUUjEP -MA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUg -QmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcN -MDUxMTA3MTAwNzU3WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVr -dHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJUUjEPMA0G -A1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmls -acWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqe -LCDe2JAOCtFp0if7qnefJ1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKI -x+XlZEdhR3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJQv2g -QrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGXJHpsmxcPbe9TmJEr -5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1pzpwACPI2/z7woQ8arBT9pmAPAgMB -AAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58SFq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/ntt -Rbj2hWyfIvwqECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4 -Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFzgw2lGh1uEpJ+ -hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotHuFEJjOp9zYhys2AzsfAKRO8P -9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LSy3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5 -UrbnBEI= ------END CERTIFICATE----- - -SwissSign Gold CA - G2 -====================== ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkNIMRUw -EwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2lnbiBHb2xkIENBIC0gRzIwHhcN 
-MDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBFMQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dp -c3NTaWduIEFHMR8wHQYDVQQDExZTd2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUq -t2/876LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+bbqBHH5C -jCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c6bM8K8vzARO/Ws/BtQpg -vd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqEemA8atufK+ze3gE/bk3lUIbLtK/tREDF -ylqM2tIrfKjuvqblCqoOpd8FUrdVxyJdMmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvR -AiTysybUa9oEVeXBCsdtMDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuend -jIj3o02yMszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69yFGkO -peUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPiaG59je883WX0XaxR -7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxMgI93e2CaHt+28kgeDrpOVG2Y4OGi -GqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUWyV7lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64 -OfPAeGZe6Drn8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe645R88a7A3hfm -5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczOUYrHUDFu4Up+GC9pWbY9ZIEr -44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOf -Mke6UiI0HTJ6CVanfCU2qT1L2sCCbwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6m -Gu6uLftIdxf+u+yvGPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxp -mo/a77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCChdiDyyJk -vC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid392qgQmwLOM7XdVAyksLf -KzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEppLd6leNcG2mqeSz53OiATIgHQv2ieY2Br -NU0LbbqhPcCT4H8js1WtciVORvnSFu+wZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6Lqj -viOvrv1vA+ACOzB2+httQc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -SwissSign Silver CA - G2 -======================== ------BEGIN CERTIFICATE----- 
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCQ0gxFTAT -BgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMB4X -DTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0NlowRzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3 -aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG -9w0BAQEFAAOCAg8AMIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644 -N0MvFz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7brYT7QbNHm -+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieFnbAVlDLaYQ1HTWBCrpJH -6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH6ATK72oxh9TAtvmUcXtnZLi2kUpCe2Uu -MGoM9ZDulebyzYLs2aFK7PayS+VFheZteJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5h -qAaEuSh6XzjZG6k4sIN/c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5 -FZGkECwJMoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRHHTBs -ROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTfjNFusB3hB48IHpmc -celM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb65i/4z3GcRm25xBWNOHkDRUjvxF3X -CO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOBrDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUF6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRB -tjpbO8tFnb0cwpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBAHPGgeAn0i0P -4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShpWJHckRE1qTodvBqlYJ7YH39F -kWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L -3XWgwF15kIwb4FDm3jH+mHtwX6WQ2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx -/uNncqCxv1yL5PqZIseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFa -DGi8aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2Xem1ZqSqP -e97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQRdAtq/gsD/KNVV4n+Ssuu -WxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJ -DIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ub 
-DgEj8Z+7fNzcbBGXJbLytGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -GeoTrust Primary Certification Authority -======================================== ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMoR2VvVHJ1c3QgUHJpbWFyeSBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgx -CzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQ -cmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9AWbK7hWN -b6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjAZIVcFU2Ix7e64HXprQU9 -nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE07e9GceBrAqg1cmuXm2bgyxx5X9gaBGge -RwLmnWDiNpcB3841kt++Z8dtd1k7j53WkBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGt -tm/81w7a4DSwDRp35+MImO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJKoZI -hvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ16CePbJC/kRYkRj5K -Ts4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl4b7UVXGYNTq+k+qurUKykG/g/CFN -NWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6KoKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHa -Floxt/m0cYASSJlyc1pZU8FjUjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG -1riR/aYNKxoUAT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -thawte Primary Root CA -====================== ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCBqTELMAkGA1UE -BhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2 -aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhv -cml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3 -MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwg 
-SW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMv -KGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNVBAMT -FnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCs -oPD7gFnUnMekz52hWXMJEEUMDSxuaPFsW0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ -1CRfBsDMRJSUjQJib+ta3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGc -q/gcfomk6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6Sk/K -aAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94JNqR32HuHUETVPm4p -afs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XPr87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUF -AAOCAQEAeRHAS7ORtvzw6WfUDW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeE -uzLlQRHAd9mzYJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2/qxAeeWsEG89 -jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/LHbTY5xZ3Y+m4Q6gLkH3LpVH -z7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7jVaMaA== ------END CERTIFICATE----- - -VeriSign Class 3 Public Primary Certification Authority - G5 -============================================================ ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCByjELMAkGA1UE -BhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBO -ZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVk -IHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2ln -biBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2lnbiwgSW5jLiAtIEZvciBh -dXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw 
-ggEKAoIBAQCvJAgIKXo1nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKz -j/i5Vbext0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIzSdhD -Y2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQGBO+QueQA5N06tRn/ -Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+rCpSx4/VBEnkjWNHiDxpg8v+R70r -fk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/ -BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2Uv -Z2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKvMzEzMA0GCSqG -SIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzEp6B4Eq1iDkVwZMXnl2YtmAl+ -X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKE -KQsTb47bDN0lAtukixlE0kF6BWlKWE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiC -Km0oHw0LxOXnGiYZ4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vE -ZV8NhnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -SecureTrust CA -============== ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBIMQswCQYDVQQG -EwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xFzAVBgNVBAMTDlNlY3VyZVRy -dXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIzMTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAe -BgNVBAoTF1NlY3VyZVRydXN0IENvcnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQX -OZEzZum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO0gMdA+9t -DWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIaowW8xQmxSPmjL8xk037uH -GFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b -01k/unK8RCSc43Oz969XL0Imnal0ugBS8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmH -ursCAwEAAaOBnTCBmjATBgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCegJYYj 
-aHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQAwDQYJ -KoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt36Z3q059c4EVlew3KW+JwULKUBRSu -SceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHf -mbx8IVQr5Fiiu1cprp6poxkmD5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZ -nMUFdAvnZyPSCPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -Secure Global CA -================ ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQG -EwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBH -bG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkxMjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEg -MB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwg -Q0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jx -YDiJiQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa/FHtaMbQ -bqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJjnIFHovdRIWCQtBJwB1g -8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnIHmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYV -HDGA76oYa8J719rO+TMg1fW9ajMtgQT7sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi -0XPnj3pDAgMBAAGjgZ0wgZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCswKaAn -oCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsGAQQBgjcVAQQDAgEA -MA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0LURYD7xh8yOOvaliTFGCRsoTciE6+ -OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXOH0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cn -CDpOGR86p1hcF895P4vkp9MmI50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/5 -3CYNv6ZHdAbYiNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -COMODO Certification Authority -============================== ------BEGIN CERTIFICATE----- 
-MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCBgTELMAkGA1UE -BhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgG -A1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNVBAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1 -dGhvcml0eTAeFw0wNjEyMDEwMDAwMDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEb -MBkGA1UECBMSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFD -T01PRE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3UcEbVASY06m/weaKXTuH -+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI2GqGd0S7WWaXUF601CxwRM/aN5VCaTww -xHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV -4EajcNxo2f8ESIl33rXp+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA -1KGzqSX+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5OnKVI -rLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW/zAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6gPKA6hjhodHRwOi8vY3JsLmNvbW9k -b2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9uQXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOC -AQEAPpiem/Yb6dc5t3iuHXIYSdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CP -OGEIqB6BCsAvIC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4zJVSk/BwJVmc -IGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5ddBA6+C4OmF4O5MBKgxTMVBbkN -+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IBZQ== ------END CERTIFICATE----- - -Network Solutions Certificate Authority -======================================= ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBiMQswCQYDVQQG -EwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydOZXR3b3Jr -IFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMx -MjM1OTU5WjBiMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0G 
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwzc7MEL7xx -jOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPPOCwGJgl6cvf6UDL4wpPT -aaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rlmGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXT -crA/vGp97Eh/jcOrqnErU2lBUzS1sLnFBgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc -/Qzpf14Dl847ABSHJ3A4qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMB -AAGjgZcwgZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwubmV0c29sc3NsLmNv -bS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3JpdHkuY3JsMA0GCSqGSIb3DQEBBQUA -A4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc86fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q -4LqILPxFzBiwmZVRDuwduIj/h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/ -GGUsyfJj4akH/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHNpGxlaKFJdlxD -ydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -WellsSecure Public Root Certificate Authority -============================================= ------BEGIN CERTIFICATE----- -MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoM -F1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYw -NAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcN -MDcxMjEzMTcwNzU0WhcNMjIxMjE0MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dl -bGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYD -VQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+rWxxTkqxtnt3CxC5FlAM1 -iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjUDk/41itMpBb570OYj7OeUt9tkTmPOL13 -i0Nj67eT/DBMHAGTthP796EfvyXhdDcsHqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8 -bJVhHlfXBIEyg1J55oNjz7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiB -K0HmOFafSZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/SlwxlAgMB 
-AAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqGKGh0dHA6Ly9jcmwu -cGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0PAQH/BAQDAgHGMB0GA1UdDgQWBBQm -lRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0jBIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGB -i6SBiDCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRww -GgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg -Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEBALkVsUSRzCPI -K0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd/ZDJPHV3V3p9+N701NX3leZ0 -bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pBA4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSlj -qHyita04pO2t/caaH/+Xc/77szWnk4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+es -E2fDbbFwRnzVlhE9iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJ -tylv2G0xffX8oRAHh84vWdw+WNs= ------END CERTIFICATE----- - -COMODO ECC Certification Authority -================================== ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTELMAkGA1UEBhMC -R0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UE -ChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwHhcNMDgwMzA2MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0Ix -GzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRo -b3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSRFtSrYpn1PlILBs5BAH+X -4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0JcfRK9ChQtP6IHG4/bC8vCVlbpVsLM5ni -wz2J+Wos77LTBumjQjBAMB0GA1UdDgQWBBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VG -FAkK+qDmfQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdvGDeA -U/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -IGC/A -===== ------BEGIN CERTIFICATE----- -MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYTAkZSMQ8wDQYD 
-VQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVE -Q1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZy -MB4XDTAyMTIxMzE0MjkyM1oXDTIwMTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQI -EwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NT -STEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMIIB -IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaIs9z4iPf930Pfeo2aSVz2 -TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCW -So7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYy -HF2fYPepraX/z9E0+X1bF8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNd -frGoRpAxVs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGdPDPQ -tQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNVHSAEDjAMMAoGCCqB -egF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAxNjAfBgNVHSMEGDAWgBSjBS8YYFDC -iQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUFAAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RK -q89toB9RlPhJy3Q2FLwV3duJL92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3Q -MZsyK10XZZOYYLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg -Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2aNjSaTFR+FwNI -lQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R0982gaEbeC9xs/FZTEYYKKuF -0mBWWg== ------END CERTIFICATE----- - -Security Communication EV RootCA1 -================================= ------BEGIN CERTIFICATE----- -MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMhU2VjdXJpdHkgQ29tbXVuaWNh -dGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIzMloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UE -BhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNl -Y3VyaXR5IENvbW11bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSERMqm4miO 
-/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gOzXppFodEtZDkBp2uoQSX -WHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4z -ZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDFMxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4 -bepJz11sS6/vmsJWXMY1VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK -9U2vP9eCOKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqG -SIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HWtWS3irO4G8za+6xm -iEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZq51ihPZRwSzJIxXYKLerJRO1RuGG -Av8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDbEJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnW -mHyojf6GPgcWkuF75x3sM3Z+Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEW -T1MKZPlO9L9OVL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490 ------END CERTIFICATE----- - -OISTE WISeKey Global Root GA CA -=============================== ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UE -BhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHlyaWdodCAoYykgMjAwNTEiMCAG -A1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBH -bG9iYWwgUm9vdCBHQSBDQTAeFw0wNTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYD -VQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIw -IAYDVQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5 -IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy0+zAJs9 -Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxRVVuuk+g3/ytr6dTqvirdqFEr12bDYVxg -Asj1znJ7O7jyTmUIms2kahnBAbtzptf2w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbD -d50kc3vkDIzh2TbhmYsFmQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ -/yxViJGg4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t94B3R -LoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQwEAYJKwYBBAGCNxUBBAMCAQAwDQYJ -KoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOxSPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vIm 
-MMkQyh2I+3QZH4VFvbBsUfk2ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4 -+vg1YFkCExh8vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZiFj4A4xylNoEY -okxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ/L7fCg0= ------END CERTIFICATE----- - -Microsec e-Szigno Root CA -========================= ------BEGIN CERTIFICATE----- -MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAwcjELMAkGA1UE -BhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNyb3NlYyBMdGQuMRQwEgYDVQQL -EwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9zZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0 -MDYxMjI4NDRaFw0xNzA0MDYxMjI4NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVz -dDEWMBQGA1UEChMNTWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMT -GU1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2uuO/TEdyB5s87lozWbxXG -d36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/N -oqdNAoI/gqyFxuEPkEeZlApxcpMqyabAvjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjc -QR/Ji3HWVBTji1R4P770Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJ -PqW+jqpx62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcBAQRb -MFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3AwLQYIKwYBBQUHMAKG -IWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAPBgNVHRMBAf8EBTADAQH/MIIBcwYD -VR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIBAQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3 -LmUtc3ppZ25vLmh1L1NaU1ovMIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0A -dAB2AOEAbgB5ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn -AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABTAHoAbwBsAGcA -4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABhACAAcwB6AGUAcgBpAG4AdAAg -AGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABoAHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMA -egBpAGcAbgBvAC4AaAB1AC8AUwBaAFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6 
-Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NO -PU1pY3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxPPU1pY3Jv -c2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZvY2F0aW9uTGlzdDtiaW5h -cnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuBEGluZm9AZS1zemlnbm8uaHWkdzB1MSMw -IQYDVQQDDBpNaWNyb3NlYyBlLVN6aWduw7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhT -WjEWMBQGA1UEChMNTWljcm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhV -MIGsBgNVHSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJIVTER -MA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDASBgNVBAsTC2UtU3pp -Z25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBSb290IENBghEAzLjnv04pGv2i3Gal -HCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMT -nGZjWS7KXHAM/IO8VbH0jgdsZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FE -aGAHQzAxQmHl7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a -86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfRhUZLphK3dehK -yVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/MPMMNz7UwiiAc7EBt51alhQB -S6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU= ------END CERTIFICATE----- - -Certigna -======== ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNVBAYTAkZSMRIw -EAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4XDTA3MDYyOTE1MTMwNVoXDTI3 -MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwI -Q2VydGlnbmEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7q -XOEm7RFHYeGifBZ4QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyH -GxnygQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbwzBfsV1/p -ogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q130yGLMLLGq/jj8UEYkg -DncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKf -Irjxwo1p3Po6WAbfAgMBAAGjgbwwgbkwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQ -tCRZvgHyUtVF9lo53BEwZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJ 
-BgNVBAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzjAQ/J -SP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG9w0BAQUFAAOCAQEA -hQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8hbV6lUmPOEvjvKtpv6zf+EwLHyzs+ -ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFncfca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1klu -PBS1xp81HlDQwY9qcEQCYsuuHWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY -1gkIl2PlwS6wt0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -AC Ra\xC3\xADz Certic\xC3\xA1mara S.A. -====================================== ------BEGIN CERTIFICATE----- -MIIGZjCCBE6gAwIBAgIPB35Sk3vgFeNX8GmMy+wMMA0GCSqGSIb3DQEBBQUAMHsxCzAJBgNVBAYT -AkNPMUcwRQYDVQQKDD5Tb2NpZWRhZCBDYW1lcmFsIGRlIENlcnRpZmljYWNpw7NuIERpZ2l0YWwg -LSBDZXJ0aWPDoW1hcmEgUy5BLjEjMCEGA1UEAwwaQUMgUmHDrXogQ2VydGljw6FtYXJhIFMuQS4w -HhcNMDYxMTI3MjA0NjI5WhcNMzAwNDAyMjE0MjAyWjB7MQswCQYDVQQGEwJDTzFHMEUGA1UECgw+ -U29jaWVkYWQgQ2FtZXJhbCBkZSBDZXJ0aWZpY2FjacOzbiBEaWdpdGFsIC0gQ2VydGljw6FtYXJh -IFMuQS4xIzAhBgNVBAMMGkFDIFJhw616IENlcnRpY8OhbWFyYSBTLkEuMIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAq2uJo1PMSCMI+8PPUZYILrgIem08kBeGqentLhM0R7LQcNzJPNCN -yu5LF6vQhbCnIwTLqKL85XXbQMpiiY9QngE9JlsYhBzLfDe3fezTf3MZsGqy2IiKLUV0qPezuMDU -2s0iiXRNWhU5cxh0T7XrmafBHoi0wpOQY5fzp6cSsgkiBzPZkc0OnB8OIMfuuzONj8LSWKdf/WU3 -4ojC2I+GdV75LaeHM/J4Ny+LvB2GNzmxlPLYvEqcgxhaBvzz1NS6jBUJJfD5to0EfhcSM2tXSExP -2yYe68yQ54v5aHxwD6Mq0Do43zeX4lvegGHTgNiRg0JaTASJaBE8rF9ogEHMYELODVoqDA+bMMCm -8Ibbq0nXl21Ii/kDwFJnmxL3wvIumGVC2daa49AZMQyth9VXAnow6IYm+48jilSH5L887uvDdUhf -HjlvgWJsxS3EF1QZtzeNnDeRyPYL1epjb4OsOMLzP96a++EjYfDIJss2yKHzMI+ko6Kh3VOz3vCa -Mh+DkXkwwakfU5tTohVTP92dsxA7SH2JD/ztA/X7JWR1DhcZDY8AFmd5ekD8LVkH2ZD6mq093ICK -5lw1omdMEWux+IBkAC1vImHFrEsm5VoQgpukg3s0956JkSCXjrdCx2bD0Omk1vUgjcTDlaxECp1b -czwmPS9KvqfJpxAe+59QafMCAwEAAaOB5jCB4zAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjAdBgNVHQ4EFgQU0QnQ6dfOeXRU+Tows/RtLAMDG2gwgaAGA1UdIASBmDCBlTCBkgYEVR0g 
-ADCBiTArBggrBgEFBQcCARYfaHR0cDovL3d3dy5jZXJ0aWNhbWFyYS5jb20vZHBjLzBaBggrBgEF -BQcCAjBOGkxMaW1pdGFjaW9uZXMgZGUgZ2FyYW507WFzIGRlIGVzdGUgY2VydGlmaWNhZG8gc2Ug -cHVlZGVuIGVuY29udHJhciBlbiBsYSBEUEMuMA0GCSqGSIb3DQEBBQUAA4ICAQBclLW4RZFNjmEf -AygPU3zmpFmps4p6xbD/CHwso3EcIRNnoZUSQDWDg4902zNc8El2CoFS3UnUmjIz75uny3XlesuX -EpBcunvFm9+7OSPI/5jOCk0iAUgHforA1SBClETvv3eiiWdIG0ADBaGJ7M9i4z0ldma/Jre7Ir5v -/zlXdLp6yQGVwZVR6Kss+LGGIOk/yzVb0hfpKv6DExdA7ohiZVvVO2Dpezy4ydV/NgIlqmjCMRW3 -MGXrfx1IebHPOeJCgBbT9ZMj/EyXyVo3bHwi2ErN0o42gzmRkBDI8ck1fj+404HGIGQatlDCIaR4 -3NAvO2STdPCWkPHv+wlaNECW8DYSwaN0jJN+Qd53i+yG2dIPPy3RzECiiWZIHiCznCNZc6lEc7wk -eZBWN7PGKX6jD/EpOe9+XCgycDWs2rjIdWb8m0w5R44bb5tNAlQiM+9hup4phO9OSzNHdpdqy35f -/RWmnkJDW2ZaiogN9xa5P1FlK2Zqi9E4UqLWRhH6/JocdJ6PlwsCT2TG9WjTSy3/pDceiz+/RL5h -RqGEPQgnTIEgd4kI6mdAXmwIUV80WoyWaM3X94nCHNMyAK9Sy9NgWyo6R35rMDOhYil/SrnhLecU -Iw4OGEfhefwVVdCx/CVxY3UzHCMrr1zZ7Ud3YA47Dx7SwNxkBYn8eNZcLCZDqQ== ------END CERTIFICATE----- - -TC TrustCenter Class 2 CA II -============================ ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjELMAkGA1UEBhMC -REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNVBAsTGVRDIFRydXN0Q2VudGVy -IENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYw -MTEyMTQzODQzWhcNMjUxMjMxMjI1OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1 -c3RDZW50ZXIgR21iSDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UE -AxMcVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jftMjWQ+nEdVl//OEd+DFw -IxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKguNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2 -xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2JXjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQ -Xa7pIXSSTYtZgo+U4+lK8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7u -SNQZu+995OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3kUrL84J6E1wIqzCB 
-7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRydXN0Y2VudGVyLmRlL2NybC92Mi90 -Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBU -cnVzdENlbnRlciUyMENsYXNzJTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21i -SCxPVT1yb290Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iSGNn3Bzn1LL4G -dXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprtZjluS5TmVfwLG4t3wVMTZonZ -KNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8au0WOB9/WIFaGusyiC2y8zl3gK9etmF1Kdsj -TYjKUCjLhdLTEKJZbtOTVAB6okaVhgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kP -JOzHdiEoZa5X6AeIdUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfk -vQ== ------END CERTIFICATE----- - -TC TrustCenter Class 3 CA II -============================ ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjELMAkGA1UEBhMC -REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNVBAsTGVRDIFRydXN0Q2VudGVy -IENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYw -MTEyMTQ0MTU3WhcNMjUxMjMxMjI1OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1 -c3RDZW50ZXIgR21iSDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UE -AxMcVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJWHt4bNwcwIi9v8Qbxq63W -yKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+QVl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo -6SI7dYnWRBpl8huXJh0obazovVkdKyT21oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZ -uV3bOx4a+9P/FRQI2AlqukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk -2ZyqBwi1Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NXXAek0CSnwPIA1DCB -7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRydXN0Y2VudGVyLmRlL2NybC92Mi90 -Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBU -cnVzdENlbnRlciUyMENsYXNzJTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21i 
-SCxPVT1yb290Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlNirTzwppVMXzE -O2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8TtXqluJucsG7Kv5sbviRmEb8 -yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9 -IJqDnxrcOfHFcqMRA/07QlIp2+gB95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal -092Y+tTmBvTwtiBjS+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc -5A== ------END CERTIFICATE----- - -TC TrustCenter Universal CA I -============================= ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTELMAkGA1UEBhMC -REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNVBAsTG1RDIFRydXN0Q2VudGVy -IFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcN -MDYwMzIyMTU1NDI4WhcNMjUxMjMxMjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMg -VHJ1c3RDZW50ZXIgR21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYw -JAYDVQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSRJJZ4Hgmgm5qVSkr1YnwC -qMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3TfCZdzHd55yx4Oagmcw6iXSVphU9VDprv -xrlE4Vc93x9UIuVvZaozhDrzznq+VZeujRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtw -ag+1m7Z3W0hZneTvWq3zwZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9O -gdwZu5GQfezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYDVR0j -BBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0GCSqGSIb3DQEBBQUAA4IBAQAo0uCG -1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X17caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/Cy -vwbZ71q+s2IhtNerNXxTPqYn8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3 -ghUJGooWMNjsydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT -ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/2TYcuiUaUj0a -7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY ------END CERTIFICATE----- - -Deutsche 
Telekom Root CA 2 -========================== ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMT -RGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEG -A1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENBIDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5 -MjM1OTAwWjBxMQswCQYDVQQGEwJERTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0G -A1UECxMWVC1UZWxlU2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBS -b290IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEUha88EOQ5 -bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhCQN/Po7qCWWqSG6wcmtoI -KyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1MjwrrFDa1sPeg5TKqAyZMg4ISFZbavva4VhY -AUlfckE8FQYBjl2tqriTtM2e66foai1SNNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aK -Se5TBY8ZTNXeWHmb0mocQqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTV -jlsB9WoHtxa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAPBgNV -HRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAlGRZrTlk5ynr -E/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756AbrsptJh6sTtU6zkXR34ajgv8HzFZMQSy -zhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpaIzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8 -rZ7/gFnkm0W09juwzTkZmDLl6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4G -dyd1Lx+4ivn+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - -ComSign Secured CA -================== ------BEGIN CERTIFICATE----- -MIIDqzCCApOgAwIBAgIRAMcoRwmzuGxFjB36JPU2TukwDQYJKoZIhvcNAQEFBQAwPDEbMBkGA1UE -AxMSQ29tU2lnbiBTZWN1cmVkIENBMRAwDgYDVQQKEwdDb21TaWduMQswCQYDVQQGEwJJTDAeFw0w -NDAzMjQxMTM3MjBaFw0yOTAzMTYxNTA0NTZaMDwxGzAZBgNVBAMTEkNvbVNpZ24gU2VjdXJlZCBD -QTEQMA4GA1UEChMHQ29tU2lnbjELMAkGA1UEBhMCSUwwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw -ggEKAoIBAQDGtWhfHZQVw6QIVS3joFd67+l0Kru5fFdJGhFeTymHDEjWaueP1H5XJLkGieQcPOqs -49ohgHMhCu95mGwfCP+hUH3ymBvJVG8+pSjsIQQPRbsHPaHA+iqYHU4Gk/v1iDurX8sWv+bznkqH -7Rnqwp9D5PGBpX8QTz7RSmKtUxvLg/8HZaWSLWapW7ha9B20IZFKF3ueMv5WJDmyVIRD9YTC2LxB 
-kMyd1mja6YJQqTtoz7VdApRgFrFD2UNd3V2Hbuq7s8lr9gOUCXDeFhF6K+h2j0kQmHe5Y1yLM5d1 -9guMsqtb3nQgJT/j8xH5h2iGNXHDHYwt6+UarA9z1YJZQIDTAgMBAAGjgacwgaQwDAYDVR0TBAUw -AwEB/zBEBgNVHR8EPTA7MDmgN6A1hjNodHRwOi8vZmVkaXIuY29tc2lnbi5jby5pbC9jcmwvQ29t -U2lnblNlY3VyZWRDQS5jcmwwDgYDVR0PAQH/BAQDAgGGMB8GA1UdIwQYMBaAFMFL7XC29z58ADsA -j8c+DkWfHl3sMB0GA1UdDgQWBBTBS+1wtvc+fAA7AI/HPg5Fnx5d7DANBgkqhkiG9w0BAQUFAAOC -AQEAFs/ukhNQq3sUnjO2QiBq1BW9Cav8cujvR3qQrFHBZE7piL1DRYHjZiM/EoZNGeQFsOY3wo3a -BijJD4mkU6l1P7CW+6tMM1X5eCZGbxs2mPtCdsGCuY7e+0X5YxtiOzkGynd6qDwJz2w2PQ8KRUtp -FhpFfTMDZflScZAmlaxMDPWLkz/MdXSFmLr/YnpNH4n+rr2UAJm/EaXc4HnFFgt9AmEd6oX5AhVP -51qJThRv4zdLhfXBPGHg/QVBspJ/wx2g0K5SZGBrGMYmnNj1ZOQ2GmKfig8+/21OGVZOIJFsnzQz -OjRXUDpvgV4GxvU+fE6OK85lBi5d0ipTdF7Tbieejw== ------END CERTIFICATE----- - -Cybertrust Global Root -====================== ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYGA1UEChMPQ3li -ZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBSb290MB4XDTA2MTIxNTA4 -MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQD -ExZDeWJlcnRydXN0IEdsb2JhbCBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA -+Mi8vRRQZhP/8NN57CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW -0ozSJ8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2yHLtgwEZL -AfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iPt3sMpTjr3kfb1V05/Iin -89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNzFtApD0mpSPCzqrdsxacwOUBdrsTiXSZT -8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAYXSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2 -MDSgMqAwhi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3JsMB8G -A1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUAA4IBAQBW7wojoFRO -lZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMjWqd8BfP9IjsO0QbE2zZMcwSO5bAi -5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUxXOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2 
-hO0j9n0Hq0V+09+zv+mKts2oomcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+T -X3EJIrduPuocA06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -ePKI Root Certification Authority -================================= ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQG -EwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0ZC4xKjAoBgNVBAsMIWVQS0kg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMx -MjdaMF4xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEq -MCgGA1UECwwhZVBLSSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAHSyZbCUNs -IZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAhijHyl3SJCRImHJ7K2RKi -lTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3XDZoTM1PRYfl61dd4s5oz9wCGzh1NlDiv -qOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX -12ruOzjjK9SXDrkb5wdJfzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0O -WQqraffAsgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uUWH1+ -ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLSnT0IFaUQAS2zMnao -lQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pHdmX2Os+PYhcZewoozRrSgx4hxyy/ -vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJipNiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXi -Zo1jDiVN1Rmy5nk3pyKdVDECAwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/Qkqi -MAwGA1UdEwQFMAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGBuvl2ICO1J2B0 -1GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6YlPwZpVnPDimZI+ymBV3QGypzq -KOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkPJXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdV -xrsStZf0X4OFunHB2WyBEXYKCrC/gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEP -NXubrjlpC2JgQCA2j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+r 
-GNm65ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUBo2M3IUxE -xJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS/jQ6fbjpKdx2qcgw+BRx -gMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2zGp1iro2C6pSe3VkQw63d4k3jMdXH7Ojy -sP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTEW9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmOD -BCEIZ43ygknQW/2xzQ+DhNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3 -============================================================================================================================= ------BEGIN CERTIFICATE----- -MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRSMRgwFgYDVQQH -DA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJpbGltc2VsIHZlIFRla25vbG9q -aWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSwVEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ry -b25payB2ZSBLcmlwdG9sb2ppIEFyYcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNV -BAsMGkthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUg -S8O2ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAeFw0wNzA4 -MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIxGDAWBgNVBAcMD0dlYnpl -IC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmlsaW1zZWwgdmUgVGVrbm9sb2ppayBBcmHF -n3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBUQUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZl -IEtyaXB0b2xvamkgQXJhxZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2Ft -dSBTZXJ0aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7ZrIFNl -cnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4hgb46ezzb8R1Sf1n68yJMlaCQvEhO -Eav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yKO7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1 -xnnRFDDtG1hba+818qEhTsXOfJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR -6Oqeyjh1jmKwlZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL -hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQIDAQABo0IwQDAd 
-BgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmPNOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4 -N5EY3ATIZJkrGG2AA1nJrvhY0D7twyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLT -y9LQQfMmNkqblWwM7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYh -LBOhgLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5noN+J1q2M -dqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUsyZyQ2uypQjyttgI= ------END CERTIFICATE----- - -Buypass Class 2 CA 1 -==================== ------BEGIN CERTIFICATE----- -MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU -QnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3MgQ2xhc3MgMiBDQSAxMB4XDTA2 -MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBh -c3MgQVMtOTgzMTYzMzI3MR0wGwYDVQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7M -cXA0ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLXl18xoS83 -0r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVBHfCuuCkslFJgNJQ72uA4 -0Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/R -uFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0P -AQH/BAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLPgcIV -1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+DKhQ7SLHrQVMdvvt -7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKuBctN518fV4bVIJwo+28TOPX2EZL2 -fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHsh7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5w -wDX3OaJdZtB7WZ+oRxKaJyOkLY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho ------END CERTIFICATE----- - -Buypass Class 3 CA 1 -==================== ------BEGIN CERTIFICATE----- -MIIDUzCCAjugAwIBAgIBAjANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU -QnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3MgQ2xhc3MgMyBDQSAxMB4XDTA1 
-MDUwOTE0MTMwM1oXDTE1MDUwOTE0MTMwM1owSzELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBh -c3MgQVMtOTgzMTYzMzI3MR0wGwYDVQQDDBRCdXlwYXNzIENsYXNzIDMgQ0EgMTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKSO13TZKWTeXx+HgJHqTjnmGcZEC4DVC69TB4sSveZn8AKx -ifZgisRbsELRwCGoy+Gb72RRtqfPFfV0gGgEkKBYouZ0plNTVUhjP5JW3SROjvi6K//zNIqeKNc0 -n6wv1g/xpC+9UrJJhW05NfBEMJNGJPO251P7vGGvqaMU+8IXF4Rs4HyI+MkcVyzwPX6UvCWThOia -AJpFBUJXgPROztmuOfbIUxAMZTpHe2DC1vqRycZxbL2RhzyRhkmr8w+gbCZ2Xhysm3HljbybIR6c -1jh+JIAVMYKWsUnTYjdbiAwKYjT+p0h+mbEwi5A3lRyoH6UsjfRVyNvdWQrCrXig9IsCAwEAAaNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUOBTmyPCppAP0Tj4io1vy1uCtQHQwDgYDVR0P -AQH/BAQDAgEGMA0GCSqGSIb3DQEBBQUAA4IBAQABZ6OMySU9E2NdFm/soT4JXJEVKirZgCFPBdy7 -pYmrEzMqnji3jG8CcmPHc3ceCQa6Oyh7pEfJYWsICCD8igWKH7y6xsL+z27sEzNxZy5p+qksP2bA -EllNC1QCkoS72xLvg3BweMhT+t/Gxv/ciC8HwEmdMldg0/L2mSlf56oBzKwzqBwKu5HEA6BvtjT5 -htOzdlSY9EqBs1OdTUDs5XcTRa9bqh/YL0yCe/4qxFi7T/ye/QNlGioOw6UgFpRreaaiErS7GqQj -el/wroQk5PMr+4okoyeYZdowdXb8GZHo2+ubPzK/QJcHJrrM85SFSnonk8+QQtS4Wxam58tAA915 ------END CERTIFICATE----- - -EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 -========================================================================== ------BEGIN CERTIFICATE----- -MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNVBAMML0VCRyBF -bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMTcwNQYDVQQKDC5FQkcg -QmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXptZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAe -Fw0wNjA4MTcwMDIxMDlaFw0xNjA4MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25p -ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2lt -IFRla25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h4fuXd7hxlugTlkaDT7by -X3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAktiHq6yOU/im/+4mRDGSaBUorzAzu8T2b -gmmkTPiab+ci2hC6X5L8GCcKqKpE+i4stPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfr 
-eYteIAbTdgtsApWjluTLdlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZ -TqNGFav4c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8UmTDGy -Y5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z+kI2sSXFCjEmN1Zn -uqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0OLna9XvNRiYuoP1Vzv9s6xiQFlpJI -qkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMWOeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vm -ExH8nYQKE3vwO9D8owrXieqWfo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0 -Nokb+Clsi7n2l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB -/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgwFoAU587GT/wW -Z5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+8ygjdsZs93/mQJ7ANtyVDR2t -FcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgm -zJNSroIBk5DKd8pNSe/iWtkqvTDOTLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64k -XPBfrAowzIpAoHMEwfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqT -bCmYIai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJnxk1Gj7sU -RT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4QDgZxGhBM/nV+/x5XOULK -1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9qKd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt -2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11thie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQ -Y9iJSrSq3RZj9W6+YKH47ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9 -AahH3eU7QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT ------END CERTIFICATE----- - -certSIGN ROOT CA -================ ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYTAlJPMREwDwYD -VQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTAeFw0wNjA3MDQxNzIwMDRa -Fw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UE -CxMQY2VydFNJR04gUk9PVCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7I -JUqOtdu0KBuqV5Do0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHH -rfAQUySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5dRdY4zTW2 
-ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQOA7+j0xbm0bqQfWwCHTD -0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwvJoIQ4uNllAoEwF73XVv4EOLQunpL+943 -AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8B -Af8EBAMCAcYwHQYDVR0OBBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IB -AQA+0hyJLjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecYMnQ8 -SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ44gx+FkagQnIl6Z0 -x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6IJd1hJyMctTEHBDa0GpC9oHRxUIlt -vBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNwi/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7Nz -TogVZ96edhBiIL5VaZVDADlN9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -CNNIC ROOT -========== ------BEGIN CERTIFICATE----- -MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJDTjEOMAwGA1UE -ChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2MDcwOTE0WhcNMjcwNDE2MDcw -OTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1Qw -ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzD -o+/hn7E7SIX1mlwhIhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tiz -VHa6dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZOV/kbZKKT -VrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrCGHn2emU1z5DrvTOTn1Or -czvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gNv7Sg2Ca+I19zN38m5pIEo3/PIKe38zrK -y5nLAgMBAAGjczBxMBEGCWCGSAGG+EIBAQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscC -wQ7vptU7ETAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991S -lgrHAsEO76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnKOOK5 -Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvHugDnuL8BV8F3RTIM -O/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7HgviyJA/qIYM/PmLXoXLT1tLYhFHxUV8 -BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fLbuXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2 -G8kS1sHNzYDzAgE8yGnLRUhj2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5m -mxE= ------END CERTIFICATE----- - -ApplicationCA - Japanese Government 
-=================================== ------BEGIN CERTIFICATE----- -MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEcMBoGA1UEChMT -SmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRpb25DQTAeFw0wNzEyMTIxNTAw -MDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYTAkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zl -cm5tZW50MRYwFAYDVQQLEw1BcHBsaWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAp23gdE6Hj6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4 -fl+Kf5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55IrmTwcrN -wVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cwFO5cjFW6WY2H/CPek9AE -jP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDihtQWEjdnjDuGWk81quzMKq2edY3rZ+nYVu -nyoKb58DKTCXKB28t89UKU5RMfkntigm/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRU -WssmP3HMlEYNllPqa0jQk/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNV -BAYTAkpQMRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOCseOD -vOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBADlqRHZ3ODrs -o2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJhyzjVOGjprIIC8CFqMjSnHH2HZ9g -/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYD -io+nEhEMy/0/ecGc/WLuo89UDNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmW -dupwX3kSa+SjB1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL -rosot4LKGAfmt1t06SAZf7IbiVQ= ------END CERTIFICATE----- - -GeoTrust Primary Certification Authority - G3 -============================================= ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA4IEdlb1RydXN0 -IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFy -eSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIz -NTk1OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAo -YykgMjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMT 
-LUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMzCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5j -K/BGvESyiaHAKAxJcCGVn2TAppMSAmUmhsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdE -c5IiaacDiGydY8hS2pgn5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3C -IShwiP/WJmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exALDmKu -dlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZChuOl1UcCAwEAAaNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMR5yo6hTgMdHNxr -2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IBAQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9 -cr5HqQ6XErhK8WTTOd8lNNTBzU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbE -Ap7aDHdlDkQNkv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUHSJsMC8tJP33s -t/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2Gspki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -thawte Primary Root CA - G2 -=========================== ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDELMAkGA1UEBhMC -VVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMpIDIwMDcgdGhhd3RlLCBJbmMu -IC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3Qg -Q0EgLSBHMjAeFw0wNzExMDUwMDAwMDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEV -MBMGA1UEChMMdGhhd3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBG -b3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAt -IEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/BebfowJPDQfGAFG6DAJS -LSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6papu+7qzcMBniKI11KOasf2twu8x+qi5 -8/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQU -mtgAMADna3+FGO6Lts6KDPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUN -G4k8VIZ3KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41oxXZ3K -rr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -thawte Primary Root 
CA - G3 -=========================== ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCBrjELMAkGA1UE -BhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2 -aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIwMDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhv -cml6ZWQgdXNlIG9ubHkxJDAiBgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0w -ODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9uMTgwNgYD -VQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTEkMCIG -A1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAsr8nLPvb2FvdeHsbnndmgcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2At -P0LMqmsywCPLLEHd5N/8YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC -+BsUa0Lfb1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS99irY -7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2SzhkGcuYMXDhpxwTW -vGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUkOQIDAQABo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJ -KoZIhvcNAQELBQADggEBABpA2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweK -A3rD6z8KLFIWoCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7cKUGRIjxpp7sC -8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fMm7v/OeZWYdMKp8RcTGB7BXcm -er/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZuMdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -GeoTrust Primary Certification Authority - G2 -============================================= ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChjKSAyMDA3IEdlb1RydXN0IElu -Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1 
-OVowgZgxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwNyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNVBAMTLUdl -b1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjB2MBAGByqGSM49AgEG -BSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcLSo17VDs6bl8VAsBQps8lL33KSLjHUGMc -KiEIfJo22Av+0SbFWDEwKCXzXV2juLaltJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+ -EVXVMAoGCCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGTqQ7m -ndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBuczrD6ogRLQy7rQkgu2 -npaqBA+K ------END CERTIFICATE----- - -VeriSign Universal Root Certification Authority -=============================================== ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCBvTELMAkGA1UE -BhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBO -ZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVk -IHVzZSBvbmx5MTgwNgYDVQQDEy9WZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9u -IEF1dGhvcml0eTAeFw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJV -UzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv -cmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNhbCBSb290IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj -1mCOkdeQmIN65lgZOIzF9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGP -MiJhgsWHH26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+HLL72 -9fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN/BMReYTtXlT2NJ8I -AfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPTrJ9VAMf2CGqUuV/c4DPxhGD5WycR -tPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0G -CCsGAQUFBwEMBGEwX6FdoFswWTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2O 
-a8PPgGrUSBgsexkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4sAPmLGd75JR3 -Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+seQxIcaBlVZaDrHC1LGmWazx -Y8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTx -P/jgdFcrGJ2BtMQo2pSXpXDrrB2+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+P -wGZsY6rp2aQW9IHRlRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4 -mJO37M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -VeriSign Class 3 Public Primary Certification Authority - G4 -============================================================ ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjELMAkGA1UEBhMC -VVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3 -b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVz -ZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBU -cnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRo -b3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5 -IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8 -Utpkmw4tXNherJI9/gHmGUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGz -rl0Bp3vefLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEw -HzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVyaXNpZ24u -Y29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMWkf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMD -A2gAMGUCMGYhDBgmYFo4e1ZC4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIx -AJw9SDkjOVgaFRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -NetLock Arany (Class Gold) Főtanúsítvány 
-============================================ ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQGEwJIVTERMA8G -A1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3MDUGA1UECwwuVGFuw7pzw610 -dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBB -cmFueSAoQ2xhc3MgR29sZCkgRsWRdGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgx -MjA2MTUwODIxWjCBpzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxO -ZXRMb2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNzIEdvbGQpIEbFkXRhbsO6 -c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxCRec75LbRTDofTjl5Bu -0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrTlF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw -/HpYzY6b7cNGbIRwXdrzAZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAk -H3B5r9s5VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRGILdw -fzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2BJtr+UBdADTHLpl1 -neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAGAQH/AgEEMA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2MU9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwW -qZw8UQCgwBEIBaeZ5m8BiFRhbvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTta -YtOUZcTh5m2C+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2FuLjbvrW5Kfna -NwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2XjG4Kvte9nHfRCaexOYNkbQu -dZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -Staat der Nederlanden Root CA - G2 -================================== ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJOTDEeMBwGA1UE -CgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFhdCBkZXIgTmVkZXJsYW5kZW4g -Um9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oXDTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMC -TkwxHjAcBgNVBAoMFVN0YWF0IGRlciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5l 
-ZGVybGFuZGVuIFJvb3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ -5291qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8SpuOUfiUtn -vWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPUZ5uW6M7XxgpT0GtJlvOj -CwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvEpMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiil -e7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCR -OME4HYYEhLoaJXhena/MUGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpI -CT0ugpTNGmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy5V65 -48r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv6q012iDTiIJh8BIi -trzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEKeN5KzlW/HdXZt1bv8Hb/C3m1r737 -qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMB -AAGjgZcwgZQwDwYDVR0TAQH/BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcC -ARYxaHR0cDovL3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqGSIb3DQEBCwUA -A4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLySCZa59sCrI2AGeYwRTlHSeYAz -+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwj -f/ST7ZwaUb7dRUG/kSS0H4zpX897IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaN -kqbG9AclVMwWVxJKgnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfk -CpYL+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxLvJxxcypF -URmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkmbEgeqmiSBeGCc1qb3Adb -CG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvkN1trSt8sV4pAWja63XVECDdCcAz+3F4h -oKOKwJCcaNpQ5kUQR3i2TtJlycM33+FCY7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoV -IPVVYpbtbZNQvOSqeK3Zywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm -66+KAQ== ------END CERTIFICATE----- - -CA Disig -======== ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzETMBEGA1UEBxMK -QnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwHhcNMDYw 
-MzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQswCQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlz -bGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgm -GErENx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnXmjxUizkD -Pw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYDXcDtab86wYqg6I7ZuUUo -hwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhWS8+2rT+MitcE5eN4TPWGqvWP+j1scaMt -ymfraHtuM6kMgiioTGohQBUgDCZbg8KpFhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8w -gfwwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0P -AQH/BAQDAgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cuZGlz -aWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5zay9jYS9jcmwvY2Ff -ZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2svY2EvY3JsL2NhX2Rpc2lnLmNybDAa -BgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEwDQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59t -WDYcPQuBDRIrRhCA/ec8J9B6yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3 -mkkp7M5+cTxqEEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/ -CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeBEicTXxChds6K -ezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFNPGO+I++MzVpQuGhU+QqZMxEA -4Z7CRneC9VkGjCFMhwnN5ag= ------END CERTIFICATE----- - -Juur-SK -======= ------BEGIN CERTIFICATE----- -MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcNAQkBFglwa2lA -c2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZpdHNlZXJpbWlza2Vza3VzMRAw -DgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMwMVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqG -SIb3DQEJARYJcGtpQHNrLmVlMQswCQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVy -aW1pc2tlc2t1czEQMA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOBSvZiF3tf -TQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkzABpTpyHhOEvWgxutr2TC -+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvHLCu3GFH+4Hv2qEivbDtPL+/40UceJlfw 
-UR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMPPbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDa -Tpxt4brNj3pssAki14sL2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQF -MAMBAf8wggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwICMIHD -HoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDkAGwAagBhAHMAdABh -AHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0AHMAZQBlAHIAaQBtAGkAcwBrAGUA -cwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABzAGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABr -AGkAbgBuAGkAdABhAG0AaQBzAGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nw -cy8wKwYDVR0fBCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE -FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcYP2/v6X2+MA4G -A1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOiCfP+JmeaUOTDBS8rNXiRTHyo -ERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+gkcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyL -abVAyJRld/JXIWY7zoVAtjNjGr95HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678 -IIbsSt4beDI3poHSna9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkh -Mp6qqIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0ZTbvGRNs2 -yyqcjg== ------END CERTIFICATE----- - -Hongkong Post Root CA 1 -======================= ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoT -DUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMB4XDTAzMDUx -NTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkGA1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25n -IFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1 -ApzQjVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEnPzlTCeqr -auh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjhZY4bXSNmO7ilMlHIhqqh -qZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9nnV0ttgCXjqQesBCNnLsak3c78QA3xMY -V18meMjWCnl3v/evt3a5pQuEF10Q6m/hq5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNV -HRMBAf8ECDAGAQH/AgEDMA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7i 
-h9legYsCmEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI37pio -l7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clBoiMBdDhViw+5Lmei -IAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJsEhTkYY2sEJCehFC78JZvRZ+K88ps -T/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpOfMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilT -c4afU9hDDl3WY4JxHYB0yvbiAmvZWg== ------END CERTIFICATE----- - -SecureSign RootCA11 -=================== ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDErMCkGA1UEChMi -SmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoGA1UEAxMTU2VjdXJlU2lnbiBS -b290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSsw -KQYDVQQKEyJKYXBhbiBDZXJ0aWZpY2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1 -cmVTaWduIFJvb3RDQTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvL -TJszi1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8h9uuywGO -wvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOVMdrAG/LuYpmGYz+/3ZMq -g6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rP -O7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitA -bpSACW22s293bzUIUPsCh8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZX -t94wDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAKCh -OBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xmKbabfSVSSUOrTC4r -bnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQX5Ucv+2rIrVls4W6ng+4reV6G4pQ -Oh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWrQbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01 -y8hSyn+B/tlr0/cR7SXf+Of5pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061 -lgeLKBObjBmNQSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -ACEDICOM Root -============= ------BEGIN CERTIFICATE----- -MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UEAwwNQUNFRElD -T00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00xCzAJBgNVBAYTAkVTMB4XDTA4 -MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEWMBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoG 
-A1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHk -WLn709gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7XBZXehuD -YAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5PGrjm6gSSrj0RuVFCPYew -MYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAKt0SdE3QrwqXrIhWYENiLxQSfHY9g5QYb -m8+5eaA9oiM/Qj9r+hwDezCNzmzAv+YbX79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbk -HQl/Sog4P75n/TSW9R28MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTT -xKJxqvQUfecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI2Sf2 -3EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyHK9caUPgn6C9D4zq9 -2Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEaeZAwUswdbxcJzbPEHXEUkFDWug/Fq -TYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz -4SsrSbbXc6GqlPUB53NlTKxQMA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU -9QHnc2VMrFAwRAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv -bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWImfQwng4/F9tqg -aHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3gvoFNTPhNahXwOf9jU8/kzJP -eGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKeI6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1Pwk -zQSulgUV1qzOMPPKC8W64iLgpq0i5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1 -ThCojz2GuHURwCRiipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oI -KiMnMCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZo5NjEFIq -nxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6zqylfDJKZ0DcMDQj3dcE -I2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacNGHk0vFQYXlPKNFHtRQrmjseCNj6nOGOp -MCwXEGCSn1WHElkQwg9naRHMTh5+Spqtr0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3o -tkYNbn5XOmeUwssfnHdKZ05phkOTOPu220+DkdRgfks+KzgHVZhepA== ------END CERTIFICATE----- - -Verisign Class 3 Public Primary Certification Authority -======================================================= ------BEGIN CERTIFICATE----- 
-MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkGA1UEBhMCVVMx -FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5 -IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVow -XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz -IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA -A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94 -f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol -hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBABByUqkFFBky -CEHwxWsKzH4PIRnN5GfcX6kb5sroc50i2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWX -bj9T/UWZYB2oK0z5XqcJ2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/ -D/xwzoiQ ------END CERTIFICATE----- - -Microsec e-Szigno Root CA 2009 -============================== ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYDVQQGEwJIVTER -MA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jv -c2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTAeFw0wOTA2MTYxMTMwMThaFw0yOTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UE -BwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUt -U3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvPkd6mJviZpWNwrZuuyjNA -fW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tccbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG -0IMZfcChEhyVbUr02MelTTMuhTlAdX4UfIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKA -pxn1ntxVUwOXewdI/5n7N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm -1HxdrtbCxkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1+rUC -AwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTLD8bf -QkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAbBgNVHREE 
-FDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqGSIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0o -lZMEyL/azXm4Q5DwpL7v8u8hmLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfX -I/OMn74dseGkddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c2Pm2G2JwCz02 -yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5tHMN1Rq41Bab2XD0h7lbwyYIi -LXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -E-Guven Kok Elektronik Sertifika Hizmet Saglayicisi -=================================================== ------BEGIN CERTIFICATE----- -MIIDtjCCAp6gAwIBAgIQRJmNPMADJ72cdpW56tustTANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQG -EwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxpZ2kgQS5TLjE8MDoGA1UEAxMz -ZS1HdXZlbiBLb2sgRWxla3Ryb25payBTZXJ0aWZpa2EgSGl6bWV0IFNhZ2xheWljaXNpMB4XDTA3 -MDEwNDExMzI0OFoXDTE3MDEwNDExMzI0OFowdTELMAkGA1UEBhMCVFIxKDAmBgNVBAoTH0VsZWt0 -cm9uaWsgQmlsZ2kgR3V2ZW5saWdpIEEuUy4xPDA6BgNVBAMTM2UtR3V2ZW4gS29rIEVsZWt0cm9u -aWsgU2VydGlmaWthIEhpem1ldCBTYWdsYXlpY2lzaTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBAMMSIJ6wXgBljU5Gu4Bc6SwGl9XzcslwuedLZYDBS75+PNdUMZTe1RK6UxYC6lhj71vY -8+0qGqpxSKPcEC1fX+tcS5yWCEIlKBHMilpiAVDV6wlTL/jDj/6z/P2douNffb7tC+Bg62nsM+3Y -jfsSSYMAyYuXjDtzKjKzEve5TfL0TW3H5tYmNwjy2f1rXKPlSFxYvEK+A1qBuhw1DADT9SN+cTAI -JjjcJRFHLfO6IxClv7wC90Nex/6wN1CZew+TzuZDLMN+DfIcQ2Zgy2ExR4ejT669VmxMvLz4Bcpk -9Ok0oSy1c+HCPujIyTQlCFzz7abHlJ+tiEMl1+E5YP6sOVkCAwEAAaNCMEAwDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJ/uRLOU1fqRTy7ZVZoEVtstxNulMA0GCSqG -SIb3DQEBBQUAA4IBAQB/X7lTW2M9dTLn+sR0GstG30ZpHFLPqk/CaOv/gKlR6D1id4k9CnU58W5d -F4dvaAXBlGzZXd/aslnLpRCKysw5zZ/rTt5S/wzw9JKp8mxTq5vSR6AfdPebmvEvFZ96ZDAYBzwq -D2fK/A+JYZ1lpTzlvBNbCNvj/+27BrtqBrF6T2XGgv0enIu1De5Iu7i9qgi0+6N8y5/NkHZchpZ4 -Vwpm+Vganf2XKWDeEaaQHBkc7gGWIjQ0LpH5t8Qn0Xvmv/uARFoW5evg1Ao4vOSR49XrXMGs3xtq -fJ7lddK2l4fbzIcrQzqECK+rPNv3PGYxhrCdU3nt+CPeQuMtgvEP5fqX ------END CERTIFICATE----- - -GlobalSign Root CA - R3 -======================= ------BEGIN CERTIFICATE----- 
-MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4GA1UECxMXR2xv -YmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2Jh -bFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxT -aWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2ln -bjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWt -iHL8RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsTgHeMCOFJ -0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmmKPZpO/bLyCiR5Z2KYVc3 -rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zdQQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjl -OCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZXriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2 -xmmFghcCAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE -FI/wS3+oLkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZURUm7 -lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMpjjM5RcOO5LlXbKr8 -EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK6fBdRoyV3XpYKBovHd7NADdBj+1E -bddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQXmcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18 -YIvDQVETI53O9zJrlAGomecsMx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7r -kpeDMdmztcpHWD9f ------END CERTIFICATE----- - -TC TrustCenter Universal CA III -=============================== ------BEGIN CERTIFICATE----- -MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezELMAkGA1UEBhMC -REUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNVBAsTG1RDIFRydXN0Q2VudGVy -IFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAe -Fw0wOTA5MDkwODE1MjdaFw0yOTEyMzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNU -QyBUcnVzdENlbnRlciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0Ex -KDAmBgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF5+cvAqBNLaT6hdqbJYUt -QCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYvDIRlzg9uwliT6CwLOunBjvvya8o84pxO 
-juT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8vzArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+Eut -CHnNaYlAJ/Uqwa1D7KRTyGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1 -M4BDj5yjdipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBhMB8G -A1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI4jANBgkqhkiG9w0BAQUFAAOCAQEA -g8ev6n9NCjw5sWi+e22JLumzCecYV42FmhfzdkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+ -KGwWaODIl0YgoGhnYIg5IFHYaAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhK -BgePxLcHsU0GDeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV -CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPHLQNjO9Po5KIq -woIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg== ------END CERTIFICATE----- - -Autoridad de Certificacion Firmaprofesional CIF A62634068 -========================================================= ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UEBhMCRVMxQjBA -BgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2 -MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEyMzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIw -QAYDVQQDDDlBdXRvcmlkYWQgZGUgQ2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBB -NjI2MzQwNjgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDD -Utd9thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQMcas9UX4P -B99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefGL9ItWY16Ck6WaVICqjaY -7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15iNA9wBj4gGFrO93IbJWyTdBSTo3OxDqqH -ECNZXyAFGUftaI6SEspd/NYrspI8IM/hX68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyI -plD9amML9ZMWGxmPsu2bm8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctX -MbScyJCyZ/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirjaEbsX -LZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/TKI8xWVvTyQKmtFLK -bpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF6NkBiDkal4ZkQdU7hwxu+g/GvUgU 
-vzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVhOSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1Ud -EwEB/wQIMAYBAf8CAQEwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNH -DhpkLzCBpgYDVR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp -cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBvACAAZABlACAA -bABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBlAGwAbwBuAGEAIAAwADgAMAAx -ADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx -51tkljYyGOylMnfX40S2wBEqgLk9am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qk -R71kMrv2JYSiJ0L1ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaP -T481PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS3a/DTg4f -Jl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5kSeTy36LssUzAKh3ntLFl -osS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF3dvd6qJ2gHN99ZwExEWN57kci57q13XR -crHedUTnQn3iV2t93Jm8PYMo6oCTjcVMZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoR -saS8I8nkvof/uZS2+F0gStRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTD -KCOM/iczQ0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQBjLMi -6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -Izenpe.com -========== ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4MQswCQYDVQQG -EwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wHhcNMDcxMjEz -MTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMu -QS4xEzARBgNVBAMMCkl6ZW5wZS5jb20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ -03rKDx6sp4boFmVqscIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAK -ClaOxdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6HLmYRY2xU -+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFXuaOKmMPsOzTFlUFpfnXC -PCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQDyCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxT -OTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbK -F7jJeodWLBoBHmy+E60QrLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK 
-0GqfvEyNBjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8Lhij+ -0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIBQFqNeb+Lz0vPqhbB -leStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+HMh3/1uaD7euBUbl8agW7EekFwID -AQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2luZm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+ -SVpFTlBFIFMuQS4gLSBDSUYgQTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBG -NjIgUzgxQzBBBgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0O -BBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUAA4ICAQB4pgwWSp9MiDrAyw6l -Fn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWblaQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbga -kEyrkgPH7UIBzg/YsfqikuFgba56awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8q -hT/AQKM6WfxZSzwoJNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Cs -g1lwLDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCTVyvehQP5 -aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGkLhObNA5me0mrZJfQRsN5 -nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJbUjWumDqtujWTI6cfSN01RpiyEGjkpTHC -ClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZo -Q0iy2+tzJOeRf1SktoA+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1Z -WrOZyGlsQyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -Chambers of Commerce Root - 2008 -================================ ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYDVQQGEwJFVTFD -MEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNv -bS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMu -QS4xKTAnBgNVBAMTIENoYW1iZXJzIG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEy -Mjk1MFoXDTM4MDczMTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNl -ZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQF -EwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJl 
-cnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW928sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKA -XuFixrYp4YFs8r/lfTJqVKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorj -h40G072QDuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR5gN/ -ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfLZEFHcpOrUMPrCXZk -NNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05aSd+pZgvMPMZ4fKecHePOjlO+Bd5g -D2vlGts/4+EhySnB8esHnFIbAURRPHsl18TlUlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331 -lubKgdaX8ZSD6e2wsWsSaR6s+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ -0wlf2eOKNcx5Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAxhduub+84Mxh2 -EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNVHQ4EFgQU+SSsD7K1+HnA+mCI -G8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJ -BgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNh -bWVyZmlybWEuY29tL2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENh -bWVyZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDiC -CQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUH -AgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAJASryI1 -wqM58C7e6bXpeHxIvj99RZJe6dqxGfwWPJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH -3qLPaYRgM+gQDROpI9CF5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbU -RWpGqOt1glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaHFoI6 -M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2pSB7+R5KBWIBpih1 -YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MDxvbxrN8y8NmBGuScvfaAFPDRLLmF -9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QGtjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcK -zBIKinmwPQN/aUv0NCB9szTqjktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvG -nrDQWzilm1DefhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg 
-OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZd0jQ ------END CERTIFICATE----- - -Global Chambersign Root - 2008 -============================== ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYDVQQGEwJFVTFD -MEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNv -bS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMu -QS4xJzAlBgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMx -NDBaFw0zODA3MzExMjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUg -Y3VycmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJ -QTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDf -VtPkOpt2RbQT2//BthmLN0EYlVJH6xedKYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXf -XjaOcNFccUMd2drvXNL7G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0 -ZJJ0YPP2zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4ddPB -/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyGHoiMvvKRhI9lNNgA -TH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2Id3UwD2ln58fQ1DJu7xsepeY7s2M -H/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3VyJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfe -Ox2YItaswTXbo6Al/3K1dh3ebeksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSF -HTynyQbehP9r6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsogzCtLkykPAgMB -AAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQWBBS5CcqcHtvTbDprru1U8VuT -BjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDprru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UE -BhMCRVUxQzBBBgNVBAcTOk1hZHJpZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJm -aXJtYS5jb20vYWRkcmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJm -aXJtYSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiCCQDJzdPp -1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCowKAYIKwYBBQUHAgEWHGh0 
-dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZIhvcNAQEFBQADggIBAICIf3DekijZBZRG -/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZUohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6 -ReAJ3spED8IXDneRRXozX1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/s -dZ7LoR/xfxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVza2Mg -9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yydYhz2rXzdpjEetrHH -foUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMdSqlapskD7+3056huirRXhOukP9Du -qqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9OAP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETr -P3iZ8ntxPjzxmKfFGBI/5rsoM0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVq -c5iJWzouE4gev8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z -09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B ------END CERTIFICATE----- - -Go Daddy Root Certificate Authority - G2 -======================================== ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMxEDAOBgNVBAgT -B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoTEUdvRGFkZHkuY29tLCBJbmMu -MTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8G -A1UEAxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKDE6bFIEMBO4Tx5oVJnyfq -9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD -+qK+ihVqf94Lw7YZFAXK6sOoBJQ7RnwyDfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutd -fMh8+7ArU6SSYmlRJQVhGkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMl -NAJWJwGRtDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEAAaNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFDqahQcQZyi27/a9 -BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmXWWcDYfF+OwYxdS2hII5PZYe096ac -vNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r 
-5N9ss4UXnT3ZJE95kTXWXwTrgIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYV -N8Gb5DKj7Tjo2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI4uJEvlz36hz1 ------END CERTIFICATE----- - -Starfield Root Certificate Authority - G2 -========================================= ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMxEDAOBgNVBAgT -B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9s -b2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVsZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0 -eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAw -DgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQg -VGVjaG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZpY2F0ZSBB -dXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL3twQP89o/8ArFv -W59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMgnLRJdzIpVv257IzdIvpy3Cdhl+72WoTs -bhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNk -N3mSwOxGXn/hbVNMYq/NHwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7Nf -ZTD4p7dNdloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0HZbU -JtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0GCSqGSIb3DQEBCwUAA4IBAQARWfol -TwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjUsHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx -4mcujJUDJi5DnUox9g61DLu34jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUw -F5okxBDgBPfg8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1mMpYjn0q7pBZ -c2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -Starfield Services Root Certificate Authority - G2 -================================================== ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMxEDAOBgNVBAgT 
-B0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoTHFN0YXJmaWVsZCBUZWNobm9s -b2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVsZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRl -IEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxT -dGFyZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2VydmljZXMg -Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20pOsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2 -h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm28xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4Pa -hHQUw2eeBGg6345AWh1KTs9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLP -LJGmpufehRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk6mFB -rMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+qAdcwKziIorhtSpzyEZGDMA0GCSqG -SIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMIbw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPP -E95Dz+I0swSdHynVv/heyNXBve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTy -xQGjhdByPq1zqwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn0q23KXB56jza -YyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCNsSi6 ------END CERTIFICATE----- - -AffirmTrust Commercial -====================== ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UEBhMCVVMxFDAS -BgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMB4XDTEw -MDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmly -bVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6Eqdb -DuKPHx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yrba0F8PrV -C8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPALMeIrJmqbTFeurCA+ukV6 
-BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1yHp52UKqK39c/s4mT6NmgTWvRLpUHhww -MmWd5jyTXlBOeuM61G7MGvv50jeuJCqrVwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNV -HQ4EFgQUnZPGU4teyq8/nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYGXUPG -hi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNjvbz4YYCanrHOQnDi -qX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivtZ8SOyUOyXGsViQK8YvxO8rUzqrJv -0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9gN53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0kh -sUlHRUe072o0EclNmsxZt9YCnlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -AffirmTrust Networking -====================== ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UEBhMCVVMxFDAS -BgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMB4XDTEw -MDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmly -bVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SE -Hi3yYJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbuakCNrmreI -dIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRLQESxG9fhwoXA3hA/Pe24 -/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gb -h+0t+nvujArjqWaJGctB+d1ENmHP4ndGyH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNV -HQ4EFgQUBx/S55zawm6iQLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfOtDIu -UFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzuQY0x2+c06lkh1QF6 -12S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZLgo/bNjR9eUJtGxUAArgFU2HdW23 -WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4uolu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9 -/ZFvgrG+CJPbFEfxojfHRZ48x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -AffirmTrust Premium -=================== ------BEGIN CERTIFICATE----- 
-MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UEBhMCVVMxFDAS -BgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMB4XDTEwMDEy -OTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRy -dXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxBLfqV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtn -BKAQJG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ+jjeRFcV -5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrSs8PhaJyJ+HoAVt70VZVs -+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmd -GPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d770O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5R -p9EixAqnOEhss/n/fauGV+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NI -S+LI+H+SqHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S5u04 -6uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4IaC1nEWTJ3s7xgaVY5 -/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TXOwF0lkLgAOIua+rF7nKsu7/+6qqo -+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYEFJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByv -MiPIs0laUZx2KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B8OWycvpEgjNC -6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQMKSOyARiqcTtNd56l+0OOF6S -L5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK -+4w1IX2COPKpVJEZNZOUbWo6xbLQu4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmV -BtWVyuEklut89pMFu+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFg -IxpHYoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8GKa1qF60 -g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaORtGdFNrHF+QFlozEJLUb -zxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6eKeC2uAloGRwYQw== ------END CERTIFICATE----- - -AffirmTrust Premium ECC -======================= ------BEGIN CERTIFICATE----- 
-MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMCVVMxFDASBgNV -BAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQcmVtaXVtIEVDQzAeFw0xMDAx -MjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJBgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1U -cnVzdDEgMB4GA1UEAwwXQWZmaXJtVHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAQNMF4bFZ0D0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQ -N8O9ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0GA1UdDgQW -BBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAK -BggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/VsaobgxCd05DhT1wV/GzTjxi+zygk8N53X -57hG8f2h4nECMEJZh0PUUd+60wkyWs6Iflc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKM -eQ== ------END CERTIFICATE----- - -Certum Trusted Network CA -========================= ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBMMSIwIAYDVQQK -ExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBUcnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIy -MTIwNzM3WhcNMjkxMjMxMTIwNzM3WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBU -ZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MSIwIAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rHUV+rpDKmYYe2bg+G0jAC -l/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LMTXPb865Px1bVWqeWifrzq2jUI4ZZJ88J -J7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVUBBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4 -fOQtf/WsX+sWn7Et0brMkUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0 -cvW0QM8xAcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNVHRMB -Af8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNVHQ8BAf8EBAMCAQYw -DQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15ysHhE49wcrwn9I0j6vSrEuVUEtRCj -jSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfLI9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1 -mS1FhIrlQgnXdAIv94nYmem8J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5aj 
-Zt3hrvJBW8qYVoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -Certinomis - Autorité Racine -============================= ------BEGIN CERTIFICATE----- -MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjETMBEGA1UEChMK -Q2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAkBgNVBAMMHUNlcnRpbm9taXMg -LSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkG -A1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYw -JAYDVQQDDB1DZXJ0aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jYF1AMnmHa -wE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N8y4oH3DfVS9O7cdxbwly -Lu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWerP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw -2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92N -jMD2AR5vpTESOH2VwnHu7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9q -c1pkIuVC28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6lSTC -lrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1Enn1So2+WLhl+HPNb -xxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB0iSVL1N6aaLwD4ZFjliCK0wi1F6g -530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql095gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna -4NH4+ej9Uji29YnfAgMBAAGjWzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBQNjLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ -KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9sov3/4gbIOZ/x -WqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZMOH8oMDX/nyNTt7buFHAAQCva -R6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40 -nJ+U8/aGH88bc62UeYdocMMzpXDn2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1B -CxMjidPJC+iKunqjo3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjv -JL1vnxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG5ERQL1TE 
-qkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWqpdEdnV1j6CTmNhTih60b -WfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZbdsLLO7XSAPCjDuGtbkD326C00EauFddE -wk01+dIL8hf2rGbVJLJP0RyZwG71fet0BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/ -vgt2Fl43N+bYdJeimUV5 ------END CERTIFICATE----- - -Root CA Generalitat Valenciana -============================== ------BEGIN CERTIFICATE----- -MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJFUzEfMB0GA1UE -ChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290 -IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcNMDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3 -WjBoMQswCQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UE -CxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+WmmmO3I2 -F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKjSgbwJ/BXufjpTjJ3Cj9B -ZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGlu6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQ -D0EbtFpKd71ng+CT516nDOeB0/RSrFOyA8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXte -JajCq+TA81yc477OMUxkHl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMB -AAGjggM7MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBraS5n -dmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIICIwYKKwYBBAG/VQIB -ADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBl -AHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIAYQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIA -YQBsAGkAdABhAHQAIABWAGEAbABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQBy -AGEAYwBpAPMAbgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA -aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMAaQBvAG4AYQBt -AGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQAZQAgAEEAdQB0AG8AcgBpAGQA -YQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBu -AHQAcgBhACAAZQBuACAAbABhACAAZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAA 
-OgAvAC8AdwB3AHcALgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0 -dHA6Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+yeAT8MIGV -BgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQswCQYDVQQGEwJFUzEfMB0G -A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScwJQYDVQQDEx5S -b290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRh -TvW1yEICKrNcda3FbcrnlD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdz -Ckj+IHLtb8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg9J63 -NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XFducTZnV+ZfsBn5OH -iJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmCIoaZM3Fa6hlXPZHNqcCjbgcTpsnt -+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM= ------END CERTIFICATE----- - -A-Trust-nQual-03 -================ ------BEGIN CERTIFICATE----- -MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJBVDFIMEYGA1UE -Cgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBpbSBlbGVrdHIuIERhdGVudmVy -a2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5RdWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5R -dWFsLTAzMB4XDTA1MDgxNzIyMDAwMFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgw -RgYDVQQKDD9BLVRydXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0 -ZW52ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMMEEEtVHJ1 -c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtPWFuA/OQO8BBC4SA -zewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUjlUC5B3ilJfYKvUWG6Nm9wASOhURh73+n -yfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZznF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPE -SU7l0+m0iKsMrmKS1GWH2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4 -iHQF63n1k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs2e3V -cuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYDVR0OBAoECERqlWdV -eRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAVdRU0VlIXLOThaq/Yy/kgM40 -ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fGKOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmr 
-sQd7TZjTXLDR8KdCoLXEjq/+8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZd -JXDRZslo+S4RFGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS -mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmEDNuxUCAKGkq6 -ahq97BvIxYSazQ== ------END CERTIFICATE----- - -TWCA Root Certification Authority -================================= ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJ -VEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMzWhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQG -EwJUVzESMBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NB -IFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFEAcK0HMMx -QhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HHK3XLfJ+utdGdIzdjp9xC -oi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeXRfwZVzsrb+RH9JlF/h3x+JejiB03HFyP -4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/zrX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1r -y+UPizgN7gr8/g+YnzAx3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkqhkiG -9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeCMErJk/9q56YAf4lC -mtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdlsXebQ79NqZp4VKIV66IIArB6nCWlW -QtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62Dlhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVY -T0bf+215WfKEIlKuD8z7fDvnaspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocny -Yh0igzyXxfkZYiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -Security Communication RootCA2 -============================== ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMeU2VjdXJpdHkgQ29tbXVuaWNh -dGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoXDTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMC 
-SlAxJTAjBgNVBAoTHFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3Vy -aXR5IENvbW11bmljYXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -ANAVOVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGrzbl+dp++ -+T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVMVAX3NuRFg3sUZdbcDE3R -3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQhNBqyjoGADdH5H5XTz+L62e4iKrFvlNV -spHEfbmwhRkGeC7bYRr6hfVKkaHnFtWOojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1K -EOtOghY6rCcMU/Gt1SSwawNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8 -QIH4D5csOPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEB -CwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpFcoJxDjrSzG+ntKEj -u/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXcokgfGT+Ok+vx+hfuzU7jBBJV1uXk -3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6q -tnRGEmyR7jTV7JqR50S+kDFy1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29 -mvVXIwAHIRc/SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -EC-ACC -====== ------BEGIN CERTIFICATE----- -MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB8zELMAkGA1UE -BhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2VydGlmaWNhY2lvIChOSUYgUS0w -ODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYD -VQQLEyxWZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UE -CxMsSmVyYXJxdWlhIEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMT -BkVDLUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQGEwJFUzE7 -MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8gKE5JRiBRLTA4MDExNzYt -SSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBDZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZl -Z2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQubmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJh -cnF1aWEgRW50aXRhdHMgZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUND -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R85iK -w5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm4CgPukLjbo73FCeT 
-ae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaVHMf5NLWUhdWZXqBIoH7nF2W4onW4 -HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNdQlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0a -E9jD2z3Il3rucO2n5nzbcc8tlGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw -0JDnJwIDAQABo4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4opvpXY0wfwYD -VR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBodHRwczovL3d3dy5jYXRjZXJ0 -Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidWZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5l -dC92ZXJhcnJlbCAwDQYJKoZIhvcNAQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJ -lF7W2u++AVtd0x7Y/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNa -Al6kSBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhyRp/7SNVe -l+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOSAgu+TGbrIP65y7WZf+a2 -E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xlnJ2lYJU6Un/10asIbvPuW/mIPX64b24D -5EI= ------END CERTIFICATE----- - -Hellenic Academic and Research Institutions RootCA 2011 -======================================================= ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1IxRDBCBgNVBAoT -O0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9y -aXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25z -IFJvb3RDQSAyMDExMB4XDTExMTIwNjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYT -AkdSMUQwQgYDVQQKEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25z -IENlcnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2VhcmNo -IEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPzdYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI -1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJfel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa -71HFK9+WXesyHgLacEnsbgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u -8yBRQlqD75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSPFEDH 
-3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNVHRMBAf8EBTADAQH/ -MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp5dgTBCPuQSUwRwYDVR0eBEAwPqA8 -MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQub3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQu -b3JnMA0GCSqGSIb3DQEBBQUAA4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVt -XdMiKahsog2p6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7dIsXRSZMFpGD -/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8AcysNnq/onN694/BtZqhFLKPM58N -7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXIl7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -Actalis Authentication Root CA -============================== ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UEBhMCSVQxDjAM -BgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1ODUyMDk2NzEnMCUGA1UE -AwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDky -MjExMjIwMlowazELMAkGA1UEBhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlz -IFMucC5BLi8wMzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNvUTufClrJ -wkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX4ay8IMKx4INRimlNAJZa -by/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9KK3giq0itFZljoZUj5NDKd45RnijMCO6 -zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1f -YVEiVRvjRuPjPdA1YprbrxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2 -oxgkg4YQ51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2Fbe8l -EfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxeKF+w6D9Fz8+vm2/7 -hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4Fv6MGn8i1zeQf1xcGDXqVdFUNaBr8 -EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbnfpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5 -jF66CyCU3nuDuP/jVo23Eek7jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLY -iDrIn3hm7YnzezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt 
-ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQALe3KHwGCmSUyI -WOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70jsNjLiNmsGe+b7bAEzlgqqI0 -JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDzWochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKx -K3JCaKygvU5a2hi/a5iB0P2avl4VSM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+ -Xlff1ANATIGk0k9jpwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC -4yyXX04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+OkfcvHlXHo -2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7RK4X9p2jIugErsWx0Hbhz -lefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btUZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXem -OR/qnuOf0GZvBeyqdn6/axag67XH/JJULysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9 -vwGYT7JZVEc+NHt4bVaTLnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -Trustis FPS Root CA -=================== ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQG -EwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQLExNUcnVzdGlzIEZQUyBSb290 -IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTExMzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNV -BAoTD1RydXN0aXMgTGltaXRlZDEcMBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQ -RUN+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihHiTHcDnlk -H5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjjvSkCqPoc4Vu5g6hBSLwa -cY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zt -o3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlBOrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEA -AaNTMFEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAd -BgNVHQ4EFgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01GX2c -GE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmWzaD+vkAMXBJV+JOC -yinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP41BIy+Q7DsdwyhEQsb8tGD+pmQQ9P -8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZEf1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHV 
-l/9D7S3B2l0pKoU/rGXuhg8FjZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYl -iB6XzCGcKQENZetX2fNXlrtIzYE= ------END CERTIFICATE----- - -StartCom Certification Authority -================================ ------BEGIN CERTIFICATE----- -MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMN -U3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmlu -ZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0 -NjM3WhcNMzYwOTE3MTk0NjM2WjB9MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRk -LjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMg -U3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZkpMyONvg45iPwbm2xPN1y -o4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rfOQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/ -Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/CJi/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/d -eMotHweXMAEtcnn6RtYTKqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt -2PZE4XNiHzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMMAv+Z -6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w+2OqqGwaVLRcJXrJ -osmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/ -untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVc -UjyJthkqcwEKDwOzEmDyei+B26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT -37uMdBNSSwIDAQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFulF2mHMMo0aEPQ -Qa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCCATgwLgYIKwYBBQUHAgEWImh0 -dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cu -c3RhcnRzc2wuY29tL2ludGVybWVkaWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENv -bW1lcmNpYWwgKFN0YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0 -aGUgc2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0aWZpY2F0 
-aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93d3cuc3RhcnRzc2wuY29t -L3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBG -cmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5 -fPGFf59Jb2vKXfuM/gTFwWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWm -N3PH/UvSTa0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst0OcN -Org+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNcpRJvkrKTlMeIFw6T -tn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKlCcWw0bdT82AUuoVpaiF8H3VhFyAX -e2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVFP0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA -2MFrLH9ZXF2RsXAiV+uKa0hK1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBs -HvUwyKMQ5bLmKhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE -JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ8dCAWZvLMdib -D4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnmfyWl8kgAwKQB2j8= ------END CERTIFICATE----- - -StartCom Certification Authority G2 -=================================== ------BEGIN CERTIFICATE----- -MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMN -U3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -RzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UE -ChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8O -o1XJJZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsDvfOpL9HG -4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnooD/Uefyf3lLE3PbfHkffi -Aez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/Q0kGi4xDuFby2X8hQxfqp0iVAXV16iul -Q5XqFYSdCI0mblWbq9zSOdIxHWDirMxWRST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbs -O+wmETRIjfaAKxojAuuKHDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8H -vKTlXcxNnw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM0D4L -nMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/iUUjXuG+v+E5+M5iS 
-FGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9Ha90OrInwMEePnWjFqmveiJdnxMa -z6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHgTuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJ -KoZIhvcNAQELBQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K -2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfXUfEpY9Z1zRbk -J4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl6/2o1PXWT6RbdejF0mCy2wl+ -JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG -/+gyRr61M3Z3qAFdlsHB1b6uJcDJHgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTc -nIhT76IxW1hPkWLIwpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/Xld -blhYXzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5lIxKVCCIc -l85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoohdVddLHRDiBYmxOlsGOm -7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulrso8uBtjRkcfGEvRM/TAXw8HaOFvjqerm -obp573PYtlNXLfbQ4ddI ------END CERTIFICATE----- - -Buypass Class 2 Root CA -======================= ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU -QnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3MgQ2xhc3MgMiBSb290IENBMB4X -DTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1owTjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1 -eXBhc3MgQVMtOTgzMTYzMzI3MSAwHgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIw -DQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1 -g1Lr6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPVL4O2fuPn -9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC911K2GScuVr1QGbNgGE41b -/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHxMlAQTn/0hpPshNOOvEu/XAFOBz3cFIqU -CqTqc/sLUegTBxj6DvEr0VQVfTzh97QZQmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeff -awrbD02TTqigzXsu8lkBarcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgI -zRFo1clrUs3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLiFRhn 
-Bkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRSP/TizPJhk9H9Z2vX -Uq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN9SG9dKpN6nIDSdvHXx1iY8f93ZHs -M+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxPAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD -VR0OBBYEFMmAd+BikoL1RpzzuvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsF -AAOCAgEAU18h9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3tOluwlN5E40EI -osHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo+fsicdl9sz1Gv7SEr5AcD48S -aq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYd -DnkM/crqJIByw5c/8nerQyIKx+u2DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWD -LfJ6v9r9jv6ly0UsH8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0 -oyLQI+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK75t98biGC -wWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h3PFaTWwyI0PurKju7koS -CTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPzY11aWOIv4x3kqdbQCtCev9eBCfHJxyYN -rJgWVqA= ------END CERTIFICATE----- - -Buypass Class 3 Root CA -======================= ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEdMBsGA1UECgwU -QnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3MgQ2xhc3MgMyBSb290IENBMB4X -DTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFowTjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1 -eXBhc3MgQVMtOTgzMTYzMzI3MSAwHgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIw -DQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRH -sJ8YZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3EN3coTRiR -5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9tznDDgFHmV0ST9tD+leh -7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX0DJq1l1sDPGzbjniazEuOQAnFN44wOwZ -ZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH -2xc519woe2v1n/MuwU8XKhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV -/afmiSTYzIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvSO1UQ 
-RwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D34xFMFbG02SrZvPA -Xpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgPK9Dx2hzLabjKSWJtyNBjYt1gD1iq -j6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYD -VR0OBBYEFEe4zf/lb+74suwvTg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsF -AAOCAgEAACAjQTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXSIGrs/CIBKM+G -uIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2HJLw5QY33KbmkJs4j1xrG0aG -Q0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsaO5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8 -ZORK15FTAaggiG6cX0S5y2CBNOxv033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2 -KSb12tjE8nVhz36udmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz -6MkEkbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg413OEMXbug -UZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvDu79leNKGef9JOxqDDPDe -eOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq4/g7u9xN12TyUb7mqqta6THuBrxzvxNi -Cp/HuZc= ------END CERTIFICATE----- - -T-TeleSec GlobalRoot Class 3 -============================ ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoM -IlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBU -cnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgx -MDAxMTAyOTU2WhcNMzMxMDAxMjM1OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lz -dGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBD -ZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN8ELg63iIVl6bmlQdTQyK -9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/RLyTPWGrTs0NvvAgJ1gORH8EGoel15YU -NpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZF -iP0Zf3WHHx+xGwpzJFu5ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W -0eDrXltMEnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGjQjBA 
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1A/d2O2GCahKqGFPr -AyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOyWL6ukK2YJ5f+AbGwUgC4TeQbIXQb -fsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzT -ucpH9sry9uetuUg/vBa3wW306gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7h -P0HHRwA11fXT91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4pTpPDpFQUWw== ------END CERTIFICATE----- - -EE Certification Centre Root CA -=============================== ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1MQswCQYDVQQG -EwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEoMCYGA1UEAwwfRUUgQ2Vy -dGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIw -MTAxMDMwMTAxMDMwWhgPMjAzMDEyMTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlB -UyBTZXJ0aWZpdHNlZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRy -ZSBSb290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEBAQUAA4IB -DwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUyeuuOF0+W2Ap7kaJjbMeM -TC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvObntl8jixwKIy72KyaOBhU8E2lf/slLo2 -rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIwWFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw -93X2PaRka9ZP585ArQ/dMtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtN -P2MbRMNE1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/zQas8fElyalL1BSZ -MEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYBBQUHAwMGCCsGAQUFBwMEBggrBgEF -BQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEFBQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+Rj -xY6hUFaTlrg4wCQiZrxTFGGVv9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqM -lIpPnTX/dqQGE5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIWiAYLtqZLICjU -3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/vGVCJYMzpJJUPwssd8m92kMfM 
-dcGWxZ0= ------END CERTIFICATE----- diff --git a/libs/tornado/concurrent.py b/libs/tornado/concurrent.py index a9002b16..63b0a8c1 100755 --- a/libs/tornado/concurrent.py +++ b/libs/tornado/concurrent.py @@ -40,52 +40,132 @@ class ReturnValueIgnoredError(Exception): pass -class _DummyFuture(object): +class Future(object): + """Placeholder for an asynchronous result. + + A ``Future`` encapsulates the result of an asynchronous + operation. In synchronous applications ``Futures`` are used + to wait for the result from a thread or process pool; in + Tornado they are normally used with `.IOLoop.add_future` or by + yielding them in a `.gen.coroutine`. + + `tornado.concurrent.Future` is similar to + `concurrent.futures.Future`, but not thread-safe (and therefore + faster for use with single-threaded event loops). + + In addition to ``exception`` and ``set_exception``, methods ``exc_info`` + and ``set_exc_info`` are supported to capture tracebacks in Python 2. + The traceback is automatically available in Python 3, but in the + Python 2 futures backport this information is discarded. + This functionality was previously available in a separate class + ``TracebackFuture``, which is now a deprecated alias for this class. + + .. versionchanged:: 3.3 + `tornado.concurrent.Future` is always a thread-unsafe ``Future`` + with support for the ``exc_info`` methods. Previously it would + be an alias for the thread-safe `concurrent.futures.Future` + if that package was available and fall back to the thread-unsafe + implementation if it was not. + + """ def __init__(self): self._done = False self._result = None self._exception = None + self._exc_info = None self._callbacks = [] def cancel(self): + """Cancel the operation, if possible. + + Tornado ``Futures`` do not support cancellation, so this method always + returns False. + """ return False def cancelled(self): + """Returns True if the operation has been cancelled. 
+ + Tornado ``Futures`` do not support cancellation, so this method + always returns False. + """ return False def running(self): + """Returns True if this operation is currently running.""" return not self._done def done(self): + """Returns True if the future has finished running.""" return self._done def result(self, timeout=None): - self._check_done() - if self._exception: + """If the operation succeeded, return its result. If it failed, + re-raise its exception. + """ + if self._result is not None: + return self._result + if self._exc_info is not None: + raise_exc_info(self._exc_info) + elif self._exception is not None: raise self._exception + self._check_done() return self._result def exception(self, timeout=None): - self._check_done() - if self._exception: + """If the operation raised an exception, return the `Exception` + object. Otherwise returns None. + """ + if self._exception is not None: return self._exception else: + self._check_done() return None def add_done_callback(self, fn): + """Attaches the given callback to the `Future`. + + It will be invoked with the `Future` as its argument when the Future + has finished running and its result is available. In Tornado + consider using `.IOLoop.add_future` instead of calling + `add_done_callback` directly. + """ if self._done: fn(self) else: self._callbacks.append(fn) def set_result(self, result): + """Sets the result of a ``Future``. + + It is undefined to call any of the ``set`` methods more than once + on the same object. + """ self._result = result self._set_done() def set_exception(self, exception): + """Sets the exception of a ``Future.``""" self._exception = exception self._set_done() + def exc_info(self): + """Returns a tuple in the same format as `sys.exc_info` or None. + + .. versionadded:: 3.3 + """ + return self._exc_info + + def set_exc_info(self, exc_info): + """Sets the exception information of a ``Future.`` + + Preserves tracebacks on Python 2. + + .. 
versionadded:: 3.3 + """ + self._exc_info = exc_info + self.set_exception(exc_info[1]) + def _check_done(self): if not self._done: raise Exception("DummyFuture does not support blocking for results") @@ -97,38 +177,16 @@ class _DummyFuture(object): cb(self) self._callbacks = None +TracebackFuture = Future + if futures is None: - Future = _DummyFuture + FUTURES = Future else: - Future = futures.Future + FUTURES = (futures.Future, Future) -class TracebackFuture(Future): - """Subclass of `Future` which can store a traceback with - exceptions. - - The traceback is automatically available in Python 3, but in the - Python 2 futures backport this information is discarded. - """ - def __init__(self): - super(TracebackFuture, self).__init__() - self.__exc_info = None - - def exc_info(self): - return self.__exc_info - - def set_exc_info(self, exc_info): - """Traceback-aware replacement for - `~concurrent.futures.Future.set_exception`. - """ - self.__exc_info = exc_info - self.set_exception(exc_info[1]) - - def result(self, timeout=None): - if self.__exc_info is not None: - raise_exc_info(self.__exc_info) - else: - return super(TracebackFuture, self).result(timeout=timeout) +def is_future(x): + return isinstance(x, FUTURES) class DummyExecutor(object): @@ -254,10 +312,13 @@ def return_future(f): def chain_future(a, b): """Chain two futures together so that when one completes, so does the other. - The result (success or failure) of ``a`` will be copied to ``b``. + The result (success or failure) of ``a`` will be copied to ``b``, unless + ``b`` has already been completed or cancelled by the time ``a`` finishes. 
""" def copy(future): assert future is a + if b.done(): + return if (isinstance(a, TracebackFuture) and isinstance(b, TracebackFuture) and a.exc_info() is not None): b.set_exc_info(a.exc_info()) diff --git a/libs/tornado/curl_httpclient.py b/libs/tornado/curl_httpclient.py index 0df7a7ee..fc7d7f26 100755 --- a/libs/tornado/curl_httpclient.py +++ b/libs/tornado/curl_httpclient.py @@ -268,6 +268,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient): info["callback"](HTTPResponse( request=info["request"], code=code, headers=info["headers"], buffer=buffer, effective_url=effective_url, error=error, + reason=info['headers'].get("X-Http-Reason", None), request_time=time.time() - info["curl_start_time"], time_info=time_info)) except Exception: @@ -470,7 +471,11 @@ def _curl_header_callback(headers, header_line): header_line = header_line.strip() if header_line.startswith("HTTP/"): headers.clear() - return + try: + (__, __, reason) = httputil.parse_response_start_line(header_line) + header_line = "X-Http-Reason: %s" % reason + except httputil.HTTPInputException: + return if not header_line: return headers.parse_line(header_line) diff --git a/libs/tornado/escape.py b/libs/tornado/escape.py index 95c0f24e..48fa673c 100755 --- a/libs/tornado/escape.py +++ b/libs/tornado/escape.py @@ -75,7 +75,7 @@ def xhtml_unescape(value): # The fact that json_encode wraps json.dumps is an implementation detail. -# Please see https://github.com/facebook/tornado/pull/706 +# Please see https://github.com/tornadoweb/tornado/pull/706 # before sending a pull request that adds **kwargs to this function. 
def json_encode(value): """JSON-encodes the given Python object.""" diff --git a/libs/tornado/gen.py b/libs/tornado/gen.py index aa931b45..4d1dc6e1 100755 --- a/libs/tornado/gen.py +++ b/libs/tornado/gen.py @@ -87,9 +87,9 @@ import itertools import sys import types -from tornado.concurrent import Future, TracebackFuture +from tornado.concurrent import Future, TracebackFuture, is_future, chain_future from tornado.ioloop import IOLoop -from tornado.stack_context import ExceptionStackContext, wrap +from tornado import stack_context class KeyReuseError(Exception): @@ -112,6 +112,10 @@ class ReturnValueIgnoredError(Exception): pass +class TimeoutError(Exception): + """Exception raised by ``with_timeout``.""" + + def engine(func): """Callback-oriented decorator for asynchronous generators. @@ -129,45 +133,20 @@ def engine(func): `~tornado.web.RequestHandler` :ref:`HTTP verb methods `, which use ``self.finish()`` in place of a callback argument. """ + func = _make_coroutine_wrapper(func, replace_callback=False) @functools.wraps(func) def wrapper(*args, **kwargs): - runner = None - - def handle_exception(typ, value, tb): - # if the function throws an exception before its first "yield" - # (or is not a generator at all), the Runner won't exist yet. - # However, in that case we haven't reached anything asynchronous - # yet, so we can just let the exception propagate. 
- if runner is not None: - return runner.handle_exception(typ, value, tb) - return False - with ExceptionStackContext(handle_exception) as deactivate: - try: - result = func(*args, **kwargs) - except (Return, StopIteration) as e: - result = getattr(e, 'value', None) - else: - if isinstance(result, types.GeneratorType): - def final_callback(value): - if value is not None: - raise ReturnValueIgnoredError( - "@gen.engine functions cannot return values: " - "%r" % (value,)) - assert value is None - deactivate() - runner = Runner(result, final_callback) - runner.run() - return - if result is not None: + future = func(*args, **kwargs) + def final_callback(future): + if future.result() is not None: raise ReturnValueIgnoredError( "@gen.engine functions cannot return values: %r" % - (result,)) - deactivate() - # no yield, so we're done + (future.result(),)) + future.add_done_callback(final_callback) return wrapper -def coroutine(func): +def coroutine(func, replace_callback=True): """Decorator for asynchronous generators. Any generator that yields objects from this module must be wrapped @@ -191,43 +170,56 @@ def coroutine(func): From the caller's perspective, ``@gen.coroutine`` is similar to the combination of ``@return_future`` and ``@gen.engine``. """ + return _make_coroutine_wrapper(func, replace_callback=True) + + +def _make_coroutine_wrapper(func, replace_callback): + """The inner workings of ``@gen.coroutine`` and ``@gen.engine``. + + The two decorators differ in their treatment of the ``callback`` + argument, so we cannot simply implement ``@engine`` in terms of + ``@coroutine``. 
+ """ @functools.wraps(func) def wrapper(*args, **kwargs): - runner = None future = TracebackFuture() - if 'callback' in kwargs: + if replace_callback and 'callback' in kwargs: callback = kwargs.pop('callback') IOLoop.current().add_future( future, lambda future: callback(future.result())) - def handle_exception(typ, value, tb): - try: - if runner is not None and runner.handle_exception(typ, value, tb): - return True - except Exception: - typ, value, tb = sys.exc_info() - future.set_exc_info((typ, value, tb)) - return True - with ExceptionStackContext(handle_exception) as deactivate: - try: - result = func(*args, **kwargs) - except (Return, StopIteration) as e: - result = getattr(e, 'value', None) - except Exception: - deactivate() - future.set_exc_info(sys.exc_info()) + try: + result = func(*args, **kwargs) + except (Return, StopIteration) as e: + result = getattr(e, 'value', None) + except Exception: + future.set_exc_info(sys.exc_info()) + return future + else: + if isinstance(result, types.GeneratorType): + # Inline the first iteration of Runner.run. This lets us + # avoid the cost of creating a Runner when the coroutine + # never actually yields, which in turn allows us to + # use "optional" coroutines in critical path code without + # performance penalty for the synchronous case. 
+ try: + orig_stack_contexts = stack_context._state.contexts + yielded = next(result) + if stack_context._state.contexts is not orig_stack_contexts: + yielded = TracebackFuture() + yielded.set_exception( + stack_context.StackContextInconsistentError( + 'stack_context inconsistency (probably caused ' + 'by yield within a "with StackContext" block)')) + except (StopIteration, Return) as e: + future.set_result(getattr(e, 'value', None)) + except Exception: + future.set_exc_info(sys.exc_info()) + else: + Runner(result, future, yielded) return future - else: - if isinstance(result, types.GeneratorType): - def final_callback(value): - deactivate() - future.set_result(value) - runner = Runner(result, final_callback) - runner.run() - return future - deactivate() - future.set_result(result) + future.set_result(result) return future return wrapper @@ -348,7 +340,7 @@ class WaitAll(YieldPoint): return [self.runner.pop_result(key) for key in self.keys] -class Task(YieldPoint): +def Task(func, *args, **kwargs): """Runs a single asynchronous operation. Takes a function (and optional additional arguments) and runs it with @@ -362,25 +354,25 @@ class Task(YieldPoint): func(args, callback=(yield gen.Callback(key))) result = yield gen.Wait(key) + + .. versionchanged:: 3.3 + ``gen.Task`` is now a function that returns a `.Future`, instead of + a subclass of `YieldPoint`. It still behaves the same way when + yielded. 
""" - def __init__(self, func, *args, **kwargs): - assert "callback" not in kwargs - self.args = args - self.kwargs = kwargs - self.func = func - - def start(self, runner): - self.runner = runner - self.key = object() - runner.register_callback(self.key) - self.kwargs["callback"] = runner.result_callback(self.key) - self.func(*self.args, **self.kwargs) - - def is_ready(self): - return self.runner.is_ready(self.key) - - def get_result(self): - return self.runner.pop_result(self.key) + future = Future() + def handle_exception(typ, value, tb): + if future.done(): + return False + future.set_exc_info((typ, value, tb)) + return True + def set_result(result): + if future.done(): + return + future.set_result(result) + with stack_context.ExceptionStackContext(handle_exception): + func(*args, callback=_argument_adapter(set_result), **kwargs) + return future class YieldFuture(YieldPoint): @@ -414,10 +406,14 @@ class YieldFuture(YieldPoint): class Multi(YieldPoint): """Runs multiple asynchronous operations in parallel. - Takes a list of ``Tasks`` or other ``YieldPoints`` and returns a list of + Takes a list of ``YieldPoints`` or ``Futures`` and returns a list of their responses. It is not necessary to call `Multi` explicitly, since the engine will do so automatically when the generator yields - a list of ``YieldPoints``. + a list of ``YieldPoints`` or a mixture of ``YieldPoints`` and ``Futures``. + + Instead of a list, the argument may also be a dictionary whose values are + Futures, in which case a parallel dictionary is returned mapping the same + keys to their results. 
""" def __init__(self, children): self.keys = None @@ -426,7 +422,7 @@ class Multi(YieldPoint): children = children.values() self.children = [] for i in children: - if isinstance(i, Future): + if is_future(i): i = YieldFuture(i) self.children.append(i) assert all(isinstance(i, YieldPoint) for i in self.children) @@ -450,18 +446,127 @@ class Multi(YieldPoint): return list(result) -class _NullYieldPoint(YieldPoint): - def start(self, runner): - pass +def multi_future(children): + """Wait for multiple asynchronous futures in parallel. - def is_ready(self): - return True + Takes a list of ``Futures`` (but *not* other ``YieldPoints``) and returns + a new Future that resolves when all the other Futures are done. + If all the ``Futures`` succeeded, the returned Future's result is a list + of their results. If any failed, the returned Future raises the exception + of the first one to fail. - def get_result(self): - return None + Instead of a list, the argument may also be a dictionary whose values are + Futures, in which case a parallel dictionary is returned mapping the same + keys to their results. + + It is not necessary to call `multi_future` explcitly, since the engine will + do so automatically when the generator yields a list of `Futures`. + This function is faster than the `Multi` `YieldPoint` because it does not + require the creation of a stack context. + + .. 
versionadded:: 3.3 + """ + if isinstance(children, dict): + keys = list(children.keys()) + children = children.values() + else: + keys = None + assert all(is_future(i) for i in children) + unfinished_children = set(children) + + future = Future() + if not children: + future.set_result({} if keys is not None else []) + def callback(f): + unfinished_children.remove(f) + if not unfinished_children: + try: + result_list = [i.result() for i in children] + except Exception: + future.set_exc_info(sys.exc_info()) + else: + if keys is not None: + future.set_result(dict(zip(keys, result_list))) + else: + future.set_result(result_list) + for f in children: + f.add_done_callback(callback) + return future -_null_yield_point = _NullYieldPoint() +def maybe_future(x): + """Converts ``x`` into a `.Future`. + + If ``x`` is already a `.Future`, it is simply returned; otherwise + it is wrapped in a new `.Future`. This is suitable for use as + ``result = yield gen.maybe_future(f())`` when you don't know whether + ``f()`` returns a `.Future` or not. + """ + if is_future(x): + return x + else: + fut = Future() + fut.set_result(x) + return fut + + +def with_timeout(timeout, future, io_loop=None): + """Wraps a `.Future` in a timeout. + + Raises `TimeoutError` if the input future does not complete before + ``timeout``, which may be specified in any form allowed by + `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time + relative to `.IOLoop.time`) + + Currently only supports Futures, not other `YieldPoint` classes. + + .. versionadded:: 3.3 + """ + # TODO: allow yield points in addition to futures? + # Tricky to do with stack_context semantics. 
+ # + # It's tempting to optimize this by cancelling the input future on timeout + # instead of creating a new one, but A) we can't know if we are the only + # one waiting on the input future, so cancelling it might disrupt other + # callers and B) concurrent futures can only be cancelled while they are + # in the queue, so cancellation cannot reliably bound our waiting time. + result = Future() + chain_future(future, result) + if io_loop is None: + io_loop = IOLoop.current() + timeout_handle = io_loop.add_timeout( + timeout, + lambda: result.set_exception(TimeoutError("Timeout"))) + if isinstance(future, Future): + # We know this future will resolve on the IOLoop, so we don't + # need the extra thread-safety of IOLoop.add_future (and we also + # don't care about StackContext here. + future.add_done_callback( + lambda future: io_loop.remove_timeout(timeout_handle)) + else: + # concurrent.futures.Futures may resolve on any thread, so we + # need to route them back to the IOLoop. + io_loop.add_future( + future, lambda future: io_loop.remove_timeout(timeout_handle)) + return result + + +_null_future = Future() +_null_future.set_result(None) + +moment = Future() +moment.__doc__ = \ + """A special object which may be yielded to allow the IOLoop to run for +one iteration. + +This is not needed in normal use but it can be helpful in long-running +coroutines that are likely to yield Futures that are ready instantly. + +Usage: ``yield gen.moment`` + +.. versionadded:: 3.3 +""" +moment.set_result(None) class Runner(object): @@ -469,35 +574,55 @@ class Runner(object): Maintains information about pending callbacks and their results. - ``final_callback`` is run after the generator exits. 
+ The results of the generator are stored in ``result_future`` (a + `.TracebackFuture`) """ - def __init__(self, gen, final_callback): + def __init__(self, gen, result_future, first_yielded): self.gen = gen - self.final_callback = final_callback - self.yield_point = _null_yield_point - self.pending_callbacks = set() - self.results = {} + self.result_future = result_future + self.future = _null_future + self.yield_point = None + self.pending_callbacks = None + self.results = None self.running = False self.finished = False - self.exc_info = None self.had_exception = False + self.io_loop = IOLoop.current() + # For efficiency, we do not create a stack context until we + # reach a YieldPoint (stack contexts are required for the historical + # semantics of YieldPoints, but not for Futures). When we have + # done so, this field will be set and must be called at the end + # of the coroutine. + self.stack_context_deactivate = None + if self.handle_yield(first_yielded): + self.run() def register_callback(self, key): """Adds ``key`` to the list of callbacks.""" + if self.pending_callbacks is None: + # Lazily initialize the old-style YieldPoint data structures. 
+ self.pending_callbacks = set() + self.results = {} if key in self.pending_callbacks: raise KeyReuseError("key %r is already pending" % (key,)) self.pending_callbacks.add(key) def is_ready(self, key): """Returns true if a result is available for ``key``.""" - if key not in self.pending_callbacks: + if self.pending_callbacks is None or key not in self.pending_callbacks: raise UnknownKeyError("key %r is not pending" % (key,)) return key in self.results def set_result(self, key, result): """Sets the result for ``key`` and attempts to resume the generator.""" self.results[key] = result - self.run() + if self.yield_point is not None and self.yield_point.is_ready(): + try: + self.future.set_result(self.yield_point.get_result()) + except: + self.future.set_exc_info(sys.exc_info()) + self.yield_point = None + self.run() def pop_result(self, key): """Returns the result for ``key`` and unregisters it.""" @@ -513,25 +638,27 @@ class Runner(object): try: self.running = True while True: - if self.exc_info is None: - try: - if not self.yield_point.is_ready(): - return - next = self.yield_point.get_result() - self.yield_point = None - except Exception: - self.exc_info = sys.exc_info() + future = self.future + if not future.done(): + return + self.future = None try: - if self.exc_info is not None: + orig_stack_contexts = stack_context._state.contexts + try: + value = future.result() + except Exception: self.had_exception = True - exc_info = self.exc_info - self.exc_info = None - yielded = self.gen.throw(*exc_info) + yielded = self.gen.throw(*sys.exc_info()) else: - yielded = self.gen.send(next) + yielded = self.gen.send(value) + if stack_context._state.contexts is not orig_stack_contexts: + self.gen.throw( + stack_context.StackContextInconsistentError( + 'stack_context inconsistency (probably caused ' + 'by yield within a "with StackContext" block)')) except (StopIteration, Return) as e: self.finished = True - self.yield_point = _null_yield_point + self.future = _null_future if 
self.pending_callbacks and not self.had_exception: # If we ran cleanly without waiting on all callbacks # raise an error (really more of a warning). If we @@ -540,46 +667,105 @@ class Runner(object): raise LeakedCallbackError( "finished without waiting for callbacks %r" % self.pending_callbacks) - self.final_callback(getattr(e, 'value', None)) - self.final_callback = None + self.result_future.set_result(getattr(e, 'value', None)) + self.result_future = None + self._deactivate_stack_context() return except Exception: self.finished = True - self.yield_point = _null_yield_point - raise - if isinstance(yielded, (list, dict)): - yielded = Multi(yielded) - elif isinstance(yielded, Future): - yielded = YieldFuture(yielded) - if isinstance(yielded, YieldPoint): - self.yield_point = yielded - try: - self.yield_point.start(self) - except Exception: - self.exc_info = sys.exc_info() - else: - self.exc_info = (BadYieldError( - "yielded unknown object %r" % (yielded,)),) + self.future = _null_future + self.result_future.set_exc_info(sys.exc_info()) + self.result_future = None + self._deactivate_stack_context() + return + if not self.handle_yield(yielded): + return finally: self.running = False - def result_callback(self, key): - def inner(*args, **kwargs): - if kwargs or len(args) > 1: - result = Arguments(args, kwargs) - elif args: - result = args[0] + def handle_yield(self, yielded): + if isinstance(yielded, list): + if all(is_future(f) for f in yielded): + yielded = multi_future(yielded) else: - result = None - self.set_result(key, result) - return wrap(inner) + yielded = Multi(yielded) + elif isinstance(yielded, dict): + if all(is_future(f) for f in yielded.values()): + yielded = multi_future(yielded) + else: + yielded = Multi(yielded) + + if isinstance(yielded, YieldPoint): + self.future = TracebackFuture() + def start_yield_point(): + try: + yielded.start(self) + if yielded.is_ready(): + self.future.set_result( + yielded.get_result()) + else: + self.yield_point = yielded + 
except Exception: + self.future = TracebackFuture() + self.future.set_exc_info(sys.exc_info()) + if self.stack_context_deactivate is None: + # Start a stack context if this is the first + # YieldPoint we've seen. + with stack_context.ExceptionStackContext( + self.handle_exception) as deactivate: + self.stack_context_deactivate = deactivate + def cb(): + start_yield_point() + self.run() + self.io_loop.add_callback(cb) + return False + else: + start_yield_point() + elif is_future(yielded): + self.future = yielded + if not self.future.done() or self.future is moment: + self.io_loop.add_future( + self.future, lambda f: self.run()) + return False + else: + self.future = TracebackFuture() + self.future.set_exception(BadYieldError( + "yielded unknown object %r" % (yielded,))) + return True + + def result_callback(self, key): + return stack_context.wrap(_argument_adapter( + functools.partial(self.set_result, key))) def handle_exception(self, typ, value, tb): if not self.running and not self.finished: - self.exc_info = (typ, value, tb) + self.future = TracebackFuture() + self.future.set_exc_info((typ, value, tb)) self.run() return True else: return False + def _deactivate_stack_context(self): + if self.stack_context_deactivate is not None: + self.stack_context_deactivate() + self.stack_context_deactivate = None + Arguments = collections.namedtuple('Arguments', ['args', 'kwargs']) + + +def _argument_adapter(callback): + """Returns a function that when invoked runs ``callback`` with one arg. + + If the function returned by this function is called with exactly + one argument, that argument is passed to ``callback``. Otherwise + the args tuple and kwargs dict are wrapped in an `Arguments` object. 
+ """ + def wrapper(*args, **kwargs): + if kwargs or len(args) > 1: + callback(Arguments(args, kwargs)) + elif args: + callback(args[0]) + else: + callback(None) + return wrapper diff --git a/libs/tornado/http1connection.py b/libs/tornado/http1connection.py new file mode 100644 index 00000000..edaa5d9e --- /dev/null +++ b/libs/tornado/http1connection.py @@ -0,0 +1,650 @@ +#!/usr/bin/env python +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Client and server implementations of HTTP/1.x. + +.. versionadded:: 3.3 +""" + +from __future__ import absolute_import, division, print_function, with_statement + +from tornado.concurrent import Future +from tornado.escape import native_str, utf8 +from tornado import gen +from tornado import httputil +from tornado import iostream +from tornado.log import gen_log, app_log +from tornado import stack_context +from tornado.util import GzipDecompressor + + +class _QuietException(Exception): + def __init__(self): + pass + +class _ExceptionLoggingContext(object): + """Used with the ``with`` statement when calling delegate methods to + log any exceptions with the given logger. 
Any exceptions caught are + converted to _QuietException + """ + def __init__(self, logger): + self.logger = logger + + def __enter__(self): + pass + + def __exit__(self, typ, value, tb): + if value is not None: + self.logger.error("Uncaught exception", exc_info=(typ, value, tb)) + raise _QuietException + +class HTTP1ConnectionParameters(object): + """Parameters for `.HTTP1Connection` and `.HTTP1ServerConnection`. + """ + def __init__(self, no_keep_alive=False, chunk_size=None, + max_header_size=None, header_timeout=None, max_body_size=None, + body_timeout=None, use_gzip=False): + """ + :arg bool no_keep_alive: If true, always close the connection after + one request. + :arg int chunk_size: how much data to read into memory at once + :arg int max_header_size: maximum amount of data for HTTP headers + :arg float header_timeout: how long to wait for all headers (seconds) + :arg int max_body_size: maximum amount of data for body + :arg float body_timeout: how long to wait while reading body (seconds) + :arg bool use_gzip: if true, decode incoming ``Content-Encoding: gzip`` + """ + self.no_keep_alive = no_keep_alive + self.chunk_size = chunk_size or 65536 + self.max_header_size = max_header_size or 65536 + self.header_timeout = header_timeout + self.max_body_size = max_body_size + self.body_timeout = body_timeout + self.use_gzip = use_gzip + + +class HTTP1Connection(httputil.HTTPConnection): + """Implements the HTTP/1.x protocol. + + This class can be on its own for clients, or via `HTTP1ServerConnection` + for servers. + """ + def __init__(self, stream, is_client, params=None, context=None): + """ + :arg stream: an `.IOStream` + :arg bool is_client: client or server + :arg params: a `.HTTP1ConnectionParameters` instance or ``None`` + :arg context: an opaque application-defined object that can be accessed + as ``connection.context``. 
+ """ + self.is_client = is_client + self.stream = stream + if params is None: + params = HTTP1ConnectionParameters() + self.params = params + self.context = context + self.no_keep_alive = params.no_keep_alive + # The body limits can be altered by the delegate, so save them + # here instead of just referencing self.params later. + self._max_body_size = (self.params.max_body_size or + self.stream.max_buffer_size) + self._body_timeout = self.params.body_timeout + # _write_finished is set to True when finish() has been called, + # i.e. there will be no more data sent. Data may still be in the + # stream's write buffer. + self._write_finished = False + # True when we have read the entire incoming body. + self._read_finished = False + # _finish_future resolves when all data has been written and flushed + # to the IOStream. + self._finish_future = Future() + # If true, the connection should be closed after this request + # (after the response has been written in the server side, + # and after it has been read in the client) + self._disconnect_on_finish = False + self._clear_callbacks() + # Save the start lines after we read or write them; they + # affect later processing (e.g. 304 responses and HEAD methods + # have content-length but no bodies) + self._request_start_line = None + self._response_start_line = None + self._request_headers = None + # True if we are writing output with chunked encoding. + self._chunking_output = None + # While reading a body with a content-length, this is the + # amount left to read. + self._expected_content_remaining = None + # A Future for our outgoing writes, returned by IOStream.write. + self._pending_write = None + + def read_response(self, delegate): + """Read a single HTTP response. + + Typical client-mode usage is to write a request using `write_headers`, + `write`, and `finish`, and then call ``read_response``. 
+ + :arg delegate: a `.HTTPMessageDelegate` + + Returns a `.Future` that resolves to None after the full response has + been read. + """ + if self.params.use_gzip: + delegate = _GzipMessageDelegate(delegate, self.params.chunk_size) + return self._read_message(delegate) + + @gen.coroutine + def _read_message(self, delegate): + need_delegate_close = False + try: + header_future = self.stream.read_until_regex( + b"\r?\n\r?\n", + max_bytes=self.params.max_header_size) + if self.params.header_timeout is None: + header_data = yield header_future + else: + try: + header_data = yield gen.with_timeout( + self.stream.io_loop.time() + self.params.header_timeout, + header_future, + io_loop=self.stream.io_loop) + except gen.TimeoutError: + self.close() + raise gen.Return(False) + start_line, headers = self._parse_headers(header_data) + if self.is_client: + start_line = httputil.parse_response_start_line(start_line) + self._response_start_line = start_line + else: + start_line = httputil.parse_request_start_line(start_line) + self._request_start_line = start_line + self._request_headers = headers + + self._disconnect_on_finish = not self._can_keep_alive( + start_line, headers) + need_delegate_close = True + with _ExceptionLoggingContext(app_log): + header_future = delegate.headers_received(start_line, headers) + if header_future is not None: + yield header_future + if self.stream is None: + # We've been detached. + need_delegate_close = False + raise gen.Return(False) + skip_body = False + if self.is_client: + if (self._request_start_line is not None and + self._request_start_line.method == 'HEAD'): + skip_body = True + code = start_line.code + if code == 304: + skip_body = True + if code >= 100 and code < 200: + # TODO: client delegates will get headers_received twice + # in the case of a 100-continue. Document or change? 
+ yield self._read_message(delegate) + else: + if (headers.get("Expect") == "100-continue" and + not self._write_finished): + self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n") + if not skip_body: + body_future = self._read_body(headers, delegate) + if body_future is not None: + if self._body_timeout is None: + yield body_future + else: + try: + yield gen.with_timeout( + self.stream.io_loop.time() + self._body_timeout, + body_future, self.stream.io_loop) + except gen.TimeoutError: + gen_log.info("Timeout reading body from %s", + self.context) + self.stream.close() + raise gen.Return(False) + self._read_finished = True + if not self._write_finished or self.is_client: + need_delegate_close = False + with _ExceptionLoggingContext(app_log): + delegate.finish() + # If we're waiting for the application to produce an asynchronous + # response, and we're not detached, register a close callback + # on the stream (we didn't need one while we were reading) + if (not self._finish_future.done() and + self.stream is not None and + not self.stream.closed()): + self.stream.set_close_callback(self._on_connection_close) + yield self._finish_future + if self.is_client and self._disconnect_on_finish: + self.close() + if self.stream is None: + raise gen.Return(False) + except httputil.HTTPInputException as e: + gen_log.info("Malformed HTTP message from %s: %s", + self.context, e) + self.close() + raise gen.Return(False) + finally: + if need_delegate_close: + with _ExceptionLoggingContext(app_log): + delegate.on_connection_close() + self._clear_callbacks() + raise gen.Return(True) + + def _clear_callbacks(self): + """Clears the callback attributes. + + This allows the request handler to be garbage collected more + quickly in CPython by breaking up reference cycles. 
+ """ + self._write_callback = None + self._write_future = None + self._close_callback = None + if self.stream is not None: + self.stream.set_close_callback(None) + + def set_close_callback(self, callback): + """Sets a callback that will be run when the connection is closed. + + .. deprecated:: 3.3 + Use `.HTTPMessageDelegate.on_connection_close` instead. + """ + self._close_callback = stack_context.wrap(callback) + + def _on_connection_close(self): + # Note that this callback is only registered on the IOStream + # when we have finished reading the request and are waiting for + # the application to produce its response. + if self._close_callback is not None: + callback = self._close_callback + self._close_callback = None + callback() + if not self._finish_future.done(): + self._finish_future.set_result(None) + self._clear_callbacks() + + def close(self): + if self.stream is not None: + self.stream.close() + self._clear_callbacks() + if not self._finish_future.done(): + self._finish_future.set_result(None) + + def detach(self): + """Take control of the underlying stream. + + Returns the underlying `.IOStream` object and stops all further + HTTP processing. May only be called during + `.HTTPMessageDelegate.headers_received`. Intended for implementing + protocols like websockets that tunnel over an HTTP handshake. + """ + self._clear_callbacks() + stream = self.stream + self.stream = None + return stream + + def set_body_timeout(self, timeout): + """Sets the body timeout for a single request. + + Overrides the value from `.HTTP1ConnectionParameters`. + """ + self._body_timeout = timeout + + def set_max_body_size(self, max_body_size): + """Sets the body size limit for a single request. + + Overrides the value from `.HTTP1ConnectionParameters`. 
+ """ + self._max_body_size = max_body_size + + def write_headers(self, start_line, headers, chunk=None, callback=None): + """Implements `.HTTPConnection.write_headers`.""" + if self.is_client: + self._request_start_line = start_line + # Client requests with a non-empty body must have either a + # Content-Length or a Transfer-Encoding. + self._chunking_output = ( + start_line.method in ('POST', 'PUT', 'PATCH') and + 'Content-Length' not in headers and + 'Transfer-Encoding' not in headers) + else: + self._response_start_line = start_line + self._chunking_output = ( + # TODO: should this use + # self._request_start_line.version or + # start_line.version? + self._request_start_line.version == 'HTTP/1.1' and + # 304 responses have no body (not even a zero-length body), and so + # should not have either Content-Length or Transfer-Encoding. + # headers. + start_line.code != 304 and + # No need to chunk the output if a Content-Length is specified. + 'Content-Length' not in headers and + # Applications are discouraged from touching Transfer-Encoding, + # but if they do, leave it alone. + 'Transfer-Encoding' not in headers) + # If a 1.0 client asked for keep-alive, add the header. 
+ if (self._request_start_line.version == 'HTTP/1.0' and + (self._request_headers.get('Connection', '').lower() + == 'keep-alive')): + headers['Connection'] = 'Keep-Alive' + if self._chunking_output: + headers['Transfer-Encoding'] = 'chunked' + if (not self.is_client and + (self._request_start_line.method == 'HEAD' or + start_line.code == 304)): + self._expected_content_remaining = 0 + elif 'Content-Length' in headers: + self._expected_content_remaining = int(headers['Content-Length']) + else: + self._expected_content_remaining = None + lines = [utf8("%s %s %s" % start_line)] + lines.extend([utf8(n) + b": " + utf8(v) for n, v in headers.get_all()]) + for line in lines: + if b'\n' in line: + raise ValueError('Newline in header: ' + repr(line)) + future = None + if self.stream.closed(): + future = self._write_future = Future() + future.set_exception(iostream.StreamClosedError()) + else: + if callback is not None: + self._write_callback = stack_context.wrap(callback) + else: + future = self._write_future = Future() + data = b"\r\n".join(lines) + b"\r\n\r\n" + if chunk: + data += self._format_chunk(chunk) + self._pending_write = self.stream.write(data) + self._pending_write.add_done_callback(self._on_write_complete) + return future + + def _format_chunk(self, chunk): + if self._expected_content_remaining is not None: + self._expected_content_remaining -= len(chunk) + if self._expected_content_remaining < 0: + # Close the stream now to stop further framing errors. + self.stream.close() + raise httputil.HTTPOutputException( + "Tried to write more data than Content-Length") + if self._chunking_output and chunk: + # Don't write out empty chunks because that means END-OF-STREAM + # with chunked encoding + return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n" + else: + return chunk + + def write(self, chunk, callback=None): + """Implements `.HTTPConnection.write`. 
+ + For backwards compatibility it is allowed but deprecated to + skip `write_headers` and instead call `write()` with a + pre-encoded header block. + """ + future = None + if self.stream.closed(): + future = self._write_future = Future() + self._write_future.set_exception(iostream.StreamClosedError()) + else: + if callback is not None: + self._write_callback = stack_context.wrap(callback) + else: + future = self._write_future = Future() + self._pending_write = self.stream.write(self._format_chunk(chunk)) + self._pending_write.add_done_callback(self._on_write_complete) + return future + + def finish(self): + """Implements `.HTTPConnection.finish`.""" + if (self._expected_content_remaining is not None and + self._expected_content_remaining != 0 and + not self.stream.closed()): + self.stream.close() + raise httputil.HTTPOutputException( + "Tried to write %d bytes less than Content-Length" % + self._expected_content_remaining) + if self._chunking_output: + if not self.stream.closed(): + self._pending_write = self.stream.write(b"0\r\n\r\n") + self._pending_write.add_done_callback(self._on_write_complete) + self._write_finished = True + # If the app finished the request while we're still reading, + # divert any remaining data away from the delegate and + # close the connection when we're done sending our response. + # Closing the connection is the only way to avoid reading the + # whole input body. + # No more data is coming, so instruct TCP to send any remaining + # data immediately instead of waiting for a full packet or ack. 
+ self.stream.set_nodelay(True) + if self._pending_write is None: + self._finish_request(None) + else: + self._pending_write.add_done_callback(self._finish_request) + + def _on_write_complete(self, future): + if self._write_callback is not None: + callback = self._write_callback + self._write_callback = None + self.stream.io_loop.add_callback(callback) + if self._write_future is not None: + future = self._write_future + self._write_future = None + future.set_result(None) + + def _can_keep_alive(self, start_line, headers): + if self.params.no_keep_alive: + return False + connection_header = headers.get("Connection") + if connection_header is not None: + connection_header = connection_header.lower() + if start_line.version == "HTTP/1.1": + return connection_header != "close" + elif ("Content-Length" in headers + or start_line.method in ("HEAD", "GET")): + return connection_header == "keep-alive" + return False + + def _finish_request(self, future): + self._clear_callbacks() + if not self.is_client and self._disconnect_on_finish: + self.close() + return + # Turn Nagle's algorithm back on, leaving the stream in its + # default state for the next request. 
+ self.stream.set_nodelay(False) + if not self._finish_future.done(): + self._finish_future.set_result(None) + + def _parse_headers(self, data): + data = native_str(data.decode('latin1')) + eol = data.find("\r\n") + start_line = data[:eol] + try: + headers = httputil.HTTPHeaders.parse(data[eol:]) + except ValueError: + # probably from split() if there was no ':' in the line + raise httputil.HTTPInputException("Malformed HTTP headers: %r" % + data[eol:100]) + return start_line, headers + + def _read_body(self, headers, delegate): + content_length = headers.get("Content-Length") + if content_length: + content_length = int(content_length) + if content_length > self._max_body_size: + raise httputil.HTTPInputException("Content-Length too long") + return self._read_fixed_body(content_length, delegate) + if headers.get("Transfer-Encoding") == "chunked": + return self._read_chunked_body(delegate) + if self.is_client: + return self._read_body_until_close(delegate) + return None + + @gen.coroutine + def _read_fixed_body(self, content_length, delegate): + while content_length > 0: + body = yield self.stream.read_bytes( + min(self.params.chunk_size, content_length), partial=True) + content_length -= len(body) + if not self._write_finished or self.is_client: + with _ExceptionLoggingContext(app_log): + yield gen.maybe_future(delegate.data_received(body)) + + @gen.coroutine + def _read_chunked_body(self, delegate): + # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1 + total_size = 0 + while True: + chunk_len = yield self.stream.read_until(b"\r\n", max_bytes=64) + chunk_len = int(chunk_len.strip(), 16) + if chunk_len == 0: + return + total_size += chunk_len + if total_size > self._max_body_size: + raise httputil.HTTPInputException("chunked body too large") + bytes_to_read = chunk_len + while bytes_to_read: + chunk = yield self.stream.read_bytes( + min(bytes_to_read, self.params.chunk_size), partial=True) + bytes_to_read -= len(chunk) + if not 
self._write_finished or self.is_client: + with _ExceptionLoggingContext(app_log): + yield gen.maybe_future(delegate.data_received(chunk)) + # chunk ends with \r\n + crlf = yield self.stream.read_bytes(2) + assert crlf == b"\r\n" + + @gen.coroutine + def _read_body_until_close(self, delegate): + body = yield self.stream.read_until_close() + if not self._write_finished or self.is_client: + with _ExceptionLoggingContext(app_log): + delegate.data_received(body) + + +class _GzipMessageDelegate(httputil.HTTPMessageDelegate): + """Wraps an `HTTPMessageDelegate` to decode ``Content-Encoding: gzip``. + """ + def __init__(self, delegate, chunk_size): + self._delegate = delegate + self._chunk_size = chunk_size + self._decompressor = None + + def headers_received(self, start_line, headers): + if headers.get("Content-Encoding") == "gzip": + self._decompressor = GzipDecompressor() + # Downstream delegates will only see uncompressed data, + # so rename the content-encoding header. + # (but note that curl_httpclient doesn't do this). + headers.add("X-Consumed-Content-Encoding", + headers["Content-Encoding"]) + del headers["Content-Encoding"] + return self._delegate.headers_received(start_line, headers) + + @gen.coroutine + def data_received(self, chunk): + if self._decompressor: + compressed_data = chunk + while compressed_data: + decompressed = self._decompressor.decompress( + compressed_data, self._chunk_size) + if decompressed: + yield gen.maybe_future( + self._delegate.data_received(decompressed)) + compressed_data = self._decompressor.unconsumed_tail + else: + yield gen.maybe_future(self._delegate.data_received(chunk)) + + def finish(self): + if self._decompressor is not None: + tail = self._decompressor.flush() + if tail: + # I believe the tail will always be empty (i.e. + # decompress will return all it can). The purpose + # of the flush call is to detect errors such + # as truncated input. 
But in case it ever returns + # anything, treat it as an extra chunk + self._delegate.data_received(tail) + return self._delegate.finish() + + +class HTTP1ServerConnection(object): + """An HTTP/1.x server.""" + def __init__(self, stream, params=None, context=None): + """ + :arg stream: an `.IOStream` + :arg params: a `.HTTP1ConnectionParameters` or None + :arg context: an opaque application-defined object that is accessible + as ``connection.context`` + """ + self.stream = stream + if params is None: + params = HTTP1ConnectionParameters() + self.params = params + self.context = context + self._serving_future = None + + @gen.coroutine + def close(self): + """Closes the connection. + + Returns a `.Future` that resolves after the serving loop has exited. + """ + self.stream.close() + # Block until the serving loop is done, but ignore any exceptions + # (start_serving is already responsible for logging them). + try: + yield self._serving_future + except Exception: + pass + + def start_serving(self, delegate): + """Starts serving requests on this connection. + + :arg delegate: a `.HTTPServerConnectionDelegate` + """ + assert isinstance(delegate, httputil.HTTPServerConnectionDelegate) + self._serving_future = self._server_request_loop(delegate) + # Register the future on the IOLoop so its errors get logged. + self.stream.io_loop.add_future(self._serving_future, + lambda f: f.result()) + + @gen.coroutine + def _server_request_loop(self, delegate): + try: + while True: + conn = HTTP1Connection(self.stream, False, + self.params, self.context) + request_delegate = delegate.start_request(self, conn) + try: + ret = yield conn.read_response(request_delegate) + except (iostream.StreamClosedError, + iostream.UnsatisfiableReadError): + return + except _QuietException: + # This exception was already logged. 
+ conn.close() + return + except Exception: + gen_log.error("Uncaught exception", exc_info=True) + conn.close() + return + if not ret: + return + yield gen.moment + finally: + delegate.on_close(self) diff --git a/libs/tornado/httpclient.py b/libs/tornado/httpclient.py index 9b42d401..94a4593a 100755 --- a/libs/tornado/httpclient.py +++ b/libs/tornado/httpclient.py @@ -25,6 +25,11 @@ to switch to ``curl_httpclient`` for reasons such as the following: Note that if you are using ``curl_httpclient``, it is highly recommended that you use a recent version of ``libcurl`` and ``pycurl``. Currently the minimum supported version is 7.18.2, and the recommended version is 7.21.1 or newer. +It is highly recommended that your ``libcurl`` installation is built with +asynchronous DNS resolver (threaded or c-ares), otherwise you may encounter +various problems with request timeouts (for more information, see +http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS +and comments in curl_httpclient.py). """ from __future__ import absolute_import, division, print_function, with_statement @@ -34,7 +39,7 @@ import time import weakref from tornado.concurrent import TracebackFuture -from tornado.escape import utf8 +from tornado.escape import utf8, native_str from tornado import httputil, stack_context from tornado.ioloop import IOLoop from tornado.util import Configurable @@ -166,7 +171,7 @@ class AsyncHTTPClient(Configurable): kwargs: ``HTTPRequest(request, **kwargs)`` This method returns a `.Future` whose result is an - `HTTPResponse`. The ``Future`` wil raise an `HTTPError` if + `HTTPResponse`. The ``Future`` will raise an `HTTPError` if the request returned a non-200 response code. If a ``callback`` is given, it will be invoked with the `HTTPResponse`. 
@@ -259,14 +264,27 @@ class HTTPRequest(object): proxy_password=None, allow_nonstandard_methods=None, validate_cert=None, ca_certs=None, allow_ipv6=None, - client_key=None, client_cert=None): + client_key=None, client_cert=None, body_producer=None, + expect_100_continue=False): r"""All parameters except ``url`` are optional. :arg string url: URL to fetch :arg string method: HTTP method, e.g. "GET" or "POST" :arg headers: Additional HTTP headers to pass on the request - :arg body: HTTP body to pass on the request :type headers: `~tornado.httputil.HTTPHeaders` or `dict` + :arg body: HTTP request body as a string (byte or unicode; if unicode + the utf-8 encoding will be used) + :arg body_producer: Callable used for lazy/asynchronous request bodies. + It is called with one argument, a ``write`` function, and should + return a `.Future`. It should call the write function with new + data as it becomes available. The write function returns a + `.Future` which can be used for flow control. + Only one of ``body`` and ``body_producer`` may + be specified. ``body_producer`` is not supported on + ``curl_httpclient``. When using ``body_producer`` it is recommended + to pass a ``Content-Length`` in the headers as otherwise chunked + encoding will be used, and many servers do not support chunked + encoding on requests. New in Tornado 3.3 :arg string auth_username: Username for HTTP authentication :arg string auth_password: Password for HTTP authentication :arg string auth_mode: Authentication mode; default is "basic". @@ -319,6 +337,11 @@ class HTTPRequest(object): note below when used with ``curl_httpclient``. :arg string client_cert: Filename for client SSL certificate, if any. See note below when used with ``curl_httpclient``. + :arg bool expect_100_continue: If true, send the + ``Expect: 100-continue`` header and wait for a continue response + before sending the request body. Only supported with + simple_httpclient. + .. 
note:: @@ -334,6 +357,9 @@ class HTTPRequest(object): .. versionadded:: 3.1 The ``auth_mode`` argument. + + .. versionadded:: 3.3 + The ``body_producer`` and ``expect_100_continue`` arguments. """ # Note that some of these attributes go through property setters # defined below. @@ -348,6 +374,7 @@ class HTTPRequest(object): self.url = url self.method = method self.body = body + self.body_producer = body_producer self.auth_username = auth_username self.auth_password = auth_password self.auth_mode = auth_mode @@ -367,6 +394,7 @@ class HTTPRequest(object): self.allow_ipv6 = allow_ipv6 self.client_key = client_key self.client_cert = client_cert + self.expect_100_continue = expect_100_continue self.start_time = time.time() @property @@ -388,6 +416,14 @@ class HTTPRequest(object): def body(self, value): self._body = utf8(value) + @property + def body_producer(self): + return self._body_producer + + @body_producer.setter + def body_producer(self, value): + self._body_producer = stack_context.wrap(value) + @property def streaming_callback(self): return self._streaming_callback @@ -423,8 +459,6 @@ class HTTPResponse(object): * code: numeric HTTP status code, e.g. 
200 or 404 * reason: human-readable reason phrase describing the status code - (with curl_httpclient, this is a default value rather than the - server's actual response) * headers: `tornado.httputil.HTTPHeaders` object @@ -466,7 +500,8 @@ class HTTPResponse(object): self.effective_url = effective_url if error is None: if self.code < 200 or self.code >= 300: - self.error = HTTPError(self.code, response=self) + self.error = HTTPError(self.code, message=self.reason, + response=self) else: self.error = None else: @@ -556,7 +591,7 @@ def main(): if options.print_headers: print(response.headers) if options.print_body: - print(response.body) + print(native_str(response.body)) client.close() if __name__ == "__main__": diff --git a/libs/tornado/httpserver.py b/libs/tornado/httpserver.py index 34e7b768..469374e1 100755 --- a/libs/tornado/httpserver.py +++ b/libs/tornado/httpserver.py @@ -20,70 +20,55 @@ Typical applications have little direct interaction with the `HTTPServer` class except to start a server at the beginning of the process (and even that is often done indirectly via `tornado.web.Application.listen`). -This module also defines the `HTTPRequest` class which is exposed via -`tornado.web.RequestHandler.request`. +.. versionchanged:: 3.3 + + The ``HTTPRequest`` class that used to live in this module has been moved + to `tornado.httputil.HTTPServerRequest`. The old name remains as an alias. 
""" from __future__ import absolute_import, division, print_function, with_statement import socket -import ssl -import time -import copy -from tornado.escape import native_str, parse_qs_bytes +from tornado.escape import native_str +from tornado.http1connection import HTTP1ServerConnection, HTTP1ConnectionParameters +from tornado import gen from tornado import httputil from tornado import iostream -from tornado.log import gen_log from tornado import netutil from tornado.tcpserver import TCPServer -from tornado import stack_context -from tornado.util import bytes_type - -try: - import Cookie # py2 -except ImportError: - import http.cookies as Cookie # py3 -class HTTPServer(TCPServer): +class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate): r"""A non-blocking, single-threaded HTTP server. - A server is defined by a request callback that takes an HTTPRequest - instance as an argument and writes a valid HTTP response with - `HTTPRequest.write`. `HTTPRequest.finish` finishes the request (but does - not necessarily close the connection in the case of HTTP/1.1 keep-alive - requests). A simple example server that echoes back the URI you - requested:: + A server is defined by either a request callback that takes a + `.HTTPServerRequest` as an argument or a `.HTTPServerConnectionDelegate` + instance. 
+ + A simple example server that echoes back the URI you requested:: import tornado.httpserver import tornado.ioloop def handle_request(request): message = "You requested %s\n" % request.uri - request.write("HTTP/1.1 200 OK\r\nContent-Length: %d\r\n\r\n%s" % ( - len(message), message)) - request.finish() + request.connection.write_headers( + httputil.ResponseStartLine('HTTP/1.1', 200, 'OK'), + {"Content-Length": str(len(message))}) + request.connection.write(message) + request.connection.finish() http_server = tornado.httpserver.HTTPServer(handle_request) http_server.listen(8888) tornado.ioloop.IOLoop.instance().start() - `HTTPServer` is a very basic connection handler. It parses the request - headers and body, but the request callback is responsible for producing - the response exactly as it will appear on the wire. This affords - maximum flexibility for applications to implement whatever parts - of HTTP responses are required. + Applications should use the methods of `.HTTPConnection` to write + their response. `HTTPServer` supports keep-alive connections by default (automatically for HTTP/1.1, or for HTTP/1.0 when the client - requests ``Connection: keep-alive``). This means that the request - callback must generate a properly-framed response, using either - the ``Content-Length`` header or ``Transfer-Encoding: chunked``. - Applications that are unable to frame their responses properly - should instead return a ``Connection: close`` header in each - response and pass ``no_keep_alive=True`` to the `HTTPServer` - constructor. + requests ``Connection: keep-alive``). If ``xheaders`` is ``True``, we support the ``X-Real-Ip``/``X-Forwarded-For`` and @@ -143,407 +128,169 @@ class HTTPServer(TCPServer): servers if you want to create your listening sockets in some way other than `tornado.netutil.bind_sockets`. + .. versionchanged:: 3.3 + Added ``gzip``, ``chunk_size``, ``max_header_size``, + ``idle_connection_timeout``, ``body_timeout``, ``max_body_size`` + arguments. 
Added support for `.HTTPServerConnectionDelegate` + instances as ``request_callback``. """ def __init__(self, request_callback, no_keep_alive=False, io_loop=None, - xheaders=False, ssl_options=None, protocol=None, **kwargs): + xheaders=False, ssl_options=None, protocol=None, gzip=False, + chunk_size=None, max_header_size=None, + idle_connection_timeout=None, body_timeout=None, + max_body_size=None, max_buffer_size=None): self.request_callback = request_callback self.no_keep_alive = no_keep_alive self.xheaders = xheaders self.protocol = protocol + self.conn_params = HTTP1ConnectionParameters( + use_gzip=gzip, + chunk_size=chunk_size, + max_header_size=max_header_size, + header_timeout=idle_connection_timeout or 3600, + max_body_size=max_body_size, + body_timeout=body_timeout) TCPServer.__init__(self, io_loop=io_loop, ssl_options=ssl_options, - **kwargs) + max_buffer_size=max_buffer_size, + read_chunk_size=chunk_size) + self._connections = set() + + @gen.coroutine + def close_all_connections(self): + while self._connections: + # Peek at an arbitrary element of the set + conn = next(iter(self._connections)) + yield conn.close() def handle_stream(self, stream, address): - HTTPConnection(stream, address, self.request_callback, - self.no_keep_alive, self.xheaders, self.protocol) + context = _HTTPRequestContext(stream, address, + self.protocol) + conn = HTTP1ServerConnection( + stream, self.conn_params, context) + self._connections.add(conn) + conn.start_serving(self) + + def start_request(self, server_conn, request_conn): + return _ServerRequestAdapter(self, request_conn) + + def on_close(self, server_conn): + self._connections.remove(server_conn) -class _BadRequestException(Exception): - """Exception class for malformed HTTP requests.""" - pass - - -class HTTPConnection(object): - """Handles a connection to an HTTP client, executing HTTP requests. - - We parse HTTP headers and bodies, and execute the request callback - until the HTTP conection is closed. 
- """ - def __init__(self, stream, address, request_callback, no_keep_alive=False, - xheaders=False, protocol=None): - self.stream = stream +class _HTTPRequestContext(object): + def __init__(self, stream, address, protocol): self.address = address + self.protocol = protocol # Save the socket's address family now so we know how to # interpret self.address even after the stream is closed # and its socket attribute replaced with None. - self.address_family = stream.socket.family - self.request_callback = request_callback - self.no_keep_alive = no_keep_alive - self.xheaders = xheaders - self.protocol = protocol - self._clear_request_state() - # Save stack context here, outside of any request. This keeps - # contexts from one request from leaking into the next. - self._header_callback = stack_context.wrap(self._on_headers) - self.stream.set_close_callback(self._on_connection_close) - self.stream.read_until(b"\r\n\r\n", self._header_callback) - - def _clear_request_state(self): - """Clears the per-request state. - - This is run in between requests to allow the previous handler - to be garbage collected (and prevent spurious close callbacks), - and when the connection is closed (to break up cycles and - facilitate garbage collection in cpython). - """ - self._request = None - self._request_finished = False - self._write_callback = None - self._close_callback = None - - def set_close_callback(self, callback): - """Sets a callback that will be run when the connection is closed. - - Use this instead of accessing - `HTTPConnection.stream.set_close_callback - <.BaseIOStream.set_close_callback>` directly (which was the - recommended approach prior to Tornado 3.0). - """ - self._close_callback = stack_context.wrap(callback) - - def _on_connection_close(self): - if self._close_callback is not None: - callback = self._close_callback - self._close_callback = None - callback() - # Delete any unfinished callbacks to break up reference cycles. 
- self._header_callback = None - self._clear_request_state() - - def close(self): - self.stream.close() - # Remove this reference to self, which would otherwise cause a - # cycle and delay garbage collection of this connection. - self._header_callback = None - self._clear_request_state() - - def write(self, chunk, callback=None): - """Writes a chunk of output to the stream.""" - if not self.stream.closed(): - self._write_callback = stack_context.wrap(callback) - self.stream.write(chunk, self._on_write_complete) - - def finish(self): - """Finishes the request.""" - self._request_finished = True - # No more data is coming, so instruct TCP to send any remaining - # data immediately instead of waiting for a full packet or ack. - self.stream.set_nodelay(True) - if not self.stream.writing(): - self._finish_request() - - def _on_write_complete(self): - if self._write_callback is not None: - callback = self._write_callback - self._write_callback = None - callback() - # _on_write_complete is enqueued on the IOLoop whenever the - # IOStream's write buffer becomes empty, but it's possible for - # another callback that runs on the IOLoop before it to - # simultaneously write more data and finish the request. If - # there is still data in the IOStream, a future - # _on_write_complete will be responsible for calling - # _finish_request. 
- if self._request_finished and not self.stream.writing(): - self._finish_request() - - def _finish_request(self): - if self.no_keep_alive or self._request is None: - disconnect = True + if stream.socket is not None: + self.address_family = stream.socket.family else: - connection_header = self._request.headers.get("Connection") - if connection_header is not None: - connection_header = connection_header.lower() - if self._request.supports_http_1_1(): - disconnect = connection_header == "close" - elif ("Content-Length" in self._request.headers - or self._request.method in ("HEAD", "GET")): - disconnect = connection_header != "keep-alive" - else: - disconnect = True - self._clear_request_state() - if disconnect: - self.close() - return - try: - # Use a try/except instead of checking stream.closed() - # directly, because in some cases the stream doesn't discover - # that it's closed until you try to read from it. - self.stream.read_until(b"\r\n\r\n", self._header_callback) - - # Turn Nagle's algorithm back on, leaving the stream in its - # default state for the next request. 
- self.stream.set_nodelay(False) - except iostream.StreamClosedError: - self.close() - - def _on_headers(self, data): - try: - data = native_str(data.decode('latin1')) - eol = data.find("\r\n") - start_line = data[:eol] - try: - method, uri, version = start_line.split(" ") - except ValueError: - raise _BadRequestException("Malformed HTTP request line") - if not version.startswith("HTTP/"): - raise _BadRequestException("Malformed HTTP version in HTTP Request-Line") - try: - headers = httputil.HTTPHeaders.parse(data[eol:]) - except ValueError: - # Probably from split() if there was no ':' in the line - raise _BadRequestException("Malformed HTTP headers") - - # HTTPRequest wants an IP, not a full socket address - if self.address_family in (socket.AF_INET, socket.AF_INET6): - remote_ip = self.address[0] - else: - # Unix (or other) socket; fake the remote address - remote_ip = '0.0.0.0' - - self._request = HTTPRequest( - connection=self, method=method, uri=uri, version=version, - headers=headers, remote_ip=remote_ip, protocol=self.protocol) - - content_length = headers.get("Content-Length") - if content_length: - content_length = int(content_length) - if content_length > self.stream.max_buffer_size: - raise _BadRequestException("Content-Length too long") - if headers.get("Expect") == "100-continue": - self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n") - self.stream.read_bytes(content_length, self._on_request_body) - return - - self.request_callback(self._request) - except _BadRequestException as e: - gen_log.info("Malformed HTTP request from %r: %s", - self.address, e) - self.close() - return - - def _on_request_body(self, data): - self._request.body = data - if self._request.method in ("POST", "PATCH", "PUT"): - httputil.parse_body_arguments( - self._request.headers.get("Content-Type", ""), data, - self._request.body_arguments, self._request.files) - - for k, v in self._request.body_arguments.items(): - self._request.arguments.setdefault(k, []).extend(v) - 
self.request_callback(self._request) - - -class HTTPRequest(object): - """A single HTTP request. - - All attributes are type `str` unless otherwise noted. - - .. attribute:: method - - HTTP request method, e.g. "GET" or "POST" - - .. attribute:: uri - - The requested uri. - - .. attribute:: path - - The path portion of `uri` - - .. attribute:: query - - The query portion of `uri` - - .. attribute:: version - - HTTP version specified in request, e.g. "HTTP/1.1" - - .. attribute:: headers - - `.HTTPHeaders` dictionary-like object for request headers. Acts like - a case-insensitive dictionary with additional methods for repeated - headers. - - .. attribute:: body - - Request body, if present, as a byte string. - - .. attribute:: remote_ip - - Client's IP address as a string. If ``HTTPServer.xheaders`` is set, - will pass along the real IP address provided by a load balancer - in the ``X-Real-Ip`` or ``X-Forwarded-For`` header. - - .. versionchanged:: 3.1 - The list format of ``X-Forwarded-For`` is now supported. - - .. attribute:: protocol - - The protocol used, either "http" or "https". If ``HTTPServer.xheaders`` - is set, will pass along the protocol used by a load balancer if - reported via an ``X-Scheme`` header. - - .. attribute:: host - - The requested hostname, usually taken from the ``Host`` header. - - .. attribute:: arguments - - GET/POST arguments are available in the arguments property, which - maps arguments names to lists of values (to support multiple values - for individual names). Names are of type `str`, while arguments - are byte strings. Note that this is different from - `.RequestHandler.get_argument`, which returns argument values as - unicode strings. - - .. attribute:: query_arguments - - Same format as ``arguments``, but contains only arguments extracted - from the query string. - - .. versionadded:: 3.2 - - .. attribute:: body_arguments - - Same format as ``arguments``, but contains only arguments extracted - from the request body. - - .. 
versionadded:: 3.2 - - .. attribute:: files - - File uploads are available in the files property, which maps file - names to lists of `.HTTPFile`. - - .. attribute:: connection - - An HTTP request is attached to a single HTTP connection, which can - be accessed through the "connection" attribute. Since connections - are typically kept open in HTTP/1.1, multiple requests can be handled - sequentially on a single connection. - """ - def __init__(self, method, uri, version="HTTP/1.0", headers=None, - body=None, remote_ip=None, protocol=None, host=None, - files=None, connection=None): - self.method = method - self.uri = uri - self.version = version - self.headers = headers or httputil.HTTPHeaders() - self.body = body or "" - - # set remote IP and protocol - self.remote_ip = remote_ip + self.address_family = None + # In HTTPServerRequest we want an IP, not a full socket address. + if (self.address_family in (socket.AF_INET, socket.AF_INET6) and + address is not None): + self.remote_ip = address[0] + else: + # Unix (or other) socket; fake the remote address. 
+ self.remote_ip = '0.0.0.0' if protocol: self.protocol = protocol - elif connection and isinstance(connection.stream, - iostream.SSLIOStream): + elif isinstance(stream, iostream.SSLIOStream): self.protocol = "https" else: self.protocol = "http" + self._orig_remote_ip = self.remote_ip + self._orig_protocol = self.protocol - # xheaders can override the defaults - if connection and connection.xheaders: - # Squid uses X-Forwarded-For, others use X-Real-Ip - ip = self.headers.get("X-Forwarded-For", self.remote_ip) - ip = ip.split(',')[-1].strip() - ip = self.headers.get( - "X-Real-Ip", ip) - if netutil.is_valid_ip(ip): - self.remote_ip = ip - # AWS uses X-Forwarded-Proto - proto = self.headers.get( - "X-Scheme", self.headers.get("X-Forwarded-Proto", self.protocol)) - if proto in ("http", "https"): - self.protocol = proto + def __str__(self): + if self.address_family in (socket.AF_INET, socket.AF_INET6): + return self.remote_ip + elif isinstance(self.address, bytes): + # Python 3 with the -bb option warns about str(bytes), + # so convert it explicitly. + # Unix socket addresses are str on mac but bytes on linux. + return native_str(self.address) + else: + return str(self.address) - self.host = host or self.headers.get("Host") or "127.0.0.1" - self.files = files or {} + def _apply_xheaders(self, headers): + """Rewrite the ``remote_ip`` and ``protocol`` fields.""" + # Squid uses X-Forwarded-For, others use X-Real-Ip + ip = headers.get("X-Forwarded-For", self.remote_ip) + ip = ip.split(',')[-1].strip() + ip = headers.get("X-Real-Ip", ip) + if netutil.is_valid_ip(ip): + self.remote_ip = ip + # AWS uses X-Forwarded-Proto + proto_header = headers.get( + "X-Scheme", headers.get("X-Forwarded-Proto", + self.protocol)) + if proto_header in ("http", "https"): + self.protocol = proto_header + + def _unapply_xheaders(self): + """Undo changes from `_apply_xheaders`. + + Xheaders are per-request so they should not leak to the next + request on the same connection. 
+ """ + self.remote_ip = self._orig_remote_ip + self.protocol = self._orig_protocol + + +class _ServerRequestAdapter(httputil.HTTPMessageDelegate): + """Adapts the `HTTPMessageDelegate` interface to the interface expected + by our clients. + """ + def __init__(self, server, connection): + self.server = server self.connection = connection - self._start_time = time.time() - self._finish_time = None + self.request = None + if isinstance(server.request_callback, + httputil.HTTPServerConnectionDelegate): + self.delegate = server.request_callback.start_request(connection) + self._chunks = None + else: + self.delegate = None + self._chunks = [] - self.path, sep, self.query = uri.partition('?') - self.arguments = parse_qs_bytes(self.query, keep_blank_values=True) - self.query_arguments = copy.deepcopy(self.arguments) - self.body_arguments = {} + def headers_received(self, start_line, headers): + if self.server.xheaders: + self.connection.context._apply_xheaders(headers) + if self.delegate is None: + self.request = httputil.HTTPServerRequest( + connection=self.connection, start_line=start_line, + headers=headers) + else: + return self.delegate.headers_received(start_line, headers) - def supports_http_1_1(self): - """Returns True if this request supports HTTP/1.1 semantics""" - return self.version == "HTTP/1.1" - - @property - def cookies(self): - """A dictionary of Cookie.Morsel objects.""" - if not hasattr(self, "_cookies"): - self._cookies = Cookie.SimpleCookie() - if "Cookie" in self.headers: - try: - self._cookies.load( - native_str(self.headers["Cookie"])) - except Exception: - self._cookies = {} - return self._cookies - - def write(self, chunk, callback=None): - """Writes the given chunk to the response stream.""" - assert isinstance(chunk, bytes_type) - self.connection.write(chunk, callback=callback) + def data_received(self, chunk): + if self.delegate is None: + self._chunks.append(chunk) + else: + return self.delegate.data_received(chunk) def finish(self): - 
"""Finishes this HTTP request on the open connection.""" - self.connection.finish() - self._finish_time = time.time() - - def full_url(self): - """Reconstructs the full URL for this request.""" - return self.protocol + "://" + self.host + self.uri - - def request_time(self): - """Returns the amount of time it took for this request to execute.""" - if self._finish_time is None: - return time.time() - self._start_time + if self.delegate is None: + self.request.body = b''.join(self._chunks) + self.request._parse_body() + self.server.request_callback(self.request) else: - return self._finish_time - self._start_time + self.delegate.finish() + self._cleanup() - def get_ssl_certificate(self, binary_form=False): - """Returns the client's SSL certificate, if any. + def on_connection_close(self): + if self.delegate is None: + self._chunks = None + else: + self.delegate.on_connection_close() + self._cleanup() - To use client certificates, the HTTPServer must have been constructed - with cert_reqs set in ssl_options, e.g.:: + def _cleanup(self): + if self.server.xheaders: + self.connection.context._unapply_xheaders() - server = HTTPServer(app, - ssl_options=dict( - certfile="foo.crt", - keyfile="foo.key", - cert_reqs=ssl.CERT_REQUIRED, - ca_certs="cacert.crt")) - By default, the return value is a dictionary (or None, if no - client certificate is present). If ``binary_form`` is true, a - DER-encoded form of the certificate is returned instead. See - SSLSocket.getpeercert() in the standard library for more - details. 
- http://docs.python.org/library/ssl.html#sslsocket-objects - """ - try: - return self.connection.stream.socket.getpeercert( - binary_form=binary_form) - except ssl.SSLError: - return None - - def __repr__(self): - attrs = ("protocol", "host", "method", "uri", "version", "remote_ip") - args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs]) - return "%s(%s, headers=%s)" % ( - self.__class__.__name__, args, dict(self.headers)) +HTTPRequest = httputil.HTTPServerRequest diff --git a/libs/tornado/httputil.py b/libs/tornado/httputil.py index 2575bc56..6e110d90 100755 --- a/libs/tornado/httputil.py +++ b/libs/tornado/httputil.py @@ -14,20 +14,31 @@ # License for the specific language governing permissions and limitations # under the License. -"""HTTP utility code shared by clients and servers.""" +"""HTTP utility code shared by clients and servers. + +This module also defines the `HTTPServerRequest` class which is exposed +via `tornado.web.RequestHandler.request`. +""" from __future__ import absolute_import, division, print_function, with_statement import calendar import collections +import copy import datetime import email.utils import numbers +import re import time from tornado.escape import native_str, parse_qs_bytes, utf8 from tornado.log import gen_log -from tornado.util import ObjectDict +from tornado.util import ObjectDict, bytes_type + +try: + import Cookie # py2 +except ImportError: + import http.cookies as Cookie # py3 try: from httplib import responses # py2 @@ -43,6 +54,13 @@ try: except ImportError: from urllib.parse import urlencode # py3 +try: + from ssl import SSLError +except ImportError: + # ssl is unavailable on app engine. + class SSLError(Exception): + pass + class _NormalizedHeaderCache(dict): """Dynamic cached mapping of header names to Http-Header-Case. @@ -212,6 +230,337 @@ class HTTPHeaders(dict): return HTTPHeaders(self) +class HTTPServerRequest(object): + """A single HTTP request. 
+ + All attributes are type `str` unless otherwise noted. + + .. attribute:: method + + HTTP request method, e.g. "GET" or "POST" + + .. attribute:: uri + + The requested uri. + + .. attribute:: path + + The path portion of `uri` + + .. attribute:: query + + The query portion of `uri` + + .. attribute:: version + + HTTP version specified in request, e.g. "HTTP/1.1" + + .. attribute:: headers + + `.HTTPHeaders` dictionary-like object for request headers. Acts like + a case-insensitive dictionary with additional methods for repeated + headers. + + .. attribute:: body + + Request body, if present, as a byte string. + + .. attribute:: remote_ip + + Client's IP address as a string. If ``HTTPServer.xheaders`` is set, + will pass along the real IP address provided by a load balancer + in the ``X-Real-Ip`` or ``X-Forwarded-For`` header. + + .. versionchanged:: 3.1 + The list format of ``X-Forwarded-For`` is now supported. + + .. attribute:: protocol + + The protocol used, either "http" or "https". If ``HTTPServer.xheaders`` + is set, will pass along the protocol used by a load balancer if + reported via an ``X-Scheme`` header. + + .. attribute:: host + + The requested hostname, usually taken from the ``Host`` header. + + .. attribute:: arguments + + GET/POST arguments are available in the arguments property, which + maps arguments names to lists of values (to support multiple values + for individual names). Names are of type `str`, while arguments + are byte strings. Note that this is different from + `.RequestHandler.get_argument`, which returns argument values as + unicode strings. + + .. attribute:: query_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the query string. + + .. versionadded:: 3.2 + + .. attribute:: body_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the request body. + + .. versionadded:: 3.2 + + .. 
attribute:: files + + File uploads are available in the files property, which maps file + names to lists of `.HTTPFile`. + + .. attribute:: connection + + An HTTP request is attached to a single HTTP connection, which can + be accessed through the "connection" attribute. Since connections + are typically kept open in HTTP/1.1, multiple requests can be handled + sequentially on a single connection. + + .. versionchanged:: 3.3 + Moved from ``tornado.httpserver.HTTPRequest``. + """ + def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None, + body=None, host=None, files=None, connection=None, + start_line=None): + if start_line is not None: + method, uri, version = start_line + self.method = method + self.uri = uri + self.version = version + self.headers = headers or HTTPHeaders() + self.body = body or b"" + + # set remote IP and protocol + context = getattr(connection, 'context', None) + self.remote_ip = getattr(context, 'remote_ip', None) + self.protocol = getattr(context, 'protocol', "http") + + self.host = host or self.headers.get("Host") or "127.0.0.1" + self.files = files or {} + self.connection = connection + self._start_time = time.time() + self._finish_time = None + + self.path, sep, self.query = uri.partition('?') + self.arguments = parse_qs_bytes(self.query, keep_blank_values=True) + self.query_arguments = copy.deepcopy(self.arguments) + self.body_arguments = {} + + def supports_http_1_1(self): + """Returns True if this request supports HTTP/1.1 semantics. + + .. deprecated:: 3.3 + Applications are less likely to need this information with the + introduction of `.HTTPConnection`. If you still need it, access + the ``version`` attribute directly. 
+ """ + return self.version == "HTTP/1.1" + + @property + def cookies(self): + """A dictionary of Cookie.Morsel objects.""" + if not hasattr(self, "_cookies"): + self._cookies = Cookie.SimpleCookie() + if "Cookie" in self.headers: + try: + self._cookies.load( + native_str(self.headers["Cookie"])) + except Exception: + self._cookies = {} + return self._cookies + + def write(self, chunk, callback=None): + """Writes the given chunk to the response stream. + + .. deprecated:: 3.3 + Use ``request.connection`` and the `.HTTPConnection` methods + to write the response. + """ + assert isinstance(chunk, bytes_type) + self.connection.write(chunk, callback=callback) + + def finish(self): + """Finishes this HTTP request on the open connection. + + .. deprecated:: 3.3 + Use ``request.connection`` and the `.HTTPConnection` methods + to write the response. + """ + self.connection.finish() + self._finish_time = time.time() + + def full_url(self): + """Reconstructs the full URL for this request.""" + return self.protocol + "://" + self.host + self.uri + + def request_time(self): + """Returns the amount of time it took for this request to execute.""" + if self._finish_time is None: + return time.time() - self._start_time + else: + return self._finish_time - self._start_time + + def get_ssl_certificate(self, binary_form=False): + """Returns the client's SSL certificate, if any. + + To use client certificates, the HTTPServer must have been constructed + with cert_reqs set in ssl_options, e.g.:: + + server = HTTPServer(app, + ssl_options=dict( + certfile="foo.crt", + keyfile="foo.key", + cert_reqs=ssl.CERT_REQUIRED, + ca_certs="cacert.crt")) + + By default, the return value is a dictionary (or None, if no + client certificate is present). If ``binary_form`` is true, a + DER-encoded form of the certificate is returned instead. See + SSLSocket.getpeercert() in the standard library for more + details. 
+ http://docs.python.org/library/ssl.html#sslsocket-objects + """ + try: + return self.connection.stream.socket.getpeercert( + binary_form=binary_form) + except SSLError: + return None + + def _parse_body(self): + parse_body_arguments( + self.headers.get("Content-Type", ""), self.body, + self.body_arguments, self.files, + self.headers) + + for k, v in self.body_arguments.items(): + self.arguments.setdefault(k, []).extend(v) + + def __repr__(self): + attrs = ("protocol", "host", "method", "uri", "version", "remote_ip") + args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs]) + return "%s(%s, headers=%s)" % ( + self.__class__.__name__, args, dict(self.headers)) + + +class HTTPInputException(Exception): + """Exception class for malformed HTTP requests or responses + from remote sources. + + .. versionadded:: 3.3 + """ + pass + + +class HTTPOutputException(Exception): + """Exception class for errors in HTTP output. + + .. versionadded:: 3.3 + """ + pass + + +class HTTPServerConnectionDelegate(object): + """Implement this interface to handle requests from `.HTTPServer`. + + .. versionadded:: 3.3 + """ + def start_request(self, server_conn, request_conn): + """This method is called by the server when a new request has started. + + :arg server_conn: is an opaque object representing the long-lived + (e.g. tcp-level) connection. + :arg request_conn: is a `.HTTPConnection` object for a single + request/response exchange. + + This method should return a `.HTTPMessageDelegate`. + """ + raise NotImplementedError() + + def on_close(self, server_conn): + """This method is called when a connection has been closed. + + :arg server_conn: is a server connection that has previously been + passed to ``start_request``. + """ + pass + + +class HTTPMessageDelegate(object): + """Implement this interface to handle an HTTP request or response. + + .. 
versionadded:: 3.3 + """ + def headers_received(self, start_line, headers): + """Called when the HTTP headers have been received and parsed. + + :arg start_line: a `.RequestStartLine` or `.ResponseStartLine` + depending on whether this is a client or server message. + :arg headers: a `.HTTPHeaders` instance. + + Some `.HTTPConnection` methods can only be called during + ``headers_received``. + + May return a `.Future`; if it does the body will not be read + until it is done. + """ + pass + + def data_received(self, chunk): + """Called when a chunk of data has been received. + + May return a `.Future` for flow control. + """ + pass + + def finish(self): + """Called after the last chunk of data has been received.""" + pass + + def on_connection_close(self): + """Called if the connection is closed without finishing the request. + + If ``headers_received`` is called, either ``finish`` or + ``on_connection_close`` will be called, but not both. + """ + pass + + +class HTTPConnection(object): + """Applications use this interface to write their responses. + + .. versionadded:: 3.3 + """ + def write_headers(self, start_line, headers, chunk=None, callback=None): + """Write an HTTP header block. + + :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`. + :arg headers: a `.HTTPHeaders` instance. + :arg chunk: the first (optional) chunk of data. This is an optimization + so that small responses can be written in the same call as their + headers. + :arg callback: a callback to be run when the write is complete. + + Returns a `.Future` if no callback is given. + """ + raise NotImplementedError() + + def write(self, chunk, callback=None): + """Writes a chunk of body data. + + The callback will be run when the write is complete. If no callback + is given, returns a Future. + """ + raise NotImplementedError() + + def finish(self): + """Indicates that the last body data has been written. 
+ """ + raise NotImplementedError() + + def url_concat(url, args): """Concatenate url and argument dictionary regardless of whether url has existing query parameters. @@ -310,7 +659,7 @@ def _int_or_none(val): return int(val) -def parse_body_arguments(content_type, body, arguments, files): +def parse_body_arguments(content_type, body, arguments, files, headers=None): """Parses a form request body. Supports ``application/x-www-form-urlencoded`` and @@ -319,6 +668,10 @@ def parse_body_arguments(content_type, body, arguments, files): and ``files`` parameters are dictionaries that will be updated with the parsed contents. """ + if headers and 'Content-Encoding' in headers: + gen_log.warning("Unsupported Content-Encoding: %s", + headers['Content-Encoding']) + return if content_type.startswith("application/x-www-form-urlencoded"): try: uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True) @@ -405,6 +758,48 @@ def format_timestamp(ts): raise TypeError("unknown timestamp type: %r" % ts) return email.utils.formatdate(ts, usegmt=True) + +RequestStartLine = collections.namedtuple( + 'RequestStartLine', ['method', 'path', 'version']) + + +def parse_request_start_line(line): + """Returns a (method, path, version) tuple for an HTTP 1.x request line. + + The response is a `collections.namedtuple`. + + >>> parse_request_start_line("GET /foo HTTP/1.1") + RequestStartLine(method='GET', path='/foo', version='HTTP/1.1') + """ + try: + method, path, version = line.split(" ") + except ValueError: + raise HTTPInputException("Malformed HTTP request line") + if not version.startswith("HTTP/"): + raise HTTPInputException( + "Malformed HTTP version in HTTP Request-Line: %r" % version) + return RequestStartLine(method, path, version) + + +ResponseStartLine = collections.namedtuple( + 'ResponseStartLine', ['version', 'code', 'reason']) + + +def parse_response_start_line(line): + """Returns a (version, code, reason) tuple for an HTTP 1.x response line. 
+ + The response is a `collections.namedtuple`. + + >>> parse_response_start_line("HTTP/1.1 200 OK") + ResponseStartLine(version='HTTP/1.1', code=200, reason='OK') + """ + line = native_str(line) + match = re.match("(HTTP/1.[01]) ([0-9]+) ([^\r]*)", line) + if not match: + raise HTTPInputException("Error parsing response start line") + return ResponseStartLine(match.group(1), int(match.group(2)), + match.group(3)) + # _parseparam and _parse_header are copied and modified from python2.7's cgi.py # The original 2.7 version of this code did not correctly support some # combinations of semicolons and double quotes. diff --git a/libs/tornado/ioloop.py b/libs/tornado/ioloop.py index e7b84dd7..cd59bfee 100755 --- a/libs/tornado/ioloop.py +++ b/libs/tornado/ioloop.py @@ -32,6 +32,7 @@ import datetime import errno import functools import heapq +import itertools import logging import numbers import os @@ -41,10 +42,11 @@ import threading import time import traceback -from tornado.concurrent import Future, TracebackFuture +from tornado.concurrent import TracebackFuture, is_future from tornado.log import app_log, gen_log from tornado import stack_context from tornado.util import Configurable +from tornado.util import errno_from_exception try: import signal @@ -156,6 +158,15 @@ class IOLoop(Configurable): assert not IOLoop.initialized() IOLoop._instance = self + @staticmethod + def clear_instance(): + """Clear the global `IOLoop` instance. + + .. versionadded:: 3.3 + """ + if hasattr(IOLoop, "_instance"): + del IOLoop._instance + @staticmethod def current(): """Returns the current thread's `IOLoop`. @@ -244,21 +255,40 @@ class IOLoop(Configurable): raise NotImplementedError() def add_handler(self, fd, handler, events): - """Registers the given handler to receive the given events for fd. + """Registers the given handler to receive the given events for ``fd``. 
+ + The ``fd`` argument may either be an integer file descriptor or + a file-like object with a ``fileno()`` method (and optionally a + ``close()`` method, which may be called when the `IOLoop` is shut + down). The ``events`` argument is a bitwise or of the constants ``IOLoop.READ``, ``IOLoop.WRITE``, and ``IOLoop.ERROR``. When an event occurs, ``handler(fd, events)`` will be run. + + .. versionchanged:: 3.3 + Added the ability to pass file-like objects in addition to + raw file descriptors. """ raise NotImplementedError() def update_handler(self, fd, events): - """Changes the events we listen for fd.""" + """Changes the events we listen for ``fd``. + + .. versionchanged:: 3.3 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ raise NotImplementedError() def remove_handler(self, fd): - """Stop listening for events on fd.""" + """Stop listening for events on ``fd``. + + .. versionchanged:: 3.3 + Added the ability to pass file-like objects in addition to + raw file descriptors. + """ raise NotImplementedError() def set_blocking_signal_threshold(self, seconds, action): @@ -372,7 +402,7 @@ class IOLoop(Configurable): future_cell[0] = TracebackFuture() future_cell[0].set_exc_info(sys.exc_info()) else: - if isinstance(result, Future): + if is_future(result): future_cell[0] = result else: future_cell[0] = TracebackFuture() @@ -463,7 +493,7 @@ class IOLoop(Configurable): The callback is invoked with one argument, the `.Future`. """ - assert isinstance(future, Future) + assert is_future(future) callback = stack_context.wrap(callback) future.add_done_callback( lambda future: self.add_callback(callback, future)) @@ -490,6 +520,47 @@ class IOLoop(Configurable): """ app_log.error("Exception in callback %r", callback, exc_info=True) + def split_fd(self, fd): + """Returns an (fd, obj) pair from an ``fd`` parameter. + + We accept both raw file descriptors and file-like objects as + input to `add_handler` and related methods. 
When a file-like + object is passed, we must retain the object itself so we can + close it correctly when the `IOLoop` shuts down, but the + poller interfaces favor file descriptors (they will accept + file-like objects and call ``fileno()`` for you, but they + always return the descriptor itself). + + This method is provided for use by `IOLoop` subclasses and should + not generally be used by application code. + + .. versionadded:: 3.3 + """ + try: + return fd.fileno(), fd + except AttributeError: + return fd, fd + + def close_fd(self, fd): + """Utility method to close an ``fd``. + + If ``fd`` is a file-like object, we close it directly; otherwise + we use `os.close`. + + This method is provided for use by `IOLoop` subclasses (in + implementations of ``IOLoop.close(all_fds=True)`` and should + not generally be used by application code. + + .. versionadded:: 3.3 + """ + try: + try: + fd.close() + except AttributeError: + os.close(fd) + except OSError: + pass + class PollIOLoop(IOLoop): """Base class for IOLoops built around a select-like function. 
@@ -515,7 +586,8 @@ class PollIOLoop(IOLoop): self._closing = False self._thread_ident = None self._blocking_signal_threshold = None - + self._timeout_counter = itertools.count() + # Create a pipe that we send bogus data to when we want to wake # the I/O loop when it is idle self._waker = Waker() @@ -528,26 +600,24 @@ class PollIOLoop(IOLoop): self._closing = True self.remove_handler(self._waker.fileno()) if all_fds: - for fd in self._handlers.keys(): - try: - close_method = getattr(fd, 'close', None) - if close_method is not None: - close_method() - else: - os.close(fd) - except Exception: - gen_log.debug("error closing fd %s", fd, exc_info=True) + for fd, handler in self._handlers.values(): + self.close_fd(fd) self._waker.close() self._impl.close() + self._callbacks = None + self._timeouts = None def add_handler(self, fd, handler, events): - self._handlers[fd] = stack_context.wrap(handler) + fd, obj = self.split_fd(fd) + self._handlers[fd] = (obj, stack_context.wrap(handler)) self._impl.register(fd, events | self.ERROR) def update_handler(self, fd, events): + fd, obj = self.split_fd(fd) self._impl.modify(fd, events | self.ERROR) def remove_handler(self, fd): + fd, obj = self.split_fd(fd) self._handlers.pop(fd, None) self._events.pop(fd, None) try: @@ -566,6 +636,8 @@ class PollIOLoop(IOLoop): action if action is not None else signal.SIG_DFL) def start(self): + if self._running: + raise RuntimeError("IOLoop is already running") self._setup_logging() if self._stopped: self._stopped = False @@ -666,9 +738,7 @@ class PollIOLoop(IOLoop): # two ways EINTR might be signaled: # * e.errno == errno.EINTR # * e.args is like (errno.EINTR, 'Interrupted system call') - if (getattr(e, 'errno', None) == errno.EINTR or - (isinstance(getattr(e, 'args', None), tuple) and - len(e.args) == 2 and e.args[0] == errno.EINTR)): + if errno_from_exception(e) == errno.EINTR: continue else: raise @@ -685,15 +755,17 @@ class PollIOLoop(IOLoop): while self._events: fd, events = 
self._events.popitem() try: - self._handlers[fd](fd, events) + fd_obj, handler_func = self._handlers[fd] + handler_func(fd_obj, events) except (OSError, IOError) as e: - if e.args[0] == errno.EPIPE: + if errno_from_exception(e) == errno.EPIPE: # Happens when the client closes the connection pass else: self.handle_callback_exception(self._handlers.get(fd)) except Exception: self.handle_callback_exception(self._handlers.get(fd)) + fd_obj = handler_func = None finally: # reset the stopped flag so another start/stop pair can be issued @@ -765,16 +837,21 @@ class _Timeout(object): """An IOLoop timeout, a UNIX timestamp and a callback""" # Reduce memory overhead when there are lots of pending callbacks - __slots__ = ['deadline', 'callback'] + __slots__ = ['deadline', 'callback', 'tiebreaker'] def __init__(self, deadline, callback, io_loop): if isinstance(deadline, numbers.Real): self.deadline = deadline elif isinstance(deadline, datetime.timedelta): - self.deadline = io_loop.time() + _Timeout.timedelta_to_seconds(deadline) + now = io_loop.time() + try: + self.deadline = now + deadline.total_seconds() + except AttributeError: # py2.6 + self.deadline = now + _Timeout.timedelta_to_seconds(deadline) else: raise TypeError("Unsupported deadline %r" % deadline) self.callback = callback + self.tiebreaker = next(io_loop._timeout_counter) @staticmethod def timedelta_to_seconds(td): @@ -786,12 +863,12 @@ class _Timeout(object): # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons # use __lt__). 
def __lt__(self, other): - return ((self.deadline, id(self)) < - (other.deadline, id(other))) + return ((self.deadline, self.tiebreaker) < + (other.deadline, other.tiebreaker)) def __le__(self, other): - return ((self.deadline, id(self)) <= - (other.deadline, id(other))) + return ((self.deadline, self.tiebreaker) <= + (other.deadline, other.tiebreaker)) class PeriodicCallback(object): diff --git a/libs/tornado/iostream.py b/libs/tornado/iostream.py index 5d4d08ac..3874bf75 100755 --- a/libs/tornado/iostream.py +++ b/libs/tornado/iostream.py @@ -31,21 +31,27 @@ import errno import numbers import os import socket -import ssl import sys import re +from tornado.concurrent import TracebackFuture from tornado import ioloop from tornado.log import gen_log, app_log from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError from tornado import stack_context -from tornado.util import bytes_type +from tornado.util import bytes_type, errno_from_exception try: from tornado.platform.posix import _set_nonblocking except ImportError: _set_nonblocking = None +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine + ssl = None + # These errnos indicate that a non-blocking operation must be retried # at a later time. On most platforms they're the same value, but on # some they differ. @@ -66,12 +72,31 @@ class StreamClosedError(IOError): pass +class UnsatisfiableReadError(Exception): + """Exception raised when a read cannot be satisfied. + + Raised by ``read_until`` and ``read_until_regex`` with a ``max_bytes`` + argument. + """ + pass + + +class StreamBufferFullError(Exception): + """Exception raised by `IOStream` methods when the buffer is full. + """ + + class BaseIOStream(object): """A utility class to write to and read from a non-blocking file or socket. We support a non-blocking ``write()`` and a family of ``read_*()`` methods. 
- All of the methods take callbacks (since writing and reading are - non-blocking and asynchronous). + All of the methods take an optional ``callback`` argument and return a + `.Future` only if no callback is given. When the operation completes, + the callback will be run or the `.Future` will resolve with the data + read (or ``None`` for ``write()``). All outstanding ``Futures`` will + resolve with a `StreamClosedError` when the stream is closed; users + of the callback interface will be notified via + `.BaseIOStream.set_close_callback` instead. When a stream is closed due to an error, the IOStream's ``error`` attribute contains the exception object. @@ -80,24 +105,48 @@ class BaseIOStream(object): `read_from_fd`, and optionally `get_fd_error`. """ def __init__(self, io_loop=None, max_buffer_size=None, - read_chunk_size=4096): + read_chunk_size=None, max_write_buffer_size=None): + """`BaseIOStream` constructor. + + :arg io_loop: The `.IOLoop` to use; defaults to `.IOLoop.current`. + :arg max_buffer_size: Maximum amount of incoming data to buffer; + defaults to 100MB. + :arg read_chunk_size: Amount of data to read at one time from the + underlying transport; defaults to 64KB. + :arg max_write_buffer_size: Amount of outgoing data to buffer; + defaults to unlimited. + + .. versionchanged:: 3.3 + Add the ``max_write_buffer_size`` parameter. Changed default + ``read_chunk_size`` to 64KB. + """ self.io_loop = io_loop or ioloop.IOLoop.current() self.max_buffer_size = max_buffer_size or 104857600 - self.read_chunk_size = read_chunk_size + # A chunk size that is too close to max_buffer_size can cause + # spurious failures. 
+ self.read_chunk_size = min(read_chunk_size or 65536, + self.max_buffer_size // 2) + self.max_write_buffer_size = max_write_buffer_size self.error = None self._read_buffer = collections.deque() self._write_buffer = collections.deque() self._read_buffer_size = 0 + self._write_buffer_size = 0 self._write_buffer_frozen = False self._read_delimiter = None self._read_regex = None + self._read_max_bytes = None self._read_bytes = None + self._read_partial = False self._read_until_close = False self._read_callback = None + self._read_future = None self._streaming_callback = None self._write_callback = None + self._write_future = None self._close_callback = None self._connect_callback = None + self._connect_future = None self._connecting = False self._state = None self._pending_callbacks = 0 @@ -142,98 +191,162 @@ class BaseIOStream(object): """ return None - def read_until_regex(self, regex, callback): - """Run ``callback`` when we read the given regex pattern. + def read_until_regex(self, regex, callback=None, max_bytes=None): + """Asynchronously read until we have matched the given regex. - The callback will get the data read (including the data that - matched the regex and anything that came before it) as an argument. + The result includes the data that matches the regex and anything + that came before it. If a callback is given, it will be run + with the data as an argument; if not, this method returns a + `.Future`. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the regex is + not satisfied. + + .. versionchanged:: 3.3 + Added the ``max_bytes`` argument. The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. 
""" - self._set_read_callback(callback) + future = self._set_read_callback(callback) self._read_regex = re.compile(regex) - self._try_inline_read() + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + return future + return future - def read_until(self, delimiter, callback): - """Run ``callback`` when we read the given delimiter. + def read_until(self, delimiter, callback=None, max_bytes=None): + """Asynchronously read until we have found the given delimiter. - The callback will get the data read (including the delimiter) - as an argument. + The result includes all the data read including the delimiter. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + If ``max_bytes`` is not None, the connection will be closed + if more than ``max_bytes`` bytes have been read and the delimiter + is not found. + + .. versionchanged:: 3.3 + Added the ``max_bytes`` argument. The ``callback`` argument is + now optional and a `.Future` will be returned if it is omitted. """ - self._set_read_callback(callback) + future = self._set_read_callback(callback) self._read_delimiter = delimiter - self._try_inline_read() + self._read_max_bytes = max_bytes + try: + self._try_inline_read() + except UnsatisfiableReadError as e: + # Handle this the same way as in _handle_events. + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) + return future + return future - def read_bytes(self, num_bytes, callback, streaming_callback=None): - """Run callback when we read the given number of bytes. + def read_bytes(self, num_bytes, callback=None, streaming_callback=None, + partial=False): + """Asynchronously read a number of bytes. 
If a ``streaming_callback`` is given, it will be called with chunks - of data as they become available, and the argument to the final - ``callback`` will be empty. Otherwise, the ``callback`` gets - the data as an argument. + of data as they become available, and the final result will be empty. + Otherwise, the result is all the data that was read. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. + + If ``partial`` is true, the callback is run as soon as we have + any bytes to return (but never more than ``num_bytes``) + + .. versionchanged:: 3.3 + Added the ``partial`` argument. The callback argument is now + optional and a `.Future` will be returned if it is omitted. """ - self._set_read_callback(callback) + future = self._set_read_callback(callback) assert isinstance(num_bytes, numbers.Integral) self._read_bytes = num_bytes + self._read_partial = partial self._streaming_callback = stack_context.wrap(streaming_callback) self._try_inline_read() + return future - def read_until_close(self, callback, streaming_callback=None): - """Reads all data from the socket until it is closed. + def read_until_close(self, callback=None, streaming_callback=None): + """Asynchronously reads all data from the socket until it is closed. If a ``streaming_callback`` is given, it will be called with chunks - of data as they become available, and the argument to the final - ``callback`` will be empty. Otherwise, the ``callback`` gets the - data as an argument. + of data as they become available, and the final result will be empty. + Otherwise, the result is all the data that was read. + If a callback is given, it will be run with the data as an argument; + if not, this method returns a `.Future`. - Subject to ``max_buffer_size`` limit from `IOStream` constructor if - a ``streaming_callback`` is not used. + .. versionchanged:: 3.3 + The callback argument is now optional and a `.Future` will + be returned if it is omitted. 
""" - self._set_read_callback(callback) + future = self._set_read_callback(callback) self._streaming_callback = stack_context.wrap(streaming_callback) if self.closed(): if self._streaming_callback is not None: - self._run_callback(self._streaming_callback, - self._consume(self._read_buffer_size)) - self._run_callback(self._read_callback, - self._consume(self._read_buffer_size)) - self._streaming_callback = None - self._read_callback = None - return + self._run_read_callback(self._read_buffer_size, True) + self._run_read_callback(self._read_buffer_size, False) + return future self._read_until_close = True - self._streaming_callback = stack_context.wrap(streaming_callback) self._try_inline_read() + return future def write(self, data, callback=None): - """Write the given data to this stream. + """Asynchronously write the given data to this stream. If ``callback`` is given, we call it when all of the buffered write data has been successfully written to the stream. If there was previously buffered write data and an old write callback, that callback is simply overwritten with this new callback. + + If no ``callback`` is given, this method returns a `.Future` that + resolves (with a result of ``None``) when the write has been + completed. If `write` is called again before that `.Future` has + resolved, the previous future will be orphaned and will never resolve. + + .. versionchanged:: 3.3 + Now returns a `.Future` if no callback is given. """ assert isinstance(data, bytes_type) self._check_closed() # We use bool(_write_buffer) as a proxy for write_buffer_size>0, # so never put empty strings in the buffer. if data: + if (self.max_write_buffer_size is not None and + self._write_buffer_size + len(data) > self.max_write_buffer_size): + raise StreamBufferFullError("Reached maximum write buffer size") # Break up large contiguous strings before inserting them in the # write buffer, so we don't have to recopy the entire thing # as we slice off pieces to send to the socket. 
WRITE_BUFFER_CHUNK_SIZE = 128 * 1024 - if len(data) > WRITE_BUFFER_CHUNK_SIZE: - for i in range(0, len(data), WRITE_BUFFER_CHUNK_SIZE): - self._write_buffer.append(data[i:i + WRITE_BUFFER_CHUNK_SIZE]) - else: - self._write_buffer.append(data) - self._write_callback = stack_context.wrap(callback) + for i in range(0, len(data), WRITE_BUFFER_CHUNK_SIZE): + self._write_buffer.append(data[i:i + WRITE_BUFFER_CHUNK_SIZE]) + self._write_buffer_size += len(data) + if callback is not None: + self._write_callback = stack_context.wrap(callback) + future = None + else: + future = self._write_future = TracebackFuture() if not self._connecting: self._handle_write() if self._write_buffer: self._add_io_state(self.io_loop.WRITE) self._maybe_add_error_listener() + return future def set_close_callback(self, callback): - """Call the given callback when the stream is closed.""" + """Call the given callback when the stream is closed. + + This is not necessary for applications that use the `.Future` + interface; all outstanding ``Futures`` will resolve with a + `StreamClosedError` when the stream is closed. + """ self._close_callback = stack_context.wrap(callback) + self._maybe_add_error_listener() def close(self, exc_info=False): """Close this stream. 
@@ -251,13 +364,9 @@ class BaseIOStream(object): if self._read_until_close: if (self._streaming_callback is not None and self._read_buffer_size): - self._run_callback(self._streaming_callback, - self._consume(self._read_buffer_size)) - callback = self._read_callback - self._read_callback = None + self._run_read_callback(self._read_buffer_size, True) self._read_until_close = False - self._run_callback(callback, - self._consume(self._read_buffer_size)) + self._run_read_callback(self._read_buffer_size, False) if self._state is not None: self.io_loop.remove_handler(self.fileno()) self._state = None @@ -269,6 +378,25 @@ class BaseIOStream(object): # If there are pending callbacks, don't run the close callback # until they're done (see _maybe_add_error_handler) if self.closed() and self._pending_callbacks == 0: + futures = [] + if self._read_future is not None: + futures.append(self._read_future) + self._read_future = None + if self._write_future is not None: + futures.append(self._write_future) + self._write_future = None + if self._connect_future is not None: + futures.append(self._connect_future) + self._connect_future = None + for future in futures: + if (isinstance(self.error, (socket.error, IOError)) and + errno_from_exception(self.error) in _ERRNO_CONNRESET): + # Treat connection resets as closed connections so + # clients only have to catch one kind of exception + # to avoid logging. 
+ future.set_exception(StreamClosedError()) + else: + future.set_exception(self.error or StreamClosedError()) if self._close_callback is not None: cb = self._close_callback self._close_callback = None @@ -282,7 +410,7 @@ class BaseIOStream(object): def reading(self): """Returns true if we are currently reading from the stream.""" - return self._read_callback is not None + return self._read_callback is not None or self._read_future is not None def writing(self): """Returns true if we are currently writing to the stream.""" @@ -309,16 +437,22 @@ class BaseIOStream(object): def _handle_events(self, fd, events): if self.closed(): - gen_log.warning("Got events for closed stream %d", fd) + gen_log.warning("Got events for closed stream %s", fd) return try: + if self._connecting: + # Most IOLoops will report a write failed connect + # with the WRITE event, but SelectIOLoop reports a + # READ as well so we must check for connecting before + # either. + self._handle_connect() + if self.closed(): + return if events & self.io_loop.READ: self._handle_read() if self.closed(): return if events & self.io_loop.WRITE: - if self._connecting: - self._handle_connect() self._handle_write() if self.closed(): return @@ -334,13 +468,20 @@ class BaseIOStream(object): state |= self.io_loop.READ if self.writing(): state |= self.io_loop.WRITE - if state == self.io_loop.ERROR: + if state == self.io_loop.ERROR and self._read_buffer_size == 0: + # If the connection is idle, listen for reads too so + # we can tell if the connection is closed. If there is + # data in the read buffer we won't run the close callback + # yet anyway, so we don't need to listen in this case. 
state |= self.io_loop.READ if state != self._state: assert self._state is not None, \ "shouldn't happen: _handle_events without self._state" self._state = state self.io_loop.update_handler(self.fileno(), self._state) + except UnsatisfiableReadError as e: + gen_log.info("Unsatisfiable read, closing connection: %s" % e) + self.close(exc_info=True) except Exception: gen_log.error("Uncaught exception, closing connection.", exc_info=True) @@ -381,42 +522,108 @@ class BaseIOStream(object): self._pending_callbacks += 1 self.io_loop.add_callback(wrapper) + def _read_to_buffer_loop(self): + # This method is called from _handle_read and _try_inline_read. + try: + if self._read_bytes is not None: + target_bytes = self._read_bytes + elif self._read_max_bytes is not None: + target_bytes = self._read_max_bytes + elif self.reading(): + # For read_until without max_bytes, or + # read_until_close, read as much as we can before + # scanning for the delimiter. + target_bytes = None + else: + target_bytes = 0 + next_find_pos = 0 + # Pretend to have a pending callback so that an EOF in + # _read_to_buffer doesn't trigger an immediate close + # callback. At the end of this method we'll either + # estabilsh a real pending callback via + # _read_from_buffer or run the close callback. + # + # We need two try statements here so that + # pending_callbacks is decremented before the `except` + # clause below (which calls `close` and does need to + # trigger the callback) + self._pending_callbacks += 1 + while not self.closed(): + # Read from the socket until we get EWOULDBLOCK or equivalent. + # SSL sockets do some internal buffering, and if the data is + # sitting in the SSL object's buffer select() and friends + # can't see it; the only way to find out if it's there is to + # try to read it. + if self._read_to_buffer() == 0: + break + + self._run_streaming_callback() + + # If we've read all the bytes we can use, break out of + # this loop. 
We can't just call read_from_buffer here + # because of subtle interactions with the + # pending_callback and error_listener mechanisms. + # + # If we've reached target_bytes, we know we're done. + if (target_bytes is not None and + self._read_buffer_size >= target_bytes): + break + + # Otherwise, we need to call the more expensive find_read_pos. + # It's inefficient to do this on every read, so instead + # do it on the first read and whenever the read buffer + # size has doubled. + if self._read_buffer_size >= next_find_pos: + pos = self._find_read_pos() + if pos is not None: + return pos + next_find_pos = self._read_buffer_size * 2 + return self._find_read_pos() + finally: + self._pending_callbacks -= 1 + def _handle_read(self): try: - try: - # Pretend to have a pending callback so that an EOF in - # _read_to_buffer doesn't trigger an immediate close - # callback. At the end of this method we'll either - # estabilsh a real pending callback via - # _read_from_buffer or run the close callback. - # - # We need two try statements here so that - # pending_callbacks is decremented before the `except` - # clause below (which calls `close` and does need to - # trigger the callback) - self._pending_callbacks += 1 - while not self.closed(): - # Read from the socket until we get EWOULDBLOCK or equivalent. - # SSL sockets do some internal buffering, and if the data is - # sitting in the SSL object's buffer select() and friends - # can't see it; the only way to find out if it's there is to - # try to read it. 
- if self._read_to_buffer() == 0: - break - finally: - self._pending_callbacks -= 1 + pos = self._read_to_buffer_loop() + except UnsatisfiableReadError: + raise except Exception: gen_log.warning("error on read", exc_info=True) self.close(exc_info=True) return - if self._read_from_buffer(): + if pos is not None: + self._read_from_buffer(pos) return else: self._maybe_run_close_callback() def _set_read_callback(self, callback): - assert not self._read_callback, "Already reading" - self._read_callback = stack_context.wrap(callback) + assert self._read_callback is None, "Already reading" + assert self._read_future is None, "Already reading" + if callback is not None: + self._read_callback = stack_context.wrap(callback) + else: + self._read_future = TracebackFuture() + return self._read_future + + def _run_read_callback(self, size, streaming): + if streaming: + callback = self._streaming_callback + else: + callback = self._read_callback + self._read_callback = self._streaming_callback = None + if self._read_future is not None: + assert callback is None + future = self._read_future + self._read_future = None + future.set_result(self._consume(size)) + if callback is not None: + assert self._read_future is None + self._run_callback(callback, self._consume(size)) + else: + # If we scheduled a callback, we will add the error listener + # afterwards. If we didn't, we have to do it now. + self._maybe_add_error_listener() def _try_inline_read(self): """Attempt to complete the current read operation from buffered data. @@ -426,18 +633,14 @@ class BaseIOStream(object): listening for reads on the socket. 
""" # See if we've already got the data from a previous read - if self._read_from_buffer(): + self._run_streaming_callback() + pos = self._find_read_pos() + if pos is not None: + self._read_from_buffer(pos) return self._check_closed() try: - try: - # See comments in _handle_read about incrementing _pending_callbacks - self._pending_callbacks += 1 - while not self.closed(): - if self._read_to_buffer() == 0: - break - finally: - self._pending_callbacks -= 1 + pos = self._read_to_buffer_loop() except Exception: # If there was an in _read_to_buffer, we called close() already, # but couldn't run the close callback because of _pending_callbacks. @@ -445,9 +648,15 @@ class BaseIOStream(object): # applicable. self._maybe_run_close_callback() raise - if self._read_from_buffer(): + if pos is not None: + self._read_from_buffer(pos) return - self._maybe_add_error_listener() + # We couldn't satisfy the read inline, so either close the stream + # or listen for new data. + if self.closed(): + self._maybe_run_close_callback() + else: + self._add_io_state(ioloop.IOLoop.READ) def _read_to_buffer(self): """Reads from the socket and appends the result to the read buffer. @@ -472,32 +681,42 @@ class BaseIOStream(object): return 0 self._read_buffer.append(chunk) self._read_buffer_size += len(chunk) - if self._read_buffer_size >= self.max_buffer_size: + if self._read_buffer_size > self.max_buffer_size: gen_log.error("Reached maximum read buffer size") self.close() - raise IOError("Reached maximum read buffer size") + raise StreamBufferFullError("Reached maximum read buffer size") return len(chunk) - def _read_from_buffer(self): - """Attempts to complete the currently-pending read from the buffer. - - Returns True if the read was completed. 
- """ + def _run_streaming_callback(self): if self._streaming_callback is not None and self._read_buffer_size: bytes_to_consume = self._read_buffer_size if self._read_bytes is not None: bytes_to_consume = min(self._read_bytes, bytes_to_consume) self._read_bytes -= bytes_to_consume - self._run_callback(self._streaming_callback, - self._consume(bytes_to_consume)) - if self._read_bytes is not None and self._read_buffer_size >= self._read_bytes: - num_bytes = self._read_bytes - callback = self._read_callback - self._read_callback = None - self._streaming_callback = None - self._read_bytes = None - self._run_callback(callback, self._consume(num_bytes)) - return True + self._run_read_callback(bytes_to_consume, True) + + def _read_from_buffer(self, pos): + """Attempts to complete the currently-pending read from the buffer. + + The argument is either a position in the read buffer or None, + as returned by _find_read_pos. + """ + self._read_bytes = self._read_delimiter = self._read_regex = None + self._read_partial = False + self._run_read_callback(pos, False) + + def _find_read_pos(self): + """Attempts to find a position in the read buffer that satisfies + the currently-pending read. + + Returns a position in the buffer if the current read can be satisfied, + or None if it cannot. + """ + if (self._read_bytes is not None and + (self._read_buffer_size >= self._read_bytes or + (self._read_partial and self._read_buffer_size > 0))): + num_bytes = min(self._read_bytes, self._read_buffer_size) + return num_bytes elif self._read_delimiter is not None: # Multi-byte delimiters (e.g. '\r\n') may straddle two # chunks in the read buffer, so we can't easily find them @@ -506,37 +725,40 @@ class BaseIOStream(object): # length) tend to be "line" oriented, the delimiter is likely # to be in the first few chunks. Merge the buffer gradually # since large merges are relatively expensive and get undone in - # consume(). + # _consume(). 
if self._read_buffer: while True: loc = self._read_buffer[0].find(self._read_delimiter) if loc != -1: - callback = self._read_callback delimiter_len = len(self._read_delimiter) - self._read_callback = None - self._streaming_callback = None - self._read_delimiter = None - self._run_callback(callback, - self._consume(loc + delimiter_len)) - return True + self._check_max_bytes(self._read_delimiter, + loc + delimiter_len) + return loc + delimiter_len if len(self._read_buffer) == 1: break _double_prefix(self._read_buffer) + self._check_max_bytes(self._read_delimiter, + len(self._read_buffer[0])) elif self._read_regex is not None: if self._read_buffer: while True: m = self._read_regex.search(self._read_buffer[0]) if m is not None: - callback = self._read_callback - self._read_callback = None - self._streaming_callback = None - self._read_regex = None - self._run_callback(callback, self._consume(m.end())) - return True + self._check_max_bytes(self._read_regex, m.end()) + return m.end() if len(self._read_buffer) == 1: break _double_prefix(self._read_buffer) - return False + self._check_max_bytes(self._read_regex, + len(self._read_buffer[0])) + return None + + def _check_max_bytes(self, delimiter, size): + if (self._read_max_bytes is not None and + size > self._read_max_bytes): + raise UnsatisfiableReadError( + "delimiter %r not found within %d bytes" % ( + delimiter, self._read_max_bytes)) def _handle_write(self): while self._write_buffer: @@ -563,6 +785,7 @@ class BaseIOStream(object): self._write_buffer_frozen = False _merge_prefix(self._write_buffer, num_bytes) self._write_buffer.popleft() + self._write_buffer_size -= num_bytes except (socket.error, IOError, OSError) as e: if e.args[0] in _ERRNO_WOULDBLOCK: self._write_buffer_frozen = True @@ -572,14 +795,19 @@ class BaseIOStream(object): # Broken pipe errors are usually caused by connection # reset, and its better to not log EPIPE errors to # minimize log spam - gen_log.warning("Write error on %d: %s", + 
gen_log.warning("Write error on %s: %s", self.fileno(), e) self.close(exc_info=True) return - if not self._write_buffer and self._write_callback: - callback = self._write_callback - self._write_callback = None - self._run_callback(callback) + if not self._write_buffer: + if self._write_callback: + callback = self._write_callback + self._write_callback = None + self._run_callback(callback) + if self._write_future: + future = self._write_future + self._write_future = None + future.set_result(None) def _consume(self, loc): if loc == 0: @@ -593,10 +821,19 @@ class BaseIOStream(object): raise StreamClosedError("Stream is closed") def _maybe_add_error_listener(self): - if self._state is None and self._pending_callbacks == 0: + # This method is part of an optimization: to detect a connection that + # is closed when we're not actively reading or writing, we must listen + # for read events. However, it is inefficient to do this when the + # connection is first established because we are going to read or write + # immediately anyway. Instead, we insert checks at various times to + # see if the connection is idle and add the read listener then. + if self._pending_callbacks != 0: + return + if self._state is None or self._state == ioloop.IOLoop.ERROR: if self.closed(): self._maybe_run_close_callback() - else: + elif (self._read_buffer_size == 0 and + self._close_callback is not None): self._add_io_state(ioloop.IOLoop.READ) def _add_io_state(self, state): @@ -680,7 +917,7 @@ class IOStream(BaseIOStream): super(IOStream, self).__init__(*args, **kwargs) def fileno(self): - return self.socket.fileno() + return self.socket def close_fd(self): self.socket.close() @@ -714,7 +951,9 @@ class IOStream(BaseIOStream): not previously connected. The address parameter is in the same format as for `socket.connect `, i.e. a ``(host, port)`` tuple. If ``callback`` is specified, - it will be called when the connection is completed. 
+ it will be called with no arguments when the connection is + completed; if not this method returns a `.Future` (whose result + after a successful connection will be the stream itself). If specified, the ``server_hostname`` parameter will be used in SSL connections for certificate validation (if requested in @@ -726,6 +965,9 @@ class IOStream(BaseIOStream): which case the data will be written as soon as the connection is ready. Calling `IOStream` read methods before the socket is connected works on some platforms but is non-portable. + + .. versionchanged:: 3.3 + If no callback is given, returns a `.Future`. """ self._connecting = True try: @@ -738,14 +980,83 @@ class IOStream(BaseIOStream): # returned immediately when attempting to connect to # localhost, so handle them the same way as an error # reported later in _handle_connect. - if (e.args[0] != errno.EINPROGRESS and - e.args[0] not in _ERRNO_WOULDBLOCK): - gen_log.warning("Connect error on fd %d: %s", + if (errno_from_exception(e) != errno.EINPROGRESS and + errno_from_exception(e) not in _ERRNO_WOULDBLOCK): + gen_log.warning("Connect error on fd %s: %s", self.socket.fileno(), e) self.close(exc_info=True) return - self._connect_callback = stack_context.wrap(callback) + if callback is not None: + self._connect_callback = stack_context.wrap(callback) + future = None + else: + future = self._connect_future = TracebackFuture() self._add_io_state(self.io_loop.WRITE) + return future + + def start_tls(self, server_side, ssl_options=None, server_hostname=None): + """Convert this `IOStream` to an `SSLIOStream`. + + This enables protocols that begin in clear-text mode and + switch to SSL after some initial negotiation (such as the + ``STARTTLS`` extension to SMTP and IMAP). + + This method cannot be used if there are outstanding reads + or writes on the stream, or if there is any data in the + IOStream's buffer (data in the operating system's socket + buffer is allowed). 
This means it must generally be used + immediately after reading or writing the last clear-text + data. It can also be used immediately after connecting, + before any reads or writes. + + The ``ssl_options`` argument may be either a dictionary + of options or an `ssl.SSLContext`. If a ``server_hostname`` + is given, it will be used for certificate verification + (as configured in the ``ssl_options``). + + This method returns a `.Future` whose result is the new + `SSLIOStream`. After this method has been called, + any other operation on the original stream is undefined. + + If a close callback is defined on this stream, it will be + transferred to the new stream. + + .. versionadded:: 3.3 + """ + if (self._read_callback or self._read_future or + self._write_callback or self._write_future or + self._connect_callback or self._connect_future or + self._pending_callbacks or self._closed or + self._read_buffer or self._write_buffer): + raise ValueError("IOStream is not idle; cannot convert to SSL") + if ssl_options is None: + ssl_options = {} + + socket = self.socket + self.io_loop.remove_handler(socket) + self.socket = None + socket = ssl_wrap_socket(socket, ssl_options, server_side=server_side, + do_handshake_on_connect=False) + orig_close_callback = self._close_callback + self._close_callback = None + + future = TracebackFuture() + ssl_stream = SSLIOStream(socket, ssl_options=ssl_options, + io_loop=self.io_loop) + # Wrap the original close callback so we can fail our Future as well. + # If we had an "unwrap" counterpart to this method we would need + # to restore the original callback after our Future resolves + # so that repeated wrap/unwrap calls don't build up layers. 
+ def close_callback(): + if not future.done(): + future.set_exception(ssl_stream.error or StreamClosedError()) + if orig_close_callback is not None: + orig_close_callback() + ssl_stream.set_close_callback(close_callback) + ssl_stream._ssl_connect_callback = lambda: future.set_result(ssl_stream) + ssl_stream.max_buffer_size = self.max_buffer_size + ssl_stream.read_chunk_size = self.read_chunk_size + return future def _handle_connect(self): err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) @@ -755,14 +1066,19 @@ class IOStream(BaseIOStream): # an error state before the socket becomes writable, so # in that case a connection failure would be handled by the # error path in _handle_events instead of here. - gen_log.warning("Connect error on fd %d: %s", - self.socket.fileno(), errno.errorcode[err]) + if self._connect_future is None: + gen_log.warning("Connect error on fd %s: %s", + self.socket.fileno(), errno.errorcode[err]) self.close() return if self._connect_callback is not None: callback = self._connect_callback self._connect_callback = None self._run_callback(callback) + if self._connect_future is not None: + future = self._connect_future + self._connect_future = None + future.set_result(self) self._connecting = False def set_nodelay(self, value): @@ -841,7 +1157,7 @@ class SSLIOStream(IOStream): peer = self.socket.getpeername() except Exception: peer = '(not connected)' - gen_log.warning("SSL Error on %d %s: %s", + gen_log.warning("SSL Error on %s %s: %s", self.socket.fileno(), peer, err) return self.close(exc_info=True) raise @@ -907,19 +1223,33 @@ class SSLIOStream(IOStream): # has completed. self._ssl_connect_callback = stack_context.wrap(callback) self._server_hostname = server_hostname - super(SSLIOStream, self).connect(address, callback=None) + # Note: Since we don't pass our callback argument along to + # super.connect(), this will always return a Future. + # This is harmless, but a bit less efficient than it could be. 
+ return super(SSLIOStream, self).connect(address, callback=None) def _handle_connect(self): + # Call the superclass method to check for errors. + super(SSLIOStream, self)._handle_connect() + if self.closed(): + return # When the connection is complete, wrap the socket for SSL # traffic. Note that we do this by overriding _handle_connect # instead of by passing a callback to super().connect because # user callbacks are enqueued asynchronously on the IOLoop, # but since _handle_events calls _handle_connect immediately # followed by _handle_write we need this to be synchronous. + # + # The IOLoop will get confused if we swap out self.socket while the + # fd is registered, so remove it now and re-register after + # wrap_socket(). + self.io_loop.remove_handler(self.socket) + old_state = self._state + self._state = None self.socket = ssl_wrap_socket(self.socket, self._ssl_options, server_hostname=self._server_hostname, do_handshake_on_connect=False) - super(SSLIOStream, self)._handle_connect() + self._add_io_state(old_state) def read_from_fd(self): if self._ssl_accepting: @@ -978,9 +1308,9 @@ class PipeIOStream(BaseIOStream): try: chunk = os.read(self.fd, self.read_chunk_size) except (IOError, OSError) as e: - if e.args[0] in _ERRNO_WOULDBLOCK: + if errno_from_exception(e) in _ERRNO_WOULDBLOCK: return None - elif e.args[0] == errno.EBADF: + elif errno_from_exception(e) == errno.EBADF: # If the writing half of a pipe is closed, select will # report it as readable but reads will fail with EBADF. 
self.close(exc_info=True) diff --git a/libs/tornado/log.py b/libs/tornado/log.py index 36c3dd40..70664664 100755 --- a/libs/tornado/log.py +++ b/libs/tornado/log.py @@ -83,10 +83,10 @@ class LogFormatter(logging.Formatter): DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s' DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S' DEFAULT_COLORS = { - logging.DEBUG: 4, # Blue - logging.INFO: 2, # Green - logging.WARNING: 3, # Yellow - logging.ERROR: 1, # Red + logging.DEBUG: 4, # Blue + logging.INFO: 2, # Green + logging.WARNING: 3, # Yellow + logging.ERROR: 1, # Red } def __init__(self, color=True, fmt=DEFAULT_FORMAT, @@ -184,7 +184,7 @@ def enable_pretty_logging(options=None, logger=None): """ if options is None: from tornado.options import options - if options.logging == 'none': + if options.logging is None or options.logging.lower() == 'none': return if logger is None: logger = logging.getLogger() diff --git a/libs/tornado/netutil.py b/libs/tornado/netutil.py index 8ebe604d..a9e05d1e 100755 --- a/libs/tornado/netutil.py +++ b/libs/tornado/netutil.py @@ -20,18 +20,26 @@ from __future__ import absolute_import, division, print_function, with_statement import errno import os +import platform import socket -import ssl import stat from tornado.concurrent import dummy_executor, run_on_executor from tornado.ioloop import IOLoop from tornado.platform.auto import set_close_exec -from tornado.util import u, Configurable +from tornado.util import u, Configurable, errno_from_exception + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine + ssl = None if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+ ssl_match_hostname = ssl.match_hostname SSLCertificateError = ssl.CertificateError +elif ssl is None: + ssl_match_hostname = SSLCertificateError = None else: import backports.ssl_match_hostname ssl_match_hostname = backports.ssl_match_hostname.match_hostname @@ -44,6 +52,11 
@@ else: # thread now. u('foo').encode('idna') +# These errnos indicate that a non-blocking operation must be retried +# at a later time. On most platforms they're the same value, but on +# some they differ. +_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) + def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None): """Creates listening sockets bound to the given port and address. @@ -77,13 +90,23 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags family = socket.AF_INET if flags is None: flags = socket.AI_PASSIVE + bound_port = None for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM, 0, flags)): af, socktype, proto, canonname, sockaddr = res + if (platform.system() == 'Darwin' and address == 'localhost' and + af == socket.AF_INET6 and sockaddr[3] != 0): + # Mac OS X includes a link-local address fe80::1%lo0 in the + # getaddrinfo results for 'localhost'. However, the firewall + # doesn't understand that this is a local address and will + # prompt for access (often repeatedly, due to an apparent + # bug in its ability to remember granting access to an + # application). Skip these addresses. + continue try: sock = socket.socket(af, socktype, proto) except socket.error as e: - if e.args[0] == errno.EAFNOSUPPORT: + if errno_from_exception(e) == errno.EAFNOSUPPORT: continue raise set_close_exec(sock.fileno()) @@ -100,8 +123,16 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags # Python 2.x on windows doesn't have IPPROTO_IPV6. 
if hasattr(socket, "IPPROTO_IPV6"): sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # automatic port allocation with port=None + # should bind on the same port on IPv4 and IPv6 + host, requested_port = sockaddr[:2] + if requested_port == 0 and bound_port is not None: + sockaddr = tuple([host, bound_port] + list(sockaddr[2:])) + sock.setblocking(0) sock.bind(sockaddr) + bound_port = sock.getsockname()[1] sock.listen(backlog) sockets.append(sock) return sockets @@ -124,7 +155,7 @@ if hasattr(socket, 'AF_UNIX'): try: st = os.stat(file) except OSError as err: - if err.errno != errno.ENOENT: + if errno_from_exception(err) != errno.ENOENT: raise else: if stat.S_ISSOCK(st.st_mode): @@ -154,18 +185,18 @@ def add_accept_handler(sock, callback, io_loop=None): try: connection, address = sock.accept() except socket.error as e: - # EWOULDBLOCK and EAGAIN indicate we have accepted every + # _ERRNO_WOULDBLOCK indicate we have accepted every # connection that is available. - if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN): + if errno_from_exception(e) in _ERRNO_WOULDBLOCK: return # ECONNABORTED indicates that there was a connection # but it was closed while still in the accept queue. # (observed on FreeBSD). - if e.args[0] == errno.ECONNABORTED: + if errno_from_exception(e) == errno.ECONNABORTED: continue raise callback(connection, address) - io_loop.add_handler(sock.fileno(), accept_handler, IOLoop.READ) + io_loop.add_handler(sock, accept_handler, IOLoop.READ) def is_valid_ip(ip): @@ -381,6 +412,10 @@ def ssl_options_to_context(ssl_options): context.load_verify_locations(ssl_options['ca_certs']) if 'ciphers' in ssl_options: context.set_ciphers(ssl_options['ciphers']) + if hasattr(ssl, 'OP_NO_COMPRESSION'): + # Disable TLS compression to avoid CRIME and related attacks. + # This constant wasn't added until python 3.3. 
+ context.options |= ssl.OP_NO_COMPRESSION return context diff --git a/libs/tornado/options.py b/libs/tornado/options.py index 1105c0e9..fa9c269e 100755 --- a/libs/tornado/options.py +++ b/libs/tornado/options.py @@ -56,6 +56,18 @@ We support `datetimes `, `timedeltas the top-level functions in this module (`define`, `parse_command_line`, etc) simply call methods on it. You may create additional `OptionParser` instances to define isolated sets of options, such as for subcommands. + +.. note:: + + By default, several options are defined that will configure the + standard `logging` module when `parse_command_line` or `parse_config_file` + are called. If you want Tornado to leave the logging configuration + alone so you can manage it yourself, either pass ``--logging=none`` + on the command line or do the following to disable it in code:: + + from tornado.options import options, parse_command_line + options.logging = None + parse_command_line() """ from __future__ import absolute_import, division, print_function, with_statement @@ -360,6 +372,8 @@ class _Mockable(object): class _Option(object): + UNSET = object() + def __init__(self, name, default=None, type=basestring_type, help=None, metavar=None, multiple=False, file_name=None, group_name=None, callback=None): @@ -374,10 +388,10 @@ class _Option(object): self.group_name = group_name self.callback = callback self.default = default - self._value = None + self._value = _Option.UNSET def value(self): - return self.default if self._value is None else self._value + return self.default if self._value is _Option.UNSET else self._value def parse(self, value): _parse = { diff --git a/libs/tornado/platform/asyncio.py b/libs/tornado/platform/asyncio.py index 162b3673..552476bc 100644 --- a/libs/tornado/platform/asyncio.py +++ b/libs/tornado/platform/asyncio.py @@ -10,21 +10,31 @@ unfinished callbacks on the event loop that fail when it resumes) """ from __future__ import absolute_import, division, print_function, 
with_statement -import asyncio import datetime import functools -import os -from tornado.ioloop import IOLoop +# _Timeout is used for its timedelta_to_seconds method for py26 compatibility. +from tornado.ioloop import IOLoop, _Timeout from tornado import stack_context +try: + # Import the real asyncio module for py33+ first. Older versions of the + # trollius backport also use this name. + import asyncio +except ImportError as e: + # Asyncio itself isn't available; see if trollius is (backport to py26+). + try: + import trollius as asyncio + except ImportError: + # Re-raise the original asyncio error, not the trollius one. + raise e class BaseAsyncIOLoop(IOLoop): def initialize(self, asyncio_loop, close_loop=False): self.asyncio_loop = asyncio_loop self.close_loop = close_loop self.asyncio_loop.call_soon(self.make_current) - # Maps fd to handler function (as in IOLoop.add_handler) + # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler) self.handlers = {} # Set of fds listening for reads/writes self.readers = set() @@ -34,19 +44,18 @@ class BaseAsyncIOLoop(IOLoop): def close(self, all_fds=False): self.closing = True for fd in list(self.handlers): + fileobj, handler_func = self.handlers[fd] self.remove_handler(fd) if all_fds: - try: - os.close(fd) - except OSError: - pass + self.close_fd(fileobj) if self.close_loop: self.asyncio_loop.close() def add_handler(self, fd, handler, events): + fd, fileobj = self.split_fd(fd) if fd in self.handlers: - raise ValueError("fd %d added twice" % fd) - self.handlers[fd] = stack_context.wrap(handler) + raise ValueError("fd %s added twice" % fd) + self.handlers[fd] = (fileobj, stack_context.wrap(handler)) if events & IOLoop.READ: self.asyncio_loop.add_reader( fd, self._handle_events, fd, IOLoop.READ) @@ -57,6 +66,7 @@ class BaseAsyncIOLoop(IOLoop): self.writers.add(fd) def update_handler(self, fd, events): + fd, fileobj = self.split_fd(fd) if events & IOLoop.READ: if fd not in self.readers: 
self.asyncio_loop.add_reader( @@ -77,6 +87,7 @@ class BaseAsyncIOLoop(IOLoop): self.writers.remove(fd) def remove_handler(self, fd): + fd, fileobj = self.split_fd(fd) if fd not in self.handlers: return if fd in self.readers: @@ -88,7 +99,8 @@ class BaseAsyncIOLoop(IOLoop): del self.handlers[fd] def _handle_events(self, fd, events): - self.handlers[fd](fd, events) + fileobj, handler_func = self.handlers[fd] + handler_func(fileobj, events) def start(self): self._setup_logging() @@ -107,7 +119,7 @@ class BaseAsyncIOLoop(IOLoop): if isinstance(deadline, (int, float)): delay = max(deadline - self.time(), 0) elif isinstance(deadline, datetime.timedelta): - delay = deadline.total_seconds() + delay = _Timeout.timedelta_to_seconds(deadline) else: raise TypeError("Unsupported deadline %r", deadline) return self.asyncio_loop.call_later(delay, self._run_callback, diff --git a/libs/tornado/platform/auto.py b/libs/tornado/platform/auto.py index e55725b3..ddfe06b4 100755 --- a/libs/tornado/platform/auto.py +++ b/libs/tornado/platform/auto.py @@ -30,6 +30,10 @@ import os if os.name == 'nt': from tornado.platform.common import Waker from tornado.platform.windows import set_close_exec +elif 'APPENGINE_RUNTIME' in os.environ: + from tornado.platform.common import Waker + def set_close_exec(fd): + pass else: from tornado.platform.posix import set_close_exec, Waker diff --git a/libs/tornado/platform/common.py b/libs/tornado/platform/common.py index d9c4cf9f..b409a903 100755 --- a/libs/tornado/platform/common.py +++ b/libs/tornado/platform/common.py @@ -15,7 +15,8 @@ class Waker(interface.Waker): and Jython. 
""" def __init__(self): - # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py + # Based on Zope select_trigger.py: + # https://github.com/zopefoundation/Zope/blob/master/src/ZServer/medusa/thread/select_trigger.py self.writer = socket.socket() # Disable buffering -- pulling the trigger sends 1 byte, diff --git a/libs/tornado/platform/kqueue.py b/libs/tornado/platform/kqueue.py index ceff0a43..de8c046d 100755 --- a/libs/tornado/platform/kqueue.py +++ b/libs/tornado/platform/kqueue.py @@ -37,7 +37,7 @@ class _KQueue(object): def register(self, fd, events): if fd in self._active: - raise IOError("fd %d already registered" % fd) + raise IOError("fd %s already registered" % fd) self._control(fd, events, select.KQ_EV_ADD) self._active[fd] = events diff --git a/libs/tornado/platform/select.py b/libs/tornado/platform/select.py index 8bbb1f4f..9a879562 100755 --- a/libs/tornado/platform/select.py +++ b/libs/tornado/platform/select.py @@ -37,7 +37,7 @@ class _Select(object): def register(self, fd, events): if fd in self.read_fds or fd in self.write_fds or fd in self.error_fds: - raise IOError("fd %d already registered" % fd) + raise IOError("fd %s already registered" % fd) if events & IOLoop.READ: self.read_fds.add(fd) if events & IOLoop.WRITE: diff --git a/libs/tornado/platform/twisted.py b/libs/tornado/platform/twisted.py index 0c8a3105..889aa3c4 100755 --- a/libs/tornado/platform/twisted.py +++ b/libs/tornado/platform/twisted.py @@ -91,6 +91,11 @@ from tornado.netutil import Resolver from tornado.stack_context import NullContext, wrap from tornado.ioloop import IOLoop +try: + long # py2 +except NameError: + long = int # py3 + @implementer(IDelayedCall) class TornadoDelayedCall(object): @@ -365,8 +370,9 @@ def install(io_loop=None): @implementer(IReadDescriptor, IWriteDescriptor) class _FD(object): - def __init__(self, fd, handler): + def __init__(self, fd, fileobj, handler): self.fd = fd + self.fileobj = fileobj self.handler = handler 
self.reading = False self.writing = False @@ -377,15 +383,15 @@ class _FD(object): def doRead(self): if not self.lost: - self.handler(self.fd, tornado.ioloop.IOLoop.READ) + self.handler(self.fileobj, tornado.ioloop.IOLoop.READ) def doWrite(self): if not self.lost: - self.handler(self.fd, tornado.ioloop.IOLoop.WRITE) + self.handler(self.fileobj, tornado.ioloop.IOLoop.WRITE) def connectionLost(self, reason): if not self.lost: - self.handler(self.fd, tornado.ioloop.IOLoop.ERROR) + self.handler(self.fileobj, tornado.ioloop.IOLoop.ERROR) self.lost = True def logPrefix(self): @@ -412,14 +418,19 @@ class TwistedIOLoop(tornado.ioloop.IOLoop): self.reactor.callWhenRunning(self.make_current) def close(self, all_fds=False): + fds = self.fds self.reactor.removeAll() for c in self.reactor.getDelayedCalls(): c.cancel() + if all_fds: + for fd in fds.values(): + self.close_fd(fd.fileobj) def add_handler(self, fd, handler, events): if fd in self.fds: - raise ValueError('fd %d added twice' % fd) - self.fds[fd] = _FD(fd, wrap(handler)) + raise ValueError('fd %s added twice' % fd) + fd, fileobj = self.split_fd(fd) + self.fds[fd] = _FD(fd, fileobj, wrap(handler)) if events & tornado.ioloop.IOLoop.READ: self.fds[fd].reading = True self.reactor.addReader(self.fds[fd]) @@ -428,6 +439,7 @@ class TwistedIOLoop(tornado.ioloop.IOLoop): self.reactor.addWriter(self.fds[fd]) def update_handler(self, fd, events): + fd, fileobj = self.split_fd(fd) if events & tornado.ioloop.IOLoop.READ: if not self.fds[fd].reading: self.fds[fd].reading = True @@ -446,6 +458,7 @@ class TwistedIOLoop(tornado.ioloop.IOLoop): self.reactor.removeWriter(self.fds[fd]) def remove_handler(self, fd): + fd, fileobj = self.split_fd(fd) if fd not in self.fds: return self.fds[fd].lost = True diff --git a/libs/tornado/process.py b/libs/tornado/process.py index 942c5c3f..0f38b856 100755 --- a/libs/tornado/process.py +++ b/libs/tornado/process.py @@ -21,7 +21,6 @@ the server into multiple processes and managing subprocesses. 
from __future__ import absolute_import, division, print_function, with_statement import errno -import multiprocessing import os import signal import subprocess @@ -35,6 +34,13 @@ from tornado.iostream import PipeIOStream from tornado.log import gen_log from tornado.platform.auto import set_close_exec from tornado import stack_context +from tornado.util import errno_from_exception + +try: + import multiprocessing +except ImportError: + # Multiprocessing is not available on Google App Engine. + multiprocessing = None try: long # py2 @@ -44,6 +50,8 @@ except NameError: def cpu_count(): """Returns the number of processors on this machine.""" + if multiprocessing is None: + return 1 try: return multiprocessing.cpu_count() except NotImplementedError: @@ -136,7 +144,7 @@ def fork_processes(num_processes, max_restarts=100): try: pid, status = os.wait() except OSError as e: - if e.errno == errno.EINTR: + if errno_from_exception(e) == errno.EINTR: continue raise if pid not in children: @@ -283,7 +291,7 @@ class Subprocess(object): try: ret_pid, status = os.waitpid(pid, os.WNOHANG) except OSError as e: - if e.args[0] == errno.ECHILD: + if errno_from_exception(e) == errno.ECHILD: return if ret_pid == 0: return diff --git a/libs/tornado/simple_httpclient.py b/libs/tornado/simple_httpclient.py index 73bfee89..06d7ecfa 100755 --- a/libs/tornado/simple_httpclient.py +++ b/libs/tornado/simple_httpclient.py @@ -1,23 +1,23 @@ #!/usr/bin/env python from __future__ import absolute_import, division, print_function, with_statement -from tornado.escape import utf8, _unicode, native_str +from tornado.concurrent import is_future +from tornado.escape import utf8, _unicode from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy -from tornado.httputil import HTTPHeaders -from tornado.iostream import IOStream, SSLIOStream +from tornado import httputil +from tornado.http1connection import HTTP1Connection, HTTP1ConnectionParameters +from tornado.iostream import
StreamClosedError from tornado.netutil import Resolver, OverrideResolver from tornado.log import gen_log from tornado import stack_context -from tornado.util import GzipDecompressor +from tornado.tcpclient import TCPClient import base64 import collections import copy import functools -import os.path import re import socket -import ssl import sys try: @@ -30,7 +30,23 @@ try: except ImportError: import urllib.parse as urlparse # py3 -_DEFAULT_CA_CERTS = os.path.dirname(__file__) + '/ca-certificates.crt' +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine. + ssl = None + +try: + import certifi +except ImportError: + certifi = None + + +def _default_ca_certs(): + if certifi is None: + raise Exception("The 'certifi' package is required to use https " + "in simple_httpclient") + return certifi.where() class SimpleAsyncHTTPClient(AsyncHTTPClient): @@ -47,7 +63,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): """ def initialize(self, io_loop, max_clients=10, hostname_mapping=None, max_buffer_size=104857600, - resolver=None, defaults=None): + resolver=None, defaults=None, max_header_size=None): """Creates a AsyncHTTPClient. Only a single AsyncHTTPClient instance exists per IOLoop @@ -74,6 +90,9 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): self.active = {} self.waiting = {} self.max_buffer_size = max_buffer_size + self.max_header_size = max_header_size + # TCPClient could create a Resolver for us, but we have to do it + # ourselves to support hostname_mapping. 
if resolver: self.resolver = resolver self.own_resolver = False @@ -83,11 +102,13 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): if hostname_mapping is not None: self.resolver = OverrideResolver(resolver=self.resolver, mapping=hostname_mapping) + self.tcp_client = TCPClient(resolver=self.resolver, io_loop=io_loop) def close(self): super(SimpleAsyncHTTPClient, self).close() if self.own_resolver: self.resolver.close() + self.tcp_client.close() def fetch_impl(self, request, callback): key = object() @@ -119,7 +140,8 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): def _handle_request(self, request, release_callback, final_callback): _HTTPConnection(self.io_loop, self, request, release_callback, - final_callback, self.max_buffer_size, self.resolver) + final_callback, self.max_buffer_size, self.tcp_client, + self.max_header_size) def _release_fetch(self, key): del self.active[key] @@ -142,11 +164,12 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): del self.waiting[key] -class _HTTPConnection(object): +class _HTTPConnection(httputil.HTTPMessageDelegate): _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]) def __init__(self, io_loop, client, request, release_callback, - final_callback, max_buffer_size, resolver): + final_callback, max_buffer_size, tcp_client, + max_header_size): self.start_time = io_loop.time() self.io_loop = io_loop self.client = client @@ -154,13 +177,15 @@ class _HTTPConnection(object): self.release_callback = release_callback self.final_callback = final_callback self.max_buffer_size = max_buffer_size - self.resolver = resolver + self.tcp_client = tcp_client + self.max_header_size = max_header_size self.code = None self.headers = None - self.chunks = None + self.chunks = [] self._decompressor = None # Timeout handle returned by IOLoop.add_timeout self._timeout = None + self._sockaddr = None with stack_context.ExceptionStackContext(self._handle_exception): self.parsed = urlparse.urlsplit(_unicode(self.request.url)) 
if self.parsed.scheme not in ("http", "https"): @@ -183,42 +208,31 @@ class _HTTPConnection(object): host = host[1:-1] self.parsed_hostname = host # save final host for _on_connect - if request.allow_ipv6: - af = socket.AF_UNSPEC - else: - # We only try the first IP we get from getaddrinfo, - # so restrict to ipv4 by default. + if request.allow_ipv6 is False: af = socket.AF_INET + else: + af = socket.AF_UNSPEC + + ssl_options = self._get_ssl_options(self.parsed.scheme) timeout = min(self.request.connect_timeout, self.request.request_timeout) if timeout: self._timeout = self.io_loop.add_timeout( self.start_time + timeout, stack_context.wrap(self._on_timeout)) - self.resolver.resolve(host, port, af, callback=self._on_resolve) + self.tcp_client.connect(host, port, af=af, + ssl_options=ssl_options, + callback=self._on_connect) - def _on_resolve(self, addrinfo): - if self.final_callback is None: - # final_callback is cleared if we've hit our timeout - return - self.stream = self._create_stream(addrinfo) - self.stream.set_close_callback(self._on_close) - # ipv6 addresses are broken (in self.parsed.hostname) until - # 2.7, here is correctly parsed value calculated in __init__ - sockaddr = addrinfo[0][1] - self.stream.connect(sockaddr, self._on_connect, - server_hostname=self.parsed_hostname) - - def _create_stream(self, addrinfo): - af = addrinfo[0][0] - if self.parsed.scheme == "https": + def _get_ssl_options(self, scheme): + if scheme == "https": ssl_options = {} if self.request.validate_cert: ssl_options["cert_reqs"] = ssl.CERT_REQUIRED if self.request.ca_certs is not None: ssl_options["ca_certs"] = self.request.ca_certs else: - ssl_options["ca_certs"] = _DEFAULT_CA_CERTS + ssl_options["ca_certs"] = _default_ca_certs() if self.request.client_key is not None: ssl_options["keyfile"] = self.request.client_key if self.request.client_cert is not None: @@ -236,21 +250,16 @@ class _HTTPConnection(object): # but nearly all servers support both SSLv3 and TLSv1: # 
http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html if sys.version_info >= (2, 7): - ssl_options["ciphers"] = "DEFAULT:!SSLv2" + # In addition to disabling SSLv2, we also exclude certain + # classes of insecure ciphers. + ssl_options["ciphers"] = "DEFAULT:!SSLv2:!EXPORT:!DES" else: # This is really only necessary for pre-1.0 versions # of openssl, but python 2.6 doesn't expose version # information. ssl_options["ssl_version"] = ssl.PROTOCOL_TLSv1 - - return SSLIOStream(socket.socket(af), - io_loop=self.io_loop, - ssl_options=ssl_options, - max_buffer_size=self.max_buffer_size) - else: - return IOStream(socket.socket(af), - io_loop=self.io_loop, - max_buffer_size=self.max_buffer_size) + return ssl_options + return None def _on_timeout(self): self._timeout = None @@ -262,7 +271,13 @@ class _HTTPConnection(object): self.io_loop.remove_timeout(self._timeout) self._timeout = None - def _on_connect(self): + def _on_connect(self, stream): + if self.final_callback is None: + # final_callback is cleared if we've hit our timeout. 
+ stream.close() + return + self.stream = stream + self.stream.set_close_callback(self._on_close) self._remove_timeout() if self.final_callback is None: return @@ -302,16 +317,22 @@ class _HTTPConnection(object): self.request.headers["User-Agent"] = self.request.user_agent if not self.request.allow_nonstandard_methods: if self.request.method in ("POST", "PATCH", "PUT"): - if self.request.body is None: + if (self.request.body is None and + self.request.body_producer is None): raise AssertionError( 'Body must not be empty for "%s" request' % self.request.method) else: - if self.request.body is not None: + if (self.request.body is not None or + self.request.body_producer is not None): raise AssertionError( 'Body must be empty for "%s" request' % self.request.method) + if self.request.expect_100_continue: + self.request.headers["Expect"] = "100-continue" if self.request.body is not None: + # When body_producer is used the caller is responsible for + # setting Content-Length (or else chunked encoding will be used). self.request.headers["Content-Length"] = str(len( self.request.body)) if (self.request.method == "POST" and @@ -320,20 +341,47 @@ class _HTTPConnection(object): if self.request.use_gzip: self.request.headers["Accept-Encoding"] = "gzip" req_path = ((self.parsed.path or '/') + - (('?' + self.parsed.query) if self.parsed.query else '')) - request_lines = [utf8("%s %s HTTP/1.1" % (self.request.method, - req_path))] - for k, v in self.request.headers.get_all(): - line = utf8(k) + b": " + utf8(v) - if b'\n' in line: - raise ValueError('Newline in header: ' + repr(line)) - request_lines.append(line) - request_str = b"\r\n".join(request_lines) + b"\r\n\r\n" - if self.request.body is not None: - request_str += self.request.body + (('?' 
+ self.parsed.query) if self.parsed.query else '')) self.stream.set_nodelay(True) - self.stream.write(request_str) - self.stream.read_until_regex(b"\r?\n\r?\n", self._on_headers) + self.connection = HTTP1Connection( + self.stream, True, + HTTP1ConnectionParameters( + no_keep_alive=True, + max_header_size=self.max_header_size, + use_gzip=self.request.use_gzip), + self._sockaddr) + start_line = httputil.RequestStartLine(self.request.method, + req_path, 'HTTP/1.1') + self.connection.write_headers(start_line, self.request.headers) + if self.request.expect_100_continue: + self._read_response() + else: + self._write_body(True) + + def _write_body(self, start_read): + if self.request.body is not None: + self.connection.write(self.request.body) + self.connection.finish() + elif self.request.body_producer is not None: + fut = self.request.body_producer(self.connection.write) + if is_future(fut): + def on_body_written(fut): + fut.result() + self.connection.finish() + if start_read: + self._read_response() + self.io_loop.add_future(fut, on_body_written) + return + self.connection.finish() + if start_read: + self._read_response() + + def _read_response(self): + # Ensure that any exception raised in read_response ends up in our + # stack context. + self.io_loop.add_future( + self.connection.read_response(self), + lambda f: f.result()) def _release(self): if self.release_callback is not None: @@ -351,43 +399,39 @@ class _HTTPConnection(object): def _handle_exception(self, typ, value, tb): if self.final_callback: self._remove_timeout() + if isinstance(value, StreamClosedError): + value = HTTPError(599, "Stream closed") self._run_callback(HTTPResponse(self.request, 599, error=value, request_time=self.io_loop.time() - self.start_time, )) if hasattr(self, "stream"): + # TODO: this may cause a StreamClosedError to be raised + # by the connection's Future. Should we cancel the + # connection more gracefully? 
self.stream.close() return True else: # If our callback has already been called, we are probably # catching an exception that is not caused by us but rather # some child of our callback. Rather than drop it on the floor, - # pass it along. - return False + # pass it along, unless it's just the stream being closed. + return isinstance(value, StreamClosedError) def _on_close(self): if self.final_callback is not None: message = "Connection closed" if self.stream.error: - message = str(self.stream.error) + raise self.stream.error raise HTTPError(599, message) - def _handle_1xx(self, code): - self.stream.read_until_regex(b"\r?\n\r?\n", self._on_headers) - - def _on_headers(self, data): - data = native_str(data.decode("latin1")) - first_line, _, header_data = data.partition("\n") - match = re.match("HTTP/1.[01] ([0-9]+) ([^\r]*)", first_line) - assert match - code = int(match.group(1)) - self.headers = HTTPHeaders.parse(header_data) - if 100 <= code < 200: - self._handle_1xx(code) + def headers_received(self, first_line, headers): + if self.request.expect_100_continue and first_line.code == 100: + self._write_body(False) return - else: - self.code = code - self.reason = match.group(2) + self.headers = headers + self.code = first_line.code + self.reason = first_line.reason if "Content-Length" in self.headers: if "," in self.headers["Content-Length"]: @@ -404,17 +448,12 @@ class _HTTPConnection(object): content_length = None if self.request.header_callback is not None: - # re-attach the newline we split on earlier - self.request.header_callback(first_line + _) + # Reassemble the start line. 
+ self.request.header_callback('%s %s %s\r\n' % first_line) for k, v in self.headers.get_all(): self.request.header_callback("%s: %s\r\n" % (k, v)) self.request.header_callback('\r\n') - if self.request.method == "HEAD" or self.code == 304: - # HEAD requests and 304 responses never have content, even - # though they may have content-length headers - self._on_body(b"") - return if 100 <= self.code < 200 or self.code == 204: # These response codes never have bodies # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3 @@ -422,21 +461,9 @@ class _HTTPConnection(object): content_length not in (None, 0)): raise ValueError("Response with code %d should not have body" % self.code) - self._on_body(b"") - return - if (self.request.use_gzip and - self.headers.get("Content-Encoding") == "gzip"): - self._decompressor = GzipDecompressor() - if self.headers.get("Transfer-Encoding") == "chunked": - self.chunks = [] - self.stream.read_until(b"\r\n", self._on_chunk_length) - elif content_length is not None: - self.stream.read_bytes(content_length, self._on_body) - else: - self.stream.read_until_close(self._on_body) - - def _on_body(self, data): + def finish(self): + data = b''.join(self.chunks) self._remove_timeout() original_request = getattr(self.request, "original_request", self.request) @@ -472,19 +499,12 @@ class _HTTPConnection(object): self.client.fetch(new_request, final_callback) self._on_end_request() return - if self._decompressor: - data = (self._decompressor.decompress(data) + - self._decompressor.flush()) if self.request.streaming_callback: - if self.chunks is None: - # if chunks is not None, we already called streaming_callback - # in _on_chunk_data - self.request.streaming_callback(data) buffer = BytesIO() else: buffer = BytesIO(data) # TODO: don't require one big string? 
response = HTTPResponse(original_request, - self.code, reason=self.reason, + self.code, reason=getattr(self, 'reason', None), headers=self.headers, request_time=self.io_loop.time() - self.start_time, buffer=buffer, @@ -495,40 +515,11 @@ class _HTTPConnection(object): def _on_end_request(self): self.stream.close() - def _on_chunk_length(self, data): - # TODO: "chunk extensions" http://tools.ietf.org/html/rfc2616#section-3.6.1 - length = int(data.strip(), 16) - if length == 0: - if self._decompressor is not None: - tail = self._decompressor.flush() - if tail: - # I believe the tail will always be empty (i.e. - # decompress will return all it can). The purpose - # of the flush call is to detect errors such - # as truncated input. But in case it ever returns - # anything, treat it as an extra chunk - if self.request.streaming_callback is not None: - self.request.streaming_callback(tail) - else: - self.chunks.append(tail) - # all the data has been decompressed, so we don't need to - # decompress again in _on_body - self._decompressor = None - self._on_body(b''.join(self.chunks)) - else: - self.stream.read_bytes(length + 2, # chunk ends with \r\n - self._on_chunk_data) - - def _on_chunk_data(self, data): - assert data[-2:] == b"\r\n" - chunk = data[:-2] - if self._decompressor: - chunk = self._decompressor.decompress(chunk) + def data_received(self, chunk): if self.request.streaming_callback is not None: self.request.streaming_callback(chunk) else: self.chunks.append(chunk) - self.stream.read_until(b"\r\n", self._on_chunk_length) if __name__ == "__main__": diff --git a/libs/tornado/speedups.c b/libs/tornado/speedups.c index 8a316c58..174a6129 100644 --- a/libs/tornado/speedups.c +++ b/libs/tornado/speedups.c @@ -1,21 +1,24 @@ +#define PY_SSIZE_T_CLEAN #include static PyObject* websocket_mask(PyObject* self, PyObject* args) { const char* mask; - int mask_len; + Py_ssize_t mask_len; const char* data; - int data_len; - int i; + Py_ssize_t data_len; + Py_ssize_t i; + 
PyObject* result; + char* buf; if (!PyArg_ParseTuple(args, "s#s#", &mask, &mask_len, &data, &data_len)) { return NULL; } - PyObject* result = PyBytes_FromStringAndSize(NULL, data_len); + result = PyBytes_FromStringAndSize(NULL, data_len); if (!result) { return NULL; } - char* buf = PyBytes_AsString(result); + buf = PyBytes_AsString(result); for (i = 0; i < data_len; i++) { buf[i] = data[i] ^ mask[i % 4]; } diff --git a/libs/tornado/stack_context.py b/libs/tornado/stack_context.py index b1e82b0e..2e845ab2 100755 --- a/libs/tornado/stack_context.py +++ b/libs/tornado/stack_context.py @@ -266,6 +266,18 @@ def wrap(fn): # TODO: Any other better way to store contexts and update them in wrapped function? cap_contexts = [_state.contexts] + if not cap_contexts[0][0] and not cap_contexts[0][1]: + # Fast path when there are no active contexts. + def null_wrapper(*args, **kwargs): + try: + current_state = _state.contexts + _state.contexts = cap_contexts[0] + return fn(*args, **kwargs) + finally: + _state.contexts = current_state + null_wrapper._wrapped = True + return null_wrapper + def wrapped(*args, **kwargs): ret = None try: diff --git a/libs/tornado/tcpclient.py b/libs/tornado/tcpclient.py new file mode 100644 index 00000000..d49eb5cd --- /dev/null +++ b/libs/tornado/tcpclient.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python +# +# Copyright 2014 Facebook +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""A non-blocking TCP connection factory. 
+""" +from __future__ import absolute_import, division, print_function, with_statement + +import functools +import socket + +from tornado.concurrent import Future +from tornado.ioloop import IOLoop +from tornado.iostream import IOStream +from tornado import gen +from tornado.netutil import Resolver + +_INITIAL_CONNECT_TIMEOUT = 0.3 + + +class _Connector(object): + """A stateless implementation of the "Happy Eyeballs" algorithm. + + "Happy Eyeballs" is documented in RFC6555 as the recommended practice + for when both IPv4 and IPv6 addresses are available. + + In this implementation, we partition the addresses by family, and + make the first connection attempt to whichever address was + returned first by ``getaddrinfo``. If that connection fails or + times out, we begin a connection in parallel to the first address + of the other family. If there are additional failures we retry + with other addresses, keeping one connection attempt per family + in flight at a time. + + http://tools.ietf.org/html/rfc6555 + + """ + def __init__(self, addrinfo, io_loop, connect): + self.io_loop = io_loop + self.connect = connect + + self.future = Future() + self.timeout = None + self.last_error = None + self.remaining = len(addrinfo) + self.primary_addrs, self.secondary_addrs = self.split(addrinfo) + + @staticmethod + def split(addrinfo): + """Partition the ``addrinfo`` list by address family. + + Returns two lists. The first list contains the first entry from + ``addrinfo`` and all others with the same family, and the + second list contains all other addresses (normally one list will + be AF_INET and the other AF_INET6, although non-standard resolvers + may return additional families). 
+ """ + primary = [] + secondary = [] + primary_af = addrinfo[0][0] + for af, addr in addrinfo: + if af == primary_af: + primary.append((af, addr)) + else: + secondary.append((af, addr)) + return primary, secondary + + def start(self, timeout=_INITIAL_CONNECT_TIMEOUT): + self.try_connect(iter(self.primary_addrs)) + self.set_timout(timeout) + return self.future + + def try_connect(self, addrs): + try: + af, addr = next(addrs) + except StopIteration: + # We've reached the end of our queue, but the other queue + # might still be working. Send a final error on the future + # only when both queues are finished. + if self.remaining == 0 and not self.future.done(): + self.future.set_exception(self.last_error or + IOError("connection failed")) + return + future = self.connect(af, addr) + future.add_done_callback(functools.partial(self.on_connect_done, + addrs, af, addr)) + + def on_connect_done(self, addrs, af, addr, future): + self.remaining -= 1 + try: + stream = future.result() + except Exception as e: + if self.future.done(): + return + # Error: try again (but remember what happened so we have an + # error to raise in the end) + self.last_error = e + self.try_connect(addrs) + if self.timeout is not None: + # If the first attempt failed, don't wait for the + # timeout to try an address from the secondary queue. + self.on_timeout() + return + self.clear_timeout() + if self.future.done(): + # This is a late arrival; just drop it. + stream.close() + else: + self.future.set_result((af, addr, stream)) + + def set_timout(self, timeout): + self.timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, + self.on_timeout) + + def on_timeout(self): + self.timeout = None + self.try_connect(iter(self.secondary_addrs)) + + def clear_timeout(self): + if self.timeout is not None: + self.io_loop.remove_timeout(self.timeout) + + +class TCPClient(object): + """A non-blocking TCP connection factory. 
+ """ + def __init__(self, resolver=None, io_loop=None): + self.io_loop = io_loop or IOLoop.current() + if resolver is not None: + self.resolver = resolver + self._own_resolver = False + else: + self.resolver = Resolver(io_loop=io_loop) + self._own_resolver = True + + def close(self): + if self._own_resolver: + self.resolver.close() + + @gen.coroutine + def connect(self, host, port, af=socket.AF_UNSPEC, ssl_options=None, + max_buffer_size=None): + """Connect to the given host and port. + + Asynchronously returns an `.IOStream` (or `.SSLIOStream` if + ``ssl_options`` is not None). + """ + addrinfo = yield self.resolver.resolve(host, port, af) + connector = _Connector( + addrinfo, self.io_loop, + functools.partial(self._create_stream, max_buffer_size)) + af, addr, stream = yield connector.start() + # TODO: For better performance we could cache the (af, addr) + # information here and re-use it on sbusequent connections to + # the same host. (http://tools.ietf.org/html/rfc6555#section-4.2) + if ssl_options is not None: + stream = yield stream.start_tls(False, ssl_options=ssl_options, + server_hostname=host) + raise gen.Return(stream) + + def _create_stream(self, max_buffer_size, af, addr): + # Always connect in plaintext; we'll convert to ssl if necessary + # after one connection has completed. 
+ stream = IOStream(socket.socket(af), + io_loop=self.io_loop, + max_buffer_size=max_buffer_size) + return stream.connect(addr) diff --git a/libs/tornado/tcpserver.py b/libs/tornado/tcpserver.py index c0773732..427acec5 100755 --- a/libs/tornado/tcpserver.py +++ b/libs/tornado/tcpserver.py @@ -20,13 +20,19 @@ from __future__ import absolute_import, division, print_function, with_statement import errno import os import socket -import ssl from tornado.log import app_log from tornado.ioloop import IOLoop from tornado.iostream import IOStream, SSLIOStream from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket from tornado import process +from tornado.util import errno_from_exception + +try: + import ssl +except ImportError: + # ssl is not available on Google App Engine. + ssl = None class TCPServer(object): @@ -81,13 +87,15 @@ class TCPServer(object): .. versionadded:: 3.1 The ``max_buffer_size`` argument. """ - def __init__(self, io_loop=None, ssl_options=None, max_buffer_size=None): + def __init__(self, io_loop=None, ssl_options=None, max_buffer_size=None, + read_chunk_size=None): self.io_loop = io_loop self.ssl_options = ssl_options self._sockets = {} # fd -> socket object self._pending_sockets = [] self._started = False self.max_buffer_size = max_buffer_size + self.read_chunk_size = read_chunk_size # Verify the SSL options. Otherwise we don't get errors until clients # connect. This doesn't verify that the keys are legitimate, but # catch another error later on (AttributeError in # SSLIOStream._do_ssl_handshake). # To test this behavior, try nmap with the -sT flag.
- # https://github.com/facebook/tornado/pull/750 - if err.args[0] in (errno.ECONNABORTED, errno.EINVAL): + # https://github.com/tornadoweb/tornado/pull/750 + if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL): return connection.close() else: raise try: if self.ssl_options is not None: - stream = SSLIOStream(connection, io_loop=self.io_loop, max_buffer_size=self.max_buffer_size) + stream = SSLIOStream(connection, io_loop=self.io_loop, + max_buffer_size=self.max_buffer_size, + read_chunk_size=self.read_chunk_size) else: - stream = IOStream(connection, io_loop=self.io_loop, max_buffer_size=self.max_buffer_size) + stream = IOStream(connection, io_loop=self.io_loop, + max_buffer_size=self.max_buffer_size, + read_chunk_size=self.read_chunk_size) self.handle_stream(stream, address) except Exception: app_log.error("Error in connection callback", exc_info=True) diff --git a/libs/tornado/template.py b/libs/tornado/template.py index db5a528d..4dcec5d5 100755 --- a/libs/tornado/template.py +++ b/libs/tornado/template.py @@ -180,7 +180,7 @@ with ``{# ... #}``. ``{% set *x* = *y* %}`` Sets a local variable. -``{% try %}...{% except %}...{% finally %}...{% else %}...{% end %}`` +``{% try %}...{% except %}...{% else %}...{% finally %}...{% end %}`` Same as the python ``try`` statement. ``{% while *condition* %}... 
{% end %}`` @@ -367,10 +367,9 @@ class Loader(BaseLoader): def _create_template(self, name): path = os.path.join(self.root, name) - f = open(path, "rb") - template = Template(f.read(), name=name, loader=self) - f.close() - return template + with open(path, "rb") as f: + template = Template(f.read(), name=name, loader=self) + return template class DictLoader(BaseLoader): @@ -785,7 +784,7 @@ def _parse(reader, template, in_block=None, in_loop=None): if allowed_parents is not None: if not in_block: raise ParseError("%s outside %s block" % - (operator, allowed_parents)) + (operator, allowed_parents)) if in_block not in allowed_parents: raise ParseError("%s block cannot be attached to %s block" % (operator, in_block)) body.chunks.append(_IntermediateControlBlock(contents, line)) diff --git a/libs/tornado/testing.py b/libs/tornado/testing.py index 8355dcfc..dc30e94f 100755 --- a/libs/tornado/testing.py +++ b/libs/tornado/testing.py @@ -17,7 +17,7 @@ try: from tornado.httpclient import AsyncHTTPClient from tornado.httpserver import HTTPServer from tornado.simple_httpclient import SimpleAsyncHTTPClient - from tornado.ioloop import IOLoop + from tornado.ioloop import IOLoop, TimeoutError from tornado import netutil except ImportError: # These modules are not importable on app engine. Parts of this module @@ -38,6 +38,7 @@ import re import signal import socket import sys +import types try: from cStringIO import StringIO # py2 @@ -48,10 +49,16 @@ except ImportError: # (either py27+ or unittest2) so tornado.test.util enforces # this requirement, but for other users of tornado.testing we want # to allow the older version if unitest2 is not available. -try: - import unittest2 as unittest -except ImportError: +if sys.version_info >= (3,): + # On python 3, mixing unittest2 and unittest (including doctest) + # doesn't seem to work, so always use unittest. import unittest +else: + # On python 2, prefer unittest2 when available. 
+ try: + import unittest2 as unittest + except ImportError: + import unittest _next_port = 10000 @@ -95,6 +102,36 @@ def get_async_test_timeout(): return 5 +class _TestMethodWrapper(object): + """Wraps a test method to raise an error if it returns a value. + + This is mainly used to detect undecorated generators (if a test + method yields it must use a decorator to consume the generator), + but will also detect other kinds of return values (these are not + necessarily errors, but we alert anyway since there is no good + reason to return a value from a test. + """ + def __init__(self, orig_method): + self.orig_method = orig_method + + def __call__(self): + result = self.orig_method() + if isinstance(result, types.GeneratorType): + raise TypeError("Generator test methods should be decorated with " + "tornado.testing.gen_test") + elif result is not None: + raise ValueError("Return value from test method ignored: %r" % + result) + + def __getattr__(self, name): + """Proxy all unknown attributes to the original method. + + This is important for some of the decorators in the `unittest` + module, such as `unittest.skipIf`. + """ + return getattr(self.orig_method, name) + + class AsyncTestCase(unittest.TestCase): """`~unittest.TestCase` subclass for testing `.IOLoop`-based asynchronous code. @@ -157,14 +194,20 @@ class AsyncTestCase(unittest.TestCase): self.assertIn("FriendFeed", response.body) self.stop() """ - def __init__(self, *args, **kwargs): - super(AsyncTestCase, self).__init__(*args, **kwargs) + def __init__(self, methodName='runTest', **kwargs): + super(AsyncTestCase, self).__init__(methodName, **kwargs) self.__stopped = False self.__running = False self.__failure = None self.__stop_args = None self.__timeout = None + # It's easy to forget the @gen_test decorator, but if you do + # the test will silently be ignored because nothing will consume + # the generator. Replace the test method with a wrapper that will + # make sure it's not an undecorated generator. 
+ setattr(self, methodName, _TestMethodWrapper(getattr(self, methodName))) + def setUp(self): super(AsyncTestCase, self).setUp() self.io_loop = self.get_new_ioloop() @@ -352,6 +395,7 @@ class AsyncHTTPTestCase(AsyncTestCase): def tearDown(self): self.http_server.stop() + self.io_loop.run_sync(self.http_server.close_all_connections) if (not IOLoop.initialized() or self.http_client.io_loop is not IOLoop.instance()): self.http_client.close() @@ -414,18 +458,50 @@ def gen_test(func=None, timeout=None): .. versionadded:: 3.1 The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment variable. + + .. versionchanged:: 3.3 + The wrapper now passes along ``*args, **kwargs`` so it can be used + on functions with arguments. """ if timeout is None: timeout = get_async_test_timeout() def wrap(f): - f = gen.coroutine(f) - + # Stack up several decorators to allow us to access the generator + # object itself. In the innermost wrapper, we capture the generator + # and save it in an attribute of self. Next, we run the wrapped + # function through @gen.coroutine. Finally, the coroutine is + # wrapped again to make it synchronous with run_sync. + # + # This is a good case study arguing for either some sort of + # extensibility in the gen decorators or cancellation support. @functools.wraps(f) - def wrapper(self): - return self.io_loop.run_sync( - functools.partial(f, self), timeout=timeout) - return wrapper + def pre_coroutine(self, *args, **kwargs): + result = f(self, *args, **kwargs) + if isinstance(result, types.GeneratorType): + self._test_generator = result + else: + self._test_generator = None + return result + + coro = gen.coroutine(pre_coroutine) + + @functools.wraps(coro) + def post_coroutine(self, *args, **kwargs): + try: + return self.io_loop.run_sync( + functools.partial(coro, self, *args, **kwargs), + timeout=timeout) + except TimeoutError as e: + # run_sync raises an error with an unhelpful traceback. 
+ # If we throw it back into the generator the stack trace + # will be replaced by the point where the test is stopped. + self._test_generator.throw(e) + # In case the test contains an overly broad except clause, + # we may get back here. In this case re-raise the original + # exception, which is better than nothing. + raise + return post_coroutine if func is not None: # Used like: diff --git a/libs/tornado/util.py b/libs/tornado/util.py index a2fba779..49eea2c3 100755 --- a/libs/tornado/util.py +++ b/libs/tornado/util.py @@ -12,11 +12,19 @@ and `.Resolver`. from __future__ import absolute_import, division, print_function, with_statement +import array import inspect +import os import sys import zlib +try: + xrange # py2 +except NameError: + xrange = range # py3 + + class ObjectDict(dict): """Makes a dictionary behave like an object, with attribute-style access. """ @@ -33,7 +41,7 @@ class ObjectDict(dict): class GzipDecompressor(object): """Streaming gzip decompressor. - The interface is like that of `zlib.decompressobj` (without the + The interface is like that of `zlib.decompressobj` (without some of the optional arguments, but it understands gzip headers and checksums. """ def __init__(self): @@ -42,14 +50,24 @@ class GzipDecompressor(object): # This works on cpython and pypy, but not jython. self.decompressobj = zlib.decompressobj(16 + zlib.MAX_WBITS) - def decompress(self, value): + def decompress(self, value, max_length=None): """Decompress a chunk, returning newly-available data. Some data may be buffered for later processing; `flush` must be called when there is no more input data to ensure that all data was processed. + + If ``max_length`` is given, some input data may be left over + in ``unconsumed_tail``; you must retrieve this value and pass + it back to a future call to `decompress` if it is not empty. 
""" - return self.decompressobj.decompress(value) + return self.decompressobj.decompress(value, max_length) + + @property + def unconsumed_tail(self): + """Returns the unconsumed portion left over + """ + return self.decompressobj.unconsumed_tail def flush(self): """Return any remaining buffered data not yet returned by decompress. @@ -132,6 +150,24 @@ def exec_in(code, glob, loc=None): """) +def errno_from_exception(e): + """Provides the errno from an Exception object. + + There are cases that the errno attribute was not set so we pull + the errno out of the args but if someone instatiates an Exception + without any args you will get a tuple error. So this function + abstracts all that behavior to give you a safe way to get the + errno. + """ + + if hasattr(e, 'errno'): + return e.errno + elif e.args: + return e.args[0] + else: + return None + + class Configurable(object): """Base class for configurable interfaces. @@ -243,6 +279,16 @@ class ArgReplacer(object): # Not a positional parameter self.arg_pos = None + def get_old_value(self, args, kwargs, default=None): + """Returns the old value of the named argument without replacing it. + + Returns ``default`` if the argument is not present. + """ + if self.arg_pos is not None and len(args) > self.arg_pos: + return args[self.arg_pos] + else: + return kwargs.get(self.name, default) + def replace(self, new_value, args, kwargs): """Replace the named argument in ``args, kwargs`` with ``new_value``. @@ -265,6 +311,41 @@ class ArgReplacer(object): return old_value, args, kwargs +def _websocket_mask_python(mask, data): + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length. + Returns a `bytes` object of the same length as `data` with the mask applied + as specified in section 5.3 of RFC 6455. + + This pure-python implementation may be replaced by an optimized version when available. 
+ """ + mask = array.array("B", mask) + unmasked = array.array("B", data) + for i in xrange(len(data)): + unmasked[i] = unmasked[i] ^ mask[i % 4] + if hasattr(unmasked, 'tobytes'): + # tostring was deprecated in py32. It hasn't been removed, + # but since we turn on deprecation warnings in our tests + # we need to use the right one. + return unmasked.tobytes() + else: + return unmasked.tostring() + +if (os.environ.get('TORNADO_NO_EXTENSION') or + os.environ.get('TORNADO_EXTENSION') == '0'): + # These environment variables exist to make it easier to do performance + # comparisons; they are not guaranteed to remain supported in the future. + _websocket_mask = _websocket_mask_python +else: + try: + from tornado.speedups import websocket_mask as _websocket_mask + except ImportError: + if os.environ.get('TORNADO_EXTENSION') == '1': + raise + _websocket_mask = _websocket_mask_python + + def doctests(): import doctest return doctest.DocTestSuite() diff --git a/libs/tornado/web.py b/libs/tornado/web.py index b22b11fe..dd2b5ef5 100755 --- a/libs/tornado/web.py +++ b/libs/tornado/web.py @@ -72,17 +72,18 @@ import time import tornado import traceback import types -import uuid -from tornado.concurrent import Future +from tornado.concurrent import Future, is_future from tornado import escape +from tornado import gen from tornado import httputil +from tornado import iostream from tornado import locale from tornado.log import access_log, app_log, gen_log from tornado import stack_context from tornado import template from tornado.escape import utf8, _unicode -from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type +from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type, _websocket_mask try: from io import BytesIO # python 3 @@ -105,6 +106,39 @@ except ImportError: from urllib.parse import urlencode # py3 +MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1 +"""The oldest signed value version supported by this version of 
Tornado. + +Signed values older than this version cannot be decoded. + +.. versionadded:: 3.2.1 +""" + +MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2 +"""The newest signed value version supported by this version of Tornado. + +Signed values newer than this version cannot be decoded. + +.. versionadded:: 3.2.1 +""" + +DEFAULT_SIGNED_VALUE_VERSION = 2 +"""The signed value version produced by `.RequestHandler.create_signed_value`. + +May be overridden by passing a ``version`` keyword argument. + +.. versionadded:: 3.2.1 +""" + +DEFAULT_SIGNED_VALUE_MIN_VERSION = 1 +"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`. + +May be overrided by passing a ``min_version`` keyword argument. + +.. versionadded:: 3.2.1 +""" + + class RequestHandler(object): """Subclass this class and define `get()` or `post()` to make a handler. @@ -128,6 +162,7 @@ class RequestHandler(object): self._finished = False self._auto_finish = True self._transforms = None # will be set in _execute + self._prepared_future = None self.path_args = None self.path_kwargs = None self.ui = ObjectDict((n, self._ui_method(m)) for n, m in @@ -141,10 +176,7 @@ class RequestHandler(object): application.ui_modules) self.ui["modules"] = self.ui["_tt_modules"] self.clear() - # Check since connection is not available in WSGI - if getattr(self.request, "connection", None): - self.request.connection.set_close_callback( - self.on_connection_close) + self.request.connection.set_close_callback(self.on_connection_close) self.initialize(**kwargs) def initialize(self): @@ -235,7 +267,9 @@ class RequestHandler(object): may not be called promptly after the end user closes their connection. 
""" - pass + if _has_stream_request_body(self.__class__): + if not self.request.body.done(): + self.request.body.set_exception(iostream.StreamClosedError()) def clear(self): """Resets all headers and content for this response.""" @@ -245,12 +279,6 @@ class RequestHandler(object): "Date": httputil.format_timestamp(time.time()), }) self.set_default_headers() - if (not self.request.supports_http_1_1() and - getattr(self.request, 'connection', None) and - not self.request.connection.no_keep_alive): - conn_header = self.request.headers.get("Connection") - if conn_header and (conn_header.lower() == "keep-alive"): - self._headers["Connection"] = "Keep-Alive" self._write_buffer = [] self._status_code = 200 self._reason = httputil.responses[200] @@ -455,7 +483,7 @@ class RequestHandler(object): @property def cookies(self): - """An alias for `self.request.cookies <.httpserver.HTTPRequest.cookies>`.""" + """An alias for `self.request.cookies <.httputil.HTTPServerRequest.cookies>`.""" return self.request.cookies def get_cookie(self, name, default=None): @@ -524,7 +552,8 @@ class RequestHandler(object): for name in self.request.cookies: self.clear_cookie(name, path=path, domain=domain) - def set_secure_cookie(self, name, value, expires_days=30, **kwargs): + def set_secure_cookie(self, name, value, expires_days=30, version=None, + **kwargs): """Signs and timestamps a cookie so it cannot be forged. You must specify the ``cookie_secret`` setting in your Application @@ -539,32 +568,50 @@ class RequestHandler(object): Secure cookies may contain arbitrary byte values, not just unicode strings (unlike regular cookies) + + .. versionchanged:: 3.2.1 + + Added the ``version`` argument. Introduced cookie version 2 + and made it the default. 
""" - self.set_cookie(name, self.create_signed_value(name, value), + self.set_cookie(name, self.create_signed_value(name, value, + version=version), expires_days=expires_days, **kwargs) - def create_signed_value(self, name, value): + def create_signed_value(self, name, value, version=None): """Signs and timestamps a string so it cannot be forged. Normally used via set_secure_cookie, but provided as a separate method for non-cookie uses. To decode a value not stored as a cookie use the optional value argument to get_secure_cookie. + + .. versionchanged:: 3.2.1 + + Added the ``version`` argument. Introduced cookie version 2 + and made it the default. """ self.require_setting("cookie_secret", "secure cookies") return create_signed_value(self.application.settings["cookie_secret"], - name, value) + name, value, version=version) - def get_secure_cookie(self, name, value=None, max_age_days=31): + def get_secure_cookie(self, name, value=None, max_age_days=31, + min_version=None): """Returns the given signed cookie if it validates, or None. The decoded cookie value is returned as a byte string (unlike `get_cookie`). + + .. versionchanged:: 3.2.1 + + Added the ``min_version`` argument. Introduced cookie version 2; + both versions 1 and 2 are accepted by default. """ self.require_setting("cookie_secret", "secure cookies") if value is None: value = self.get_cookie(name) return decode_signed_value(self.application.settings["cookie_secret"], - name, value, max_age_days=max_age_days) + name, value, max_age_days=max_age_days, + min_version=min_version) def redirect(self, url, permanent=False, status=None): """Sends a redirect to the given (optionally relative) URL. @@ -598,12 +645,15 @@ class RequestHandler(object): Note that lists are not converted to JSON because of a potential cross-site security vulnerability. All JSON output should be wrapped in a dictionary. 
More details at - http://haacked.com/archive/2008/11/20/anatomy-of-a-subtle-json-vulnerability.aspx + http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and + https://github.com/facebook/tornado/issues/1009 """ if self._finished: raise RuntimeError("Cannot write() after finish(). May be caused " "by using async operations without the " "@asynchronous decorator.") + if not isinstance(chunk, (bytes_type, unicode_type, dict)): + raise TypeError("write() only accepts bytes, unicode, and dict objects") if isinstance(chunk, dict): chunk = escape.json_encode(chunk) self.set_header("Content-Type", "application/json; charset=UTF-8") @@ -769,14 +819,10 @@ class RequestHandler(object): Note that only one flush callback can be outstanding at a time; if another flush occurs before the previous flush's callback has been run, the previous callback will be discarded. - """ - if self.application._wsgi: - # WSGI applications cannot usefully support flush, so just make - # it a no-op (and run the callback immediately). - if callback is not None: - callback() - return + .. versionchanged:: 3.3 + Now returns a `.Future` if no callback is given. + """ chunk = b"".join(self._write_buffer) self._write_buffer = [] if not self._headers_written: @@ -785,19 +831,32 @@ class RequestHandler(object): self._status_code, self._headers, chunk = \ transform.transform_first_chunk( self._status_code, self._headers, chunk, include_footers) - headers = self._generate_headers() + # Ignore the chunk and only write the headers for HEAD requests + if self.request.method == "HEAD": + chunk = None + + # Finalize the cookie headers (which have been stored in a side + # object so an outgoing cookie could be overwritten before it + # is sent). 
+ if hasattr(self, "_new_cookie"): + for cookie in self._new_cookie.values(): + self.add_header("Set-Cookie", cookie.OutputString(None)) + + start_line = httputil.ResponseStartLine(self.request.version, + self._status_code, + self._reason) + return self.request.connection.write_headers( + start_line, self._headers, chunk, callback=callback) else: for transform in self._transforms: chunk = transform.transform_chunk(chunk, include_footers) - headers = b"" - - # Ignore the chunk and only write the headers for HEAD requests - if self.request.method == "HEAD": - if headers: - self.request.write(headers, callback=callback) - return - - self.request.write(headers + chunk, callback=callback) + # Ignore the chunk and only write the headers for HEAD requests + if self.request.method != "HEAD": + return self.request.connection.write(chunk, callback=callback) + else: + future = Future() + future.set_result(None) + return future def finish(self, chunk=None): """Finishes this response, ending the HTTP request.""" @@ -833,10 +892,9 @@ class RequestHandler(object): # are keepalive connections) self.request.connection.set_close_callback(None) - if not self.application._wsgi: - self.flush(include_footers=True) - self.request.finish() - self._log() + self.flush(include_footers=True) + self.request.finish() + self._log() self._finished = True self.on_finish() # Break up a reference cycle between this handler and the @@ -1017,16 +1075,87 @@ class RequestHandler(object): as a potential forgery. See http://en.wikipedia.org/wiki/Cross-site_request_forgery + + .. versionchanged:: 3.2.2 + The xsrf token will now be have a random mask applied in every + request, which makes it safe to include the token in pages + that are compressed. See http://breachattack.com for more + information on the issue fixed by this change. Old (version 1) + cookies will be converted to version 2 when this method is called + unless the ``xsrf_cookie_version`` `Application` setting is + set to 1. 
""" if not hasattr(self, "_xsrf_token"): - token = self.get_cookie("_xsrf") - if not token: - token = binascii.b2a_hex(uuid.uuid4().bytes) + version, token, timestamp = self._get_raw_xsrf_token() + output_version = self.settings.get("xsrf_cookie_version", 2) + if output_version == 1: + self._xsrf_token = binascii.b2a_hex(token) + elif output_version == 2: + mask = os.urandom(4) + self._xsrf_token = b"|".join([ + b"2", + binascii.b2a_hex(mask), + binascii.b2a_hex(_websocket_mask(mask, token)), + utf8(str(int(timestamp)))]) + else: + raise ValueError("unknown xsrf cookie version %d", + output_version) + if version is None: expires_days = 30 if self.current_user else None - self.set_cookie("_xsrf", token, expires_days=expires_days) - self._xsrf_token = token + self.set_cookie("_xsrf", self._xsrf_token, + expires_days=expires_days) return self._xsrf_token + def _get_raw_xsrf_token(self): + """Read or generate the xsrf token in its raw form. + + The raw_xsrf_token is a tuple containing: + + * version: the version of the cookie from which this token was read, + or None if we generated a new token in this request. + * token: the raw token data; random (non-ascii) bytes. + * timestamp: the time this token was generated (will not be accurate + for version 1 cookies) + """ + if not hasattr(self, '_raw_xsrf_token'): + cookie = self.get_cookie("_xsrf") + if cookie: + version, token, timestamp = self._decode_xsrf_token(cookie) + else: + version, token, timestamp = None, None, None + if token is None: + version = None + token = os.urandom(16) + timestamp = time.time() + self._raw_xsrf_token = (version, token, timestamp) + return self._raw_xsrf_token + + def _decode_xsrf_token(self, cookie): + """Convert a cookie string into a the tuple form returned by + _get_raw_xsrf_token. 
+ """ + m = _signed_value_version_re.match(utf8(cookie)) + if m: + version = int(m.group(1)) + if version == 2: + _, mask, masked_token, timestamp = cookie.split("|") + mask = binascii.a2b_hex(utf8(mask)) + token = _websocket_mask( + mask, binascii.a2b_hex(utf8(masked_token))) + timestamp = int(timestamp) + return version, token, timestamp + else: + # Treat unknown versions as not present instead of failing. + return None, None, None + elif len(cookie) == 32: + version = 1 + token = binascii.a2b_hex(utf8(cookie)) + # We don't have a usable timestamp in older versions. + timestamp = int(time.time()) + return (version, token, timestamp) + else: + return None, None, None + def check_xsrf_cookie(self): """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument. @@ -1047,13 +1176,19 @@ class RequestHandler(object): information please see http://www.djangoproject.com/weblog/2011/feb/08/security/ http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails + + .. versionchanged:: 3.2.2 + Added support for cookie version 2. Both versions 1 and 2 are + supported. 
""" token = (self.get_argument("_xsrf", None) or self.request.headers.get("X-Xsrftoken") or self.request.headers.get("X-Csrftoken")) if not token: raise HTTPError(403, "'_xsrf' argument missing from POST") - if self.xsrf_token != token: + _, token, _ = self._decode_xsrf_token(token) + _, expected_token, _ = self._get_raw_xsrf_token() + if not _time_independent_equals(utf8(token), utf8(expected_token)): raise HTTPError(403, "XSRF cookie does not match POST argument") def xsrf_form_html(self): @@ -1194,6 +1329,7 @@ class RequestHandler(object): self._handle_request_exception(value) return True + @gen.coroutine def _execute(self, transforms, *args, **kwargs): """Executes this request with the given output transforms.""" self._transforms = transforms @@ -1208,52 +1344,52 @@ class RequestHandler(object): if self.request.method not in ("GET", "HEAD", "OPTIONS") and \ self.application.settings.get("xsrf_cookies"): self.check_xsrf_cookie() - self._when_complete(self.prepare(), self._execute_method) - except Exception as e: - self._handle_request_exception(e) - def _when_complete(self, result, callback): - try: - if result is None: - callback() - elif isinstance(result, Future): - if result.done(): - if result.result() is not None: - raise ValueError('Expected None, got %r' % result.result()) - callback() - else: - # Delayed import of IOLoop because it's not available - # on app engine - from tornado.ioloop import IOLoop - IOLoop.current().add_future( - result, functools.partial(self._when_complete, - callback=callback)) - else: - raise ValueError("Expected Future or None, got %r" % result) - except Exception as e: - self._handle_request_exception(e) + result = self.prepare() + if is_future(result): + result = yield result + if result is not None: + raise TypeError("Expected None, got %r" % result) + if self._prepared_future is not None: + # Tell the Application we've finished with prepare() + # and are ready for the body to arrive. 
+ self._prepared_future.set_result(None) + if self._finished: + return + + if _has_stream_request_body(self.__class__): + # In streaming mode request.body is a Future that signals + # the body has been completely received. The Future has no + # result; the data has been passed to self.data_received + # instead. + try: + yield self.request.body + except iostream.StreamClosedError: + return - def _execute_method(self): - if not self._finished: method = getattr(self, self.request.method.lower()) - self._when_complete(method(*self.path_args, **self.path_kwargs), - self._execute_finish) + result = method(*self.path_args, **self.path_kwargs) + if is_future(result): + result = yield result + if result is not None: + raise TypeError("Expected None, got %r" % result) + if self._auto_finish and not self._finished: + self.finish() + except Exception as e: + self._handle_request_exception(e) + if (self._prepared_future is not None and + not self._prepared_future.done()): + # In case we failed before setting _prepared_future, do it + # now (to unblock the HTTP server). Note that this is not + # in a finally block to avoid GC issues prior to Python 3.4. + self._prepared_future.set_result(None) - def _execute_finish(self): - if self._auto_finish and not self._finished: - self.finish() + def data_received(self, chunk): + """Implement this method to handle streamed request data. - def _generate_headers(self): - reason = self._reason - lines = [utf8(self.request.version + " " + - str(self._status_code) + - " " + reason)] - lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()]) - - if hasattr(self, "_new_cookie"): - for cookie in self._new_cookie.values(): - lines.append(utf8("Set-Cookie: " + cookie.OutputString(None))) - return b"\r\n".join(lines) + b"\r\n\r\n" + Requires the `.stream_request_body` decorator. + """ + raise NotImplementedError() def _log(self): """Logs the current request. 
@@ -1367,8 +1503,6 @@ def asynchronous(method): from tornado.ioloop import IOLoop @functools.wraps(method) def wrapper(self, *args, **kwargs): - if self.application._wsgi: - raise Exception("@asynchronous is not supported for WSGI apps") self._auto_finish = False with stack_context.ExceptionStackContext( self._stack_context_handle_exception): @@ -1395,6 +1529,40 @@ def asynchronous(method): return wrapper +def stream_request_body(cls): + """Apply to `RequestHandler` subclasses to enable streaming body support. + + This decorator implies the following changes: + + * `.HTTPServerRequest.body` is undefined, and body arguments will not + be included in `RequestHandler.get_argument`. + * `RequestHandler.prepare` is called when the request headers have been + read instead of after the entire body has been read. + * The subclass must define a method ``data_received(self, data):``, which + will be called zero or more times as data is available. Note that + if the request has an empty body, ``data_received`` may not be called. + * ``prepare`` and ``data_received`` may return Futures (such as via + ``@gen.coroutine``, in which case the next method will not be called + until those futures have completed. + * The regular HTTP method (``post``, ``put``, etc) will be called after + the entire body has been read. + + There is a subtle interaction between ``data_received`` and asynchronous + ``prepare``: The first call to ``data_recieved`` may occur at any point + after the call to ``prepare`` has returned *or yielded*. 
+ """ + if not issubclass(cls, RequestHandler): + raise TypeError("expected subclass of RequestHandler, got %r", cls) + cls._stream_request_body = True + return cls + + +def _has_stream_request_body(cls): + if not issubclass(cls, RequestHandler): + raise TypeError("expected subclass of RequestHandler, got %r", cls) + return getattr(cls, '_stream_request_body', False) + + def removeslash(method): """Use this decorator to remove trailing slashes from the request path. @@ -1439,7 +1607,7 @@ def addslash(method): return wrapper -class Application(object): +class Application(httputil.HTTPServerConnectionDelegate): """A collection of request handlers that make up a web application. Instances of this class are callable and can be passed directly to @@ -1491,12 +1659,11 @@ class Application(object): """ def __init__(self, handlers=None, default_host="", transforms=None, - wsgi=False, **settings): + **settings): if transforms is None: self.transforms = [] if settings.get("gzip"): self.transforms.append(GZipContentEncoding) - self.transforms.append(ChunkedTransferEncoding) else: self.transforms = transforms self.handlers = [] @@ -1508,7 +1675,6 @@ class Application(object): 'Template': TemplateModule, } self.ui_methods = {} - self._wsgi = wsgi self._load_ui_modules(settings.get("ui_modules", {})) self._load_ui_methods(settings.get("ui_methods", {})) if self.settings.get("static_path"): @@ -1534,7 +1700,7 @@ class Application(object): self.settings.setdefault('serve_traceback', True) # Automatically reload modified modules - if self.settings.get('autoreload') and not wsgi: + if self.settings.get('autoreload'): from tornado import autoreload autoreload.start() @@ -1634,64 +1800,15 @@ class Application(object): except TypeError: pass + def start_request(self, connection): + # Modern HTTPServer interface + return _RequestDispatcher(self, connection) + def __call__(self, request): - """Called by HTTPServer to execute the request.""" - transforms = [t(request) for t in 
self.transforms] - handler = None - args = [] - kwargs = {} - handlers = self._get_host_handlers(request) - if not handlers: - handler = RedirectHandler( - self, request, url="http://" + self.default_host + "/") - else: - for spec in handlers: - match = spec.regex.match(request.path) - if match: - handler = spec.handler_class(self, request, **spec.kwargs) - if spec.regex.groups: - # None-safe wrapper around url_unescape to handle - # unmatched optional groups correctly - def unquote(s): - if s is None: - return s - return escape.url_unescape(s, encoding=None, - plus=False) - # Pass matched groups to the handler. Since - # match.groups() includes both named and unnamed groups, - # we want to use either groups or groupdict but not both. - # Note that args are passed as bytes so the handler can - # decide what encoding to use. - - if spec.regex.groupindex: - kwargs = dict( - (str(k), unquote(v)) - for (k, v) in match.groupdict().items()) - else: - args = [unquote(s) for s in match.groups()] - break - if not handler: - if self.settings.get('default_handler_class'): - handler_class = self.settings['default_handler_class'] - handler_args = self.settings.get( - 'default_handler_args', {}) - else: - handler_class = ErrorHandler - handler_args = dict(status_code=404) - handler = handler_class(self, request, **handler_args) - - # If template cache is disabled (usually in the debug mode), - # re-compile templates and reload static files on every - # request so you don't need to restart to see changes - if not self.settings.get("compiled_template_cache", True): - with RequestHandler._template_loader_lock: - for loader in RequestHandler._template_loaders.values(): - loader.reset() - if not self.settings.get('static_hash_cache', True): - StaticFileHandler.reset() - - handler._execute(transforms, *args, **kwargs) - return handler + # Legacy HTTPServer interface + dispatcher = _RequestDispatcher(self, None) + dispatcher.set_request(request) + return dispatcher.execute() def 
reverse_url(self, name, *args): """Returns a URL path for handler named ``name`` @@ -1728,6 +1845,113 @@ class Application(object): handler._request_summary(), request_time) +class _RequestDispatcher(httputil.HTTPMessageDelegate): + def __init__(self, application, connection): + self.application = application + self.connection = connection + self.request = None + self.chunks = [] + self.handler_class = None + self.handler_kwargs = None + self.path_args = [] + self.path_kwargs = {} + + def headers_received(self, start_line, headers): + self.set_request(httputil.HTTPServerRequest( + connection=self.connection, start_line=start_line, headers=headers)) + if self.stream_request_body: + self.request.body = Future() + return self.execute() + + def set_request(self, request): + self.request = request + self._find_handler() + self.stream_request_body = _has_stream_request_body(self.handler_class) + + def _find_handler(self): + # Identify the handler to use as soon as we have the request. + # Save url path arguments for later. + app = self.application + handlers = app._get_host_handlers(self.request) + if not handlers: + self.handler_class = RedirectHandler + self.handler_kwargs = dict(url="http://" + app.default_host + "/") + return + for spec in handlers: + match = spec.regex.match(self.request.path) + if match: + self.handler_class = spec.handler_class + self.handler_kwargs = spec.kwargs + if spec.regex.groups: + # Pass matched groups to the handler. Since + # match.groups() includes both named and + # unnamed groups, we want to use either groups + # or groupdict but not both. 
+ if spec.regex.groupindex: + self.path_kwargs = dict( + (str(k), _unquote_or_none(v)) + for (k, v) in match.groupdict().items()) + else: + self.path_args = [_unquote_or_none(s) + for s in match.groups()] + return + if app.settings.get('default_handler_class'): + self.handler_class = app.settings['default_handler_class'] + self.handler_kwargs = app.settings.get( + 'default_handler_args', {}) + else: + self.handler_class = ErrorHandler + self.handler_kwargs = dict(status_code=404) + + def data_received(self, data): + if self.stream_request_body: + return self.handler.data_received(data) + else: + self.chunks.append(data) + + def finish(self): + if self.stream_request_body: + self.request.body.set_result(None) + else: + self.request.body = b''.join(self.chunks) + self.request._parse_body() + self.execute() + + def on_connection_close(self): + if self.stream_request_body: + self.handler.on_connection_close() + else: + self.chunks = None + + def execute(self): + # If template cache is disabled (usually in the debug mode), + # re-compile templates and reload static files on every + # request so you don't need to restart to see changes + if not self.application.settings.get("compiled_template_cache", True): + with RequestHandler._template_loader_lock: + for loader in RequestHandler._template_loaders.values(): + loader.reset() + if not self.application.settings.get('static_hash_cache', True): + StaticFileHandler.reset() + + self.handler = self.handler_class(self.application, self.request, + **self.handler_kwargs) + transforms = [t(self.request) for t in self.application.transforms] + + if self.stream_request_body: + self.handler._prepared_future = Future() + # Note that if an exception escapes handler._execute it will be + # trapped in the Future it returns (which we are ignoring here). + # However, that shouldn't happen because _execute has a blanket + # except handler, and we cannot easily access the IOLoop here to + # call add_future. 
+ self.handler._execute(transforms, *self.path_args, **self.path_kwargs) + # If we are streaming the request body, then execute() is finished + # when the handler has prepared to receive the body. If not, + # it doesn't matter when execute() finishes (so we return None) + return self.handler._prepared_future + + class HTTPError(Exception): """An exception that will turn into an HTTP error response. @@ -1886,8 +2110,9 @@ class StaticFileHandler(RequestHandler): cls._static_hashes = {} def head(self, path): - self.get(path, include_body=False) + return self.get(path, include_body=False) + @gen.coroutine def get(self, path, include_body=True): # Set up our path instance variables. self.path = self.parse_url_path(path) @@ -1912,9 +2137,9 @@ class StaticFileHandler(RequestHandler): # the request will be treated as if the header didn't exist. request_range = httputil._parse_request_range(range_header) + size = self.get_content_size() if request_range: start, end = request_range - size = self.get_content_size() if (start is not None and start >= size) or end == 0: # As per RFC 2616 14.35.1, a range is not satisfiable only: if # the first requested byte is equal to or greater than the @@ -1939,18 +2164,26 @@ class StaticFileHandler(RequestHandler): httputil._get_content_range(start, end, size)) else: start = end = None - content = self.get_content(self.absolute_path, start, end) - if isinstance(content, bytes_type): - content = [content] - content_length = 0 - for chunk in content: - if include_body: + + if start is not None and end is not None: + content_length = end - start + elif end is not None: + content_length = end + elif start is not None: + content_length = size - start + else: + content_length = size + self.set_header("Content-Length", content_length) + + if include_body: + content = self.get_content(self.absolute_path, start, end) + if isinstance(content, bytes_type): + content = [content] + for chunk in content: self.write(chunk) - else: - content_length += 
len(chunk) - if not include_body: + yield self.flush() + else: assert self.request.method == "HEAD" - self.set_header("Content-Length", content_length) def compute_etag(self): """Sets the ``Etag`` header based on static url version. @@ -2130,10 +2363,13 @@ class StaticFileHandler(RequestHandler): def get_content_size(self): """Retrieve the total size of the resource at the given path. - This method may be overridden by subclasses. It will only - be called if a partial result is requested from `get_content` + This method may be overridden by subclasses. .. versionadded:: 3.1 + + .. versionchanged:: 3.3 + This method is now always called, instead of only when + partial results are requested. """ stat_result = self._stat() return stat_result[stat.ST_SIZE] @@ -2255,7 +2491,7 @@ class FallbackHandler(RequestHandler): """A `RequestHandler` that wraps another HTTP server callback. The fallback is a callable object that accepts an - `~.httpserver.HTTPRequest`, such as an `Application` or + `~.httputil.HTTPServerRequest`, such as an `Application` or `tornado.wsgi.WSGIContainer`. This is most useful to use both Tornado ``RequestHandlers`` and WSGI in the same server. Typical usage:: @@ -2279,7 +2515,7 @@ class OutputTransform(object): """A transform modifies the result of an HTTP request (e.g., GZip encoding) A new transform instance is created for every request. See the - ChunkedTransferEncoding example below if you want to implement a + GZipContentEncoding example below if you want to implement a new Transform. """ def __init__(self, request): @@ -2296,16 +2532,24 @@ class GZipContentEncoding(OutputTransform): """Applies the gzip content encoding to the response. See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11 + + .. versionchanged:: 3.3 + Now compresses all mime types beginning with ``text/``, instead + of just a whitelist. (the whitelist is still used for certain + non-text mime types). 
""" - CONTENT_TYPES = set([ - "text/plain", "text/html", "text/css", "text/xml", "application/javascript", - "application/x-javascript", "application/xml", "application/atom+xml", - "text/javascript", "application/json", "application/xhtml+xml"]) + # Whitelist of compressible mime types (in addition to any types + # beginning with "text/"). + CONTENT_TYPES = set(["application/javascript", "application/x-javascript", + "application/xml", "application/atom+xml", + "application/json", "application/xhtml+xml"]) MIN_LENGTH = 5 def __init__(self, request): - self._gzipping = request.supports_http_1_1() and \ - "gzip" in request.headers.get("Accept-Encoding", "") + self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "") + + def _compressible_type(self, ctype): + return ctype.startswith('text/') or ctype in self.CONTENT_TYPES def transform_first_chunk(self, status_code, headers, chunk, finishing): if 'Vary' in headers: @@ -2314,7 +2558,7 @@ class GZipContentEncoding(OutputTransform): headers['Vary'] = b'Accept-Encoding' if self._gzipping: ctype = _unicode(headers.get("Content-Type", "")).split(";")[0] - self._gzipping = (ctype in self.CONTENT_TYPES) and \ + self._gzipping = self._compressible_type(ctype) and \ (not finishing or len(chunk) >= self.MIN_LENGTH) and \ (finishing or "Content-Length" not in headers) and \ ("Content-Encoding" not in headers) @@ -2340,42 +2584,16 @@ class GZipContentEncoding(OutputTransform): return chunk -class ChunkedTransferEncoding(OutputTransform): - """Applies the chunked transfer encoding to the response. - - See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1 - """ - def __init__(self, request): - self._chunking = request.supports_http_1_1() - - def transform_first_chunk(self, status_code, headers, chunk, finishing): - # 304 responses have no body (not even a zero-length body), and so - # should not have either Content-Length or Transfer-Encoding headers. 
- if self._chunking and status_code != 304: - # No need to chunk the output if a Content-Length is specified - if "Content-Length" in headers or "Transfer-Encoding" in headers: - self._chunking = False - else: - headers["Transfer-Encoding"] = "chunked" - chunk = self.transform_chunk(chunk, finishing) - return status_code, headers, chunk - - def transform_chunk(self, block, finishing): - if self._chunking: - # Don't write out empty chunks because that means END-OF-STREAM - # with chunked encoding - if block: - block = utf8("%x" % len(block)) + b"\r\n" + block + b"\r\n" - if finishing: - block += b"0\r\n\r\n" - return block - - def authenticated(method): """Decorate methods with this to require that the user be logged in. If the user is not logged in, they will be redirected to the configured `login url `. + + If you configure a login url with a query parameter, Tornado will + assume you know what you're doing and use it as-is. If not, it + will add a `next` parameter so the login page knows where to send + you once you're logged in. """ @functools.wraps(method) def wrapper(self, *args, **kwargs): @@ -2640,29 +2858,103 @@ else: return result == 0 -def create_signed_value(secret, name, value): - timestamp = utf8(str(int(time.time()))) +def create_signed_value(secret, name, value, version=None, clock=None): + if version is None: + version = DEFAULT_SIGNED_VALUE_VERSION + if clock is None: + clock = time.time + timestamp = utf8(str(int(clock()))) value = base64.b64encode(utf8(value)) - signature = _create_signature(secret, name, value, timestamp) - value = b"|".join([value, timestamp, signature]) - return value + if version == 1: + signature = _create_signature_v1(secret, name, value, timestamp) + value = b"|".join([value, timestamp, signature]) + return value + elif version == 2: + # The v2 format consists of a version number and a series of + # length-prefixed fields "%d:%s", the last of which is a + # signature, all separated by pipes. 
All numbers are in + # decimal format with no leading zeros. The signature is an + # HMAC-SHA256 of the whole string up to that point, including + # the final pipe. + # + # The fields are: + # - format version (i.e. 2; no length prefix) + # - key version (currently 0; reserved for future key rotation features) + # - timestamp (integer seconds since epoch) + # - name (not encoded; assumed to be ~alphanumeric) + # - value (base64-encoded) + # - signature (hex-encoded; no length prefix) + def format_field(s): + return utf8("%d:" % len(s)) + utf8(s) + to_sign = b"|".join([ + b"2|1:0", + format_field(timestamp), + format_field(name), + format_field(value), + b'']) + signature = _create_signature_v2(secret, to_sign) + return to_sign + signature + else: + raise ValueError("Unsupported version %d" % version) + +# A leading version number in decimal with no leading zeros, followed by a pipe. +_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$") -def decode_signed_value(secret, name, value, max_age_days=31): +def decode_signed_value(secret, name, value, max_age_days=31, clock=None, min_version=None): + if clock is None: + clock = time.time + if min_version is None: + min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION + if min_version > 2: + raise ValueError("Unsupported min_version %d" % min_version) if not value: return None + + # Figure out what version this is. Version 1 did not include an + # explicit version field and started with arbitrary base64 data, + # which makes this tricky. + value = utf8(value) + m = _signed_value_version_re.match(value) + if m is None: + version = 1 + else: + try: + version = int(m.group(1)) + if version > 999: + # Certain payloads from the version-less v1 format may + # be parsed as valid integers. Due to base64 padding + # restrictions, this can only happen for numbers whose + # length is a multiple of 4, so we can treat all + # numbers up to 999 as versions, and for the rest we + # fall back to v1 format. 
+ version = 1 + except ValueError: + version = 1 + + if version < min_version: + return None + if version == 1: + return _decode_signed_value_v1(secret, name, value, max_age_days, clock) + elif version == 2: + return _decode_signed_value_v2(secret, name, value, max_age_days, clock) + else: + return None + + +def _decode_signed_value_v1(secret, name, value, max_age_days, clock): parts = utf8(value).split(b"|") if len(parts) != 3: return None - signature = _create_signature(secret, name, parts[0], parts[1]) + signature = _create_signature_v1(secret, name, parts[0], parts[1]) if not _time_independent_equals(parts[2], signature): gen_log.warning("Invalid cookie signature %r", value) return None timestamp = int(parts[1]) - if timestamp < time.time() - max_age_days * 86400: + if timestamp < clock() - max_age_days * 86400: gen_log.warning("Expired cookie %r", value) return None - if timestamp > time.time() + 31 * 86400: + if timestamp > clock() + 31 * 86400: # _cookie_signature does not hash a delimiter between the # parts of the cookie, so an attacker could transfer trailing # digits from the payload to the timestamp without altering the @@ -2679,8 +2971,62 @@ def decode_signed_value(secret, name, value, max_age_days=31): return None -def _create_signature(secret, *parts): +def _decode_signed_value_v2(secret, name, value, max_age_days, clock): + def _consume_field(s): + length, _, rest = s.partition(b':') + n = int(length) + field_value = rest[:n] + # In python 3, indexing bytes returns small integers; we must + # use a slice to get a byte string as in python 2. 
+ if rest[n:n + 1] != b'|': + raise ValueError("malformed v2 signed value field") + rest = rest[n + 1:] + return field_value, rest + rest = value[2:] # remove version number + try: + key_version, rest = _consume_field(rest) + timestamp, rest = _consume_field(rest) + name_field, rest = _consume_field(rest) + value_field, rest = _consume_field(rest) + except ValueError: + return None + passed_sig = rest + signed_string = value[:-len(passed_sig)] + expected_sig = _create_signature_v2(secret, signed_string) + if not _time_independent_equals(passed_sig, expected_sig): + return None + if name_field != utf8(name): + return None + timestamp = int(timestamp) + if timestamp < clock() - max_age_days * 86400: + # The signature has expired. + return None + try: + return base64.b64decode(value_field) + except Exception: + return None + + +def _create_signature_v1(secret, *parts): hash = hmac.new(utf8(secret), digestmod=hashlib.sha1) for part in parts: hash.update(utf8(part)) return utf8(hash.hexdigest()) + + +def _create_signature_v2(secret, s): + hash = hmac.new(utf8(secret), digestmod=hashlib.sha256) + hash.update(utf8(s)) + return utf8(hash.hexdigest()) + + +def _unquote_or_none(s): + """None-safe wrapper around url_unescape to handle unamteched optional + groups correctly. + + Note that args are passed as bytes so the handler can decide what + encoding to use. + """ + if s is None: + return s + return escape.url_unescape(s, encoding=None, plus=False) diff --git a/libs/tornado/websocket.py b/libs/tornado/websocket.py index 9bec9bba..c0065c79 100755 --- a/libs/tornado/websocket.py +++ b/libs/tornado/websocket.py @@ -20,7 +20,6 @@ communication between the browser and server. 
from __future__ import absolute_import, division, print_function, with_statement # Author: Jacob Kristhammar, 2010 -import array import base64 import collections import functools @@ -32,14 +31,19 @@ import tornado.escape import tornado.web from tornado.concurrent import TracebackFuture -from tornado.escape import utf8, native_str +from tornado.escape import utf8, native_str, to_unicode from tornado import httpclient, httputil from tornado.ioloop import IOLoop from tornado.iostream import StreamClosedError from tornado.log import gen_log, app_log -from tornado.netutil import Resolver from tornado import simple_httpclient -from tornado.util import bytes_type, unicode_type +from tornado.tcpclient import TCPClient +from tornado.util import bytes_type, unicode_type, _websocket_mask + +try: + from urllib.parse import urlparse # py2 +except ImportError: + from urlparse import urlparse # py3 try: xrange # py2 @@ -108,20 +112,17 @@ class WebSocketHandler(tornado.web.RequestHandler): def __init__(self, application, request, **kwargs): tornado.web.RequestHandler.__init__(self, application, request, **kwargs) - self.stream = request.connection.stream self.ws_connection = None + self.close_code = None + self.close_reason = None - def _execute(self, transforms, *args, **kwargs): + @tornado.web.asynchronous + def get(self, *args, **kwargs): self.open_args = args self.open_kwargs = kwargs - # Websocket only supports GET method - if self.request.method != 'GET': - self.stream.write(tornado.escape.utf8( - "HTTP/1.1 405 Method Not Allowed\r\n\r\n" - )) - self.stream.close() - return + self.stream = self.request.connection.detach() + self.stream.set_close_callback(self.on_connection_close) # Upgrade header should be present and should be equal to WebSocket if self.request.headers.get("Upgrade", "").lower() != 'websocket': @@ -144,9 +145,26 @@ class WebSocketHandler(tornado.web.RequestHandler): self.stream.close() return + # Handle WebSocket Origin naming convention differences # The 
difference between version 8 and 13 is that in 8 the # client sends a "Sec-Websocket-Origin" header and in 13 it's # simply "Origin". + if "Origin" in self.request.headers: + origin = self.request.headers.get("Origin") + else: + origin = self.request.headers.get("Sec-Websocket-Origin", None) + + + # If there was an origin header, check to make sure it matches + # according to check_origin. When the origin is None, we assume it + # did not come from a browser and that it can be passed on. + if origin is not None and not self.check_origin(origin): + self.stream.write(tornado.escape.utf8( + "HTTP/1.1 403 Cross Origin Websockets Disabled\r\n\r\n" + )) + self.stream.close() + return + if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"): self.ws_connection = WebSocketProtocol13(self) self.ws_connection.accept_connection() @@ -160,6 +178,7 @@ class WebSocketHandler(tornado.web.RequestHandler): "Sec-WebSocket-Version: 8\r\n\r\n")) self.stream.close() + def write_message(self, message, binary=False): """Sends the given message to the client of this Web Socket. @@ -220,18 +239,70 @@ class WebSocketHandler(tornado.web.RequestHandler): pass def on_close(self): - """Invoked when the WebSocket is closed.""" + """Invoked when the WebSocket is closed. + + If the connection was closed cleanly and a status code or reason + phrase was supplied, these values will be available as the attributes + ``self.close_code`` and ``self.close_reason``. + + .. versionchanged:: 3.3 + + Added ``close_code`` and ``close_reason`` attributes. + """ pass - def close(self): + def close(self, code=None, reason=None): """Closes this Web Socket. Once the close handshake is successful the socket will be closed. + + ``code`` may be a numeric status code, taken from the values + defined in `RFC 6455 section 7.4.1 + `_. + ``reason`` may be a textual message about why the connection is + closing. 
These values are made available to the client, but are + not otherwise interpreted by the websocket protocol. + + The ``code`` and ``reason`` arguments are ignored in the "draft76" + protocol version. + + .. versionchanged:: 3.3 + + Added the ``code`` and ``reason`` arguments. """ if self.ws_connection: - self.ws_connection.close() + self.ws_connection.close(code, reason) self.ws_connection = None + def check_origin(self, origin): + """Override to enable support for allowing alternate origins. + + The ``origin`` argument is the value of the ``Origin`` HTTP + header, the url responsible for initiating this request. This + method is not called for clients that do not send this header; + such requests are always allowed (because all browsers that + implement WebSockets support this header, and non-browser + clients do not have the same cross-site security concerns). + + Should return True to accept the request or False to reject it. + By default, rejects all requests with an origin on a host other + than this one. + + This is a security protection against cross site scripting attacks on + browsers, since WebSockets are allowed to bypass the usual same-origin + policies and don't use CORS headers. + + .. versionadded:: 3.3 + """ + parsed_origin = urlparse(origin) + origin = parsed_origin.netloc + origin = origin.lower() + + host = self.request.headers.get("Host") + + # Check to see that origin matches host directly, including ports + return origin == host + def allow_draft76(self): """Override to enable support for the older "draft76" protocol. 
@@ -489,7 +560,7 @@ class WebSocketProtocol76(WebSocketProtocol): """Send ping frame.""" raise ValueError("Ping messages not supported by this version of websockets") - def close(self): + def close(self, code=None, reason=None): """Closes the WebSocket connection.""" if not self.server_terminated: if not self.stream.closed(): @@ -739,6 +810,10 @@ class WebSocketProtocol13(WebSocketProtocol): elif opcode == 0x8: # Close self.client_terminated = True + if len(data) >= 2: + self.handler.close_code = struct.unpack('>H', data[:2])[0] + if len(data) > 2: + self.handler.close_reason = to_unicode(data[2:]) self.close() elif opcode == 0x9: # Ping @@ -749,11 +824,19 @@ class WebSocketProtocol13(WebSocketProtocol): else: self._abort() - def close(self): + def close(self, code=None, reason=None): """Closes the WebSocket connection.""" if not self.server_terminated: if not self.stream.closed(): - self._write_frame(True, 0x8, b"") + if code is None and reason is not None: + code = 1000 # "normal closure" status code + if code is None: + close_data = b'' + else: + close_data = struct.pack('>H', code) + if reason is not None: + close_data += utf8(reason) + self._write_frame(True, 0x8, close_data) self.server_terminated = True if self.client_terminated: if self._waiting is not None: @@ -789,18 +872,25 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): 'Sec-WebSocket-Version': '13', }) - self.resolver = Resolver(io_loop=io_loop) + self.tcp_client = TCPClient(io_loop=io_loop) super(WebSocketClientConnection, self).__init__( io_loop, None, request, lambda: None, self._on_http_response, - 104857600, self.resolver) + 104857600, self.tcp_client, 65536) - def close(self): + def close(self, code=None, reason=None): """Closes the websocket connection. + ``code`` and ``reason`` are documented under + `WebSocketHandler.close`. + .. versionadded:: 3.2 + + .. versionchanged:: 3.3 + + Added the ``code`` and ``reason`` arguments. 
""" if self.protocol is not None: - self.protocol.close() + self.protocol.close(code, reason) self.protocol = None def _on_close(self): @@ -816,8 +906,12 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): self.connect_future.set_exception(WebSocketError( "Non-websocket response")) - def _handle_1xx(self, code): - assert code == 101 + def headers_received(self, start_line, headers): + if start_line.code != 101: + return super(WebSocketClientConnection, self).headers_received( + start_line, headers) + + self.headers = headers assert self.headers['Upgrade'].lower() == 'websocket' assert self.headers['Connection'].lower() == 'upgrade' accept = WebSocketProtocol13.compute_accept_value(self.key) @@ -830,6 +924,9 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): self.io_loop.remove_timeout(self._timeout) self._timeout = None + self.stream = self.connection.detach() + self.stream.set_close_callback(self._on_close) + self.connect_future.set_result(self) def write_message(self, message, binary=False): @@ -890,35 +987,3 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None): if callback is not None: io_loop.add_future(conn.connect_future, callback) return conn.connect_future - - -def _websocket_mask_python(mask, data): - """Websocket masking function. - - `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length. - Returns a `bytes` object of the same length as `data` with the mask applied - as specified in section 5.3 of RFC 6455. - - This pure-python implementation may be replaced by an optimized version when available. - """ - mask = array.array("B", mask) - unmasked = array.array("B", data) - for i in xrange(len(data)): - unmasked[i] = unmasked[i] ^ mask[i % 4] - if hasattr(unmasked, 'tobytes'): - # tostring was deprecated in py32. It hasn't been removed, - # but since we turn on deprecation warnings in our tests - # we need to use the right one. 
- return unmasked.tobytes() - else: - return unmasked.tostring() - -if os.environ.get('TORNADO_NO_EXTENSION'): - # This environment variable exists to make it easier to do performance comparisons; - # it's not guaranteed to remain supported in the future. - _websocket_mask = _websocket_mask_python -else: - try: - from tornado.speedups import websocket_mask as _websocket_mask - except ImportError: - _websocket_mask = _websocket_mask_python diff --git a/libs/tornado/wsgi.py b/libs/tornado/wsgi.py index 615f2e1f..47a0590a 100755 --- a/libs/tornado/wsgi.py +++ b/libs/tornado/wsgi.py @@ -20,9 +20,9 @@ WSGI is the Python standard for web servers, and allows for interoperability between Tornado and other Python web frameworks and servers. This module provides WSGI support in two ways: -* `WSGIApplication` is a version of `tornado.web.Application` that can run - inside a WSGI server. This is useful for running a Tornado app on another - HTTP server, such as Google App Engine. See the `WSGIApplication` class +* `WSGIAdapter` converts a `tornado.web.Application` to the WSGI application + interface. This is useful for running a Tornado app on another + HTTP server, such as Google App Engine. See the `WSGIAdapter` class documentation for limitations that apply. * `WSGIContainer` lets you run other WSGI applications and frameworks on the Tornado HTTP server. 
For example, with this class you can mix Django @@ -32,15 +32,14 @@ provides WSGI support in two ways: from __future__ import absolute_import, division, print_function, with_statement import sys -import time -import copy import tornado +from tornado.concurrent import Future from tornado import escape from tornado import httputil from tornado.log import access_log from tornado import web -from tornado.escape import native_str, parse_qs_bytes +from tornado.escape import native_str from tornado.util import bytes_type, unicode_type try: @@ -48,11 +47,6 @@ try: except ImportError: from cStringIO import StringIO as BytesIO # python 2 -try: - import Cookie # py2 -except ImportError: - import http.cookies as Cookie # py3 - try: import urllib.parse as urllib_parse # py3 except ImportError: @@ -83,11 +77,84 @@ else: class WSGIApplication(web.Application): """A WSGI equivalent of `tornado.web.Application`. - `WSGIApplication` is very similar to `tornado.web.Application`, - except no asynchronous methods are supported (since WSGI does not - support non-blocking requests properly). If you call - ``self.flush()`` or other asynchronous methods in your request - handlers running in a `WSGIApplication`, we throw an exception. + .. deprecated: 3.3:: + + Use a regular `.Application` and wrap it in `WSGIAdapter` instead. + """ + def __call__(self, environ, start_response): + return WSGIAdapter(self)(environ, start_response) + + +# WSGI has no facilities for flow control, so just return an already-done +# Future when the interface requires it. 
+_dummy_future = Future() +_dummy_future.set_result(None) + + +class _WSGIConnection(httputil.HTTPConnection): + def __init__(self, method, start_response, context): + self.method = method + self.start_response = start_response + self.context = context + self._write_buffer = [] + self._finished = False + self._expected_content_remaining = None + self._error = None + + def set_close_callback(self, callback): + # WSGI has no facility for detecting a closed connection mid-request, + # so we can simply ignore the callback. + pass + + def write_headers(self, start_line, headers, chunk=None, callback=None): + if self.method == 'HEAD': + self._expected_content_remaining = 0 + elif 'Content-Length' in headers: + self._expected_content_remaining = int(headers['Content-Length']) + else: + self._expected_content_remaining = None + self.start_response( + '%s %s' % (start_line.code, start_line.reason), + [(native_str(k), native_str(v)) for (k, v) in headers.get_all()]) + if chunk is not None: + self.write(chunk, callback) + elif callback is not None: + callback() + return _dummy_future + + def write(self, chunk, callback=None): + if self._expected_content_remaining is not None: + self._expected_content_remaining -= len(chunk) + if self._expected_content_remaining < 0: + self._error = httputil.HTTPOutputException( + "Tried to write more data than Content-Length") + raise self._error + self._write_buffer.append(chunk) + if callback is not None: + callback() + return _dummy_future + + def finish(self): + if (self._expected_content_remaining is not None and + self._expected_content_remaining != 0): + self._error = httputil.HTTPOutputException( + "Tried to write %d bytes less than Content-Length" % + self._expected_content_remaining) + raise self._error + self._finished = True + + +class _WSGIRequestContext(object): + def __init__(self, remote_ip, protocol): + self.remote_ip = remote_ip + self.protocol = protocol + + def __str__(self): + return self.remote_ip + + +class 
WSGIAdapter(object): + """Converts a `tornado.web.Application` instance into a WSGI application. Example usage:: @@ -100,121 +167,83 @@ class WSGIApplication(web.Application): self.write("Hello, world") if __name__ == "__main__": - application = tornado.wsgi.WSGIApplication([ + application = tornado.web.Application([ (r"/", MainHandler), ]) - server = wsgiref.simple_server.make_server('', 8888, application) + wsgi_app = tornado.wsgi.WSGIAdapter(application) + server = wsgiref.simple_server.make_server('', 8888, wsgi_app) server.serve_forever() See the `appengine demo - `_ + `_ for an example of using this module to run a Tornado app on Google App Engine. - WSGI applications use the same `.RequestHandler` class, but not - ``@asynchronous`` methods or ``flush()``. This means that it is - not possible to use `.AsyncHTTPClient`, or the `tornado.auth` or - `tornado.websocket` modules. + In WSGI mode asynchronous methods are not supported. This means + that it is not possible to use `.AsyncHTTPClient`, or the + `tornado.auth` or `tornado.websocket` modules. + + .. 
versionadded:: 3.3 """ - def __init__(self, handlers=None, default_host="", **settings): - web.Application.__init__(self, handlers, default_host, transforms=[], - wsgi=True, **settings) + def __init__(self, application): + if isinstance(application, WSGIApplication): + self.application = lambda request: web.Application.__call__( + application, request) + else: + self.application = application def __call__(self, environ, start_response): - handler = web.Application.__call__(self, HTTPRequest(environ)) - assert handler._finished - reason = handler._reason - status = str(handler._status_code) + " " + reason - headers = list(handler._headers.get_all()) - if hasattr(handler, "_new_cookie"): - for cookie in handler._new_cookie.values(): - headers.append(("Set-Cookie", cookie.OutputString(None))) - start_response(status, - [(native_str(k), native_str(v)) for (k, v) in headers]) - return handler._write_buffer - - -class HTTPRequest(object): - """Mimics `tornado.httpserver.HTTPRequest` for WSGI applications.""" - def __init__(self, environ): - """Parses the given WSGI environment to construct the request.""" - self.method = environ["REQUEST_METHOD"] - self.path = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", ""))) - self.path += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", ""))) - self.uri = self.path - self.arguments = {} - self.query_arguments = {} - self.body_arguments = {} - self.query = environ.get("QUERY_STRING", "") - if self.query: - self.uri += "?" + self.query - self.arguments = parse_qs_bytes(native_str(self.query), - keep_blank_values=True) - self.query_arguments = copy.deepcopy(self.arguments) - self.version = "HTTP/1.1" - self.headers = httputil.HTTPHeaders() + method = environ["REQUEST_METHOD"] + uri = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", ""))) + uri += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", ""))) + if environ.get("QUERY_STRING"): + uri += "?" 
+ environ["QUERY_STRING"] + headers = httputil.HTTPHeaders() if environ.get("CONTENT_TYPE"): - self.headers["Content-Type"] = environ["CONTENT_TYPE"] + headers["Content-Type"] = environ["CONTENT_TYPE"] if environ.get("CONTENT_LENGTH"): - self.headers["Content-Length"] = environ["CONTENT_LENGTH"] + headers["Content-Length"] = environ["CONTENT_LENGTH"] for key in environ: if key.startswith("HTTP_"): - self.headers[key[5:].replace("_", "-")] = environ[key] - if self.headers.get("Content-Length"): - self.body = environ["wsgi.input"].read( - int(self.headers["Content-Length"])) + headers[key[5:].replace("_", "-")] = environ[key] + if headers.get("Content-Length"): + body = environ["wsgi.input"].read( + int(headers["Content-Length"])) else: - self.body = "" - self.protocol = environ["wsgi.url_scheme"] - self.remote_ip = environ.get("REMOTE_ADDR", "") + body = "" + protocol = environ["wsgi.url_scheme"] + remote_ip = environ.get("REMOTE_ADDR", "") if environ.get("HTTP_HOST"): - self.host = environ["HTTP_HOST"] + host = environ["HTTP_HOST"] else: - self.host = environ["SERVER_NAME"] - - # Parse request body - self.files = {} - httputil.parse_body_arguments(self.headers.get("Content-Type", ""), - self.body, self.body_arguments, self.files) - - for k, v in self.body_arguments.items(): - self.arguments.setdefault(k, []).extend(v) - - self._start_time = time.time() - self._finish_time = None - - def supports_http_1_1(self): - """Returns True if this request supports HTTP/1.1 semantics""" - return self.version == "HTTP/1.1" - - @property - def cookies(self): - """A dictionary of Cookie.Morsel objects.""" - if not hasattr(self, "_cookies"): - self._cookies = Cookie.SimpleCookie() - if "Cookie" in self.headers: - try: - self._cookies.load( - native_str(self.headers["Cookie"])) - except Exception: - self._cookies = None - return self._cookies - - def full_url(self): - """Reconstructs the full URL for this request.""" - return self.protocol + "://" + self.host + self.uri - - def 
request_time(self): - """Returns the amount of time it took for this request to execute.""" - if self._finish_time is None: - return time.time() - self._start_time - else: - return self._finish_time - self._start_time + host = environ["SERVER_NAME"] + connection = _WSGIConnection(method, start_response, + _WSGIRequestContext(remote_ip, protocol)) + request = httputil.HTTPServerRequest( + method, uri, "HTTP/1.1", headers=headers, body=body, + host=host, connection=connection) + request._parse_body() + self.application(request) + if connection._error: + raise connection._error + if not connection._finished: + raise Exception("request did not finish synchronously") + return connection._write_buffer class WSGIContainer(object): r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server. + .. warning:: + + WSGI is a *synchronous* interface, while Tornado's concurrency model + is based on single-threaded asynchronous execution. This means that + running a WSGI app with Tornado's `WSGIContainer` is *less scalable* + than running the same app in a multi-threaded WSGI server like + ``gunicorn`` or ``uwsgi``. Use `WSGIContainer` only when there are + benefits to combining Tornado and WSGI in the same process that + outweigh the reduced scalability. + Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to run it. For example:: @@ -281,7 +310,7 @@ class WSGIContainer(object): @staticmethod def environ(request): - """Converts a `tornado.httpserver.HTTPRequest` to a WSGI environment. + """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment. 
""" hostport = request.host.split(":") if len(hostport) == 2: @@ -327,3 +356,6 @@ class WSGIContainer(object): summary = request.method + " " + request.uri + " (" + \ request.remote_ip + ")" log_method("%d %s %.2fms", status_code, summary, request_time) + + +HTTPRequest = httputil.HTTPServerRequest diff --git a/libs/unrar2/__init__.py b/libs/unrar2/__init__.py index fe27cfe1..41b0d71d 100644 --- a/libs/unrar2/__init__.py +++ b/libs/unrar2/__init__.py @@ -21,7 +21,7 @@ # SOFTWARE. """ -pyUnRAR2 is a ctypes based wrapper around the free UnRAR.dll. +pyUnRAR2 is a ctypes based wrapper around the free UnRAR.dll. It is an modified version of Jimmy Retzlaff's pyUnRAR - more simple, stable and foolproof. @@ -45,8 +45,8 @@ if in_windows: from windows import RarFileImplementation else: from unix import RarFileImplementation - - + + import fnmatch, time, weakref class RarInfo(object): @@ -62,7 +62,7 @@ class RarInfo(object): isdir - True if the file is a directory size - size in bytes of the uncompressed file comment - comment associated with the file - + Note - this is not currently intended to be a Python file-like object. """ @@ -74,7 +74,7 @@ class RarInfo(object): self.size = data['size'] self.datetime = data['datetime'] self.comment = data['comment'] - + def __str__(self): @@ -86,7 +86,7 @@ class RarInfo(object): class RarFile(RarFileImplementation): - def __init__(self, archiveName, password=None): + def __init__(self, archiveName, password=None, custom_path = None): """Instantiate the archive. archiveName is the name of the RAR file. @@ -99,7 +99,7 @@ class RarFile(RarFileImplementation): This is a test. """ self.archiveName = archiveName - RarFileImplementation.init(self, password) + RarFileImplementation.init(self, password, custom_path) def __del__(self): self.destruct() @@ -130,31 +130,31 @@ class RarFile(RarFileImplementation): """Read specific files from archive into memory. 
If "condition" is a list of numbers, then return files which have those positions in infolist. If "condition" is a string, then it is treated as a wildcard for names of files to extract. - If "condition" is a function, it is treated as a callback function, which accepts a RarInfo object + If "condition" is a function, it is treated as a callback function, which accepts a RarInfo object and returns boolean True (extract) or False (skip). If "condition" is omitted, all files are returned. - + Returns list of tuples (RarInfo info, str contents) """ checker = condition2checker(condition) return RarFileImplementation.read_files(self, checker) - + def extract(self, condition='*', path='.', withSubpath=True, overwrite=True): """Extract specific files from archive to disk. - + If "condition" is a list of numbers, then extract files which have those positions in infolist. If "condition" is a string, then it is treated as a wildcard for names of files to extract. If "condition" is a function, it is treated as a callback function, which accepts a RarInfo object and returns either boolean True (extract) or boolean False (skip). - DEPRECATED: If "condition" callback returns string (only supported for Windows) - + DEPRECATED: If "condition" callback returns string (only supported for Windows) - that string will be used as a new name to save the file under. If "condition" is omitted, all files are extracted. - + "path" is a directory to extract to "withSubpath" flag denotes whether files are extracted with their full path in the archive. "overwrite" flag denotes whether extracted files will overwrite old ones. Defaults to true. - + Returns list of RarInfos for extracted files.""" checker = condition2checker(condition) return RarFileImplementation.extract(self, checker, path, withSubpath, overwrite) diff --git a/libs/unrar2/unix.py b/libs/unrar2/unix.py index 9ebab40d..91ed4b6c 100644 --- a/libs/unrar2/unix.py +++ b/libs/unrar2/unix.py @@ -21,25 +21,37 @@ # SOFTWARE. 
# Unix version uses unrar command line executable - +import platform +import stat import subprocess import gc - -import os, os.path -import time, re +import os +import os.path +import time +import re from rar_exceptions import * + class UnpackerNotInstalled(Exception): pass rar_executable_cached = None rar_executable_version = None -def call_unrar(params): +osx_unrar = os.path.join(os.path.dirname(__file__), 'unrar') +if os.path.isfile(osx_unrar) and 'darwin' in platform.platform().lower(): + try: + st = os.stat(osx_unrar) + os.chmod(osx_unrar, st.st_mode | stat.S_IEXEC) + except: + pass + +def call_unrar(params, custom_path = None): "Calls rar/unrar command line executable, returns stdout pipe" global rar_executable_cached if rar_executable_cached is None: - for command in ('unrar', 'rar', os.path.join(os.path.dirname(__file__), 'unrar')): + for command in (custom_path, 'unrar', 'rar', osx_unrar): + if not command: continue try: subprocess.Popen([command], stdout = subprocess.PIPE) rar_executable_cached = command @@ -59,10 +71,10 @@ def call_unrar(params): class RarFileImplementation(object): - def init(self, password = None): + def init(self, password = None, custom_path = None): global rar_executable_version self.password = password - + self.custom_path = custom_path stdoutdata, stderrdata = self.call('v', []).communicate() @@ -118,7 +130,7 @@ class RarFileImplementation(object): def call(self, cmd, options = [], files = []): options2 = options + ['p' + self.escaped_password()] soptions = ['-' + x for x in options2] - return call_unrar([cmd] + soptions + ['--', self.archiveName] + files) + return call_unrar([cmd] + soptions + ['--', self.archiveName] + files, self.custom_path) def infoiter(self): diff --git a/libs/unrar2/windows.py b/libs/unrar2/windows.py index e249f8f3..e3d920ff 100644 --- a/libs/unrar2/windows.py +++ b/libs/unrar2/windows.py @@ -174,7 +174,7 @@ class PassiveReader: def __init__(self, usercallback = None): self.buf = [] self.ucb = usercallback 
- + def _callback(self, msg, UserData, P1, P2): if msg == UCM_PROCESSDATA: data = (ctypes.c_char*P2).from_address(P1).raw @@ -183,7 +183,7 @@ class PassiveReader: else: self.buf.append(data) return 1 - + def get_result(self): return ''.join(self.buf) @@ -197,10 +197,10 @@ class RarInfoIterator(object): raise IncorrectRARPassword self.arc.lockStatus = "locked" self.arc.needskip = False - + def __iter__(self): return self - + def next(self): if self.index>0: if self.arc.needskip: @@ -208,9 +208,9 @@ class RarInfoIterator(object): self.res = RARReadHeaderEx(self.arc._handle, ctypes.byref(self.headerData)) if self.res: - raise StopIteration + raise StopIteration self.arc.needskip = True - + data = {} data['index'] = self.index data['filename'] = self.headerData.FileName @@ -224,7 +224,7 @@ class RarInfoIterator(object): self.index += 1 return data - + def __del__(self): self.arc.lockStatus = "finished" @@ -237,7 +237,7 @@ def generate_password_provider(password): class RarFileImplementation(object): - def init(self, password=None): + def init(self, password=None, custom_path = None): self.password = password archiveData = RAROpenArchiveDataEx(ArcNameW=self.archiveName, OpenMode=RAR_OM_EXTRACT) self._handle = RAROpenArchiveEx(ctypes.byref(archiveData)) @@ -254,9 +254,9 @@ class RarFileImplementation(object): if password: RARSetPassword(self._handle, password) - + self.lockStatus = "ready" - + def destruct(self): @@ -287,7 +287,7 @@ class RarFileImplementation(object): self.needskip = False res.append((info, reader.get_result())) return res - + def extract(self, checker, path, withSubpath, overwrite): res = [] @@ -300,7 +300,7 @@ class RarFileImplementation(object): fn = os.path.split(fn)[-1] target = os.path.join(path, fn) else: - raise DeprecationWarning, "Condition callbacks returning strings are deprecated and only supported in Windows" + raise DeprecationWarning, "Condition callbacks returning strings are deprecated and only supported in Windows" target = checkres 
if overwrite or (not os.path.exists(target)): tmpres = RARProcessFile(self._handle, RAR_EXTRACT, None, target)