Merge branch 'develop'

This commit is contained in:
Ruud
2014-10-07 23:09:44 +02:00
17 changed files with 153 additions and 3279 deletions

View File

@@ -181,13 +181,13 @@ class Core(Plugin):
return '%sapi/%s' % (self.createBaseUrl(), Env.setting('api_key'))
def version(self):
ver = fireEvent('updater.info', single = True)
ver = fireEvent('updater.info', single = True) or {'version': {}}
if os.name == 'nt': platf = 'windows'
elif 'Darwin' in platform.platform(): platf = 'osx'
else: platf = 'linux'
return '%s - %s-%s - v2' % (platf, ver.get('version')['type'], ver.get('version')['hash'])
return '%s - %s-%s - v2' % (platf, ver.get('version').get('type') or 'unknown', ver.get('version').get('hash') or 'unknown')
def versionView(self, **kwargs):
return {

View File

@@ -205,19 +205,28 @@ class GitUpdater(BaseUpdater):
def getVersion(self):
if not self.version:
hash = None
date = None
branch = self.branch
try:
output = self.repo.getHead() # Yes, please
log.debug('Git version output: %s', output.hash)
self.version = {
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, self.repo.getCurrentBranch().name or self.branch, output.hash[:8], datetime.fromtimestamp(output.getDate())),
'hash': output.hash[:8],
'date': output.getDate(),
'type': 'git',
'branch': self.repo.getCurrentBranch().name
}
hash = output.hash[:8]
date = output.getDate()
branch = self.repo.getCurrentBranch().name
except Exception as e:
log.error('Failed using GIT updater, running from source, you need to have GIT installed. %s', e)
return 'No GIT'
self.version = {
'repr': 'git:(%s:%s % s) %s (%s)' % (self.repo_user, self.repo_name, branch, hash or 'unknown_hash', datetime.fromtimestamp(date) if date else 'unknown_date'),
'hash': hash,
'date': date,
'type': 'git',
'branch': branch
}
return self.version

View File

@@ -78,12 +78,14 @@ class Transmission(DownloaderBase):
log.error('Failed sending torrent to Transmission')
return False
data = remote_torrent.get('torrent-added') or remote_torrent.get('torrent-duplicate')
# Change settings of added torrents
if torrent_params:
self.trpc.set_torrent(remote_torrent['torrent-added']['hashString'], torrent_params)
self.trpc.set_torrent(data['hashString'], torrent_params)
log.info('Torrent sent to Transmission successfully.')
return self.downloadReturnId(remote_torrent['torrent-added']['hashString'])
return self.downloadReturnId(data['hashString'])
def test(self):
if self.connect() and self.trpc.get_session():

View File

@@ -1,11 +1,10 @@
import traceback
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import simplifyString, toUnicode, ss
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode, ss, tryUrlencode
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.base import MovieProvider
import tmdb3
log = CPLog(__name__)
@@ -13,54 +12,65 @@ autoload = 'TheMovieDb'
class TheMovieDb(MovieProvider):
MAX_EXTRATHUMBS = 4
http_time_between_calls = .35
configuration = {
'images': {
'secure_base_url': 'https://image.tmdb.org/t/p/',
},
}
def __init__(self):
addEvent('info.search', self.search, priority = 3)
addEvent('movie.search', self.search, priority = 3)
addEvent('movie.info', self.getInfo, priority = 3)
addEvent('movie.info_by_tmdb', self.getInfo)
addEvent('app.load', self.config)
# Configure TMDB settings
tmdb3.set_key(self.conf('api_key'))
tmdb3.set_cache('null')
def config(self):
configuration = self.request('configuration')
if configuration:
self.configuration = configuration
def search(self, q, limit = 12):
def search(self, q, limit = 3):
""" Find movie by name """
if self.isDisabled():
return False
search_string = simplifyString(q)
cache_key = 'tmdb.cache.%s.%s' % (search_string, limit)
results = self.getCache(cache_key)
log.debug('Searching for movie: %s', q)
if not results:
log.debug('Searching for movie: %s', q)
raw = None
try:
name_year = fireEvent('scanner.name_year', q, single = True)
raw = self.request('search/movie', {
'query': name_year.get('name', q),
'year': name_year.get('year'),
'search_type': 'ngram' if limit > 1 else 'phrase'
}, return_key = 'results')
except:
log.error('Failed searching TMDB for "%s": %s', (q, traceback.format_exc()))
raw = None
results = []
if raw:
try:
raw = tmdb3.searchMovie(search_string)
except:
log.error('Failed searching TMDB for "%s": %s', (search_string, traceback.format_exc()))
nr = 0
results = []
if raw:
try:
nr = 0
for movie in raw:
parsed_movie = self.parseMovie(movie, extended = False)
results.append(parsed_movie)
for movie in raw:
results.append(self.parseMovie(movie, extended = False))
nr += 1
if nr == limit:
break
nr += 1
if nr == limit:
break
log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])
log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])
self.setCache(cache_key, results)
return results
except SyntaxError as e:
log.error('Failed to parse XML response: %s', e)
return False
return results
except SyntaxError as e:
log.error('Failed to parse XML response: %s', e)
return False
return results
@@ -69,101 +79,89 @@ class TheMovieDb(MovieProvider):
if not identifier:
return {}
cache_key = 'tmdb.cache.%s%s' % (identifier, '.ex' if extended else '')
result = self.getCache(cache_key)
if not result:
try:
log.debug('Getting info: %s', cache_key)
# noinspection PyArgumentList
movie = tmdb3.Movie(identifier)
try: exists = movie.title is not None
except: exists = False
if exists:
result = self.parseMovie(movie, extended = extended)
self.setCache(cache_key, result)
else:
result = {}
except:
log.error('Failed getting info for %s: %s', (identifier, traceback.format_exc()))
result = self.parseMovie({
'id': identifier
}, extended = extended)
return result
def parseMovie(self, movie, extended = True):
cache_key = 'tmdb.cache.%s%s' % (movie.id, '.ex' if extended else '')
movie_data = self.getCache(cache_key)
# Do request, append other items
movie = self.request('movie/%s' % movie.get('id'), {
'append_to_response': 'alternative_titles' + (',images,casts' if extended else '')
})
if not movie_data:
# Images
poster = self.getImage(movie, type = 'poster', size = 'w154')
poster_original = self.getImage(movie, type = 'poster', size = 'original')
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original')
extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original') if extended else []
# Images
poster = self.getImage(movie, type = 'poster', size = 'w154')
poster_original = self.getImage(movie, type = 'poster', size = 'original')
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original')
extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original', n = self.MAX_EXTRATHUMBS, skipfirst = True)
images = {
'poster': [poster] if poster else [],
#'backdrop': [backdrop] if backdrop else [],
'poster_original': [poster_original] if poster_original else [],
'backdrop_original': [backdrop_original] if backdrop_original else [],
'actors': {},
'extra_thumbs': extra_thumbs
}
images = {
'poster': [poster] if poster else [],
#'backdrop': [backdrop] if backdrop else [],
'poster_original': [poster_original] if poster_original else [],
'backdrop_original': [backdrop_original] if backdrop_original else [],
'actors': {},
'extra_thumbs': extra_thumbs
}
# Genres
try:
genres = [genre.get('name') for genre in movie.get('genres', [])]
except:
genres = []
# Genres
try:
genres = [genre.name for genre in movie.genres]
except:
genres = []
# 1900 is the same as None
year = str(movie.get('release_date') or '')[:4]
if not movie.get('release_date') or year == '1900' or year.lower() == 'none':
year = None
# 1900 is the same as None
year = str(movie.releasedate or '')[:4]
if not movie.releasedate or year == '1900' or year.lower() == 'none':
year = None
# Gather actors data
actors = {}
if extended:
# Gather actors data
actors = {}
if extended:
for cast_item in movie.cast:
try:
actors[toUnicode(cast_item.name)] = toUnicode(cast_item.character)
images['actors'][toUnicode(cast_item.name)] = self.getImage(cast_item, type = 'profile', size = 'original')
except:
log.debug('Error getting cast info for %s: %s', (cast_item, traceback.format_exc()))
# Full data
cast = movie.get('casts', {}).get('cast', [])
movie_data = {
'type': 'movie',
'via_tmdb': True,
'tmdb_id': movie.id,
'titles': [toUnicode(movie.title)],
'original_title': movie.originaltitle,
'images': images,
'imdb': movie.imdb,
'runtime': movie.runtime,
'released': str(movie.releasedate),
'year': tryInt(year, None),
'plot': movie.overview,
'genres': genres,
'collection': getattr(movie.collection, 'name', None),
'actor_roles': actors
}
for cast_item in cast:
try:
actors[toUnicode(cast_item.get('name'))] = toUnicode(cast_item.get('character'))
images['actors'][toUnicode(cast_item.get('name'))] = self.getImage(cast_item, type = 'profile', size = 'original')
except:
log.debug('Error getting cast info for %s: %s', (cast_item, traceback.format_exc()))
movie_data = dict((k, v) for k, v in movie_data.items() if v)
movie_data = {
'type': 'movie',
'via_tmdb': True,
'tmdb_id': movie.get('id'),
'titles': [toUnicode(movie.get('title'))],
'original_title': movie.get('original_title'),
'images': images,
'imdb': movie.get('imdb_id'),
'runtime': movie.get('runtime'),
'released': str(movie.get('release_date')),
'year': tryInt(year, None),
'plot': movie.get('overview'),
'genres': genres,
'collection': getattr(movie.get('belongs_to_collection'), 'name', None),
'actor_roles': actors
}
# Add alternative names
if movie_data['original_title'] and movie_data['original_title'] not in movie_data['titles']:
movie_data['titles'].append(movie_data['original_title'])
movie_data = dict((k, v) for k, v in movie_data.items() if v)
if extended:
for alt in movie.alternate_titles:
alt_name = alt.title
if alt_name and alt_name not in movie_data['titles'] and alt_name.lower() != 'none' and alt_name is not None:
movie_data['titles'].append(alt_name)
# Add alternative names
if movie_data['original_title'] and movie_data['original_title'] not in movie_data['titles']:
movie_data['titles'].append(movie_data['original_title'])
# Cache movie parsed
self.setCache(cache_key, movie_data)
# Add alternative titles
alternate_titles = movie.get('alternative_titles', {}).get('titles', [])
for alt in alternate_titles:
alt_name = alt.get('title')
if alt_name and alt_name not in movie_data['titles'] and alt_name.lower() != 'none' and alt_name is not None:
movie_data['titles'].append(alt_name)
return movie_data
@@ -171,36 +169,37 @@ class TheMovieDb(MovieProvider):
image_url = ''
try:
image_url = getattr(movie, type).geturl(size = size)
path = movie.get('%s_path' % type)
image_url = '%s%s%s' % (self.configuration['images']['secure_base_url'], size, path)
except:
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))
return image_url
def getMultImages(self, movie, type = 'backdrops', size = 'original', n = -1, skipfirst = False):
"""
If n < 0, return all images. Otherwise return n images.
If n > len(getattr(movie, type)), then return all images.
If skipfirst is True, then it will skip getattr(movie, type)[0]. This
is because backdrops[0] is typically backdrop.
"""
def getMultImages(self, movie, type = 'backdrops', size = 'original'):
image_urls = []
try:
images = getattr(movie, type)
if n < 0 or n > len(images):
num_images = len(images)
else:
num_images = n
for i in range(int(skipfirst), num_images + int(skipfirst)):
image_urls.append(images[i].geturl(size = size))
for image in movie.get('images', {}).get(type, [])[1:5]:
image_urls.append(self.getImage(image, 'file', size))
except:
log.debug('Failed getting %i %s.%s for "%s"', (n, type, size, ss(str(movie))))
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))
return image_urls
def request(self, call = '', params = {}, return_key = None):
params = dict((k, v) for k, v in params.items() if v)
params = tryUrlencode(params)
url = 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.conf('api_key'), '&%s' % params if params else '')
data = self.getJsonData(url)
if data and return_key and return_key in data:
data = data.get(return_key)
return data
def isDisabled(self):
if self.conf('api_key') == '':
log.error('No API key provided.')

View File

@@ -279,7 +279,7 @@ class Plugin(object):
wait = (last_use - now) + self.http_time_between_calls
if wait > 0:
log.debug('Waiting for %s, %d seconds', (self.getName(), wait))
log.debug('Waiting for %s, %d seconds', (self.getName(), max(1, wait)))
time.sleep(min(wait, 30))
def beforeCall(self, handler):

View File

@@ -1,12 +0,0 @@
#!/usr/bin/env python
from tmdb_api import Configuration, searchMovie, searchMovieWithYear, \
searchPerson, searchStudio, searchList, searchCollection, \
searchSeries, Person, Movie, Collection, Genre, List, \
Series, Studio, Network, Episode, Season, __version__
from request import set_key, set_cache
from locales import get_locale, set_locale
from tmdb_auth import get_session, set_session
from cache_engine import CacheEngine
from tmdb_exceptions import *

View File

@@ -1,130 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: cache.py
# Python Library
# Author: Raymond Wagner
# Purpose: Caching framework to store TMDb API results
#-----------------------
import time
import os
from tmdb_exceptions import *
from cache_engine import Engines
import cache_null
import cache_file
class Cache(object):
    """
    This class implements a cache framework, allowing selection of a
    pluggable engine. The framework stores data in a key/value manner,
    along with a lifetime, after which data will be expired and
    pulled fresh next time it is requested from the cache.

    This class defines a wrapper to be used with query functions. The
    wrapper will automatically cache the inputs and outputs of the
    wrapped function, pulling the output from local storage for
    subsequent calls with those inputs.
    """
    def __init__(self, engine=None, *args, **kwargs):
        # _engine: pluggable storage backend; _data: in-memory view of the
        # cached records; _age: creation time of the newest record imported
        # from the engine (used to ask the engine only for newer records).
        self._engine = None
        self._data = {}
        self._age = 0
        self.configure(engine, *args, **kwargs)

    def _import(self, data=None):
        # Merge records from the engine (or the supplied list) into the
        # in-memory store, oldest first, skipping already-expired entries.
        if data is None:
            data = self._engine.get(self._age)
        for obj in sorted(data, key=lambda x: x.creation):
            if not obj.expired:
                self._data[obj.key] = obj
            # Age advances even past expired records so they are not re-read.
            self._age = max(self._age, obj.creation)

    def _expire(self):
        # Drop expired records from the in-memory store.
        # NOTE(review): deleting from self._data while iterating items() is
        # only safe on Python 2, where items() returns a list snapshot; on
        # Python 3 this would raise RuntimeError.
        for k, v in self._data.items():
            if v.expired:
                del self._data[k]

    def configure(self, engine, *args, **kwargs):
        """Select and configure a storage engine by name ('file' by default)."""
        if engine is None:
            engine = 'file'
        elif engine not in Engines:
            raise TMDBCacheError("Invalid cache engine specified: "+engine)
        self._engine = Engines[engine](self)
        self._engine.configure(*args, **kwargs)

    def put(self, key, data, lifetime=60*60*12):
        """Store data under key with the given lifetime (default: 12 hours)."""
        # pull existing data, so cache will be fresh when written back out
        if self._engine is None:
            raise TMDBCacheError("No cache engine configured")
        self._expire()
        self._import(self._engine.put(key, data, lifetime))

    def get(self, key):
        """Return cached data for key, or None when missing or unreadable."""
        if self._engine is None:
            raise TMDBCacheError("No cache engine configured")
        self._expire()
        if key not in self._data:
            # Not held locally; pull any new records from the engine.
            self._import()
        try:
            return self._data[key].data
        except:
            return None

    def cached(self, callback):
        """
        Returns a decorator that uses a callback to specify the key to use
        for caching the responses from the decorated function.
        """
        return self.Cached(self, callback)

    class Cached( object ):
        # Decorator/descriptor hybrid: wraps a function and caches its
        # results under the key produced by `callback`.
        def __init__(self, cache, callback, func=None, inst=None):
            self.cache = cache
            self.callback = callback
            self.func = func
            self.inst = inst
            if func:
                # Mirror the wrapped function's metadata (manual functools.wraps).
                self.__module__ = func.__module__
                self.__name__ = func.__name__
                self.__doc__ = func.__doc__

        def __call__(self, *args, **kwargs):
            if self.func is None:
                # decorator is waiting to be given a function
                if len(kwargs) or (len(args) != 1):
                    raise TMDBCacheError(
                        'Cache.Cached decorator must be called a single ' +
                        'callable argument before it be used.')
                elif args[0] is None:
                    raise TMDBCacheError(
                        'Cache.Cached decorator called before being given ' +
                        'a function to wrap.')
                elif not callable(args[0]):
                    raise TMDBCacheError(
                        'Cache.Cached must be provided a callable object.')
                return self.__class__(self.cache, self.callback, args[0])
            elif self.inst.lifetime == 0:
                # lifetime of zero means never cache
                return self.func(*args, **kwargs)
            else:
                key = self.callback()
                data = self.cache.get(key)
                if data is None:
                    # Miss: compute, then store using the owner's lifetime
                    # when the owner declares one.
                    data = self.func(*args, **kwargs)
                    if hasattr(self.inst, 'lifetime'):
                        self.cache.put(key, data, self.inst.lifetime)
                    else:
                        self.cache.put(key, data)
                return data

        def __get__(self, inst, owner):
            # Rebind like a method so instance attributes (e.g. lifetime)
            # are reachable through `inst` on later calls.
            if inst is None:
                return self
            func = self.func.__get__(inst, owner)
            callback = self.callback.__get__(inst, owner)
            return self.__class__(self.cache, callback, func, inst)

View File

@@ -1,84 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: cache_engine.py
# Python Library
# Author: Raymond Wagner
# Purpose: Base cache engine class for collecting registered engines
#-----------------------
import time
from weakref import ref
class Engines(object):
    """
    Registry mapping engine identifiers to engine classes. Engines
    register themselves here and are later looked up by name.
    """

    def __init__(self):
        self._registry = {}

    def register(self, engine):
        # An engine is reachable both by its Python class name and by its
        # declared 'name' attribute.
        for alias in (engine.__name__, engine.name):
            self._registry[alias] = engine

    def __getitem__(self, key):
        return self._registry[key]

    def __contains__(self, key):
        return key in self._registry

# The class is immediately replaced by one shared module-level instance;
# all engines register against this singleton.
Engines = Engines()
class CacheEngineType(type):
    """
    Cache Engine Metaclass that registers each newly created engine class
    with the module-level Engines collector for named selection and use.
    """
    def __init__(cls, name, bases, attrs):
        super(CacheEngineType, cls).__init__(name, bases, attrs)
        # The abstract 'CacheEngine' base itself must not be registered.
        if name == 'CacheEngine':
            return
        Engines.register(cls)
class CacheEngine(object):
    """
    Abstract base class for cache storage engines. Concrete subclasses are
    auto-registered with the Engines collector via the CacheEngineType
    metaclass and must override configure/get/put/expire.
    """
    __metaclass__ = CacheEngineType  # Python 2 metaclass hook

    # Identifier used for registry lookup; subclasses override this.
    name = 'unspecified'

    def __init__(self, parent):
        # Hold only a weak reference to the owning Cache to avoid a
        # reference cycle between cache and engine.
        self.parent = ref(parent)
    def configure(self):
        # Subclass responsibility.
        raise RuntimeError
    def get(self, date):
        # Return records newer than `date`. Subclass responsibility.
        raise RuntimeError
    def put(self, key, value, lifetime):
        # Store one record. Subclass responsibility.
        raise RuntimeError
    def expire(self, key):
        # Remove one record. Subclass responsibility.
        raise RuntimeError
class CacheObject(object):
    """
    One stored cache record: a key/value pair plus expiry bookkeeping.
    """

    def __init__(self, key, data, lifetime=0, creation=None):
        self.key = key
        self.data = data
        self.lifetime = lifetime
        # Default the creation stamp to "now" when none is supplied.
        if creation is None:
            creation = time.time()
        self.creation = creation

    def __len__(self):
        return len(self.data)

    @property
    def remaining(self):
        # Seconds left before this record expires; clamped at zero.
        expiry = self.creation + self.lifetime
        return max(expiry - time.time(), 0)

    @property
    def expired(self):
        # A record is expired once no lifetime remains.
        return self.remaining == 0

View File

@@ -1,400 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: cache_file.py
# Python Library
# Author: Raymond Wagner
# Purpose: Persistent file-backed cache using /tmp/ to share data
# using flock or msvcrt.locking to allow safe concurrent
# access.
#-----------------------
import struct
import errno
import json
import time
import os
import io
from cStringIO import StringIO
from tmdb_exceptions import *
from cache_engine import CacheEngine, CacheObject
####################
# Cache File Format
#------------------
# cache version (2) unsigned short
# slot count (2) unsigned short
# slot 0: timestamp (8) double
# slot 0: lifetime (4) unsigned int
# slot 0: seek point (4) unsigned int
# slot 1: timestamp
# slot 1: lifetime index slots are IDd by their query date and
# slot 1: seek point are filled incrementally forwards. lifetime
# .... is how long after query date before the item
# .... expires, and seek point is the location of the
# slot N-2: timestamp start of data for that entry. 256 empty slots
# slot N-2: lifetime are pre-allocated, allowing fast updates.
# slot N-2: seek point when all slots are filled, the cache file is
# slot N-1: timestamp rewritten from scratch to add more slots.
# slot N-1: lifetime
# slot N-1: seek point
# block 1 (?) ASCII
# block 2
# .... blocks are just simple ASCII text, generated
# .... as independent objects by the JSON encoder
# block N-2
# block N-1
#
####################
def _donothing(*args, **kwargs):
pass
# Platform-dependent file locking: prefer POSIX fcntl.flock; when fcntl is
# unavailable (Windows), fall back to msvcrt.locking. Each branch also
# defines a matching parse_filename() for that platform's path rules.
try:
    import fcntl

    class Flock(object):
        """
        Context manager to flock file for the duration the object
        exists. Referenced file will be automatically unflocked as the
        interpreter exits the context.

        Supports an optional callback to process the error and optionally
        suppress it.
        """
        LOCK_EX = fcntl.LOCK_EX
        LOCK_SH = fcntl.LOCK_SH

        def __init__(self, fileobj, operation, callback=None):
            self.fileobj = fileobj
            self.operation = operation
            self.callback = callback

        def __enter__(self):
            fcntl.flock(self.fileobj, self.operation)

        def __exit__(self, exc_type, exc_value, exc_tb):
            # Give the callback a chance to inspect (and suppress) the
            # exception, then always release the lock.
            suppress = False
            if callable(self.callback):
                suppress = self.callback(exc_type, exc_value, exc_tb)
            fcntl.flock(self.fileobj, fcntl.LOCK_UN)
            return suppress

    def parse_filename(filename):
        """Resolve a cache filename to an absolute POSIX path."""
        if '$' in filename:
            # replace any environmental variables
            filename = os.path.expandvars(filename)
        if filename.startswith('~'):
            # check for home directory
            return os.path.expanduser(filename)
        elif filename.startswith('/'):
            # check for absolute path
            return filename
        # return path with temp directory prepended
        return '/tmp/' + filename

except ImportError:
    import msvcrt

    class Flock( object ):
        # Windows counterpart built on msvcrt.locking; same interface as
        # the POSIX version above. Note both EX and SH map to LK_LOCK —
        # msvcrt has no shared-lock mode.
        LOCK_EX = msvcrt.LK_LOCK
        LOCK_SH = msvcrt.LK_LOCK

        def __init__(self, fileobj, operation, callback=None):
            self.fileobj = fileobj
            self.operation = operation
            self.callback = callback

        def __enter__(self):
            # msvcrt.locking needs an explicit byte count to lock.
            self.size = os.path.getsize(self.fileobj.name)
            msvcrt.locking(self.fileobj.fileno(), self.operation, self.size)

        def __exit__(self, exc_type, exc_value, exc_tb):
            suppress = False
            if callable(self.callback):
                suppress = self.callback(exc_type, exc_value, exc_tb)
            msvcrt.locking(self.fileobj.fileno(), msvcrt.LK_UNLCK, self.size)
            return suppress

    def parse_filename(filename):
        """Resolve a cache filename to an absolute Windows path."""
        if '%' in filename:
            # replace any environmental variables
            filename = os.path.expandvars(filename)
        if filename.startswith('~'):
            # check for home directory
            return os.path.expanduser(filename)
        elif (ord(filename[0]) in (range(65, 91) + range(99, 123))) \
                and (filename[1:3] == ':\\'):
            # check for absolute drive path (e.g. C:\...)
            # NOTE(review): lowercase range starts at 99 ('c'); drives a:/b:
            # in lowercase are rejected — 97 was presumably intended. Also
            # Python 2 only: list + list on range() results.
            return filename
        elif (filename.count('\\') >= 3) and (filename.startswith('\\\\')):
            # check for absolute UNC path (e.g. \\server\...)
            return filename
        # return path with temp directory prepended
        return os.path.expandvars(os.path.join('%TEMP%', filename))
class FileCacheObject(CacheObject):
    """
    Cache record specialized for file storage: the key/data pair is held
    as a JSON blob in an internal buffer and (de)serialized lazily.
    """
    _struct = struct.Struct('dII') # double and two ints
                                   # timestamp, lifetime, position

    @classmethod
    def fromFile(cls, fd):
        # Build a record from one packed slot header at fd's current
        # position; the data blob itself is read later via load().
        dat = cls._struct.unpack(fd.read(cls._struct.size))
        obj = cls(None, None, dat[1], dat[0])
        obj.position = dat[2]
        return obj

    def __init__(self, *args, **kwargs):
        self._key = None    # decoded lazily from the JSON buffer
        self._data = None   # decoded lazily from the JSON buffer
        self._size = None   # byte length of the serialized blob
        self._buff = StringIO()
        super(FileCacheObject, self).__init__(*args, **kwargs)

    @property
    def size(self):
        # Byte length of the serialized record; serializes key/data into
        # the buffer on first access when the buffer is still empty.
        if self._size is None:
            self._buff.seek(0, 2)
            size = self._buff.tell()
            if size == 0:
                if (self._key is None) or (self._data is None):
                    raise RuntimeError
                json.dump([self.key, self.data], self._buff)
                self._size = self._buff.tell()
            # NOTE(review): this assignment overwrites the freshly computed
            # serialized size with the pre-dump value (0) when the buffer
            # started empty — looks like a latent bug; confirm against the
            # upstream tmdb3 sources (indentation was ambiguous here).
            self._size = size
        return self._size

    @size.setter
    def size(self, value):
        self._size = value

    @property
    def key(self):
        if self._key is None:
            # Best-effort lazy decode; the buffer may not be loaded yet.
            try:
                self._key, self._data = json.loads(self._buff.getvalue())
            except:
                pass
        return self._key

    @key.setter
    def key(self, value):
        self._key = value

    @property
    def data(self):
        if self._data is None:
            self._key, self._data = json.loads(self._buff.getvalue())
        return self._data

    @data.setter
    def data(self, value):
        self._data = value

    def load(self, fd):
        # Read this record's serialized blob from the cache file into the
        # local buffer (position/size were set while scanning the index).
        fd.seek(self.position)
        self._buff.seek(0)
        self._buff.write(fd.read(self.size))

    def dumpslot(self, fd):
        # Write this record's packed slot header (creation, lifetime,
        # position) at fd's current position.
        pos = fd.tell()  # NOTE(review): unused; presumably leftover debugging
        fd.write(self._struct.pack(self.creation, self.lifetime, self.position))

    def dumpdata(self, fd):
        # Touch `size` purely for its side effect: it forces serialization
        # of key/data into the buffer before writing.
        self.size
        fd.seek(self.position)
        fd.write(self._buff.getvalue())
class FileEngine( CacheEngine ):
    """Simple file-backed engine.

    On-disk layout (see the format comment above): a 4-byte header
    (version, slot count), a fixed index of packed slots, then the JSON
    data blocks the slots point at.
    """
    name = 'file'
    _struct = struct.Struct('HH') # two shorts for version and count
    _version = 2

    def __init__(self, parent):
        super(FileEngine, self).__init__(parent)
        self.configure(None)

    def configure(self, filename, preallocate=256):
        # preallocate: number of empty index slots written on a full rewrite.
        self.preallocate = preallocate
        self.cachefile = filename
        self.size = 0   # total slot count read from the file header
        self.free = 0   # unused (pre-allocated) slots remaining
        self.age = 0    # creation time of the newest record read

    def _init_cache(self):
        # only run this once: replace the bound name with a no-op so later
        # calls fall straight through
        self._init_cache = _donothing

        if self.cachefile is None:
            raise TMDBCacheError("No cache filename given.")

        self.cachefile = parse_filename(self.cachefile)

        try:
            # attempt to read existing cache at filename
            # handle any errors that occur
            self._open('r+b')
            # seems to have read fine, make sure we have write access
            if not os.access(self.cachefile, os.W_OK):
                raise TMDBCacheWriteError(self.cachefile)

        except IOError as e:
            if e.errno == errno.ENOENT:
                # file does not exist, create a new one
                try:
                    self._open('w+b')
                    self._write([])
                except IOError as e:
                    if e.errno == errno.ENOENT:
                        # directory does not exist
                        raise TMDBCacheDirectoryError(self.cachefile)
                    elif e.errno == errno.EACCES:
                        # user does not have rights to create new file
                        raise TMDBCacheWriteError(self.cachefile)
                    else:
                        # let the unhandled error continue through
                        raise
            elif e.errno == errno.EACCES:
                # file exists, but we do not have permission to access it
                raise TMDBCacheReadError(self.cachefile)
            else:
                # let the unhandled error continue through
                raise

    def get(self, date):
        # Return records created after `date`, under a shared lock.
        self._init_cache()
        self._open('r+b')

        with Flock(self.cachefd, Flock.LOCK_SH):
            # return any new objects in the cache
            return self._read(date)

    def put(self, key, value, lifetime):
        # Append one record under an exclusive lock; returns the records
        # newly merged (including the one just added).
        self._init_cache()
        self._open('r+b')

        with Flock(self.cachefd, Flock.LOCK_EX):
            newobjs = self._read(self.age)
            newobjs.append(FileCacheObject(key, value, lifetime))

            # this will cause a new file object to be opened with the proper
            # access mode, however the Flock should keep the old object open
            # and properly locked
            self._open('r+b')
            self._write(newobjs)
            return newobjs

    def _open(self, mode='r+b'):
        # enforce binary operation
        try:
            if self.cachefd.mode == mode:
                # already opened in requested mode, nothing to do
                self.cachefd.seek(0)
                return
        except:
            pass # catch issue of no cachefile yet opened

        self.cachefd = io.open(self.cachefile, mode)

    def _read(self, date):
        # Scan the slot index, then walk it backwards collecting records
        # created after `date`; resets bookkeeping on any parse failure.
        try:
            self.cachefd.seek(0)
            version, count = self._struct.unpack(\
                    self.cachefd.read(self._struct.size))
            if version != self._version:
                # old version, break out and well rewrite when finished
                raise Exception

            self.size = count
            cache = []
            while count:
                # loop through storage definitions
                obj = FileCacheObject.fromFile(self.cachefd)
                cache.append(obj)
                count -= 1

        except:
            # failed to read information, so just discard it and return empty
            self.size = 0
            self.free = 0
            return []

        # get end of file
        self.cachefd.seek(0, 2)
        position = self.cachefd.tell()
        newobjs = []
        emptycount = 0

        # walk backward through all, collecting new content and populating size
        while len(cache):
            obj = cache.pop()
            if obj.creation == 0:
                # unused slot, skip
                emptycount += 1
            elif obj.expired:
                # object has passed expiration date, no sense processing
                continue
            elif obj.creation > date:
                # used slot with new data, process
                # (each blob's size = distance to the next blob's start)
                obj.size, position = position - obj.position, obj.position
                newobjs.append(obj)
                # update age
                self.age = max(self.age, obj.creation)
            elif len(newobjs):
                # end of new data, break
                break

        # walk forward and load new content
        for obj in newobjs:
            obj.load(self.cachefd)

        self.free = emptycount
        return newobjs

    def _write(self, data):
        if self.free and (self.size != self.free):
            # Incremental path: at least one record exists and a free
            # pre-allocated slot is available.
            # we only care about the last data point, since the rest are
            # already stored in the file
            data = data[-1]

            # determine write position of data in cache
            self.cachefd.seek(0, 2)
            end = self.cachefd.tell()
            data.position = end

            # write incremental update to free slot
            # (4-byte header + 16 bytes per already-used slot)
            self.cachefd.seek(4 + 16*(self.size-self.free))
            data.dumpslot(self.cachefd)
            data.dumpdata(self.cachefd)

        else:
            # rewrite cache file from scratch
            # pull data from parent cache
            data.extend(self.parent()._data.values())
            data.sort(key=lambda x: x.creation)
            # write header
            size = len(data) + self.preallocate
            self.cachefd.seek(0)
            self.cachefd.truncate()
            self.cachefd.write(self._struct.pack(self._version, size))
            # write storage slot definitions
            prev = None
            for d in data:
                if prev == None:
                    # first blob starts right after the full slot index
                    d.position = 4 + 16*size
                else:
                    d.position = prev.position + prev.size
                d.dumpslot(self.cachefd)
                prev = d
            # fill in allocated slots
            # NOTE(review): writes 2**8 (256) empty slots regardless of
            # self.preallocate — inconsistent with the header's `size`
            # when preallocate != 256; confirm before changing preallocate.
            for i in range(2**8):
                self.cachefd.write(FileCacheObject._struct.pack(0, 0, 0))
            # write stored data
            for d in data:
                d.dumpdata(self.cachefd)

        self.cachefd.flush()

    def expire(self, key):
        # Expiry is handled implicitly via record lifetimes; nothing to do.
        pass

View File

@@ -1,27 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: cache_null.py
# Python Library
# Author: Raymond Wagner
# Purpose: Null caching engine for debugging purposes
#-----------------------
from cache_engine import CacheEngine
class NullEngine(CacheEngine):
    """Non-caching engine for debugging: accepts and stores nothing."""
    name = 'null'

    def configure(self):
        # Nothing to set up for a no-op store.
        pass
    def get(self, date):
        # Never holds anything, so there is never anything new to return.
        return []
    def put(self, key, value, lifetime):
        # Silently discard the record; report nothing newly stored.
        return []
    def expire(self, key):
        pass

View File

@@ -1,642 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: locales.py Stores locale information for filtering results
# Python Library
# Author: Raymond Wagner
#-----------------------
from tmdb_exceptions import *
import locale
syslocale = None
class LocaleBase(object):
    """
    Base for locale atoms (Language/Country/Locale): each instance
    registers itself in the class-level _stored map under every key passed
    to __init__, then marks itself immutable.
    """
    __slots__ = ['__immutable']
    _stored = {}         # per-subclass lookup map (shadowed by subclasses)
    fallthrough = False  # toggled module-wide by set_locale()

    def __init__(self, *keys):
        for key in keys:
            self._stored[key.lower()] = self
        self.__immutable = True

    def __setattr__(self, key, value):
        # NOTE(review): because of name mangling the flag is actually
        # stored as '_LocaleBase__immutable', so getattr(self,
        # '__immutable', ...) always misses and returns False — this
        # freeze guard appears to be dead code; confirm against upstream
        # tmdb3 before relying on (or "fixing") it.
        if getattr(self, '__immutable', False):
            raise NotImplementedError(self.__class__.__name__ +
                            ' does not support modification.')
        super(LocaleBase, self).__setattr__(key, value)

    def __delattr__(self, key):
        if getattr(self, '__immutable', False):
            raise NotImplementedError(self.__class__.__name__ +
                            ' does not support modification.')
        super(LocaleBase, self).__delattr__(key)

    # NOTE(review): the comparisons look intentionally(?) inverted —
    # __lt__ uses '>' and __gt__ uses '<' on the string forms; confirm
    # the intended sort order before changing.
    def __lt__(self, other):
        return (id(self) != id(other)) and (str(self) > str(other))

    def __gt__(self, other):
        return (id(self) != id(other)) and (str(self) < str(other))

    def __eq__(self, other):
        return (id(self) == id(other)) or (str(self) == str(other))

    @classmethod
    def getstored(cls, key):
        # Look up a registered instance by key; None passes through as None.
        if key is None:
            return None
        try:
            return cls._stored[key.lower()]
        except:
            raise TMDBLocaleError("'{0}' is not a known valid {1} code."\
                            .format(key, cls.__name__))
class Language(LocaleBase):
    """A language, registered under both its ISO 639-1 and 639-2 codes."""
    __slots__ = ['ISO639_1', 'ISO639_2', 'ISO639_2B', 'englishname',
                 'nativename']
    _stored = {}  # Language-specific registry, distinct from other subclasses

    def __init__(self, iso1, iso2, ename):
        self.ISO639_1 = iso1
        self.ISO639_2 = iso2
#        self.ISO639_2B = iso2b
        self.englishname = ename
#        self.nativename = nname
        # Register under both ISO codes (and freeze, per LocaleBase).
        super(Language, self).__init__(iso1, iso2)

    def __str__(self):
        return self.ISO639_1

    def __repr__(self):
        return u"<Language '{0.englishname}' ({0.ISO639_1})>".format(self)
class Country(LocaleBase):
    """A country, registered under its ISO 3166-1 alpha-2 code."""
    __slots__ = ['alpha2', 'name']
    _stored = {}  # Country-specific registry, distinct from other subclasses

    def __init__(self, alpha2, name):
        self.alpha2 = alpha2
        self.name = name
        # Register under the alpha-2 code (and freeze, per LocaleBase).
        super(Country, self).__init__(alpha2)

    def __str__(self):
        return self.alpha2

    def __repr__(self):
        return u"<Country '{0.name}' ({0.alpha2})>".format(self)
class Locale(LocaleBase):
    """A language/country pair plus the character encoding to use for I/O."""
    __slots__ = ['language', 'country', 'encoding']

    def __init__(self, language, country, encoding):
        # Resolve the codes to registered instances; None passes through.
        self.language = Language.getstored(language)
        self.country = Country.getstored(country)
        # Fall back to latin-1 when no encoding was detected.
        self.encoding = encoding if encoding else 'latin-1'

    def __str__(self):
        return u"{0}_{1}".format(self.language, self.country)

    def __repr__(self):
        return u"<Locale {0.language}_{0.country}>".format(self)

    def encode(self, dat):
        """Encode using system default encoding for network/file output."""
        try:
            return dat.encode(self.encoding)
        except AttributeError:
            # not a string type, pass along
            return dat
        except UnicodeDecodeError:
            # just return unmodified and hope for the best
            return dat

    def decode(self, dat):
        """Decode to system default encoding for internal use."""
        try:
            return dat.decode(self.encoding)
        except AttributeError:
            # not a string type, pass along
            return dat
        except UnicodeEncodeError:
            # just return unmodified and hope for the best
            return dat
def set_locale(language=None, country=None, fallthrough=False):
    """Set the module-wide default locale.

    Missing language/country values fall back first to any previously
    stored locale, then to the system locale, and finally to en_US.
    """
    global syslocale
    LocaleBase.fallthrough = fallthrough
    sysloc, sysenc = locale.getdefaultlocale()

    if (not language) or (not country):
        # pick a fallback source for the missing piece(s)
        if syslocale is not None:
            fallback = (str(syslocale.language), str(syslocale.country))
        elif sysloc and ('_' in sysloc):
            fallback = sysloc.split('_')
        else:
            fallback = ('en', 'US')
        if language is None:
            language = fallback[0]
        if country is None:
            country = fallback[1]

    syslocale = Locale(language, country, sysenc)
def get_locale(language=-1, country=-1):
    """Output locale using provided attributes, or return system locale.

    The sentinel -1 means "use the stored default" for that field.
    """
    global syslocale
    # base locale to draw defaults from
    if syslocale is None:
        base = Locale(None, None, locale.getdefaultlocale()[1])
    else:
        base = syslocale
    # nothing overridden: hand back the stored locale itself
    if (language, country) == (-1, -1):
        return base
    # supplement whichever field was left at the default
    if language == -1:
        language = base.language
    elif country == -1:
        country = base.country
    return Locale(language, country, base.encoding)
######## AUTOGENERATED LANGUAGE AND COUNTRY DATA BELOW HERE #########
Language("ab", "abk", u"Abkhazian")
Language("aa", "aar", u"Afar")
Language("af", "afr", u"Afrikaans")
Language("ak", "aka", u"Akan")
Language("sq", "alb/sqi", u"Albanian")
Language("am", "amh", u"Amharic")
Language("ar", "ara", u"Arabic")
Language("an", "arg", u"Aragonese")
Language("hy", "arm/hye", u"Armenian")
Language("as", "asm", u"Assamese")
Language("av", "ava", u"Avaric")
Language("ae", "ave", u"Avestan")
Language("ay", "aym", u"Aymara")
Language("az", "aze", u"Azerbaijani")
Language("bm", "bam", u"Bambara")
Language("ba", "bak", u"Bashkir")
Language("eu", "baq/eus", u"Basque")
Language("be", "bel", u"Belarusian")
Language("bn", "ben", u"Bengali")
Language("bh", "bih", u"Bihari languages")
Language("bi", "bis", u"Bislama")
Language("nb", "nob", u"Bokmål, Norwegian")
Language("bs", "bos", u"Bosnian")
Language("br", "bre", u"Breton")
Language("bg", "bul", u"Bulgarian")
Language("my", "bur/mya", u"Burmese")
Language("es", "spa", u"Castilian")
Language("ca", "cat", u"Catalan")
Language("km", "khm", u"Central Khmer")
Language("ch", "cha", u"Chamorro")
Language("ce", "che", u"Chechen")
Language("ny", "nya", u"Chewa")
Language("ny", "nya", u"Chichewa")
Language("zh", "chi/zho", u"Chinese")
Language("za", "zha", u"Chuang")
Language("cu", "chu", u"Church Slavic")
Language("cu", "chu", u"Church Slavonic")
Language("cv", "chv", u"Chuvash")
Language("kw", "cor", u"Cornish")
Language("co", "cos", u"Corsican")
Language("cr", "cre", u"Cree")
Language("hr", "hrv", u"Croatian")
Language("cs", "cze/ces", u"Czech")
Language("da", "dan", u"Danish")
Language("dv", "div", u"Dhivehi")
Language("dv", "div", u"Divehi")
Language("nl", "dut/nld", u"Dutch")
Language("dz", "dzo", u"Dzongkha")
Language("en", "eng", u"English")
Language("eo", "epo", u"Esperanto")
Language("et", "est", u"Estonian")
Language("ee", "ewe", u"Ewe")
Language("fo", "fao", u"Faroese")
Language("fj", "fij", u"Fijian")
Language("fi", "fin", u"Finnish")
Language("nl", "dut/nld", u"Flemish")
Language("fr", "fre/fra", u"French")
Language("ff", "ful", u"Fulah")
Language("gd", "gla", u"Gaelic")
Language("gl", "glg", u"Galician")
Language("lg", "lug", u"Ganda")
Language("ka", "geo/kat", u"Georgian")
Language("de", "ger/deu", u"German")
Language("ki", "kik", u"Gikuyu")
Language("el", "gre/ell", u"Greek, Modern (1453-)")
Language("kl", "kal", u"Greenlandic")
Language("gn", "grn", u"Guarani")
Language("gu", "guj", u"Gujarati")
Language("ht", "hat", u"Haitian")
Language("ht", "hat", u"Haitian Creole")
Language("ha", "hau", u"Hausa")
Language("he", "heb", u"Hebrew")
Language("hz", "her", u"Herero")
Language("hi", "hin", u"Hindi")
Language("ho", "hmo", u"Hiri Motu")
Language("hu", "hun", u"Hungarian")
Language("is", "ice/isl", u"Icelandic")
Language("io", "ido", u"Ido")
Language("ig", "ibo", u"Igbo")
Language("id", "ind", u"Indonesian")
Language("ia", "ina", u"Interlingua (International Auxiliary Language Association)")
Language("ie", "ile", u"Interlingue")
Language("iu", "iku", u"Inuktitut")
Language("ik", "ipk", u"Inupiaq")
Language("ga", "gle", u"Irish")
Language("it", "ita", u"Italian")
Language("ja", "jpn", u"Japanese")
Language("jv", "jav", u"Javanese")
Language("kl", "kal", u"Kalaallisut")
Language("kn", "kan", u"Kannada")
Language("kr", "kau", u"Kanuri")
Language("ks", "kas", u"Kashmiri")
Language("kk", "kaz", u"Kazakh")
Language("ki", "kik", u"Kikuyu")
Language("rw", "kin", u"Kinyarwanda")
Language("ky", "kir", u"Kirghiz")
Language("kv", "kom", u"Komi")
Language("kg", "kon", u"Kongo")
Language("ko", "kor", u"Korean")
Language("kj", "kua", u"Kuanyama")
Language("ku", "kur", u"Kurdish")
Language("kj", "kua", u"Kwanyama")
Language("ky", "kir", u"Kyrgyz")
Language("lo", "lao", u"Lao")
Language("la", "lat", u"Latin")
Language("lv", "lav", u"Latvian")
Language("lb", "ltz", u"Letzeburgesch")
Language("li", "lim", u"Limburgan")
Language("li", "lim", u"Limburger")
Language("li", "lim", u"Limburgish")
Language("ln", "lin", u"Lingala")
Language("lt", "lit", u"Lithuanian")
Language("lu", "lub", u"Luba-Katanga")
Language("lb", "ltz", u"Luxembourgish")
Language("mk", "mac/mkd", u"Macedonian")
Language("mg", "mlg", u"Malagasy")
Language("ms", "may/msa", u"Malay")
Language("ml", "mal", u"Malayalam")
Language("dv", "div", u"Maldivian")
Language("mt", "mlt", u"Maltese")
Language("gv", "glv", u"Manx")
Language("mi", "mao/mri", u"Maori")
Language("mr", "mar", u"Marathi")
Language("mh", "mah", u"Marshallese")
Language("ro", "rum/ron", u"Moldavian")
Language("ro", "rum/ron", u"Moldovan")
Language("mn", "mon", u"Mongolian")
Language("na", "nau", u"Nauru")
Language("nv", "nav", u"Navaho")
Language("nv", "nav", u"Navajo")
Language("nd", "nde", u"Ndebele, North")
Language("nr", "nbl", u"Ndebele, South")
Language("ng", "ndo", u"Ndonga")
Language("ne", "nep", u"Nepali")
Language("nd", "nde", u"North Ndebele")
Language("se", "sme", u"Northern Sami")
Language("no", "nor", u"Norwegian")
Language("nb", "nob", u"Norwegian Bokmål")
Language("nn", "nno", u"Norwegian Nynorsk")
Language("ii", "iii", u"Nuosu")
Language("ny", "nya", u"Nyanja")
Language("nn", "nno", u"Nynorsk, Norwegian")
Language("ie", "ile", u"Occidental")
Language("oc", "oci", u"Occitan (post 1500)")
Language("oj", "oji", u"Ojibwa")
Language("cu", "chu", u"Old Bulgarian")
Language("cu", "chu", u"Old Church Slavonic")
Language("cu", "chu", u"Old Slavonic")
Language("or", "ori", u"Oriya")
Language("om", "orm", u"Oromo")
Language("os", "oss", u"Ossetian")
Language("os", "oss", u"Ossetic")
Language("pi", "pli", u"Pali")
Language("pa", "pan", u"Panjabi")
Language("ps", "pus", u"Pashto")
Language("fa", "per/fas", u"Persian")
Language("pl", "pol", u"Polish")
Language("pt", "por", u"Portuguese")
Language("pa", "pan", u"Punjabi")
Language("ps", "pus", u"Pushto")
Language("qu", "que", u"Quechua")
Language("ro", "rum/ron", u"Romanian")
Language("rm", "roh", u"Romansh")
Language("rn", "run", u"Rundi")
Language("ru", "rus", u"Russian")
Language("sm", "smo", u"Samoan")
Language("sg", "sag", u"Sango")
Language("sa", "san", u"Sanskrit")
Language("sc", "srd", u"Sardinian")
Language("gd", "gla", u"Scottish Gaelic")
Language("sr", "srp", u"Serbian")
Language("sn", "sna", u"Shona")
Language("ii", "iii", u"Sichuan Yi")
Language("sd", "snd", u"Sindhi")
Language("si", "sin", u"Sinhala")
Language("si", "sin", u"Sinhalese")
Language("sk", "slo/slk", u"Slovak")
Language("sl", "slv", u"Slovenian")
Language("so", "som", u"Somali")
Language("st", "sot", u"Sotho, Southern")
Language("nr", "nbl", u"South Ndebele")
Language("es", "spa", u"Spanish")
Language("su", "sun", u"Sundanese")
Language("sw", "swa", u"Swahili")
Language("ss", "ssw", u"Swati")
Language("sv", "swe", u"Swedish")
Language("tl", "tgl", u"Tagalog")
Language("ty", "tah", u"Tahitian")
Language("tg", "tgk", u"Tajik")
Language("ta", "tam", u"Tamil")
Language("tt", "tat", u"Tatar")
Language("te", "tel", u"Telugu")
Language("th", "tha", u"Thai")
Language("bo", "tib/bod", u"Tibetan")
Language("ti", "tir", u"Tigrinya")
Language("to", "ton", u"Tonga (Tonga Islands)")
Language("ts", "tso", u"Tsonga")
Language("tn", "tsn", u"Tswana")
Language("tr", "tur", u"Turkish")
Language("tk", "tuk", u"Turkmen")
Language("tw", "twi", u"Twi")
Language("ug", "uig", u"Uighur")
Language("uk", "ukr", u"Ukrainian")
Language("ur", "urd", u"Urdu")
Language("ug", "uig", u"Uyghur")
Language("uz", "uzb", u"Uzbek")
Language("ca", "cat", u"Valencian")
Language("ve", "ven", u"Venda")
Language("vi", "vie", u"Vietnamese")
Language("vo", "vol", u"Volapük")
Language("wa", "wln", u"Walloon")
Language("cy", "wel/cym", u"Welsh")
Language("fy", "fry", u"Western Frisian")
Language("wo", "wol", u"Wolof")
Language("xh", "xho", u"Xhosa")
Language("yi", "yid", u"Yiddish")
Language("yo", "yor", u"Yoruba")
Language("za", "zha", u"Zhuang")
Language("zu", "zul", u"Zulu")
Country("AF", u"AFGHANISTAN")
Country("AX", u"ÅLAND ISLANDS")
Country("AL", u"ALBANIA")
Country("DZ", u"ALGERIA")
Country("AS", u"AMERICAN SAMOA")
Country("AD", u"ANDORRA")
Country("AO", u"ANGOLA")
Country("AI", u"ANGUILLA")
Country("AQ", u"ANTARCTICA")
Country("AG", u"ANTIGUA AND BARBUDA")
Country("AR", u"ARGENTINA")
Country("AM", u"ARMENIA")
Country("AW", u"ARUBA")
Country("AU", u"AUSTRALIA")
Country("AT", u"AUSTRIA")
Country("AZ", u"AZERBAIJAN")
Country("BS", u"BAHAMAS")
Country("BH", u"BAHRAIN")
Country("BD", u"BANGLADESH")
Country("BB", u"BARBADOS")
Country("BY", u"BELARUS")
Country("BE", u"BELGIUM")
Country("BZ", u"BELIZE")
Country("BJ", u"BENIN")
Country("BM", u"BERMUDA")
Country("BT", u"BHUTAN")
Country("BO", u"BOLIVIA, PLURINATIONAL STATE OF")
Country("BQ", u"BONAIRE, SINT EUSTATIUS AND SABA")
Country("BA", u"BOSNIA AND HERZEGOVINA")
Country("BW", u"BOTSWANA")
Country("BV", u"BOUVET ISLAND")
Country("BR", u"BRAZIL")
Country("IO", u"BRITISH INDIAN OCEAN TERRITORY")
Country("BN", u"BRUNEI DARUSSALAM")
Country("BG", u"BULGARIA")
Country("BF", u"BURKINA FASO")
Country("BI", u"BURUNDI")
Country("KH", u"CAMBODIA")
Country("CM", u"CAMEROON")
Country("CA", u"CANADA")
Country("CV", u"CAPE VERDE")
Country("KY", u"CAYMAN ISLANDS")
Country("CF", u"CENTRAL AFRICAN REPUBLIC")
Country("TD", u"CHAD")
Country("CL", u"CHILE")
Country("CN", u"CHINA")
Country("CX", u"CHRISTMAS ISLAND")
Country("CC", u"COCOS (KEELING) ISLANDS")
Country("CO", u"COLOMBIA")
Country("KM", u"COMOROS")
Country("CG", u"CONGO")
Country("CD", u"CONGO, THE DEMOCRATIC REPUBLIC OF THE")
Country("CK", u"COOK ISLANDS")
Country("CR", u"COSTA RICA")
Country("CI", u"CÔTE D'IVOIRE")
Country("HR", u"CROATIA")
Country("CU", u"CUBA")
Country("CW", u"CURAÇAO")
Country("CY", u"CYPRUS")
Country("CZ", u"CZECH REPUBLIC")
Country("DK", u"DENMARK")
Country("DJ", u"DJIBOUTI")
Country("DM", u"DOMINICA")
Country("DO", u"DOMINICAN REPUBLIC")
Country("EC", u"ECUADOR")
Country("EG", u"EGYPT")
Country("SV", u"EL SALVADOR")
Country("GQ", u"EQUATORIAL GUINEA")
Country("ER", u"ERITREA")
Country("EE", u"ESTONIA")
Country("ET", u"ETHIOPIA")
Country("FK", u"FALKLAND ISLANDS (MALVINAS)")
Country("FO", u"FAROE ISLANDS")
Country("FJ", u"FIJI")
Country("FI", u"FINLAND")
Country("FR", u"FRANCE")
Country("GF", u"FRENCH GUIANA")
Country("PF", u"FRENCH POLYNESIA")
Country("TF", u"FRENCH SOUTHERN TERRITORIES")
Country("GA", u"GABON")
Country("GM", u"GAMBIA")
Country("GE", u"GEORGIA")
Country("DE", u"GERMANY")
Country("GH", u"GHANA")
Country("GI", u"GIBRALTAR")
Country("GR", u"GREECE")
Country("GL", u"GREENLAND")
Country("GD", u"GRENADA")
Country("GP", u"GUADELOUPE")
Country("GU", u"GUAM")
Country("GT", u"GUATEMALA")
Country("GG", u"GUERNSEY")
Country("GN", u"GUINEA")
Country("GW", u"GUINEA-BISSAU")
Country("GY", u"GUYANA")
Country("HT", u"HAITI")
Country("HM", u"HEARD ISLAND AND MCDONALD ISLANDS")
Country("VA", u"HOLY SEE (VATICAN CITY STATE)")
Country("HN", u"HONDURAS")
Country("HK", u"HONG KONG")
Country("HU", u"HUNGARY")
Country("IS", u"ICELAND")
Country("IN", u"INDIA")
Country("ID", u"INDONESIA")
Country("IR", u"IRAN, ISLAMIC REPUBLIC OF")
Country("IQ", u"IRAQ")
Country("IE", u"IRELAND")
Country("IM", u"ISLE OF MAN")
Country("IL", u"ISRAEL")
Country("IT", u"ITALY")
Country("JM", u"JAMAICA")
Country("JP", u"JAPAN")
Country("JE", u"JERSEY")
Country("JO", u"JORDAN")
Country("KZ", u"KAZAKHSTAN")
Country("KE", u"KENYA")
Country("KI", u"KIRIBATI")
Country("KP", u"KOREA, DEMOCRATIC PEOPLE'S REPUBLIC OF")
Country("KR", u"KOREA, REPUBLIC OF")
Country("KW", u"KUWAIT")
Country("KG", u"KYRGYZSTAN")
Country("LA", u"LAO PEOPLE'S DEMOCRATIC REPUBLIC")
Country("LV", u"LATVIA")
Country("LB", u"LEBANON")
Country("LS", u"LESOTHO")
Country("LR", u"LIBERIA")
Country("LY", u"LIBYA")
Country("LI", u"LIECHTENSTEIN")
Country("LT", u"LITHUANIA")
Country("LU", u"LUXEMBOURG")
Country("MO", u"MACAO")
Country("MK", u"MACEDONIA, THE FORMER YUGOSLAV REPUBLIC OF")
Country("MG", u"MADAGASCAR")
Country("MW", u"MALAWI")
Country("MY", u"MALAYSIA")
Country("MV", u"MALDIVES")
Country("ML", u"MALI")
Country("MT", u"MALTA")
Country("MH", u"MARSHALL ISLANDS")
Country("MQ", u"MARTINIQUE")
Country("MR", u"MAURITANIA")
Country("MU", u"MAURITIUS")
Country("YT", u"MAYOTTE")
Country("MX", u"MEXICO")
Country("FM", u"MICRONESIA, FEDERATED STATES OF")
Country("MD", u"MOLDOVA, REPUBLIC OF")
Country("MC", u"MONACO")
Country("MN", u"MONGOLIA")
Country("ME", u"MONTENEGRO")
Country("MS", u"MONTSERRAT")
Country("MA", u"MOROCCO")
Country("MZ", u"MOZAMBIQUE")
Country("MM", u"MYANMAR")
Country("NA", u"NAMIBIA")
Country("NR", u"NAURU")
Country("NP", u"NEPAL")
Country("NL", u"NETHERLANDS")
Country("NC", u"NEW CALEDONIA")
Country("NZ", u"NEW ZEALAND")
Country("NI", u"NICARAGUA")
Country("NE", u"NIGER")
Country("NG", u"NIGERIA")
Country("NU", u"NIUE")
Country("NF", u"NORFOLK ISLAND")
Country("MP", u"NORTHERN MARIANA ISLANDS")
Country("NO", u"NORWAY")
Country("OM", u"OMAN")
Country("PK", u"PAKISTAN")
Country("PW", u"PALAU")
Country("PS", u"PALESTINIAN TERRITORY, OCCUPIED")
Country("PA", u"PANAMA")
Country("PG", u"PAPUA NEW GUINEA")
Country("PY", u"PARAGUAY")
Country("PE", u"PERU")
Country("PH", u"PHILIPPINES")
Country("PN", u"PITCAIRN")
Country("PL", u"POLAND")
Country("PT", u"PORTUGAL")
Country("PR", u"PUERTO RICO")
Country("QA", u"QATAR")
Country("RE", u"RÉUNION")
Country("RO", u"ROMANIA")
Country("RU", u"RUSSIAN FEDERATION")
Country("RW", u"RWANDA")
Country("BL", u"SAINT BARTHÉLEMY")
Country("SH", u"SAINT HELENA, ASCENSION AND TRISTAN DA CUNHA")
Country("KN", u"SAINT KITTS AND NEVIS")
Country("LC", u"SAINT LUCIA")
Country("MF", u"SAINT MARTIN (FRENCH PART)")
Country("PM", u"SAINT PIERRE AND MIQUELON")
Country("VC", u"SAINT VINCENT AND THE GRENADINES")
Country("WS", u"SAMOA")
Country("SM", u"SAN MARINO")
Country("ST", u"SAO TOME AND PRINCIPE")
Country("SA", u"SAUDI ARABIA")
Country("SN", u"SENEGAL")
Country("RS", u"SERBIA")
Country("SC", u"SEYCHELLES")
Country("SL", u"SIERRA LEONE")
Country("SG", u"SINGAPORE")
Country("SX", u"SINT MAARTEN (DUTCH PART)")
Country("SK", u"SLOVAKIA")
Country("SI", u"SLOVENIA")
Country("SB", u"SOLOMON ISLANDS")
Country("SO", u"SOMALIA")
Country("ZA", u"SOUTH AFRICA")
Country("GS", u"SOUTH GEORGIA AND THE SOUTH SANDWICH ISLANDS")
Country("SS", u"SOUTH SUDAN")
Country("ES", u"SPAIN")
Country("LK", u"SRI LANKA")
Country("SD", u"SUDAN")
Country("SR", u"SURINAME")
Country("SJ", u"SVALBARD AND JAN MAYEN")
Country("SZ", u"SWAZILAND")
Country("SE", u"SWEDEN")
Country("CH", u"SWITZERLAND")
Country("SY", u"SYRIAN ARAB REPUBLIC")
Country("TW", u"TAIWAN, PROVINCE OF CHINA")
Country("TJ", u"TAJIKISTAN")
Country("TZ", u"TANZANIA, UNITED REPUBLIC OF")
Country("TH", u"THAILAND")
Country("TL", u"TIMOR-LESTE")
Country("TG", u"TOGO")
Country("TK", u"TOKELAU")
Country("TO", u"TONGA")
Country("TT", u"TRINIDAD AND TOBAGO")
Country("TN", u"TUNISIA")
Country("TR", u"TURKEY")
Country("TM", u"TURKMENISTAN")
Country("TC", u"TURKS AND CAICOS ISLANDS")
Country("TV", u"TUVALU")
Country("UG", u"UGANDA")
Country("UA", u"UKRAINE")
Country("AE", u"UNITED ARAB EMIRATES")
Country("GB", u"UNITED KINGDOM")
Country("US", u"UNITED STATES")
Country("UM", u"UNITED STATES MINOR OUTLYING ISLANDS")
Country("UY", u"URUGUAY")
Country("UZ", u"UZBEKISTAN")
Country("VU", u"VANUATU")
Country("VE", u"VENEZUELA, BOLIVARIAN REPUBLIC OF")
Country("VN", u"VIET NAM")
Country("VG", u"VIRGIN ISLANDS, BRITISH")
Country("VI", u"VIRGIN ISLANDS, U.S.")
Country("WF", u"WALLIS AND FUTUNA")
Country("EH", u"WESTERN SAHARA")
Country("YE", u"YEMEN")
Country("ZM", u"ZAMBIA")
Country("ZW", u"ZIMBABWE")

View File

@@ -1,116 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: pager.py List-like structure designed for handling paged results
# Python Library
# Author: Raymond Wagner
#-----------------------
from collections import Sequence, Iterator
class PagedIterator(Iterator):
    """Iterator over a PagedList-style sequence; indexing into the
    parent triggers page fetches as needed."""

    def __init__(self, parent):
        self._parent = parent
        self._len = len(parent)  # length frozen at iterator creation
        self._index = -1

    def __iter__(self):
        return self

    def next(self):
        """Return the next item from the parent sequence."""
        self._index += 1
        if self._index == self._len:
            raise StopIteration
        return self._parent[self._index]
class UnpagedData(object):
    """Placeholder for a list slot whose page has not been fetched yet.

    Multiplying an instance by an integer yields that many fresh
    placeholders, which PagedList uses to pad gaps in its buffer.
    """

    def copy(self):
        return self.__class__()

    def __mul__(self, count):
        return (self.copy() for _ in range(count))

    # n * placeholder behaves exactly like placeholder * n
    __rmul__ = __mul__
class PagedList(Sequence):
    """
    List-like object, with support for automatically grabbing
    additional pages from a data source.

    Subclasses must provide _getpage(); slots not yet fetched are held
    as UnpagedData placeholders until their page is populated.
    """
    _iter_class = None

    def __iter__(self):
        # build a dedicated iterator subclass once, named after us
        if self._iter_class is None:
            self._iter_class = type(self.__class__.__name__ + 'Iterator',
                                    (PagedIterator,), {})
        return self._iter_class(self)

    def __len__(self):
        # prefer an explicit total (e.g. set by PagedRequest._getpage);
        # fall back to the length of the local buffer
        try:
            return self._len
        except AttributeError:  # narrowed from a bare 'except'
            return len(self._data)

    def __init__(self, iterable, pagesize=20):
        self._data = list(iterable)
        self._pagesize = pagesize

    def __getitem__(self, index):
        if isinstance(index, slice):
            return [self[x] for x in xrange(*index.indices(len(self)))]
        if index >= len(self):
            raise IndexError("list index outside range")
        if (index >= len(self._data)) \
                or isinstance(self._data[index], UnpagedData):
            # explicit floor division (pages are 1-indexed); identical
            # to the old '/' under Python 2, but future-proof
            self._populatepage(index // self._pagesize + 1)
        return self._data[index]

    def __setitem__(self, index, value):
        raise NotImplementedError

    def __delitem__(self, index):
        raise NotImplementedError

    def __contains__(self, item):
        raise NotImplementedError

    def _populatepage(self, page):
        pagestart = (page - 1) * self._pagesize
        if len(self._data) < pagestart:
            # pad the gap with placeholders so indexes stay aligned
            self._data.extend(UnpagedData() * (pagestart - len(self._data)))
        if len(self._data) == pagestart:
            # fresh page appended at the end
            self._data.extend(self._getpage(page))
        else:
            # overwrite placeholders in the middle of the buffer
            for data in self._getpage(page):
                self._data[pagestart] = data
                pagestart += 1

    def _getpage(self, page):
        raise NotImplementedError("PagedList._getpage() must be provided " +
                                  "by subclass")
class PagedRequest(PagedList):
    """
    Derived PagedList that provides a list-like object with automatic
    paging intended for use with search requests.
    """

    def __init__(self, request, handler=None):
        # request must support .new(page=...) and .readJSON()
        self._request = request
        # NOTE(review): when handler is falsy, a subclass is expected to
        # provide _handler itself; otherwise _getpage will fail with
        # AttributeError -- confirm against subclasses
        if handler:
            self._handler = handler
        # fetch page 1 eagerly; TMDB pages are fixed at 20 items
        super(PagedRequest, self).__init__(self._getpage(1), 20)

    def _getpage(self, page):
        # Generator yielding handler-wrapped results for one page.
        # Side effect: updates self._len from the response's
        # 'total_results' so len() reflects the full result set.
        req = self._request.new(page=page)
        res = req.readJSON()
        self._len = res['total_results']
        for item in res['results']:
            if item is None:
                yield None
            else:
                yield self._handler(item)

View File

@@ -1,167 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: tmdb_request.py
# Python Library
# Author: Raymond Wagner
# Purpose: Wrapped urllib2.Request class pre-configured for accessing the
# TMDb v3 API
#-----------------------
from tmdb_exceptions import *
from locales import get_locale
from cache import Cache
from urllib import urlencode
import urllib2
import json
import os
DEBUG = False  # when True, Request prints URLs and parsed JSON replies
cache = Cache(filename='pytmdb3.cache')  # shared response cache for all Requests
#DEBUG = True
#cache = Cache(engine='null')
def set_key(key):
    """
    Specify the API key to use retrieving data from themoviedb.org.
    This key must be set before any calls will function.

    Raises TMDBKeyInvalid unless key is a 32-character hex string.
    """
    if len(key) != 32:
        raise TMDBKeyInvalid("Specified API key must be 128-bit hex")
    try:
        int(key, 16)
    except ValueError:  # narrowed from a bare 'except': non-hex digits
        raise TMDBKeyInvalid("Specified API key must be 128-bit hex")
    Request._api_key = key
def set_cache(engine=None, *args, **kwargs):
    """Specify caching engine and properties.

    Thin wrapper over the module-level cache's configure(); arguments
    are passed through to Cache.configure unchanged.
    """
    cache.configure(engine, *args, **kwargs)
class Request(urllib2.Request):
    """urllib2.Request pre-configured for the TMDb v3 API.

    Builds the final URL from the API path, the shared API key, and
    locale-encoded keyword arguments, and provides JSON helpers with
    response caching.
    """
    _api_key = None  # class-wide key, installed once via set_key()
    _base_url = "http://api.themoviedb.org/3/"

    @property
    def api_key(self):
        # fail fast if set_key() was never called
        if self._api_key is None:
            raise TMDBKeyMissing("API key must be specified before " +
                                 "requests can be made")
        return self._api_key

    def __init__(self, url, **kwargs):
        """
        Return a request object, using specified API path and
        arguments.
        """
        kwargs['api_key'] = self.api_key
        self._url = url.lstrip('/')
        # keep only non-None arguments; new() rebuilds from this dict
        self._kwargs = dict([(kwa, kwv) for kwa, kwv in kwargs.items()
                             if kwv is not None])
        locale = get_locale()
        kwargs = {}
        for k, v in self._kwargs.items():
            # encode values with the active locale's charset for the URL
            kwargs[k] = locale.encode(v)
        url = '{0}{1}?{2}'\
              .format(self._base_url, self._url, urlencode(kwargs))
        urllib2.Request.__init__(self, url)
        self.add_header('Accept', 'application/json')
        self.lifetime = 3600  # 1hr cache lifetime

    def new(self, **kwargs):
        """
        Create a new instance of the request, with tweaked arguments.

        Passing None for a keyword removes it; other values override or
        add. The cache lifetime carries over to the new request.
        """
        args = dict(self._kwargs)
        for k, v in kwargs.items():
            if v is None:
                if k in args:
                    del args[k]
            else:
                args[k] = v
        obj = self.__class__(self._url, **args)
        obj.lifetime = self.lifetime
        return obj

    def add_data(self, data):
        """Provide data to be sent with POST."""
        urllib2.Request.add_data(self, urlencode(data))

    def open(self):
        """Open a file object to the specified URL.

        HTTP errors are wrapped in TMDBHTTPError so readJSON() can
        attempt to parse the error body.
        """
        try:
            if DEBUG:
                print 'loading '+self.get_full_url()
                if self.has_data():
                    print ' '+self.get_data()
            return urllib2.urlopen(self)
        except urllib2.HTTPError, e:
            raise TMDBHTTPError(e)

    def read(self):
        """Return result from specified URL as a string."""
        return self.open().read()

    @cache.cached(urllib2.Request.get_full_url)
    def readJSON(self):
        """Parse result from specified URL as JSON data.

        On HTTP failure, tries to parse the error body and raise the
        matching TMDB exception; otherwise re-raises the HTTP error.
        """
        url = self.get_full_url()
        try:
            # catch HTTP error from open()
            data = json.load(self.open())
        except TMDBHTTPError, e:
            try:
                # try to load whatever was returned
                data = json.loads(e.response)
            except:
                # cannot parse json, just raise existing error
                raise e
            else:
                # response parsed, try to raise error from TMDB
                handle_status(data, url)
                # no error from TMDB, just raise existing error
                raise e
        handle_status(data, url)
        if DEBUG:
            import pprint
            pprint.PrettyPrinter().pprint(data)
        return data
# Map of TMDB API status codes to canned exception instances; None
# entries mark codes that indicate success (nothing to raise).
status_handlers = {
    1: None,
    2: TMDBRequestInvalid('Invalid service - This service does not exist.'),
    3: TMDBRequestError('Authentication Failed - You do not have ' +
                        'permissions to access this service.'),
    4: TMDBRequestInvalid("Invalid format - This service doesn't exist " +
                          'in that format.'),
    5: TMDBRequestInvalid('Invalid parameters - Your request parameters ' +
                          'are incorrect.'),
    6: TMDBRequestInvalid('Invalid id - The pre-requisite id is invalid ' +
                          'or not found.'),
    7: TMDBKeyInvalid('Invalid API key - You must be granted a valid key.'),
    8: TMDBRequestError('Duplicate entry - The data you tried to submit ' +
                        'already exists.'),
    # fixed user-facing typo: 'tempirarily' -> 'temporarily'
    9: TMDBOffline('This service is temporarily offline. Try again later.'),
    10: TMDBKeyRevoked('Suspended API key - Access to your account has been ' +
                       'suspended, contact TMDB.'),
    11: TMDBError('Internal error - Something went wrong. Contact TMDb.'),
    12: None,
    13: None,
    14: TMDBRequestError('Authentication Failed.'),
    15: TMDBError('Failed'),
    16: TMDBError('Device Denied'),
    17: TMDBError('Session Denied')}
def handle_status(data, query):
    """Raise the canned exception matching a TMDB response's status_code.

    Codes mapped to None (success/informational) pass silently; a
    missing status_code is treated as success (default 1).

    NOTE(review): an unrecognised status_code raises KeyError here, and
    the instances in status_handlers are module-level shared objects
    that get mutated before being raised -- confirm both are acceptable.
    """
    status = status_handlers[data.get('status_code', 1)]
    if status is not None:
        # annotate the exception with the TMDB error number and query
        status.tmdberrno = data['status_code']
        status.query = query
        raise status

View File

@@ -1,910 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: tmdb_api.py Simple-to-use Python interface to TMDB's API v3
# Python Library
# Author: Raymond Wagner
# Purpose: This Python library is intended to provide a series of classes
# and methods for search and retrieval of text metadata and image
# URLs from TMDB.
# Preliminary API specifications can be found at
# http://help.themoviedb.org/kb/api/about-3
# License: Creative Commons GNU GPL v2
# (http://creativecommons.org/licenses/GPL/2.0/)
#-----------------------
__title__ = ("tmdb_api - Simple-to-use Python interface to TMDB's API v3 " +
"(www.themoviedb.org)")
__author__ = "Raymond Wagner"
__purpose__ = """
This Python library is intended to provide a series of classes and methods
for search and retrieval of text metadata and image URLs from TMDB.
Preliminary API specifications can be found at
http://help.themoviedb.org/kb/api/about-3"""
__version__ = "v0.7.0"
# 0.1.0 Initial development
# 0.2.0 Add caching mechanism for API queries
# 0.2.1 Temporary work around for broken search paging
# 0.3.0 Rework backend machinery for managing OO interface to results
# 0.3.1 Add collection support
# 0.3.2 Remove MythTV key from results.py
# 0.3.3 Add functional language support
# 0.3.4 Re-enable search paging
# 0.3.5 Add methods for grabbing current, popular, and top rated movies
# 0.3.6 Rework paging mechanism
# 0.3.7 Generalize caching mechanism, and allow controllability
# 0.4.0 Add full locale support (language and country) and optional fall through
# 0.4.1 Add custom classmethod for dealing with IMDB movie IDs
# 0.4.2 Improve cache file selection for Windows systems
# 0.4.3 Add a few missed Person properties
# 0.4.4 Add support for additional Studio information
# 0.4.5 Add locale fallthrough for images and alternate titles
# 0.4.6 Add slice support for search results
# 0.5.0 Rework cache framework and improve file cache performance
# 0.6.0 Add user authentication support
# 0.6.1 Add adult filtering for people searches
# 0.6.2 Add similar movie search for Movie objects
# 0.6.3 Add Studio search
# 0.6.4 Add Genre list and associated Movie search
# 0.6.5 Prevent data from being blanked out by subsequent queries
# 0.6.6 Turn date processing errors into mutable warnings
# 0.6.7 Add support for searching by year
# 0.6.8 Add support for collection images
# 0.6.9 Correct Movie image language filtering
# 0.6.10 Add upcoming movie classmethod
# 0.6.11 Fix URL for top rated Movie query
# 0.6.12 Add support for Movie watchlist query and editing
# 0.6.13 Fix URL for rating Movies
# 0.6.14 Add support for Lists
# 0.6.15 Add ability to search Collections
# 0.6.16 Make absent primary images return None (previously u'')
# 0.6.17 Add userrating/votes to Image, add overview to Collection, remove
# releasedate sorting from Collection Movies
# 0.7.0 Add support for television series data
from request import set_key, Request
from util import Datapoint, Datalist, Datadict, Element, NameRepr, SearchRepr
from pager import PagedRequest
from locales import get_locale, set_locale
from tmdb_auth import get_session, set_session
from tmdb_exceptions import *
import json
import urllib
import urllib2
import datetime
DEBUG = False
def process_date(datestr):
    """Convert an ISO 'YYYY-MM-DD' string into a datetime.date.

    Malformed or out-of-range input produces a Warning (attributed to
    the caller's source location) and a None return instead of raising.
    """
    try:
        pieces = [int(piece) for piece in datestr.split('-')]
        return datetime.date(*pieces)
    except (TypeError, ValueError):
        import sys
        import warnings
        import traceback
        # attribute the warning to the frame where parsing failed
        _, _, tb = sys.exc_info()
        fname, lineno, _, _ = traceback.extract_tb(tb)[-1]
        warnings.warn_explicit(('"{0}" is not a supported date format. ' +
                                'Please fix upstream data at ' +
                                'http://www.themoviedb.org.'
                                ).format(datestr), Warning, fname, lineno)
        return None
class Configuration(Element):
    """TMDB /configuration data; used by the Image classes below for
    the image base URL and the available size lists."""
    # 'images' maps to the response's 'images' key (see Image.geturl)
    images = Datapoint('images')

    def _populate(self):
        return Request('configuration')
# Replace the class with a single shared instance: the rest of the
# module uses 'Configuration' as that singleton object.
Configuration = Configuration()
class Account(NameRepr, Element):
    """The TMDB user account attached to the active session."""

    def _populate(self):
        # requires an authenticated session; self._session is provided
        # outside this chunk -- TODO confirm its origin
        return Request('account', session_id=self._session.sessionid)

    id = Datapoint('id')
    adult = Datapoint('include_adult')   # adult-content preference
    country = Datapoint('iso_3166_1')    # ISO 3166-1 country code
    language = Datapoint('iso_639_1')    # ISO 639-1 language code
    name = Datapoint('name')
    username = Datapoint('username')

    @property
    def locale(self):
        """Locale built from the account's language and country."""
        return get_locale(self.language, self.country)
def searchMovie(query, locale=None, adult=False, year=None):
    """Search TMDB for movies matching *query*.

    *year* may be an int or anything date-like carrying a .year
    attribute; *adult* includes adult titles when True.
    """
    params = {'query': query, 'include_adult': adult}
    if year is not None:
        # accept either a bare year or a date-like object
        params['year'] = getattr(year, 'year', year)
    return MovieSearchResult(Request('search/movie', **params), locale=locale)
def searchMovieWithYear(query, locale=None, adult=False):
    """Like searchMovie, but recognises a trailing "(YYYY)" in the
    query, stripping it and using it as the release-year filter when it
    parses as a plausible year (1886-2049)."""
    year = None
    # cheap syntax check for a '(NNNN)' suffix; no regex needed
    if len(query) > 6 and query.endswith(')') and query[-6] == '(':
        try:
            candidate = int(query[-5:-1])
        except ValueError:
            candidate = None
        if candidate is not None and 1885 < candidate < 2050:
            # plausible year: strip ' (YYYY)' from the search text
            query = query[:-7]
            year = candidate
    return searchMovie(query, locale, adult, year)
class MovieSearchResult(SearchRepr, PagedRequest):
    """Stores a list of search matches."""
    _name = None

    def __init__(self, request, locale=None):
        if locale is None:
            locale = get_locale()
        make_movie = lambda raw: Movie(raw=raw, locale=locale)
        super(MovieSearchResult, self).__init__(
            request.new(language=locale.language), make_movie)
def searchSeries(query, first_air_date_year=None, search_type=None, locale=None):
    """Search TMDB for television series matching *query*."""
    request = Request('search/tv', query=query,
                      first_air_date_year=first_air_date_year,
                      search_type=search_type)
    return SeriesSearchResult(request, locale=locale)
class SeriesSearchResult(SearchRepr, PagedRequest):
    """Stores a list of search matches."""
    _name = None

    def __init__(self, request, locale=None):
        if locale is None:
            locale = get_locale()
        make_series = lambda raw: Series(raw=raw, locale=locale)
        super(SeriesSearchResult, self).__init__(
            request.new(language=locale.language), make_series)
def searchPerson(query, adult=False):
    """Search TMDB for people matching *query*; *adult* includes
    adult-filmography entries when True."""
    request = Request('search/person', query=query, include_adult=adult)
    return PeopleSearchResult(request)
class PeopleSearchResult(SearchRepr, PagedRequest):
    """Stores a list of search matches."""
    _name = None

    def __init__(self, request):
        make_person = lambda raw: Person(raw=raw)
        super(PeopleSearchResult, self).__init__(request, make_person)
def searchStudio(query):
    """Search TMDB for production companies matching *query*."""
    request = Request('search/company', query=query)
    return StudioSearchResult(request)
class StudioSearchResult(SearchRepr, PagedRequest):
    """Stores a list of search matches."""
    _name = None

    def __init__(self, request):
        make_studio = lambda raw: Studio(raw=raw)
        super(StudioSearchResult, self).__init__(request, make_studio)
def searchList(query, adult=False):
    """Search TMDB for user-created lists matching *query*.

    Bug fix: the original constructed the result object but never
    returned it, so every call yielded None.
    """
    return ListSearchResult(Request('search/list', query=query,
                                    include_adult=adult))
class ListSearchResult(SearchRepr, PagedRequest):
    """Stores a list of search matches."""
    _name = None

    def __init__(self, request):
        make_list = lambda raw: List(raw=raw)
        super(ListSearchResult, self).__init__(request, make_list)
def searchCollection(query, locale=None):
    """Search TMDB for movie collections matching *query*."""
    request = Request('search/collection', query=query)
    return CollectionSearchResult(request, locale=locale)
class CollectionSearchResult(SearchRepr, PagedRequest):
    """Stores a list of search matches."""
    _name = None

    def __init__(self, request, locale=None):
        if locale is None:
            locale = get_locale()
        make_collection = lambda raw: Collection(raw=raw, locale=locale)
        super(CollectionSearchResult, self).__init__(
            request.new(language=locale.language), make_collection)
class Image(Element):
    """A TMDB-hosted image; base class for Backdrop/Poster/Profile/Logo.

    Sorting prefers images in the active locale's language while
    keeping the relative order of everything else unchanged.
    """
    # leading '/' stripped so geturl() can join path segments cleanly
    filename = Datapoint('file_path', initarg=1,
                         handler=lambda x: x.lstrip('/'))
    aspectratio = Datapoint('aspect_ratio')
    height = Datapoint('height')
    width = Datapoint('width')
    language = Datapoint('iso_639_1')
    userrating = Datapoint('vote_average')
    votes = Datapoint('vote_count')

    def sizes(self):
        # subclasses override with the size lists from /configuration
        return ['original']

    def geturl(self, size='original'):
        """Full download URL for this image at *size*."""
        if size not in self.sizes():
            raise TMDBImageSizeError
        url = Configuration.images['secure_base_url'].rstrip('/')
        return url+'/{0}/{1}'.format(size, self.filename)

    # sort preferring locale's language, but keep remaining ordering consistent
    def __lt__(self, other):
        if not isinstance(other, Image):
            return False
        return (self.language == self._locale.language) \
            and (self.language != other.language)

    def __gt__(self, other):
        if not isinstance(other, Image):
            return True
        return (self.language != other.language) \
            and (other.language == self._locale.language)

    # direct match for comparison
    def __eq__(self, other):
        if not isinstance(other, Image):
            return False
        return self.filename == other.filename

    # special handling for boolean to see if exists
    # (Python 2 truth hook: an Image is falsy when it has no filename)
    def __nonzero__(self):
        if len(self.filename) == 0:
            return False
        return True

    def __repr__(self):
        # BASE62 encoded filename, no need to worry about unicode
        return u"<{0.__class__.__name__} '{0.filename}'>".format(self)
class Backdrop(Image):
    """Backdrop image; valid sizes come from TMDB's /configuration."""
    def sizes(self):
        return Configuration.images['backdrop_sizes']
class Poster(Image):
def sizes(self):
return Configuration.images['poster_sizes']
class Profile(Image):
def sizes(self):
return Configuration.images['profile_sizes']
class Logo(Image):
def sizes(self):
return Configuration.images['logo_sizes']
class AlternateTitle(Element):
    """A movie title used in a specific country."""
    country = Datapoint('iso_3166_1')
    title = Datapoint('title')

    # sort preferring locale's country, but keep remaining ordering consistent
    def __lt__(self, other):
        return (self.country == self._locale.country) \
            and (self.country != other.country)

    def __gt__(self, other):
        return (self.country != other.country) \
            and (other.country == self._locale.country)

    def __eq__(self, other):
        return self.country == other.country

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.title}' ({0.country})>"\
            .format(self).encode('utf-8')
class Person(Element):
    """A cast or crew member, with lazily polled credits and images."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    biography = Datapoint('biography')
    dayofbirth = Datapoint('birthday', default=None, handler=process_date)
    dayofdeath = Datapoint('deathday', default=None, handler=process_date)
    homepage = Datapoint('homepage')
    birthplace = Datapoint('place_of_birth')
    profile = Datapoint('profile_path', handler=Profile,
                        raw=False, default=None)
    adult = Datapoint('adult')
    aliases = Datalist('also_known_as')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}'>"\
            .format(self).encode('utf-8')

    def _populate(self):
        return Request('person/{0}'.format(self.id))

    def _populate_credits(self):
        return Request('person/{0}/credits'.format(self.id),
                       language=self._locale.language)

    def _populate_images(self):
        return Request('person/{0}/images'.format(self.id))

    # credits seen from the person's side: movies they acted in / worked on
    roles = Datalist('cast', handler=lambda x: ReverseCast(raw=x),
                     poller=_populate_credits)
    crew = Datalist('crew', handler=lambda x: ReverseCrew(raw=x),
                    poller=_populate_credits)
    profiles = Datalist('profiles', handler=Profile, poller=_populate_images)
class Cast(Person):
    """A Person plus the character they played and their billing order."""
    character = Datapoint('character')
    order = Datapoint('order')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}' as '{0.character}'>"\
            .format(self).encode('utf-8')


class Crew(Person):
    """A Person plus the job they performed and its department."""
    job = Datapoint('job')
    department = Datapoint('department')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}','{0.job}'>"\
            .format(self).encode('utf-8')


class Keyword(Element):
    """A keyword/tag attached to a movie."""
    id = Datapoint('id')
    name = Datapoint('name')

    def __repr__(self):
        return u"<{0.__class__.__name__} {0.name}>"\
            .format(self).encode('utf-8')
class Release(Element):
    """Certification and release date of a movie in one country."""
    certification = Datapoint('certification')
    country = Datapoint('iso_3166_1')
    releasedate = Datapoint('release_date', handler=process_date)

    def __repr__(self):
        return u"<{0.__class__.__name__} {0.country}, {0.releasedate}>"\
            .format(self).encode('utf-8')


class Trailer(Element):
    """A single trailer video source."""
    name = Datapoint('name')
    size = Datapoint('size')
    source = Datapoint('source')


class YoutubeTrailer(Trailer):
    """Trailer hosted on YouTube; `source` holds the video id."""
    def geturl(self):
        return "http://www.youtube.com/watch?v={0}".format(self.source)

    def __repr__(self):
        # modified BASE64 encoding, no need to worry about unicode
        return u"<{0.__class__.__name__} '{0.name}'>".format(self)
class AppleTrailer(Element):
    """Trailer hosted by Apple, available in several resolutions."""
    name = Datapoint('name')
    sources = Datadict('sources', handler=Trailer, attr='size')

    def sizes(self):
        return self.sources.keys()

    def geturl(self, size=None):
        """Return the video URL at `size`; defaults to the largest available."""
        if size is None:
            # sort assuming ###p format for now, take largest resolution
            size = str(sorted(
                [int(size[:-1]) for size in self.sources]
            )[-1]) + 'p'
        return self.sources[size].source

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}'>".format(self)


class Translation(Element):
    """An alternate language a movie's metadata is available in."""
    name = Datapoint('name')
    language = Datapoint('iso_639_1')
    englishname = Datapoint('english_name')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}' ({0.language})>"\
            .format(self).encode('utf-8')
class Genre(NameRepr, Element):
    """A movie genre, with lazy access to the movies it contains."""
    id = Datapoint('id')
    name = Datapoint('name')

    def _populate_movies(self):
        return Request('genre/{0}/movies'.format(self.id), \
                       language=self._locale.language)

    @property
    def movies(self):
        # Build the named search result once and cache it in _data.
        if 'movies' not in self._data:
            search = MovieSearchResult(self._populate_movies(), \
                                       locale=self._locale)
            search._name = "{0.name} Movies".format(self)
            self._data['movies'] = search
        return self._data['movies']

    @classmethod
    def getAll(cls, locale=None):
        """Return the full list of genres known to TMDB."""
        # Throwaway Element subclass just to drive the list request.
        class GenreList(Element):
            genres = Datalist('genres', handler=Genre)
            def _populate(self):
                return Request('genre/list', language=self._locale.language)
        return GenreList(locale=locale).genres
class Studio(NameRepr, Element):
    """A production company, with lazy access to its movie listing."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    description = Datapoint('description')
    headquarters = Datapoint('headquarters')
    logo = Datapoint('logo_path', handler=Logo, raw=False, default=None)
    # FIXME: manage not-yet-defined handlers in a way that will propogate
    #        locale information properly
    parent = Datapoint('parent_company', handler=lambda x: Studio(raw=x))

    def _populate(self):
        return Request('company/{0}'.format(self.id))

    def _populate_movies(self):
        return Request('company/{0}/movies'.format(self.id),
                       language=self._locale.language)

    # FIXME: add a cleaner way of adding types with no additional processing
    @property
    def movies(self):
        # Build the named search result once and cache it in _data.
        if 'movies' not in self._data:
            search = MovieSearchResult(self._populate_movies(),
                                       locale=self._locale)
            search._name = "{0.name} Movies".format(self)
            self._data['movies'] = search
        return self._data['movies']
class Country(NameRepr, Element):
    """A production country (ISO 3166-1 code plus display name)."""
    code = Datapoint('iso_3166_1')
    name = Datapoint('name')


class Language(NameRepr, Element):
    """A spoken language (ISO 639-1 code plus display name)."""
    code = Datapoint('iso_639_1')
    name = Datapoint('name')
class Movie(Element):
    """A motion picture and its TMDB metadata.

    Most attributes are lazily polled from the API on first access; the
    classmethods below build pre-named, paged search results or look a
    movie up by id.
    """

    @classmethod
    def latest(cls):
        """Return the most recently added movie on TMDB."""
        req = Request('latest/movie')
        req.lifetime = 600  # short cache lifetime: this changes constantly
        return cls(raw=req.readJSON())

    @classmethod
    def nowplaying(cls, locale=None):
        """Movies currently playing in theaters."""
        res = MovieSearchResult(Request('movie/now-playing'), locale=locale)
        res._name = 'Now Playing'
        return res

    @classmethod
    def mostpopular(cls, locale=None):
        """Movies currently most popular on TMDB."""
        res = MovieSearchResult(Request('movie/popular'), locale=locale)
        res._name = 'Popular'
        return res

    @classmethod
    def toprated(cls, locale=None):
        """Highest user-rated movies."""
        res = MovieSearchResult(Request('movie/top_rated'), locale=locale)
        res._name = 'Top Rated'
        return res

    @classmethod
    def upcoming(cls, locale=None):
        """Movies with upcoming theatrical releases."""
        res = MovieSearchResult(Request('movie/upcoming'), locale=locale)
        res._name = 'Upcoming'
        return res

    @classmethod
    def favorites(cls, session=None):
        """The authenticated user's favorite movies."""
        if session is None:
            session = get_session()
        account = Account(session=session)
        res = MovieSearchResult(
            Request('account/{0}/favorite_movies'.format(account.id),
                    session_id=session.sessionid))
        res._name = "Favorites"
        return res

    @classmethod
    def ratedmovies(cls, session=None):
        """Movies the authenticated user has rated."""
        if session is None:
            session = get_session()
        account = Account(session=session)
        res = MovieSearchResult(
            Request('account/{0}/rated_movies'.format(account.id),
                    session_id=session.sessionid))
        res._name = "Movies You Rated"
        return res

    @classmethod
    def watchlist(cls, session=None):
        """The authenticated user's watch list."""
        if session is None:
            session = get_session()
        account = Account(session=session)
        res = MovieSearchResult(
            Request('account/{0}/movie_watchlist'.format(account.id),
                    session_id=session.sessionid))
        res._name = "Movies You're Watching"
        return res

    @classmethod
    def fromIMDB(cls, imdbid, locale=None):
        """Look a movie up by IMDb id, accepting an int or a 'tt...' string."""
        try:
            # assume string
            if not imdbid.startswith('tt'):
                imdbid = "tt{0:0>7}".format(imdbid)
        except AttributeError:
            # assume integer
            imdbid = "tt{0:0>7}".format(imdbid)
        if locale is None:
            locale = get_locale()
        movie = cls(imdbid, locale=locale)
        movie._populate()
        return movie

    id = Datapoint('id', initarg=1)
    title = Datapoint('title')
    originaltitle = Datapoint('original_title')
    tagline = Datapoint('tagline')
    overview = Datapoint('overview')
    runtime = Datapoint('runtime')
    budget = Datapoint('budget')
    revenue = Datapoint('revenue')
    releasedate = Datapoint('release_date', handler=process_date)
    homepage = Datapoint('homepage')
    imdb = Datapoint('imdb_id')

    backdrop = Datapoint('backdrop_path', handler=Backdrop,
                         raw=False, default=None)
    poster = Datapoint('poster_path', handler=Poster,
                       raw=False, default=None)

    popularity = Datapoint('popularity')
    userrating = Datapoint('vote_average')
    votes = Datapoint('vote_count')

    adult = Datapoint('adult')
    collection = Datapoint('belongs_to_collection', handler=lambda x: \
                           Collection(raw=x))
    genres = Datalist('genres', handler=Genre)
    studios = Datalist('production_companies', handler=Studio)
    countries = Datalist('production_countries', handler=Country)
    languages = Datalist('spoken_languages', handler=Language)

    def _populate(self):
        return Request('movie/{0}'.format(self.id), \
                       language=self._locale.language)

    def _populate_titles(self):
        # Only filter by country when locale fall-through is disabled.
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['country'] = self._locale.country
        return Request('movie/{0}/alternative_titles'.format(self.id),
                       **kwargs)

    def _populate_cast(self):
        return Request('movie/{0}/casts'.format(self.id))

    def _populate_images(self):
        # Only filter by language when locale fall-through is disabled.
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['language'] = self._locale.language
        return Request('movie/{0}/images'.format(self.id), **kwargs)

    def _populate_keywords(self):
        return Request('movie/{0}/keywords'.format(self.id))

    def _populate_releases(self):
        return Request('movie/{0}/releases'.format(self.id))

    def _populate_trailers(self):
        return Request('movie/{0}/trailers'.format(self.id),
                       language=self._locale.language)

    def _populate_translations(self):
        return Request('movie/{0}/translations'.format(self.id))

    alternate_titles = Datalist('titles', handler=AlternateTitle, \
                                poller=_populate_titles, sort=True)
    # FIXME: this data point will need to be changed to 'credits' at some point
    cast = Datalist('cast', handler=Cast,
                    poller=_populate_cast, sort='order')
    crew = Datalist('crew', handler=Crew, poller=_populate_cast)
    backdrops = Datalist('backdrops', handler=Backdrop,
                         poller=_populate_images, sort=True)
    posters = Datalist('posters', handler=Poster,
                       poller=_populate_images, sort=True)
    keywords = Datalist('keywords', handler=Keyword,
                        poller=_populate_keywords)
    releases = Datadict('countries', handler=Release,
                        poller=_populate_releases, attr='country')
    youtube_trailers = Datalist('youtube', handler=YoutubeTrailer,
                                poller=_populate_trailers)
    apple_trailers = Datalist('quicktime', handler=AppleTrailer,
                              poller=_populate_trailers)
    translations = Datalist('translations', handler=Translation,
                            poller=_populate_translations)

    def setFavorite(self, value):
        """Mark/unmark this movie as a favorite for the bound session."""
        req = Request('account/{0}/favorite'.format(
            Account(session=self._session).id),
            session_id=self._session.sessionid)
        req.add_data({'movie_id': self.id,
                      'favorite': str(bool(value)).lower()})
        req.lifetime = 0  # write request: never cache
        req.readJSON()

    def setRating(self, value):
        """Submit a user rating (0-10) for this movie."""
        if not (0 <= value <= 10):
            raise TMDBError("Ratings must be between '0' and '10'.")
        req = Request('movie/{0}/rating'.format(self.id),
                      session_id=self._session.sessionid)
        req.lifetime = 0
        req.add_data({'value':value})
        req.readJSON()

    def setWatchlist(self, value):
        """Add/remove this movie from the session user's watch list."""
        req = Request('account/{0}/movie_watchlist'.format(
            Account(session=self._session).id),
            session_id=self._session.sessionid)
        req.lifetime = 0
        req.add_data({'movie_id': self.id,
                      'movie_watchlist': str(bool(value)).lower()})
        req.readJSON()

    def getSimilar(self):
        """Backwards-compatible alias for the `similar` property."""
        return self.similar

    @property
    def similar(self):
        """Paged search result of movies similar to this one."""
        res = MovieSearchResult(Request(
            'movie/{0}/similar_movies'.format(self.id)),
            locale=self._locale)
        res._name = 'Similar to {0}'.format(self._printable_name())
        return res

    @property
    def lists(self):
        """Paged search result of user lists containing this movie."""
        res = ListSearchResult(Request('movie/{0}/lists'.format(self.id)))
        res._name = "Lists containing {0}".format(self._printable_name())
        return res

    def _printable_name(self):
        # Best-effort human readable name: title, else original title,
        # with the release year appended when known.
        if self.title is not None:
            s = u"'{0}'".format(self.title)
        elif self.originaltitle is not None:
            s = u"'{0}'".format(self.originaltitle)
        else:
            s = u"'No Title'"
        if self.releasedate:
            s = u"{0} ({1})".format(s, self.releasedate.year)
        return s

    def __repr__(self):
        return u"<{0} {1}>".format(self.__class__.__name__,
                                   self._printable_name()).encode('utf-8')
class ReverseCast(Movie):
    """A Movie as seen from a Person's credits: includes the role played."""
    character = Datapoint('character')

    def __repr__(self):
        return (u"<{0.__class__.__name__} '{0.character}' on {1}>"
                .format(self, self._printable_name()).encode('utf-8'))


class ReverseCrew(Movie):
    """A Movie as seen from a Person's credits: includes the job performed."""
    department = Datapoint('department')
    job = Datapoint('job')

    def __repr__(self):
        return (u"<{0.__class__.__name__} '{0.job}' for {1}>"
                .format(self, self._printable_name()).encode('utf-8'))
class Collection(NameRepr, Element):
    """A named collection of related movies (e.g. a film franchise)."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    backdrop = Datapoint('backdrop_path', handler=Backdrop, \
                         raw=False, default=None)
    poster = Datapoint('poster_path', handler=Poster, raw=False, default=None)
    members = Datalist('parts', handler=Movie)
    overview = Datapoint('overview')

    def _populate(self):
        return Request('collection/{0}'.format(self.id),
                       language=self._locale.language)

    def _populate_images(self):
        # Only filter by language when locale fall-through is disabled.
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['language'] = self._locale.language
        return Request('collection/{0}/images'.format(self.id), **kwargs)

    backdrops = Datalist('backdrops', handler=Backdrop,
                         poller=_populate_images, sort=True)
    posters = Datalist('posters', handler=Poster,
                       poller=_populate_images, sort=True)
class List(NameRepr, Element):
    """A user-curated list of movies."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    author = Datapoint('created_by')
    description = Datapoint('description')
    favorites = Datapoint('favorite_count')
    language = Datapoint('iso_639_1')
    count = Datapoint('item_count')
    poster = Datapoint('poster_path', handler=Poster, raw=False, default=None)
    members = Datalist('items', handler=Movie)

    def _populate(self):
        return Request('list/{0}'.format(self.id))


class Network(NameRepr, Element):
    """A television network."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
class Episode(NameRepr, Element):
    """One episode of a television series.

    Identified by (series_id, season_number, episode_number), which are the
    three positional init arguments.
    """
    episode_number = Datapoint('episode_number', initarg=3)
    season_number = Datapoint('season_number', initarg=2)
    series_id = Datapoint('series_id', initarg=1)
    air_date = Datapoint('air_date', handler=process_date)
    overview = Datapoint('overview')
    name = Datapoint('name')
    userrating = Datapoint('vote_average')
    votes = Datapoint('vote_count')
    id = Datapoint('id')
    production_code = Datapoint('production_code')
    still = Datapoint('still_path', handler=Backdrop, raw=False, default=None)

    def _populate(self):
        return Request('tv/{0}/season/{1}/episode/{2}'.format(self.series_id, self.season_number, self.episode_number),
                       language=self._locale.language)

    def _populate_cast(self):
        return Request('tv/{0}/season/{1}/episode/{2}/credits'.format(
            self.series_id, self.season_number, self.episode_number),
            language=self._locale.language)

    def _populate_external_ids(self):
        return Request('tv/{0}/season/{1}/episode/{2}/external_ids'.format(
            self.series_id, self.season_number, self.episode_number))

    def _populate_images(self):
        # Only filter by language when locale fall-through is disabled.
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['language'] = self._locale.language
        return Request('tv/{0}/season/{1}/episode/{2}/images'.format(
            self.series_id, self.season_number, self.episode_number), **kwargs)

    cast = Datalist('cast', handler=Cast,
                    poller=_populate_cast, sort='order')
    guest_stars = Datalist('guest_stars', handler=Cast,
                           poller=_populate_cast, sort='order')
    crew = Datalist('crew', handler=Crew, poller=_populate_cast)
    imdb_id = Datapoint('imdb_id', poller=_populate_external_ids)
    freebase_id = Datapoint('freebase_id', poller=_populate_external_ids)
    freebase_mid = Datapoint('freebase_mid', poller=_populate_external_ids)
    tvdb_id = Datapoint('tvdb_id', poller=_populate_external_ids)
    tvrage_id = Datapoint('tvrage_id', poller=_populate_external_ids)
    stills = Datalist('stills', handler=Backdrop, poller=_populate_images, sort=True)
class Season(NameRepr, Element):
    """One season of a television series, identified by (series_id, season_number)."""
    season_number = Datapoint('season_number', initarg=2)
    series_id = Datapoint('series_id', initarg=1)
    id = Datapoint('id')
    air_date = Datapoint('air_date', handler=process_date)
    poster = Datapoint('poster_path', handler=Poster, raw=False, default=None)
    overview = Datapoint('overview')
    name = Datapoint('name')
    # episodes keyed by episode number; passthrough forwards this season's
    # identifiers into each child Episode so it can poll on its own
    episodes = Datadict('episodes', attr='episode_number', handler=Episode,
                        passthrough={'series_id': 'series_id', 'season_number': 'season_number'})

    def _populate(self):
        return Request('tv/{0}/season/{1}'.format(self.series_id, self.season_number),
                       language=self._locale.language)

    def _populate_images(self):
        # Only filter by language when locale fall-through is disabled.
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['language'] = self._locale.language
        return Request('tv/{0}/season/{1}/images'.format(self.series_id, self.season_number), **kwargs)

    def _populate_external_ids(self):
        return Request('tv/{0}/season/{1}/external_ids'.format(self.series_id, self.season_number))

    posters = Datalist('posters', handler=Poster,
                       poller=_populate_images, sort=True)
    freebase_id = Datapoint('freebase_id', poller=_populate_external_ids)
    freebase_mid = Datapoint('freebase_mid', poller=_populate_external_ids)
    tvdb_id = Datapoint('tvdb_id', poller=_populate_external_ids)
    tvrage_id = Datapoint('tvrage_id', poller=_populate_external_ids)
class Series(NameRepr, Element):
    """A television series and its TMDB metadata."""
    id = Datapoint('id', initarg=1)
    backdrop = Datapoint('backdrop_path', handler=Backdrop, raw=False, default=None)
    authors = Datalist('created_by', handler=Person)
    episode_run_times = Datalist('episode_run_time')
    first_air_date = Datapoint('first_air_date', handler=process_date)
    last_air_date = Datapoint('last_air_date', handler=process_date)
    genres = Datalist('genres', handler=Genre)
    homepage = Datapoint('homepage')
    in_production = Datapoint('in_production')
    languages = Datalist('languages')
    origin_countries = Datalist('origin_country')
    name = Datapoint('name')
    original_name = Datapoint('original_name')
    number_of_episodes = Datapoint('number_of_episodes')
    number_of_seasons = Datapoint('number_of_seasons')
    overview = Datapoint('overview')
    popularity = Datapoint('popularity')
    status = Datapoint('status')
    userrating = Datapoint('vote_average')
    votes = Datapoint('vote_count')
    poster = Datapoint('poster_path', handler=Poster, raw=False, default=None)
    networks = Datalist('networks', handler=Network)
    # seasons keyed by season number; passthrough hands our id down to each
    # child Season as its `series_id`
    seasons = Datadict('seasons', attr='season_number', handler=Season, passthrough={'id': 'series_id'})

    def _populate(self):
        return Request('tv/{0}'.format(self.id),
                       language=self._locale.language)

    def _populate_cast(self):
        return Request('tv/{0}/credits'.format(self.id))

    def _populate_images(self):
        # Only filter by language when locale fall-through is disabled.
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['language'] = self._locale.language
        return Request('tv/{0}/images'.format(self.id), **kwargs)

    def _populate_external_ids(self):
        return Request('tv/{0}/external_ids'.format(self.id))

    cast = Datalist('cast', handler=Cast,
                    poller=_populate_cast, sort='order')
    crew = Datalist('crew', handler=Crew, poller=_populate_cast)
    backdrops = Datalist('backdrops', handler=Backdrop,
                         poller=_populate_images, sort=True)
    posters = Datalist('posters', handler=Poster,
                       poller=_populate_images, sort=True)
    imdb_id = Datapoint('imdb_id', poller=_populate_external_ids)
    freebase_id = Datapoint('freebase_id', poller=_populate_external_ids)
    freebase_mid = Datapoint('freebase_mid', poller=_populate_external_ids)
    tvdb_id = Datapoint('tvdb_id', poller=_populate_external_ids)
    tvrage_id = Datapoint('tvrage_id', poller=_populate_external_ids)

View File

@@ -1,138 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: tmdb_auth.py
# Python Library
# Author: Raymond Wagner
# Purpose: Provide authentication and session services for
# calls against the TMDB v3 API
#-----------------------
from datetime import datetime as _pydatetime, \
tzinfo as _pytzinfo
import re
class datetime(_pydatetime):
    """Customized datetime class with ISO 8601 format parsing.

    Accepts strings like '2012-05-09T12:30:15Z', with optional seconds
    and an optional 'Z' / '+HH:MM' / '-HHMM' timezone suffix.
    """
    _reiso = re.compile('(?P<year>[0-9]{4})'
                        '-(?P<month>[0-9]{1,2})'
                        '-(?P<day>[0-9]{1,2})'
                        '.'
                        '(?P<hour>[0-9]{2})'
                        ':(?P<min>[0-9]{2})'
                        '(:(?P<sec>[0-9]{2}))?'
                        '(?P<tz>Z|'
                        '(?P<tzdirec>[-+])'
                        '(?P<tzhour>[0-9]{1,2})'
                        '(:)?'
                        '(?P<tzmin>[0-9]{2})?'
                        ')?')

    class _tzinfo(_pytzinfo):
        """Fixed-offset timezone built from the parsed '+HH:MM' suffix."""
        def __init__(self, direc='+', hr=0, min=0):
            # BUGFIX: 'timedelta' was used here without ever being
            # imported (the module only imports datetime/tzinfo), which
            # raised NameError on any timezone-aware parse. Import it
            # locally to keep this class self-contained.
            from datetime import timedelta
            # BUGFIX: the sign must apply to the minutes as well;
            # previously '-05:30' produced -5h +30m == -4:30 instead
            # of the intended -5:30.
            sign = -1 if direc == '-' else 1
            self._offset = timedelta(hours=sign * int(hr),
                                     minutes=sign * int(min))

        def utcoffset(self, dt):
            return self._offset

        def tzname(self, dt):
            return ''

        def dst(self, dt):
            from datetime import timedelta
            return timedelta(0)

    @classmethod
    def fromIso(cls, isotime, sep='T'):
        """Parse an ISO 8601 string into a (possibly aware) datetime.

        Raises TypeError when `isotime` does not match the format.
        """
        match = cls._reiso.match(isotime)
        if match is None:
            raise TypeError("time data '%s' does not match ISO 8601 format"
                            % isotime)
        # year, month, day, hour, minute
        dt = [int(a) for a in match.groups()[:5]]
        if match.group('sec') is not None:
            dt.append(int(match.group('sec')))
        else:
            dt.append(0)
        if match.group('tz'):
            if match.group('tz') == 'Z':
                tz = cls._tzinfo()
            elif match.group('tzmin'):
                tz = cls._tzinfo(*match.group('tzdirec', 'tzhour', 'tzmin'))
            else:
                tz = cls._tzinfo(*match.group('tzdirec', 'tzhour'))
            dt.append(0)  # microseconds
            dt.append(tz)
        return cls(*dt)
from request import Request
from tmdb_exceptions import *
syssession = None
def set_session(sessionid):
    """Install `sessionid` as the process-wide default session."""
    global syssession
    syssession = Session(sessionid)


def get_session(sessionid=None):
    """Return a Session: explicit id wins, then the global default,
    otherwise a brand new unauthenticated session."""
    global syssession
    if sessionid:
        return Session(sessionid)
    elif syssession is not None:
        return syssession
    else:
        return Session.new()
class Session(object):
    """Wraps a TMDB user session id, lazily created from an auth token.

    Workflow: request `authtoken`, send the user to `callbackurl` to
    approve it, then access `sessionid` to exchange the approved token
    for a session.
    """

    @classmethod
    def new(cls):
        """Return a fresh, not-yet-authenticated session."""
        return cls(None)

    def __init__(self, sessionid):
        self.sessionid = sessionid

    @property
    def sessionid(self):
        # Lazily exchange the auth token for a session id on first access.
        if self._sessionid is None:
            if self._authtoken is None:
                raise TMDBError("No Auth Token to produce Session for")
            # TODO: check authtoken expiration against current time
            req = Request('authentication/session/new',
                          request_token=self._authtoken)
            req.lifetime = 0  # authentication responses must never be cached
            dat = req.readJSON()
            if not dat['success']:
                raise TMDBError("Session generation failed")
            self._sessionid = dat['session_id']
        return self._sessionid

    @sessionid.setter
    def sessionid(self, value):
        # Setting the id resets any pending auth token state.
        self._sessionid = value
        self._authtoken = None
        self._authtokenexpiration = None
        if value is None:
            self.authenticated = False
        else:
            self.authenticated = True

    @property
    def authtoken(self):
        """Request (once) and return a token the user must approve."""
        if self.authenticated:
            raise TMDBError("Session is already authenticated")
        if self._authtoken is None:
            req = Request('authentication/token/new')
            req.lifetime = 0
            dat = req.readJSON()
            if not dat['success']:
                raise TMDBError("Auth Token request failed")
            self._authtoken = dat['request_token']
            self._authtokenexpiration = datetime.fromIso(dat['expires_at'])
        return self._authtoken

    @property
    def callbackurl(self):
        """URL the end user must visit to approve the pending auth token."""
        return "http://www.themoviedb.org/authenticate/"+self._authtoken

View File

@@ -1,107 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: tmdb_exceptions.py Common exceptions used in tmdbv3 API library
# Python Library
# Author: Raymond Wagner
#-----------------------
class TMDBError(Exception):
    """Base exception for the TMDB API library.

    Each subclass maps to one of the numeric error codes below; when no
    explicit errno is given, the code is derived from the subclass name
    (e.g. TMDBKeyInvalid -> KeyInvalid -> 30).
    """
    Error = 0
    KeyError = 10
    KeyMissing = 20
    KeyInvalid = 30
    KeyRevoked = 40
    RequestError = 50
    RequestInvalid = 51
    PagingIssue = 60
    CacheError = 70
    CacheReadError = 71
    CacheWriteError = 72
    CacheDirectoryError = 73
    ImageSizeError = 80
    HTTPError = 90
    Offline = 100
    LocaleError = 110

    def __init__(self, msg=None, errno=0):
        self.errno = errno
        if errno == 0:
            # BUGFIX: derive the code by stripping the 'TMDB' prefix from
            # the class name ('TMDBKeyError' -> attribute 'KeyError').
            # The previous code *prepended* 'TMDB', looking up e.g.
            # 'TMDBTMDBKeyError', which never exists, so every subclass
            # silently reported errno 0.
            self.errno = getattr(self, self.__class__.__name__[4:], errno)
        self.args = (msg,)
# Concrete exception hierarchy. Each class corresponds to an errno
# constant defined on TMDBError (looked up by name in TMDBError.__init__).
class TMDBKeyError(TMDBError):
    pass


class TMDBKeyMissing(TMDBKeyError):
    pass


class TMDBKeyInvalid(TMDBKeyError):
    pass


class TMDBKeyRevoked(TMDBKeyInvalid):
    pass


class TMDBRequestError(TMDBError):
    pass


class TMDBRequestInvalid(TMDBRequestError):
    pass


class TMDBPagingIssue(TMDBRequestError):
    pass


class TMDBCacheError(TMDBRequestError):
    pass


class TMDBCacheReadError(TMDBCacheError):
    """Cache file exists but is not readable by this user."""
    def __init__(self, filename):
        super(TMDBCacheReadError, self).__init__(
            "User does not have permission to access cache file: {0}."\
            .format(filename))
        self.filename = filename


class TMDBCacheWriteError(TMDBCacheError):
    """Cache file is not writable by this user."""
    def __init__(self, filename):
        super(TMDBCacheWriteError, self).__init__(
            "User does not have permission to write cache file: {0}."\
            .format(filename))
        self.filename = filename


class TMDBCacheDirectoryError(TMDBCacheError):
    """Directory that should hold the cache file is missing."""
    def __init__(self, filename):
        super(TMDBCacheDirectoryError, self).__init__(
            "Directory containing cache file does not exist: {0}."\
            .format(filename))
        self.filename = filename


class TMDBImageSizeError(TMDBError):
    pass


class TMDBHTTPError(TMDBError):
    """Wraps an HTTP error raised while talking to the API server."""
    def __init__(self, err):
        self.httperrno = err.code
        self.response = err.fp.read()
        super(TMDBHTTPError, self).__init__(str(err))


class TMDBOffline(TMDBError):
    pass


class TMDBLocaleError(TMDBError):
    pass

View File

@@ -1,403 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#-----------------------
# Name: util.py Assorted utilities used in tmdb_api
# Python Library
# Author: Raymond Wagner
#-----------------------
from copy import copy
from locales import get_locale
from tmdb_auth import get_session
class NameRepr(object):
    """Mixin for __repr__ methods using 'name' attribute."""
    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}'>"\
            .format(self).encode('utf-8')


class SearchRepr(object):
    """
    Mixin for __repr__ methods for classes with '_name' and
    '_request' attributes.
    """
    def __repr__(self):
        # Fall back to the original query string when no explicit name set.
        name = self._name if self._name else self._request._kwargs['query']
        return u"<Search Results: {0}>".format(name).encode('utf-8')
class Poller(object):
    """
    Wrapper for an optional callable to populate an Element derived
    class with raw data, or data from a Request.
    """
    def __init__(self, func, lookup, inst=None):
        self.func = func
        self.lookup = lookup  # maps JSON field name -> Element attribute name
        self.inst = inst
        if func:
            # with function, this allows polling data from the API
            self.__doc__ = func.__doc__
            self.__name__ = func.__name__
            self.__module__ = func.__module__
        else:
            # without function, this is just a dummy poller used for applying
            # raw data to a new Element class with the lookup table
            self.__name__ = '_populate'

    def __get__(self, inst, owner):
        # normal descriptor behavior:
        #   class access  -> return self
        #   object access -> return a copy of self bound to the instance
        if inst is None:
            return self
        func = None
        if self.func:
            func = self.func.__get__(inst, owner)
        return self.__class__(func, self.lookup, inst)

    def __call__(self):
        # retrieve data from callable function, and apply
        if not callable(self.func):
            raise RuntimeError('Poller object called without a source function')
        req = self.func()
        # BUGFIX: parenthesize the 'or' — without the parentheses, 'and'
        # bound tighter, so any request carrying a 'language' filter took
        # the fall-through path even when locale fallthrough was disabled,
        # issuing an extra unfiltered query and overwriting filtered data.
        if (('language' in req._kwargs) or ('country' in req._kwargs)) \
                and self.inst._locale.fallthrough:
            # request specifies a locale filter, and fallthrough is enabled
            # run a first pass with specified filter
            if not self.apply(req.readJSON(), False):
                return
            # if first pass results in missed data, run a second pass to
            # fill in the gaps
            self.apply(req.new(language=None, country=None).readJSON())
            # re-apply the filtered first pass data over top the second
            # unfiltered set. this is to work around the issue that the
            # properties have no way of knowing when they should or
            # should not overwrite existing data. the cache engine will
            # take care of the duplicate query
        self.apply(req.readJSON())

    def apply(self, data, set_nones=True):
        """Apply `data` directly, bypassing the callable function.

        Returns True when at least one field was left unfilled (only
        possible with set_nones=False).
        """
        unfilled = False
        for k, v in self.lookup.items():
            if (k in data) and \
                    ((data[k] is not None) if callable(self.func) else True):
                # argument received data, populate it
                setattr(self.inst, v, data[k])
            elif v in self.inst._data:
                # argument did not receive data, but Element already contains
                # some value, so skip this
                continue
            elif set_nones:
                # argument did not receive data, so fill it with None
                # to indicate such and prevent a repeat scan
                setattr(self.inst, v, None)
            else:
                # argument does not need data, so ignore it allowing it to
                # trigger a later poll. this is intended for use when
                # initializing a class with raw data, or when performing a
                # first pass through when performing locale fall through
                unfilled = True
        return unfilled
class Data(object):
    """
    Basic response definition class
    This maps to a single key in a JSON dictionary received from the API
    """
    def __init__(self, field, initarg=None, handler=None, poller=None,
                 raw=True, default=u'', lang=None, passthrough={}):
        """
        This defines how the dictionary value is to be processed by the
        poller

        field       -- defines the dictionary key that filters what data
                       this uses
        initarg     -- (optional) specifies that this field must be
                       supplied when creating a new instance of the Element
                       class this definition is mapped to. Takes an integer
                       for the order it should be used in the input
                       arguments
        handler     -- (optional) callable used to process the received
                       value before being stored in the Element object.
        poller      -- (optional) callable to be used if data is requested
                       and this value has not yet been defined. the
                       callable should return a dictionary of data from a
                       JSON query. many definitions may share a single
                       poller, which will be called and the data used to
                       populate all referenced definitions based off their
                       defined field
        raw         -- (optional) if the specified handler is an Element
                       class, the data will be passed into it using the
                       'raw' keyword attribute. setting this to false
                       will force the data to instead be passed in as the
                       first argument
        passthrough -- (optional) mapping of attribute names copied from
                       the parent Element onto the child Element
        """
        self.field = field
        self.initarg = initarg
        self.poller = poller
        self.raw = raw
        self.default = default
        self.sethandler(handler)
        self.passthrough = passthrough

    def __get__(self, inst, owner):
        if inst is None:
            return self
        if self.field not in inst._data:
            if self.poller is None:
                return None
            # trigger the bound poller to fetch and apply the data
            self.poller.__get__(inst, owner)()
        return inst._data[self.field]

    def __set__(self, inst, value):
        if (value is not None) and (value != ''):
            value = self.handler(value)
        else:
            value = self.default
        if isinstance(value, Element):
            value._locale = inst._locale
            value._session = inst._session
            # BUGFIX: iterate key/value pairs. Iterating the dict directly
            # yields only keys, so unpacking 'source, dest' raised a
            # ValueError for any Datapoint with a non-empty passthrough.
            # Datalist and Datadict already used .items() correctly.
            for source, dest in self.passthrough.items():
                setattr(value, dest, getattr(inst, source))
        inst._data[self.field] = value

    def sethandler(self, handler):
        # ensure handler is always callable, even for passthrough data
        if handler is None:
            self.handler = lambda x: x
        elif isinstance(handler, ElementType) and self.raw:
            self.handler = lambda x: handler(raw=x)
        else:
            self.handler = lambda x: handler(x)
class Datapoint(Data):
    """Response definition for a single scalar value (alias of Data)."""
    pass
class Datalist(Data):
    """
    Response definition class for list data
    This maps to a key in a JSON dictionary storing a list of data
    """
    def __init__(self, field, handler=None, poller=None, sort=None, raw=True, passthrough={}):
        """
        This defines how the dictionary value is to be processed by the
        poller

        field       -- defines the dictionary key that filters what data
                       this uses
        handler     -- (optional) callable used to process the received
                       value before being stored in the Element object.
        poller      -- (optional) callable to be used if data is requested
                       and this value has not yet been defined. the
                       callable should return a dictionary of data from a
                       JSON query. many definitions may share a single
                       poller, which will be called and the data used to
                       populate all referenced definitions based off their
                       defined field
        sort        -- (optional) name of attribute in resultant data to be
                       used to sort the list after processing. this
                       effectively requires a handler be defined to process
                       the data into something that has attributes
        raw         -- (optional) if the specified handler is an Element
                       class, the data will be passed into it using the
                       'raw' keyword attribute. setting this to false will
                       force the data to instead be passed in as the first
                       argument
        """
        super(Datalist, self).__init__(field, None, handler, poller, raw, passthrough=passthrough)
        self.sort = sort

    def __set__(self, inst, value):
        data = []
        if value:
            for val in value:
                val = self.handler(val)
                if isinstance(val, Element):
                    # propagate locale/session and passthrough attributes
                    # from the parent Element to each child
                    val._locale = inst._locale
                    val._session = inst._session
                    for source, dest in self.passthrough.items():
                        setattr(val, dest, getattr(inst, source))
                data.append(val)
            if self.sort:
                # sort=True uses natural ordering; otherwise sort by the
                # named attribute of each processed item
                if self.sort is True:
                    data.sort()
                else:
                    data.sort(key=lambda x: getattr(x, self.sort))
        inst._data[self.field] = data
class Datadict(Data):
    """
    Response definition class for dictionary data
    This maps to a key in a JSON dictionary storing a dictionary of data
    """
    def __init__(self, field, handler=None, poller=None, raw=True,
                 key=None, attr=None, passthrough={}):
        """
        This defines how the dictionary value is to be processed by the
        poller

        field       -- defines the dictionary key that filters what data
                       this uses
        handler     -- (optional) callable used to process the received
                       value before being stored in the Element object.
        poller      -- (optional) callable to be used if data is requested
                       and this value has not yet been defined. the
                       callable should return a dictionary of data from a
                       JSON query. many definitions may share a single
                       poller, which will be called and the data used to
                       populate all referenced definitions based off their
                       defined field
        key         -- (optional) name of key in resultant data to be used
                       as the key in the stored dictionary. if this is not
                       provided, the field name from the source data is
                       used instead
        attr        -- (optional) name of attribute in resultant data to be
                       used as the key in the stored dictionary. if this is
                       not provided, the field name from the source data is
                       used instead
        raw         -- (optional) if the specified handler is an Element
                       class, the data will be passed into it using the
                       'raw' keyword attribute. setting this to false will
                       force the data to instead be passed in as the first
                       argument
        """
        if key and attr:
            raise TypeError("`key` and `attr` cannot both be defined")
        super(Datadict, self).__init__(field, None, handler, poller, raw, passthrough=passthrough)
        # choose how each processed item contributes its dictionary key
        if key:
            self.getkey = lambda x: x[key]
        elif attr:
            self.getkey = lambda x: getattr(x, attr)
        else:
            raise TypeError("Datadict requires `key` or `attr` be defined " +
                            "for populating the dictionary")

    def __set__(self, inst, value):
        data = {}
        if value:
            for val in value:
                val = self.handler(val)
                if isinstance(val, Element):
                    # propagate locale/session and passthrough attributes
                    # from the parent Element to each child
                    val._locale = inst._locale
                    val._session = inst._session
                    for source, dest in self.passthrough.items():
                        setattr(val, dest, getattr(inst, source))
                data[self.getkey(val)] = val
        inst._data[self.field] = data
class ElementType( type ):
    """
    MetaClass used to pre-process Element-derived classes and set up the
    Data definitions
    """
    def __new__(mcs, name, bases, attrs):
        """
        Collect Data and Poller definitions from this class and all of its
        ElementType-derived bases, wire each Data attribute to the Poller
        that populates it, and record the ordered tuple of positional
        initialization arguments in `_InitArgs`.
        """
        # any Data or Poller object defined in parent classes must be cloned
        # and processed in this class to function properly
        # scan through available bases for all such definitions and insert
        # a copy into this class's attributes
        # run in reverse order so higher priority values overwrite lower ones
        data = {}
        pollers = {'_populate':None}
        for base in reversed(bases):
            if isinstance(base, mcs):
                for k, attr in base.__dict__.items():
                    if isinstance(attr, Data):
                        # extract copies of each defined Data element from
                        # parent classes; unwrap the parent's Poller back to
                        # the raw function so it can be re-wrapped below
                        attr = copy(attr)
                        attr.poller = attr.poller.func
                        data[k] = attr
                    elif isinstance(attr, Poller):
                        # extract copies of each defined Poller function
                        # from parent classes
                        pollers[k] = attr.func
        for k, attr in attrs.items():
            if isinstance(attr, Data):
                data[k] = attr
        if '_populate' in attrs:
            pollers['_populate'] = attrs['_populate']
        # process all defined Data attributes, testing for use as an initial
        # argument, and building a list of what Pollers are used to populate
        # which Data points
        pollermap = dict([(k, []) for k in pollers])
        initargs = []
        for k, v in data.items():
            v.name = k
            if v.initarg:
                initargs.append(v)
            if v.poller:
                pn = v.poller.__name__
                if pn not in pollermap:
                    pollermap[pn] = []
                if pn not in pollers:
                    pollers[pn] = v.poller
                pollermap[pn].append(v)
            else:
                # Data points without an explicit poller fall back to the
                # class-wide '_populate' poller
                pollermap['_populate'].append(v)
        # wrap each used poller function with a Poller class, and push into
        # the new class attributes
        for k, v in pollermap.items():
            if len(v) == 0:
                continue
            # lookup maps JSON field name -> attribute name for this poller
            lookup = dict([(attr.field, attr.name) for attr in v])
            poller = Poller(pollers[k], lookup)
            attrs[k] = poller
            # backfill wrapped Poller into each mapped Data object, and ensure
            # the data elements are defined for this new class
            for attr in v:
                attr.poller = poller
                attrs[attr.name] = attr
        # build sorted list of arguments used for initialization
        # (sorted by each Data definition's declared initarg priority)
        attrs['_InitArgs'] = tuple(
            [a.name for a in sorted(initargs, key=lambda x: x.initarg)])
        return type.__new__(mcs, name, bases, attrs)
    def __call__(cls, *args, **kwargs):
        """
        Construct a new instance, binding locale and session, then populate
        it either from a 'raw' JSON dict or from the positional arguments
        declared by the class's `_InitArgs` Data definitions.
        """
        obj = cls.__new__(cls)
        if ('locale' in kwargs) and (kwargs['locale'] is not None):
            obj._locale = kwargs['locale']
        else:
            obj._locale = get_locale()
        if 'session' in kwargs:
            obj._session = kwargs['session']
        else:
            obj._session = get_session()
        obj._data = {}
        if 'raw' in kwargs:
            # if 'raw' keyword is supplied, create populate object manually
            if len(args) != 0:
                raise TypeError(
                    '__init__() takes exactly 2 arguments (1 given)')
            obj._populate.apply(kwargs['raw'], False)
        else:
            # if not, the number of input arguments must exactly match that
            # defined by the Data definitions
            if len(args) != len(cls._InitArgs):
                raise TypeError(
                    '__init__() takes exactly {0} arguments ({1} given)'\
                        .format(len(cls._InitArgs)+1, len(args)+1))
            for a, v in zip(cls._InitArgs, args):
                setattr(obj, a, v)
        obj.__init__()
        return obj
class Element( object ):
__metaclass__ = ElementType
_lang = 'en'