Use TheMovieDB v3 api
This commit is contained in:
@@ -594,17 +594,6 @@ class Scanner(Plugin):
|
||||
except:
|
||||
pass
|
||||
|
||||
# Search based on OpenSubtitleHash
|
||||
if not imdb_id and not group['is_dvd']:
|
||||
for cur_file in files['movie']:
|
||||
movie = fireEvent('movie.by_hash', file = cur_file, merge = True)
|
||||
|
||||
if len(movie) > 0:
|
||||
imdb_id = movie[0].get('imdb')
|
||||
if imdb_id:
|
||||
log.debug('Found movie via OpenSubtitleHash: %s', cur_file)
|
||||
break
|
||||
|
||||
# Search based on identifiers
|
||||
if not imdb_id:
|
||||
for identifier in group['identifiers']:
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
from couchpotato.core.event import addEvent
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toUnicode
|
||||
from couchpotato.core.helpers.encoding import simplifyString, toUnicode, ss
|
||||
from couchpotato.core.helpers.variable import md5
|
||||
from couchpotato.core.logger import CPLog
|
||||
from couchpotato.core.providers.info.base import MovieProvider
|
||||
from themoviedb import tmdb
|
||||
from couchpotato.environment import Env
|
||||
import os
|
||||
import tmdb3
|
||||
import traceback
|
||||
|
||||
log = CPLog(__name__)
|
||||
@@ -11,44 +14,13 @@ log = CPLog(__name__)
|
||||
class TheMovieDb(MovieProvider):
|
||||
|
||||
def __init__(self):
|
||||
addEvent('movie.by_hash', self.byHash)
|
||||
addEvent('movie.search', self.search, priority = 2)
|
||||
addEvent('movie.info', self.getInfo, priority = 2)
|
||||
addEvent('movie.info_by_tmdb', self.getInfoByTMDBId)
|
||||
addEvent('movie.info_by_tmdb', self.getInfo)
|
||||
|
||||
# Use base wrapper
|
||||
tmdb.configure(self.conf('api_key'))
|
||||
|
||||
def byHash(self, file):
|
||||
''' Find movie by hash '''
|
||||
|
||||
if self.isDisabled():
|
||||
return False
|
||||
|
||||
cache_key = 'tmdb.cache.%s' % simplifyString(file)
|
||||
results = self.getCache(cache_key)
|
||||
|
||||
if not results:
|
||||
log.debug('Searching for movie by hash: %s', file)
|
||||
try:
|
||||
raw = tmdb.searchByHashingFile(file)
|
||||
|
||||
results = []
|
||||
if raw:
|
||||
try:
|
||||
results = self.parseMovie(raw)
|
||||
log.info('Found: %s', results['titles'][0] + ' (' + str(results.get('year', 0)) + ')')
|
||||
|
||||
self.setCache(cache_key, results)
|
||||
return results
|
||||
except SyntaxError, e:
|
||||
log.error('Failed to parse XML response: %s', e)
|
||||
return False
|
||||
except:
|
||||
log.debug('No movies known by hash for: %s', file)
|
||||
pass
|
||||
|
||||
return results
|
||||
# Configure TMDB settings
|
||||
tmdb3.set_key(self.conf('api_key'))
|
||||
tmdb3.set_cache(engine='file', filename=os.path.join(Env.get('cache_dir'), 'python', 'tmdb.cache'))
|
||||
|
||||
def search(self, q, limit = 12):
|
||||
''' Find movie by name '''
|
||||
@@ -65,7 +37,7 @@ class TheMovieDb(MovieProvider):
|
||||
|
||||
raw = None
|
||||
try:
|
||||
raw = tmdb.search(search_string)
|
||||
raw = tmdb3.searchMovie(search_string)
|
||||
except:
|
||||
log.error('Failed searching TMDB for "%s": %s', (search_string, traceback.format_exc()))
|
||||
|
||||
@@ -75,7 +47,7 @@ class TheMovieDb(MovieProvider):
|
||||
nr = 0
|
||||
|
||||
for movie in raw:
|
||||
results.append(self.parseMovie(movie))
|
||||
results.append(self.parseMovie(movie, with_titles = False))
|
||||
|
||||
nr += 1
|
||||
if nr == limit:
|
||||
@@ -83,7 +55,7 @@ class TheMovieDb(MovieProvider):
|
||||
|
||||
log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])
|
||||
|
||||
self.setCache(cache_key, results)
|
||||
self.setCache(md5(ss(cache_key)), results)
|
||||
return results
|
||||
except SyntaxError, e:
|
||||
log.error('Failed to parse XML response: %s', e)
|
||||
@@ -105,109 +77,75 @@ class TheMovieDb(MovieProvider):
|
||||
|
||||
try:
|
||||
log.debug('Getting info: %s', cache_key)
|
||||
movie = tmdb.imdbLookup(id = identifier)
|
||||
except:
|
||||
pass
|
||||
|
||||
if movie:
|
||||
result = self.parseMovie(movie[0])
|
||||
self.setCache(cache_key, result)
|
||||
|
||||
return result
|
||||
|
||||
def getInfoByTMDBId(self, id = None):
|
||||
|
||||
cache_key = 'tmdb.cache.%s' % id
|
||||
result = self.getCache(cache_key)
|
||||
|
||||
if not result:
|
||||
result = {}
|
||||
movie = None
|
||||
|
||||
try:
|
||||
log.debug('Getting info: %s', cache_key)
|
||||
movie = tmdb.getMovieInfo(id = id)
|
||||
except:
|
||||
pass
|
||||
|
||||
if movie:
|
||||
movie = tmdb3.Movie(identifier)
|
||||
result = self.parseMovie(movie)
|
||||
self.setCache(cache_key, result)
|
||||
except:
|
||||
pass
|
||||
|
||||
return result
|
||||
|
||||
def parseMovie(self, movie):
|
||||
def parseMovie(self, movie, with_titles = True):
|
||||
|
||||
# Images
|
||||
poster = self.getImage(movie, type = 'poster', size = 'cover')
|
||||
#backdrop = self.getImage(movie, type = 'backdrop', size = 'w1280')
|
||||
poster = self.getImage(movie, type = 'poster', size = 'poster')
|
||||
poster_original = self.getImage(movie, type = 'poster', size = 'original')
|
||||
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original')
|
||||
|
||||
# Genres
|
||||
try:
|
||||
genres = self.getCategory(movie, 'genre')
|
||||
genres = [genre.name for genre in movie.genres]
|
||||
except:
|
||||
genres = []
|
||||
|
||||
# 1900 is the same as None
|
||||
year = str(movie.get('released', 'none'))[:4]
|
||||
if year == '1900' or year.lower() == 'none':
|
||||
year = str(movie.releasedate or '')[:4]
|
||||
if not movie.releasedate or year == '1900' or year.lower() == 'none':
|
||||
year = None
|
||||
|
||||
movie_data = {
|
||||
'via_tmdb': True,
|
||||
'tmdb_id': int(movie.get('id', 0)),
|
||||
'titles': [toUnicode(movie.get('name'))],
|
||||
'original_title': movie.get('original_name'),
|
||||
'tmdb_id': movie.id,
|
||||
'titles': [toUnicode(movie.title)],
|
||||
'original_title': movie.originaltitle,
|
||||
'images': {
|
||||
'poster': [poster] if poster else [],
|
||||
#'backdrop': [backdrop] if backdrop else [],
|
||||
'poster_original': [poster_original] if poster_original else [],
|
||||
'backdrop_original': [backdrop_original] if backdrop_original else [],
|
||||
},
|
||||
'imdb': movie.get('imdb_id'),
|
||||
'mpaa': movie.get('certification', ''),
|
||||
'runtime': movie.get('runtime'),
|
||||
'released': movie.get('released'),
|
||||
'imdb': movie.imdb,
|
||||
'runtime': movie.runtime,
|
||||
'released': movie.releasedate,
|
||||
'year': year,
|
||||
'plot': movie.get('overview'),
|
||||
'plot': movie.overview,
|
||||
'genres': genres,
|
||||
}
|
||||
|
||||
movie_data = dict((k, v) for k, v in movie_data.iteritems() if v)
|
||||
|
||||
# Add alternative names
|
||||
for alt in ['original_name', 'alternative_name']:
|
||||
alt_name = toUnicode(movie.get(alt))
|
||||
if alt_name and not alt_name in movie_data['titles'] and alt_name.lower() != 'none' and alt_name != None:
|
||||
movie_data['titles'].append(alt_name)
|
||||
if with_titles:
|
||||
movie_data['titles'].append(movie.originaltitle)
|
||||
for alt in movie.alternate_titles:
|
||||
alt_name = alt.title
|
||||
if alt_name and not alt_name in movie_data['titles'] and alt_name.lower() != 'none' and alt_name != None:
|
||||
movie_data['titles'].append(alt_name)
|
||||
|
||||
movie_data['titles'] = list(set(movie_data['titles']))
|
||||
|
||||
return movie_data
|
||||
|
||||
def getImage(self, movie, type = 'poster', size = 'cover'):
|
||||
def getImage(self, movie, type = 'poster', size = 'poster'):
|
||||
|
||||
image_url = ''
|
||||
for image in movie.get('images', []):
|
||||
if(image.get('type') == type) and image.get(size):
|
||||
image_url = image.get(size)
|
||||
break
|
||||
try:
|
||||
image_url = getattr(movie, type).geturl(size='original')
|
||||
except:
|
||||
log.debug('Failed getting %s.%s for "%s"', (type, size, movie.title))
|
||||
|
||||
return image_url
|
||||
|
||||
def getCategory(self, movie, type = 'genre'):
|
||||
|
||||
cats = movie.get('categories', {}).get(type)
|
||||
|
||||
categories = []
|
||||
for category in cats:
|
||||
try:
|
||||
categories.append(category)
|
||||
except:
|
||||
pass
|
||||
|
||||
return categories
|
||||
|
||||
def isDisabled(self):
|
||||
if self.conf('api_key') == '':
|
||||
log.error('No API key provided.')
|
||||
|
||||
@@ -9,7 +9,7 @@ class TMDB(UserscriptBase):
|
||||
|
||||
def getMovie(self, url):
|
||||
match = re.search('(?P<id>\d+)', url)
|
||||
movie = fireEvent('movie.info_by_tmdb', id = match.group('id'), merge = True)
|
||||
movie = fireEvent('movie.info_by_tmdb', identifier = match.group('id'), merge = True)
|
||||
|
||||
if movie['imdb']:
|
||||
return self.getInfo(movie['imdb'])
|
||||
|
||||
@@ -1,740 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#-*- coding:utf-8 -*-
|
||||
#author:doganaydin /// forked from dbr/Ben
|
||||
#project:themoviedb
|
||||
#repository:http://github.com/doganaydin/themoviedb
|
||||
#license: LGPLv2 http://www.gnu.org/licenses/lgpl.html
|
||||
|
||||
"""An interface to the themoviedb.org API"""
|
||||
|
||||
__author__ = "doganaydin"
|
||||
__version__ = "0.5"
|
||||
|
||||
|
||||
config = {}
|
||||
|
||||
def configure(api_key):
|
||||
config['apikey'] = api_key
|
||||
config['urls'] = {}
|
||||
config['urls']['movie.search'] = "http://api.themoviedb.org/2.1/Movie.search/en/xml/%(apikey)s/%%s" % (config)
|
||||
config['urls']['movie.getInfo'] = "http://api.themoviedb.org/2.1/Movie.getInfo/en/xml/%(apikey)s/%%s" % (config)
|
||||
config['urls']['media.getInfo'] = "http://api.themoviedb.org/2.1/Media.getInfo/en/xml/%(apikey)s/%%s/%%s" % (config)
|
||||
config['urls']['imdb.lookUp'] = "http://api.themoviedb.org/2.1/Movie.imdbLookup/en/xml/%(apikey)s/%%s" % (config)
|
||||
config['urls']['movie.browse'] = "http://api.themoviedb.org/2.1/Movie.browse/en-US/xml/%(apikey)s?%%s" % (config)
|
||||
|
||||
import os, struct, urllib, urllib2, xml.etree.cElementTree as ElementTree
|
||||
|
||||
class TmdBaseError(Exception):
|
||||
pass
|
||||
|
||||
class TmdNoResults(TmdBaseError):
|
||||
pass
|
||||
|
||||
class TmdHttpError(TmdBaseError):
|
||||
pass
|
||||
|
||||
class TmdXmlError(TmdBaseError):
|
||||
pass
|
||||
|
||||
class TmdConfigError(TmdBaseError):
|
||||
pass
|
||||
|
||||
def opensubtitleHashFile(name):
|
||||
"""Hashes a file using OpenSubtitle's method.
|
||||
> In natural language it calculates: size + 64bit chksum of the first and
|
||||
> last 64k (even if they overlap because the file is smaller than 128k).
|
||||
A slightly more Pythonic version of the Python solution on..
|
||||
http://trac.opensubtitles.org/projects/opensubtitles/wiki/HashSourceCodes
|
||||
"""
|
||||
longlongformat = 'q'
|
||||
bytesize = struct.calcsize(longlongformat)
|
||||
|
||||
f = open(name, "rb")
|
||||
|
||||
filesize = os.path.getsize(name)
|
||||
fhash = filesize
|
||||
|
||||
if filesize < 65536 * 2:
|
||||
raise ValueError("File size must be larger than %s bytes (is %s)" % (65536 * 2, filesize))
|
||||
|
||||
for x in range(65536 / bytesize):
|
||||
buf = f.read(bytesize)
|
||||
(l_value,) = struct.unpack(longlongformat, buf)
|
||||
fhash += l_value
|
||||
fhash = fhash & 0xFFFFFFFFFFFFFFFF # to remain as 64bit number
|
||||
|
||||
f.seek(max(0, filesize - 65536), 0)
|
||||
for x in range(65536 / bytesize):
|
||||
buf = f.read(bytesize)
|
||||
(l_value,) = struct.unpack(longlongformat, buf)
|
||||
fhash += l_value
|
||||
fhash = fhash & 0xFFFFFFFFFFFFFFFF
|
||||
|
||||
f.close()
|
||||
return "%016x" % fhash
|
||||
|
||||
class XmlHandler:
|
||||
"""Deals with retrieval of XML files from API"""
|
||||
def __init__(self, url):
|
||||
self.url = url
|
||||
|
||||
def _grabUrl(self, url):
|
||||
try:
|
||||
urlhandle = urllib2.urlopen(url)
|
||||
except IOError, errormsg:
|
||||
raise TmdHttpError(errormsg)
|
||||
if urlhandle.code >= 400:
|
||||
raise TmdHttpError("HTTP status code was %d" % urlhandle.code)
|
||||
return urlhandle.read()
|
||||
|
||||
def getEt(self):
|
||||
xml = self._grabUrl(self.url)
|
||||
try:
|
||||
et = ElementTree.fromstring(xml)
|
||||
except SyntaxError, errormsg:
|
||||
raise TmdXmlError(errormsg)
|
||||
return et
|
||||
|
||||
class SearchResults(list):
|
||||
"""Stores a list of Movie's that matched the search"""
|
||||
def __repr__(self):
|
||||
return "<Search results: %s>" % (list.__repr__(self))
|
||||
|
||||
class MovieResult(dict):
|
||||
"""A dict containing the information about a specific search result"""
|
||||
def __repr__(self):
|
||||
return "<MovieResult: %s (%s)>" % (self.get("name"), self.get("released"))
|
||||
|
||||
def info(self):
|
||||
"""Performs a MovieDb.getMovieInfo search on the current id, returns
|
||||
a Movie object
|
||||
"""
|
||||
cur_id = self['id']
|
||||
info = MovieDb().getMovieInfo(cur_id)
|
||||
return info
|
||||
|
||||
class Movie(dict):
|
||||
"""A dict containing the information about the film"""
|
||||
def __repr__(self):
|
||||
return "<MovieResult: %s (%s)>" % (self.get("name"), self.get("released"))
|
||||
|
||||
class Categories(dict):
|
||||
"""Stores category information"""
|
||||
def set(self, category_et):
|
||||
"""Takes an elementtree Element ('category') and stores the url,
|
||||
using the type and name as the dict key.
|
||||
For example:
|
||||
<category type="genre" url="http://themoviedb.org/encyclopedia/category/80" name="Crime"/>
|
||||
..becomes:
|
||||
categories['genre']['Crime'] = 'http://themoviedb.org/encyclopedia/category/80'
|
||||
"""
|
||||
_type = category_et.get("type")
|
||||
name = category_et.get("name")
|
||||
url = category_et.get("url")
|
||||
self.setdefault(_type, {})[name] = url
|
||||
self[_type][name] = url
|
||||
|
||||
class Studios(dict):
|
||||
"""Stores category information"""
|
||||
def set(self, studio_et):
|
||||
"""Takes an elementtree Element ('studio') and stores the url,
|
||||
using the name as the dict key.
|
||||
For example:
|
||||
<studio url="http://www.themoviedb.org/encyclopedia/company/20" name="Miramax Films"/>
|
||||
..becomes:
|
||||
studios['name'] = 'http://www.themoviedb.org/encyclopedia/company/20'
|
||||
"""
|
||||
name = studio_et.get("name")
|
||||
url = studio_et.get("url")
|
||||
self[name] = url
|
||||
|
||||
class Countries(dict):
|
||||
"""Stores country information"""
|
||||
def set(self, country_et):
|
||||
"""Takes an elementtree Element ('country') and stores the url,
|
||||
using the name and code as the dict key.
|
||||
For example:
|
||||
<country url="http://www.themoviedb.org/encyclopedia/country/223" name="United States of America" code="US"/>
|
||||
..becomes:
|
||||
countries['code']['name'] = 'http://www.themoviedb.org/encyclopedia/country/223'
|
||||
"""
|
||||
code = country_et.get("code")
|
||||
name = country_et.get("name")
|
||||
url = country_et.get("url")
|
||||
self.setdefault(code, {})[name] = url
|
||||
|
||||
class Image(dict):
|
||||
"""Stores image information for a single poster/backdrop (includes
|
||||
multiple sizes)
|
||||
"""
|
||||
def __init__(self, _id, _type, size, url):
|
||||
self['id'] = _id
|
||||
self['type'] = _type
|
||||
|
||||
def largest(self):
|
||||
for csize in ["original", "mid", "cover", "thumb"]:
|
||||
if csize in self:
|
||||
return csize
|
||||
|
||||
def __repr__(self):
|
||||
return "<Image (%s for ID %s)>" % (self['type'], self['id'])
|
||||
|
||||
class ImagesList(list):
|
||||
"""Stores a list of Images, and functions to filter "only posters" etc"""
|
||||
def set(self, image_et):
|
||||
"""Takes an elementtree Element ('image') and stores the url,
|
||||
along with the type, id and size.
|
||||
Is a list containing each image as a dictionary (which includes the
|
||||
various sizes)
|
||||
For example:
|
||||
<image type="poster" size="original" url="http://images.themoviedb.org/posters/4181/67926_sin-city-02-color_122_207lo.jpg" id="4181"/>
|
||||
..becomes:
|
||||
images[0] = {'id':4181', 'type': 'poster', 'original': 'http://images.themov...'}
|
||||
"""
|
||||
_type = image_et.get("type")
|
||||
_id = image_et.get("id")
|
||||
size = image_et.get("size")
|
||||
url = image_et.get("url")
|
||||
cur = self.find_by('id', _id)
|
||||
if len(cur) == 0:
|
||||
nimg = Image(_id = _id, _type = _type, size = size, url = url)
|
||||
self.append(nimg)
|
||||
elif len(cur) == 1:
|
||||
cur[0][size] = url
|
||||
else:
|
||||
raise ValueError("Found more than one poster with id %s, this should never happen" % (_id))
|
||||
|
||||
def find_by(self, key, value):
|
||||
ret = []
|
||||
for cur in self:
|
||||
if cur[key] == value:
|
||||
ret.append(cur)
|
||||
return ret
|
||||
|
||||
@property
|
||||
def posters(self):
|
||||
return self.find_by('type', 'poster')
|
||||
|
||||
@property
|
||||
def backdrops(self):
|
||||
return self.find_by('type', 'backdrop')
|
||||
|
||||
class CrewRoleList(dict):
|
||||
"""Stores a list of roles, such as director, actor etc
|
||||
>>> import tmdb
|
||||
>>> tmdb.getMovieInfo(550)['cast'].keys()[:5]
|
||||
['casting', 'producer', 'author', 'sound editor', 'actor']
|
||||
"""
|
||||
pass
|
||||
|
||||
class CrewList(list):
|
||||
"""Stores list of crew in specific role
|
||||
>>> import tmdb
|
||||
>>> tmdb.getMovieInfo(550)['cast']['author']
|
||||
[<author (id 7468): Chuck Palahniuk>, <author (id 7469): Jim Uhls>]
|
||||
"""
|
||||
pass
|
||||
|
||||
class Person(dict):
|
||||
"""Stores information about a specific member of cast"""
|
||||
def __init__(self, job, _id, name, character, url):
|
||||
self['job'] = job
|
||||
self['id'] = _id
|
||||
self['name'] = name
|
||||
self['character'] = character
|
||||
self['url'] = url
|
||||
|
||||
def __repr__(self):
|
||||
if self['character'] is None or self['character'] == "":
|
||||
return "<%(job)s (id %(id)s): %(name)s>" % self
|
||||
else:
|
||||
return "<%(job)s (id %(id)s): %(name)s (as %(character)s)>" % self
|
||||
|
||||
class MovieDb:
|
||||
"""Main interface to www.themoviedb.com
|
||||
The search() method searches for the film by title.
|
||||
The getMovieInfo() method retrieves information about a specific movie using themoviedb id.
|
||||
"""
|
||||
def _parseSearchResults(self, movie_element):
|
||||
cur_movie = MovieResult()
|
||||
cur_images = ImagesList()
|
||||
for item in movie_element.getchildren():
|
||||
if item.tag.lower() == "images":
|
||||
for subitem in item.getchildren():
|
||||
cur_images.set(subitem)
|
||||
else:
|
||||
cur_movie[item.tag] = item.text
|
||||
cur_movie['images'] = cur_images
|
||||
return cur_movie
|
||||
|
||||
def _parseMovie(self, movie_element):
|
||||
cur_movie = Movie()
|
||||
cur_categories = Categories()
|
||||
cur_studios = Studios()
|
||||
cur_countries = Countries()
|
||||
cur_images = ImagesList()
|
||||
cur_cast = CrewRoleList()
|
||||
for item in movie_element.getchildren():
|
||||
if item.tag.lower() == "categories":
|
||||
for subitem in item.getchildren():
|
||||
cur_categories.set(subitem)
|
||||
elif item.tag.lower() == "studios":
|
||||
for subitem in item.getchildren():
|
||||
cur_studios.set(subitem)
|
||||
elif item.tag.lower() == "countries":
|
||||
for subitem in item.getchildren():
|
||||
cur_countries.set(subitem)
|
||||
elif item.tag.lower() == "images":
|
||||
for subitem in item.getchildren():
|
||||
cur_images.set(subitem)
|
||||
elif item.tag.lower() == "cast":
|
||||
for subitem in item.getchildren():
|
||||
job = subitem.get("job").lower()
|
||||
p = Person(
|
||||
job = job,
|
||||
_id = subitem.get("id"),
|
||||
name = subitem.get("name"),
|
||||
character = subitem.get("character"),
|
||||
url = subitem.get("url"),
|
||||
)
|
||||
cur_cast.setdefault(job, CrewList()).append(p)
|
||||
else:
|
||||
cur_movie[item.tag] = item.text
|
||||
|
||||
cur_movie['categories'] = cur_categories
|
||||
cur_movie['studios'] = cur_studios
|
||||
cur_movie['countries'] = cur_countries
|
||||
cur_movie['images'] = cur_images
|
||||
cur_movie['cast'] = cur_cast
|
||||
return cur_movie
|
||||
|
||||
def search(self, title):
|
||||
"""Searches for a film by its title.
|
||||
Returns SearchResults (a list) containing all matches (Movie instances)
|
||||
"""
|
||||
title = urllib.quote(title.encode("utf-8"))
|
||||
url = config['urls']['movie.search'] % (title)
|
||||
etree = XmlHandler(url).getEt()
|
||||
search_results = SearchResults()
|
||||
for cur_result in etree.find("movies").findall("movie"):
|
||||
cur_movie = self._parseSearchResults(cur_result)
|
||||
search_results.append(cur_movie)
|
||||
return search_results
|
||||
|
||||
def getMovieInfo(self, id):
|
||||
"""Returns movie info by it's TheMovieDb ID.
|
||||
Returns a Movie instance
|
||||
"""
|
||||
url = config['urls']['movie.getInfo'] % (id)
|
||||
etree = XmlHandler(url).getEt()
|
||||
moviesTree = etree.find("movies").findall("movie")
|
||||
|
||||
if len(moviesTree) == 0:
|
||||
raise TmdNoResults("No results for id %s" % id)
|
||||
return self._parseMovie(moviesTree[0])
|
||||
|
||||
def mediaGetInfo(self, hash, size):
|
||||
"""Used to retrieve specific information about a movie but instead of
|
||||
passing a TMDb ID, you pass a file hash and filesize in bytes
|
||||
"""
|
||||
url = config['urls']['media.getInfo'] % (hash, size)
|
||||
etree = XmlHandler(url).getEt()
|
||||
moviesTree = etree.find("movies").findall("movie")
|
||||
if len(moviesTree) == 0:
|
||||
raise TmdNoResults("No results for hash %s" % hash)
|
||||
return [self._parseMovie(x) for x in moviesTree]
|
||||
|
||||
def imdbLookup(self, id = 0, title = False):
|
||||
if not config.get('apikey'):
|
||||
raise TmdConfigError("API Key not set")
|
||||
if id > 0:
|
||||
url = config['urls']['imdb.lookUp'] % (id)
|
||||
else:
|
||||
_imdb_id = self.search(title)[0]["imdb_id"]
|
||||
url = config['urls']['imdb.lookUp'] % (_imdb_id)
|
||||
etree = XmlHandler(url).getEt()
|
||||
lookup_results = SearchResults()
|
||||
for cur_lookup in etree.find("movies").findall("movie"):
|
||||
cur_movie = self._parseSearchResults(cur_lookup)
|
||||
lookup_results.append(cur_movie)
|
||||
return lookup_results
|
||||
|
||||
class Browse:
|
||||
|
||||
def __init__(self, params = {}):
|
||||
"""
|
||||
tmdb.Browse(params)
|
||||
default params = {"order_by":"release","order":"desc"}
|
||||
params = {"query":"some query","release_max":"1991",...}
|
||||
all posible parameters = http://api.themoviedb.org/2.1/methods/Movie.browse
|
||||
"""
|
||||
if "order_by" not in params:
|
||||
params.update({"order_by":"release"})
|
||||
if "order" not in params:
|
||||
params.update({"order":"desc"})
|
||||
|
||||
self.params = urllib.urlencode(params)
|
||||
self.movie = self.look(self.params)
|
||||
|
||||
def look(self, look_for):
|
||||
url = config['urls']['movie.browse'] % (look_for)
|
||||
etree = XmlHandler(url).getEt()
|
||||
look_results = SearchResults()
|
||||
for cur_lookup in etree.find("movies").findall("movie"):
|
||||
cur_movie = self._parseSearchResults(cur_lookup)
|
||||
look_results.append(cur_movie)
|
||||
return look_results
|
||||
|
||||
def _parseSearchResults(self, movie_element):
|
||||
cur_movie = MovieResult()
|
||||
cur_images = ImagesList()
|
||||
for item in movie_element.getchildren():
|
||||
if item.tag.lower() == "images":
|
||||
for subitem in item.getchildren():
|
||||
cur_images.set(subitem)
|
||||
else:
|
||||
cur_movie[item.tag] = item.text
|
||||
cur_movie['images'] = cur_images
|
||||
return cur_movie
|
||||
|
||||
def getTotal(self):
|
||||
return len(self.movie)
|
||||
|
||||
def getRating(self, i):
|
||||
return self.movie[i]["rating"]
|
||||
|
||||
def getVotes(self, i):
|
||||
return self.movie[i]["votes"]
|
||||
|
||||
def getName(self, i):
|
||||
return self.movie[i]["name"]
|
||||
|
||||
def getLanguage(self, i):
|
||||
return self.movie[i]["language"]
|
||||
|
||||
def getCertification(self, i):
|
||||
return self.movie[i]["certification"]
|
||||
|
||||
def getUrl(self, i):
|
||||
return self.movie[i]["url"]
|
||||
|
||||
def getOverview(self, i):
|
||||
return self.movie[i]["overview"]
|
||||
|
||||
def getPopularity(self, i):
|
||||
return self.movie[i]["popularity"]
|
||||
|
||||
def getOriginalName(self, i):
|
||||
return self.movie[i]["original_name"]
|
||||
|
||||
def getLastModified(self, i):
|
||||
return self.movie[i]["last_modified_at"]
|
||||
|
||||
def getImdbId(self, i):
|
||||
return self.movie[i]["imdb_id"]
|
||||
|
||||
def getReleased(self, i):
|
||||
return self.movie[i]["released"]
|
||||
|
||||
def getScore(self, i):
|
||||
return self.movie[i]["score"]
|
||||
|
||||
def getAdult(self, i):
|
||||
return self.movie[i]["adult"]
|
||||
|
||||
def getVersion(self, i):
|
||||
return self.movie[i]["version"]
|
||||
|
||||
def getTranslated(self, i):
|
||||
return self.movie[i]["translated"]
|
||||
|
||||
def getType(self, i):
|
||||
return self.movie[i]["type"]
|
||||
|
||||
def getId(self, i):
|
||||
return self.movie[i]["id"]
|
||||
|
||||
def getAlternativeName(self, i):
|
||||
return self.movie[i]["alternative_name"]
|
||||
|
||||
def getPoster(self, i, size):
|
||||
if size == "thumb" or size == "t":
|
||||
return self.movie[i]["images"][0]["thumb"]
|
||||
elif size == "cover" or size == "c":
|
||||
return self.movie[i]["images"][0]["cover"]
|
||||
else:
|
||||
return self.movie[i]["images"][0]["mid"]
|
||||
|
||||
def getBackdrop(self, i, size):
|
||||
if size == "poster" or size == "p":
|
||||
return self.movie[i]["images"][1]["poster"]
|
||||
else:
|
||||
return self.movie[i]["images"][1]["thumb"]
|
||||
|
||||
|
||||
|
||||
# Shortcuts for tmdb search method
|
||||
# using:
|
||||
# movie = tmdb.tmdb("Sin City")
|
||||
# print movie.getRating -> 7.0
|
||||
class tmdb:
|
||||
|
||||
def __init__(self, name):
|
||||
"""Convenience wrapper for MovieDb.search - so you can do..
|
||||
>>> import tmdb
|
||||
>>> movie = tmdb.tmdb("Fight Club")
|
||||
>>> ranking = movie.getRanking() or votes = movie.getVotes()
|
||||
<Search results: [<MovieResult: Fight Club (1999-09-16)>]>
|
||||
"""
|
||||
mdb = MovieDb()
|
||||
self.movie = mdb.search(name)
|
||||
|
||||
def getTotal(self):
|
||||
return len(self.movie)
|
||||
|
||||
def getRating(self, i):
|
||||
return self.movie[i]["rating"]
|
||||
|
||||
def getVotes(self, i):
|
||||
return self.movie[i]["votes"]
|
||||
|
||||
def getName(self, i):
|
||||
return self.movie[i]["name"]
|
||||
|
||||
def getLanguage(self, i):
|
||||
return self.movie[i]["language"]
|
||||
|
||||
def getCertification(self, i):
|
||||
return self.movie[i]["certification"]
|
||||
|
||||
def getUrl(self, i):
|
||||
return self.movie[i]["url"]
|
||||
|
||||
def getOverview(self, i):
|
||||
return self.movie[i]["overview"]
|
||||
|
||||
def getPopularity(self, i):
|
||||
return self.movie[i]["popularity"]
|
||||
|
||||
def getOriginalName(self, i):
|
||||
return self.movie[i]["original_name"]
|
||||
|
||||
def getLastModified(self, i):
|
||||
return self.movie[i]["last_modified_at"]
|
||||
|
||||
def getImdbId(self, i):
|
||||
return self.movie[i]["imdb_id"]
|
||||
|
||||
def getReleased(self, i):
|
||||
return self.movie[i]["released"]
|
||||
|
||||
def getScore(self, i):
|
||||
return self.movie[i]["score"]
|
||||
|
||||
def getAdult(self, i):
|
||||
return self.movie[i]["adult"]
|
||||
|
||||
def getVersion(self, i):
|
||||
return self.movie[i]["version"]
|
||||
|
||||
def getTranslated(self, i):
|
||||
return self.movie[i]["translated"]
|
||||
|
||||
def getType(self, i):
|
||||
return self.movie[i]["type"]
|
||||
|
||||
def getId(self, i):
|
||||
return self.movie[i]["id"]
|
||||
|
||||
def getAlternativeName(self, i):
|
||||
return self.movie[i]["alternative_name"]
|
||||
|
||||
def getPoster(self, i, size):
|
||||
if size == "thumb" or size == "t":
|
||||
return self.movie[i]["images"][0]["thumb"]
|
||||
elif size == "cover" or size == "c":
|
||||
return self.movie[i]["images"][0]["cover"]
|
||||
else:
|
||||
return self.movie[i]["images"][0]["mid"]
|
||||
|
||||
def getBackdrop(self, i, size):
|
||||
if size == "poster" or size == "p":
|
||||
return self.movie[i]["images"][1]["poster"]
|
||||
else:
|
||||
return self.movie[i]["images"][1]["thumb"]
|
||||
|
||||
# Shortcuts for imdb lookup method
|
||||
# using:
|
||||
# movie = tmdb.imdb("Sin City")
|
||||
# print movie.getRating -> 7.0
|
||||
class imdb:
|
||||
|
||||
def __init__(self, id = 0, title = False):
|
||||
# get first movie if result=0
|
||||
"""Convenience wrapper for MovieDb.search - so you can do..
|
||||
>>> import tmdb
|
||||
>>> movie = tmdb.imdb(title="Fight Club") # or movie = tmdb.imdb(id=imdb_id)
|
||||
>>> ranking = movie.getRanking() or votes = movie.getVotes()
|
||||
<Search results: [<MovieResult: Fight Club (1999-09-16)>]>
|
||||
"""
|
||||
self.id = id
|
||||
self.title = title
|
||||
self.mdb = MovieDb()
|
||||
self.movie = self.mdb.imdbLookup(self.id, self.title)
|
||||
|
||||
def getTotal(self):
|
||||
return len(self.movie)
|
||||
|
||||
def getRuntime(self, i):
|
||||
return self.movie[i]["runtime"]
|
||||
|
||||
def getCategories(self):
|
||||
from xml.dom.minidom import parse
|
||||
adres = config['urls']['imdb.lookUp'] % self.getImdbId()
|
||||
d = parse(urllib2.urlopen(adres))
|
||||
s = d.getElementsByTagName("categories")
|
||||
ds = []
|
||||
for i in range(len(s[0].childNodes)):
|
||||
if i % 2 > 0:
|
||||
ds.append(s[0].childNodes[i].getAttribute("name"))
|
||||
return ds
|
||||
|
||||
def getRating(self, i):
|
||||
return self.movie[i]["rating"]
|
||||
|
||||
def getVotes(self, i):
|
||||
return self.movie[i]["votes"]
|
||||
|
||||
def getName(self, i):
|
||||
return self.movie[i]["name"]
|
||||
|
||||
def getLanguage(self, i):
|
||||
return self.movie[i]["language"]
|
||||
|
||||
def getCertification(self, i):
|
||||
return self.movie[i]["certification"]
|
||||
|
||||
def getUrl(self, i):
|
||||
return self.movie[i]["url"]
|
||||
|
||||
def getOverview(self, i):
|
||||
return self.movie[i]["overview"]
|
||||
|
||||
def getPopularity(self, i):
|
||||
return self.movie[i]["popularity"]
|
||||
|
||||
def getOriginalName(self, i):
|
||||
return self.movie[i]["original_name"]
|
||||
|
||||
def getLastModified(self, i):
|
||||
return self.movie[i]["last_modified_at"]
|
||||
|
||||
def getImdbId(self, i):
|
||||
return self.movie[i]["imdb_id"]
|
||||
|
||||
def getReleased(self, i):
|
||||
return self.movie[i]["released"]
|
||||
|
||||
def getAdult(self, i):
|
||||
return self.movie[i]["adult"]
|
||||
|
||||
def getVersion(self, i):
|
||||
return self.movie[i]["version"]
|
||||
|
||||
def getTranslated(self, i):
|
||||
return self.movie[i]["translated"]
|
||||
|
||||
def getType(self, i):
|
||||
return self.movie[i]["type"]
|
||||
|
||||
def getId(self, i):
|
||||
return self.movie[i]["id"]
|
||||
|
||||
def getAlternativeName(self, i):
|
||||
return self.movie[i]["alternative_name"]
|
||||
|
||||
def getPoster(self, i, size):
|
||||
poster = []
|
||||
if size == "thumb" or size == "t":
|
||||
_size = "thumb"
|
||||
elif size == "cover" or size == "c":
|
||||
_size = "cover"
|
||||
else:
|
||||
_size = "mid"
|
||||
for a in self.movie[i]["images"]:
|
||||
if a["type"] == "poster":
|
||||
poster.append(a[_size])
|
||||
return poster
|
||||
del poster
|
||||
|
||||
def getBackdrop(self, i, size):
|
||||
backdrop = []
|
||||
if size == "thumb" or size == "t":
|
||||
_size = "thumb"
|
||||
elif size == "cover" or size == "c":
|
||||
_size = "cover"
|
||||
else:
|
||||
_size = "mid"
|
||||
for a in self.movie[i]["images"]:
|
||||
if a["type"] == "backdrop":
|
||||
backdrop.append(a[_size])
|
||||
return backdrop
|
||||
del backdrop
|
||||
|
||||
def imdbLookup(id = 0, title = False):
|
||||
"""Convenience wrapper for Imdb.Lookup - so you can do..
|
||||
>>> import tmdb
|
||||
>>> tmdb.imdbLookup("Fight Club")
|
||||
<Search results: [<MovieResult: Fight Club (1999-09-16)>]>
|
||||
"""
|
||||
mdb = MovieDb()
|
||||
return mdb.imdbLookup(id, title)
|
||||
|
||||
def search(name):
|
||||
"""Convenience wrapper for MovieDb.search - so you can do..
|
||||
>>> import tmdb
|
||||
>>> tmdb.search("Fight Club")
|
||||
<Search results: [<MovieResult: Fight Club (1999-09-16)>]>
|
||||
"""
|
||||
mdb = MovieDb()
|
||||
return mdb.search(name)
|
||||
|
||||
def getMovieInfo(id):
    """Module-level shortcut for MovieDb.getMovieInfo, e.g.

    >>> import tmdb
    >>> tmdb.getMovieInfo(187)
    <MovieResult: Sin City (2005-04-01)>
    """
    return MovieDb().getMovieInfo(id)
|
||||
|
||||
def mediaGetInfo(hash, size):
    """Module-level shortcut for MovieDb.mediaGetInfo, e.g.

    >>> import tmdb
    >>> tmdb.mediaGetInfo('907172e7fe51ba57', size = 742086656)[0]
    <MovieResult: Sin City (2005-04-01)>
    """
    return MovieDb().mediaGetInfo(hash, size)
|
||||
|
||||
def searchByHashingFile(filename):
    """Searches for the specified file using the OpenSubtitle hashing method.

    Hashes the file, then looks it up via mediaGetInfo together with the
    file's size in bytes.
    """
    # Bug fix: os.path has no 'size' attribute (AttributeError at runtime);
    # os.path.getsize returns the byte size mediaGetInfo expects.
    return mediaGetInfo(opensubtitleHashFile(filename), os.path.getsize(filename))
|
||||
|
||||
def main():
    # Demo entry point: search for a movie, fetch its full record, and
    # print a few fields (Python 2 print statements).
    results = search("Fight Club")
    searchResult = results[0]
    movie = getMovieInfo(searchResult['id'])
    print movie['name']

    print "Producers:"
    for prodr in movie['cast']['producer']:
        print " " * 4, prodr['name']
    print movie['images']
    for genreName in movie['categories']['genre']:
        print "%s (%s)" % (genreName, movie['categories']['genre'][genreName])
|
||||
|
||||
# Run the demo when this module is executed as a script.
if __name__ == '__main__':
    main()
|
||||
11
libs/tmdb3/__init__.py
Executable file
11
libs/tmdb3/__init__.py
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from tmdb_api import Configuration, searchMovie, searchMovieWithYear, \
|
||||
searchPerson, searchStudio, searchList, searchCollection, \
|
||||
Person, Movie, Collection, Genre, List, __version__
|
||||
from request import set_key, set_cache
|
||||
from locales import get_locale, set_locale
|
||||
from tmdb_auth import get_session, set_session
|
||||
from cache_engine import CacheEngine
|
||||
from tmdb_exceptions import *
|
||||
|
||||
121
libs/tmdb3/cache.py
Executable file
121
libs/tmdb3/cache.py
Executable file
@@ -0,0 +1,121 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: cache.py
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
# Purpose: Caching framework to store TMDb API results
|
||||
#-----------------------
|
||||
|
||||
from tmdb_exceptions import *
|
||||
from cache_engine import Engines
|
||||
|
||||
import cache_null
|
||||
import cache_file
|
||||
|
||||
class Cache( object ):
    """
    This class implements a persistent cache, backed in a file specified in
    the object creation. The file is protected for safe, concurrent access
    by multiple instances using flock.
    This cache uses JSON for speed and storage efficiency, so only simple
    data types are supported.
    Data is stored in a simple format {key:(expiretimestamp, data)}
    """
    def __init__(self, engine=None, *args, **kwargs):
        # _engine: backing store instance (selected from the Engines registry)
        self._engine = None
        # _data: in-memory view of the cache, key -> CacheObject
        self._data = {}
        # _age: creation timestamp of the newest record imported so far;
        # passed to the engine so it only returns newer records
        self._age = 0
        self.configure(engine, *args, **kwargs)

    def _import(self, data=None):
        # Merge records from the engine (or the supplied list) into memory,
        # oldest first so newer entries win on key collisions.
        if data is None:
            data = self._engine.get(self._age)
        for obj in sorted(data, key=lambda x: x.creation):
            if not obj.expired:
                self._data[obj.key] = obj
                self._age = max(self._age, obj.creation)

    def _expire(self):
        # Drop expired records from the in-memory view.
        # NOTE(review): mutates the dict while iterating .items(); safe on
        # Python 2 (items() is a list) but not on Python 3 -- verify target.
        for k,v in self._data.items():
            if v.expired:
                del self._data[k]

    def configure(self, engine, *args, **kwargs):
        # Select and configure a backing engine by registered name
        # (defaults to the file-backed engine).
        if engine is None:
            engine = 'file'
        elif engine not in Engines:
            raise TMDBCacheError("Invalid cache engine specified: "+engine)
        self._engine = Engines[engine](self)
        self._engine.configure(*args, **kwargs)

    def put(self, key, data, lifetime=60*60*12):
        # Store data under key for lifetime seconds (default 12 hours).
        # pull existing data, so cache will be fresh when written back out
        if self._engine is None:
            raise TMDBCacheError("No cache engine configured")
        self._expire()
        self._import(self._engine.put(key, data, lifetime))

    def get(self, key):
        # Return cached data for key, or None when absent or expired.
        if self._engine is None:
            raise TMDBCacheError("No cache engine configured")
        self._expire()
        if key not in self._data:
            self._import()
        try:
            return self._data[key].data
        except:
            return None

    def cached(self, callback):
        """
        Returns a decorator that uses a callback to specify the key to use
        for caching the responses from the decorated function.
        """
        return self.Cached(self, callback)

    class Cached( object ):
        # Decorator object produced by Cache.cached(): wraps a function and
        # transparently caches its results under the key produced by
        # 'callback'.  Also implements the descriptor protocol so it can
        # wrap methods and bind per-instance.
        def __init__(self, cache, callback, func=None, inst=None):
            self.cache = cache
            self.callback = callback
            self.func = func
            self.inst = inst

            if func:
                # mimic the wrapped function's metadata
                self.__module__ = func.__module__
                self.__name__ = func.__name__
                self.__doc__ = func.__doc__

        def __call__(self, *args, **kwargs):
            if self.func is None: # decorator is waiting to be given a function
                if len(kwargs) or (len(args) != 1):
                    raise TMDBCacheError('Cache.Cached decorator must be called '+\
                                         'a single callable argument before it '+\
                                         'be used.')
                elif args[0] is None:
                    raise TMDBCacheError('Cache.Cached decorator called before '+\
                                         'being given a function to wrap.')
                elif not callable(args[0]):
                    raise TMDBCacheError('Cache.Cached must be provided a '+\
                                         'callable object.')
                return self.__class__(self.cache, self.callback, args[0])
            elif self.inst.lifetime == 0:
                # a lifetime of zero disables caching for this instance
                # NOTE(review): raises AttributeError when self.inst is None
                # (unbound call); presumably only ever invoked bound -- verify.
                return self.func(*args, **kwargs)
            else:
                key = self.callback()
                data = self.cache.get(key)
                if data is None:
                    # cache miss: compute and store the result
                    data = self.func(*args, **kwargs)
                    if hasattr(self.inst, 'lifetime'):
                        self.cache.put(key, data, self.inst.lifetime)
                    else:
                        self.cache.put(key, data)
                return data

        def __get__(self, inst, owner):
            # Descriptor protocol: bind the wrapped function and the key
            # callback to the instance so method results are cached.
            if inst is None:
                return self
            func = self.func.__get__(inst, owner)
            callback = self.callback.__get__(inst, owner)
            return self.__class__(self.cache, callback, func, inst)
|
||||
|
||||
72
libs/tmdb3/cache_engine.py
Executable file
72
libs/tmdb3/cache_engine.py
Executable file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: cache_engine.py
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
# Purpose: Base cache engine class for collecting registered engines
|
||||
#-----------------------
|
||||
|
||||
import time
|
||||
from weakref import ref
|
||||
|
||||
class Engines( object ):
    """Registry of cache engines, indexed by both class name and the
    engine's short 'name' attribute."""
    def __init__(self):
        self._engines = {}

    def register(self, engine):
        # Index the engine under its class name and its short name.
        self._engines[engine.__name__] = engine
        self._engines[engine.name] = engine

    def __getitem__(self, key):
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

# Replace the class with a module-level singleton registry.
Engines = Engines()
|
||||
|
||||
class CacheEngineType( type ):
    """
    Cache Engine Metaclass that registers new engines against the cache
    for named selection and use.
    """
    def __init__(mcs, name, bases, attrs):
        # 'mcs' is the newly created engine class; every subclass of
        # CacheEngine is automatically added to the Engines registry.
        super(CacheEngineType, mcs).__init__(name, bases, attrs)
        if name != 'CacheEngine':
            # skip base class
            Engines.register(mcs)
|
||||
|
||||
class CacheEngine( object ):
    """Abstract base for cache engines; subclasses are auto-registered
    via the CacheEngineType metaclass (Python 2 __metaclass__ hook)."""
    __metaclass__ = CacheEngineType

    # short name used to select the engine from the registry
    name = 'unspecified'
    def __init__(self, parent):
        # weak reference back to the owning Cache to avoid a cycle
        self.parent = ref(parent)
    # Abstract interface: concrete engines must override all of these.
    def configure(self):
        raise RuntimeError
    def get(self, date):
        raise RuntimeError
    def put(self, key, value, lifetime):
        raise RuntimeError
    def expire(self, key):
        raise RuntimeError
|
||||
|
||||
class CacheObject( object ):
    """
    Cache object class, containing one stored record.
    """

    def __init__(self, key, data, lifetime=0, creation=None):
        self.key = key
        self.data = data
        self.lifetime = lifetime
        # Default the creation timestamp to "now" when not supplied.
        if creation is None:
            creation = time.time()
        self.creation = creation

    def __len__(self):
        return len(self.data)

    @property
    def remaining(self):
        # Seconds until expiration, clamped at zero.
        expires_at = self.creation + self.lifetime
        return max(expires_at - time.time(), 0)

    @property
    def expired(self):
        # A record is expired once no lifetime remains.
        return self.remaining == 0
|
||||
|
||||
391
libs/tmdb3/cache_file.py
Executable file
391
libs/tmdb3/cache_file.py
Executable file
@@ -0,0 +1,391 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: cache_file.py
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
# Purpose: Persistant file-backed cache using /tmp/ to share data
|
||||
# using flock or msvcrt.locking to allow safe concurrent
|
||||
# access.
|
||||
#-----------------------
|
||||
|
||||
import struct
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
import io
|
||||
|
||||
from cStringIO import StringIO
|
||||
|
||||
from tmdb_exceptions import *
|
||||
from cache_engine import CacheEngine, CacheObject
|
||||
|
||||
####################
|
||||
# Cache File Format
|
||||
#------------------
|
||||
# cache version (2) unsigned short
|
||||
# slot count (2) unsigned short
|
||||
# slot 0: timestamp (8) double
|
||||
# slot 0: lifetime (4) unsigned int
|
||||
# slot 0: seek point (4) unsigned int
|
||||
# slot 1: timestamp
|
||||
# slot 1: lifetime index slots are IDd by their query date and
|
||||
# slot 1: seek point are filled incrementally forwards. lifetime
|
||||
# .... is how long after query date before the item
|
||||
# .... expires, and seek point is the location of the
|
||||
# slot N-2: timestamp start of data for that entry. 256 empty slots
|
||||
# slot N-2: lifetime are pre-allocated, allowing fast updates.
|
||||
# slot N-2: seek point when all slots are filled, the cache file is
|
||||
# slot N-1: timestamp rewritten from scrach to add more slots.
|
||||
# slot N-1: lifetime
|
||||
# slot N-1: seek point
|
||||
# block 1 (?) ASCII
|
||||
# block 2
|
||||
# .... blocks are just simple ASCII text, generated
|
||||
# .... as independent objects by the JSON encoder
|
||||
# block N-2
|
||||
# block N-1
|
||||
#
|
||||
####################
|
||||
|
||||
|
||||
def _donothing(*args, **kwargs):
|
||||
pass
|
||||
|
||||
try:
    # POSIX platforms: advisory locking via fcntl.flock.
    import fcntl
    class Flock( object ):
        """
        Context manager to flock file for the duration the object exists.
        Referenced file will be automatically unflocked as the interpreter
        exits the context.
        Supports an optional callback to process the error and optionally
        suppress it.
        """
        LOCK_EX = fcntl.LOCK_EX
        LOCK_SH = fcntl.LOCK_SH

        def __init__(self, fileobj, operation, callback=None):
            self.fileobj = fileobj
            self.operation = operation
            self.callback = callback
        def __enter__(self):
            fcntl.flock(self.fileobj, self.operation)
        def __exit__(self, exc_type, exc_value, exc_tb):
            suppress = False
            if callable(self.callback):
                # the callback may inspect the exception and suppress it
                suppress = self.callback(exc_type, exc_value, exc_tb)
            fcntl.flock(self.fileobj, fcntl.LOCK_UN)
            return suppress

    def parse_filename(filename):
        # Resolve a cache filename to an absolute POSIX path.
        if '$' in filename:
            # replace any environmental variables
            filename = os.path.expandvars(filename)
        if filename.startswith('~'):
            # check for home directory
            return os.path.expanduser(filename)
        elif filename.startswith('/'):
            # check for absolute path
            return filename
        # return path with temp directory prepended
        return '/tmp/' + filename

except ImportError:
    # Windows: fall back to msvcrt.locking (byte-range locks).
    import msvcrt
    class Flock( object ):
        # msvcrt has no shared-lock mode; both constants take an
        # exclusive lock.
        LOCK_EX = msvcrt.LK_LOCK
        LOCK_SH = msvcrt.LK_LOCK

        def __init__(self, fileobj, operation, callback=None):
            self.fileobj = fileobj
            self.operation = operation
            self.callback = callback
        def __enter__(self):
            # locking() needs a byte count, so record the current file size
            self.size = os.path.getsize(self.fileobj.name)
            msvcrt.locking(self.fileobj.fileno(), self.operation, self.size)
        def __exit__(self, exc_type, exc_value, exc_tb):
            suppress = False
            if callable(self.callback):
                suppress = self.callback(exc_type, exc_value, exc_tb)
            msvcrt.locking(self.fileobj.fileno(), msvcrt.LK_UNLCK, self.size)
            return suppress

    def parse_filename(filename):
        # Resolve a cache filename to an absolute Windows path.
        if '%' in filename:
            # replace any environmental variables
            filename = os.path.expandvars(filename)
        if filename.startswith('~'):
            # check for home directory
            return os.path.expanduser(filename)
        elif (ord(filename[0]) in (range(65,91)+range(99,123))) \
                and (filename[1:3] == ':\\'):
            # check for absolute drive path (e.g. C:\...)
            # NOTE(review): range(99,123) starts at 'c'; 97 ('a') was likely
            # intended, so drive letters a: and b: are not matched -- verify.
            return filename
        elif (filename.count('\\') >= 3) and (filename.startswith('\\\\')):
            # check for absolute UNC path (e.g. \\server\...)
            return filename
        # return path with temp directory prepended
        return os.path.expandvars(os.path.join('%TEMP%',filename))
|
||||
|
||||
|
||||
class FileCacheObject( CacheObject ):
    """Cache record specialized for file storage.

    Serializes its (key, data) pair as JSON into an internal buffer and
    tracks the record's byte offset ('position') and length ('size')
    within the cache file.
    """
    _struct = struct.Struct('dII') # double and two ints
                                   # timestamp, lifetime, position

    @classmethod
    def fromFile(cls, fd):
        # Read one slot definition (creation, lifetime, position) from fd.
        dat = cls._struct.unpack(fd.read(cls._struct.size))
        obj = cls(None, None, dat[1], dat[0])
        obj.position = dat[2]
        return obj

    def __init__(self, *args, **kwargs):
        self._key = None
        self._data = None
        self._size = None
        self._buff = StringIO()
        super(FileCacheObject, self).__init__(*args, **kwargs)

    @property
    def size(self):
        # Lazily compute the serialized length of this record.
        if self._size is None:
            self._buff.seek(0,2)
            size = self._buff.tell()
            if size == 0:
                # nothing buffered yet: serialize key/data now
                if (self._key is None) or (self._data is None):
                    raise RuntimeError
                json.dump([self.key, self.data], self._buff)
                self._size = self._buff.tell()
            else:
                # Bug fix: previously this assignment ran unconditionally,
                # clobbering the size computed after json.dump above with
                # the stale value 0; only take the measured buffer length
                # when the buffer already held data.
                self._size = size
        return self._size
    @size.setter
    def size(self, value): self._size = value

    @property
    def key(self):
        # Deserialize lazily from the buffer on first access.
        if self._key is None:
            try:
                self._key, self._data = json.loads(self._buff.getvalue())
            except:
                pass
        return self._key
    @key.setter
    def key(self, value): self._key = value

    @property
    def data(self):
        if self._data is None:
            self._key, self._data = json.loads(self._buff.getvalue())
        return self._data
    @data.setter
    def data(self, value): self._data = value

    def load(self, fd):
        # Pull this record's serialized bytes out of the cache file.
        fd.seek(self.position)
        self._buff.seek(0)
        self._buff.write(fd.read(self.size))

    def dumpslot(self, fd):
        # Write this record's slot definition at the current file offset.
        # (Removed unused local 'pos' -- the fd.tell() result was discarded.)
        fd.write(self._struct.pack(self.creation, self.lifetime, self.position))

    def dumpdata(self, fd):
        # Force serialization into the buffer, then write the record at its
        # assigned position.
        self.size
        fd.seek(self.position)
        fd.write(self._buff.getvalue())
|
||||
|
||||
|
||||
class FileEngine( CacheEngine ):
    """Simple file-backed engine.

    Stores records in a single shared file (see the format description at
    the top of this module): a header of version/slot-count, a table of
    pre-allocated slot definitions, then the JSON record blocks.
    """
    name = 'file'
    _struct = struct.Struct('HH') # two shorts for version and count
    _version = 2

    def __init__(self, parent):
        super(FileEngine, self).__init__(parent)
        self.configure(None)

    def configure(self, filename, preallocate=256):
        # filename may contain env vars or '~'; resolved in _init_cache.
        self.preallocate = preallocate
        self.cachefile = filename
        self.size = 0   # slot count read from the file header
        self.free = 0   # number of unused pre-allocated slots
        self.age = 0    # creation time of the newest record seen

    def _init_cache(self):
        # only run this once
        self._init_cache = _donothing

        if self.cachefile is None:
            raise TMDBCacheError("No cache filename given.")

        self.cachefile = parse_filename(self.cachefile)

        try:
            # attempt to read existing cache at filename
            # handle any errors that occur
            self._open('r+b')
            # seems to have read fine, make sure we have write access
            if not os.access(self.cachefile, os.W_OK):
                raise TMDBCacheWriteError(self.cachefile)

        except IOError as e:
            if e.errno == errno.ENOENT:
                # file does not exist, create a new one
                try:
                    self._open('w+b')
                    self._write([])
                except IOError as e:
                    if e.errno == errno.ENOENT:
                        # directory does not exist
                        raise TMDBCacheDirectoryError(self.cachefile)
                    elif e.errno == errno.EACCES:
                        # user does not have rights to create new file
                        raise TMDBCacheWriteError(self.cachefile)
                    else:
                        # let the unhandled error continue through
                        raise
            elif e.errno == errno.EACCES:
                # Bug fix: was 'errno.EACCESS' (typo) -- the errno module
                # has no such attribute, so reaching this branch raised
                # AttributeError instead of the intended cache error.
                # file exists, but we do not have permission to access it
                raise TMDBCacheReadError(self.cachefile)
            else:
                # let the unhandled error continue through
                raise

    def get(self, date):
        # Return all records created after 'date'.
        self._init_cache()
        self._open('r+b')

        with Flock(self.cachefd, Flock.LOCK_SH): # lock for shared access
            # return any new objects in the cache
            return self._read(date)

    def put(self, key, value, lifetime):
        # Append one record, returning every record added since self.age.
        self._init_cache()
        self._open('r+b')

        with Flock(self.cachefd, Flock.LOCK_EX): # lock for exclusive access
            newobjs = self._read(self.age)
            newobjs.append(FileCacheObject(key, value, lifetime))

            # this will cause a new file object to be opened with the proper
            # access mode, however the Flock should keep the old object open
            # and properly locked
            self._open('r+b')
            self._write(newobjs)
            return newobjs

    def _open(self, mode='r+b'):
        # enforce binary operation
        try:
            if self.cachefd.mode == mode:
                # already opened in requested mode, nothing to do
                self.cachefd.seek(0)
                return
        except: pass # catch issue of no cachefile yet opened
        self.cachefd = io.open(self.cachefile, mode)

    def _read(self, date):
        # Parse the cache file, returning records created after 'date'.
        try:
            self.cachefd.seek(0)
            version, count = self._struct.unpack(\
                                    self.cachefd.read(self._struct.size))
            if version != self._version:
                # old version, break out and well rewrite when finished
                raise Exception

            self.size = count
            cache = []
            while count:
                # loop through storage definitions
                obj = FileCacheObject.fromFile(self.cachefd)
                cache.append(obj)
                count -= 1

        except:
            # failed to read information, so just discard it and return empty
            self.size = 0
            self.free = 0
            return []

        # get end of file
        self.cachefd.seek(0,2)
        position = self.cachefd.tell()
        newobjs = []
        emptycount = 0

        # walk backward through all, collecting new content and populating size
        while len(cache):
            obj = cache.pop()
            if obj.creation == 0:
                # unused slot, skip
                emptycount += 1
            elif obj.expired:
                # object has passed expiration date, no sense processing
                continue
            elif obj.creation > date:
                # used slot with new data, process
                obj.size, position = position - obj.position, obj.position
                newobjs.append(obj)
                # update age
                self.age = max(self.age, obj.creation)
            elif len(newobjs):
                # end of new data, break
                break

        # walk forward and load new content
        for obj in newobjs:
            obj.load(self.cachefd)

        self.free = emptycount
        return newobjs

    def _write(self, data):
        if self.free and (self.size != self.free):
            # we only care about the last data point, since the rest are
            # already stored in the file
            data = data[-1]

            # determine write position of data in cache
            self.cachefd.seek(0,2)
            end = self.cachefd.tell()
            data.position = end

            # write incremental update to free slot
            self.cachefd.seek(4 + 16*(self.size-self.free))
            data.dumpslot(self.cachefd)
            data.dumpdata(self.cachefd)

        else:
            # rewrite cache file from scratch
            # pull data from parent cache
            data.extend(self.parent()._data.values())
            data.sort(key=lambda x: x.creation)
            # write header
            size = len(data) + self.preallocate
            self.cachefd.seek(0)
            self.cachefd.truncate()
            self.cachefd.write(self._struct.pack(self._version, size))
            # write storage slot definitions
            prev = None
            for d in data:
                if prev == None:
                    d.position = 4 + 16*size
                else:
                    d.position = prev.position + prev.size
                d.dumpslot(self.cachefd)
                prev = d
            # fill in allocated slots
            for i in range(2**8):
                self.cachefd.write(FileCacheObject._struct.pack(0, 0, 0))
            # write stored data
            for d in data:
                d.dumpdata(self.cachefd)

        self.cachefd.flush()

    def expire(self, key):
        # Expiration is handled implicitly when records are re-read and the
        # file is rewritten; nothing to do here.
        pass
|
||||
|
||||
|
||||
19
libs/tmdb3/cache_null.py
Executable file
19
libs/tmdb3/cache_null.py
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: cache_null.py
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
# Purpose: Null caching engine for debugging purposes
|
||||
#-----------------------
|
||||
|
||||
from cache_engine import CacheEngine
|
||||
|
||||
class NullEngine( CacheEngine ):
    """Non-caching engine for debugging."""
    name = 'null'

    def configure(self):
        # Nothing to set up for a no-op engine.
        pass

    def get(self, date):
        # Never returns any stored records.
        return []

    def put(self, key, value, lifetime):
        # Discards the record; nothing new to report back.
        return []

    def expire(self, key):
        # Nothing is ever stored, so nothing to expire.
        pass
|
||||
|
||||
634
libs/tmdb3/locales.py
Executable file
634
libs/tmdb3/locales.py
Executable file
@@ -0,0 +1,634 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: locales.py Stores locale information for filtering results
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
#-----------------------
|
||||
|
||||
from tmdb_exceptions import *
|
||||
import locale
|
||||
|
||||
syslocale = None
|
||||
|
||||
class LocaleBase( object ):
    """Base class for interned locale components.

    Instances register themselves in the class-level _stored dict under
    each lookup key (lowercased), so getstored() returns shared objects.
    """
    __slots__ = ['__immutable']
    _stored = {}
    # when True, lookups may fall through to broader matching (set module-wide
    # by set_locale)
    fallthrough = False

    def __init__(self, *keys):
        for key in keys:
            self._stored[key.lower()] = self
        self.__immutable = True

    def __setattr__(self, key, value):
        # NOTE(review): '__immutable' is name-mangled to
        # '_LocaleBase__immutable' when assigned in __init__ (and in the
        # __slots__ descriptor), so this literal getattr('__immutable')
        # appears to always return False and the guard may never fire --
        # verify intent before relying on immutability.
        if getattr(self, '__immutable', False):
            raise NotImplementedError(self.__class__.__name__ +
                                      ' does not support modification.')
        super(LocaleBase, self).__setattr__(key, value)

    def __delattr__(self, key):
        if getattr(self, '__immutable', False):
            raise NotImplementedError(self.__class__.__name__ +
                                      ' does not support modification.')
        super(LocaleBase, self).__delattr__(key)

    # Ordering is deliberately inverted relative to string comparison
    # (the greater string sorts first); equality falls back to identity.
    def __lt__(self, other):
        return (id(self) != id(other)) and (str(self) > str(other))
    def __gt__(self, other):
        return (id(self) != id(other)) and (str(self) < str(other))
    def __eq__(self, other):
        return (id(self) == id(other)) or (str(self) == str(other))

    @classmethod
    def getstored(cls, key):
        # Case-insensitive lookup of a previously interned instance.
        if key is None:
            return None
        try:
            return cls._stored[key.lower()]
        except:
            raise TMDBLocaleError("'{0}' is not a known valid {1} code."\
                                  .format(key, cls.__name__))
|
||||
|
||||
class Language( LocaleBase ):
    """ISO-639 language descriptor, interned by its iso1 and iso2 codes."""
    __slots__ = ['ISO639_1', 'ISO639_2', 'ISO639_2B', 'englishname',
                 'nativename']
    _stored = {}

    def __init__(self, iso1, iso2, ename):
        self.ISO639_1 = iso1
        self.ISO639_2 = iso2
#        self.ISO639_2B = iso2b
        self.englishname = ename
#        self.nativename = nname
        # NOTE(review): ISO639_2B and nativename are declared in __slots__
        # but never assigned (see commented-out lines above); reading them
        # raises AttributeError.
        super(Language, self).__init__(iso1, iso2)

    def __str__(self):
        return self.ISO639_1

    def __repr__(self):
        return u"<Language '{0.englishname}' ({0.ISO639_1})>".format(self)
|
||||
|
||||
class Country( LocaleBase ):
    """ISO-3166 country descriptor, interned by its alpha-2 code."""
    __slots__ = ['alpha2', 'name']
    _stored = {}

    def __init__(self, alpha2, name):
        self.alpha2 = alpha2
        self.name = name
        super(Country, self).__init__(alpha2)

    def __str__(self):
        return self.alpha2

    def __repr__(self):
        return u"<Country '{0.name}' ({0.alpha2})>".format(self)
|
||||
|
||||
class Locale( LocaleBase ):
    """A language/country pair with an associated text encoding."""
    __slots__ = ['language', 'country', 'encoding']

    def __init__(self, language, country, encoding):
        # Resolve raw codes to the interned Language/Country objects.
        self.language = Language.getstored(language)
        self.country = Country.getstored(country)
        self.encoding = encoding if encoding else 'latin-1'

    def __str__(self):
        return u"{0}_{1}".format(self.language, self.country)

    def __repr__(self):
        return u"<Locale {0.language}_{0.country}>".format(self)

    def encode(self, dat):
        """Encode using system default encoding for network/file output."""
        try:
            return dat.encode(self.encoding)
        except AttributeError:
            # not a string type, pass along
            return dat
        except UnicodeDecodeError:
            # NOTE(review): on Python 2, encoding a byte string implicitly
            # decodes it first, which is what raises UnicodeDecodeError here.
            # just return unmodified and hope for the best
            return dat

    def decode(self, dat):
        """Decode to system default encoding for internal use."""
        try:
            return dat.decode(self.encoding)
        except AttributeError:
            # not a string type, pass along
            return dat
        except UnicodeEncodeError:
            # NOTE(review): on Python 2, decoding a unicode string implicitly
            # encodes it first, which is what raises UnicodeEncodeError here.
            # just return unmodified and hope for the best
            return dat
|
||||
|
||||
def set_locale(language=None, country=None, fallthrough=False):
    """Set the module-wide locale used to filter results.

    Missing language/country values fall back to the previously stored
    locale, or to the system default locale ('en_US' when undetectable).
    """
    global syslocale
    LocaleBase.fallthrough = fallthrough

    sysloc, sysenc = locale.getdefaultlocale()

    if (not language) or (not country):
        # derive defaults for whichever value was omitted
        dat = None
        if syslocale is not None:
            dat = (str(syslocale.language), str(syslocale.country))
        else:
            if (sysloc is None) or ('_' not in sysloc):
                dat = ('en', 'US')
            else:
                dat = sysloc.split('_')
        if language is None:
            language = dat[0]
        if country is None:
            country = dat[1]

    syslocale = Locale(language, country, sysenc)
|
||||
|
||||
def get_locale(language=-1, country=-1):
    """Return a Locale built from the given attributes, defaulting any
    attribute left as -1 from the stored system locale."""
    global syslocale
    # pull existing stored values (or a system-derived fallback)
    if syslocale is None:
        base = Locale(None, None, locale.getdefaultlocale()[1])
    else:
        base = syslocale

    # nothing overridden: hand back the stored locale itself
    if (language == -1) and (country == -1):
        return base

    # supplement the one defaulted attribute from the stored locale
    if language == -1:
        language = base.language
    elif country == -1:
        country = base.country
    return Locale(language, country, base.encoding)
|
||||
|
||||
######## AUTOGENERATED LANGUAGE AND COUNTRY DATA BELOW HERE #########
|
||||
|
||||
Language("ab", "abk", u"Abkhazian")
|
||||
Language("aa", "aar", u"Afar")
|
||||
Language("af", "afr", u"Afrikaans")
|
||||
Language("ak", "aka", u"Akan")
|
||||
Language("sq", "alb/sqi", u"Albanian")
|
||||
Language("am", "amh", u"Amharic")
|
||||
Language("ar", "ara", u"Arabic")
|
||||
Language("an", "arg", u"Aragonese")
|
||||
Language("hy", "arm/hye", u"Armenian")
|
||||
Language("as", "asm", u"Assamese")
|
||||
Language("av", "ava", u"Avaric")
|
||||
Language("ae", "ave", u"Avestan")
|
||||
Language("ay", "aym", u"Aymara")
|
||||
Language("az", "aze", u"Azerbaijani")
|
||||
Language("bm", "bam", u"Bambara")
|
||||
Language("ba", "bak", u"Bashkir")
|
||||
Language("eu", "baq/eus", u"Basque")
|
||||
Language("be", "bel", u"Belarusian")
|
||||
Language("bn", "ben", u"Bengali")
|
||||
Language("bh", "bih", u"Bihari languages")
|
||||
Language("bi", "bis", u"Bislama")
|
||||
Language("nb", "nob", u"Bokmål, Norwegian")
|
||||
Language("bs", "bos", u"Bosnian")
|
||||
Language("br", "bre", u"Breton")
|
||||
Language("bg", "bul", u"Bulgarian")
|
||||
Language("my", "bur/mya", u"Burmese")
|
||||
Language("es", "spa", u"Castilian")
|
||||
Language("ca", "cat", u"Catalan")
|
||||
Language("km", "khm", u"Central Khmer")
|
||||
Language("ch", "cha", u"Chamorro")
|
||||
Language("ce", "che", u"Chechen")
|
||||
Language("ny", "nya", u"Chewa")
|
||||
Language("ny", "nya", u"Chichewa")
|
||||
Language("zh", "chi/zho", u"Chinese")
|
||||
Language("za", "zha", u"Chuang")
|
||||
Language("cu", "chu", u"Church Slavic")
|
||||
Language("cu", "chu", u"Church Slavonic")
|
||||
Language("cv", "chv", u"Chuvash")
|
||||
Language("kw", "cor", u"Cornish")
|
||||
Language("co", "cos", u"Corsican")
|
||||
Language("cr", "cre", u"Cree")
|
||||
Language("hr", "hrv", u"Croatian")
|
||||
Language("cs", "cze/ces", u"Czech")
|
||||
Language("da", "dan", u"Danish")
|
||||
Language("dv", "div", u"Dhivehi")
|
||||
Language("dv", "div", u"Divehi")
|
||||
Language("nl", "dut/nld", u"Dutch")
|
||||
Language("dz", "dzo", u"Dzongkha")
|
||||
Language("en", "eng", u"English")
|
||||
Language("eo", "epo", u"Esperanto")
|
||||
Language("et", "est", u"Estonian")
|
||||
Language("ee", "ewe", u"Ewe")
|
||||
Language("fo", "fao", u"Faroese")
|
||||
Language("fj", "fij", u"Fijian")
|
||||
Language("fi", "fin", u"Finnish")
|
||||
Language("nl", "dut/nld", u"Flemish")
|
||||
Language("fr", "fre/fra", u"French")
|
||||
Language("ff", "ful", u"Fulah")
|
||||
Language("gd", "gla", u"Gaelic")
|
||||
Language("gl", "glg", u"Galician")
|
||||
Language("lg", "lug", u"Ganda")
|
||||
Language("ka", "geo/kat", u"Georgian")
|
||||
Language("de", "ger/deu", u"German")
|
||||
Language("ki", "kik", u"Gikuyu")
|
||||
Language("el", "gre/ell", u"Greek, Modern (1453-)")
|
||||
Language("kl", "kal", u"Greenlandic")
|
||||
Language("gn", "grn", u"Guarani")
|
||||
Language("gu", "guj", u"Gujarati")
|
||||
Language("ht", "hat", u"Haitian")
|
||||
Language("ht", "hat", u"Haitian Creole")
|
||||
Language("ha", "hau", u"Hausa")
|
||||
Language("he", "heb", u"Hebrew")
|
||||
Language("hz", "her", u"Herero")
|
||||
Language("hi", "hin", u"Hindi")
|
||||
Language("ho", "hmo", u"Hiri Motu")
|
||||
Language("hu", "hun", u"Hungarian")
|
||||
Language("is", "ice/isl", u"Icelandic")
|
||||
Language("io", "ido", u"Ido")
|
||||
Language("ig", "ibo", u"Igbo")
|
||||
Language("id", "ind", u"Indonesian")
|
||||
Language("ia", "ina", u"Interlingua (International Auxiliary Language Association)")
|
||||
Language("ie", "ile", u"Interlingue")
|
||||
Language("iu", "iku", u"Inuktitut")
|
||||
Language("ik", "ipk", u"Inupiaq")
|
||||
Language("ga", "gle", u"Irish")
|
||||
Language("it", "ita", u"Italian")
|
||||
Language("ja", "jpn", u"Japanese")
|
||||
Language("jv", "jav", u"Javanese")
|
||||
Language("kl", "kal", u"Kalaallisut")
|
||||
Language("kn", "kan", u"Kannada")
|
||||
Language("kr", "kau", u"Kanuri")
|
||||
Language("ks", "kas", u"Kashmiri")
|
||||
Language("kk", "kaz", u"Kazakh")
|
||||
Language("ki", "kik", u"Kikuyu")
|
||||
Language("rw", "kin", u"Kinyarwanda")
|
||||
Language("ky", "kir", u"Kirghiz")
|
||||
Language("kv", "kom", u"Komi")
|
||||
Language("kg", "kon", u"Kongo")
|
||||
Language("ko", "kor", u"Korean")
|
||||
Language("kj", "kua", u"Kuanyama")
|
||||
Language("ku", "kur", u"Kurdish")
|
||||
Language("kj", "kua", u"Kwanyama")
|
||||
Language("ky", "kir", u"Kyrgyz")
|
||||
Language("lo", "lao", u"Lao")
|
||||
Language("la", "lat", u"Latin")
|
||||
Language("lv", "lav", u"Latvian")
|
||||
Language("lb", "ltz", u"Letzeburgesch")
|
||||
Language("li", "lim", u"Limburgan")
|
||||
Language("li", "lim", u"Limburger")
|
||||
Language("li", "lim", u"Limburgish")
|
||||
Language("ln", "lin", u"Lingala")
|
||||
Language("lt", "lit", u"Lithuanian")
|
||||
Language("lu", "lub", u"Luba-Katanga")
|
||||
Language("lb", "ltz", u"Luxembourgish")
|
||||
Language("mk", "mac/mkd", u"Macedonian")
|
||||
Language("mg", "mlg", u"Malagasy")
|
||||
Language("ms", "may/msa", u"Malay")
|
||||
Language("ml", "mal", u"Malayalam")
|
||||
Language("dv", "div", u"Maldivian")
|
||||
Language("mt", "mlt", u"Maltese")
|
||||
Language("gv", "glv", u"Manx")
|
||||
Language("mi", "mao/mri", u"Maori")
|
||||
Language("mr", "mar", u"Marathi")
|
||||
Language("mh", "mah", u"Marshallese")
|
||||
Language("ro", "rum/ron", u"Moldavian")
|
||||
Language("ro", "rum/ron", u"Moldovan")
|
||||
Language("mn", "mon", u"Mongolian")
|
||||
Language("na", "nau", u"Nauru")
|
||||
Language("nv", "nav", u"Navaho")
|
||||
Language("nv", "nav", u"Navajo")
|
||||
Language("nd", "nde", u"Ndebele, North")
|
||||
Language("nr", "nbl", u"Ndebele, South")
|
||||
Language("ng", "ndo", u"Ndonga")
|
||||
Language("ne", "nep", u"Nepali")
|
||||
Language("nd", "nde", u"North Ndebele")
|
||||
Language("se", "sme", u"Northern Sami")
|
||||
Language("no", "nor", u"Norwegian")
|
||||
Language("nb", "nob", u"Norwegian Bokmål")
|
||||
Language("nn", "nno", u"Norwegian Nynorsk")
|
||||
Language("ii", "iii", u"Nuosu")
|
||||
Language("ny", "nya", u"Nyanja")
|
||||
Language("nn", "nno", u"Nynorsk, Norwegian")
|
||||
Language("ie", "ile", u"Occidental")
|
||||
Language("oc", "oci", u"Occitan (post 1500)")
|
||||
Language("oj", "oji", u"Ojibwa")
|
||||
Language("cu", "chu", u"Old Bulgarian")
|
||||
Language("cu", "chu", u"Old Church Slavonic")
|
||||
Language("cu", "chu", u"Old Slavonic")
|
||||
Language("or", "ori", u"Oriya")
|
||||
Language("om", "orm", u"Oromo")
|
||||
Language("os", "oss", u"Ossetian")
|
||||
Language("os", "oss", u"Ossetic")
|
||||
Language("pi", "pli", u"Pali")
|
||||
Language("pa", "pan", u"Panjabi")
|
||||
Language("ps", "pus", u"Pashto")
|
||||
Language("fa", "per/fas", u"Persian")
|
||||
Language("pl", "pol", u"Polish")
|
||||
Language("pt", "por", u"Portuguese")
|
||||
Language("pa", "pan", u"Punjabi")
|
||||
Language("ps", "pus", u"Pushto")
|
||||
Language("qu", "que", u"Quechua")
|
||||
Language("ro", "rum/ron", u"Romanian")
|
||||
Language("rm", "roh", u"Romansh")
|
||||
Language("rn", "run", u"Rundi")
|
||||
Language("ru", "rus", u"Russian")
|
||||
Language("sm", "smo", u"Samoan")
|
||||
Language("sg", "sag", u"Sango")
|
||||
Language("sa", "san", u"Sanskrit")
|
||||
Language("sc", "srd", u"Sardinian")
|
||||
Language("gd", "gla", u"Scottish Gaelic")
|
||||
Language("sr", "srp", u"Serbian")
|
||||
Language("sn", "sna", u"Shona")
|
||||
Language("ii", "iii", u"Sichuan Yi")
|
||||
Language("sd", "snd", u"Sindhi")
|
||||
Language("si", "sin", u"Sinhala")
|
||||
Language("si", "sin", u"Sinhalese")
|
||||
Language("sk", "slo/slk", u"Slovak")
|
||||
Language("sl", "slv", u"Slovenian")
|
||||
Language("so", "som", u"Somali")
|
||||
Language("st", "sot", u"Sotho, Southern")
|
||||
Language("nr", "nbl", u"South Ndebele")
|
||||
Language("es", "spa", u"Spanish")
|
||||
Language("su", "sun", u"Sundanese")
|
||||
Language("sw", "swa", u"Swahili")
|
||||
Language("ss", "ssw", u"Swati")
|
||||
Language("sv", "swe", u"Swedish")
|
||||
Language("tl", "tgl", u"Tagalog")
|
||||
Language("ty", "tah", u"Tahitian")
|
||||
Language("tg", "tgk", u"Tajik")
|
||||
Language("ta", "tam", u"Tamil")
|
||||
Language("tt", "tat", u"Tatar")
|
||||
Language("te", "tel", u"Telugu")
|
||||
Language("th", "tha", u"Thai")
|
||||
Language("bo", "tib/bod", u"Tibetan")
|
||||
Language("ti", "tir", u"Tigrinya")
|
||||
Language("to", "ton", u"Tonga (Tonga Islands)")
|
||||
Language("ts", "tso", u"Tsonga")
|
||||
Language("tn", "tsn", u"Tswana")
|
||||
Language("tr", "tur", u"Turkish")
|
||||
Language("tk", "tuk", u"Turkmen")
|
||||
Language("tw", "twi", u"Twi")
|
||||
Language("ug", "uig", u"Uighur")
|
||||
Language("uk", "ukr", u"Ukrainian")
|
||||
Language("ur", "urd", u"Urdu")
|
||||
Language("ug", "uig", u"Uyghur")
|
||||
Language("uz", "uzb", u"Uzbek")
|
||||
Language("ca", "cat", u"Valencian")
|
||||
Language("ve", "ven", u"Venda")
|
||||
Language("vi", "vie", u"Vietnamese")
|
||||
Language("vo", "vol", u"Volapük")
|
||||
Language("wa", "wln", u"Walloon")
|
||||
Language("cy", "wel/cym", u"Welsh")
|
||||
Language("fy", "fry", u"Western Frisian")
|
||||
Language("wo", "wol", u"Wolof")
|
||||
Language("xh", "xho", u"Xhosa")
|
||||
Language("yi", "yid", u"Yiddish")
|
||||
Language("yo", "yor", u"Yoruba")
|
||||
Language("za", "zha", u"Zhuang")
|
||||
Language("zu", "zul", u"Zulu")
|
||||
Country("AF", u"AFGHANISTAN")
|
||||
Country("AX", u"ÅLAND ISLANDS")
|
||||
Country("AL", u"ALBANIA")
|
||||
Country("DZ", u"ALGERIA")
|
||||
Country("AS", u"AMERICAN SAMOA")
|
||||
Country("AD", u"ANDORRA")
|
||||
Country("AO", u"ANGOLA")
|
||||
Country("AI", u"ANGUILLA")
|
||||
Country("AQ", u"ANTARCTICA")
|
||||
Country("AG", u"ANTIGUA AND BARBUDA")
|
||||
Country("AR", u"ARGENTINA")
|
||||
Country("AM", u"ARMENIA")
|
||||
Country("AW", u"ARUBA")
|
||||
Country("AU", u"AUSTRALIA")
|
||||
Country("AT", u"AUSTRIA")
|
||||
Country("AZ", u"AZERBAIJAN")
|
||||
Country("BS", u"BAHAMAS")
|
||||
Country("BH", u"BAHRAIN")
|
||||
Country("BD", u"BANGLADESH")
|
||||
Country("BB", u"BARBADOS")
|
||||
Country("BY", u"BELARUS")
|
||||
Country("BE", u"BELGIUM")
|
||||
Country("BZ", u"BELIZE")
|
||||
Country("BJ", u"BENIN")
|
||||
Country("BM", u"BERMUDA")
|
||||
Country("BT", u"BHUTAN")
|
||||
Country("BO", u"BOLIVIA, PLURINATIONAL STATE OF")
|
||||
Country("BQ", u"BONAIRE, SINT EUSTATIUS AND SABA")
|
||||
Country("BA", u"BOSNIA AND HERZEGOVINA")
|
||||
Country("BW", u"BOTSWANA")
|
||||
Country("BV", u"BOUVET ISLAND")
|
||||
Country("BR", u"BRAZIL")
|
||||
Country("IO", u"BRITISH INDIAN OCEAN TERRITORY")
|
||||
Country("BN", u"BRUNEI DARUSSALAM")
|
||||
Country("BG", u"BULGARIA")
|
||||
Country("BF", u"BURKINA FASO")
|
||||
Country("BI", u"BURUNDI")
|
||||
Country("KH", u"CAMBODIA")
|
||||
Country("CM", u"CAMEROON")
|
||||
Country("CA", u"CANADA")
|
||||
Country("CV", u"CAPE VERDE")
|
||||
Country("KY", u"CAYMAN ISLANDS")
|
||||
Country("CF", u"CENTRAL AFRICAN REPUBLIC")
|
||||
Country("TD", u"CHAD")
|
||||
Country("CL", u"CHILE")
|
||||
Country("CN", u"CHINA")
|
||||
Country("CX", u"CHRISTMAS ISLAND")
|
||||
Country("CC", u"COCOS (KEELING) ISLANDS")
|
||||
Country("CO", u"COLOMBIA")
|
||||
Country("KM", u"COMOROS")
|
||||
Country("CG", u"CONGO")
|
||||
Country("CD", u"CONGO, THE DEMOCRATIC REPUBLIC OF THE")
|
||||
Country("CK", u"COOK ISLANDS")
|
||||
Country("CR", u"COSTA RICA")
|
||||
Country("CI", u"CÔTE D'IVOIRE")
|
||||
Country("HR", u"CROATIA")
|
||||
Country("CU", u"CUBA")
|
||||
Country("CW", u"CURAÇAO")
|
||||
Country("CY", u"CYPRUS")
|
||||
Country("CZ", u"CZECH REPUBLIC")
|
||||
Country("DK", u"DENMARK")
|
||||
Country("DJ", u"DJIBOUTI")
|
||||
Country("DM", u"DOMINICA")
|
||||
Country("DO", u"DOMINICAN REPUBLIC")
|
||||
Country("EC", u"ECUADOR")
|
||||
Country("EG", u"EGYPT")
|
||||
Country("SV", u"EL SALVADOR")
|
||||
Country("GQ", u"EQUATORIAL GUINEA")
|
||||
Country("ER", u"ERITREA")
|
||||
Country("EE", u"ESTONIA")
|
||||
Country("ET", u"ETHIOPIA")
|
||||
Country("FK", u"FALKLAND ISLANDS (MALVINAS)")
|
||||
Country("FO", u"FAROE ISLANDS")
|
||||
Country("FJ", u"FIJI")
|
||||
Country("FI", u"FINLAND")
|
||||
Country("FR", u"FRANCE")
|
||||
Country("GF", u"FRENCH GUIANA")
|
||||
Country("PF", u"FRENCH POLYNESIA")
|
||||
Country("TF", u"FRENCH SOUTHERN TERRITORIES")
|
||||
Country("GA", u"GABON")
|
||||
Country("GM", u"GAMBIA")
|
||||
Country("GE", u"GEORGIA")
|
||||
Country("DE", u"GERMANY")
|
||||
Country("GH", u"GHANA")
|
||||
Country("GI", u"GIBRALTAR")
|
||||
Country("GR", u"GREECE")
|
||||
Country("GL", u"GREENLAND")
|
||||
Country("GD", u"GRENADA")
|
||||
Country("GP", u"GUADELOUPE")
|
||||
Country("GU", u"GUAM")
|
||||
Country("GT", u"GUATEMALA")
|
||||
Country("GG", u"GUERNSEY")
|
||||
Country("GN", u"GUINEA")
|
||||
Country("GW", u"GUINEA-BISSAU")
|
||||
Country("GY", u"GUYANA")
|
||||
Country("HT", u"HAITI")
|
||||
Country("HM", u"HEARD ISLAND AND MCDONALD ISLANDS")
|
||||
Country("VA", u"HOLY SEE (VATICAN CITY STATE)")
|
||||
Country("HN", u"HONDURAS")
|
||||
Country("HK", u"HONG KONG")
|
||||
Country("HU", u"HUNGARY")
|
||||
Country("IS", u"ICELAND")
|
||||
Country("IN", u"INDIA")
|
||||
Country("ID", u"INDONESIA")
|
||||
Country("IR", u"IRAN, ISLAMIC REPUBLIC OF")
|
||||
Country("IQ", u"IRAQ")
|
||||
Country("IE", u"IRELAND")
|
||||
Country("IM", u"ISLE OF MAN")
|
||||
Country("IL", u"ISRAEL")
|
||||
Country("IT", u"ITALY")
|
||||
Country("JM", u"JAMAICA")
|
||||
Country("JP", u"JAPAN")
|
||||
Country("JE", u"JERSEY")
|
||||
Country("JO", u"JORDAN")
|
||||
Country("KZ", u"KAZAKHSTAN")
|
||||
Country("KE", u"KENYA")
|
||||
Country("KI", u"KIRIBATI")
|
||||
Country("KP", u"KOREA, DEMOCRATIC PEOPLE'S REPUBLIC OF")
|
||||
Country("KR", u"KOREA, REPUBLIC OF")
|
||||
Country("KW", u"KUWAIT")
|
||||
Country("KG", u"KYRGYZSTAN")
|
||||
Country("LA", u"LAO PEOPLE'S DEMOCRATIC REPUBLIC")
|
||||
Country("LV", u"LATVIA")
|
||||
Country("LB", u"LEBANON")
|
||||
Country("LS", u"LESOTHO")
|
||||
Country("LR", u"LIBERIA")
|
||||
Country("LY", u"LIBYA")
|
||||
Country("LI", u"LIECHTENSTEIN")
|
||||
Country("LT", u"LITHUANIA")
|
||||
Country("LU", u"LUXEMBOURG")
|
||||
Country("MO", u"MACAO")
|
||||
Country("MK", u"MACEDONIA, THE FORMER YUGOSLAV REPUBLIC OF")
|
||||
Country("MG", u"MADAGASCAR")
|
||||
Country("MW", u"MALAWI")
|
||||
Country("MY", u"MALAYSIA")
|
||||
Country("MV", u"MALDIVES")
|
||||
Country("ML", u"MALI")
|
||||
Country("MT", u"MALTA")
|
||||
Country("MH", u"MARSHALL ISLANDS")
|
||||
Country("MQ", u"MARTINIQUE")
|
||||
Country("MR", u"MAURITANIA")
|
||||
Country("MU", u"MAURITIUS")
|
||||
Country("YT", u"MAYOTTE")
|
||||
Country("MX", u"MEXICO")
|
||||
Country("FM", u"MICRONESIA, FEDERATED STATES OF")
|
||||
Country("MD", u"MOLDOVA, REPUBLIC OF")
|
||||
Country("MC", u"MONACO")
|
||||
Country("MN", u"MONGOLIA")
|
||||
Country("ME", u"MONTENEGRO")
|
||||
Country("MS", u"MONTSERRAT")
|
||||
Country("MA", u"MOROCCO")
|
||||
Country("MZ", u"MOZAMBIQUE")
|
||||
Country("MM", u"MYANMAR")
|
||||
Country("NA", u"NAMIBIA")
|
||||
Country("NR", u"NAURU")
|
||||
Country("NP", u"NEPAL")
|
||||
Country("NL", u"NETHERLANDS")
|
||||
Country("NC", u"NEW CALEDONIA")
|
||||
Country("NZ", u"NEW ZEALAND")
|
||||
Country("NI", u"NICARAGUA")
|
||||
Country("NE", u"NIGER")
|
||||
Country("NG", u"NIGERIA")
|
||||
Country("NU", u"NIUE")
|
||||
Country("NF", u"NORFOLK ISLAND")
|
||||
Country("MP", u"NORTHERN MARIANA ISLANDS")
|
||||
Country("NO", u"NORWAY")
|
||||
Country("OM", u"OMAN")
|
||||
Country("PK", u"PAKISTAN")
|
||||
Country("PW", u"PALAU")
|
||||
Country("PS", u"PALESTINIAN TERRITORY, OCCUPIED")
|
||||
Country("PA", u"PANAMA")
|
||||
Country("PG", u"PAPUA NEW GUINEA")
|
||||
Country("PY", u"PARAGUAY")
|
||||
Country("PE", u"PERU")
|
||||
Country("PH", u"PHILIPPINES")
|
||||
Country("PN", u"PITCAIRN")
|
||||
Country("PL", u"POLAND")
|
||||
Country("PT", u"PORTUGAL")
|
||||
Country("PR", u"PUERTO RICO")
|
||||
Country("QA", u"QATAR")
|
||||
Country("RE", u"RÉUNION")
|
||||
Country("RO", u"ROMANIA")
|
||||
Country("RU", u"RUSSIAN FEDERATION")
|
||||
Country("RW", u"RWANDA")
|
||||
Country("BL", u"SAINT BARTHÉLEMY")
|
||||
Country("SH", u"SAINT HELENA, ASCENSION AND TRISTAN DA CUNHA")
|
||||
Country("KN", u"SAINT KITTS AND NEVIS")
|
||||
Country("LC", u"SAINT LUCIA")
|
||||
Country("MF", u"SAINT MARTIN (FRENCH PART)")
|
||||
Country("PM", u"SAINT PIERRE AND MIQUELON")
|
||||
Country("VC", u"SAINT VINCENT AND THE GRENADINES")
|
||||
Country("WS", u"SAMOA")
|
||||
Country("SM", u"SAN MARINO")
|
||||
Country("ST", u"SAO TOME AND PRINCIPE")
|
||||
Country("SA", u"SAUDI ARABIA")
|
||||
Country("SN", u"SENEGAL")
|
||||
Country("RS", u"SERBIA")
|
||||
Country("SC", u"SEYCHELLES")
|
||||
Country("SL", u"SIERRA LEONE")
|
||||
Country("SG", u"SINGAPORE")
|
||||
Country("SX", u"SINT MAARTEN (DUTCH PART)")
|
||||
Country("SK", u"SLOVAKIA")
|
||||
Country("SI", u"SLOVENIA")
|
||||
Country("SB", u"SOLOMON ISLANDS")
|
||||
Country("SO", u"SOMALIA")
|
||||
Country("ZA", u"SOUTH AFRICA")
|
||||
Country("GS", u"SOUTH GEORGIA AND THE SOUTH SANDWICH ISLANDS")
|
||||
Country("SS", u"SOUTH SUDAN")
|
||||
Country("ES", u"SPAIN")
|
||||
Country("LK", u"SRI LANKA")
|
||||
Country("SD", u"SUDAN")
|
||||
Country("SR", u"SURINAME")
|
||||
Country("SJ", u"SVALBARD AND JAN MAYEN")
|
||||
Country("SZ", u"SWAZILAND")
|
||||
Country("SE", u"SWEDEN")
|
||||
Country("CH", u"SWITZERLAND")
|
||||
Country("SY", u"SYRIAN ARAB REPUBLIC")
|
||||
Country("TW", u"TAIWAN, PROVINCE OF CHINA")
|
||||
Country("TJ", u"TAJIKISTAN")
|
||||
Country("TZ", u"TANZANIA, UNITED REPUBLIC OF")
|
||||
Country("TH", u"THAILAND")
|
||||
Country("TL", u"TIMOR-LESTE")
|
||||
Country("TG", u"TOGO")
|
||||
Country("TK", u"TOKELAU")
|
||||
Country("TO", u"TONGA")
|
||||
Country("TT", u"TRINIDAD AND TOBAGO")
|
||||
Country("TN", u"TUNISIA")
|
||||
Country("TR", u"TURKEY")
|
||||
Country("TM", u"TURKMENISTAN")
|
||||
Country("TC", u"TURKS AND CAICOS ISLANDS")
|
||||
Country("TV", u"TUVALU")
|
||||
Country("UG", u"UGANDA")
|
||||
Country("UA", u"UKRAINE")
|
||||
Country("AE", u"UNITED ARAB EMIRATES")
|
||||
Country("GB", u"UNITED KINGDOM")
|
||||
Country("US", u"UNITED STATES")
|
||||
Country("UM", u"UNITED STATES MINOR OUTLYING ISLANDS")
|
||||
Country("UY", u"URUGUAY")
|
||||
Country("UZ", u"UZBEKISTAN")
|
||||
Country("VU", u"VANUATU")
|
||||
Country("VE", u"VENEZUELA, BOLIVARIAN REPUBLIC OF")
|
||||
Country("VN", u"VIET NAM")
|
||||
Country("VG", u"VIRGIN ISLANDS, BRITISH")
|
||||
Country("VI", u"VIRGIN ISLANDS, U.S.")
|
||||
Country("WF", u"WALLIS AND FUTUNA")
|
||||
Country("EH", u"WESTERN SAHARA")
|
||||
Country("YE", u"YEMEN")
|
||||
Country("ZM", u"ZAMBIA")
|
||||
Country("ZW", u"ZIMBABWE")
|
||||
109
libs/tmdb3/pager.py
Executable file
109
libs/tmdb3/pager.py
Executable file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: pager.py List-like structure designed for handling paged results
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
#-----------------------
|
||||
|
||||
from collections import Sequence, Iterator
|
||||
|
||||
class PagedIterator( Iterator ):
    """Iterator over a PagedList-style parent (anything supporting len()
    and integer indexing), walking it from first to last index."""

    def __init__(self, parent):
        self._parent = parent
        self._index = -1
        # Cache the length up front so pages loaded mid-iteration do not
        # extend this iteration.
        self._len = len(parent)

    def __iter__(self):
        return self

    def next(self):
        """Return the next item, raising StopIteration when exhausted."""
        self._index += 1
        if self._index == self._len:
            raise StopIteration
        return self._parent[self._index]

    # Fix: the iterator protocol on Python 3 (and the abc.Iterator base)
    # requires __next__; without it the class cannot be instantiated or
    # iterated there. Alias keeps the existing .next() API intact.
    __next__ = next
|
||||
|
||||
class UnpagedData( object ):
    """Placeholder marking a list slot whose real data has not been
    fetched yet; multiplying by an int yields that many fresh copies."""

    def copy(self):
        # produce a new placeholder of the same (possibly derived) class
        return self.__class__()

    def __mul__(self, count):
        return (self.copy() for _ in range(count))

    def __rmul__(self, count):
        return (self.copy() for _ in range(count))
|
||||
|
||||
class PagedList( Sequence ):
    """
    List-like object, with support for automatically grabbing additional
    pages from a data source.

    Subclasses must provide _getpage() to fetch one page of results.
    """
    _iter_class = None

    def __iter__(self):
        # lazily build a per-subclass iterator type with a matching name
        if self._iter_class is None:
            self._iter_class = type(self.__class__.__name__ + 'Iterator',
                                    (PagedIterator,), {})
        return self._iter_class(self)

    def __len__(self):
        # _len is set by subclasses once the total is known; until then
        # fall back to however much data is currently loaded.
        # Fix: narrowed the original bare `except:` to AttributeError so
        # unrelated errors are no longer swallowed.
        try:
            return self._len
        except AttributeError:
            return len(self._data)

    def __init__(self, iterable, pagesize=20):
        self._data = list(iterable)
        self._pagesize = pagesize

    def __getitem__(self, index):
        if isinstance(index, slice):
            # Fix: range instead of py2-only xrange (same iteration result)
            return [self[x] for x in range(*index.indices(len(self)))]
        if index >= len(self):
            raise IndexError("list index outside range")
        # fetch the page holding this index if it is missing or a stub
        if (index >= len(self._data)) \
                or isinstance(self._data[index], UnpagedData):
            # Fix: explicit floor division; `/` on py3 would yield a float
            self._populatepage(index // self._pagesize + 1)
        return self._data[index]

    def __setitem__(self, index, value):
        raise NotImplementedError

    def __delitem__(self, index):
        raise NotImplementedError

    def __contains__(self, item):
        raise NotImplementedError

    def _populatepage(self, page):
        """Fetch the given 1-indexed page and splice it into _data."""
        pagestart = (page - 1) * self._pagesize
        if len(self._data) < pagestart:
            # pad the gap with placeholders so indices line up
            self._data.extend(UnpagedData() * (pagestart - len(self._data)))
        if len(self._data) == pagestart:
            self._data.extend(self._getpage(page))
        else:
            # overwrite existing placeholders in place
            for data in self._getpage(page):
                self._data[pagestart] = data
                pagestart += 1

    def _getpage(self, page):
        raise NotImplementedError("PagedList._getpage() must be provided "
                                  "by subclass")
|
||||
|
||||
class PagedRequest( PagedList ):
    """
    PagedList backed by a Request object: additional pages are fetched by
    re-issuing the request with an updated 'page' argument.
    """
    def __init__(self, request, handler=None):
        self._request = request
        if handler:
            self._handler = handler
        super(PagedRequest, self).__init__(self._getpage(1), 20)

    def _getpage(self, page):
        """Yield handled results from the given page, recording the total
        result count for __len__."""
        response = self._request.new(page=page).readJSON()
        self._len = response['total_results']
        for item in response['results']:
            yield self._handler(item)
|
||||
|
||||
157
libs/tmdb3/request.py
Executable file
157
libs/tmdb3/request.py
Executable file
@@ -0,0 +1,157 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: tmdb_request.py
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
# Purpose: Wrapped urllib2.Request class pre-configured for accessing the
|
||||
# TMDb v3 API
|
||||
#-----------------------
|
||||
|
||||
from tmdb_exceptions import *
|
||||
from locales import get_locale
|
||||
from cache import Cache
|
||||
|
||||
from urllib import urlencode
|
||||
import urllib2
|
||||
import json
|
||||
|
||||
DEBUG = False
|
||||
cache = Cache(filename='pytmdb3.cache')
|
||||
|
||||
#DEBUG = True
|
||||
#cache = Cache(engine='null')
|
||||
|
||||
def set_key(key):
    """
    Specify the API key to use retrieving data from themoviedb.org. This
    key must be set before any calls will function.

    Raises TMDBKeyInvalid unless the key is a 32-character hex string.
    """
    if len(key) != 32:
        raise TMDBKeyInvalid("Specified API key must be 128-bit hex")
    try:
        int(key, 16)
    except ValueError:
        # Fix: narrowed from a bare `except:`; a 32-character non-hex
        # string raises exactly ValueError here.
        raise TMDBKeyInvalid("Specified API key must be 128-bit hex")
    Request._api_key = key
|
||||
|
||||
def set_cache(engine=None, *args, **kwargs):
    """Specify caching engine and properties.

    Delegates to the module-level Cache instance; all arguments are
    passed straight through to Cache.configure().
    """
    cache.configure(engine, *args, **kwargs)
|
||||
|
||||
class Request( urllib2.Request ):
    """Wrapped urllib2.Request pre-configured for the TMDb v3 API:
    builds the full query URL, sets the JSON Accept header, and converts
    HTTP/TMDB errors into library-specific exceptions."""

    # Shared across all requests; set via set_key() before first use.
    _api_key = None
    _base_url = "http://api.themoviedb.org/3/"

    @property
    def api_key(self):
        # Guard: fail loudly if set_key() has not been called yet.
        if self._api_key is None:
            raise TMDBKeyMissing("API key must be specified before "+\
                                 "requests can be made")
        return self._api_key

    def __init__(self, url, **kwargs):
        """Return a request object, using specified API path and arguments."""
        kwargs['api_key'] = self.api_key
        self._url = url.lstrip('/')
        # Drop arguments explicitly given as None so they are omitted
        # from the query string entirely.
        self._kwargs = dict([(kwa,kwv) for kwa,kwv in kwargs.items()
                                       if kwv is not None])

        # Encode all argument values with the active locale before
        # building the final URL.
        locale = get_locale()
        kwargs = {}
        for k,v in self._kwargs.items():
            kwargs[k] = locale.encode(v)
        url = '{0}{1}?{2}'.format(self._base_url, self._url, urlencode(kwargs))

        urllib2.Request.__init__(self, url)
        self.add_header('Accept', 'application/json')
        self.lifetime = 3600 # 1hr

    def new(self, **kwargs):
        """Create a new instance of the request, with tweaked arguments."""
        args = dict(self._kwargs)
        for k,v in kwargs.items():
            if v is None:
                # passing None for an argument removes it entirely
                if k in args:
                    del args[k]
            else:
                args[k] = v
        obj = self.__class__(self._url, **args)
        obj.lifetime = self.lifetime
        return obj

    def add_data(self, data):
        """Provide data to be sent with POST."""
        urllib2.Request.add_data(self, urlencode(data))

    def open(self):
        """Open a file object to the specified URL."""
        try:
            if DEBUG:
                print 'loading '+self.get_full_url()
                if self.has_data():
                    print ' '+self.get_data()
            return urllib2.urlopen(self)
        except urllib2.HTTPError, e:
            # wrap in a library-specific exception for uniform handling
            raise TMDBHTTPError(e)

    def read(self):
        """Return result from specified URL as a string."""
        return self.open().read()

    @cache.cached(urllib2.Request.get_full_url)
    def readJSON(self):
        """Parse result from specified URL as JSON data.

        On HTTP errors, attempts to parse the error body anyway so a more
        specific TMDB status error can be raised via handle_status().
        """
        url = self.get_full_url()
        try:
            # catch HTTP error from open()
            data = json.load(self.open())
        except TMDBHTTPError, e:
            try:
                # try to load whatever was returned
                data = json.loads(e.response)
            except:
                # cannot parse json, just raise existing error
                raise e
            else:
                # response parsed, try to raise error from TMDB
                handle_status(data, url)
                # no error from TMDB, just raise existing error
                raise e
        handle_status(data, url)
        #if DEBUG:
        #    import pprint
        #    pprint.PrettyPrinter().pprint(data)
        return data
|
||||
|
||||
# Mapping of TMDB API status codes to the exception (if any) to raise
# for them; None marks success/informational codes that need no action.
status_handlers = {
    1: None,
    2: TMDBRequestInvalid('Invalid service - This service does not exist.'),
    3: TMDBRequestError('Authentication Failed - You do not have '
                        'permissions to access this service.'),
    4: TMDBRequestInvalid("Invalid format - This service doesn't exist "
                          'in that format.'),
    5: TMDBRequestInvalid('Invalid parameters - Your request parameters '
                          'are incorrect.'),
    6: TMDBRequestInvalid('Invalid id - The pre-requisite id is invalid '
                          'or not found.'),
    7: TMDBKeyInvalid('Invalid API key - You must be granted a valid key.'),
    8: TMDBRequestError('Duplicate entry - The data you tried to submit '
                        'already exists.'),
    # Fix: message typo corrected (was 'tempirarily')
    9: TMDBOffline('This service is temporarily offline. Try again later.'),
    10: TMDBKeyRevoked('Suspended API key - Access to your account has been '
                       'suspended, contact TMDB.'),
    11: TMDBError('Internal error - Something went wrong. Contact TMDb.'),
    12: None,
    13: None,
    14: TMDBRequestError('Authentication Failed.'),
    15: TMDBError('Failed'),
    16: TMDBError('Device Denied'),
    17: TMDBError('Session Denied')}
|
||||
|
||||
def handle_status(data, query):
    """Raise the exception mapped to a TMDB response's status code,
    annotating it with the code and originating query; success codes
    (and responses without a status_code) do nothing."""
    code = data.get('status_code', 1)
    error = status_handlers[code]
    if error is None:
        return
    error.tmdberrno = code
    error.query = query
    raise error
|
||||
689
libs/tmdb3/tmdb_api.py
Executable file
689
libs/tmdb3/tmdb_api.py
Executable file
@@ -0,0 +1,689 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: tmdb_api.py Simple-to-use Python interface to TMDB's API v3
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
# Purpose: This Python library is intended to provide a series of classes
|
||||
# and methods for search and retrieval of text metadata and image
|
||||
# URLs from TMDB.
|
||||
# Preliminary API specifications can be found at
|
||||
# http://help.themoviedb.org/kb/api/about-3
|
||||
# License: Creative Commons GNU GPL v2
|
||||
# (http://creativecommons.org/licenses/GPL/2.0/)
|
||||
#-----------------------
|
||||
|
||||
__title__ = "tmdb_api - Simple-to-use Python interface to TMDB's API v3 "+\
|
||||
"(www.themoviedb.org)"
|
||||
__author__ = "Raymond Wagner"
|
||||
__purpose__ = """
|
||||
This Python library is intended to provide a series of classes and methods
|
||||
for search and retrieval of text metadata and image URLs from TMDB.
|
||||
Preliminary API specifications can be found at
|
||||
http://help.themoviedb.org/kb/api/about-3"""
|
||||
|
||||
__version__="v0.6.17"
|
||||
# 0.1.0 Initial development
|
||||
# 0.2.0 Add caching mechanism for API queries
|
||||
# 0.2.1 Temporary work around for broken search paging
|
||||
# 0.3.0 Rework backend machinery for managing OO interface to results
|
||||
# 0.3.1 Add collection support
|
||||
# 0.3.2 Remove MythTV key from results.py
|
||||
# 0.3.3 Add functional language support
|
||||
# 0.3.4 Re-enable search paging
|
||||
# 0.3.5 Add methods for grabbing current, popular, and top rated movies
|
||||
# 0.3.6 Rework paging mechanism
|
||||
# 0.3.7 Generalize caching mechanism, and allow controllability
|
||||
# 0.4.0 Add full locale support (language and country) and optional fall through
|
||||
# 0.4.1 Add custom classmethod for dealing with IMDB movie IDs
|
||||
# 0.4.2 Improve cache file selection for Windows systems
|
||||
# 0.4.3 Add a few missed Person properties
|
||||
# 0.4.4 Add support for additional Studio information
|
||||
# 0.4.5 Add locale fallthrough for images and alternate titles
|
||||
# 0.4.6 Add slice support for search results
|
||||
# 0.5.0 Rework cache framework and improve file cache performance
|
||||
# 0.6.0 Add user authentication support
|
||||
# 0.6.1 Add adult filtering for people searches
|
||||
# 0.6.2 Add similar movie search for Movie objects
|
||||
# 0.6.3 Add Studio search
|
||||
# 0.6.4 Add Genre list and associated Movie search
|
||||
# 0.6.5 Prevent data from being blanked out by subsequent queries
|
||||
# 0.6.6 Turn date processing errors into mutable warnings
|
||||
# 0.6.7 Add support for searching by year
|
||||
# 0.6.8 Add support for collection images
|
||||
# 0.6.9 Correct Movie image language filtering
|
||||
# 0.6.10 Add upcoming movie classmethod
|
||||
# 0.6.11 Fix URL for top rated Movie query
|
||||
# 0.6.12 Add support for Movie watchlist query and editing
|
||||
# 0.6.13 Fix URL for rating Movies
|
||||
# 0.6.14 Add support for Lists
|
||||
# 0.6.15 Add ability to search Collections
|
||||
# 0.6.16 Make absent primary images return None (previously u'')
|
||||
# 0.6.17 Add userrating/votes to Image, add overview to Collection, remove
|
||||
# releasedate sorting from Collection Movies
|
||||
|
||||
from request import set_key, Request
|
||||
from util import Datapoint, Datalist, Datadict, Element, NameRepr, SearchRepr
|
||||
from pager import PagedRequest
|
||||
from locales import get_locale, set_locale
|
||||
from tmdb_auth import get_session, set_session
|
||||
from tmdb_exceptions import *
|
||||
|
||||
import datetime
|
||||
|
||||
DEBUG = False
|
||||
|
||||
def process_date(datestr):
    """Parse a 'YYYY-MM-DD' string into a datetime.date.

    Malformed input does not raise: a Warning is emitted (attributed to
    the caller's location) and None is returned, since upstream TMDB data
    occasionally carries unparseable dates.
    """
    pieces = datestr.split('-')
    try:
        parsed = datetime.date(*map(int, pieces))
    except (TypeError, ValueError):
        import sys
        import traceback
        import warnings
        # Attribute the warning to the frame that raised so the message
        # points at the offending data, not this helper.
        tb = sys.exc_info()[2]
        filename, lineno = traceback.extract_tb(tb)[-1][:2]
        warnings.warn_explicit(
                ('"{0}" is not a supported date format. '
                 'Please fix upstream data at http://www.themoviedb.org.')
                .format(datestr), Warning, filename, lineno)
        return None
    else:
        return parsed
|
||||
|
||||
class Configuration( Element ):
    """Singleton holding the server-supplied API configuration.

    Only the 'images' section (base URL and available sizes) is exposed.
    """
    images = Datapoint('images')

    def _populate(self):
        # Lazy fetch; Element triggers this on first attribute access.
        return Request('configuration')

# Replace the class with its sole instance: the configuration is global
# state shared by all users of the module.
Configuration = Configuration()
|
||||
|
||||
class Account( NameRepr, Element ):
    """The TMDB user account bound to an authenticated session."""

    def _populate(self):
        # Requires an authenticated session; Element calls this lazily.
        return Request('account', session_id=self._session.sessionid)

    id = Datapoint('id')
    adult = Datapoint('include_adult')
    country = Datapoint('iso_3166_1')
    language = Datapoint('iso_639_1')
    name = Datapoint('name')
    username = Datapoint('username')

    @property
    def locale(self):
        # Locale built from the account's stored language and country.
        return get_locale(self.language, self.country)
|
||||
|
||||
def searchMovie(query, locale=None, adult=False, year=None):
    """Search TMDB for movies matching *query*.

    *year* may be a plain integer or anything exposing a ``.year``
    attribute (date/datetime).
    """
    params = {'query': query, 'include_adult': adult}
    if year is not None:
        # Accept date-like objects as well as plain integers.
        params['year'] = getattr(year, 'year', year)
    return MovieSearchResult(Request('search/movie', **params), locale=locale)
|
||||
|
||||
def searchMovieWithYear(query, locale=None, adult=False):
    """Search for movies, honoring a trailing ' (YYYY)' hint in the query.

    A plausible year (1886-2049) is stripped from the query text and
    passed to the search as a filter; anything else is left untouched.
    """
    year = None
    # Cheap structural check avoids needing a regular expression here.
    if len(query) > 6 and query[-1] == ')' and query[-6] == '(':
        try:
            candidate = int(query[-5:-1])
        except ValueError:
            candidate = None
        if candidate is not None:
            if 1885 < candidate < 2050:
                year = candidate
                query = query[:-7]      # drop the ' (YYYY)' suffix
            # out-of-range values fail the sanity check: pass through
    return searchMovie(query, locale, adult, year)
|
||||
|
||||
class MovieSearchResult( SearchRepr, PagedRequest ):
    """Lazy, paged list of Movie objects built from a search request."""
    _name = None

    def __init__(self, request, locale=None):
        if locale is None:
            locale = get_locale()
        build = lambda x: Movie(raw=x, locale=locale)
        super(MovieSearchResult, self).__init__(
                request.new(language=locale.language), build)
|
||||
|
||||
def searchPerson(query, adult=False):
    """Search TMDB for people matching *query* by name."""
    req = Request('search/person', query=query, include_adult=adult)
    return PeopleSearchResult(req)
|
||||
|
||||
class PeopleSearchResult( SearchRepr, PagedRequest ):
    """Stores a list of search matches."""
    # Lazily materializes Person objects, one page at a time.
    _name = None
    def __init__(self, request):
        super(PeopleSearchResult, self).__init__(request,
                               lambda x: Person(raw=x))
|
||||
|
||||
def searchStudio(query):
    """Search TMDB for production companies matching *query*."""
    return StudioSearchResult(Request('search/company', query=query))
|
||||
|
||||
class StudioSearchResult( SearchRepr, PagedRequest ):
    """Stores a list of search matches."""
    # Lazily materializes Studio objects, one page at a time.
    _name = None
    def __init__(self, request):
        super(StudioSearchResult, self).__init__(request,
                               lambda x: Studio(raw=x))
|
||||
|
||||
def searchList(query, adult=False):
    """Search TMDB for user-created lists matching *query*.

    Fix: the result object was previously constructed but never
    returned, so this function always returned None.
    """
    return ListSearchResult(Request('search/list', query=query,
                                    include_adult=adult))
|
||||
|
||||
class ListSearchResult( SearchRepr, PagedRequest ):
    """Stores a list of search matches."""
    # Lazily materializes List objects, one page at a time.
    _name = None
    def __init__(self, request):
        super(ListSearchResult, self).__init__(request,
                               lambda x: List(raw=x))
|
||||
|
||||
def searchCollection(query, locale=None):
    """Search TMDB for movie collections matching *query*."""
    request = Request('search/collection', query=query)
    return CollectionSearchResult(request, locale=locale)
|
||||
|
||||
class CollectionSearchResult( SearchRepr, PagedRequest ):
    """Lazy, paged list of Collection objects from a search request."""
    _name = None

    def __init__(self, request, locale=None):
        if locale is None:
            locale = get_locale()
        build = lambda x: Collection(raw=x, locale=locale)
        super(CollectionSearchResult, self).__init__(
                request.new(language=locale.language), build)
|
||||
|
||||
class Image( Element ):
    """A single artwork entry (server file path plus metadata)."""
    filename = Datapoint('file_path', initarg=1,
                         handler=lambda x: x.lstrip('/'))
    aspectratio = Datapoint('aspect_ratio')
    height = Datapoint('height')
    width = Datapoint('width')
    language = Datapoint('iso_639_1')
    userrating = Datapoint('vote_average')
    votes = Datapoint('vote_count')

    def sizes(self):
        # Subclasses report the sizes the server actually offers.
        return ['original']

    def geturl(self, size='original'):
        """Build a full URL for this image at the requested size."""
        if size not in self.sizes():
            raise TMDBImageSizeError
        base = Configuration.images['base_url'].rstrip('/')
        return base + '/{0}/{1}'.format(size, self.filename)

    # Ordering prefers images in the locale's language while keeping the
    # relative order of everything else stable.
    def __lt__(self, other):
        return (self.language == self._locale.language) \
                and (self.language != other.language)

    def __gt__(self, other):
        return (self.language != other.language) \
                and (other.language == self._locale.language)

    # Two images are the same when they share a file path.
    def __eq__(self, other):
        return self.filename == other.filename

    # Truthiness tracks whether a file path is present (Python 2 hook).
    def __nonzero__(self):
        return len(self.filename) != 0

    def __repr__(self):
        # BASE62 encoded filename, no need to worry about unicode
        return u"<{0.__class__.__name__} '{0.filename}'>".format(self)
|
||||
|
||||
# Concrete image flavors; each reports the size list the server publishes
# for its category in the /configuration payload.

class Backdrop( Image ):
    def sizes(self):
        return Configuration.images['backdrop_sizes']

class Poster( Image ):
    def sizes(self):
        return Configuration.images['poster_sizes']

class Profile( Image ):
    def sizes(self):
        return Configuration.images['profile_sizes']

class Logo( Image ):
    def sizes(self):
        return Configuration.images['logo_sizes']
|
||||
|
||||
class AlternateTitle( Element ):
    """A title variant used in a particular country."""
    country = Datapoint('iso_3166_1')
    title = Datapoint('title')

    # sort preferring locale's country, but keep remaining ordering consistent
    def __lt__(self, other):
        return (self.country == self._locale.country) \
                and (self.country != other.country)
    def __gt__(self, other):
        return (self.country != other.country) \
                and (other.country == self._locale.country)
    def __eq__(self, other):
        # Titles are grouped by country, not compared by text.
        return self.country == other.country

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.title}' ({0.country})>"\
                .format(self).encode('utf-8')
|
||||
|
||||
class Person( Element ):
    """A person (cast or crew member) record from TMDB."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    biography = Datapoint('biography')
    dayofbirth = Datapoint('birthday', default=None, handler=process_date)
    dayofdeath = Datapoint('deathday', default=None, handler=process_date)
    homepage = Datapoint('homepage')
    birthplace = Datapoint('place_of_birth')
    profile = Datapoint('profile_path', handler=Profile, \
                        raw=False, default=None)
    adult = Datapoint('adult')
    aliases = Datalist('also_known_as')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}'>"\
                .format(self).encode('utf-8')

    # Pollers: each returns the Request used to lazily fill the
    # associated Datalist attributes below on first access.
    def _populate(self):
        return Request('person/{0}'.format(self.id))
    def _populate_credits(self):
        return Request('person/{0}/credits'.format(self.id), \
                            language=self._locale.language)
    def _populate_images(self):
        return Request('person/{0}/images'.format(self.id))

    roles = Datalist('cast', handler=lambda x: ReverseCast(raw=x), \
                     poller=_populate_credits)
    crew = Datalist('crew', handler=lambda x: ReverseCrew(raw=x), \
                    poller=_populate_credits)
    profiles = Datalist('profiles', handler=Profile, poller=_populate_images)
|
||||
|
||||
# Credit entries: a Person plus their role on a specific movie.

class Cast( Person ):
    character = Datapoint('character')
    order = Datapoint('order')      # billing position within the credits

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}' as '{0.character}'>"\
                .format(self).encode('utf-8')

class Crew( Person ):
    job = Datapoint('job')
    department = Datapoint('department')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}','{0.job}'>"\
                .format(self).encode('utf-8')
|
||||
|
||||
class Keyword( Element ):
    """A keyword/tag attached to movies."""
    id = Datapoint('id')
    name = Datapoint('name')

    def __repr__(self):
        return u"<{0.__class__.__name__} {0.name}>".format(self).encode('utf-8')
|
||||
|
||||
class Release( Element ):
    """A per-country release entry (date plus local certification)."""
    certification = Datapoint('certification')
    country = Datapoint('iso_3166_1')
    releasedate = Datapoint('release_date', handler=process_date)

    def __repr__(self):
        return u"<{0.__class__.__name__} {0.country}, {0.releasedate}>"\
                .format(self).encode('utf-8')
|
||||
|
||||
class Trailer( Element ):
    """Base trailer record: a named source at a given size."""
    name = Datapoint('name')
    size = Datapoint('size')
    source = Datapoint('source')

class YoutubeTrailer( Trailer ):
    """Trailer hosted on YouTube; 'source' holds the video id."""
    def geturl(self):
        return "http://www.youtube.com/watch?v={0}".format(self.source)

    def __repr__(self):
        # modified BASE64 encoding, no need to worry about unicode
        return u"<{0.__class__.__name__} '{0.name}'>".format(self)
|
||||
|
||||
class AppleTrailer( Element ):
    """An Apple-hosted trailer offered in multiple resolutions."""
    name = Datapoint('name')
    sources = Datadict('sources', handler=Trailer, attr='size')

    def sizes(self):
        return self.sources.keys()

    def geturl(self, size=None):
        """Return the stream URL, defaulting to the largest resolution."""
        if size is None:
            # Sizes look like '480p': pick the numerically largest one.
            resolutions = sorted(int(s[:-1]) for s in self.sources)
            size = str(resolutions[-1]) + 'p'
        return self.sources[size].source

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}'>".format(self)
|
||||
|
||||
class Translation( Element ):
    """An available translation of a movie's metadata."""
    name = Datapoint('name')
    language = Datapoint('iso_639_1')
    englishname = Datapoint('english_name')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.name}' ({0.language})>"\
                .format(self).encode('utf-8')
|
||||
|
||||
class Genre( NameRepr, Element ):
    """A movie genre; exposes the movies tagged with it."""
    id = Datapoint('id')
    name = Datapoint('name')

    def _populate_movies(self):
        return Request('genre/{0}/movies'.format(self.id), \
                            language=self._locale.language)

    @property
    def movies(self):
        # Cached, lazily-built paged search of movies in this genre.
        if 'movies' not in self._data:
            search = MovieSearchResult(self._populate_movies(), \
                                            locale=self._locale)
            search._name = "{0.name} Movies".format(self)
            self._data['movies'] = search
        return self._data['movies']

    @classmethod
    def getAll(cls, locale=None):
        """Return the full list of genres known to the server."""
        class GenreList( Element ):
            genres = Datalist('genres', handler=Genre)
            def _populate(self):
                return Request('genre/list', language=self._locale.language)
        return GenreList(locale=locale).genres
|
||||
|
||||
|
||||
class Studio( NameRepr, Element ):
    """A production company record."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    description = Datapoint('description')
    headquarters = Datapoint('headquarters')
    logo = Datapoint('logo_path', handler=Logo, \
                     raw=False, default=None)
    # FIXME: manage not-yet-defined handlers in a way that will propogate
    #        locale information properly
    parent = Datapoint('parent_company', \
                       handler=lambda x: Studio(raw=x))

    def _populate(self):
        return Request('company/{0}'.format(self.id))
    def _populate_movies(self):
        return Request('company/{0}/movies'.format(self.id), \
                       language=self._locale.language)

    # FIXME: add a cleaner way of adding types with no additional processing
    @property
    def movies(self):
        # Cached, lazily-built paged search of this studio's movies.
        if 'movies' not in self._data:
            search = MovieSearchResult(self._populate_movies(), \
                                       locale=self._locale)
            search._name = "{0.name} Movies".format(self)
            self._data['movies'] = search
        return self._data['movies']
|
||||
|
||||
class Country( NameRepr, Element ):
    """A production country (ISO 3166-1 code plus display name)."""
    code = Datapoint('iso_3166_1')
    name = Datapoint('name')

class Language( NameRepr, Element ):
    """A spoken language (ISO 639-1 code plus display name)."""
    code = Datapoint('iso_639_1')
    name = Datapoint('name')
|
||||
|
||||
class Movie( Element ):
    """A single movie record, with lazy access to credits, art and more."""

    @classmethod
    def latest(cls):
        """Return the most recently added movie on the server."""
        req = Request('latest/movie')
        req.lifetime = 600      # short cache: this changes constantly
        return cls(raw=req.readJSON())

    @classmethod
    def nowplaying(cls, locale=None):
        # NOTE(review): endpoint spelled 'movie/now-playing' here; the v3
        # API documents 'movie/now_playing' -- confirm against the server.
        res = MovieSearchResult(Request('movie/now-playing'), locale=locale)
        res._name = 'Now Playing'
        return res

    @classmethod
    def mostpopular(cls, locale=None):
        res = MovieSearchResult(Request('movie/popular'), locale=locale)
        res._name = 'Popular'
        return res

    @classmethod
    def toprated(cls, locale=None):
        res = MovieSearchResult(Request('movie/top_rated'), locale=locale)
        res._name = 'Top Rated'
        return res

    @classmethod
    def upcoming(cls, locale=None):
        res = MovieSearchResult(Request('movie/upcoming'), locale=locale)
        res._name = 'Upcoming'
        return res

    @classmethod
    def favorites(cls, session=None):
        """Movies the session's account has marked as favorites."""
        if session is None:
            session = get_session()
        account = Account(session=session)
        res = MovieSearchResult(
                    Request('account/{0}/favorite_movies'.format(account.id),
                            session_id=session.sessionid))
        res._name = "Favorites"
        return res

    @classmethod
    def ratedmovies(cls, session=None):
        """Movies the session's account has rated."""
        if session is None:
            session = get_session()
        account = Account(session=session)
        res = MovieSearchResult(
                    Request('account/{0}/rated_movies'.format(account.id),
                            session_id=session.sessionid))
        res._name = "Movies You Rated"
        return res

    @classmethod
    def watchlist(cls, session=None):
        """Movies on the session's account watchlist."""
        if session is None:
            session = get_session()
        account = Account(session=session)
        res = MovieSearchResult(
                    Request('account/{0}/movie_watchlist'.format(account.id),
                            session_id=session.sessionid))
        res._name = "Movies You're Watching"
        return res

    @classmethod
    def fromIMDB(cls, imdbid, locale=None):
        """Look up a movie by IMDb id ('tt1234567', '1234567' or int)."""
        try:
            # assume string
            if not imdbid.startswith('tt'):
                imdbid = "tt{0:0>7}".format(imdbid)
        except AttributeError:
            # assume integer
            imdbid = "tt{0:0>7}".format(imdbid)
        if locale is None:
            locale = get_locale()
        movie = cls(imdbid, locale=locale)
        movie._populate()
        return movie

    # Core attributes delivered by the basic 'movie/{id}' lookup.
    id = Datapoint('id', initarg=1)
    title = Datapoint('title')
    originaltitle = Datapoint('original_title')
    tagline = Datapoint('tagline')
    overview = Datapoint('overview')
    runtime = Datapoint('runtime')
    budget = Datapoint('budget')
    revenue = Datapoint('revenue')
    releasedate = Datapoint('release_date', handler=process_date)
    homepage = Datapoint('homepage')
    imdb = Datapoint('imdb_id')

    backdrop = Datapoint('backdrop_path', handler=Backdrop, \
                         raw=False, default=None)
    poster = Datapoint('poster_path', handler=Poster, \
                       raw=False, default=None)

    popularity = Datapoint('popularity')
    userrating = Datapoint('vote_average')
    votes = Datapoint('vote_count')

    adult = Datapoint('adult')
    collection = Datapoint('belongs_to_collection', handler=lambda x: \
                                                        Collection(raw=x))
    genres = Datalist('genres', handler=Genre)
    studios = Datalist('production_companies', handler=Studio)
    countries = Datalist('production_countries', handler=Country)
    languages = Datalist('spoken_languages', handler=Language)

    # Pollers: each returns the Request used to lazily fill the
    # corresponding Datalist/Datadict attributes defined below.
    def _populate(self):
        return Request('movie/{0}'.format(self.id), \
                            language=self._locale.language)
    def _populate_titles(self):
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['country'] = self._locale.country
        return Request('movie/{0}/alternative_titles'.format(self.id), **kwargs)
    def _populate_cast(self):
        return Request('movie/{0}/casts'.format(self.id))
    def _populate_images(self):
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['language'] = self._locale.language
        return Request('movie/{0}/images'.format(self.id), **kwargs)
    def _populate_keywords(self):
        return Request('movie/{0}/keywords'.format(self.id))
    def _populate_releases(self):
        return Request('movie/{0}/releases'.format(self.id))
    def _populate_trailers(self):
        return Request('movie/{0}/trailers'.format(self.id), \
                            language=self._locale.language)
    def _populate_translations(self):
        return Request('movie/{0}/translations'.format(self.id))

    alternate_titles = Datalist('titles', handler=AlternateTitle, \
                                poller=_populate_titles, sort=True)
    cast = Datalist('cast', handler=Cast, \
                    poller=_populate_cast, sort='order')
    crew = Datalist('crew', handler=Crew, poller=_populate_cast)
    backdrops = Datalist('backdrops', handler=Backdrop, \
                         poller=_populate_images, sort=True)
    posters = Datalist('posters', handler=Poster, \
                       poller=_populate_images, sort=True)
    keywords = Datalist('keywords', handler=Keyword, \
                        poller=_populate_keywords)
    releases = Datadict('countries', handler=Release, \
                        poller=_populate_releases, attr='country')
    youtube_trailers = Datalist('youtube', handler=YoutubeTrailer, \
                                poller=_populate_trailers)
    apple_trailers = Datalist('quicktime', handler=AppleTrailer, \
                              poller=_populate_trailers)
    translations = Datalist('translations', handler=Translation, \
                            poller=_populate_translations)

    def setFavorite(self, value):
        """Mark/unmark this movie as a favorite (requires a session)."""
        req = Request('account/{0}/favorite'.format(\
                            Account(session=self._session).id),
                      session_id=self._session.sessionid)
        req.add_data({'movie_id':self.id, 'favorite':str(bool(value)).lower()})
        req.lifetime = 0        # mutating call: never serve from cache
        req.readJSON()

    def setRating(self, value):
        """Rate this movie from 0 to 10 (requires a session)."""
        if not (0 <= value <= 10):
            raise TMDBError("Ratings must be between '0' and '10'.")
        req = Request('movie/{0}/rating'.format(self.id), \
                      session_id=self._session.sessionid)
        req.lifetime = 0
        req.add_data({'value':value})
        req.readJSON()

    def setWatchlist(self, value):
        """Add/remove this movie from the account watchlist."""
        req = Request('account/{0}/movie_watchlist'.format(\
                            Account(session=self._session).id),
                      session_id=self._session.sessionid)
        req.lifetime = 0
        req.add_data({'movie_id':self.id,
                      'movie_watchlist':str(bool(value)).lower()})
        req.readJSON()

    def getSimilar(self):
        return self.similar

    @property
    def similar(self):
        """Paged search of movies the server considers similar."""
        res = MovieSearchResult(Request('movie/{0}/similar_movies'\
                                            .format(self.id)),
                                locale=self._locale)
        res._name = 'Similar to {0}'.format(self._printable_name())
        return res

    @property
    def lists(self):
        """Paged search of user lists containing this movie."""
        res = ListSearchResult(Request('movie/{0}/lists'.format(self.id)))
        res._name = "Lists containing {0}".format(self._printable_name())
        return res

    def _printable_name(self):
        # Best-effort human-readable title plus year, for repr strings.
        if self.title is not None:
            s = u"'{0}'".format(self.title)
        elif self.originaltitle is not None:
            s = u"'{0}'".format(self.originaltitle)
        else:
            s = u"'No Title'"
        if self.releasedate:
            s = u"{0} ({1})".format(s, self.releasedate.year)
        return s

    def __repr__(self):
        return u"<{0} {1}>".format(self.__class__.__name__,\
                                   self._printable_name()).encode('utf-8')
|
||||
|
||||
# Reverse mappings: a Movie as seen from a Person's credit list.

class ReverseCast( Movie ):
    character = Datapoint('character')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.character}' on {1}>"\
                .format(self, self._printable_name()).encode('utf-8')

class ReverseCrew( Movie ):
    department = Datapoint('department')
    job = Datapoint('job')

    def __repr__(self):
        return u"<{0.__class__.__name__} '{0.job}' for {1}>"\
                .format(self, self._printable_name()).encode('utf-8')
|
||||
|
||||
class Collection( NameRepr, Element ):
    """A movie collection (franchise) and its member movies."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    backdrop = Datapoint('backdrop_path', handler=Backdrop, \
                         raw=False, default=None)
    poster = Datapoint('poster_path', handler=Poster, \
                       raw=False, default=None)
    members = Datalist('parts', handler=Movie)
    overview = Datapoint('overview')

    def _populate(self):
        return Request('collection/{0}'.format(self.id), \
                            language=self._locale.language)
    def _populate_images(self):
        # Restrict to the locale's language unless fallthrough is enabled.
        kwargs = {}
        if not self._locale.fallthrough:
            kwargs['language'] = self._locale.language
        return Request('collection/{0}/images'.format(self.id), **kwargs)

    backdrops = Datalist('backdrops', handler=Backdrop, \
                         poller=_populate_images, sort=True)
    posters = Datalist('posters', handler=Poster, \
                       poller=_populate_images, sort=True)
|
||||
|
||||
class List( NameRepr, Element ):
    """A user-created list of movies."""
    id = Datapoint('id', initarg=1)
    name = Datapoint('name')
    author = Datapoint('created_by')
    description = Datapoint('description')
    favorites = Datapoint('favorite_count')
    language = Datapoint('iso_639_1')
    count = Datapoint('item_count')
    poster = Datapoint('poster_path', handler=Poster, \
                       raw=False, default=None)

    members = Datalist('items', handler=Movie)

    def _populate(self):
        return Request('list/{0}'.format(self.id))
|
||||
|
||||
131
libs/tmdb3/tmdb_auth.py
Executable file
131
libs/tmdb3/tmdb_auth.py
Executable file
@@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: tmdb_auth.py
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
# Purpose: Provide authentication and session services for
|
||||
# calls against the TMDB v3 API
|
||||
#-----------------------
|
||||
|
||||
import re
from datetime import datetime as _pydatetime, \
                     timedelta, \
                     tzinfo as _pytzinfo
|
||||
class datetime( _pydatetime ):
    """Customized datetime class with ISO-8601 format parsing.

    Fix: the internal fixed-offset tzinfo used ``timedelta`` without it
    being imported, so any timestamp carrying a timezone raised
    NameError; ``timedelta`` is now imported at module scope.
    """
    # Accepts 'YYYY-MM-DD<any char>HH:MM[:SS][Z|+H[H][:MM]]'.
    _reiso = re.compile('(?P<year>[0-9]{4})'
                       '-(?P<month>[0-9]{1,2})'
                       '-(?P<day>[0-9]{1,2})'
                        '.'
                        '(?P<hour>[0-9]{2})'
                       ':(?P<min>[0-9]{2})'
                       '(:(?P<sec>[0-9]{2}))?'
                        '(?P<tz>Z|'
                            '(?P<tzdirec>[-+])'
                            '(?P<tzhour>[0-9]{1,2})'
                            '(:)?'
                            '(?P<tzmin>[0-9]{2})?'
                        ')?')

    class _tzinfo( _pytzinfo ):
        """Fixed-offset timezone built from a parsed ISO offset."""
        def __init__(self, direc='+', hr=0, mins=0):
            if direc == '-':
                hr = -1*int(hr)
            self._offset = timedelta(hours=int(hr), minutes=int(mins))
        def utcoffset(self, dt): return self._offset
        def tzname(self, dt): return ''
        def dst(self, dt): return timedelta(0)

    @classmethod
    def fromIso(cls, isotime, sep='T'):
        """Parse an ISO-8601 string into a (possibly tz-aware) datetime.

        Raises TypeError when the string does not match the expected
        format. Missing seconds default to 0; 'Z' means UTC.
        """
        match = cls._reiso.match(isotime)
        if match is None:
            raise TypeError("time data '%s' does not match ISO 8601 format" \
                            % isotime)

        dt = [int(a) for a in match.groups()[:5]]
        if match.group('sec') is not None:
            dt.append(int(match.group('sec')))
        else:
            dt.append(0)
        if match.group('tz'):
            if match.group('tz') == 'Z':
                tz = cls._tzinfo()
            elif match.group('tzmin'):
                tz = cls._tzinfo(*match.group('tzdirec', 'tzhour', 'tzmin'))
            else:
                tz = cls._tzinfo(*match.group('tzdirec', 'tzhour'))
            dt.append(0)        # microseconds
            dt.append(tz)
        return cls(*dt)
|
||||
|
||||
from request import Request
|
||||
from tmdb_exceptions import *
|
||||
|
||||
# Module-level default session shared by callers that don't manage
# their own.
syssession = None

def set_session(sessionid):
    """Install a module-wide default Session built from *sessionid*."""
    global syssession
    syssession = Session(sessionid)

def get_session(sessionid=None):
    """Return a Session: explicit id > module default > brand new."""
    if sessionid:
        return Session(sessionid)
    if syssession is not None:
        return syssession
    return Session.new()
|
||||
|
||||
class Session( object ):
    """TMDB session, lazily exchanging an auth token for a session id."""

    @classmethod
    def new(cls):
        """Create an unauthenticated session; id is generated on demand."""
        return cls(None)

    def __init__(self, sessionid):
        self.sessionid = sessionid

    @property
    def sessionid(self):
        # Lazily trade the auth token for a real session id on first use.
        if self._sessionid is None:
            if self._authtoken is None:
                raise TMDBError("No Auth Token to produce Session for")
            # TODO: check authtokenexpiration against current time
            req = Request('authentication/session/new', \
                                request_token=self._authtoken)
            req.lifetime = 0
            dat = req.readJSON()
            if not dat['success']:
                raise TMDBError("Session generation failed")
            self._sessionid = dat['session_id']
        return self._sessionid

    @sessionid.setter
    def sessionid(self, value):
        # Setting an explicit id marks the session authenticated and
        # discards any pending auth token state.
        self._sessionid = value
        self._authtoken = None
        self._authtokenexpiration = None
        if value is None:
            self.authenticated = False
        else:
            self.authenticated = True

    @property
    def authtoken(self):
        # Request (and cache) a new auth token awaiting user approval.
        if self.authenticated:
            raise TMDBError("Session is already authenticated")
        if self._authtoken is None:
            req = Request('authentication/token/new')
            req.lifetime = 0
            dat = req.readJSON()
            if not dat['success']:
                raise TMDBError("Auth Token request failed")
            self._authtoken = dat['request_token']
            self._authtokenexpiration = datetime.fromIso(dat['expires_at'])
        return self._authtoken

    @property
    def callbackurl(self):
        # URL the user must visit to approve the pending auth token.
        return "http://www.themoviedb.org/authenticate/"+self._authtoken
|
||||
|
||||
89
libs/tmdb3/tmdb_exceptions.py
Executable file
89
libs/tmdb3/tmdb_exceptions.py
Executable file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: tmdb_exceptions.py Common exceptions used in tmdbv3 API library
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
#-----------------------
|
||||
|
||||
class TMDBError( Exception ):
    """Base exception for the tmdb3 API, carrying a numeric error code.

    Subclasses automatically pick up their matching code from the
    constants below based on the class name (e.g. TMDBKeyError -> 10).
    """
    Error = 0
    KeyError = 10
    KeyMissing = 20
    KeyInvalid = 30
    KeyRevoked = 40
    RequestError = 50
    RequestInvalid = 51
    PagingIssue = 60
    CacheError = 70
    CacheReadError = 71
    CacheWriteError = 72
    CacheDirectoryError = 73
    ImageSizeError = 80
    HTTPError = 90
    Offline = 100
    LocaleError = 110

    def __init__(self, msg=None, errno=0):
        """Store *msg* as args and resolve the code when none is given."""
        self.errno = errno
        if errno == 0:
            # Derive the code by stripping the 'TMDB' prefix from the
            # class name. The previous code *prepended* 'TMDB' instead
            # (yielding e.g. 'TMDBTMDBKeyError'), so the lookup never
            # matched and subclass errno was always 0.
            self.errno = getattr(self, self.__class__.__name__[4:], errno)
        self.args = (msg,)
|
||||
|
||||
# Concrete exception hierarchy; numeric error codes are resolved
# automatically by TMDBError.__init__ from each class's name.

class TMDBKeyError( TMDBError ):
    pass

class TMDBKeyMissing( TMDBKeyError ):
    pass

class TMDBKeyInvalid( TMDBKeyError ):
    pass

class TMDBKeyRevoked( TMDBKeyInvalid ):
    pass

class TMDBRequestError( TMDBError ):
    pass

class TMDBRequestInvalid( TMDBRequestError ):
    pass

class TMDBPagingIssue( TMDBRequestError ):
    pass

class TMDBCacheError( TMDBRequestError ):
    pass

class TMDBCacheReadError( TMDBCacheError ):
    # Cache file exists but is not readable by this user.
    def __init__(self, filename):
        super(TMDBCacheReadError, self).__init__(
            "User does not have permission to access cache file: {0}.".format(filename))
        self.filename = filename

class TMDBCacheWriteError( TMDBCacheError ):
    # Cache file location is not writable by this user.
    def __init__(self, filename):
        super(TMDBCacheWriteError, self).__init__(
            "User does not have permission to write cache file: {0}.".format(filename))
        self.filename = filename

class TMDBCacheDirectoryError( TMDBCacheError ):
    # Parent directory for the cache file is missing.
    def __init__(self, filename):
        super(TMDBCacheDirectoryError, self).__init__(
            "Directory containing cache file does not exist: {0}.".format(filename))
        self.filename = filename

class TMDBImageSizeError( TMDBError ):
    pass

class TMDBHTTPError( TMDBError ):
    # Wraps a urllib HTTPError, preserving status code and body.
    def __init__(self, err):
        self.httperrno = err.code
        self.response = err.fp.read()
        super(TMDBHTTPError, self).__init__(str(err))

class TMDBOffline( TMDBError ):
    pass

class TMDBLocaleError( TMDBError ):
    pass
|
||||
|
||||
366
libs/tmdb3/util.py
Executable file
366
libs/tmdb3/util.py
Executable file
@@ -0,0 +1,366 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#-----------------------
|
||||
# Name: util.py Assorted utilities used in tmdb_api
|
||||
# Python Library
|
||||
# Author: Raymond Wagner
|
||||
#-----------------------
|
||||
|
||||
from copy import copy
|
||||
from locales import get_locale
|
||||
from tmdb_auth import get_session
|
||||
|
||||
class NameRepr( object ):
    """Mixin for __repr__ methods using 'name' attribute."""
    def __repr__(self):
        # Encoded to bytes for Python 2 repr semantics.
        return u"<{0.__class__.__name__} '{0.name}'>"\
                .format(self).encode('utf-8')
|
||||
|
||||
class SearchRepr( object ):
    """
    Mixin for __repr__ methods for classes with '_name' and
    '_request' attributes.
    """
    def __repr__(self):
        # Fall back to the original query string when no name was set.
        name = self._name if self._name else self._request._kwargs['query']
        return u"<Search Results: {0}>".format(name).encode('utf-8')
|
||||
|
||||
class Poller( object ):
|
||||
"""
|
||||
Wrapper for an optional callable to populate an Element derived class
|
||||
with raw data, or data from a Request.
|
||||
"""
|
||||
def __init__(self, func, lookup, inst=None):
|
||||
self.func = func
|
||||
self.lookup = lookup
|
||||
self.inst = inst
|
||||
if func:
|
||||
# with function, this allows polling data from the API
|
||||
self.__doc__ = func.__doc__
|
||||
self.__name__ = func.__name__
|
||||
self.__module__ = func.__module__
|
||||
else:
|
||||
# without function, this is just a dummy poller used for applying
|
||||
# raw data to a new Element class with the lookup table
|
||||
self.__name__ = '_populate'
|
||||
|
||||
def __get__(self, inst, owner):
|
||||
# normal decorator stuff
|
||||
# return self for a class
|
||||
# return instantiated copy of self for an object
|
||||
if inst is None:
|
||||
return self
|
||||
func = None
|
||||
if self.func:
|
||||
func = self.func.__get__(inst, owner)
|
||||
return self.__class__(func, self.lookup, inst)
|
||||
|
||||
def __call__(self):
|
||||
# retrieve data from callable function, and apply
|
||||
if not callable(self.func):
|
||||
raise RuntimeError('Poller object called without a source function')
|
||||
req = self.func()
|
||||
if (('language' in req._kwargs) or ('country' in req._kwargs)) \
|
||||
and self.inst._locale.fallthrough:
|
||||
# request specifies a locale filter, and fallthrough is enabled
|
||||
# run a first pass with specified filter
|
||||
if not self.apply(req.readJSON(), False):
|
||||
return
|
||||
# if first pass results in missed data, run a second pass to
|
||||
# fill in the gaps
|
||||
self.apply(req.new(language=None, country=None).readJSON())
|
||||
# re-apply the filtered first pass data over top the second
|
||||
# unfiltered set. this is to work around the issue that the
|
||||
# properties have no way of knowing when they should or
|
||||
# should not overwrite existing data. the cache engine will
|
||||
# take care of the duplicate query
|
||||
self.apply(req.readJSON())
|
||||
|
||||
def apply(self, data, set_nones=True):
    """Apply *data* directly to the bound Element, bypassing the API.

    data      -- dict of JSON field -> value
    set_nones -- when True, fields absent from *data* are stored as
                 None to prevent a repeat scan; when False they are
                 left unset so a later poll can fill them

    Returns True when at least one attribute was left unfilled.
    """
    unfilled = False
    # live pollers treat an explicit None value as missing data; raw
    # population (no source function) accepts None as-is
    skip_nones = callable(self.func)
    for field, attr in self.lookup.items():
        if field in data and not (skip_nones and data[field] is None):
            # field received data: populate the attribute
            setattr(self.inst, attr, data[field])
        elif attr in self.inst._data:
            # no data received, but the Element already holds a value
            continue
        elif set_nones:
            # mark as scanned to indicate absence and avoid re-polling
            setattr(self.inst, attr, None)
        else:
            # leave unset so a later poll can trigger; used when
            # initializing from raw data or on a locale-filtered first
            # pass during fallthrough
            unfilled = True
    return unfilled
class Data(object):
    """
    Basic response definition class.

    Maps a single key of a JSON dictionary received from the API onto an
    attribute of an Element, acting as a data descriptor that lazily
    triggers its poller on first access.
    """

    def __init__(self, field, initarg=None, handler=None, poller=None,
                 raw=True, default=u'', lang=False):
        """
        field   -- dictionary key that selects the data this uses
        initarg -- (optional) marks this field as required when creating
                   a new instance of the mapped Element class; takes an
                   integer giving its position among the input arguments
        handler -- (optional) callable used to process the received
                   value before it is stored on the Element
        poller  -- (optional) callable returning a dict of JSON data,
                   run when this value is requested but not yet defined;
                   several definitions may share one poller, whose data
                   populates every referenced definition by field
        raw     -- (optional) when handler is an Element class, pass the
                   value in via the 'raw' keyword; False forces it to be
                   passed as the first positional argument
        default -- (optional) value stored when the response is None or
                   the empty string
        lang    -- (optional) accepted for interface compatibility
        """
        self.field = field
        self.initarg = initarg
        self.poller = poller
        self.raw = raw
        self.default = default
        self.sethandler(handler)

    def __get__(self, inst, owner):
        # class-level access returns the descriptor itself
        if inst is None:
            return self
        if self.field not in inst._data:
            # not yet populated: trigger the poller, if any
            if self.poller is None:
                return None
            self.poller.__get__(inst, owner)()
        return inst._data[self.field]

    def __set__(self, inst, value):
        # empty or missing responses fall back to the default
        if value is None or value == '':
            value = self.default
        else:
            value = self.handler(value)
        if isinstance(value, Element):
            # propagate locale/session so nested elements poll correctly
            value._locale = inst._locale
            value._session = inst._session
        inst._data[self.field] = value

    def sethandler(self, handler):
        # normalize the handler into a callable, even for passthrough data
        if handler is None:
            self.handler = lambda x: x
        elif isinstance(handler, ElementType) and self.raw:
            self.handler = lambda x: handler(raw=x)
        else:
            self.handler = lambda x: handler(x)
class Datapoint(Data):
    """Response definition for a single scalar value (alias of Data)."""
    pass
class Datalist(Data):
    """
    Response definition class for list data.

    Maps to a key in a JSON dictionary storing a list; each entry is run
    through the handler and the processed results are stored as a list.
    """

    def __init__(self, field, handler=None, poller=None, sort=None, raw=True):
        """
        field   -- dictionary key that selects the data this uses
        handler -- (optional) callable used to process each received
                   entry before it is stored on the Element
        poller  -- (optional) callable returning a dict of JSON data,
                   run when this value is requested but not yet defined;
                   several definitions may share one poller, whose data
                   populates every referenced definition by field
        sort    -- (optional) True to sort the processed list directly,
                   or the name of an attribute of the processed entries
                   to sort by (requires a handler producing objects with
                   that attribute)
        raw     -- (optional) when handler is an Element class, pass each
                   entry in via the 'raw' keyword; False forces it to be
                   passed as the first positional argument
        """
        super(Datalist, self).__init__(field, None, handler, poller, raw)
        self.sort = sort

    def __set__(self, inst, value):
        processed = []
        for entry in (value or []):
            entry = self.handler(entry)
            if isinstance(entry, Element):
                # propagate locale/session so nested elements poll correctly
                entry._locale = inst._locale
                entry._session = inst._session
            processed.append(entry)
        if self.sort is True:
            processed.sort()
        elif self.sort:
            processed.sort(key=lambda item: getattr(item, self.sort))
        inst._data[self.field] = processed
class Datadict(Data):
    """
    Response definition class for dictionary data.

    Maps to a key in a JSON dictionary storing a list of entries; each
    processed entry is stored in a dict keyed by either an item key or
    an attribute of the entry.
    """

    def __init__(self, field, handler=None, poller=None, raw=True,
                 key=None, attr=None):
        """
        field   -- dictionary key that selects the data this uses
        handler -- (optional) callable used to process each received
                   entry before it is stored on the Element
        poller  -- (optional) callable returning a dict of JSON data,
                   run when this value is requested but not yet defined;
                   several definitions may share one poller, whose data
                   populates every referenced definition by field
        key     -- (optional) item key of each processed entry used as
                   the key in the stored dictionary
        attr    -- (optional) attribute name of each processed entry
                   used as the key in the stored dictionary
        raw     -- (optional) when handler is an Element class, pass each
                   entry in via the 'raw' keyword; False forces it to be
                   passed as the first positional argument

        Exactly one of `key` or `attr` must be supplied; TypeError is
        raised otherwise.
        """
        if key and attr:
            raise TypeError("`key` and `attr` cannot both be defined")
        super(Datadict, self).__init__(field, None, handler, poller, raw)
        if key:
            self.getkey = lambda x: x[key]
        elif attr:
            self.getkey = lambda x: getattr(x, attr)
        else:
            raise TypeError("Datadict requires `key` or `attr` be defined "
                            "for populating the dictionary")

    def __set__(self, inst, value):
        processed = {}
        for entry in (value or []):
            entry = self.handler(entry)
            if isinstance(entry, Element):
                # propagate locale/session so nested elements poll correctly
                entry._locale = inst._locale
                entry._session = inst._session
            processed[self.getkey(entry)] = entry
        inst._data[self.field] = processed
class ElementType(type):
    """
    Metaclass that pre-processes Element-derived classes, wiring the
    declared Data definitions to their Poller functions.
    """

    def __new__(mcs, name, bases, attrs):
        # Data/Poller objects defined in parent classes must be cloned
        # and re-processed here to function properly: scan the bases in
        # reverse order (so higher-priority values overwrite lower ones)
        # and collect copies of every such definition.
        data = {}
        pollers = {'_populate': None}

        for base in reversed(bases):
            if isinstance(base, mcs):
                for attr_name, attr in base.__dict__.items():
                    if isinstance(attr, Data):
                        # clone the parent's Data element; unwrap its
                        # poller back to the bare function so it can be
                        # re-wrapped for this class
                        attr = copy(attr)
                        attr.poller = attr.poller.func
                        data[attr_name] = attr
                    elif isinstance(attr, Poller):
                        # keep the bare function from the parent's poller
                        pollers[attr_name] = attr.func
        # definitions on this class itself take precedence
        for attr_name, attr in attrs.items():
            if isinstance(attr, Data):
                data[attr_name] = attr
        if '_populate' in attrs:
            pollers['_populate'] = attrs['_populate']

        # Process every Data attribute: record init arguments, and build
        # a map of which Poller populates which Data points.
        pollermap = dict([(poller_name, []) for poller_name in pollers])
        initargs = []
        for attr_name, definition in data.items():
            definition.name = attr_name
            if definition.initarg:
                initargs.append(definition)
            if definition.poller:
                poller_name = definition.poller.__name__
                if poller_name not in pollermap:
                    pollermap[poller_name] = []
                if poller_name not in pollers:
                    pollers[poller_name] = definition.poller
                pollermap[poller_name].append(definition)
            else:
                # no poller: populated only from raw data
                pollermap['_populate'].append(definition)

        # Wrap each used poller function with a Poller instance and push
        # it into the new class attributes.
        for poller_name, definitions in pollermap.items():
            if len(definitions) == 0:
                continue
            lookup = dict([(d.field, d.name) for d in definitions])
            poller = Poller(pollers[poller_name], lookup)
            attrs[poller_name] = poller
            # backfill the wrapped Poller into each mapped Data object,
            # and ensure the data elements are defined on this new class
            for definition in definitions:
                definition.poller = poller
                attrs[definition.name] = definition

        # sorted tuple of attribute names used for initialization
        attrs['_InitArgs'] = tuple(
            [d.name for d in sorted(initargs, key=lambda d: d.initarg)])
        return type.__new__(mcs, name, bases, attrs)

    def __call__(cls, *args, **kwargs):
        """Construct an Element, from raw data or positional init args."""
        obj = cls.__new__(cls)
        # an explicit locale/session wins; otherwise use the defaults
        if kwargs.get('locale') is not None:
            obj._locale = kwargs['locale']
        else:
            obj._locale = get_locale()
        obj._session = kwargs['session'] if 'session' in kwargs \
                            else get_session()

        obj._data = {}
        if 'raw' in kwargs:
            # 'raw' keyword supplied: populate the object manually;
            # positional arguments are not allowed in this mode
            if len(args) != 0:
                # NOTE(review): upstream message hard-codes "(1 given)"
                # regardless of how many arguments were actually passed
                raise TypeError('__init__() takes exactly 2 arguments (1 given)')
            obj._populate.apply(kwargs['raw'], False)
        else:
            # otherwise the positional arguments must exactly match the
            # Data definitions marked as init arguments
            if len(args) != len(cls._InitArgs):
                raise TypeError('__init__() takes exactly {0} arguments ({1} given)'\
                                .format(len(cls._InitArgs)+1, len(args)+1))
            for attr_name, value in zip(cls._InitArgs, args):
                setattr(obj, attr_name, value)

        obj.__init__()
        return obj
class Element( object ):
|
||||
__metaclass__ = ElementType
|
||||
_lang = 'en'
|
||||
|
||||
Reference in New Issue
Block a user