Torrents are found and appended to the results argument.

This commit is contained in:
salfab
2013-09-29 12:44:53 +02:00
committed by Ruud
parent f121db059e
commit 87754047fa
2 changed files with 58 additions and 60 deletions

View File

@@ -18,11 +18,19 @@ config = [{
'type': 'enabler',
'default': True
},
{
'name': 'domain',
'advanced': True,
'label': 'Proxy server',
'description': 'Domain for requests, keep empty to let CouchPotato pick.',
{
'name': 'username',
'label': 'Username',
'type': 'string',
'default': '',
'description': 'The user name for your ILT account',
},
{
'name': 'password',
'label': 'Password',
'type': 'password',
'default': '',
'description': 'The password for your ILT account.',
},
{
'name': 'seed_ratio',

View File

@@ -14,8 +14,13 @@ log = CPLog(__name__)
class ILoveTorrents(TorrentMagnetProvider):
urls = {
'detail': '%s/torrent/%s',
'search': '%s/browse.php?search=%s&page=%s&cat=%s'
'domain': 'www.ilovetorrents.me',
'download': 'http://www.ilovetorrents.me/%s',
'detail': '%s/torrent/%s',
'search': '%s/browse.php?search=%s&page=%s&cat=%s',
'test' : 'http://www.ilovetorrents.me/',
'login' : 'http://www.ilovetorrents.me/takelogin.php',
'login_check' : 'http://www.ilovetorrents.me'
}
cat_ids = [
@@ -28,12 +33,8 @@ class ILoveTorrents(TorrentMagnetProvider):
disable_provider = False
http_time_between_calls = 0
proxy_list = [
'http://www.ilovetorrents.me',
]
def __init__(self):
    """Pin the provider's domain from the class-level `urls` mapping,
    then run the base provider initialisation.

    NOTE(review): the previous `self.domain = self.conf('domain')` was a
    dead assignment — its value was overwritten on the very next line,
    and the 'domain' setting it read was removed from the config in the
    same change — so it has been dropped.
    """
    self.domain = self.urls['domain']
    super(ILoveTorrents, self).__init__()
def _searchOnTitle(self, title, movie, quality, results):
@@ -43,31 +44,30 @@ class ILoveTorrents(TorrentMagnetProvider):
cats = self.getCatId(quality['identifier'])
while page < total_pages:
search_url = self.urls['search'] % (self.getDomain(), tryUrlencode('"%s" %s' % (title, movie['library']['year'])), page, cats[0]))
movieTitle = tryUrlencode('"%s" %s' % (title, movie['library']['year']))
search_url = self.urls['search'] % (self.getDomain(), movieTitle, page, cats[0])
page += 1
data = self.getHTMLData(search_url)
data = self.getHTMLData(search_url, opener = self.login_opener)
if data:
try:
soup = BeautifulSoup(data, "html5lib")
results_table = soup.find('table', attrs = {'class': 'koptekst'})
if not results_table:
return
try:
pagelinks = soup.findAll(href=re.compile("page"))
pagelinks = soup.findAll(href=re.compile("page"))
pageNumbers = [int(re.search('page=(?P<pageNumber>.+'')', i["href"]).group('pageNumber')) for i in pagelinks]
total_pages = max(pageNumbers)
except:
pass
entries = results_table.find_all('tr')
for result in entries[1:]:
link = result.find(href = re.compile('details.php'))['href']
download = result.find('a', href = re.compile('download.php'))['href']
@@ -80,68 +80,58 @@ class ILoveTorrents(TorrentMagnetProvider):
continue
if link and download:
def extra_score(item):
trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]
return confirmed + trusted + vip + moderated
return confirmed + trusted + vip + moderated
id = re.search('id=(?P<id>\d+)&', link).group('id')
url = self.urls['download'] % (download)
detail_url = self.getDomain("/"+link)
fileSize = self.parseSize(size)
results.append({
'id': re.search('/(?P<id>\d+)/', link['href']).group('id'),
'name': link.string,
'url': download['href'],
'detail_url': self.getDomain(link['href']),
'size': self.parseSize(size),
'id': id,
'name': link,
'url': url,
'detail_url': detail_url,
'size': fileSize,
'seeders': tryInt(result.find_all('td')[2].string),
'leechers': tryInt(result.find_all('td')[3].string),
'extra_score': extra_score,
'get_more_info': self.getMoreInfo
'extra_score': extra_score,
'get_more_info': self.getMoreInfo
})
log.info(results)
except:
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
def getLoginParams(self):
    """Return the url-encoded POST body used to log in to ILoveTorrents."""
    credentials = {
        'username': self.conf('username'),
        'password': self.conf('password'),
        'submit': 'Welcome to ILT',
    }
    return tryUrlencode(credentials)
def isEnabled(self):
    # Usable only when the base provider checks pass AND a domain is
    # available (getDomain() returns None/falsy when none is resolved).
    return super(ILoveTorrents, self).isEnabled() and self.getDomain()
def getDomain(self, url = ''):
    """Resolve a working proxy domain for the provider.

    If no domain is cached yet, probe each entry in `proxy_list`
    (skipping any probed within the last 14 days, per the timestamp
    stored via Env.prop) and keep the first responsive one on
    `self.domain`.

    NOTE(review): this looks copied from the Pirate Bay provider — the
    Env key prefix is 'tpb_proxy.' and the success check searches the
    response for 'title="Pirate Search"', a TPB page marker; confirm
    these are intentional for ILoveTorrents.
    """
    if not self.domain:
        for proxy in self.proxy_list:
            prop_name = 'tpb_proxy.%s' % proxy
            # 1209600 s == 14 days: don't re-probe a recently checked proxy.
            last_check = float(Env.prop(prop_name, default = 0))
            if last_check > time.time() - 1209600:
                continue
            data = ''
            try:
                # Short timeout; a dead proxy should not stall the search.
                data = self.urlopen(proxy, timeout = 3, show_error = False)
            except:
                log.debug('Failed tpb proxy %s', proxy)
            if 'title="Pirate Search"' in data:
                log.debug('Using proxy: %s', proxy)
                self.domain = proxy
                break
            # Record the probe time so this proxy is skipped for 14 days.
            Env.prop(prop_name, time.time())
    if not self.domain:
        log.error('No ILT proxies left, please add one in settings, or let us know which one to add on the forum.')
        return None
def getDomain(self, url = ''):
    """Return the provider's base domain — host cleaned and stripped of
    any trailing slash — with `url` appended."""
    host = cleanHost(self.domain).rstrip('/')
    return host + url
def getMoreInfo(self, item):
    """Fill in `item['description']` from the torrent's detail page.

    Fetches the detail page through the provider cache (keyed
    'ilt.<id>', timeout 25920000 s = 300 days — presumably getCache
    downloads `item['detail_url']` on a miss; project helper), extracts
    the text of the <div class="nfo"> block if present, stores it on
    the item and returns the item.

    NOTE(review): the duplicate fetch keyed 'tpb.%s' — a leftover line
    from the TPB provider whose result was immediately discarded — has
    been removed; only the 'ilt.%s' fetch remains.
    """
    log.info('Getting more info')
    full_description = self.getCache('ilt.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
    html = BeautifulSoup(full_description)
    nfo_pre = html.find('div', attrs = {'class': 'nfo'})
    description = toUnicode(nfo_pre.text) if nfo_pre else ''
    item['description'] = description
    return item
def loginSuccess(self, output):
    """A response that contains a logout link means the login succeeded."""
    lowered = output.lower()
    return 'logout.php' in lowered
loginCheckSuccess = loginSuccess