Commit 49ea8f5a authored by echel0n's avatar echel0n

Merge branch 'release/4.0.7'

parents 82f58c70 fb3d485c
......@@ -39,10 +39,10 @@
<div class="h2footer pull-right"><b>Limit:</b>
<select name="limit" id="limit" class="form-control form-control-inline input-sm">
<option value="0" #if $limit == "0" then "selected=\"selected\"" else ""#>All</option>
<option value="100" #if $limit == "100" then "selected=\"selected\"" else ""#>100</option>
<option value="250" #if $limit == "250" then "selected=\"selected\"" else ""#>250</option>
<option value="500" #if $limit == "500" then "selected=\"selected\"" else ""#>500</option>
<option value="0" #if $limit == "0" then "selected=\"selected\"" else ""#>All</option>
</select>
</div>
......@@ -88,4 +88,4 @@
</tbody>
</table>
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_bottom.tmpl")
\ No newline at end of file
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_bottom.tmpl")
......@@ -98,7 +98,7 @@ class Cache:
del self.storage[url]
return
def fetch(self, url, force_update=False, offline=False, request_headers=None):
def fetch(self, url, force_update=False, offline=False, request_headers=None, referrer=None):
"""Return the feed at url.
url - The URL of the feed.
......@@ -112,6 +112,10 @@ class Cache:
cache and never access the remote
URL.
request_headers=None - Add additional request headers to the request
referrer=None - Add a referrer to the request
If there is data for that feed in the cache already, check
the expiration date before accessing the server. If the
cached data has not expired, return it without accessing the
......@@ -175,6 +179,7 @@ class Cache:
agent=self.user_agent,
modified=modified,
etag=etag,
referrer=referrer,
request_headers=request_headers)
status = parsed_result.get('status', None)
......
......@@ -30,47 +30,56 @@ __all__ = ['Guess', 'Language',
# it will then always be available
# with code from http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/
import sys
if sys.version_info[0] >= 3:
PY3 = True
unicode_text_type = str
native_text_type = str
base_text_type = str
def u(x):
return str(x)
def s(x):
return x
class UnicodeMixin(object):
__str__ = lambda x: x.__unicode__()
import binascii
def to_hex(x):
return binascii.hexlify(x).decode('utf-8')
else:
PY3 = False
__all__ = [ str(s) for s in __all__ ] # fix imports for python2
__all__ = [str(s) for s in __all__] # fix imports for python2
unicode_text_type = unicode
native_text_type = str
base_text_type = basestring
def u(x):
if isinstance(x, str):
return x.decode('utf-8')
return unicode(x)
def s(x):
if isinstance(x, unicode):
return x.encode('utf-8')
if isinstance(x, list):
return [ s(y) for y in x ]
return [s(y) for y in x]
if isinstance(x, tuple):
return tuple(s(y) for y in x)
if isinstance(x, dict):
return dict((s(key), s(value)) for key, value in x.items())
return x
class UnicodeMixin(object):
__str__ = lambda x: unicode(x).encode('utf-8')
def to_hex(x):
return x.encode('hex')
from guessit.guess import Guess, merge_all
from guessit.language import Language
from guessit.matcher import IterativeMatcher
......@@ -80,7 +89,6 @@ import logging
log = logging.getLogger(__name__)
class NullHandler(logging.Handler):
    """A logging handler that silently discards every record.

    Installed so that library users who configure no logging do not get
    the "No handlers could be found" warning on Python 2.
    """

    def emit(self, record):
        """Drop *record* without producing any output."""
......@@ -112,7 +120,6 @@ def _guess_filename(filename, filetype):
mtree = IterativeMatcher(filename, filetype=filetype,
opts=['skip_first_year'])
m = mtree.matched()
if 'language' not in m and 'subtitleLanguage' not in m:
......@@ -123,7 +130,6 @@ def _guess_filename(filename, filetype):
opts=['nolanguage', 'nocountry'])
m2 = mtree2.matched()
if m.get('title') is None:
return m
......@@ -156,9 +162,9 @@ def _guess_filename(filename, filetype):
# if filetype is subtitle and the language appears last, just before
# the extension, then it is likely a subtitle language
parts = clean_string(title.root.value).split()
if (m['type'] in ['moviesubtitle', 'episodesubtitle'] and
parts.index(lang.value) == len(parts) - 2):
return m
if (m['type'] in ['moviesubtitle', 'episodesubtitle']):
if lang.value in parts and (parts.index(lang.value) == len(parts) - 2):
return m
# if the language was in the middle of the other potential title,
# keep the other title (eg: The Italian Job), except if it is at the
......@@ -177,7 +183,6 @@ def _guess_filename(filename, filetype):
return warning('Not sure of the title because of the language position')
return m
......@@ -206,6 +211,7 @@ def guess_file_info(filename, filetype, info=None):
elif infotype == 'hash_mpc':
from guessit.hash_mpc import hash_file
try:
result.append(Guess({'hash_mpc': hash_file(filename)},
confidence=1.0))
......@@ -214,6 +220,7 @@ def guess_file_info(filename, filetype, info=None):
elif infotype == 'hash_ed2k':
from guessit.hash_ed2k import hash_file
try:
result.append(Guess({'hash_ed2k': hash_file(filename)},
confidence=1.0))
......@@ -222,6 +229,7 @@ def guess_file_info(filename, filetype, info=None):
elif infotype.startswith('hash_'):
import hashlib
hashname = infotype[5:]
try:
hasher = getattr(hashlib, hashname)()
......@@ -259,7 +267,6 @@ def guess_file_info(filename, filetype, info=None):
if 'series' in result and 'country' in result:
result['series'] += ' (%s)' % result['country'].alpha2.upper()
return result
......
......@@ -13,17 +13,15 @@ class TraktAPI():
self.timeout = timeout
def validateAccount(self):
return self.traktRequest("account/test/%APIKEY%")
return self.traktRequest("account/test/%APIKEY%", method='POST')
def traktRequest(self, url, data=None):
def traktRequest(self, url, data=None, method='GET'):
base_url = self.protocol + 'api.trakt.tv/%s' % url.replace('%APIKEY%', self.apikey).replace('%USER%',
self.username)
# request the URL from trakt and parse the result as json
try:
resp = requests.get(base_url,
auth=HTTPBasicAuth(self.username, self.password),
data=data if data else [])
resp = requests.request(method, base_url, auth=HTTPBasicAuth(self.username, self.password), data=data if data else [])
# check for http errors and raise if any are present
resp.raise_for_status()
......
......@@ -818,42 +818,8 @@ def md5_for_file(filename, block_size=2 ** 16):
def get_lan_ip():
"""
Simple function to get LAN localhost_ip
http://stackoverflow.com/questions/11735821/python-get-localhost-ip
"""
if os.name != "nt":
import fcntl
import struct
def get_interface_ip(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s',
ifname[:15]))[20:24])
ip = socket.gethostbyname(socket.gethostname())
if ip.startswith("127.") and os.name != "nt":
interfaces = [
"eth0",
"eth1",
"eth2",
"wlan0",
"wlan1",
"wifi0",
"ath0",
"ath1",
"ppp0",
]
for ifname in interfaces:
try:
ip = get_interface_ip(ifname)
print ifname, ip
break
except IOError:
pass
return ip
try:return [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")][0]
except:return socket.gethostname()
def check_url(url):
"""
......@@ -1154,7 +1120,7 @@ def _getTempDir():
return os.path.join(tempfile.gettempdir(), "sickrage-%s" % (uid))
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
def getURL(url, post_data=None, params=None, headers={}, timeout=30, session=None, json=False):
"""
Returns a byte-string retrieved from the url provider.
"""
......@@ -1164,10 +1130,8 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
# request session headers
req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}
if headers:
req_headers.update(headers)
session.headers.update(req_headers)
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
session.headers.update(headers)
# request session ssl verify
session.verify = False
......@@ -1176,11 +1140,6 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
session.params = params
try:
# Remove double-slashes from url
parsed = list(urlparse.urlparse(url))
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
url = urlparse.urlunparse(parsed)
# request session proxies
if sickbeard.PROXY_SETTING:
logger.log("Using proxy for url: " + url, logger.DEBUG)
......
......@@ -161,13 +161,14 @@ class Logger(object):
regex = "^(%s)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$" % curError.time
maxlines = 50
pastebin_url = None
for i, x in enumerate(reversed(log_data)):
x = ek.ss(x)
match = re.match(regex, x)
if match:
level = match.group(2)
if reverseNames[level] >= ERROR:
if reverseNames[level] == ERROR:
paste_data = "".join(log_data[len(log_data) - i - 50:])
pastebin_url = PastebinAPI().paste('f59b8e9fa1fc2d033e399e6c7fb09d19', paste_data)
break
......@@ -192,10 +193,8 @@ class Logger(object):
if not sickbeard.GIT_AUTOISSUES:
ui.notifications.message('Your issue ticket #%s was submitted successfully!' % issue.number)
classes.ErrorViewer.clear()
except Exception as e:
pass
finally:
classes.ErrorViewer.clear()
class Wrapper(object):
......
......@@ -65,11 +65,11 @@ class TraktNotifier:
data[trakt_id] = ep_obj.show.indexerid
# update library
trakt_api.traktRequest("show/episode/library/%APIKEY%", data)
trakt_api.traktRequest("show/episode/library/%APIKEY%", data, method='POST')
# remove from watchlist
if sickbeard.TRAKT_REMOVE_WATCHLIST:
trakt_api.traktRequest("show/episode/unwatchlist/%APIKEY%", data)
trakt_api.traktRequest("show/episode/unwatchlist/%APIKEY%", data, method='POST')
if sickbeard.TRAKT_REMOVE_SERIESLIST:
data = {
......@@ -84,7 +84,7 @@ class TraktNotifier:
if trakt_id == 'tvdb_id':
data['shows'][0][trakt_id] = ep_obj.show.indexerid
trakt_api.traktRequest("show/unwatchlist/%APIKEY%", data)
trakt_api.traktRequest("show/unwatchlist/%APIKEY%", data, method='POST')
# Remove all episodes from episode watchlist
# Start by getting all episodes in the watchlist
......@@ -105,7 +105,7 @@ class TraktNotifier:
ep = {'season': episodes['season'], 'episode': episodes['number']}
data_show['episodes'].append(ep)
trakt_api.traktRequest("show/episode/unwatchlist/%APIKEY%", data_show)
trakt_api.traktRequest("show/episode/unwatchlist/%APIKEY%", data_show, method='POST')
except (traktException, traktAuthException, traktServerBusy) as e:
logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
......
......@@ -43,6 +43,7 @@ __all__ = ['ezrss',
import sickbeard
import generic
from sickbeard import logger
from os import sys
from random import shuffle
......
......@@ -27,15 +27,15 @@ from sickbeard import classes, show_name_helpers, helpers
from sickbeard import exceptions, logger
from sickbeard.common import *
from sickbeard import tvcache
from lib.dateutil.parser import parse as parseDate
class Animezb(generic.NZBProvider):
def __init__(self):
generic.NZBProvider.__init__(self, "Animezb")
self.urls = {'base_url': 'https://animezb.com/'}
self.url = self.urls['base_url']
self.supportsBacklog = False
self.supportsAbsoluteNumbering = True
self.anime_only = True
......@@ -44,8 +44,6 @@ class Animezb(generic.NZBProvider):
self.cache = AnimezbCache(self)
self.url = 'https://animezb.com/'
def isEnabled(self):
return self.enabled
......
......@@ -38,19 +38,19 @@ from unidecode import unidecode
class BitSoupProvider(generic.TorrentProvider):
urls = {'base_url': 'https://www.bitsoup.me',
'login': 'https://www.bitsoup.me/takelogin.php',
'detail': 'https://www.bitsoup.me/details.php?id=%s',
'search': 'https://www.bitsoup.me/browse.php?search=%s%s',
'download': 'https://bitsoup.me/%s',
}
def __init__(self):
generic.TorrentProvider.__init__(self, "BitSoup")
self.supportsBacklog = True
self.urls = {'base_url': 'https://www.bitsoup.me',
'login': 'https://www.bitsoup.me/takelogin.php',
'detail': 'https://www.bitsoup.me/details.php?id=%s',
'search': 'https://www.bitsoup.me/browse.php?search=%s%s',
'download': 'https://bitsoup.me/%s',
}
self.url = self.urls['base_url']
self.supportsBacklog = True
self.enabled = False
self.username = None
self.password = None
......@@ -60,8 +60,6 @@ class BitSoupProvider(generic.TorrentProvider):
self.cache = BitSoupCache(self)
self.url = self.urls['base_url']
self.categories = "&c42=1&c45=1&c49=1&c7=1"
def isEnabled(self):
......
......@@ -47,7 +47,10 @@ class BTNProvider(generic.TorrentProvider):
self.cache = BTNCache(self)
self.url = "http://api.btnapps.net"
self.urls = {'base_url': "http://api.btnapps.net"}
self.url = self.urls['base_url']
def isEnabled(self):
return self.enabled
......
......@@ -36,17 +36,18 @@ from sickbeard import helpers
class EZRSSProvider(generic.TorrentProvider):
def __init__(self):
self.urls = {'base_url': 'https://www.ezrss.it/'}
self.url = self.urls['base_url']
generic.TorrentProvider.__init__(self, "EZRSS")
self.supportsBacklog = True
self.enabled = False
self.ratio = None
self.cache = EZRSSCache(self)
self.url = 'https://www.ezrss.it/'
def isEnabled(self):
return self.enabled
......
......@@ -44,7 +44,9 @@ class Fanzub(generic.NZBProvider):
self.cache = FanzubCache(self)
self.url = 'https://fanzub.com/'
self.urls = {'base_url': 'https://fanzub.com/'}
self.url = self.urls['base_url']
def isEnabled(self):
return self.enabled
......
......@@ -39,12 +39,6 @@ from sickbeard.helpers import sanitizeSceneName
class FreshOnTVProvider(generic.TorrentProvider):
urls = {'base_url': 'http://freshon.tv/',
'login': 'http://freshon.tv/login.php?action=makelogin',
'detail': 'http://freshon.tv/details.php?id=%s',
'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
'download': 'http://freshon.tv/download.php?id=%s&type=torrent',
}
def __init__(self):
......@@ -64,7 +58,15 @@ class FreshOnTVProvider(generic.TorrentProvider):
self.cache = FreshOnTVCache(self)
self.urls = {'base_url': 'http://freshon.tv/',
'login': 'http://freshon.tv/login.php?action=makelogin',
'detail': 'http://freshon.tv/details.php?id=%s',
'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
'download': 'http://freshon.tv/download.php?id=%s&type=torrent',
}
self.url = self.urls['base_url']
self.cookies = None
def isEnabled(self):
......@@ -90,7 +92,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
return True
if self._uid and self._hash:
requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
else:
login_params = {'username': self.username,
'password': self.password,
......@@ -107,18 +109,18 @@ class FreshOnTVProvider(generic.TorrentProvider):
return False
if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response.text):
logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
return False
logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
return False
try:
if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
self.cookies = {'uid': self._uid,
'pass': self._hash
}
return True
self.cookies = {'uid': self._uid,
'pass': self._hash
}
return True
except:
pass
......
......@@ -23,6 +23,7 @@ import datetime
import os
import re
import itertools
import urllib
import sickbeard
import requests
......@@ -33,7 +34,6 @@ from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.common import Quality
from sickbeard import clients
from hachoir_parser import createParser
from base64 import b16encode, b32decode
......@@ -46,6 +46,9 @@ class GenericProvider:
# these need to be set in the subclass
self.providerType = None
self.name = name
self.proxy = ProviderProxy()
self.urls = {}
self.url = ''
self.show = None
......@@ -63,11 +66,7 @@ class GenericProvider:
self.session = requests.session()
self.headers = {
# Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
#otherwise session might be broken and download fail, asking again for authentication
#'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
'User-Agent': USER_AGENT}
self.headers = {'User-Agent': USER_AGENT}
def getID(self):
return GenericProvider.makeID(self.name)
......@@ -125,7 +124,8 @@ class GenericProvider:
if not self._doLogin():
return
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
self.headers.update({'Referer': self.proxy.getProxyURL()})
return helpers.getURL(self.proxy._buildURL(url), post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json)
def downloadResult(self, result):
......@@ -188,15 +188,18 @@ class GenericProvider:
# primitive verification of torrents, just make sure we didn't get a text file or something
if self.providerType == GenericProvider.TORRENT:
parser = createParser(file_name)
if parser:
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except:
pass
if mime_type == 'application/x-bittorrent':
return True
try:
parser = createParser(file_name)
if parser:
mime_type = parser._getMimeType()
try:
parser.stream._input.close()
except:
pass
if mime_type == 'application/x-bittorrent':
return True
except Exception as e:
logger.log(u"Failed to validate torrent file: " + ex(e), logger.DEBUG)
logger.log(u"Result is not a valid torrent file", logger.WARNING)
return False
......@@ -471,3 +474,44 @@ class TorrentProvider(GenericProvider):
GenericProvider.__init__(self, name)
self.providerType = GenericProvider.TORRENT
class ProviderProxy:
    """Optional Glype-style web proxy wrapper for provider requests.

    When enabled, provider URLs are rewritten to go through a chosen
    public proxy (``self.url``) using the Glype ``browse.php?u=`` scheme.
    Disabled by default, in which case URLs and regexes pass through
    unmodified (aside from stripping the ``//1`` placeholder).
    """

    def __init__(self):
        # Proxy flavour and the Glype URL-building pieces.
        self.Type = 'GlypeProxy'
        self.param = 'browse.php?u='
        self.option = '&b=32&f=norefer'
        # Off until the user picks a proxy in the provider settings.
        self.enabled = False
        self.url = None

        # Known public Glype proxies the user can choose from.
        self.urls = {
            'getprivate.eu (NL)': 'http://getprivate.eu/',
            'hideme.nl (NL)': 'http://hideme.nl/',
            'proxite.eu (DE)': 'http://proxite.eu/',
            'interproxy.net (EU)': 'http://interproxy.net/',
        }

    def isEnabled(self):
        """Return True if provider calls should be routed via the proxy."""
        return self.enabled

    def getProxyURL(self):
        """Return the proxy URL chosen via the provider setting (as str)."""
        return str(self.url)

    def _buildURL(self, url):
        """Return the proxified form of *url*; unchanged when disabled."""
        if not self.isEnabled():
            return url
        url = ''.join([self.getProxyURL(), self.param, urllib.quote_plus(url), self.option])
        logger.log(u"Proxified URL: " + url, logger.DEBUG)
        return url

    def _buildRE(self, regx):
        """Return the proxified regex string.

        The ``//1`` token in provider regexes is a placeholder: it is
        replaced by the proxy option string when proxying (with ``&``
        HTML-escaped), and simply removed otherwise.
        """
        if not self.isEnabled():
            return re.sub('//1', '', regx)
        regx = re.sub('//1', self.option, regx).replace('&', '&amp;')
        logger.log(u"Proxified REGEX: " + regx, logger.DEBUG)
        return regx
\ No newline at end of file
# This file is part of SickRage.
#
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
import time
import datetime
import urllib
import urlparse
import sys
import generic
import sickbeard
import generic
from lib import requests
from lib.requests import exceptions
from sickbeard import classes
from sickbeard import logger, tvcache, exceptions
from sickbeard import helpers
from sickbeard import clients
from sickbeard.common import cpu_presets
from sickbeard.exceptions import ex, AuthException
from sickbeard import logger, tvcache
from sickbeard.exceptions import AuthException
try:
import json
except ImportError:
......@@ -50,10 +43,13 @@ class HDBitsProvider(generic.TorrentProvider):
self.cache = HDBitsCache(self)
self.url = 'https://hdbits.org'
self.search_url = 'https://hdbits.org/api/torrents'
self.rss_url = 'https://hdbits.org/api/torrents'
self.download_url = 'https://hdbits.org/download.php?'
self.urls = {'base_url': 'https://hdbits.org',
'search': 'https://hdbits.org/api/torrents',
'rss': 'https://hdbits.org/api/torrents',
'download': 'https://hdbits.org/download.php?'
}
self.url = self.urls['base_url']
def isEnabled(self):
return self.enabled
......@@ -91,7 +87,7 @@ class HDBitsProvider(generic.TorrentProvider):
title = u'' + title
title = title.replace(' ', '.')
url = self.download_url + urllib.urlencode({'id': item['id'], 'passkey': self.passkey})
url = self.urls['download'] + urllib.urlencode({'id': item['id'], 'passkey': self.passkey})