Commit ddffe651 authored by Dustyn Gibson's avatar Dustyn Gibson
Browse files

Proxy and session fixes:

Move duplicate code to a new method, to set up a default set of session parameters
Remove referer from header if not using proxy
Add referer when using global proxy setting too, not just provider proxy
use `with closing()` when reading from a stream based get, because we could return without reading all of the content
No need to define self.session in providers, it's defined in generic __init__
tvcache.getRSSFeed only proxied 1 scheme, use the url for both until we can add a sickbeard.HTTPS_PROXY_SETTING
parent c5a1180d
......@@ -38,6 +38,7 @@ import datetime
import errno
import ast
import operator
from contextlib import closing
import sickbeard
import subliminal
......@@ -1290,35 +1291,55 @@ def codeDescription(status_code):
return 'unknown'
def _setUpSession(session, headers):
    """
    Returns a session initialized with default cache and parameter settings.

    :param session: an existing requests Session (or None to start fresh);
                    it is wrapped, not mutated in place
    :param headers: caller-supplied headers applied on top of the defaults
    :return: a CacheControl-wrapped session with user agent, encoding,
             SSL verification, redirects and the global proxy configured
    """
    # request session: wrap in a file-backed HTTP cache under the app cache dir
    cache_dir = sickbeard.CACHE_DIR or _getTempDir()
    session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')), cache_etags=False)

    # request session: clear a residual Referer left by a previous (proxied)
    # request, unless the caller explicitly supplies one
    if 'Referer' in session.headers and 'Referer' not in headers:
        session.headers.pop('Referer')

    # request session headers: defaults first, then caller overrides
    session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
    session.headers.update(headers)

    # request session ssl verify: check certificates against the bundled CA file
    session.verify = certifi.where()

    # request session: follow redirects by default
    session.allow_redirects = True

    # request session proxies: apply the global proxy setting unless a provider
    # proxy already installed its own Referer header.
    # NOTE: the `url` variable is not in scope here, so the debug log reports
    # the proxy address instead (logging `url` would raise a NameError).
    if 'Referer' not in session.headers and sickbeard.PROXY_SETTING:
        logger.log("Using global proxy: " + sickbeard.PROXY_SETTING, logger.DEBUG)
        # default to http:// when the setting has no scheme
        scheme, address = urllib2.splittype(sickbeard.PROXY_SETTING)
        address = sickbeard.PROXY_SETTING if scheme else 'http://' + sickbeard.PROXY_SETTING
        session.proxies = {
            "http": address,
            "https": address,
        }
        session.headers.update({'Referer': address})

    # drop a stale Content-Type; callers re-set it when POSTing
    if 'Content-Type' in session.headers:
        session.headers.pop('Content-Type')

    return session
def headURL(url, params=None, headers={}, timeout=30, session=None, json=False, proxyGlypeProxySSLwarning=None):
"""
Checks if URL is valid, without reading it
"""
if 'Content-Type' in session.headers:
session.headers.pop('Content-Type')
session = _setUpSession(session, headers)
session.params = params
resp = session.head(url, timeout=timeout, allow_redirects=True)
try:
resp = session.head(url, timeout=timeout)
if not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
......@@ -1348,42 +1369,22 @@ def headURL(url, params=None, headers={}, timeout=30, session=None, json=False,
return False
def getURL(url, post_data=None, params={}, headers={}, timeout=30, session=None, json=False, proxyGlypeProxySSLwarning=None):
"""
Returns a byte-string retrieved from the url provider.
"""
# request session
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')), cache_etags=False)
# request session headers
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
session.headers.update(headers)
# request session ssl verify
session.verify = certifi.where()
# request session paramaters
session = _setUpSession(session, headers)
session.params = params
try:
# request session proxies
if sickbeard.PROXY_SETTING:
logger.log("Using proxy for url: " + url, logger.DEBUG)
session.proxies = {
"http": sickbeard.PROXY_SETTING,
"https": sickbeard.PROXY_SETTING,
}
# decide if we get or post data to server
if post_data:
session.headers.update({'Content-Type': 'application/x-www-form-urlencoded'})
resp = session.post(url, data=post_data, timeout=timeout)
else:
if 'Content-Type' in session.headers:
session.headers.pop('Content-Type')
resp = session.get(url, timeout=timeout, allow_redirects=True)
resp = session.get(url, timeout=timeout)
if not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
......@@ -1415,47 +1416,29 @@ def getURL(url, post_data=None, params={}, headers={}, timeout=30, session=None,
return resp.content if not json else resp.json()
def download_file(url, filename, session=None, headers={}):
# create session
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')), cache_etags=False)
# request session headers
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
session.headers.update(headers)
# request session ssl verify
session.verify = certifi.where()
def download_file(url, filename, session=None, headers={}):
# request session streaming
session = _setUpSession(session, headers)
session.stream = True
# request session proxies
if sickbeard.PROXY_SETTING:
logger.log("Using proxy for url: " + url, logger.DEBUG)
session.proxies = {
"http": sickbeard.PROXY_SETTING,
"https": sickbeard.PROXY_SETTING,
}
try:
resp = session.get(url)
if not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
return False
with closing(session.get(url)) as resp:
if not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
resp.status_code) + ': ' + codeDescription(resp.status_code), logger.DEBUG)
return False
try:
with open(filename, 'wb') as fp:
for chunk in resp.iter_content(chunk_size=1024):
if chunk:
fp.write(chunk)
fp.flush()
chmodAsParent(filename)
except:
logger.log(u"Problem setting permissions or writing file to: %s" % filename, logger.WARNING)
try:
with open(filename, 'wb') as fp:
for chunk in resp.iter_content(chunk_size=1024):
if chunk:
fp.write(chunk)
fp.flush()
chmodAsParent(filename)
except:
logger.log(u"Problem setting permissions or writing file to: %s" % filename, logger.WARNING)
except requests.exceptions.HTTPError, e:
_remove_file_failed(filename)
......
......@@ -137,13 +137,13 @@ class GenericProvider:
if self.proxy.isEnabled():
self.headers.update({'Referer': self.proxy.getProxyURL()})
# GlypeProxy SSL warning message
self.proxyGlypeProxySSLwarning = self.proxy.getProxyURL() + 'includes/process.php?action=sslagree&submit=Continue anyway...'
url = self.proxy._buildURL(url)
else:
if 'Referer' in self.headers:
self.headers.pop('Referer')
self.proxyGlypeProxySSLwarning = None
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
return helpers.getURL(self.proxy._buildURL(url), post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json, proxyGlypeProxySSLwarning=self.proxyGlypeProxySSLwarning)
......@@ -192,15 +192,15 @@ class GenericProvider:
if self.proxy.isEnabled():
self.headers.update({'Referer': self.proxy.getProxyURL()})
# GlypeProxy SSL warning message
self.proxyGlypeProxySSLwarning = self.proxy.getProxyURL() + 'includes/process.php?action=sslagree&submit=Continue anyway...'
else:
if 'Referer' in self.headers:
self.headers.pop('Referer')
self.proxyGlypeProxySSLwarning = None
for url in urls:
if self.proxy.isEnabled():
url = self.proxy._buildURL(url)
if helpers.headURL(url, session=self.session, headers=self.headers, proxyGlypeProxySSLwarning=self.proxyGlypeProxySSLwarning):
if helpers.headURL(self.proxy._buildURL(url), session=self.session, headers=self.headers,
proxyGlypeProxySSLwarning=self.proxyGlypeProxySSLwarning):
return url
return u''
......@@ -216,9 +216,14 @@ class GenericProvider:
urls, filename = self._makeURL(result)
if self.proxy.isEnabled():
self.headers.update({'Referer': self.proxy.getProxyURL()})
elif 'Referer' in self.headers:
self.headers.pop('Referer')
for url in urls:
logger.log(u"Downloading a result from " + self.name + " at " + url)
if helpers.download_file(url, filename, session=self.session, headers=self.headers):
if helpers.download_file(self.proxy._buildURL(url), filename, session=self.session, headers=self.headers):
if self._verify_download(filename):
logger.log(u"Saved result to " + filename, logger.INFO)
return True
......
......@@ -54,7 +54,6 @@ class RarbgProvider(generic.TorrentProvider):
generic.TorrentProvider.__init__(self, "Rarbg")
self.enabled = False
self.session = None
self.supportsBacklog = True
self.ratio = None
self.minseed = None
......@@ -105,14 +104,13 @@ class RarbgProvider(generic.TorrentProvider):
if self.token and self.tokenExpireDate and datetime.datetime.now() < self.tokenExpireDate:
return True
self.session = requests.Session()
resp_json = None
try:
response = self.session.get(self.urls['token'], timeout=30, headers=self.headers)
response.raise_for_status()
resp_json = response.json()
except (RequestException, BaseSSLError) as e:
except (RequestException) as e:
logger.log(u'Unable to connect to {name} provider: {error}'.format(name=self.name, error=ex(e)), logger.ERROR)
return False
......
......@@ -144,12 +144,11 @@ class TVCache():
elif sickbeard.PROXY_SETTING:
logger.log("Using proxy for url: " + url, logger.DEBUG)
scheme, address = urllib2.splittype(sickbeard.PROXY_SETTING)
if not scheme:
scheme = 'http'
address = 'http://' + sickbeard.PROXY_SETTING
else:
address = sickbeard.PROXY_SETTING
handlers = [urllib2.ProxyHandler({scheme: address})]
address = sickbeard.PROXY_SETTING if scheme else 'http://' + sickbeard.PROXY_SETTING
handlers = [urllib2.ProxyHandler({'http': address, 'https': address})]
self.provider.headers.update({'Referer': address})
elif 'Referer' in self.provider.headers:
self.provider.headers.pop('Referer')
return RSSFeeds(self.providerID).getFeed(
self.provider.proxy._buildURL(url),
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment