Commit 35ebfbab authored by Dustyn Gibson's avatar Dustyn Gibson
Browse files

Fix build with new cachecontrol, must have a session now - cc won't create it!

parent 501c27f4
......@@ -479,6 +479,8 @@ class Tvdb:
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
self.config['session'] = requests.Session()
self.config['banners_enabled'] = banners
self.config['actors_enabled'] = actors
......@@ -563,7 +565,8 @@ class Tvdb:
# get response from TVDB
if self.config['cache_enabled']:
session = CacheControl(cache=caches.FileCache(self.config['cache_location']))
session = CacheControl(sess=self.config['session'], cache=caches.FileCache(self.config['cache_location']), cache_etags=False)
if self.config['proxy']:
log().debug("Using proxy for URL: %s" % url)
session.proxies = {
......@@ -571,7 +574,7 @@ class Tvdb:
"https": self.config['proxy'],
}
resp = session.get(url.strip(), cache_auto=True, params=params)
resp = session.get(url.strip(), params=params)
else:
resp = requests.get(url.strip(), params=params)
......
......@@ -327,6 +327,8 @@ class TVRage:
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
self.config['session'] = requests.Session()
if self.config['debug_enabled']:
warnings.warn("The debug argument to tvrage_api.__init__ will be removed in the next version. "
"To enable debug messages, use the following code before importing: "
......@@ -399,7 +401,7 @@ class TVRage:
# get response from TVRage
if self.config['cache_enabled']:
session = CacheControl(cache=caches.FileCache(self.config['cache_location']))
session = CacheControl(sess=self.config['session'], cache=caches.FileCache(self.config['cache_location']), cache_etags=False)
if self.config['proxy']:
log().debug("Using proxy for URL: %s" % url)
session.proxies = {
......@@ -407,7 +409,7 @@ class TVRage:
"https": self.config['proxy'],
}
resp = session.get(url.strip(), cache_auto=True, params=params)
resp = session.get(url.strip(), params=params)
else:
resp = requests.get(url.strip(), params=params)
......
......@@ -25,7 +25,7 @@ class GenericClient(object):
self.response = None
self.auth = None
self.last_time = time.time()
self.session = requests.session()
self.session = requests.Session()
self.session.auth = (self.username, self.password)
def _request(self, method='get', params={}, data=None, files=None):
......
......@@ -57,6 +57,7 @@ from sickbeard import notifiers
from sickbeard import clients
from lib.cachecontrol import CacheControl, caches
from itertools import izip, cycle
import shutil
......@@ -1290,7 +1291,7 @@ def headURL(url, params=None, headers={}, timeout=30, session=None, json=False,
# request session
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')), cache_etags=False)
# request session headers
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
......@@ -1345,7 +1346,7 @@ def getURL(url, post_data=None, params={}, headers={}, timeout=30, session=None,
# request session
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')), cache_etags=False)
# request session headers
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
......@@ -1405,7 +1406,7 @@ def getURL(url, post_data=None, params={}, headers={}, timeout=30, session=None,
def download_file(url, filename, session=None, headers={}):
# create session
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')), cache_etags=False)
# request session headers
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
......
from lib.tvdb_api.tvdb_api import Tvdb
from lib.tvrage_api.tvrage_api import TVRage
import requests
INDEXER_TVDB = 1
INDEXER_TVRAGE = 2
......@@ -25,6 +26,7 @@ indexerConfig[INDEXER_TVDB] = {
'language': 'en',
'useZip': True,
},
'session': requests.Session()
}
indexerConfig[INDEXER_TVRAGE] = {
......@@ -34,6 +36,7 @@ indexerConfig[INDEXER_TVRAGE] = {
'api_params': {'apikey': 'Uhewg1Rr0o62fvZvUIZt',
'language': 'en',
},
'session': requests.Session()
}
# TVDB Indexer Settings
......
......@@ -88,7 +88,7 @@ class BitSoupProvider(generic.TorrentProvider):
}
if not self.session:
self.session = requests.session()
self.session = requests.Session()
try:
response = self.session.post(self.urls['login'], data=login_params, timeout=30)
......
......@@ -66,7 +66,7 @@ class GenericProvider:
self.cache = tvcache.TVCache(self)
self.session = requests.session()
self.session = requests.Session()
self.headers = {'Content-Type': 'application/x-www-form-urlencoded', 'User-Agent': USER_AGENT}
......
......@@ -29,6 +29,7 @@ from sickbeard import logger
from sickbeard import db
from sickbeard import encodingKludge as ek
import os
import requests
exception_dict = {}
anidb_exception_dict = {}
......@@ -175,7 +176,7 @@ def retrieve_exceptions():
loc = sickbeard.indexerApi(indexer).config['scene_loc']
if loc.startswith("http"):
data = helpers.getURL(loc)
data = helpers.getURL(loc, session=sickbeard.indexerApi(indexer).session)
else:
loc = helpers.real_path(ek.ek(os.path.join, ek.ek(os.path.dirname, __file__), loc))
with open(loc, 'r') as file:
......@@ -293,8 +294,11 @@ def _anidb_exceptions_fetcher():
return anidb_exception_dict
xem_session = requests.Session()
def _xem_exceptions_fetcher():
global xem_exception_dict
global xem_session
if shouldRefresh('xem'):
for indexer in sickbeard.indexerApi().indexers:
......@@ -303,7 +307,7 @@ def _xem_exceptions_fetcher():
url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config[
'xem_origin']
parsedJSON = helpers.getURL(url, json=True)
parsedJSON = helpers.getURL(url, session=xem_session, json=True)
if not parsedJSON:
logger.log(u"Check scene exceptions update failed for " + sickbeard.indexerApi(
indexer).name + ", Unable to get URL: " + url, logger.ERROR)
......
......@@ -29,6 +29,7 @@ import urlparse
import test_lib as test
from bs4 import BeautifulSoup
from sickbeard.helpers import getURL
import requests
class TorrentBasicTests(test.SickbeardTestDBCase):
......@@ -36,7 +37,7 @@ class TorrentBasicTests(test.SickbeardTestDBCase):
self.url = 'http://kickass.to/'
searchURL = 'http://kickass.to/usearch/American%20Dad%21%20S08%20-S08E%20category%3Atv/?field=seeders&sorder=desc'
html = getURL(searchURL)
html = getURL(searchURL, session=requests.Session())
if not html:
return
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment