Commit 3f23fd2b authored by echel0n's avatar echel0n
Browse files

refactored remaining database calls to use custom database calls

parent cf0821cd
# Changelog
- * b23202e - 2017-12-11: Fixed issue #151 - using unicode instead of str to post-process results
- * 42d7ad6 - 2017-12-11: refactored remaining database calls to use custom database calls
- * cf0821c - 2017-12-11: Fixed issue #151 - using unicode instead of str to post-process results
- * 4323d84 - 2017-12-10: Refactored database calls, resolves memory usage issues
- * 0d8b4d1 - 2017-12-10: Small memory footprint improvement
- * f2d7dc6 - 2017-12-10: Release v9.2.61
......
......@@ -51,7 +51,7 @@ class BlackAndWhiteList(object):
:param values: Values to be inserted in table
"""
for value in values:
sickrage.app.main_db.db.insert({
sickrage.app.main_db.insert({
'_t': table,
'show_id': self.show_id,
'keywork': value
......@@ -86,7 +86,7 @@ class BlackAndWhiteList(object):
:param table: database table remove keywords from
"""
try:
sickrage.app.main_db.db.delete(sickrage.app.main_db.db.get(table, self.show_id, with_doc=True)['doc'])
sickrage.app.main_db.delete(sickrage.app.main_db.get(table, self.show_id, with_doc=True)['doc'])
except RecordNotFound:
pass
......
......@@ -43,7 +43,7 @@ class NameCache(object):
def put(self, name, indexer_id=0):
"""
Adds the show & tvdb id to the scene_names table in cache.db.
Adds the show & tvdb id to the scene_names table in cache db
:param name: The show name to cache
:param indexer_id: the TVDB id that this show should be cached with (can be None/0 for unknown)
......@@ -58,14 +58,14 @@ class NameCache(object):
if not len([x for x in sickrage.app.cache_db.get_many('scene_names', name)
if x['indexer_id'] == indexer_id]):
# insert name into cache
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'scene_names',
'indexer_id': indexer_id,
'name': name
})
except RecordNotFound:
# insert name into cache
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'scene_names',
'indexer_id': indexer_id,
'name': name
......@@ -73,7 +73,7 @@ class NameCache(object):
def get(self, name):
"""
Looks up the given name in the scene_names table in cache.db.
Looks up the given name in the scene_names table in cache db
:param name: The show name to look up.
:return: the TVDB id that resulted from the cache lookup or None if the show wasn't found in the cache
......@@ -86,7 +86,7 @@ class NameCache(object):
"""
Deletes all "unknown" entries from the cache (names with indexer_id of 0).
"""
[sickrage.app.cache_db.db.delete(x) for x in
[sickrage.app.cache_db.delete(x) for x in
sickrage.app.cache_db.all('scene_names')
if x['indexer_id'] in [indexerid, 0]]
......@@ -107,7 +107,7 @@ class NameCache(object):
pass
# insert name into cache
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'scene_names',
'indexer_id': indexer_id,
'name': name
......
......@@ -41,7 +41,7 @@ class TVCache(object):
def clear(self):
if self.shouldClearCache():
[sickrage.app.cache_db.db.delete(x) for x in
[sickrage.app.cache_db.delete(x) for x in
sickrage.app.cache_db.get_many('providers', self.providerID)]
def _get_title_and_url(self, item):
......@@ -120,7 +120,7 @@ class TVCache(object):
@property
def last_update(self):
try:
dbData = sickrage.app.cache_db.db.get('lastUpdate', self.providerID, with_doc=True)['doc']
dbData = sickrage.app.cache_db.get('lastUpdate', self.providerID, with_doc=True)['doc']
lastTime = int(dbData["time"])
if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): lastTime = 0
except RecordNotFound:
......@@ -131,11 +131,11 @@ class TVCache(object):
@last_update.setter
def last_update(self, toDate):
try:
dbData = sickrage.app.cache_db.db.get('lastUpdate', self.providerID, with_doc=True)['doc']
dbData = sickrage.app.cache_db.get('lastUpdate', self.providerID, with_doc=True)['doc']
dbData['time'] = int(time.mktime(toDate.timetuple()))
sickrage.app.cache_db.db.update(dbData)
sickrage.app.cache_db.update(dbData)
except RecordNotFound:
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'lastUpdate',
'provider': self.providerID,
'time': int(time.mktime(toDate.timetuple()))
......@@ -144,7 +144,7 @@ class TVCache(object):
@property
def last_search(self):
try:
dbData = sickrage.app.cache_db.db.get('lastSearch', self.providerID, with_doc=True)['doc']
dbData = sickrage.app.cache_db.get('lastSearch', self.providerID, with_doc=True)['doc']
lastTime = int(dbData["time"])
if lastTime > int(time.mktime(datetime.datetime.today().timetuple())): lastTime = 0
except RecordNotFound:
......@@ -155,11 +155,11 @@ class TVCache(object):
@last_search.setter
def last_search(self, toDate):
try:
dbData = sickrage.app.cache_db.db.get('lastSearch', self.providerID, with_doc=True)['doc']
dbData = sickrage.app.cache_db.get('lastSearch', self.providerID, with_doc=True)['doc']
dbData['time'] = int(time.mktime(toDate.timetuple()))
sickrage.app.cache_db.db.update(dbData)
sickrage.app.cache_db.update(dbData)
except RecordNotFound:
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'lastUpdate',
'provider': self.providerID,
'time': int(time.mktime(toDate.timetuple()))
......@@ -222,7 +222,7 @@ class TVCache(object):
}
# add to internal database
sickrage.app.cache_db.db.insert(dbData)
sickrage.app.cache_db.insert(dbData)
# add to external database
if sickrage.app.config.enable_api_providers_cache and not self.provider.private:
......
......@@ -207,9 +207,9 @@ class srDatabase(object):
try:
for x in self.db.all(index_name):
try:
self.db.get('id', x.get('_id'), with_doc=True)
self.get('id', x.get('_id'), with_doc=True)
except (ValueError, TypeError) as e:
self.db.delete(self.db.get(index_name, x.get('key'), with_doc=True)['doc'])
self.delete(self.get(index_name, x.get('key'), with_doc=True)['doc'])
except Exception as e:
if index_name in self.db.indexes_names:
self.db.destroy_index(self.db.indexes_names[index_name])
......@@ -265,10 +265,10 @@ class srDatabase(object):
if isinstance(v, list):
for d in v:
d.update({'_t': t_name})
self.db.insert(d)
self.insert(d)
else:
v.update({'_t': t_name})
self.db.insert(v)
self.insert(v)
sickrage.app.log.info('Total migration took %s', (time.time() - migrate_start))
sickrage.app.log.info('=' * 30)
......@@ -293,11 +293,23 @@ class srDatabase(object):
if os.path.isfile(self.old_db_path + '-shm'):
os.rename(self.old_db_path + '-shm', '{}-shm.{}_old'.format(self.old_db_path, random))
def all(self, *args):
return (x['doc'] for x in self.db.all(*args, with_doc=True))
def all(self, *args, **kwargs):
    """Iterate every record of an index, yielding each embedded document.

    :param args: positional arguments forwarded to the underlying
        CodernityDB ``all()`` call (typically the index name).
    :param kwargs: extra keyword arguments forwarded unchanged.
    :return: generator of the ``'doc'`` dict of each record.
    """
    # Force with_doc=True by merging into kwargs rather than writing
    # ``self.db.all(*args, **kwargs, with_doc=True)``: a keyword argument
    # after **kwargs is a SyntaxError on Python 2 (requires Python 3.5+),
    # and merging also avoids a duplicate-keyword TypeError should a
    # caller pass with_doc explicitly.
    kwargs['with_doc'] = True
    return (x['doc'] for x in self.db.all(*args, **kwargs))
def get_many(self, *args):
return (x['doc'] for x in self.db.get_many(*args, with_doc=True))
def get_many(self, *args, **kwargs):
    """Iterate all records matching a key, yielding each embedded document.

    :param args: positional arguments forwarded to the underlying
        CodernityDB ``get_many()`` call (typically index name and key).
    :param kwargs: extra keyword arguments forwarded unchanged.
    :return: generator of the ``'doc'`` dict of each matching record.
    """
    # Merge with_doc=True into kwargs instead of placing it after
    # **kwargs in the call: keyword-after-**kwargs is a SyntaxError on
    # Python 2, and merging prevents a duplicate-keyword TypeError if a
    # caller supplies with_doc itself.
    kwargs['with_doc'] = True
    return (x['doc'] for x in self.db.get_many(*args, **kwargs))
def delete(self, *args):
    """Delete a record, delegating directly to the wrapped database.

    :param args: positional arguments handed straight through to the
        underlying CodernityDB ``delete()`` call (typically the record
        dict to remove).
    :return: whatever the backend's ``delete()`` reports.
    """
    backend = self.db
    return backend.delete(*args)
def get(self, *args, **kwargs):
    """Fetch a single record, delegating directly to the wrapped database.

    :param args: positional arguments forwarded to the underlying
        CodernityDB ``get()`` call (typically index name and key).
    :param kwargs: keyword arguments forwarded unchanged (e.g.
        ``with_doc=True``).
    :return: whatever the backend's ``get()`` returns.
    """
    result = self.db.get(*args, **kwargs)
    return result
def update(self, *args):
    """Update an existing record, delegating to the wrapped database.

    :param args: positional arguments handed straight through to the
        underlying CodernityDB ``update()`` call (typically the modified
        record dict).
    :return: whatever the backend's ``update()`` reports.
    """
    backend = self.db
    return backend.update(*args)
def insert(self, *args):
    """Insert a new record, delegating to the wrapped database.

    :param args: positional arguments handed straight through to the
        underlying CodernityDB ``insert()`` call (typically the new
        record dict).
    :return: whatever the backend's ``insert()`` reports.
    """
    backend = self.db
    return backend.insert(*args)
# Monkey-Patch storage to suppress logging messages
IU_Storage.get = Custom_IU_Storage_get
......@@ -54,5 +54,5 @@ class CacheDB(srDatabase):
# Wipe table if versions are different
if previous_version < current_version:
for x in self.all(index_name):
self.db.delete(x)
self.delete(x)
super(CacheDB, self).check_versions(index_name, current_version, previous_version)
......@@ -18,13 +18,13 @@
from __future__ import unicode_literals
from hashlib import md5
import hashlib
from CodernityDB.hash_index import HashIndex
class CacheLastUpdateIndex(HashIndex):
_version = 2
_version = 3
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
......@@ -32,14 +32,14 @@ class CacheLastUpdateIndex(HashIndex):
def make_key_value(self, data):
if data.get('_t') == 'lastUpdate' and data.get('provider'):
return md5(data.get('provider')).hexdigest(), None
return hashlib.md5(data.get('provider')).hexdigest(), None
def make_key(self, key):
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()
class CacheLastSearchIndex(HashIndex):
_version = 2
_version = 3
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
......@@ -47,10 +47,10 @@ class CacheLastSearchIndex(HashIndex):
def make_key_value(self, data):
if data.get('_t') == 'lastSearch' and data.get('provider'):
return md5(data.get('provider')).hexdigest(), None
return hashlib.md5(data.get('provider')).hexdigest(), None
def make_key(self, key):
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()
class CacheSceneExceptionsIndex(HashIndex):
......@@ -69,7 +69,7 @@ class CacheSceneExceptionsIndex(HashIndex):
class CacheSceneNamesIndex(HashIndex):
_version = 2
_version = 3
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
......@@ -77,14 +77,14 @@ class CacheSceneNamesIndex(HashIndex):
def make_key_value(self, data):
if data.get('_t') == 'scene_names' and data.get('name'):
return md5(data.get('name')).hexdigest(), None
return hashlib.md5(data.get('name')).hexdigest(), None
def make_key(self, key):
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()
class CacheNetworkTimezonesIndex(HashIndex):
_version = 2
_version = 3
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
......@@ -92,14 +92,14 @@ class CacheNetworkTimezonesIndex(HashIndex):
def make_key_value(self, data):
if data.get('_t') == 'network_timezones' and data.get('network_name'):
return md5(data.get('network_name')).hexdigest(), None
return hashlib.md5(data.get('network_name')).hexdigest(), None
def make_key(self, key):
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()
class CacheSceneExceptionsRefreshIndex(HashIndex):
_version = 2
_version = 3
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
......@@ -107,10 +107,10 @@ class CacheSceneExceptionsRefreshIndex(HashIndex):
def make_key_value(self, data):
if data.get('_t') == 'scene_exceptions_refresh' and data.get('list'):
return md5(data.get('list')).hexdigest(), None
return hashlib.md5(data.get('list')).hexdigest(), None
def make_key(self, key):
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()
class CacheProvidersIndex(HashIndex):
......@@ -122,7 +122,7 @@ class CacheProvidersIndex(HashIndex):
def make_key_value(self, data):
if data.get('_t') == 'providers' and data.get('provider'):
return md5(data.get('provider')).hexdigest(), None
return hashlib.md5(data.get('provider')).hexdigest(), None
def make_key(self, key):
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()
......@@ -44,5 +44,5 @@ class FailedDB(srDatabase):
# Wipe table if versions are different
if previous_version < current_version:
for x in self.all(index_name):
self.db.delete(x)
self.delete(x)
super(FailedDB, self).check_versions(index_name, current_version, previous_version)
\ No newline at end of file
......@@ -18,13 +18,13 @@
from __future__ import unicode_literals
from hashlib import md5
import hashlib
from CodernityDB.hash_index import HashIndex
class FailedIndex(HashIndex):
_version = 2
_version = 3
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
......@@ -32,10 +32,10 @@ class FailedIndex(HashIndex):
def make_key_value(self, data):
if data.get('_t') == 'failed' and data.get('release'):
return md5(data.get('release')).hexdigest(), None
return hashlib.md5(data.get('release')).hexdigest(), None
def make_key(self, key):
return md5(key.encode('utf-8')).hexdigest()
return hashlib.md5(key.encode('utf-8')).hexdigest()
class FailedHistoryIndex(HashIndex):
_version = 1
......
......@@ -91,7 +91,7 @@ class MainDB(srDatabase):
pass
if dirty:
self.db.update(show)
self.update(show)
checked += [show['indexer_id']]
......@@ -114,7 +114,7 @@ class MainDB(srDatabase):
pass
if dirty:
self.db.update(ep)
self.update(ep)
checked += [ep['showid']]
......@@ -129,7 +129,7 @@ class MainDB(srDatabase):
for dupe in list(self.get_many('tv_shows', show['indexer_id']))[1::]:
sickrage.app.log.info("Deleting duplicate show with id: {}".format(dupe["indexer_id"]))
self.db.delete(dupe)
self.delete(dupe)
checked += [show['indexer_id']]
......@@ -145,7 +145,7 @@ class MainDB(srDatabase):
for dupe in list(self.get_many('tv_episodes', ep['showid']))[1::]:
if dupe['indexerid'] == ep['indexerid']:
sickrage.app.log.info("Deleting duplicate episode with id: {}".format(dupe["indexerid"]))
self.db.delete(dupe)
self.delete(dupe)
checked += [ep['showid']]
......@@ -153,6 +153,6 @@ class MainDB(srDatabase):
def fix_orphaned_episodes(self):
for ep in self.all('tv_episodes'):
if not self.db.get('tv_shows', ep['showid'], with_doc=True)['doc']:
if not self.get('tv_shows', ep['showid'], with_doc=True)['doc']:
sickrage.app.log.info("Deleting orphan episode with id: {}".format(ep["indexerid"]))
self.db.delete(ep)
self.delete(ep)
......@@ -52,7 +52,7 @@ def shouldRefresh(exList):
MAX_REFRESH_AGE_SECS = 86400 # 1 day
try:
dbData = sickrage.app.cache_db.db.get('scene_exceptions_refresh', exList, with_doc=True)['doc']
dbData = sickrage.app.cache_db.get('scene_exceptions_refresh', exList, with_doc=True)['doc']
lastRefresh = int(dbData['last_refreshed'])
return int(time.mktime(datetime.datetime.today().timetuple())) > lastRefresh + MAX_REFRESH_AGE_SECS
except RecordNotFound:
......@@ -66,11 +66,11 @@ def setLastRefresh(exList):
:param exList: exception list to set refresh time
"""
try:
dbData = sickrage.app.cache_db.db.get('scene_exceptions_refresh', exList, with_doc=True)['doc']
dbData = sickrage.app.cache_db.get('scene_exceptions_refresh', exList, with_doc=True)['doc']
dbData['last_refreshed'] = int(time.mktime(datetime.datetime.today().timetuple()))
sickrage.app.cache_db.db.update(dbData)
sickrage.app.cache_db.update(dbData)
except RecordNotFound:
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'scene_exceptions_refresh',
'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple())),
'list': exList
......@@ -80,7 +80,7 @@ def setLastRefresh(exList):
def retrieve_exceptions(get_xem=True, get_anidb=True):
"""
Looks up the exceptions on github, parses them into a dict, and inserts them into the
scene_exceptions table in cache.db. Also clears the scene name cache.
scene_exceptions table in cache db and also clears the scene name cache.
"""
updated_exceptions = False
......@@ -133,7 +133,7 @@ def retrieve_exceptions(get_xem=True, get_anidb=True):
for cur_exception, curSeason in dict([(key, d[key]) for d in cur_exception_dict for key in d]).items():
if cur_exception not in existing_exceptions:
updated_exceptions = True
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'scene_exceptions',
'indexer_id': cur_indexer_id,
'show_name': cur_exception,
......@@ -254,7 +254,7 @@ def update_scene_exceptions(indexer_id, scene_exceptions, season=-1):
"""
Given a indexer_id, and a list of all show scene exceptions, update the db.
"""
[sickrage.app.cache_db.db.delete(x) for x in sickrage.app.cache_db.get_many('scene_exceptions', indexer_id)
[sickrage.app.cache_db.delete(x) for x in sickrage.app.cache_db.get_many('scene_exceptions', indexer_id)
if x['season'] == season]
sickrage.app.log.info("Updating scene exceptions")
......@@ -265,7 +265,7 @@ def update_scene_exceptions(indexer_id, scene_exceptions, season=-1):
exceptionsCache[indexer_id][season] = scene_exceptions
for cur_exception in scene_exceptions:
sickrage.app.cache_db.db.insert({
sickrage.app.cache_db.insert({
'_t': 'scene_exceptions',
'indexer_id': indexer_id,
'show_name': cur_exception,
......
......@@ -209,9 +209,9 @@ def set_scene_numbering(indexer_id, indexer, season=0, episode=0, absolute_numbe
if len(dbData):
dbData[0]['scene_season'] = sceneSeason
dbData[0]['scene_episode'] = sceneEpisode
sickrage.app.main_db.db.update(dbData[0])
sickrage.app.main_db.update(dbData[0])
else:
sickrage.app.main_db.db.insert({
sickrage.app.main_db.insert({
'_t': 'scene_numbering',
'indexer': indexer,
'indexer_id': indexer_id,
......@@ -230,9 +230,9 @@ def set_scene_numbering(indexer_id, indexer, season=0, episode=0, absolute_numbe
if len(dbData):
dbData[0]['scene_absolute_number'] = sceneAbsolute
sickrage.app.main_db.db.update(dbData[0])
sickrage.app.main_db.update(dbData[0])
else:
sickrage.app.main_db.db.insert({
sickrage.app.main_db.insert({
'_t': 'scene_numbering',
'indexer': indexer,
'indexer_id': indexer_id,
......@@ -479,7 +479,7 @@ def xem_refresh(indexer_id, indexer, force=False):
MAX_REFRESH_AGE_SECS = 86400 # 1 day
try:
dbData = sickrage.app.main_db.db.get('xem_refresh', indexer_id, with_doc=True)['doc']
dbData = sickrage.app.main_db.get('xem_refresh', indexer_id, with_doc=True)['doc']
lastRefresh = try_int(dbData['last_refreshed'])
refresh = int(time.mktime(datetime.datetime.today().timetuple())) > lastRefresh + MAX_REFRESH_AGE_SECS
except RecordNotFound:
......@@ -491,11 +491,11 @@ def xem_refresh(indexer_id, indexer, force=False):
# mark refreshed
try:
dbData = sickrage.app.main_db.db.get('xem_refresh', indexer_id, with_doc=True)['doc']
dbData = sickrage.app.main_db.get('xem_refresh', indexer_id, with_doc=True)['doc']
dbData['last_refreshed'] = int(time.mktime(datetime.datetime.today().timetuple()))
sickrage.app.main_db.db.update(dbData)
sickrage.app.main_db.update(dbData)
except RecordNotFound:
sickrage.app.main_db.db.insert({
sickrage.app.main_db.insert({
'_t': 'xem_refresh',
'indexer': indexer,
'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple())),
......@@ -513,7 +513,7 @@ def xem_refresh(indexer_id, indexer, force=False):
except Exception:
for x in sickrage.app.main_db.get_many('tv_episodes', indexer_id):
x['scene_season'], x['scene_episode'], x['scene_absolute_number'] = 0, 0, 0
sickrage.app.main_db.db.update(x)
sickrage.app.main_db.update(x)
return
# XEM API URL
......@@ -546,7 +546,7 @@ def xem_refresh(indexer_id, indexer, force=False):
dbData['scene_episode'] = entry['scene_2']['episode']
dbData['scene_absolute_number'] = entry['scene_2']['absolute']
sickrage.app.main_db.db.update(dbData)
sickrage.app.main_db.update(dbData)
except Exception as e:
sickrage.app.log.warning(
......
......@@ -183,14 +183,14 @@ class BacklogSearcher(object):
dbData = [x for x in sickrage.app.main_db.all('info')]
if len(dbData) == 0:
sickrage.app.main_db.db.insert({
sickrage.app.main_db.insert({
'_t': 'info',
'last_backlog': str(when),
'last_indexer': 0
})
else:
dbData[0]['last_backlog'] = str(when)
sickrage.app.main_db.db.update(dbData[0])
sickrage.app.main_db.update(dbData[0])
def get_backlog_cycle_time(self):
return max([sickrage.app.config.daily_searcher_freq * 4, 30])
......@@ -295,7 +295,7 @@ class ProperSearcher(object):
dbData = [x for x in sickrage.app.main_db.all('info')]
if len(dbData) == 0:
sickrage.app.main_db.db.insert({
sickrage.app.main_db.insert({
'_t': 'info',
'last_backlog': 0,
'last_indexer': 0,
......@@ -303,7 +303,7 @@ class ProperSearcher(object):
})
else:
dbData[0]['last_proper_search'] = str(when)
sickrage.app.main_db.db.update(dbData[0])
sickrage.app.main_db.update(dbData[0])
@staticmethod
def _get_lastProperSearch():
......
......@@ -382,7 +382,7 @@ class TVEpisode(object):
if len(dbData) > 1:
for ep in dbData:
sickrage.app.main_db.db.delete(ep)
sickrage.app.main_db.delete(ep)
return False
elif len(dbData) == 0:
sickrage.app.log.debug("%s: Episode S%02dE%02d not found in the database" % (
......@@ -719,7 +719,7 @@ class TVEpisode(object):
# delete myself from the DB
sickrage.app.log.debug("Deleting myself from the database")
[sickrage.app.main_db.db.delete(x) for x in
[sickrage.app.main_db.delete(x) for x in
sickrage.app.main_db.get_many('tv_episodes', self.show.indexerid)
if x['season'] == self.season and x['episode'] == self.episode]
......@@ -783,9 +783,9 @@ class TVEpisode(object):
if x['indexerid'] == self.indexerid][0]
dbData.update(tv_episode)
sickrage.app.main_db.db.update(dbData)
sickrage.app.main_db.update(dbData)
except:
sickrage.app.main_db.db.insert(tv_episode)
sickrage.app.main_db.insert(tv_episode)
def fullPath(self):
if self.location is None or self.location == "":
......
......@@ -935,7 +935,7 @@ class TVShow(object):
'last_update'
]
dbData = sickrage.app.main_db.db.get('imdb_info', self.indexerid, with_doc=True)['doc']
dbData = sickrage.app.main_db.get('imdb_info', self.indexerid, with_doc=True)['doc']
self._imdb_info = {k: dbData[k] for k in imdb_info_keys if k in dbData}