Commit c2d334b5 authored by Dustyn Gibson's avatar Dustyn Gibson
Browse files

Update initial schema for failed.db and cache.db, Big clean up of logging on...

Update initial schema for failed.db and cache.db; big cleanup of startup logging by moving some INFO messages to DEBUG.
parent 32afca87
......@@ -457,7 +457,7 @@ class SickRage(object):
Populates the showList with shows from the database
"""
logger.log(u"Loading initial show list")
logger.log(u"Loading initial show list", logger.DEBUG)
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_shows")
......
......@@ -21,15 +21,19 @@ from sickbeard import db
# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
def test(self):
return self.hasTable("lastUpdate")
return self.hasTable("db_version")
def execute(self):
queries = [
("CREATE TABLE db_version (db_version INTEGER);",),
("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",),
("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);",),
("CREATE TABLE db_version (db_version INTEGER);",),
("INSERT INTO db_version (db_version) VALUES (?)", 1),
("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);",),
("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);",),
("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);",),
("CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);",),
("INSERT INTO db_version(db_version) VALUES (1);",),
]
for query in queries:
if len(query) == 1:
......@@ -88,4 +92,4 @@ class AddSceneExceptionsRefresh(AddSceneExceptionsCustom):
def execute(self):
self.connection.action(
"CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)")
\ No newline at end of file
"CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)")
......@@ -23,13 +23,14 @@ from sickbeard.common import Quality
# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
def test(self):
return self.hasTable('failed')
return self.hasTable('db_version')
def execute(self):
queries = [
('CREATE TABLE failed (release TEXT);',),
('CREATE TABLE failed (release TEXT, size NUMERIC, provider TEXT);',),
('CREATE TABLE history (date NUMERIC, size NUMERIC, release TEXT, provider TEXT, old_status NUMERIC DEFAULT (?), showid NUMERIC DEFAULT -1, season NUMERIC DEFAULT -1, episode NUMERIC DEFAULT -1);', Quality.NONE),
('CREATE TABLE db_version (db_version INTEGER);',),
('INSERT INTO db_version (db_version) VALUES (?)', 1),
('INSERT INTO db_version (db_version) VALUES (1);',),
]
for query in queries:
if len(query) == 1:
......@@ -43,7 +44,7 @@ class SizeAndProvider(InitialSchema):
return self.hasColumn('failed', 'size') and self.hasColumn('failed', 'provider')
def execute(self):
self.addColumn('failed', 'size')
self.addColumn('failed', 'size', 'NUMERIC')
self.addColumn('failed', 'provider', 'TEXT', '')
......
......@@ -61,7 +61,7 @@ class MainSanityCheck(db.DBSanityCheck):
self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
else:
logger.log(u"No duplicate show, check passed")
logger.log(u"No duplicate show, check passed", logger.DEBUG)
def fix_duplicate_episodes(self):
......@@ -85,7 +85,7 @@ class MainSanityCheck(db.DBSanityCheck):
self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_dupe_id["episode_id"]])
else:
logger.log(u"No duplicate episode, check passed")
logger.log(u"No duplicate episode, check passed", logger.DEBUG)
def fix_orphan_episodes(self):
......@@ -99,7 +99,7 @@ class MainSanityCheck(db.DBSanityCheck):
self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
else:
logger.log(u"No orphan episodes, check passed")
logger.log(u"No orphan episodes, check passed", logger.DEBUG)
def fix_missing_table_indexes(self):
if not self.connection.select("PRAGMA index_info('idx_indexer_id')"):
......@@ -142,7 +142,7 @@ class MainSanityCheck(db.DBSanityCheck):
[common.UNAIRED, cur_unaired["episode_id"]])
else:
logger.log(u"No UNAIRED episodes, check passed")
logger.log(u"No UNAIRED episodes, check passed", logger.DEBUG)
def fix_tvrage_show_statues(self):
status_map = {
......@@ -174,7 +174,7 @@ class MainSanityCheck(db.DBSanityCheck):
self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
[common.UNKNOWN, cur_ep["episode_id"]])
else:
logger.log(u"No MALFORMED episode statuses, check passed")
logger.log(u"No MALFORMED episode statuses, check passed", logger.DEBUG)
def backupDatabase(version):
......
......@@ -266,7 +266,7 @@ class DBSanityCheck(object):
# ===============
def upgradeDatabase(connection, schema):
logger.log(u"Checking database structure...", logger.INFO)
logger.log(u"Checking database structure..." + connection.filename, logger.DEBUG)
_processUpgrade(connection, schema)
......@@ -287,7 +287,7 @@ def _processUpgrade(connection, upgradeClass):
instance = upgradeClass(connection)
logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
if not instance.test():
logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.INFO)
logger.log(u"Database upgrade required: " + prettyName(upgradeClass.__name__), logger.DEBUG)
try:
instance.execute()
except sqlite3.DatabaseError, e:
......
......@@ -1388,7 +1388,7 @@ def clearCache(force=False):
# clean out cache directory, remove everything > 12 hours old
if sickbeard.CACHE_DIR:
logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR)
logger.log(u"Trying to clean cache folder " + sickbeard.CACHE_DIR, logger.DEBUG)
# Does our cache_dir exists
if not ek.ek(os.path.isdir, sickbeard.CACHE_DIR):
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment