move index etc functions from fpdb_simple.py into fpdb_db.py

sqlcoder 2009-06-07 20:45:09 +01:00
parent 45a303eb25
commit 7e8b80948c
3 changed files with 355 additions and 339 deletions
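In short, the bulk-import helpers (prepareBulkImport, afterBulkImport, createAllIndexes, dropAllIndexes, analyzeDB) and the indexes/foreignKeys data structures move out of fpdb_simple.py and become methods and attributes of the fpdb_db class, so callers no longer pass the fpdb_db object into module-level functions. A minimal before/after sketch of the calling pattern (illustrative only; fdb stands for an fpdb_db instance):

# before this commit: module-level helpers in fpdb_simple took the db wrapper as an argument
import fpdb_simple
fpdb_simple.prepareBulkImport(fdb)
fpdb_simple.afterBulkImport(fdb)
fpdb_simple.analyzeDB(fdb)

# after this commit: the same operations are methods on the fpdb_db object
fdb.prepareBulkImport()
fdb.afterBulkImport()
fdb.analyzeDB()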


@@ -31,6 +31,110 @@ class fpdb_db:
self.MYSQL_INNODB = 2
self.PGSQL = 3
self.SQLITE = 4
# Data Structures for index and foreign key creation
# drop_code is an int with possible values: 0 - don't drop for bulk import
# 1 - drop during bulk import
# db differences:
# - note that mysql automatically creates indexes on constrained columns when
# foreign keys are created, while postgres does not. Hence the much longer list
# of indexes is required for postgres.
# all primary keys are left on all the time
#
# table column drop_code
self.indexes = [
[ ] # no db with index 0
, [ ] # no db with index 1
, [ # indexes for mysql (list index 2)
{'tab':'Players', 'col':'name', 'drop':0}
, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
]
, [ # indexes for postgres (list index 3)
{'tab':'Boardcards', 'col':'handId', 'drop':0}
, {'tab':'Gametypes', 'col':'siteId', 'drop':0}
, {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09
, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
, {'tab':'HandsActions', 'col':'handsPlayerId', 'drop':0}
, {'tab':'HandsPlayers', 'col':'handId', 'drop':1}
, {'tab':'HandsPlayers', 'col':'playerId', 'drop':1}
, {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
, {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
, {'tab':'HudCache', 'col':'playerId', 'drop':0}
, {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'Players', 'col':'siteId', 'drop':1}
, {'tab':'Players', 'col':'name', 'drop':0}
, {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
, {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
, {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0}
, {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
]
]
self.foreignKeys = [
[ ] # no db with index 0
, [ ] # no db with index 1
, [ # foreign keys for mysql
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
, {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
, [ # foreign keys for postgres
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
, {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
]
# MySQL Notes:
# "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
# - creates index handId on <thistable>.handId
# alter table t drop foreign key fk
# alter table t add foreign key (fkcol) references tab(rcol)
# alter table t add constraint c foreign key (fkcol) references tab(rcol)
# (fkcol is used for foreign key name)
# mysql to list indexes:
# SELECT table_name, index_name, non_unique, column_name
# FROM INFORMATION_SCHEMA.STATISTICS
# WHERE table_name = 'tbl_name'
# AND table_schema = 'db_name'
# ORDER BY table_name, index_name, seq_in_index
#
# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
# ALTER TABLE tab DROP INDEX idx
# mysql to list fks:
# SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
# FROM information_schema.KEY_COLUMN_USAGE
# WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
# AND REFERENCED_TABLE_NAME is not null
# ORDER BY TABLE_NAME, COLUMN_NAME;
# an error like the following may indicate a missing object:
# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
# PG notes:
# To add a foreign key constraint to a table:
# ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
# ALTER TABLE tab DROP CONSTRAINT zipchk
#
# Note: index names must be unique across a schema
# CREATE INDEX idx ON tab(col)
# DROP INDEX idx
#end def __init__
def do_connect(self, config=None):
@@ -215,11 +319,239 @@ class fpdb_db:
self.drop_tables()
self.create_tables()
fpdb_simple.createAllIndexes(self)
self.createAllIndexes()
self.db.commit()
print "Finished recreating tables"
#end def recreate_tables
def prepareBulkImport(self):
"""Drop some indexes/foreign keys to prepare for bulk import.
Currently keeping the standalone indexes as needed to import quickly"""
# self is a fpdb_db object including backend, db, cursor, sql variables
if self.backend == self.PGSQL:
self.db.set_isolation_level(0) # allow table/index operations to work
for fk in self.foreignKeys[self.backend]:
if fk['drop'] == 1:
if self.backend == self.MYSQL_INNODB:
self.cursor.execute("SELECT constraint_name " +
"FROM information_schema.KEY_COLUMN_USAGE " +
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
"WHERE 1=1 " +
"AND table_name = %s AND column_name = %s " +
"AND referenced_table_name = %s " +
"AND referenced_column_name = %s ",
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
cons = self.cursor.fetchone()
#print "preparebulk: cons=", cons
if cons:
print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
try:
self.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
except:
pass
elif self.backend == self.PGSQL:
# DON'T FORGET TO RECREATE THEM!!
print "dropping pg fk", fk['fktab'], fk['fkcol']
try:
# try to lock table to see if index drop will work:
# hmmm, tested by commenting out rollback in grapher. lock seems to work but
# then drop still hangs :-( does work in some tests though??
# will leave code here for now pending further tests/enhancement ...
self.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
#print "after lock, status:", self.cursor.statusmessage
#print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
try:
self.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
except:
if "does not exist" not in str(sys.exc_value):
print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
% (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
except:
print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
% (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
else:
print "Only MySQL and Postgres supported so far"
return -1
for idx in self.indexes[self.backend]:
if idx['drop'] == 1:
if self.backend == self.MYSQL_INNODB:
print "dropping mysql index ", idx['tab'], idx['col']
try:
# apparently nowait is not implemented in mysql so this just hangs if there are locks
# preventing the index drop :-(
self.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
except:
pass
elif self.backend == self.PGSQL:
# DON'T FORGET TO RECREATE THEM!!
print "dropping pg index ", idx['tab'], idx['col']
try:
# try to lock table to see if index drop will work:
self.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
#print "after lock, status:", self.cursor.statusmessage
try:
# table locked ok so index drop should work:
#print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
self.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
#print "dropped pg index ", idx['tab'], idx['col']
except:
if "does not exist" not in str(sys.exc_value):
print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
% (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
except:
print "warning: index %s_%s_idx not dropped %s, continuing ..." \
% (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
else:
print "Error: Only MySQL and Postgres supported so far"
return -1
if self.backend == self.PGSQL:
self.db.set_isolation_level(1) # go back to normal isolation level
self.db.commit() # seems to clear up errors if there were any in postgres
#end def prepareBulkImport
def afterBulkImport(self):
"""Re-create any dropped indexes/foreign keys after bulk import"""
# self is a fpdb_db object including backend, db, cursor, sql variables
if self.backend == self.PGSQL:
self.db.set_isolation_level(0) # allow table/index operations to work
for fk in self.foreignKeys[self.backend]:
if fk['drop'] == 1:
if self.backend == self.MYSQL_INNODB:
self.cursor.execute("SELECT constraint_name " +
"FROM information_schema.KEY_COLUMN_USAGE " +
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
"WHERE 1=1 " +
"AND table_name = %s AND column_name = %s " +
"AND referenced_table_name = %s " +
"AND referenced_column_name = %s ",
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
cons = self.cursor.fetchone()
print "afterbulk: cons=", cons
if cons:
pass
else:
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
try:
self.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
+ fk['fkcol'] + ") references " + fk['rtab'] + "("
+ fk['rcol'] + ")")
except:
pass
elif self.backend == self.PGSQL:
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
try:
self.cursor.execute("alter table " + fk['fktab'] + " add constraint "
+ fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
+ " foreign key (" + fk['fkcol']
+ ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
for idx in self.indexes[self.backend]:
if idx['drop'] == 1:
if self.backend == self.MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
self.cursor.execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
except:
pass
elif self.backend == self.PGSQL:
# pass
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
self.cursor.execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
except:
print " ERROR! :-("
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if self.backend == self.PGSQL:
self.db.set_isolation_level(1) # go back to normal isolation level
self.db.commit() # seems to clear up errors if there were any in postgres
#end def afterBulkImport
def createAllIndexes(self):
"""Create new indexes"""
if self.backend == self.PGSQL:
self.db.set_isolation_level(0) # allow table/index operations to work
for idx in self.indexes[self.backend]:
if self.backend == self.MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
self.cursor.execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
except:
pass
elif self.backend == self.PGSQL:
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
self.cursor.execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
except:
print " ERROR! :-("
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if self.backend == self.PGSQL:
self.db.set_isolation_level(1) # go back to normal isolation level
#end def createAllIndexes
def dropAllIndexes(self):
"""Drop all standalone indexes (i.e. not including primary keys or foreign keys)
using list of indexes in indexes data structure"""
# maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
if self.backend == self.PGSQL:
self.db.set_isolation_level(0) # allow table/index operations to work
for idx in self.indexes[self.backend]:
if self.backend == self.MYSQL_INNODB:
print "dropping mysql index ", idx['tab'], idx['col']
try:
self.cursor.execute( "alter table %s drop index %s"
, (idx['tab'],idx['col']) )
except:
pass
elif self.backend == self.PGSQL:
print "dropping pg index ", idx['tab'], idx['col']
# mod to use tab_col for index name?
try:
self.cursor.execute( "drop index %s_%s_idx"
% (idx['tab'],idx['col']) )
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if self.backend == self.PGSQL:
self.db.set_isolation_level(1) # go back to normal isolation level
#end def dropAllIndexes
def analyzeDB(self):
"""Do whatever the DB can offer to update index/table statistics"""
if self.backend == self.PGSQL:
self.db.set_isolation_level(0) # allow vacuum to work
try:
self.cursor.execute("vacuum analyze")
except:
print "Error during vacuum"
self.db.set_isolation_level(1) # go back to normal isolation level
self.db.commit()
#end def analyzeDB
# Currently uses an exclusive lock on the Hands table as a global lock
# Return values are Unix style, 0 for success, positive integers for errors
# 1 = generic error
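For context, the importer (second file in this commit) drives the new methods around a bulk import roughly as follows. This is a condensed, illustrative sketch of the flow visible in the diff below, not code taken verbatim from the commit (fdb stands for the importer's self.fdb):

if settings['dropIndexes'] == 'drop':
    fdb.prepareBulkImport()     # drop the FKs/indexes flagged with drop_code 1
# ... import the hand history files, accumulating stored/duplicate/error counts ...
if settings['dropIndexes'] == 'drop':
    fdb.afterBulkImport()       # re-create whatever prepareBulkImport dropped
fdb.analyzeDB()                 # e.g. vacuum analyze on PostgreSQL to refresh statistics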


@@ -150,7 +150,9 @@ class Importer:
self.monitor = True
self.dirlist[site] = [dir] + [filter]
#print "addImportDirectory: checking files in", dir
for file in os.listdir(dir):
#print " adding file ", file
self.addImportFile(os.path.join(dir, file), site, filter)
else:
print "Warning: Attempted to add non-directory: '" + str(dir) + "' as an import directory"
@@ -162,7 +164,7 @@ class Importer:
if self.settings['dropIndexes'] == 'auto':
self.settings['dropIndexes'] = self.calculate_auto()
if self.settings['dropIndexes'] == 'drop':
fpdb_simple.prepareBulkImport(self.fdb)
self.fdb.prepareBulkImport()
totstored = 0
totdups = 0
totpartial = 0
@@ -177,8 +179,8 @@ class Importer:
toterrors += errors
tottime += ttime
if self.settings['dropIndexes'] == 'drop':
fpdb_simple.afterBulkImport(self.fdb)
self.fdb.afterBulkImport()
fpdb_simple.analyzeDB(self.fdb)
self.fdb.analyzeDB()
return (totstored, totdups, totpartial, toterrors, tottime)
# else: import threaded
@@ -203,14 +205,18 @@ class Importer:
#todo: make efficient - always checks for new file, should be able to use mtime of directory
# ^^ May not work on windows
#rulog = open('runUpdated.txt', 'a')
#rulog.writelines("runUpdated ... ")
for site in self.dirlist:
self.addImportDirectory(self.dirlist[site][0], False, site, self.dirlist[site][1])
for file in self.filelist:
if os.path.exists(file):
stat_info = os.stat(file)
#rulog.writelines("path exists ")
try:
lastupdate = self.updated[file]
#rulog.writelines("lastupdate = %d, mtime = %d" % (lastupdate,stat_info.st_mtime))
if stat_info.st_mtime > lastupdate:
self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
self.updated[file] = time()
@@ -236,7 +242,8 @@ class Importer:
self.addToDirList = {}
self.removeFromFileList = {}
self.fdb.db.rollback()
#rulog.writelines(" finished\n")
#rulog.close()
# This is now an internal function that should not be called directly.
def import_file_dict(self, file, site, filter):
@@ -282,6 +289,7 @@ class Importer:
starttime = time()
last_read_hand = 0
loc = 0
#print "file =", file
if file == "stdin":
inputFile = sys.stdin
else:
@@ -292,10 +300,17 @@ class Importer:
return (0, 0, 0, 1, 0)
try:
loc = self.pos_in_file[file]
#size = os.path.getsize(file)
#print "loc =", loc, 'size =', size
except:
pass
# Read input file into class and close file
inputFile.seek(loc)
#tmplines = inputFile.readlines()
#if tmplines == None or tmplines == []:
# print "tmplines = ", tmplines
#else:
# print "tmplines[0] =", tmplines[0]
self.lines = fpdb_simple.removeTrailingEOL(inputFile.readlines())
self.pos_in_file[file] = inputFile.tell()
inputFile.close()
@@ -303,7 +318,8 @@ class Importer:
try: # sometimes we seem to be getting an empty self.lines, in which case, we just want to return.
firstline = self.lines[0]
except:
print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc)
# just skip the debug message and return silently:
#print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc)
return (0,0,0,1,0)
if firstline.find("Tournament Summary")!=-1:
@@ -348,6 +364,7 @@ class Importer:
if self.callHud:
#print "call to HUD here. handsId:",handsId
#pipe the Hands.id out to the HUD
print "sending hand to hud", handsId, "pipe =", self.caller.pipe_to_hud
self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
except fpdb_simple.DuplicateError:
duplicates += 1
@@ -364,7 +381,6 @@ class Importer:
except (fpdb_simple.FpdbError), fe:
errors += 1
self.printEmailErrorMessage(errors, file, hand)
self.fdb.db.rollback()
if self.settings['failOnError']:


@@ -40,338 +40,6 @@ SQLITE = 4
# config while trying out new hudcache mechanism
use_date_in_hudcache = True
# Data Structures for index and foreign key creation
# drop_code is an int with possible values: 0 - don't drop for bulk import
# 1 - drop during bulk import
# db differences:
# - note that mysql automatically creates indexes on constrained columns when
# foreign keys are created, while postgres does not. Hence the much longer list
# of indexes is required for postgres.
# all primary keys are left on all the time
#
# table column drop_code
indexes = [
[ ] # no db with index 0
, [ ] # no db with index 1
, [ # indexes for mysql (list index 2)
{'tab':'Players', 'col':'name', 'drop':0}
, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
]
, [ # indexes for postgres (list index 3)
{'tab':'Boardcards', 'col':'handId', 'drop':0}
, {'tab':'Gametypes', 'col':'siteId', 'drop':0}
, {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09
, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
, {'tab':'HandsActions', 'col':'handsPlayerId', 'drop':0}
, {'tab':'HandsPlayers', 'col':'handId', 'drop':1}
, {'tab':'HandsPlayers', 'col':'playerId', 'drop':1}
, {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
, {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
, {'tab':'HudCache', 'col':'playerId', 'drop':0}
, {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'Players', 'col':'siteId', 'drop':1}
, {'tab':'Players', 'col':'name', 'drop':0}
, {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
, {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
, {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0}
, {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
]
]
foreignKeys = [
[ ] # no db with index 0
, [ ] # no db with index 1
, [ # foreign keys for mysql
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
, {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
, [ # foreign keys for postgres
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
, {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
]
# MySQL Notes:
# "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
# - creates index handId on <thistable>.handId
# alter table t drop foreign key fk
# alter table t add foreign key (fkcol) references tab(rcol)
# alter table t add constraint c foreign key (fkcol) references tab(rcol)
# (fkcol is used for foreigh key name)
# mysql to list indexes:
# SELECT table_name, index_name, non_unique, column_name
# FROM INFORMATION_SCHEMA.STATISTICS
# WHERE table_name = 'tbl_name'
# AND table_schema = 'db_name'
# ORDER BY table_name, index_name, seq_in_index
#
# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
# ALTER TABLE tab DROP INDEX idx
# mysql to list fks:
# SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
# FROM information_schema.KEY_COLUMN_USAGE
# WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
# AND REFERENCED_TABLE_NAME is not null
# ORDER BY TABLE_NAME, COLUMN_NAME;
# this may indicate missing object
# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
# PG notes:
# To add a foreign key constraint to a table:
# ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
# ALTER TABLE tab DROP CONSTRAINT zipchk
#
# Note: index names must be unique across a schema
# CREATE INDEX idx ON tab(col)
# DROP INDEX idx
def prepareBulkImport(fdb):
"""Drop some indexes/foreign keys to prepare for bulk import.
Currently keeping the standalone indexes as needed to import quickly"""
# fdb is a fpdb_db object including backend, db, cursor, sql variables
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for fk in foreignKeys[fdb.backend]:
if fk['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
fdb.cursor.execute("SELECT constraint_name " +
"FROM information_schema.KEY_COLUMN_USAGE " +
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
"WHERE 1=1 " +
"AND table_name = %s AND column_name = %s " +
"AND referenced_table_name = %s " +
"AND referenced_column_name = %s ",
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
cons = fdb.cursor.fetchone()
#print "preparebulk: cons=", cons
if cons:
print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
try:
fdb.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
except:
pass
elif fdb.backend == PGSQL:
# DON'T FORGET TO RECREATE THEM!!
print "dropping pg fk", fk['fktab'], fk['fkcol']
try:
# try to lock table to see if index drop will work:
# hmmm, tested by commenting out rollback in grapher. lock seems to work but
# then drop still hangs :-( does work in some tests though??
# will leave code here for now pending further tests/enhancement ...
fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
#print "after lock, status:", fdb.cursor.statusmessage
#print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
try:
fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
except:
if "does not exist" not in str(sys.exc_value):
print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
% (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
except:
print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
% (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
else:
print "Only MySQL and Postgres supported so far"
return -1
for idx in indexes[fdb.backend]:
if idx['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
print "dropping mysql index ", idx['tab'], idx['col']
try:
# apparently nowait is not implemented in mysql so this just hands if there are locks
# preventing the index drop :-(
fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
# DON'T FORGET TO RECREATE THEM!!
print "dropping pg index ", idx['tab'], idx['col']
try:
# try to lock table to see if index drop will work:
fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
#print "after lock, status:", fdb.cursor.statusmessage
try:
# table locked ok so index drop should work:
#print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
fdb.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
#print "dropped pg index ", idx['tab'], idx['col']
except:
if "does not exist" not in str(sys.exc_value):
print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
% (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
except:
print "warning: index %s_%s_idx not dropped %s, continuing ..." \
% (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
else:
print "Error: Only MySQL and Postgres supported so far"
return -1
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
fdb.db.commit() # seems to clear up errors if there were any in postgres
#end def prepareBulkImport
def afterBulkImport(fdb):
"""Re-create any dropped indexes/foreign keys after bulk import"""
# fdb is a fpdb_db object including backend, db, cursor, sql variables
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for fk in foreignKeys[fdb.backend]:
if fk['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
fdb.cursor.execute("SELECT constraint_name " +
"FROM information_schema.KEY_COLUMN_USAGE " +
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
"WHERE 1=1 " +
"AND table_name = %s AND column_name = %s " +
"AND referenced_table_name = %s " +
"AND referenced_column_name = %s ",
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
cons = fdb.cursor.fetchone()
print "afterbulk: cons=", cons
if cons:
pass
else:
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
try:
fdb.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
+ fk['fkcol'] + ") references " + fk['rtab'] + "("
+ fk['rcol'] + ")")
except:
pass
elif fdb.backend == PGSQL:
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
try:
fdb.cursor.execute("alter table " + fk['fktab'] + " add constraint "
+ fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
+ " foreign key (" + fk['fkcol']
+ ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
for idx in indexes[fdb.backend]:
if idx['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
fdb.cursor.execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
# pass
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
except:
print " ERROR! :-("
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
fdb.db.commit() # seems to clear up errors if there were any in postgres
#end def afterBulkImport
def createAllIndexes(fdb):
"""Create new indexes"""
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for idx in indexes[fdb.backend]:
if fdb.backend == MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
fdb.cursor.execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
except:
print " ERROR! :-("
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
#end def createAllIndexes
def dropAllIndexes(fdb):
"""Drop all standalone indexes (i.e. not including primary keys or foreign keys)
using list of indexes in indexes data structure"""
# maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for idx in indexes[fdb.backend]:
if fdb.backend == MYSQL_INNODB:
print "dropping mysql index ", idx['tab'], idx['col']
try:
fdb.cursor.execute( "alter table %s drop index %s"
, (idx['tab'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
print "dropping pg index ", idx['tab'], idx['col']
# mod to use tab_col for index name?
try:
fdb.cursor.execute( "drop index %s_%s_idx"
% (idx['tab'],idx['col']) )
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
#end def dropAllIndexes
def analyzeDB(fdb):
"""Do whatever the DB can offer to update index/table statistics"""
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow vacuum to work
try:
fdb.cursor.execute("vacuum analyze")
except:
print "Error during vacuum"
fdb.db.set_isolation_level(1) # go back to normal isolation level
fdb.db.commit()
#end def analyzeDB
class DuplicateError(Exception):
def __init__(self, value):
self.value = value
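As a footnote, the MySQL introspection queries quoted in the comments above can be used to verify which indexes and foreign keys actually exist after a bulk import. A minimal sketch with a DB-API cursor (the function names and the schema/table parameters are illustrative, not part of fpdb):

def list_mysql_indexes(cursor, schema, table):
    # same query as in the "mysql to list indexes" note above, parameterised on schema and table
    cursor.execute("SELECT table_name, index_name, non_unique, column_name"
                   " FROM INFORMATION_SCHEMA.STATISTICS"
                   " WHERE table_name = %s AND table_schema = %s"
                   " ORDER BY table_name, index_name, seq_in_index",
                   (table, schema))
    return cursor.fetchall()

def list_mysql_fks(cursor, schema):
    # lists foreign keys, as in the "mysql to list fks" note above
    cursor.execute("SELECT constraint_name, table_name, column_name,"
                   "       referenced_table_name, referenced_column_name"
                   " FROM information_schema.KEY_COLUMN_USAGE"
                   " WHERE REFERENCED_TABLE_SCHEMA = %s"
                   "   AND REFERENCED_TABLE_NAME IS NOT NULL"
                   " ORDER BY TABLE_NAME, COLUMN_NAME",
                   (schema,))
    return cursor.fetchall()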