Merge branch 'master' of git://git.assembla.com/fpdb-sql

Author: Ray
Date:   2009-08-05 19:28:16 -04:00
Commit: 095d1ee3dc
6 changed files with 2786 additions and 2524 deletions

View File

@@ -184,6 +184,7 @@ class Database:
self.pcache = None # PlayerId cache
self.cachemiss = 0 # Delete me later - using to count player cache misses
self.cachehit = 0 # Delete me later - using to count player cache hits
# config while trying out new hudcache mechanism
self.use_date_in_hudcache = True
@@ -451,7 +452,7 @@ class Database:
def get_player_id(self, config, site, player_name):
c = self.connection.cursor()
c.execute(self.sql.query['get_player_id'], {'player': player_name, 'site': site})
c.execute(self.sql.query['get_player_id'], (player_name, site))
row = c.fetchone()
if row:
return row[0]
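
A note on the parameter-style change in this hunk (my reading, not stated in the commit): DB-API drivers disagree on placeholder syntax -- MySQLdb and psycopg2 use %s / %(name)s, while sqlite3 uses ? / :name -- so passing a plain positional tuple keeps the call working with whatever query text the backend-specific sql.query table supplies. A minimal sketch with a hypothetical, simplified query table:

# Sketch only: the real query text comes from the sql.query table and is more involved.
GET_PLAYER_ID = {
    'mysql':  "SELECT id FROM Players WHERE name = %s AND siteId = %s",
    'sqlite': "SELECT id FROM Players WHERE name = ? AND siteId = ?",
}

def get_player_id(cursor, backend, player_name, site_id):
    # The same positional tuple works with either placeholder style.
    cursor.execute(GET_PLAYER_ID[backend], (player_name, site_id))
    row = cursor.fetchone()
    return row[0] if row else None
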
@@ -812,9 +813,11 @@ class Database:
self.fillDefaultData()
self.commit()
except:
print "Error creating tables: ", str(sys.exc_value)
#print "Error creating tables: ", str(sys.exc_value)
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error creating tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
self.rollback()
raise fpdb_simple.FpdbError( "Error creating tables " + str(sys.exc_value) )
raise
#end def disconnect
def drop_tables(self):
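
The reworked error report above pulls the innermost traceback frame so the message names the failing function and line instead of only the exception text. A small standalone sketch of that pattern (the prefix and the deliberately failing code are just for illustration):

import sys
import traceback

def format_last_frame(prefix):
    # extract_tb() yields (filename, line number, function name, source text)
    # entries; [-1] is the innermost frame, i.e. where the exception was raised.
    err = traceback.extract_tb(sys.exc_info()[2])[-1]
    return "%s%s(%s): %s" % (prefix, err[2], err[1], sys.exc_info()[1])

try:
    {}['missing']
except KeyError:
    print(format_last_frame("***Error creating tables: "))
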
@@ -844,8 +847,9 @@ class Database:
self.commit()
except:
print "Error dropping tables: " + str(sys.exc_value)
raise fpdb_simple.FpdbError( "Error dropping tables " + str(sys.exc_value) )
print "***Error dropping tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
self.rollback()
raise
#end def drop_tables
def createAllIndexes(self):
@@ -916,7 +920,10 @@ class Database:
c.execute("INSERT INTO Sites (name,currency) VALUES ('PokerStars', 'USD')")
c.execute("INSERT INTO Sites (name,currency) VALUES ('Everleaf', 'USD')")
c.execute("INSERT INTO Sites (name,currency) VALUES ('Win2day', 'USD')")
c.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
if self.backend == self.SQLITE:
c.execute("INSERT INTO TourneyTypes VALUES (NULL, 1, 0, 0, 0, 0);")
else:
c.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
#c.execute("""INSERT INTO TourneyTypes
# (siteId,buyin,fee,knockout,rebuyOrAddon) VALUES
# (1,0,0,0,?)""",(False,) )
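
Why the new branch: SQLite does not accept DEFAULT as a value inside a VALUES list the way MySQL/PostgreSQL do, and (at least in SQLite builds of that era) there is no FALSE literal, so the default TourneyTypes row is inserted with NULL (letting the INTEGER PRIMARY KEY assign itself) and 0 in place of False. A self-contained sketch of the same idea against an in-memory sqlite3 database; the table shape here is assumed for illustration:

import sqlite3

conn = sqlite3.connect(":memory:")
c = conn.cursor()
# Assumed column layout, for illustration only.
c.execute("""CREATE TABLE TourneyTypes (
                 id INTEGER PRIMARY KEY,
                 siteId INTEGER, buyin INTEGER, fee INTEGER,
                 knockout INTEGER, rebuyOrAddon INTEGER)""")
# NULL in the id position lets SQLite pick the next rowid; 0 stands in for False.
c.execute("INSERT INTO TourneyTypes VALUES (NULL, 1, 0, 0, 0, 0)")
print(c.lastrowid)   # -> 1
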
@@ -976,21 +983,26 @@ class Database:
for player in pnames:
result[player] = self.pcache[player]
# NOTE: Using the LambdaDict does the same thing as:
#if player in self.pcache:
# #print "DEBUG: cachehit"
# pass
#else:
# self.pcache[player] = self.insertPlayer(player, siteid)
#result[player] = self.pcache[player]
return result
def insertPlayer(self, name, site_id):
self.cachemiss += 1
result = None
c = self.get_cursor()
c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
tmp=c.fetchall()
if (len(tmp)==0): #new player
c.execute ("INSERT INTO Players (name, siteId) VALUES (%s, %s)", (name, site_id))
#Get last id might be faster here.
c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
tmp=c.fetchall()
#print "recognisePlayerIDs, names[i]:",names[i],"tmp:",tmp
print "DEBUG: cache misses: %s" %self.cachemiss
return tmp[0][0]
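
The NOTE in the hunk above refers to fpdb's LambdaDict, a dict whose factory callable receives the missing key (unlike collections.defaultdict, whose factory takes no arguments). A minimal sketch of such a key-aware cache, written to match the behaviour the comment describes rather than copied from the module:

class LambdaDict(dict):
    """dict that fills missing entries by calling factory(key)."""
    def __init__(self, factory):
        super(LambdaDict, self).__init__()
        self.factory = factory

    def __missing__(self, key):
        value = self.factory(key)
        self[key] = value
        return value

# Usage mirroring the player-id cache above (insertPlayer and siteid assumed in scope):
#   self.pcache = LambdaDict(lambda key: self.insertPlayer(key, siteid))
#   result[player] = self.pcache[player]   # hits the database only on a cache miss
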
@@ -1788,6 +1800,8 @@ if __name__=="__main__":
print "database connection object = ", db_connection.connection
print "database type = ", db_connection.type
db_connection.recreate_tables()
h = db_connection.get_last_hand()
print "last hand = ", h

View File

@@ -36,7 +36,7 @@ import Configuration
class GuiBulkImport():
# CONFIGURATION - update these as preferred:
allowThreads = False # set to True to try out the threads field
allowThreads = True # set to True to try out the threads field
# not used
def import_dir(self):
@@ -71,12 +71,19 @@ class GuiBulkImport():
self.importer.setHandsInDB(self.n_hands_in_db)
cb_model = self.cb_dropindexes.get_model()
cb_index = self.cb_dropindexes.get_active()
cb_hmodel = self.cb_drophudcache.get_model()
cb_hindex = self.cb_drophudcache.get_active()
self.lab_info.set_text("Importing") # doesn't display :-(
if cb_index:
self.importer.setDropIndexes(cb_model[cb_index][0])
else:
self.importer.setDropIndexes("auto")
if cb_hindex:
self.importer.setDropHudCache(cb_hmodel[cb_hindex][0])
else:
self.importer.setDropHudCache("auto")
sitename = self.cbfilter.get_model()[self.cbfilter.get_active()][0]
self.lab_info.set_text("Importing")
self.importer.addBulkImportImportFileOrDir(self.inputFile, site = sitename)
self.importer.setCallHud(False)
@@ -88,6 +95,13 @@ class GuiBulkImport():
print 'GuiBulkImport.load done: Stored: %d \tDuplicates: %d \tPartial: %d \tErrors: %d in %s seconds - %.0f/sec'\
% (stored, dups, partial, errs, ttime, (stored+0.0) / ttime)
self.importer.clearFileList()
if self.n_hands_in_db == 0 and stored > 0:
self.cb_dropindexes.set_sensitive(True)
self.cb_dropindexes.set_active(0)
self.lab_drop.set_sensitive(True)
self.cb_drophudcache.set_sensitive(True)
self.cb_drophudcache.set_active(0)
self.lab_hdrop.set_sensitive(True)
self.lab_info.set_text("Import finished")
except:
@@ -115,7 +129,7 @@ class GuiBulkImport():
self.chooser.show()
# Table widget to hold the settings
self.table = gtk.Table(rows = 3, columns = 5, homogeneous = False)
self.table = gtk.Table(rows = 5, columns = 5, homogeneous = False)
self.vbox.add(self.table)
self.table.show()
@@ -130,6 +144,7 @@ class GuiBulkImport():
self.table.attach(self.lab_status, 1, 2, 0, 1, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_status.show()
self.lab_status.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_status.set_alignment(1.0, 0.5)
# spin button - status
status_adj = gtk.Adjustment(value=100, lower=0, upper=300, step_incr=10, page_incr=1, page_size=0) #not sure what upper value should be!
@@ -141,13 +156,15 @@ class GuiBulkImport():
self.lab_threads = gtk.Label("Number of threads:")
self.table.attach(self.lab_threads, 3, 4, 0, 1, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_threads.show()
self.lab_threads.set_sensitive(False)
if not self.allowThreads:
self.lab_threads.set_sensitive(False)
self.lab_threads.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_threads.set_alignment(1.0, 0.5)
# spin button - threads
threads_adj = gtk.Adjustment(value=0, lower=0, upper=10, step_incr=1, page_incr=1, page_size=0) #not sure what upper value should be!
self.spin_threads = gtk.SpinButton(adjustment=threads_adj, climb_rate=0.0, digits=0)
self.table.attach(self.spin_threads, 4, 5, 0, 1, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.spin_threads, 4, 5, 0, 1, xpadding = 10, ypadding = 0, yoptions=gtk.SHRINK)
self.spin_threads.show()
if not self.allowThreads:
self.spin_threads.set_sensitive(False)
@@ -162,6 +179,7 @@ class GuiBulkImport():
self.table.attach(self.lab_hands, 1, 2, 1, 2, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_hands.show()
self.lab_hands.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_hands.set_alignment(1.0, 0.5)
# spin button - hands to import
hands_adj = gtk.Adjustment(value=0, lower=0, upper=10, step_incr=1, page_incr=1, page_size=0) #not sure what upper value should be!
@@ -174,6 +192,7 @@ class GuiBulkImport():
self.table.attach(self.lab_drop, 3, 4, 1, 2, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_drop.show()
self.lab_drop.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_drop.set_alignment(1.0, 0.5)
# ComboBox - drop indexes
self.cb_dropindexes = gtk.combo_box_new_text()
@@ -186,9 +205,10 @@ class GuiBulkImport():
# label - filter
self.lab_filter = gtk.Label("Site filter:")
self.table.attach(self.lab_filter, 2, 3, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.lab_filter, 1, 2, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_filter.show()
self.lab_filter.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_filter.set_alignment(1.0, 0.5)
# ComboBox - filter
self.cbfilter = gtk.combo_box_new_text()
@@ -196,21 +216,42 @@ class GuiBulkImport():
print w
self.cbfilter.append_text(w)
self.cbfilter.set_active(0)
self.table.attach(self.cbfilter, 3, 4, 2, 3, xpadding = 10, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.cbfilter, 2, 3, 2, 3, xpadding = 10, ypadding = 1, yoptions=gtk.SHRINK)
self.cbfilter.show()
# label - info
self.lab_info = gtk.Label()
self.table.attach(self.lab_info, 0, 4, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_info.show()
# label - drop hudcache
self.lab_hdrop = gtk.Label("Drop HudCache:")
self.table.attach(self.lab_hdrop, 3, 4, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_hdrop.show()
self.lab_hdrop.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_hdrop.set_alignment(1.0, 0.5)
# ComboBox - drop hudcache
self.cb_drophudcache = gtk.combo_box_new_text()
self.cb_drophudcache.append_text('auto')
self.cb_drophudcache.append_text("don't drop")
self.cb_drophudcache.append_text('drop')
self.cb_drophudcache.set_active(0)
self.table.attach(self.cb_drophudcache, 4, 5, 2, 3, xpadding = 10, ypadding = 0, yoptions=gtk.SHRINK)
self.cb_drophudcache.show()
# button - Import
self.load_button = gtk.Button('Import') # todo: rename variables to import too
self.load_button.connect('clicked', self.load_clicked,
'Import clicked')
self.table.attach(self.load_button, 4, 5, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.load_button, 2, 3, 4, 5, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.load_button.show()
# label - spacer (keeps rows 3 & 5 apart)
self.lab_spacer = gtk.Label()
self.table.attach(self.lab_spacer, 3, 5, 3, 4, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_spacer.show()
# label - info
self.lab_info = gtk.Label()
self.table.attach(self.lab_info, 3, 5, 4, 5, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_info.show()
# see how many hands are in the db and adjust accordingly
tcursor = self.importer.database.cursor
tcursor.execute("Select count(1) from Hands")
@@ -222,6 +263,9 @@ class GuiBulkImport():
self.cb_dropindexes.set_active(2)
self.cb_dropindexes.set_sensitive(False)
self.lab_drop.set_sensitive(False)
self.cb_drophudcache.set_active(2)
self.cb_drophudcache.set_sensitive(False)
self.lab_hdrop.set_sensitive(False)
def main(argv=None):
"""main can also be called in the python interpreter, by supplying the command line as the argument."""

File diff suppressed because it is too large

View File

@@ -64,7 +64,7 @@ class fpdb_db:
if backend==fpdb_db.MYSQL_INNODB:
import MySQLdb
try:
self.db = MySQLdb.connect(host = host, user = user, passwd = password, db = database, use_unicode=True)
self.db = MySQLdb.connect(host = host, user = user, passwd = password, db = database, use_unicode=True, charset="utf8")
except:
raise fpdb_simple.FpdbError("MySQL connection failed")
elif backend==fpdb_db.PGSQL:
@@ -155,7 +155,7 @@ class fpdb_db:
return (self.host, self.database, self.user, self.password)
#end def get_db_info
def getLastInsertId(self):
def getLastInsertId(self, cursor=None):
try:
if self.backend == self.MYSQL_INNODB:
ret = self.db.insert_id()
@@ -177,9 +177,7 @@ class fpdb_db:
else:
ret = row[0]
elif self.backend == fpdb_db.SQLITE:
# don't know how to do this in sqlite
print "getLastInsertId(): not coded for sqlite yet"
ret = -1
ret = cursor.lastrowid
else:
print "getLastInsertId(): unknown backend ", self.backend
ret = -1
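
Taken together, the two hunks above make getLastInsertId() accept the active cursor so the SQLite branch can use cursor.lastrowid, while the other backends keep their driver-specific mechanisms. A condensed sketch of that dispatch (string backend constants and error handling are illustrative only):

def get_last_insert_id(backend, conn, cursor):
    if backend == 'mysql':
        return conn.insert_id()             # MySQLdb connection method
    elif backend == 'postgres':
        cursor.execute("SELECT lastval()")  # value of the most recently used sequence
        row = cursor.fetchone()
        return row[0] if row else -1
    elif backend == 'sqlite':
        return cursor.lastrowid             # rowid of the last INSERT on this cursor
    return -1
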

View File

@@ -78,19 +78,18 @@ class Importer:
#Set defaults
self.callHud = self.config.get_import_parameters().get("callFpdbHud")
# CONFIGURATION OPTIONS - update allowHudcacheRebuild and forceThreads for faster imports
# CONFIGURATION OPTIONS
self.settings.setdefault("minPrint", 30)
self.settings.setdefault("handCount", 0)
self.settings.setdefault("allowHudcacheRebuild", False) # if True speeds up big imports a lot, also
# stops deadlock problems with threaded imports
self.settings.setdefault("forceThreads", 0) # set to 1/2/more for faster imports
#self.settings.setdefault("allowHudcacheRebuild", True) # NOT USED NOW
#self.settings.setdefault("forceThreads", 2) # NOT USED NOW
self.settings.setdefault("writeQSize", 1000) # no need to change
self.settings.setdefault("writeQMaxWait", 10) # not used
self.writeq = None
self.database = Database.Database(self.config, sql = self.sql)
self.writerdbs = []
self.settings.setdefault("threads", 1) # value overridden by GuiBulkImport - use forceThreads above
self.settings.setdefault("threads", 1) # value set by GuiBulkImport
for i in xrange(self.settings['threads']):
self.writerdbs.append( Database.Database(self.config, sql = self.sql) )
@@ -124,6 +123,9 @@ class Importer:
def setDropIndexes(self, value):
self.settings['dropIndexes'] = value
def setDropHudCache(self, value):
self.settings['dropHudCache'] = value
# def setWatchTime(self):
# self.updated = time()
@@ -186,8 +188,8 @@ class Importer:
def runImport(self):
""""Run full import on self.filelist. This is called from GuiBulkImport.py"""
if self.settings['forceThreads'] > 0: # use forceThreads until threading enabled in GuiBulkImport
self.setThreads(self.settings['forceThreads'])
#if self.settings['forceThreads'] > 0: # use forceThreads until threading enabled in GuiBulkImport
# self.setThreads(self.settings['forceThreads'])
# Initial setup
start = datetime.datetime.now()
@@ -195,7 +197,7 @@ class Importer:
print "Started at", start, "--", len(self.filelist), "files to import.", self.settings['dropIndexes']
if self.settings['dropIndexes'] == 'auto':
self.settings['dropIndexes'] = self.calculate_auto2(self.database, 12.0, 500.0)
if self.settings['allowHudcacheRebuild']:
if self.settings['dropHudCache'] == 'auto':
self.settings['dropHudCache'] = self.calculate_auto2(self.database, 25.0, 500.0) # returns "drop"/"don't drop"
if self.settings['dropIndexes'] == 'drop':
@@ -237,7 +239,7 @@ class Importer:
self.database.afterBulkImport()
else:
print "No need to rebuild indexes."
if self.settings['allowHudcacheRebuild'] and self.settings['dropHudCache'] == 'drop':
if self.settings['dropHudCache'] == 'drop':
self.database.rebuild_hudcache()
else:
print "No need to rebuild hudcache."

View File

@@ -48,37 +48,6 @@ class FpdbError(Exception):
self.value = value
def __str__(self):
return repr(self.value)
# gets value for last auto-increment key generated
# returns -1 if a problem occurs
def getLastInsertId(backend, conn, cursor):
if backend == MYSQL_INNODB:
ret = conn.insert_id()
if ret < 1 or ret > 999999999:
print "getLastInsertId(): problem fetching insert_id? ret=", ret
ret = -1
elif backend == PGSQL:
# some options:
# currval(hands_id_seq) - use name of implicit seq here
# lastval() - still needs sequences set up?
# insert ... returning is useful syntax (but postgres specific?)
# see rules (fancy trigger type things)
cursor.execute ("SELECT lastval()")
row = cursor.fetchone()
if not row:
print "getLastInsertId(%s): problem fetching lastval? row=" % seq, row
ret = -1
else:
ret = row[0]
elif backend == SQLITE:
# don't know how to do this in sqlite
print "getLastInsertId(): not coded for sqlite yet"
ret = -1
else:
print "getLastInsertId(): unknown backend ", backend
ret = -1
return ret
#end def getLastInsertId
#returns an array of the total money paid. intending to add rebuys/addons here
def calcPayin(count, buyin, fee):