From 2eb86784fea85bd412019c53b97c46896eb698c8 Mon Sep 17 00:00:00 2001 From: sqlcoder Date: Sat, 30 May 2009 22:33:08 +0100 Subject: [PATCH 1/5] change BB heading to bb (numbers are in big blinds not Big Bets) --- pyfpdb/GuiPositionalStats.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyfpdb/GuiPositionalStats.py b/pyfpdb/GuiPositionalStats.py index 042f7779..0e20e632 100644 --- a/pyfpdb/GuiPositionalStats.py +++ b/pyfpdb/GuiPositionalStats.py @@ -87,7 +87,7 @@ class GuiPositionalStats (threading.Thread): ) self.posnheads = ( "Game", "Seats", "Posn", "VPIP", "PFR", "PF3", "Steals" , "Saw_F", "SawSD", "WtSDwsF", "W$SD", "FlAFq", "TuAFq", "RvAFq" - , "PoFAFq", "Net($)", "BB/100", "$/hand", "Variance", "Hds" + , "PoFAFq", "Net($)", "bb/100", "$/hand", "Variance", "Hds" ) self.fillStatsFrame(self.stats_frame) From c741d720a0836daf9e688c631b2866ece73557b3 Mon Sep 17 00:00:00 2001 From: sqlcoder Date: Sat, 30 May 2009 22:38:54 +0100 Subject: [PATCH 2/5] add date filter to player stats --- pyfpdb/FpdbSQLQueries.py | 83 ++++++++++++++++++++++++++++++++++++++-- pyfpdb/GuiPlayerStats.py | 26 ++++++++----- 2 files changed, 96 insertions(+), 13 deletions(-) diff --git a/pyfpdb/FpdbSQLQueries.py b/pyfpdb/FpdbSQLQueries.py index a85c2491..4445908e 100644 --- a/pyfpdb/FpdbSQLQueries.py +++ b/pyfpdb/FpdbSQLQueries.py @@ -900,7 +900,7 @@ class FpdbSQLQueries: GROUP BY h.handStart, hp.handId, hp.totalProfit ORDER BY h.handStart""" - if self.dbname in ['MySQL InnoDB', 'PostgreSQL']: + if self.dbname in ['MySQL InnoDB']: self.query['playerDetailedStats'] = """ select AS hgametypeid ,gt.base @@ -949,14 +949,89 @@ class FpdbSQLQueries: ,avg(h.seats+0.0) AS avgseats ,variance(hp.totalProfit/100.0) AS variance from HandsPlayers hp - inner join Hands h on (h.id = hp.handId) - inner join Gametypes gt on (gt.Id = h.gameTypeId) - inner join Sites s on (s.Id = gt.siteId) + inner join Hands h on (h.id = hp.handId) + inner join Gametypes gt on (gt.Id = h.gameTypeId) + 
inner join Sites s on (s.Id = gt.siteId) where hp.playerId in and hp.tourneysPlayersId IS NULL and h.seats + and date_format(h.handStart, '%Y-%m-%d') + group by hgameTypeId + ,hp.playerId + ,gt.base + ,gt.category + + ,upper(gt.limitType) + ,s.name + order by hp.playerId + ,gt.base + ,gt.category + + + ,maxbigblind desc + ,upper(gt.limitType) + ,s.name + """ + elif self.dbname in ['PostgreSQL']: + self.query['playerDetailedStats'] = """ + select AS hgametypeid + ,gt.base + ,gt.category + ,upper(gt.limitType) AS limittype + ,s.name + ,min(gt.bigBlind) AS minbigblind + ,max(gt.bigBlind) AS maxbigblind + /*, AS gtid*/ + ,count(1) AS n + ,100.0*sum(cast(hp.street0VPI as integer))/count(1) AS vpip + ,100.0*sum(cast(hp.street0Aggr as integer))/count(1) AS pfr + ,case when sum(cast(hp.street0_3Bchance as integer)) = 0 then -999 + else 100.0*sum(cast(hp.street0_3Bdone as integer))/sum(cast(hp.street0_3Bchance as integer)) + end AS pf3 + ,case when sum(cast(hp.stealattemptchance as integer)) = 0 then -999 + else 100.0*sum(cast(hp.stealattempted as integer))/sum(cast(hp.stealattemptchance as integer)) + end AS steals + ,100.0*sum(cast(hp.street1Seen as integer))/count(1) AS saw_f + ,100.0*sum(cast(hp.sawShowdown as integer))/count(1) AS sawsd + ,case when sum(cast(hp.street1Seen as integer)) = 0 then -999 + else 100.0*sum(cast(hp.sawShowdown as integer))/sum(cast(hp.street1Seen as integer)) + end AS wtsdwsf + ,case when sum(cast(hp.sawShowdown as integer)) = 0 then -999 + else 100.0*sum(cast(hp.wonAtSD as integer))/sum(cast(hp.sawShowdown as integer)) + end AS wmsd + ,case when sum(cast(hp.street1Seen as integer)) = 0 then -999 + else 100.0*sum(cast(hp.street1Aggr as integer))/sum(cast(hp.street1Seen as integer)) + end AS flafq + ,case when sum(cast(hp.street2Seen as integer)) = 0 then -999 + else 100.0*sum(cast(hp.street2Aggr as integer))/sum(cast(hp.street2Seen as integer)) + end AS tuafq + ,case when sum(cast(hp.street3Seen as integer)) = 0 then -999 + else 
100.0*sum(cast(hp.street3Aggr as integer))/sum(cast(hp.street3Seen as integer)) + end AS rvafq + ,case when sum(cast(hp.street1Seen as integer))+sum(cast(hp.street2Seen as integer))+sum(cast(hp.street3Seen as integer)) = 0 then -999 + else 100.0*(sum(cast(hp.street1Aggr as integer))+sum(cast(hp.street2Aggr as integer))+sum(cast(hp.street3Aggr as integer))) + /(sum(cast(hp.street1Seen as integer))+sum(cast(hp.street2Seen as integer))+sum(cast(hp.street3Seen as integer))) + end AS pofafq + ,sum(hp.totalProfit)/100.0 AS net + ,sum(hp.rake)/100.0 AS rake + ,100.0*avg(hp.totalProfit/(gt.bigBlind+0.0)) AS bbper100 + ,avg(hp.totalProfit)/100.0 AS profitperhand + ,100.0*avg((hp.totalProfit+hp.rake)/(gt.bigBlind+0.0)) AS bb100xr + ,avg((hp.totalProfit+hp.rake)/100.0) AS profhndxr + ,avg(h.seats+0.0) AS avgseats + ,variance(hp.totalProfit/100.0) AS variance + from HandsPlayers hp + inner join Hands h on (h.id = hp.handId) + inner join Gametypes gt on (gt.Id = h.gameTypeId) + inner join Sites s on (s.Id = gt.siteId) + where hp.playerId in + and hp.tourneysPlayersId IS NULL + and h.seats + + + and to_char(h.handStart, 'YYYY-MM-DD') group by hgameTypeId ,hp.playerId ,gt.base diff --git a/pyfpdb/GuiPlayerStats.py b/pyfpdb/GuiPlayerStats.py index 311d9a10..28990279 100644 --- a/pyfpdb/GuiPlayerStats.py +++ b/pyfpdb/GuiPlayerStats.py @@ -60,7 +60,7 @@ class GuiPlayerStats (threading.Thread): "LimitSep" : True, "Seats" : True, "SeatSep" : True, - "Dates" : False, + "Dates" : True, "Groups" : True, "Button1" : True, "Button2" : True @@ -93,8 +93,9 @@ class GuiPlayerStats (threading.Thread): , ("rvafq", True, "RvAFq", 1.0, "%3.1f") , ("pofafq", False, "PoFAFq", 1.0, "%3.1f") , ("net", True, "Net($)", 1.0, "%6.2f") - , ("bbper100", True, "BB/100", 1.0, "%4.2f") + , ("bbper100", True, "bb/100", 1.0, "%4.2f") , ("rake", True, "Rake($)", 1.0, "%6.2f") + , ("bb100xr", True, "bbxr/100", 1.0, "%4.2f") , ("variance", True, "Variance", 1.0, "%5.2f") ] @@ -155,6 +156,7 @@ class GuiPlayerStats 
(threading.Thread): siteids = self.filters.getSiteIds() limits = self.filters.getLimits() seats = self.filters.getSeats() + dates = self.filters.getDates() sitenos = [] playerids = [] @@ -178,16 +180,16 @@ class GuiPlayerStats (threading.Thread): print "No limits found" return - self.createStatsTable(vbox, playerids, sitenos, limits, seats) + self.createStatsTable(vbox, playerids, sitenos, limits, seats, dates) - def createStatsTable(self, vbox, playerids, sitenos, limits, seats): + def createStatsTable(self, vbox, playerids, sitenos, limits, seats, dates): starttime = time() # Display summary table at top of page # 3rd parameter passes extra flags, currently includes: # holecards - whether to display card breakdown (True/False) flags = [False] - self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats) + self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates) # Separator sep = gtk.HSeparator() @@ -210,13 +212,13 @@ class GuiPlayerStats (threading.Thread): # Detailed table flags = [True] - self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats) + self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates) self.db.db.commit() print "Stats page displayed in %4.2f seconds" % (time() - starttime) #end def fillStatsFrame(self, vbox): - def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats): + def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats, dates): row = 0 sqlrow = 0 colalias,colshow,colheading,colxalign,colformat = 0,1,2,3,4 @@ -229,7 +231,7 @@ class GuiPlayerStats (threading.Thread): self.stats_table.show() tmp = self.sql.query[query] - tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats) + tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats, dates) self.cursor.execute(tmp) result = self.cursor.fetchall() colnames = [desc[0].lower() for desc in 
self.cursor.description] @@ -311,7 +313,7 @@ class GuiPlayerStats (threading.Thread): #end def addTable(self, query, vars, playerids, sitenos, limits, seats): - def refineQuery(self, query, flags, playerids, sitenos, limits, seats): + def refineQuery(self, query, flags, playerids, sitenos, limits, seats, dates): if not flags: holecards = False else: holecards = flags[0] @@ -371,6 +373,9 @@ class GuiPlayerStats (threading.Thread): else: query = query.replace("", '') + # Filter on dates + query = query.replace("", " between '" + dates[0] + "' and '" + dates[1] + "'") + #print "query =\n", query return(query) #end def refineQuery(self, query, playerids, sitenos, limits): @@ -438,3 +443,6 @@ class GuiPlayerStats (threading.Thread): detailDialog.destroy() + + + From de98ff67dd7724fbfbe6013598722e8494180a71 Mon Sep 17 00:00:00 2001 From: sqlcoder Date: Sun, 31 May 2009 21:06:33 +0100 Subject: [PATCH 3/5] try to improve reliability of index/fk dropping in postgres and add warnings (not errors) when problems occur --- pyfpdb/fpdb_simple.py | 51 +++++++++++++++++++++++++++++++------------ 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py index e4471e24..cecaf09b 100644 --- a/pyfpdb/fpdb_simple.py +++ b/pyfpdb/fpdb_simple.py @@ -20,6 +20,7 @@ import datetime import time import re +import sys import Card @@ -156,7 +157,7 @@ def prepareBulkImport(fdb): "AND referenced_column_name = %s ", (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) ) cons = fdb.cursor.fetchone() - print "preparebulk: cons=", cons + #print "preparebulk: cons=", cons if cons: print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol'] try: @@ -165,13 +166,25 @@ def prepareBulkImport(fdb): pass elif fdb.backend == PGSQL: # DON'T FORGET TO RECREATE THEM!! 
-            #print "dropping pg fk", fk['fktab'], fk['fkcol']
+            print "dropping pg fk", fk['fktab'], fk['fkcol']
             try:
-                #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
-                fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
-                print "dropped pg fk pg fk %s_%s_fkey" % (fk['fktab'], fk['fkcol'])
+                # try to lock table to see if index drop will work:
+                # hmmm, tested by commenting out rollback in grapher. lock seems to work but
+                # then drop still hangs :-( does work in some tests though??
+                # will leave code here for now pending further tests/enhancement ...
+                fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
+                #print "after lock, status:", fdb.cursor.statusmessage
+                #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
+                try:
+                    fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
+                    print "dropped pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
+                except:
+                    if "does not exist" not in str(sys.exc_value):
+                        print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
+                              % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
             except:
-                print "! failed drop pg fk %s_%s_fkey" % (fk['fktab'], fk['fkcol'])
+                print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
+                      % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
         else:
             print "Only MySQL and Postgres supported so far"
             return -1
@@ -181,22 +194,32 @@
         if fdb.backend == MYSQL_INNODB:
             print "dropping mysql index ", idx['tab'], idx['col']
             try:
+                # apparently nowait is not implemented in mysql so this just hangs if there are locks
+                # preventing the index drop :-(
                 fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
             except:
                 pass
         elif fdb.backend == PGSQL:
#    DON'T FORGET TO RECREATE THEM!!
- #print "Index dropping disabled for postgresql." - #print "dropping pg index ", idx['tab'], idx['col'] - # mod to use tab_col for index name? + print "dropping pg index ", idx['tab'], idx['col'] try: - fdb.cursor.execute( "drop index %s_%s_idx" % (idx['tab'],idx['col']) ) - print "drop index %s_%s_idx" % (idx['tab'],idx['col']) - #print "dropped pg index ", idx['tab'], idx['col'] + # try to lock table to see if index drop will work: + fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) ) + #print "after lock, status:", fdb.cursor.statusmessage + try: + # table locked ok so index drop should work: + #print "drop index %s_%s_idx" % (idx['tab'],idx['col']) + fdb.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) ) + #print "dropped pg index ", idx['tab'], idx['col'] + except: + if "does not exist" not in str(sys.exc_value): + print "warning: drop index %s_%s_idx failed: %s, continuing ..." \ + % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n')) except: - print "! failed drop index %s_%s_idx" % (idx['tab'],idx['col']) + print "warning: index %s_%s_idx not dropped %s, continuing ..." 
\ + % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n')) else: - print "Only MySQL and Postgres supported so far" + print "Error: Only MySQL and Postgres supported so far" return -1 if fdb.backend == PGSQL: From be61b2d2534c83dbf7dd59a2900674e8e8c33f5c Mon Sep 17 00:00:00 2001 From: sqlcoder Date: Sun, 31 May 2009 21:53:10 +0100 Subject: [PATCH 4/5] change quit option to standard 'Quit' --- pyfpdb/fpdb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py index 70ece5e4..379b4e44 100755 --- a/pyfpdb/fpdb.py +++ b/pyfpdb/fpdb.py @@ -283,7 +283,7 @@ class fpdb: # Create actions actiongroup.add_actions([('main', None, '_Main'), - ('Quit', gtk.STOCK_QUIT, '_Quit me!', None, 'Quit the Program', self.quit), + ('Quit', gtk.STOCK_QUIT, '_Quit', None, 'Quit the Program', self.quit), ('LoadProf', None, '_Load Profile (broken)', 'L', 'Load your profile', self.dia_load_profile), ('EditProf', None, '_Edit Profile (todo)', 'E', 'Edit your profile', self.dia_edit_profile), ('SaveProf', None, '_Save Profile (todo)', 'S', 'Save your profile', self.dia_save_profile), From 5b96769de3ca7197394befbc8c63daad3fc76eb2 Mon Sep 17 00:00:00 2001 From: sqlcoder Date: Sun, 31 May 2009 21:59:01 +0100 Subject: [PATCH 5/5] implement global lock by locking hands table --- pyfpdb/fpdb.py | 58 +++++++++++++++++++++++++------------------ pyfpdb/fpdb_simple.py | 21 ++++++++++++++++ 2 files changed, 55 insertions(+), 24 deletions(-) diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py index 379b4e44..8a26462d 100755 --- a/pyfpdb/fpdb.py +++ b/pyfpdb/fpdb.py @@ -174,35 +174,36 @@ class fpdb: def dia_load_profile(self, widget, data=None): """Dialogue to select a file to load a profile from""" - self.obtain_global_lock() - chooser = gtk.FileChooserDialog(title="Please select a profile file to load", - action=gtk.FILE_CHOOSER_ACTION_OPEN, - buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)) - chooser.set_filename(self.profile) + if 
self.obtain_global_lock(): + chooser = gtk.FileChooserDialog(title="Please select a profile file to load", + action=gtk.FILE_CHOOSER_ACTION_OPEN, + buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)) + chooser.set_filename(self.profile) - response = chooser.run() - chooser.destroy() - if response == gtk.RESPONSE_OK: - self.load_profile(chooser.get_filename()) - elif response == gtk.RESPONSE_CANCEL: - print 'User cancelled loading profile' + response = chooser.run() + chooser.destroy() + if response == gtk.RESPONSE_OK: + self.load_profile(chooser.get_filename()) + elif response == gtk.RESPONSE_CANCEL: + print 'User cancelled loading profile' #end def dia_load_profile def dia_recreate_tables(self, widget, data=None): """Dialogue that asks user to confirm that he wants to delete and recreate the tables""" - self.obtain_global_lock() + if self.obtain_global_lock(): - dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING, - buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables") - diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted." - dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted + dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING, + buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables") + diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted." 
+ dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted - response = dia_confirm.run() - dia_confirm.destroy() - if response == gtk.RESPONSE_YES: - self.db.recreate_tables() - elif response == gtk.RESPONSE_NO: - print 'User cancelled recreating tables' + response = dia_confirm.run() + dia_confirm.destroy() + if response == gtk.RESPONSE_YES: + self.db.recreate_tables() + elif response == gtk.RESPONSE_NO: + print 'User cancelled recreating tables' + self.release_global_lock() #end def dia_recreate_tables def dia_regression_test(self, widget, data=None): @@ -371,7 +372,14 @@ class fpdb: #end def not_implemented def obtain_global_lock(self): - print "todo: implement obtain_global_lock (users: pls ignore this)" + print "\nTaking global lock ..." + self.fdb_lock = fpdb_db.fpdb_db() + self.fdb_lock.connect(self.settings['db-backend'], + self.settings['db-host'], + self.settings['db-databaseName'], + self.settings['db-user'], + self.settings['db-password']) + return fpdb_simple.get_global_lock(self.fdb_lock) #end def obtain_global_lock def quit(self, widget): @@ -382,7 +390,9 @@ class fpdb: #end def quit_cliecked def release_global_lock(self): - print "todo: implement release_global_lock" + self.fdb_lock.db.rollback() + self.fdb_lock.disconnect() + print "Global lock released." #end def release_global_lock def tab_abbreviations(self, widget, data=None): diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py index cecaf09b..c6f79d9d 100644 --- a/pyfpdb/fpdb_simple.py +++ b/pyfpdb/fpdb_simple.py @@ -367,6 +367,27 @@ def analyzeDB(fdb): fdb.db.commit() #end def analyzeDB +def get_global_lock(fdb): + if fdb.backend == MYSQL_INNODB: + try: + fdb.cursor.execute( "lock tables Hands write" ) + except: + print "Error! failed to obtain global lock. Close all programs accessing " \ + + "database (including fpdb) and try again (%s)." 
\ + % ( str(sys.exc_value).rstrip('\n'), ) + return(False) + elif fdb.backend == PGSQL: + try: + fdb.cursor.execute( "lock table Hands in exclusive mode nowait" ) + #print "... after lock table, status =", fdb.cursor.statusmessage + except: + print "Error! failed to obtain global lock. Close all programs accessing " \ + + "database (including fpdb) and try again (%s)." \ + % ( str(sys.exc_value).rstrip('\n'), ) + return(False) + return(True) + + class DuplicateError(Exception): def __init__(self, value): self.value = value