From 75221f0b6ac5d266be0ecdeb103f2569959f3e02 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Thu, 7 May 2009 21:24:06 +0100
Subject: [PATCH 01/29] oops, missed out the key change in previous commit
---
 pyfpdb/fpdb_db.py | 2 ++
 1 file changed, 2 insertions(+)
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index eba87f52..fda04e19 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -62,6 +62,8 @@ class fpdb_db:
             self.db=MySQLdb.connect(host = host, user = user, passwd = password, db = database, use_unicode=True)
         elif backend==self.PGSQL:
             import psycopg2
+            import psycopg2.extensions
+            psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
             # If DB connection is made over TCP, then the variables
             # host, user and password are required
             print "host=%s user=%s pass=%s." % (host, user, password)
From 0b29286c9d029885c1c9c5a18fdbc9f7bf912898 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 26 Apr 2009 21:31:05 +0100
Subject: [PATCH 02/29] fix 3bet stat bug in storeHudCache2
---
 pyfpdb/fpdb_simple.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index 609e3b4c..9c0e7845 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -2498,7 +2498,7 @@ def storeHudCache2(backend, cursor, base, category, gametypeId, playerIds, hudIm
             # Try to do the update first:
             num = cursor.execute("""UPDATE HudCache
 SET HDs=HDs+%s, street0VPI=street0VPI+%s, street0Aggr=street0Aggr+%s,
-    street0_3B4BChance=%s, street0_3B4BDone=%s,
+    street0_3B4BChance=street0_3B4BChance+%s, street0_3B4BDone=street0_3B4BDone+%s,
     street1Seen=street1Seen+%s, street2Seen=street2Seen+%s, street3Seen=street3Seen+%s,
     street4Seen=street4Seen+%s, sawShowdown=sawShowdown+%s,
     street1Aggr=street1Aggr+%s, street2Aggr=street2Aggr+%s, street3Aggr=street3Aggr+%s,
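
For context, every other counter in this UPDATE accumulates into the per-player/per-gametype running totals, so assigning the 3B/4B values instead of adding them silently reset those two columns on each import. A hypothetical standalone form of the corrected statement (variable names invented, WHERE clause simplified for illustration):

    cursor.execute("""UPDATE HudCache
        SET HDs                = HDs + %s,
            street0_3B4BChance = street0_3B4BChance + %s,
            street0_3B4BDone   = street0_3B4BDone + %s
        WHERE gametypeId = %s
        AND   playerId   = %s""",
        (hands_this_batch, chance, done, gametype_id, player_id))
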
From d811c6b656dd43829b7bfa76e61af6adea1793d4 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 26 Apr 2009 21:38:04 +0100
Subject: [PATCH 03/29] add rollback to release locks when db reads are
 finished
---
 pyfpdb/fpdb.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py
index 49b3ad82..168a75d8 100755
--- a/pyfpdb/fpdb.py
+++ b/pyfpdb/fpdb.py
@@ -287,6 +287,7 @@ class fpdb:
 
         # Database connected to successfully, load queries to pass on to other classes
         self.querydict = FpdbSQLQueries.FpdbSQLQueries(self.db.get_backend_name())
+        self.db.db.rollback()
     #end def load_profile
 
     def not_implemented(self):
@@ -296,7 +297,7 @@ class fpdb:
     def obtain_global_lock(self):
         print "todo: implement obtain_global_lock (users: pls ignore this)"
     #end def obtain_global_lock
-
+    
     def quit(self, widget, data):
         print "Quitting normally"
         #check if current settings differ from profile, if so offer to save or abort
@@ -341,7 +342,6 @@ class fpdb:
         ps_tab=new_ps_thread.get_vbox()
         self.add_and_display_tab(ps_tab, "Positional Stats")
 
-
     def tab_main_help(self, widget, data):
         """Displays a tab with the main fpdb help screen"""
         #print "start of tab_main_help"
From f0b4d82fa554c678f5df0e847083fb8c3bbd0bfa Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 26 Apr 2009 21:47:28 +0100
Subject: [PATCH 04/29] always release locks when db work finished
---
 pyfpdb/fpdb_import.py | 4 ++++
 1 file changed, 4 insertions(+)
diff --git a/pyfpdb/fpdb_import.py b/pyfpdb/fpdb_import.py
index 6c0af7f5..6d2f846d 100644
--- a/pyfpdb/fpdb_import.py
+++ b/pyfpdb/fpdb_import.py
@@ -73,6 +73,7 @@ class Importer:
         
         self.fdb = fpdb_db.fpdb_db()   # sets self.fdb.db self.fdb.cursor and self.fdb.sql
         self.fdb.do_connect(self.config)
+        self.fdb.db.rollback()
 
     #Set functions
     def setCallHud(self, value):
@@ -367,6 +368,7 @@ class Importer:
                             self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
                     except fpdb_simple.DuplicateError:
                         duplicates += 1
+                        self.fdb.db.rollback()
                     except (ValueError), fe:
                         errors += 1
                         self.printEmailErrorMessage(errors, file, hand)
@@ -374,6 +376,8 @@ class Importer:
                         if (self.settings['failOnError']):
                             self.fdb.db.commit() #dont remove this, in case hand processing was cancelled.
                             raise
+                        else:
+                            self.fdb.db.rollback()
                     except (fpdb_simple.FpdbError), fe:
                         errors += 1
                         self.printEmailErrorMessage(errors, file, hand)
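
For context, each successfully stored hand is committed, but when fpdb_simple raises DuplicateError (or a ValueError is swallowed because failOnError is off) the connection would otherwise be left sitting in a failed transaction, holding its locks until the next commit. A reduced sketch of the per-hand pattern (helper and variable names hypothetical):

    try:
        store_hand(fdb, hand_text)    # hypothetical helper that inserts one hand
        fdb.db.commit()
    except fpdb_simple.DuplicateError:
        duplicates += 1
        fdb.db.rollback()             # abandon the failed insert so its locks are released
    except ValueError:
        errors += 1
        if settings['failOnError']:
            fdb.db.commit()           # keep earlier work, then let the error propagate
            raise
        else:
            fdb.db.rollback()
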
From 0c0832b9d3778b66b7f7e159e6ba8f89f04b5d3b Mon Sep 17 00:00:00 2001
From: Worros 
Date: Sun, 31 May 2009 02:56:08 +0800
Subject: [PATCH 05/29] Bump db version number
---
 pyfpdb/fpdb_db.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index 6e48fcf4..8814f104 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -96,7 +96,7 @@ class fpdb_db:
         try:
             self.cursor.execute("SELECT * FROM Settings")
             settings=self.cursor.fetchone()
-            if settings[0]!=118:
+            if settings[0]!=119:
                 print "outdated or too new database version - please recreate tables"
                 self.wrongDbVersion=True
         except:# _mysql_exceptions.ProgrammingError:
@@ -201,7 +201,7 @@ class fpdb_db:
     #end def get_db_info
     
     def fillDefaultData(self):
-        self.cursor.execute("INSERT INTO Settings VALUES (118);")
+        self.cursor.execute("INSERT INTO Settings VALUES (119);")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Full Tilt Poker', 'USD');")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'PokerStars', 'USD');")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Everleaf', 'USD');")
From 2eb86784fea85bd412019c53b97c46896eb698c8 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sat, 30 May 2009 22:33:08 +0100
Subject: [PATCH 06/29] change BB heading to bb (numbers are in big blinds not
 Big Bets)
---
 pyfpdb/GuiPositionalStats.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyfpdb/GuiPositionalStats.py b/pyfpdb/GuiPositionalStats.py
index 042f7779..0e20e632 100644
--- a/pyfpdb/GuiPositionalStats.py
+++ b/pyfpdb/GuiPositionalStats.py
@@ -87,7 +87,7 @@ class GuiPositionalStats (threading.Thread):
                          )
         self.posnheads = ( "Game", "Seats", "Posn", "VPIP", "PFR", "PF3", "Steals"
                          , "Saw_F", "SawSD", "WtSDwsF", "W$SD", "FlAFq", "TuAFq", "RvAFq"
-                         , "PoFAFq", "Net($)", "BB/100", "$/hand", "Variance", "Hds"
+                         , "PoFAFq", "Net($)", "bb/100", "$/hand", "Variance", "Hds"
                          )
 
         self.fillStatsFrame(self.stats_frame)
From c741d720a0836daf9e688c631b2866ece73557b3 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sat, 30 May 2009 22:38:54 +0100
Subject: [PATCH 07/29] add date filter to player stats
---
 pyfpdb/FpdbSQLQueries.py | 83 ++++++++++++++++++++++++++++++++++++++--
 pyfpdb/GuiPlayerStats.py | 26 ++++++++-----
 2 files changed, 96 insertions(+), 13 deletions(-)
diff --git a/pyfpdb/FpdbSQLQueries.py b/pyfpdb/FpdbSQLQueries.py
index a85c2491..4445908e 100644
--- a/pyfpdb/FpdbSQLQueries.py
+++ b/pyfpdb/FpdbSQLQueries.py
@@ -900,7 +900,7 @@ class FpdbSQLQueries:
                 GROUP BY h.handStart, hp.handId, hp.totalProfit
                 ORDER BY h.handStart"""
 
-        if self.dbname in ['MySQL InnoDB', 'PostgreSQL']:
+        if self.dbname in ['MySQL InnoDB']:
             self.query['playerDetailedStats'] = """
                      select                                                            AS hgametypeid
                             ,gt.base
@@ -949,14 +949,89 @@ class FpdbSQLQueries:
                             ,avg(h.seats+0.0)                                                       AS avgseats
                             ,variance(hp.totalProfit/100.0)                                         AS variance
                       from HandsPlayers hp
-                      inner join Hands h       on  (h.id = hp.handId)
-                      inner join Gametypes gt  on  (gt.Id = h.gameTypeId)
-                      inner join Sites s       on  (s.Id = gt.siteId)
+                           inner join Hands h       on  (h.id = hp.handId)
+                           inner join Gametypes gt  on  (gt.Id = h.gameTypeId)
+                           inner join Sites s       on  (s.Id = gt.siteId)
                       where hp.playerId in 
                       and   hp.tourneysPlayersId IS NULL
                       and   h.seats 
                       
                       
+                      and   date_format(h.handStart, '%Y-%m-%d') 
+                      group by hgameTypeId
+                              ,hp.playerId
+                              ,gt.base
+                              ,gt.category
+                              
+                              ,upper(gt.limitType)
+                              ,s.name
+                      order by hp.playerId
+                              ,gt.base
+                              ,gt.category
+                              
+                              
+                              ,maxbigblind desc
+                              ,upper(gt.limitType)
+                              ,s.name
+                      """
+        elif self.dbname in ['PostgreSQL']:
+            self.query['playerDetailedStats'] = """
+                     select                                                            AS hgametypeid
+                            ,gt.base
+                            ,gt.category
+                            ,upper(gt.limitType)                                                    AS limittype
+                            ,s.name
+                            ,min(gt.bigBlind)                                                       AS minbigblind
+                            ,max(gt.bigBlind)                                                       AS maxbigblind
+                            /*,                                                         AS gtid*/
+                            ,count(1)                                                               AS n
+                            ,100.0*sum(cast(hp.street0VPI as integer))/count(1)             AS vpip
+                            ,100.0*sum(cast(hp.street0Aggr as integer))/count(1)            AS pfr
+                            ,case when sum(cast(hp.street0_3Bchance as integer)) = 0 then -999
+                                  else 100.0*sum(cast(hp.street0_3Bdone as integer))/sum(cast(hp.street0_3Bchance as integer))
+                             end                                                                    AS pf3
+                            ,case when sum(cast(hp.stealattemptchance as integer)) = 0 then -999
+                                  else 100.0*sum(cast(hp.stealattempted as integer))/sum(cast(hp.stealattemptchance as integer))
+                             end                                                                    AS steals
+                            ,100.0*sum(cast(hp.street1Seen as integer))/count(1)           AS saw_f
+                            ,100.0*sum(cast(hp.sawShowdown as integer))/count(1)           AS sawsd
+                            ,case when sum(cast(hp.street1Seen as integer)) = 0 then -999
+                                  else 100.0*sum(cast(hp.sawShowdown as integer))/sum(cast(hp.street1Seen as integer))
+                             end                                                                    AS wtsdwsf
+                            ,case when sum(cast(hp.sawShowdown as integer)) = 0 then -999
+                                  else 100.0*sum(cast(hp.wonAtSD as integer))/sum(cast(hp.sawShowdown as integer))
+                             end                                                                    AS wmsd
+                            ,case when sum(cast(hp.street1Seen as integer)) = 0 then -999
+                                  else 100.0*sum(cast(hp.street1Aggr as integer))/sum(cast(hp.street1Seen as integer))
+                             end                                                                    AS flafq
+                            ,case when sum(cast(hp.street2Seen as integer)) = 0 then -999
+                                  else 100.0*sum(cast(hp.street2Aggr as integer))/sum(cast(hp.street2Seen as integer))
+                             end                                                                    AS tuafq
+                            ,case when sum(cast(hp.street3Seen as integer)) = 0 then -999
+                                 else 100.0*sum(cast(hp.street3Aggr as integer))/sum(cast(hp.street3Seen as integer))
+                             end                                                                    AS rvafq
+                            ,case when sum(cast(hp.street1Seen as integer))+sum(cast(hp.street2Seen as integer))+sum(cast(hp.street3Seen as integer)) = 0 then -999
+                                 else 100.0*(sum(cast(hp.street1Aggr as integer))+sum(cast(hp.street2Aggr as integer))+sum(cast(hp.street3Aggr as integer)))
+                                          /(sum(cast(hp.street1Seen as integer))+sum(cast(hp.street2Seen as integer))+sum(cast(hp.street3Seen as integer)))
+                             end                                                                    AS pofafq
+                            ,sum(hp.totalProfit)/100.0                                              AS net
+                            ,sum(hp.rake)/100.0                                                     AS rake
+                            ,100.0*avg(hp.totalProfit/(gt.bigBlind+0.0))                            AS bbper100
+                            ,avg(hp.totalProfit)/100.0                                              AS profitperhand
+                            ,100.0*avg((hp.totalProfit+hp.rake)/(gt.bigBlind+0.0))                  AS bb100xr
+                            ,avg((hp.totalProfit+hp.rake)/100.0)                                    AS profhndxr
+                            ,avg(h.seats+0.0)                                                       AS avgseats
+                            ,variance(hp.totalProfit/100.0)                                         AS variance
+                      from HandsPlayers hp
+                           inner join Hands h       on  (h.id = hp.handId)
+                           inner join Gametypes gt  on  (gt.Id = h.gameTypeId)
+                           inner join Sites s       on  (s.Id = gt.siteId)
+                      where hp.playerId in 
+                      and   hp.tourneysPlayersId IS NULL
+                      and   h.seats 
+                      
+                      
+                      and   to_char(h.handStart, 'YYYY-MM-DD') 
                       group by hgameTypeId
                               ,hp.playerId
                               ,gt.base
diff --git a/pyfpdb/GuiPlayerStats.py b/pyfpdb/GuiPlayerStats.py
index 311d9a10..28990279 100644
--- a/pyfpdb/GuiPlayerStats.py
+++ b/pyfpdb/GuiPlayerStats.py
@@ -60,7 +60,7 @@ class GuiPlayerStats (threading.Thread):
                             "LimitSep" :  True,
                             "Seats"    :  True,
                             "SeatSep"  :  True,
-                            "Dates"    :  False,
+                            "Dates"    :  True,
                             "Groups"   :  True,
                             "Button1"  :  True,
                             "Button2"  :  True
@@ -93,8 +93,9 @@ class GuiPlayerStats (threading.Thread):
                        , ("rvafq",    True,  "RvAFq",    1.0, "%3.1f")
                        , ("pofafq",   False, "PoFAFq",   1.0, "%3.1f")
                        , ("net",      True,  "Net($)",   1.0, "%6.2f")
-                       , ("bbper100", True,  "BB/100",   1.0, "%4.2f")
+                       , ("bbper100", True,  "bb/100",   1.0, "%4.2f")
                        , ("rake",     True,  "Rake($)",  1.0, "%6.2f")
+                       , ("bb100xr",  True,  "bbxr/100", 1.0, "%4.2f")
                        , ("variance", True,  "Variance", 1.0, "%5.2f")
                        ]
 
@@ -155,6 +156,7 @@ class GuiPlayerStats (threading.Thread):
         siteids = self.filters.getSiteIds()
         limits  = self.filters.getLimits()
         seats  = self.filters.getSeats()
+        dates = self.filters.getDates()
         sitenos = []
         playerids = []
 
@@ -178,16 +180,16 @@ class GuiPlayerStats (threading.Thread):
             print "No limits found"
             return
 
-        self.createStatsTable(vbox, playerids, sitenos, limits, seats)
+        self.createStatsTable(vbox, playerids, sitenos, limits, seats, dates)
 
-    def createStatsTable(self, vbox, playerids, sitenos, limits, seats):
+    def createStatsTable(self, vbox, playerids, sitenos, limits, seats, dates):
         starttime = time()
 
         # Display summary table at top of page
         # 3rd parameter passes extra flags, currently includes:
         # holecards - whether to display card breakdown (True/False)
         flags = [False]
-        self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats)
+        self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates)
 
         # Separator
         sep = gtk.HSeparator()
@@ -210,13 +212,13 @@ class GuiPlayerStats (threading.Thread):
 
         # Detailed table
         flags = [True]
-        self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats)
+        self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates)
 
         self.db.db.commit()
         print "Stats page displayed in %4.2f seconds" % (time() - starttime)
     #end def fillStatsFrame(self, vbox):
 
-    def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats):
+    def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats, dates):
         row = 0
         sqlrow = 0
         colalias,colshow,colheading,colxalign,colformat = 0,1,2,3,4
@@ -229,7 +231,7 @@ class GuiPlayerStats (threading.Thread):
         self.stats_table.show()
         
         tmp = self.sql.query[query]
-        tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats)
+        tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats, dates)
         self.cursor.execute(tmp)
         result = self.cursor.fetchall()
         colnames = [desc[0].lower() for desc in self.cursor.description]
@@ -311,7 +313,7 @@ class GuiPlayerStats (threading.Thread):
         
     #end def addTable(self, query, vars, playerids, sitenos, limits, seats):
 
-    def refineQuery(self, query, flags, playerids, sitenos, limits, seats):
+    def refineQuery(self, query, flags, playerids, sitenos, limits, seats, dates):
         if not flags:  holecards = False
         else:          holecards = flags[0]
 
@@ -371,6 +373,9 @@ class GuiPlayerStats (threading.Thread):
         else:
             query = query.replace("", '')
 
+        # Filter on dates
+        query = query.replace("", " between '" + dates[0] + "' and '" + dates[1] + "'")
+
         #print "query =\n", query
         return(query)
     #end def refineQuery(self, query, playerids, sitenos, limits):
@@ -438,3 +443,6 @@ class GuiPlayerStats (threading.Thread):
         detailDialog.destroy()
 
 
+
+
+
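
For context, the query templates in FpdbSQLQueries carry angle-bracket placeholders that refineQuery() swaps for concrete SQL; the new code takes the (start, end) pair from Filters.getDates() and substitutes a BETWEEN test on the formatted handStart date. A sketch of that substitution (placeholder name hypothetical):

    dates = self.filters.getDates()          # e.g. ('2009-01-01', '2009-12-31')
    query = query.replace("<datestest>",     # hypothetical placeholder name
                          " between '" + dates[0] + "' and '" + dates[1] + "'")
    # MySQL branch then reads:      and date_format(h.handStart, '%Y-%m-%d') between '...' and '...'
    # PostgreSQL branch then reads: and to_char(h.handStart, 'YYYY-MM-DD') between '...' and '...'
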
From de98ff67dd7724fbfbe6013598722e8494180a71 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 31 May 2009 21:06:33 +0100
Subject: [PATCH 08/29] try to improve reliability of index/fk dropping in
 postgres and add warnings (not errors) when problems occur
---
 pyfpdb/fpdb_simple.py | 51 +++++++++++++++++++++++++++++++------------
 1 file changed, 37 insertions(+), 14 deletions(-)
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index e4471e24..cecaf09b 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -20,6 +20,7 @@
 import datetime
 import time
 import re
+import sys
 
 import Card
  
@@ -156,7 +157,7 @@ def prepareBulkImport(fdb):
                                    "AND referenced_column_name = %s ",
                                    (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                 cons = fdb.cursor.fetchone()
-                print "preparebulk: cons=", cons
+                #print "preparebulk: cons=", cons
                 if cons:
                     print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
                     try:
@@ -165,13 +166,25 @@ def prepareBulkImport(fdb):
                         pass
             elif fdb.backend == PGSQL:
 #    DON'T FORGET TO RECREATE THEM!!
-                #print "dropping pg fk", fk['fktab'], fk['fkcol']
+                print "dropping pg fk", fk['fktab'], fk['fkcol']
                 try:
-                #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
-                    fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
-                    print "dropped pg fk pg fk %s_%s_fkey" % (fk['fktab'], fk['fkcol'])
+                    # try to lock table to see if index drop will work:
+                    # hmmm, tested by commenting out rollback in grapher. lock seems to work but 
+                    # then drop still hangs :-(  does work in some tests though??
+                    # will leave code here for now pending further tests/enhancement ...
+                    fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
+                    #print "after lock, status:", fdb.cursor.statusmessage
+                    #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
+                    try:
+                        fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
+                        print "dropped pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
+                    except:
+                        if "does not exist" not in str(sys.exc_value):
+                            print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
+                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                 except:
-                    print "! failed drop pg fk %s_%s_fkey" % (fk['fktab'], fk['fkcol'])
+                    print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
+                          % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
             else:
                 print "Only MySQL and Postgres supported so far"
                 return -1
@@ -181,22 +194,32 @@ def prepareBulkImport(fdb):
             if fdb.backend == MYSQL_INNODB:
                 print "dropping mysql index ", idx['tab'], idx['col']
                 try:
+                    # apparently nowait is not implemented in mysql so this just hangs if there are locks
+                    # preventing the index drop :-(
                     fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
                 except:
                     pass
             elif fdb.backend == PGSQL:
 #    DON'T FORGET TO RECREATE THEM!!
-                #print "Index dropping disabled for postgresql."
-                #print "dropping pg index ", idx['tab'], idx['col']
-                # mod to use tab_col for index name?
+                print "dropping pg index ", idx['tab'], idx['col']
                 try:
-                    fdb.cursor.execute( "drop index %s_%s_idx" % (idx['tab'],idx['col']) )
-		    print "drop index %s_%s_idx" % (idx['tab'],idx['col']) 
-                    #print "dropped  pg index ", idx['tab'], idx['col']
+                    # try to lock table to see if index drop will work:
+                    fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
+                    #print "after lock, status:", fdb.cursor.statusmessage
+                    try:
+                        # table locked ok so index drop should work:
+                        #print "drop index %s_%s_idx" % (idx['tab'],idx['col']) 
+                        fdb.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
+                        #print "dropped  pg index ", idx['tab'], idx['col']
+                    except:
+                        if "does not exist" not in str(sys.exc_value):
+                            print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
+                                  % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n')) 
                 except:
-		    print "! failed drop index %s_%s_idx" % (idx['tab'],idx['col']) 
+                    print "warning: index %s_%s_idx not dropped %s, continuing ..." \
+                          % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
             else:
-                print "Only MySQL and Postgres supported so far"
+                print "Error: Only MySQL and Postgres supported so far"
                 return -1
 
     if fdb.backend == PGSQL:
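
For context, dropping an index or foreign key needs an exclusive lock on the table, so the DROP hangs if another connection still holds a lock there; taking an explicit table lock with NOWAIT first is a cheap probe that fails immediately instead of blocking, although the commit's own comments note it is not airtight (a drop can still hang in some cases). Reduced sketch, assuming a psycopg2 cursor and a hypothetical table/column pair:

    import sys

    def drop_index_if_possible(cursor, table, col):
        """Try to drop <table>_<col>_idx, downgrading any failure to a warning."""
        try:
            # nowait: raise at once if another session holds a conflicting lock
            cursor.execute("lock table %s in exclusive mode nowait" % table)
            try:
                cursor.execute("drop index if exists %s_%s_idx" % (table, col))
            except:
                if "does not exist" not in str(sys.exc_value):
                    print "warning: drop index %s_%s_idx failed, continuing ..." % (table, col)
        except:
            print "warning: could not lock %s, %s_%s_idx not dropped, continuing ..." % (table, table, col)
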
From be61b2d2534c83dbf7dd59a2900674e8e8c33f5c Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 31 May 2009 21:53:10 +0100
Subject: [PATCH 09/29] change quit option to standard 'Quit'
---
 pyfpdb/fpdb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py
index 70ece5e4..379b4e44 100755
--- a/pyfpdb/fpdb.py
+++ b/pyfpdb/fpdb.py
@@ -283,7 +283,7 @@ class fpdb:
 
         # Create actions
         actiongroup.add_actions([('main', None, '_Main'),
-                                 ('Quit', gtk.STOCK_QUIT, '_Quit me!', None, 'Quit the Program', self.quit),
+                                 ('Quit', gtk.STOCK_QUIT, '_Quit', None, 'Quit the Program', self.quit),
                                  ('LoadProf', None, '_Load Profile (broken)', 'L', 'Load your profile', self.dia_load_profile),
                                  ('EditProf', None, '_Edit Profile (todo)', 'E', 'Edit your profile', self.dia_edit_profile),
                                  ('SaveProf', None, '_Save Profile (todo)', 'S', 'Save your profile', self.dia_save_profile),
From 5b96769de3ca7197394befbc8c63daad3fc76eb2 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 31 May 2009 21:59:01 +0100
Subject: [PATCH 10/29] implement global lock by locking hands table
---
 pyfpdb/fpdb.py        | 58 +++++++++++++++++++++++++------------------
 pyfpdb/fpdb_simple.py | 21 ++++++++++++++++
 2 files changed, 55 insertions(+), 24 deletions(-)
diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py
index 379b4e44..8a26462d 100755
--- a/pyfpdb/fpdb.py
+++ b/pyfpdb/fpdb.py
@@ -174,35 +174,36 @@ class fpdb:
 
     def dia_load_profile(self, widget, data=None):
         """Dialogue to select a file to load a profile from"""
-        self.obtain_global_lock()
-        chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
-                action=gtk.FILE_CHOOSER_ACTION_OPEN,
-                buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
-        chooser.set_filename(self.profile)
+        if self.obtain_global_lock():
+            chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
+                    action=gtk.FILE_CHOOSER_ACTION_OPEN,
+                    buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
+            chooser.set_filename(self.profile)
 
-        response = chooser.run()
-        chooser.destroy()    
-        if response == gtk.RESPONSE_OK:
-            self.load_profile(chooser.get_filename())
-        elif response == gtk.RESPONSE_CANCEL:
-            print 'User cancelled loading profile'
+            response = chooser.run()
+            chooser.destroy()    
+            if response == gtk.RESPONSE_OK:
+                self.load_profile(chooser.get_filename())
+            elif response == gtk.RESPONSE_CANCEL:
+                print 'User cancelled loading profile'
     #end def dia_load_profile
 
     def dia_recreate_tables(self, widget, data=None):
         """Dialogue that asks user to confirm that he wants to delete and recreate the tables"""
-        self.obtain_global_lock()
+        if self.obtain_global_lock():
         
-        dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING,
-                buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables")
-        diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted."
-        dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted
+            dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING,
+                    buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables")
+            diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted."
+            dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted
 
-        response = dia_confirm.run()
-        dia_confirm.destroy()
-        if response == gtk.RESPONSE_YES:
-            self.db.recreate_tables()
-        elif response == gtk.RESPONSE_NO:
-            print 'User cancelled recreating tables'
+            response = dia_confirm.run()
+            dia_confirm.destroy()
+            if response == gtk.RESPONSE_YES:
+                self.db.recreate_tables()
+            elif response == gtk.RESPONSE_NO:
+                print 'User cancelled recreating tables'
+            self.release_global_lock()
     #end def dia_recreate_tables
 
     def dia_regression_test(self, widget, data=None):
@@ -371,7 +372,14 @@ class fpdb:
     #end def not_implemented
 
     def obtain_global_lock(self):
-        print "todo: implement obtain_global_lock (users: pls ignore this)"
+        print "\nTaking global lock ..."
+        self.fdb_lock = fpdb_db.fpdb_db()
+        self.fdb_lock.connect(self.settings['db-backend'],
+                              self.settings['db-host'],
+                              self.settings['db-databaseName'],
+                              self.settings['db-user'], 
+                              self.settings['db-password'])
+        return fpdb_simple.get_global_lock(self.fdb_lock)
     #end def obtain_global_lock
 
     def quit(self, widget):
@@ -382,7 +390,9 @@ class fpdb:
     #end def quit_cliecked
 
     def release_global_lock(self):
-        print "todo: implement release_global_lock"
+        self.fdb_lock.db.rollback()
+        self.fdb_lock.disconnect()
+        print "Global lock released."
     #end def release_global_lock
 
     def tab_abbreviations(self, widget, data=None):
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index cecaf09b..c6f79d9d 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -367,6 +367,27 @@ def analyzeDB(fdb):
     fdb.db.commit()
 #end def analyzeDB
 
+def get_global_lock(fdb):
+    if fdb.backend == MYSQL_INNODB:
+        try:
+            fdb.cursor.execute( "lock tables Hands write" )
+        except:
+            print "Error! failed to obtain global lock. Close all programs accessing " \
+                  + "database (including fpdb) and try again (%s)." \
+                  % ( str(sys.exc_value).rstrip('\n'), )
+            return(False)
+    elif fdb.backend == PGSQL:
+        try:
+            fdb.cursor.execute( "lock table Hands in exclusive mode nowait" )
+            #print "... after lock table, status =", fdb.cursor.statusmessage
+        except:
+            print "Error! failed to obtain global lock. Close all programs accessing " \
+                  + "database (including fpdb) and try again (%s)." \
+                  % ( str(sys.exc_value).rstrip('\n'), )
+            return(False)
+    return(True) 
+
+
 class DuplicateError(Exception):
     def __init__(self, value):
         self.value = value
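
For context, the "global lock" is simply an exclusive lock on the Hands table held by a dedicated connection; any other fpdb instance trying to take it will block or fail, and rolling back and disconnecting that connection is what gives it up. A hypothetical driver sketch using the pieces added in this patch:

    import fpdb_db, fpdb_simple

    fdb_lock = fpdb_db.fpdb_db()
    fdb_lock.connect(backend, host, database, user, password)   # values from the profile settings
    if fpdb_simple.get_global_lock(fdb_lock):
        try:
            run_maintenance()          # hypothetical: recreate tables, etc.
        finally:
            fdb_lock.db.rollback()     # PostgreSQL: ending the transaction drops the Hands lock
            fdb_lock.disconnect()      # MySQL: closing the connection releases "lock tables"
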
From a26151717570747a4843f6f3cebadbbf7c554156 Mon Sep 17 00:00:00 2001
From: Worros 
Date: Mon, 1 Jun 2009 17:14:31 +0800
Subject: [PATCH 11/29] Add additional siteids to db, move player select
 function
---
 pyfpdb/BetfairToFpdb.py     |  1 +
 pyfpdb/CarbonToFpdb.py      |  1 +
 pyfpdb/EverleafToFpdb.py    |  1 +
 pyfpdb/FulltiltToFpdb.py    |  1 +
 pyfpdb/Hand.py              | 23 +++++++++++++++++++++++
 pyfpdb/OnGameToFpdb.py      |  1 +
 pyfpdb/PokerStarsToFpdb.py  |  1 +
 pyfpdb/UltimateBetToFpdb.py |  1 +
 pyfpdb/fpdb_db.py           |  4 ++++
 pyfpdb/fpdb_simple.py       | 21 ---------------------
 10 files changed, 34 insertions(+), 21 deletions(-)
diff --git a/pyfpdb/BetfairToFpdb.py b/pyfpdb/BetfairToFpdb.py
index bc1d17c9..672e858a 100755
--- a/pyfpdb/BetfairToFpdb.py
+++ b/pyfpdb/BetfairToFpdb.py
@@ -43,6 +43,7 @@ follow :  whether to tail -f the input"""
         logging.info("Initialising Betfair converter class")
         self.filetype = "text"
         self.codepage = "cp1252"
+        self.siteId   = 7 # Needs to match id entry in Sites database
         if autostart:
             self.start()
 
diff --git a/pyfpdb/CarbonToFpdb.py b/pyfpdb/CarbonToFpdb.py
index cf9fc8d3..fa1ad6fd 100644
--- a/pyfpdb/CarbonToFpdb.py
+++ b/pyfpdb/CarbonToFpdb.py
@@ -54,6 +54,7 @@ class CarbonPoker(HandHistoryConverter):
 		print "Initialising Carbon Poker converter class"
 		HandHistoryConverter.__init__(self, config, filename, "Carbon") # Call super class init
 		self.setFileType("xml")
+        self.siteId   = 4 # Needs to match id entry in Sites database
 
 	def readSupportedGames(self): 
 		pass
diff --git a/pyfpdb/EverleafToFpdb.py b/pyfpdb/EverleafToFpdb.py
index f353bc61..6ed2b6ba 100755
--- a/pyfpdb/EverleafToFpdb.py
+++ b/pyfpdb/EverleafToFpdb.py
@@ -49,6 +49,7 @@ debugging: if False, pass on partially supported game types. If true, have a go
         logging.info("Initialising Everleaf converter class")
         self.filetype = "text"
         self.codepage = "cp1252"
+        self.siteId   = 3 # Needs to match id entry in Sites database
         self.debugging = debugging
         if autostart:
             self.start()
diff --git a/pyfpdb/FulltiltToFpdb.py b/pyfpdb/FulltiltToFpdb.py
index dd0927b9..a84f683c 100755
--- a/pyfpdb/FulltiltToFpdb.py
+++ b/pyfpdb/FulltiltToFpdb.py
@@ -45,6 +45,7 @@ follow :  whether to tail -f the input"""
         logging.info("Initialising Fulltilt converter class")
         self.filetype = "text"
         self.codepage = "cp1252"
+        self.siteId   = 1 # Needs to match id entry in Sites database
         if autostart:
             self.start()
 
diff --git a/pyfpdb/Hand.py b/pyfpdb/Hand.py
index 17eefe12..d45bae97 100644
--- a/pyfpdb/Hand.py
+++ b/pyfpdb/Hand.py
@@ -100,6 +100,29 @@ db: a connected fpdb_db object"""
         """ Function to create Hand object from database """
         pass
 
+# Get SQL player IDs from database
+# this version could also be improved upon using list comprehensions, etc
+
+#def recognisePlayerIDs(cursor, names, site_id):
+#    result = []
+#    notfound = []
+#    cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
+#    tmp = dict(cursor.fetchall())
+#    for n in names:
+#        if n not in tmp:
+#            notfound.append(n)
+#        else:
+#            result.append(tmp[n])
+#    if notfound:
+#        cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
+#        cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
+#        tmp = cursor.fetchall()
+#        for n in tmp:
+#            result.append(n[0])
+#        
+#    return result
+
+
 
     def addPlayer(self, seat, name, chips):
         """\
diff --git a/pyfpdb/OnGameToFpdb.py b/pyfpdb/OnGameToFpdb.py
index ee16eb41..8a68b105 100755
--- a/pyfpdb/OnGameToFpdb.py
+++ b/pyfpdb/OnGameToFpdb.py
@@ -72,6 +72,7 @@ class OnGame(HandHistoryConverter):
         HandHistoryConverter.__init__(self, config, file, sitename="OnGame") # Call super class init.
         self.sitename = "OnGame"
         self.setFileType("text", "cp1252")
+        self.siteId   = 5 # Needs to match id entry in Sites database
         #self.rexx.setGameInfoRegex('.*Blinds \$?(?P[.0-9]+)/\$?(?P[.0-9]+)')
         self.rexx.setSplitHandRegex('\n\n\n+')
         
diff --git a/pyfpdb/PokerStarsToFpdb.py b/pyfpdb/PokerStarsToFpdb.py
index 300c6071..2b4ec6a1 100755
--- a/pyfpdb/PokerStarsToFpdb.py
+++ b/pyfpdb/PokerStarsToFpdb.py
@@ -44,6 +44,7 @@ follow :  whether to tail -f the input"""
         logging.info("Initialising PokerStars converter class")
         self.filetype = "text"
         self.codepage = "cp1252"
+        self.siteId   = 2 # Needs to match id entry in Sites database
         if autostart:
             self.start()
 
diff --git a/pyfpdb/UltimateBetToFpdb.py b/pyfpdb/UltimateBetToFpdb.py
index 6b11d8e6..b57e789e 100755
--- a/pyfpdb/UltimateBetToFpdb.py
+++ b/pyfpdb/UltimateBetToFpdb.py
@@ -42,6 +42,7 @@ follow :  whether to tail -f the input"""
         logging.info("Initialising UltimateBetconverter class")
         self.filetype = "text"
         self.codepage = "cp1252"
+        self.siteId   = 6 # Needs to match id entry in Sites database
         if autostart:
             self.start()
 
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index 8814f104..b45d6cbf 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -205,6 +205,10 @@ class fpdb_db:
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Full Tilt Poker', 'USD');")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'PokerStars', 'USD');")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Everleaf', 'USD');")
+        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Carbon', 'USD');")
+        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'OnGame', 'USD');")
+        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'UltimateBet', 'USD');")
+        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Betfair', 'USD');")
         self.cursor.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
     #end def fillDefaultData
     
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index c6f79d9d..2241feab 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -1390,27 +1390,6 @@ def recognisePlayerIDs(cursor, names, site_id):
 #end def recognisePlayerIDs
 
 
-# Here's a version that would work if it wasn't for the fact that it needs to have the output in the same order as input
-# this version could also be improved upon using list comprehensions, etc
-
-#def recognisePlayerIDs(cursor, names, site_id):
-#    result = []
-#    notfound = []
-#    cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
-#    tmp = dict(cursor.fetchall())
-#    for n in names:
-#        if n not in tmp:
-#            notfound.append(n)
-#        else:
-#            result.append(tmp[n])
-#    if notfound:
-#        cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
-#        cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
-#        tmp = cursor.fetchall()
-#        for n in tmp:
-#            result.append(n[0])
-#        
-#    return result
  
 #recognises the name in the given line and returns its array position in the given array
 def recognisePlayerNo(line, names, atype):
From 3e49e87842b04039d323c6ef8ca0e7dc1a4508b7 Mon Sep 17 00:00:00 2001
From: Worros 
Date: Mon, 1 Jun 2009 20:53:42 +0800
Subject: [PATCH 12/29] Begin moving query functions into the fpdb_db class
No idea whether the code actually works
---
 pyfpdb/Hand.py    |  1 +
 pyfpdb/fpdb_db.py | 20 +++++++++++++++++++-
 2 files changed, 20 insertions(+), 1 deletion(-)
diff --git a/pyfpdb/Hand.py b/pyfpdb/Hand.py
index d45bae97..a51b710a 100644
--- a/pyfpdb/Hand.py
+++ b/pyfpdb/Hand.py
@@ -86,6 +86,7 @@ Should not commit, and do minimal selects. Callers may want to cache commits
 db: a connected fpdb_db object"""
         # TODO:
         # Players - base playerid and siteid tuple
+        sqlids = db.getSqlPlayerIDs([p[1] for p in self.players], self.siteId)
         # HudCache data to come from DerivedStats class
         # HandsActions - all actions for all players for all streets - self.actions
         # BoardCards - ?
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index b45d6cbf..51697708 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -221,4 +221,22 @@ class fpdb_db:
         self.db.commit()
         print "Finished recreating tables"
     #end def recreate_tables
-#end class fpdb_db
+
+    def getSqlPlayerIDs(names, site_id):
+        result = []
+        notfound = []
+        self.cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
+        tmp = dict(self.cursor.fetchall())
+        for n in names:
+            if n not in tmp:
+                notfound.append(n)
+            else:
+                result.append(tmp[n])
+        if notfound:
+            cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
+            cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
+            tmp = cursor.fetchall()
+            for n in tmp:
+                result.append(n[0])
+
+        return result
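
As committed, the new method will not run: it is missing its self parameter, the insert branch still refers to a bare cursor, and executemany expects a sequence of parameter tuples rather than a plain list of names. An untested corrected sketch of the same logic:

    def getSqlPlayerIDs(self, names, site_id):
        result = []
        notfound = []
        self.cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
        tmp = dict(self.cursor.fetchall())
        for n in names:
            if n not in tmp:
                notfound.append(n)
            else:
                result.append(tmp[n])
        if notfound:
            self.cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, " + str(site_id) + ")",
                                    [(n,) for n in notfound])
            self.cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
            for row in self.cursor.fetchall():
                result.append(row[0])
        return result
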
From 44cb8efb2c4e33c06b8cb3a578aa6e76dae01cf8 Mon Sep 17 00:00:00 2001
From: Worros 
Date: Mon, 1 Jun 2009 22:21:40 +0800
Subject: [PATCH 13/29] Add reminder for id caching
---
 pyfpdb/fpdb_db.py | 1 +
 1 file changed, 1 insertion(+)
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index 51697708..ad599b13 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -239,4 +239,5 @@ class fpdb_db:
             for n in tmp:
                 result.append(n[0])
 
+        #We probably want to cache this
         return result
From b016c496f2f5927bb6e7597d8f68edab27df18d9 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Thu, 4 Jun 2009 20:27:15 +0100
Subject: [PATCH 14/29] fix handling of global lock when re-creating tables
---
 pyfpdb/fpdb.py | 62 ++++++++++++++++++++++++++++++++------------------
 1 file changed, 40 insertions(+), 22 deletions(-)
diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py
index 8a26462d..234bb95f 100755
--- a/pyfpdb/fpdb.py
+++ b/pyfpdb/fpdb.py
@@ -175,35 +175,53 @@ class fpdb:
     def dia_load_profile(self, widget, data=None):
         """Dialogue to select a file to load a profile from"""
         if self.obtain_global_lock():
-            chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
-                    action=gtk.FILE_CHOOSER_ACTION_OPEN,
-                    buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
-            chooser.set_filename(self.profile)
+            try:
+                chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
+                        action=gtk.FILE_CHOOSER_ACTION_OPEN,
+                        buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
+                chooser.set_filename(self.profile)
 
-            response = chooser.run()
-            chooser.destroy()    
-            if response == gtk.RESPONSE_OK:
-                self.load_profile(chooser.get_filename())
-            elif response == gtk.RESPONSE_CANCEL:
-                print 'User cancelled loading profile'
+                response = chooser.run()
+                chooser.destroy()    
+                if response == gtk.RESPONSE_OK:
+                    self.load_profile(chooser.get_filename())
+                elif response == gtk.RESPONSE_CANCEL:
+                    print 'User cancelled loading profile'
+            except:
+                pass
+            self.release_global_lock()
     #end def dia_load_profile
 
     def dia_recreate_tables(self, widget, data=None):
         """Dialogue that asks user to confirm that he wants to delete and recreate the tables"""
         if self.obtain_global_lock():
-        
-            dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING,
-                    buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables")
-            diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted."
-            dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted
 
-            response = dia_confirm.run()
-            dia_confirm.destroy()
-            if response == gtk.RESPONSE_YES:
-                self.db.recreate_tables()
-            elif response == gtk.RESPONSE_NO:
-                print 'User cancelled recreating tables'
-            self.release_global_lock()
+            lock_released = False
+            try:
+                dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING,
+                        buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables")
+                diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted."
+                dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted
+
+                response = dia_confirm.run()
+                dia_confirm.destroy()
+                if response == gtk.RESPONSE_YES:
+                    if self.db.backend == self.fdb_lock.MYSQL_INNODB:
+                        # mysql requires locks on all tables or none - easier to release this lock 
+                        # than lock all the other tables
+                        # ToDo: lock all other tables so that lock doesn't have to be released
+                        self.release_global_lock()
+                        lock_released = True
+                        self.db.recreate_tables()
+                    else:
+                        # for other dbs use same connection as holds global lock
+                        self.fdb_lock.recreate_tables()
+                elif response == gtk.RESPONSE_NO:
+                    print 'User cancelled recreating tables'
+            except:
+                pass
+            if not lock_released:
+                self.release_global_lock()
     #end def dia_recreate_tables
 
     def dia_regression_test(self, widget, data=None):
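
For context, the lock_released flag exists because the lock must be given back on every path - on cancel, on exception, and, for MySQL, before recreate_tables() runs, since a session that has issued LOCK TABLES may only touch the tables it locked. A reduced sketch of the same discipline with try/finally (hypothetical helper, ignoring the MySQL early-release special case):

    def recreate_with_lock(app):
        # app: the fpdb gui object; hypothetical helper, not fpdb code
        if not app.obtain_global_lock():
            return
        try:
            app.db.recreate_tables()
        finally:
            app.release_global_lock()
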
From a9055049fea9d791a931a000cde39d503cf826f9 Mon Sep 17 00:00:00 2001
From: Worros 
Date: Fri, 5 Jun 2009 23:13:41 +0800
Subject: [PATCH 15/29] Fix HUD query under linux.
MySQL table names are case sensitive on Linux.
---
 pyfpdb/SQL.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/pyfpdb/SQL.py b/pyfpdb/SQL.py
index 18d7d116..9bad1918 100644
--- a/pyfpdb/SQL.py
+++ b/pyfpdb/SQL.py
@@ -564,13 +564,13 @@ class Sql:
             if db_server == 'mysql':
                 self.query['get_hand_1day_ago'] = """
                     select coalesce(max(id),0)
-                    from hands
-                    where handstart < date_sub(utc_timestamp(), interval '1' day)"""
+                    from Hands
+                    where handStart < date_sub(utc_timestamp(), interval '1' day)"""
             else:  # assume postgresql
                 self.query['get_hand_1day_ago'] = """
                     select coalesce(max(id),0)
-                    from hands
-                    where handstart < now() at time zone 'UTC' - interval '1 day'"""
+                    from Hands
+                    where handStart < now() at time zone 'UTC' - interval '1 day'"""
 
 if __name__== "__main__":
 #    just print the default queries and exit
From 56f211240380828d25edf5c1622047593e19a89b Mon Sep 17 00:00:00 2001
From: Worros 
Date: Sat, 6 Jun 2009 19:26:33 +0800
Subject: [PATCH 16/29] Add a few temporary comments
---
 pyfpdb/Hand.py | 75 ++++++++++++++++++++++++++++++++++----------------
 1 file changed, 51 insertions(+), 24 deletions(-)
diff --git a/pyfpdb/Hand.py b/pyfpdb/Hand.py
index a51b710a..879dad85 100644
--- a/pyfpdb/Hand.py
+++ b/pyfpdb/Hand.py
@@ -36,6 +36,7 @@ class Hand:
         self.sitename = sitename
         self.stats = DerivedStats.DerivedStats(self)
         self.gametype = gametype
+        self.starttime = 0
         self.handText = handText
         self.handid = 0
         self.tablename = "Slartibartfast"
@@ -89,8 +90,57 @@ db: a connected fpdb_db object"""
         sqlids = db.getSqlPlayerIDs([p[1] for p in self.players], self.siteId)
         # HudCache data to come from DerivedStats class
         # HandsActions - all actions for all players for all streets - self.actions
-        # BoardCards - ?
+        # BoardCards - Skip - no longer necessary?
         # Hands - Summary information of hand indexed by handId - gameinfo
+             # self.tablename = tableName
+             # self.handid = siteHandNo
+             # gametypeId SMALLINT UNSIGNED NOT NULL, FOREIGN KEY (gametypeId) REFERENCES Gametypes(id),
+                #
+             # self.starttime = handStart
+             # importTime DATETIME NOT NULL,
+                #
+             # seats TINYINT NOT NULL,
+                #
+             # self.maxseats = maxSeats
+             # boardcard1 smallint,  /* 0=none, 1-13=2-Ah 14-26=2-Ad 27-39=2-Ac 40-52=2-As */
+             # boardcard2 smallint,
+             # boardcard3 smallint,
+             # boardcard4 smallint,
+             # boardcard5 smallint,
+             # texture smallint,
+             # playersVpi SMALLINT NOT NULL,         /* num of players vpi */
+                # Needs to be recorded
+             # playersAtStreet1 SMALLINT NOT NULL,   /* num of players seeing flop/street4 */
+                # Needs to be recorded
+             # playersAtStreet2 SMALLINT NOT NULL,
+                # Needs to be recorded
+             # playersAtStreet3 SMALLINT NOT NULL,
+                # Needs to be recorded
+             # playersAtStreet4 SMALLINT NOT NULL,
+                # Needs to be recorded
+             # playersAtShowdown SMALLINT NOT NULL,
+                # Needs to be recorded
+             # street0Raises TINYINT NOT NULL, /* num small bets paid to see flop/street4, including blind */
+                # Needs to be recorded
+             # street1Raises TINYINT NOT NULL, /* num small bets paid to see turn/street5 */
+                # Needs to be recorded
+             # street2Raises TINYINT NOT NULL, /* num big bets paid to see river/street6 */
+                # Needs to be recorded
+             # street3Raises TINYINT NOT NULL, /* num big bets paid to see sd/street7 */
+                # Needs to be recorded
+             # street4Raises TINYINT NOT NULL, /* num big bets paid to see showdown */
+                # Needs to be recorded
+             # street1Pot INT,                  /* pot size at flop/street4 */
+                # Needs to be recorded
+             # street2Pot INT,                  /* pot size at turn/street5 */
+                # Needs to be recorded
+             # street3Pot INT,                  /* pot size at river/street6 */
+                # Needs to be recorded
+             # street4Pot INT,                  /* pot size at sd/street7 */
+                # Needs to be recorded
+             # showdownPot INT,                 /* pot size at sd/street7 */
+             # comment TEXT,
+             # commentTs DATETIME
         # HandsPlayers - ? ... Do we fix winnings?
         # Tourneys ?
         # TourneysPlayers
@@ -101,29 +151,6 @@ db: a connected fpdb_db object"""
         """ Function to create Hand object from database """
         pass
 
-# Get SQL player IDs from database
-# this version could also be improved upon using list comprehensions, etc
-
-#def recognisePlayerIDs(cursor, names, site_id):
-#    result = []
-#    notfound = []
-#    cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
-#    tmp = dict(cursor.fetchall())
-#    for n in names:
-#        if n not in tmp:
-#            notfound.append(n)
-#        else:
-#            result.append(tmp[n])
-#    if notfound:
-#        cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
-#        cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
-#        tmp = cursor.fetchall()
-#        for n in tmp:
-#            result.append(n[0])
-#        
-#    return result
-
-
 
     def addPlayer(self, seat, name, chips):
         """\
From 45a303eb25afc1c311a441089e6043a1028224b0 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 7 Jun 2009 20:07:18 +0100
Subject: [PATCH 17/29] make global lock work (do nothing) if hands table
 doesn't exist
---
 pyfpdb/fpdb.py        |  7 +++--
 pyfpdb/fpdb_db.py     | 59 +++++++++++++++++++++++++------------------
 pyfpdb/fpdb_simple.py | 47 +++++++++++++++++++---------------
 3 files changed, 63 insertions(+), 50 deletions(-)
diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py
index 645ddef5..8d514b90 100755
--- a/pyfpdb/fpdb.py
+++ b/pyfpdb/fpdb.py
@@ -181,7 +181,7 @@ class fpdb:
 
     def dia_load_profile(self, widget, data=None):
         """Dialogue to select a file to load a profile from"""
-        if self.obtain_global_lock():
+        if self.obtain_global_lock() == 0:  # returns 0 if successful
             try:
                 chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
                         action=gtk.FILE_CHOOSER_ACTION_OPEN,
@@ -201,7 +201,7 @@ class fpdb:
 
     def dia_recreate_tables(self, widget, data=None):
         """Dialogue that asks user to confirm that he wants to delete and recreate the tables"""
-        if self.obtain_global_lock():
+        if self.obtain_global_lock() in (0,2):  # returns 0 if successful, 2 if Hands table does not exist
 
             lock_released = False
             try:
@@ -406,7 +406,7 @@ class fpdb:
                               self.settings['db-databaseName'],
                               self.settings['db-user'], 
                               self.settings['db-password'])
-        return fpdb_simple.get_global_lock(self.fdb_lock)
+        return self.fdb_lock.get_global_lock()
     #end def obtain_global_lock
 
     def quit(self, widget):
@@ -455,7 +455,6 @@ class fpdb:
         ps_tab=new_ps_thread.get_vbox()
         self.add_and_display_tab(ps_tab, "Positional Stats")
 
-
     def tab_main_help(self, widget, data=None):
         """Displays a tab with the main fpdb help screen"""
         #print "start of tab_main_help"
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index ad599b13..dd7f5afe 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -17,6 +17,8 @@
 
 import os
 import re
+import sys
+
 import fpdb_simple
 import FpdbSQLQueries
 
@@ -96,7 +98,7 @@ class fpdb_db:
         try:
             self.cursor.execute("SELECT * FROM Settings")
             settings=self.cursor.fetchone()
-            if settings[0]!=119:
+            if settings[0]!=118:
                 print "outdated or too new database version - please recreate tables"
                 self.wrongDbVersion=True
         except:# _mysql_exceptions.ProgrammingError:
@@ -201,14 +203,10 @@ class fpdb_db:
     #end def get_db_info
     
     def fillDefaultData(self):
-        self.cursor.execute("INSERT INTO Settings VALUES (119);")
+        self.cursor.execute("INSERT INTO Settings VALUES (118);")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Full Tilt Poker', 'USD');")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'PokerStars', 'USD');")
         self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Everleaf', 'USD');")
-        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Carbon', 'USD');")
-        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'OnGame', 'USD');")
-        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'UltimateBet', 'USD');")
-        self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Betfair', 'USD');")
         self.cursor.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
     #end def fillDefaultData
     
@@ -222,22 +220,33 @@ class fpdb_db:
         print "Finished recreating tables"
     #end def recreate_tables
 
-    def getSqlPlayerIDs(names, site_id):
-        result = []
-        notfound = []
-        self.cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
-        tmp = dict(self.cursor.fetchall())
-        for n in names:
-            if n not in tmp:
-                notfound.append(n)
-            else:
-                result.append(tmp[n])
-        if notfound:
-            cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
-            cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
-            tmp = cursor.fetchall()
-            for n in tmp:
-                result.append(n[0])
-
-        #We proabably want to cache this
-        return result
+    # Currently uses an exclusive lock on the Hands table as a global lock
+    # Return values are Unix style, 0 for success, positive integers for errors
+    # 1 = generic error
+    # 2 = hands table does not exist (error message is suppressed)
+    def get_global_lock(self):
+        if self.backend == self.MYSQL_INNODB:
+            try:
+                self.cursor.execute( "lock tables Hands write" )
+            except:
+                # Table 'fpdb.hands' doesn't exist
+                if str(sys.exc_value).find(".hands' doesn't exist") >= 0:
+                    return(2)
+                print "Error! failed to obtain global lock. Close all programs accessing " \
+                      + "database (including fpdb) and try again (%s)." \
+                      % ( str(sys.exc_value).rstrip('\n'), )
+                return(1)
+        elif self.backend == self.PGSQL:
+            try:
+                self.cursor.execute( "lock table Hands in exclusive mode nowait" )
+                #print "... after lock table, status =", self.cursor.statusmessage
+            except:
+                # relation "hands" does not exist
+                if str(sys.exc_value).find('relation "hands" does not exist') >= 0:
+                    return(2)
+                print "Error! failed to obtain global lock. Close all programs accessing " \
+                      + "database (including fpdb) and try again (%s)." \
+                      % ( str(sys.exc_value).rstrip('\n'), )
+                return(1)
+        return(0) 
+#end class fpdb_db
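With the lock moved onto the fpdb_db class, callers branch on the integer result rather than a boolean, as the fpdb.py hunks above do. A minimal sketch of a hypothetical caller (the wrapper function and its name are illustrative only, not part of the patch):

    def recreate_if_possible(fdb):
        # fdb is a connected fpdb_db instance
        status = fdb.get_global_lock()   # 0 = locked, 1 = lock failed, 2 = Hands table missing
        if status in (0, 2):             # 2 is acceptable here: there is nothing to protect yet
            fdb.recreate_tables()
        else:
            print "could not obtain global lock - close other fpdb programs and retry"
        fdb.db.rollback()                # end the transaction (releases the Postgres table lock)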
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index 2241feab..58eb9881 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -16,6 +16,10 @@
 #agpl-3.0.txt in the docs folder of the package.
  
 #This file contains simple functions for fpdb
+
+#Aiming to eventually remove this module; functions will move to, e.g.:
+#fpdb_db      db create/re-create/management/etc
+#Hands        or related files for saving hands to db, etc
  
 import datetime
 import time
@@ -28,6 +32,7 @@ PS  = 1
 FTP = 2
 
 # TODO: these constants are also used in fpdb_save_to_db and others, is there a way to do like C #define, and #include ?
+# answer - yes. These are defined in fpdb_db so are accessible through that class.
 MYSQL_INNODB    = 2
 PGSQL           = 3
 SQLITE          = 4
@@ -367,27 +372,6 @@ def analyzeDB(fdb):
     fdb.db.commit()
 #end def analyzeDB
 
-def get_global_lock(fdb):
-    if fdb.backend == MYSQL_INNODB:
-        try:
-            fdb.cursor.execute( "lock tables Hands write" )
-        except:
-            print "Error! failed to obtain global lock. Close all programs accessing " \
-                  + "database (including fpdb) and try again (%s)." \
-                  % ( str(sys.exc_value).rstrip('\n'), )
-            return(False)
-    elif fdb.backend == PGSQL:
-        try:
-            fdb.cursor.execute( "lock table Hands in exclusive mode nowait" )
-            #print "... after lock table, status =", fdb.cursor.statusmessage
-        except:
-            print "Error! failed to obtain global lock. Close all programs accessing " \
-                  + "database (including fpdb) and try again (%s)." \
-                  % ( str(sys.exc_value).rstrip('\n'), )
-            return(False)
-    return(True) 
-
-
 class DuplicateError(Exception):
     def __init__(self, value):
         self.value = value
@@ -1390,6 +1374,27 @@ def recognisePlayerIDs(cursor, names, site_id):
 #end def recognisePlayerIDs
 
 
+# Here's a version that would work if it weren't for the fact that the output needs to come back in the same order as the input
+# This version could also be improved using list comprehensions, etc.
+
+#def recognisePlayerIDs(cursor, names, site_id):
+#    result = []
+#    notfound = []
+#    cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
+#    tmp = dict(cursor.fetchall())
+#    for n in names:
+#        if n not in tmp:
+#            notfound.append(n)
+#        else:
+#            result.append(tmp[n])
+#    if notfound:
+#        cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
+#        cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
+#        tmp = cursor.fetchall()
+#        for n in tmp:
+#            result.append(n[0])
+#        
+#    return result
  
 #recognises the name in the given line and returns its array position in the given array
 def recognisePlayerNo(line, names, atype):
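The order limitation noted above (results must line up with the input list) can be avoided by building the result through a keyed lookup instead of appending in fetch order. A rough sketch along those lines, using standard DB-API parameter lists for the insert; it is not part of the patch and is untested:

    def recognisePlayerIDs_ordered(cursor, names, site_id):
        # look up the names we already know
        cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
        ids = dict(cursor.fetchall())
        notfound = [n for n in names if n not in ids]
        if notfound:
            # insert the missing players, then fetch their newly assigned ids
            cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, %s)",
                               [(n, site_id) for n in notfound])
            cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
            ids.update(dict(cursor.fetchall()))
        return [ids[n] for n in names]   # same order as the input list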
From 7e8b80948cc9b7f0628bff720d9017259255d735 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 7 Jun 2009 20:45:09 +0100
Subject: [PATCH 18/29] move index etc functions from fpdb_simple.py into
 fpdb_db.py
---
 pyfpdb/fpdb_db.py     | 334 +++++++++++++++++++++++++++++++++++++++++-
 pyfpdb/fpdb_import.py |  28 +++-
 pyfpdb/fpdb_simple.py | 332 -----------------------------------------
 3 files changed, 355 insertions(+), 339 deletions(-)
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index dd7f5afe..474d694b 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -31,6 +31,110 @@ class fpdb_db:
         self.MYSQL_INNODB   = 2
         self.PGSQL          = 3
         self.SQLITE         = 4
+
+        # Data Structures for index and foreign key creation
+        # drop_code is an int with possible values:  0 - don't drop for bulk import
+        #                                            1 - drop during bulk import
+        # db differences: 
+        # - note that mysql automatically creates indexes on constrained columns when
+        #   foreign keys are created, while postgres does not. Hence the much longer list
+        #   of indexes is required for postgres.
+        # all primary keys are left on all the time
+        #
+        #             table     column           drop_code
+
+        self.indexes = [
+                         [ ] # no db with index 0
+                       , [ ] # no db with index 1
+                       , [ # indexes for mysql (list index 2)
+                           {'tab':'Players',  'col':'name',          'drop':0}
+                         , {'tab':'Hands',    'col':'siteHandNo',    'drop':0}
+                         , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
+                         ]
+                       , [ # indexes for postgres (list index 3)
+                           {'tab':'Boardcards',      'col':'handId',            'drop':0}
+                         , {'tab':'Gametypes',       'col':'siteId',            'drop':0}
+                         , {'tab':'Hands',           'col':'gametypeId',        'drop':0} # mct 22/3/09
+                         , {'tab':'Hands',           'col':'siteHandNo',        'drop':0}
+                         , {'tab':'HandsActions',    'col':'handsPlayerId',     'drop':0}
+                         , {'tab':'HandsPlayers',    'col':'handId',            'drop':1}
+                         , {'tab':'HandsPlayers',    'col':'playerId',          'drop':1}
+                         , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
+                         , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
+                         , {'tab':'HudCache',        'col':'playerId',          'drop':0}
+                         , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
+                         , {'tab':'Players',         'col':'siteId',            'drop':1}
+                         , {'tab':'Players',         'col':'name',              'drop':0}
+                         , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
+                         , {'tab':'Tourneys',        'col':'siteTourneyNo',     'drop':0}
+                         , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
+                         , {'tab':'TourneysPlayers', 'col':'tourneyId',         'drop':0}
+                         , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
+                         ]
+                       ]
+
+        self.foreignKeys = [
+                             [ ] # no db with index 0
+                           , [ ] # no db with index 1
+                           , [ # foreign keys for mysql
+                               {'fktab':'Hands',        'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+                             , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',         'rcol':'id', 'drop':1}
+                             , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':1}
+                             , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers',  'rcol':'id', 'drop':1}
+                             , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+                             , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':0}
+                             , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes',  'rcol':'id', 'drop':1}
+                             ]
+                           , [ # foreign keys for postgres
+                               {'fktab':'Hands',        'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+                             , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',         'rcol':'id', 'drop':1}
+                             , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':1}
+                             , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers',  'rcol':'id', 'drop':1}
+                             , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+                             , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':0}
+                             , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes',  'rcol':'id', 'drop':1}
+                             ]
+                           ]
+
+
+        # MySQL Notes:
+        #    "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
+        #                                                - creates index handId on .handId
+        # alter table t drop foreign key fk
+        # alter table t add foreign key (fkcol) references tab(rcol)
+        # alter table t add constraint c foreign key (fkcol) references tab(rcol)
+        # (fkcol is used for foreign key name)
+
+        # mysql to list indexes:
+        #   SELECT table_name, index_name, non_unique, column_name 
+        #   FROM INFORMATION_SCHEMA.STATISTICS
+        #     WHERE table_name = 'tbl_name'
+        #     AND table_schema = 'db_name'
+        #   ORDER BY table_name, index_name, seq_in_index
+        #
+        # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
+        # ALTER TABLE tab DROP INDEX idx
+
+        # mysql to list fks:
+        #   SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
+        #   FROM information_schema.KEY_COLUMN_USAGE
+        #   WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
+        #   AND REFERENCED_TABLE_NAME is not null
+        #   ORDER BY TABLE_NAME, COLUMN_NAME;
+
+        # this may indicate missing object
+        # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
+
+
+        # PG notes:
+
+        #  To add a foreign key constraint to a table:
+        #  ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
+        #  ALTER TABLE tab DROP CONSTRAINT zipchk
+        #
+        #  Note: index names must be unique across a schema
+        #  CREATE INDEX idx ON tab(col)
+        #  DROP INDEX idx
     #end def __init__
 
     def do_connect(self, config=None):
@@ -215,11 +319,239 @@ class fpdb_db:
         
         self.drop_tables()
         self.create_tables()
-        fpdb_simple.createAllIndexes(self)
+        self.createAllIndexes()
         self.db.commit()
         print "Finished recreating tables"
     #end def recreate_tables
 
+    def prepareBulkImport(self):
+        """Drop some indexes/foreign keys to prepare for bulk import. 
+           Currently keeping the standalone indexes as needed to import quickly"""
+        # self is a fpdb_db object including backend, db, cursor, sql variables
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(0)   # allow table/index operations to work
+        for fk in self.foreignKeys[self.backend]:
+            if fk['drop'] == 1:
+                if self.backend == self.MYSQL_INNODB:
+                    self.cursor.execute("SELECT constraint_name " +
+                                       "FROM information_schema.KEY_COLUMN_USAGE " +
+                                       #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+                                       "WHERE 1=1 " +
+                                       "AND table_name = %s AND column_name = %s " + 
+                                       "AND referenced_table_name = %s " +
+                                       "AND referenced_column_name = %s ",
+                                       (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+                    cons = self.cursor.fetchone()
+                    #print "preparebulk: cons=", cons
+                    if cons:
+                        print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
+                        try:
+                            self.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
+                        except:
+                            pass
+                elif self.backend == self.PGSQL:
+    #    DON'T FORGET TO RECREATE THEM!!
+                    print "dropping pg fk", fk['fktab'], fk['fkcol']
+                    try:
+                        # try to lock table to see if index drop will work:
+                        # hmmm, tested by commenting out rollback in grapher. lock seems to work but 
+                        # then drop still hangs :-(  does work in some tests though??
+                        # will leave code here for now pending further tests/enhancement ...
+                        self.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
+                        #print "after lock, status:", self.cursor.statusmessage
+                        #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
+                        try:
+                            self.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
+                            print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
+                        except:
+                            if "does not exist" not in str(sys.exc_value):
+                                print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
+                                      % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
+                    except:
+                        print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
+                              % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
+                else:
+                    print "Only MySQL and Postgres supported so far"
+                    return -1
+        
+        for idx in self.indexes[self.backend]:
+            if idx['drop'] == 1:
+                if self.backend == self.MYSQL_INNODB:
+                    print "dropping mysql index ", idx['tab'], idx['col']
+                    try:
+                        # apparently nowait is not implemented in mysql so this just hangs if there are locks 
+                        # preventing the index drop :-(
+                        self.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
+                    except:
+                        pass
+                elif self.backend == self.PGSQL:
+    #    DON'T FORGET TO RECREATE THEM!!
+                    print "dropping pg index ", idx['tab'], idx['col']
+                    try:
+                        # try to lock table to see if index drop will work:
+                        self.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
+                        #print "after lock, status:", self.cursor.statusmessage
+                        try:
+                            # table locked ok so index drop should work:
+                            #print "drop index %s_%s_idx" % (idx['tab'],idx['col']) 
+                            self.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
+                            #print "dropped  pg index ", idx['tab'], idx['col']
+                        except:
+                            if "does not exist" not in str(sys.exc_value):
+                                print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
+                                      % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n')) 
+                    except:
+                        print "warning: index %s_%s_idx not dropped %s, continuing ..." \
+                              % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
+                else:
+                    print "Error: Only MySQL and Postgres supported so far"
+                    return -1
+
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(1)   # go back to normal isolation level
+        self.db.commit() # seems to clear up errors if there were any in postgres
+    #end def prepareBulkImport
+
+    def afterBulkImport(self):
+        """Re-create any dropped indexes/foreign keys after bulk import"""
+        # self is a fpdb_db object including backend, db, cursor, sql variables
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(0)   # allow table/index operations to work
+        for fk in self.foreignKeys[self.backend]:
+            if fk['drop'] == 1:
+                if self.backend == self.MYSQL_INNODB:
+                    self.cursor.execute("SELECT constraint_name " +
+                                       "FROM information_schema.KEY_COLUMN_USAGE " +
+                                       #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+                                       "WHERE 1=1 " +
+                                       "AND table_name = %s AND column_name = %s " + 
+                                       "AND referenced_table_name = %s " +
+                                       "AND referenced_column_name = %s ",
+                                       (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+                    cons = self.cursor.fetchone()
+                    print "afterbulk: cons=", cons
+                    if cons:
+                        pass
+                    else:
+                        print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+                        try:
+                            self.cursor.execute("alter table " + fk['fktab'] + " add foreign key (" 
+                                               + fk['fkcol'] + ") references " + fk['rtab'] + "(" 
+                                               + fk['rcol'] + ")")
+                        except:
+                            pass
+                elif self.backend == self.PGSQL:
+                    print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+                    try:
+                        self.cursor.execute("alter table " + fk['fktab'] + " add constraint "
+                                           + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
+                                           + " foreign key (" + fk['fkcol']
+                                           + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
+                    except:
+                        pass
+                else:
+                    print "Only MySQL and Postgres supported so far"
+                    return -1
+        
+        for idx in self.indexes[self.backend]:
+            if idx['drop'] == 1:
+                if self.backend == self.MYSQL_INNODB:
+                    print "creating mysql index ", idx['tab'], idx['col']
+                    try:
+                        self.cursor.execute( "alter table %s add index %s(%s)"
+                                          , (idx['tab'],idx['col'],idx['col']) )
+                    except:
+                        pass
+                elif self.backend == self.PGSQL:
+    #                pass
+                    # mod to use tab_col for index name?
+                    print "creating pg index ", idx['tab'], idx['col']
+                    try:
+                        print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+                        self.cursor.execute( "create index %s_%s_idx on %s(%s)"
+                                          % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+                    except:
+                        print "   ERROR! :-("
+                        pass
+                else:
+                    print "Only MySQL and Postgres supported so far"
+                    return -1
+
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(1)   # go back to normal isolation level
+        self.db.commit()   # seems to clear up errors if there were any in postgres
+    #end def afterBulkImport
+
+    def createAllIndexes(self):
+        """Create new indexes"""
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(0)   # allow table/index operations to work
+        for idx in self.indexes[self.backend]:
+            if self.backend == self.MYSQL_INNODB:
+                print "creating mysql index ", idx['tab'], idx['col']
+                try:
+                    self.cursor.execute( "alter table %s add index %s(%s)"
+                                      , (idx['tab'],idx['col'],idx['col']) )
+                except:
+                    pass
+            elif self.backend == self.PGSQL:
+                # mod to use tab_col for index name?
+                print "creating pg index ", idx['tab'], idx['col']
+                try:
+                    print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+                    self.cursor.execute( "create index %s_%s_idx on %s(%s)"
+                                      % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+                except:
+                    print "   ERROR! :-("
+                    pass
+            else:
+                print "Only MySQL and Postgres supported so far"
+                return -1
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(1)   # go back to normal isolation level
+    #end def createAllIndexes
+
+    def dropAllIndexes(self):
+        """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
+           using list of indexes in indexes data structure"""
+        # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(0)   # allow table/index operations to work
+        for idx in self.indexes[self.backend]:
+            if self.backend == self.MYSQL_INNODB:
+                print "dropping mysql index ", idx['tab'], idx['col']
+                try:
+                    self.cursor.execute( "alter table %s drop index %s"
+                                      , (idx['tab'],idx['col']) )
+                except:
+                    pass
+            elif self.backend == self.PGSQL:
+                print "dropping pg index ", idx['tab'], idx['col']
+                # mod to use tab_col for index name?
+                try:
+                    self.cursor.execute( "drop index %s_%s_idx"
+                                      % (idx['tab'],idx['col']) )
+                except:
+                    pass
+            else:
+                print "Only MySQL and Postgres supported so far"
+                return -1
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(1)   # go back to normal isolation level
+    #end def dropAllIndexes
+
+    def analyzeDB(self):
+        """Do whatever the DB can offer to update index/table statistics"""
+        if self.backend == self.PGSQL:
+            self.db.set_isolation_level(0)   # allow vacuum to work
+            try:
+                self.cursor.execute("vacuum analyze")
+            except:
+                print "Error during vacuum"
+            self.db.set_isolation_level(1)   # go back to normal isolation level
+        self.db.commit()
+    #end def analyzeDB
+
     # Currently uses an exclusive lock on the Hands table as a global lock
     # Return values are Unix style, 0 for success, positive integers for errors
     # 1 = generic error
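Since the index and foreign-key lists are positional (element 2 for MySQL, element 3 for Postgres), the backend constant itself is the lookup key. A small illustration of how the lists are meant to be read (the config object and printed values are assumptions, not output of the patch):

    fdb = fpdb_db()
    fdb.do_connect(config)                # config object assumed to exist
    for idx in fdb.indexes[fdb.backend]:  # backend is 2 (MYSQL_INNODB) or 3 (PGSQL)
        if idx['drop']:                   # drop == 1: dropped and recreated around bulk imports
            print "bulk import drops", idx['tab'] + "." + idx['col']
    # Postgres index names are derived as <table>_<column>_idx,
    # e.g. HandsPlayers_handId_idx, so creates and drops can be paired up.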
diff --git a/pyfpdb/fpdb_import.py b/pyfpdb/fpdb_import.py
index 2dbb4807..8c931a49 100644
--- a/pyfpdb/fpdb_import.py
+++ b/pyfpdb/fpdb_import.py
@@ -150,7 +150,9 @@ class Importer:
                 self.monitor = True
                 self.dirlist[site] = [dir] + [filter]
 
+            #print "addImportDirectory: checking files in", dir
             for file in os.listdir(dir):
+                #print "                    adding file ", file
                 self.addImportFile(os.path.join(dir, file), site, filter)
         else:
             print "Warning: Attempted to add non-directory: '" + str(dir) + "' as an import directory"
@@ -162,7 +164,7 @@ class Importer:
         if self.settings['dropIndexes'] == 'auto':
             self.settings['dropIndexes'] = self.calculate_auto()
         if self.settings['dropIndexes'] == 'drop':
-            fpdb_simple.prepareBulkImport(self.fdb)
+            self.fdb.prepareBulkImport()
         totstored = 0
         totdups = 0
         totpartial = 0
@@ -177,8 +179,8 @@ class Importer:
             toterrors += errors
             tottime += ttime
         if self.settings['dropIndexes'] == 'drop':
-            fpdb_simple.afterBulkImport(self.fdb)
-        fpdb_simple.analyzeDB(self.fdb)
+            self.fdb.afterBulkImport()
+        self.fdb.analyzeDB(self.fdb)
         return (totstored, totdups, totpartial, toterrors, tottime)
 #        else: import threaded
 
@@ -203,14 +205,18 @@ class Importer:
         #todo: make efficient - always checks for new file, should be able to use mtime of directory
         # ^^ May not work on windows
         
+        #rulog = open('runUpdated.txt', 'a')
+        #rulog.writelines("runUpdated ... ")
         for site in self.dirlist:
             self.addImportDirectory(self.dirlist[site][0], False, site, self.dirlist[site][1])
 
         for file in self.filelist:
             if os.path.exists(file):
                 stat_info = os.stat(file)
+                #rulog.writelines("path exists ")
                 try: 
                     lastupdate = self.updated[file]
+                    #rulog.writelines("lastupdate = %d, mtime = %d" % (lastupdate,stat_info.st_mtime))
                     if stat_info.st_mtime > lastupdate:
                         self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
                         self.updated[file] = time()
@@ -236,7 +242,8 @@ class Importer:
         self.addToDirList = {}
         self.removeFromFileList = {}
         self.fdb.db.rollback()
-
+        #rulog.writelines("  finished\n")
+        #rulog.close()
 
     # This is now an internal function that should not be called directly.
     def import_file_dict(self, file, site, filter):
@@ -282,6 +289,7 @@ class Importer:
         starttime = time()
         last_read_hand = 0
         loc = 0
+        #print "file =", file
         if file == "stdin":
             inputFile = sys.stdin
         else:
@@ -292,10 +300,17 @@ class Importer:
                 return (0, 0, 0, 1, 0)
             try:
                 loc = self.pos_in_file[file]
+                #size = os.path.getsize(file)
+                #print "loc =", loc, 'size =', size
             except:
                 pass
         # Read input file into class and close file
         inputFile.seek(loc)
+        #tmplines = inputFile.readlines()
+        #if tmplines == None or tmplines == []:
+        #    print "tmplines = ", tmplines
+        #else:
+        #    print "tmplines[0] =", tmplines[0]
         self.lines = fpdb_simple.removeTrailingEOL(inputFile.readlines())
         self.pos_in_file[file] = inputFile.tell()
         inputFile.close()
@@ -303,7 +318,8 @@ class Importer:
         try: # sometimes we seem to be getting an empty self.lines, in which case, we just want to return.
             firstline = self.lines[0]
         except:
-            print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc)
+            # just skip the debug message and return silently:
+            #print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc)
             return (0,0,0,1,0)
 
         if firstline.find("Tournament Summary")!=-1:
@@ -348,6 +364,7 @@ class Importer:
                         if self.callHud:
                             #print "call to HUD here. handsId:",handsId
                             #pipe the Hands.id out to the HUD
+                            print "sending hand to hud", handsId, "pipe =", self.caller.pipe_to_hud
                             self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
                     except fpdb_simple.DuplicateError:
                         duplicates += 1
@@ -364,7 +381,6 @@ class Importer:
                     except (fpdb_simple.FpdbError), fe:
                         errors += 1
                         self.printEmailErrorMessage(errors, file, hand)
-
                         self.fdb.db.rollback()
 
                         if self.settings['failOnError']:
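With the helpers on fpdb_db, the bulk-import path boils down to a bracket of prepare/after calls around the per-file imports, roughly as below (the settings dict, files_to_import and import_one_file are placeholders standing in for the Importer internals):

    if settings['dropIndexes'] == 'drop':
        fdb.prepareBulkImport()          # drop droppable foreign keys/indexes first
    for f in files_to_import:
        import_one_file(fdb, f)          # stands in for import_file_dict()
    if settings['dropIndexes'] == 'drop':
        fdb.afterBulkImport()            # recreate whatever was dropped
    fdb.analyzeDB()                      # refresh planner statistics (vacuum analyze on Postgres)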
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index 58eb9881..72b7b656 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -40,338 +40,6 @@ SQLITE          = 4
 # config while trying out new hudcache mechanism
 use_date_in_hudcache = True
 
-# Data Structures for index and foreign key creation
-# drop_code is an int with possible values:  0 - don't drop for bulk import
-#                                            1 - drop during bulk import
-# db differences: 
-# - note that mysql automatically creates indexes on constrained columns when
-#   foreign keys are created, while postgres does not. Hence the much longer list
-#   of indexes is required for postgres.
-# all primary keys are left on all the time
-#
-#             table     column           drop_code
-
-indexes = [
-            [ ] # no db with index 0
-          , [ ] # no db with index 1
-          , [ # indexes for mysql (list index 2)
-              {'tab':'Players',  'col':'name',          'drop':0}
-            , {'tab':'Hands',    'col':'siteHandNo',    'drop':0}
-            , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
-            ]
-          , [ # indexes for postgres (list index 3)
-              {'tab':'Boardcards',      'col':'handId',            'drop':0}
-            , {'tab':'Gametypes',       'col':'siteId',            'drop':0}
-            , {'tab':'Hands',           'col':'gametypeId',        'drop':0} # mct 22/3/09
-            , {'tab':'Hands',           'col':'siteHandNo',        'drop':0}
-            , {'tab':'HandsActions',    'col':'handsPlayerId',     'drop':0}
-            , {'tab':'HandsPlayers',    'col':'handId',            'drop':1}
-            , {'tab':'HandsPlayers',    'col':'playerId',          'drop':1}
-            , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
-            , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
-            , {'tab':'HudCache',        'col':'playerId',          'drop':0}
-            , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
-            , {'tab':'Players',         'col':'siteId',            'drop':1}
-            , {'tab':'Players',         'col':'name',              'drop':0}
-            , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
-            , {'tab':'Tourneys',        'col':'siteTourneyNo',     'drop':0}
-            , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
-            , {'tab':'TourneysPlayers', 'col':'tourneyId',         'drop':0}
-            , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
-            ]
-          ]
-
-foreignKeys = [
-                [ ] # no db with index 0
-              , [ ] # no db with index 1
-              , [ # foreign keys for mysql
-                  {'fktab':'Hands',        'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
-                , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',         'rcol':'id', 'drop':1}
-                , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':1}
-                , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers',  'rcol':'id', 'drop':1}
-                , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
-                , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':0}
-                , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes',  'rcol':'id', 'drop':1}
-                ]
-              , [ # foreign keys for postgres
-                  {'fktab':'Hands',        'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
-                , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',         'rcol':'id', 'drop':1}
-                , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':1}
-                , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers',  'rcol':'id', 'drop':1}
-                , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
-                , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':0}
-                , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes',  'rcol':'id', 'drop':1}
-                ]
-              ]
-
-
-# MySQL Notes:
-#    "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
-#                                                - creates index handId on .handId
-# alter table t drop foreign key fk
-# alter table t add foreign key (fkcol) references tab(rcol)
-# alter table t add constraint c foreign key (fkcol) references tab(rcol)
-# (fkcol is used for foreigh key name)
-
-# mysql to list indexes:
-#   SELECT table_name, index_name, non_unique, column_name 
-#   FROM INFORMATION_SCHEMA.STATISTICS
-#     WHERE table_name = 'tbl_name'
-#     AND table_schema = 'db_name'
-#   ORDER BY table_name, index_name, seq_in_index
-#
-# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
-# ALTER TABLE tab DROP INDEX idx
-
-# mysql to list fks:
-#   SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
-#   FROM information_schema.KEY_COLUMN_USAGE
-#   WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
-#   AND REFERENCED_TABLE_NAME is not null
-#   ORDER BY TABLE_NAME, COLUMN_NAME;
-
-# this may indicate missing object
-# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
-
-
-# PG notes:
-
-#  To add a foreign key constraint to a table:
-#  ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
-#  ALTER TABLE tab DROP CONSTRAINT zipchk
-#
-#  Note: index names must be unique across a schema
-#  CREATE INDEX idx ON tab(col)
-#  DROP INDEX idx
-
-def prepareBulkImport(fdb):
-    """Drop some indexes/foreign keys to prepare for bulk import. 
-       Currently keeping the standalone indexes as needed to import quickly"""
-    # fdb is a fpdb_db object including backend, db, cursor, sql variables
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for fk in foreignKeys[fdb.backend]:
-        if fk['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                fdb.cursor.execute("SELECT constraint_name " +
-                                   "FROM information_schema.KEY_COLUMN_USAGE " +
-                                   #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
-                                   "WHERE 1=1 " +
-                                   "AND table_name = %s AND column_name = %s " + 
-                                   "AND referenced_table_name = %s " +
-                                   "AND referenced_column_name = %s ",
-                                   (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
-                cons = fdb.cursor.fetchone()
-                #print "preparebulk: cons=", cons
-                if cons:
-                    print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
-                    try:
-                        fdb.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
-                    except:
-                        pass
-            elif fdb.backend == PGSQL:
-#    DON'T FORGET TO RECREATE THEM!!
-                print "dropping pg fk", fk['fktab'], fk['fkcol']
-                try:
-                    # try to lock table to see if index drop will work:
-                    # hmmm, tested by commenting out rollback in grapher. lock seems to work but 
-                    # then drop still hangs :-(  does work in some tests though??
-                    # will leave code here for now pending further tests/enhancement ...
-                    fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
-                    #print "after lock, status:", fdb.cursor.statusmessage
-                    #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
-                    try:
-                        fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
-                        print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
-                    except:
-                        if "does not exist" not in str(sys.exc_value):
-                            print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
-                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
-                except:
-                    print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
-                          % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
-            else:
-                print "Only MySQL and Postgres supported so far"
-                return -1
-    
-    for idx in indexes[fdb.backend]:
-        if idx['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                print "dropping mysql index ", idx['tab'], idx['col']
-                try:
-                    # apparently nowait is not implemented in mysql so this just hands if there are locks 
-                    # preventing the index drop :-(
-                    fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
-                except:
-                    pass
-            elif fdb.backend == PGSQL:
-#    DON'T FORGET TO RECREATE THEM!!
-                print "dropping pg index ", idx['tab'], idx['col']
-                try:
-                    # try to lock table to see if index drop will work:
-                    fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
-                    #print "after lock, status:", fdb.cursor.statusmessage
-                    try:
-                        # table locked ok so index drop should work:
-                        #print "drop index %s_%s_idx" % (idx['tab'],idx['col']) 
-                        fdb.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
-                        #print "dropped  pg index ", idx['tab'], idx['col']
-                    except:
-                        if "does not exist" not in str(sys.exc_value):
-                            print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
-                                  % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n')) 
-                except:
-                    print "warning: index %s_%s_idx not dropped %s, continuing ..." \
-                          % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
-            else:
-                print "Error: Only MySQL and Postgres supported so far"
-                return -1
-
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-    fdb.db.commit() # seems to clear up errors if there were any in postgres
-#end def prepareBulkImport
-
-def afterBulkImport(fdb):
-    """Re-create any dropped indexes/foreign keys after bulk import"""
-    # fdb is a fpdb_db object including backend, db, cursor, sql variables
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for fk in foreignKeys[fdb.backend]:
-        if fk['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                fdb.cursor.execute("SELECT constraint_name " +
-                                   "FROM information_schema.KEY_COLUMN_USAGE " +
-                                   #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
-                                   "WHERE 1=1 " +
-                                   "AND table_name = %s AND column_name = %s " + 
-                                   "AND referenced_table_name = %s " +
-                                   "AND referenced_column_name = %s ",
-                                   (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
-                cons = fdb.cursor.fetchone()
-                print "afterbulk: cons=", cons
-                if cons:
-                    pass
-                else:
-                    print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
-                    try:
-                        fdb.cursor.execute("alter table " + fk['fktab'] + " add foreign key (" 
-                                           + fk['fkcol'] + ") references " + fk['rtab'] + "(" 
-                                           + fk['rcol'] + ")")
-                    except:
-                        pass
-            elif fdb.backend == PGSQL:
-                print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
-                try:
-                    fdb.cursor.execute("alter table " + fk['fktab'] + " add constraint "
-                                       + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
-                                       + " foreign key (" + fk['fkcol']
-                                       + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
-                except:
-                    pass
-            else:
-                print "Only MySQL and Postgres supported so far"
-                return -1
-    
-    for idx in indexes[fdb.backend]:
-        if idx['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                print "creating mysql index ", idx['tab'], idx['col']
-                try:
-                    fdb.cursor.execute( "alter table %s add index %s(%s)"
-                                      , (idx['tab'],idx['col'],idx['col']) )
-                except:
-                    pass
-            elif fdb.backend == PGSQL:
-#                pass
-                # mod to use tab_col for index name?
-                print "creating pg index ", idx['tab'], idx['col']
-                try:
-                    print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
-                    fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
-                                      % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
-                except:
-                    print "   ERROR! :-("
-                    pass
-            else:
-                print "Only MySQL and Postgres supported so far"
-                return -1
-
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-    fdb.db.commit()   # seems to clear up errors if there were any in postgres
-#end def afterBulkImport
-
-def createAllIndexes(fdb):
-    """Create new indexes"""
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for idx in indexes[fdb.backend]:
-        if fdb.backend == MYSQL_INNODB:
-            print "creating mysql index ", idx['tab'], idx['col']
-            try:
-                fdb.cursor.execute( "alter table %s add index %s(%s)"
-                                  , (idx['tab'],idx['col'],idx['col']) )
-            except:
-                pass
-        elif fdb.backend == PGSQL:
-            # mod to use tab_col for index name?
-            print "creating pg index ", idx['tab'], idx['col']
-            try:
-                print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
-                fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
-                                  % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
-            except:
-                print "   ERROR! :-("
-                pass
-        else:
-            print "Only MySQL and Postgres supported so far"
-            return -1
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-#end def createAllIndexes
-
-def dropAllIndexes(fdb):
-    """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
-       using list of indexes in indexes data structure"""
-    # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for idx in indexes[fdb.backend]:
-        if fdb.backend == MYSQL_INNODB:
-            print "dropping mysql index ", idx['tab'], idx['col']
-            try:
-                fdb.cursor.execute( "alter table %s drop index %s"
-                                  , (idx['tab'],idx['col']) )
-            except:
-                pass
-        elif fdb.backend == PGSQL:
-            print "dropping pg index ", idx['tab'], idx['col']
-            # mod to use tab_col for index name?
-            try:
-                fdb.cursor.execute( "drop index %s_%s_idx"
-                                  % (idx['tab'],idx['col']) )
-            except:
-                pass
-        else:
-            print "Only MySQL and Postgres supported so far"
-            return -1
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-#end def dropAllIndexes
-
-def analyzeDB(fdb):
-    """Do whatever the DB can offer to update index/table statistics"""
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow vacuum to work
-        try:
-            fdb.cursor.execute("vacuum analyze")
-        except:
-            print "Error during vacuum"
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-    fdb.db.commit()
-#end def analyzeDB
-
 class DuplicateError(Exception):
     def __init__(self, value):
         self.value = value
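For reference, the isolation-level switching around vacuum exists because psycopg2 normally runs every statement inside a transaction and PostgreSQL refuses to run VACUUM in one. A standalone illustration (connection parameters are assumed):

    import psycopg2
    import psycopg2.extensions

    conn = psycopg2.connect("dbname=fpdb")    # parameters assumed
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)       # level 0
    cur = conn.cursor()
    cur.execute("vacuum analyze")             # allowed now - no transaction is open
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)   # back to level 1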
From e662279acd5b0312a71c6e70856b96b0b04de5c4 Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 7 Jun 2009 20:49:35 +0100
Subject: [PATCH 19/29] remove parameter to analyzeDB() (error in previous
 release)
---
 pyfpdb/fpdb_import.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyfpdb/fpdb_import.py b/pyfpdb/fpdb_import.py
index 8c931a49..a1e2a835 100644
--- a/pyfpdb/fpdb_import.py
+++ b/pyfpdb/fpdb_import.py
@@ -180,7 +180,7 @@ class Importer:
             tottime += ttime
         if self.settings['dropIndexes'] == 'drop':
             self.fdb.afterBulkImport()
-        self.fdb.analyzeDB(self.fdb)
+        self.fdb.analyzeDB()
         return (totstored, totdups, totpartial, toterrors, tottime)
 #        else: import threaded
 
From c6f3595b93b45dd4d8d17565df2963633aa4919a Mon Sep 17 00:00:00 2001
From: sqlcoder 
Date: Sun, 7 Jun 2009 21:55:49 +0100
Subject: [PATCH 20/29] update with latest handsplayers/hudcache changes
---
 docs/tabledesign.html | 588 ++++++++++++++++++++++++++++++++++++++----
 1 file changed, 544 insertions(+), 44 deletions(-)
diff --git a/docs/tabledesign.html b/docs/tabledesign.html
index e148b9cf..86898528 100644
--- a/docs/tabledesign.html
+++ b/docs/tabledesign.html
@@ -312,11 +312,13 @@ The program itself is licensed under AGPLv3, see agpl-3.0.txt
 
 
 Table HandsPlayers
-cardX: can be 1 through 20, one for each card. In holdem only 1-2 of these are used, in omaha 1-4, in stud/razz 1-7, in single draw 1-10, in tripple draw all 20 and in badugi 1-16 (4*4).
-For the draw games: the first 5 (badugi: 4) cards are the initial cards, the next 5 (badugi: 4) are after the first draw, etc.
-Example 1: If a player gets 2-6 spades for his first five cards and decides to throw away the 4 and then gets a 7 of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 4, 5, 6, 2, 3, 5, 6, 7
-Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and decides to throw away the 2 and the 3 and then gets a Q and K of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 5, 8, J, 5, 8, J, Q, K.
+cardX: can be 1 through 20, one for each card. In holdem only 1-2 of these are used, in omaha 1-4, in stud/razz 1-7, in single draw games 1-10 and in badugi 1-16 (4*4).
+For the draw games: the first 5 (badugi: 4) cards are the initial cards, the next 5 (badugi: 4) are after the first draw. If a player keeps some cards then those cards' spaces are filled with "k", short for "kept".
+Example 1: If a player gets 2-6 spades for his first five cards and decides to throw away the 4 and then gets a 7 of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 4, 5, 6, k, k, 7, k, k
+Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and decides to throw away the 2 and the 3 and then gets a Q and K of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 5, 8, J, Q, K, k, k, k
+Note that a k is put in the space of whichever card was there previously, so in example 2, where the player kept the last 3 cards, the last 3 fields of the first draw (i.e. card8-10Value) are replaced with k.
 I did not separate this into an extra table because I felt the lost space is not sufficiently large. Also the benefit for searching is far less relevant.
+ToDo: Original plan was to implement the many flags from hudcache as booleans - need to try this out as it will save space and may therefore be quicker.
 
 	
 		| @@ -353,33 +355,24 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards andField Name | smallint | The seat in which the person was sitting - necessary for HUD | 
+	
+		| +card1(..7) | +smallint | +0=none/unknown, 1-13=2-Ah 14-26=2-Ad 27-39=2-Ac 40-52=2-As | 
+	
+		| +startCards | +smallint | +int representing Holdem starting cards.Hand is stored as an int 13 * x + y where x and y
+are in range 0..12, and (x+2) and (y+2) represents rank of each card (2=2 .. 14=Ace).
 +If x > y then pair is suited, if x < y then unsuited.
 +Omaha and other games may need to use this as a key into another table. (to be decided ...)
 | 
 	
 		| ante | int | note: for cash this could be boolean, but in tourneys you may enter a hand with less than the full ante | 
-	
-		| -cardXValue | -smallint | -2-10=2-10, J=11, Q=12, K=13, A=14 (even in razz), unknown/no card=x-			see note above table
 | 
-	
-		| -cardXSuit | -char(1) | -h=hearts, s=spades, d=diamonds, c=clubs, unknown/no card=x | 
-	
-		| -cardXDiscarded | -boolean | -Whether the card was discarded (this only applies to draw games, X can be 1 through 15 since the final cards can obviously not be discarded). | 
-	
-		| -DrawnX | -smallint | -X can be 1 through 3.-			This field denotes how many cards the player has drawn on each draw.
 | 
 	
 		| winnings | @@ -388,7 +381,12 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards andint | 
 		| rake | -int | +rake for this player for this hand | +rake for this player for this hand (i.e. final pot(s) size = winnings + rake) | 
+	
+		| +totalProfit | +int | profit for this player for this hand ( i.e. winnings - (ante + bets) ) | 
 	
 		| @@ -405,6 +403,384 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards andcomment | bigint | references TourneysPlayers.id | 
+	
+		| +tourneyTypeId | +bigint | +references TourneyTypes.id (maybe this should be on Hands?) | 
+	
+		| +wonWhenSeenStreet1(..4) | +float | +How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split.+			To be completely clear, this stores a hand count, NOT a money amount.
 +   (2/3/4: Same for turn/street5, river/street6, street7)
 | 
+	
+		| +wonAtSD | +float | +As wonWhenSeenStreet1, but for showdown. | 
+	| street0VPI | int | did player pay to see flop, 1 or 0 |
+	| street0Aggr | int | did player raise before flop, 1 or 0 |
+	| street0_3BChance | int | did player have chance to 3B, 1 or 0 |
+	| street0_3BDone | int | did player 3bet before flop, 1 or 0 |
+	| street0_4BChance | int | did player have chance to 4B, 1 or 0 |
+	| street0_4BDone | int | did player 4bet before flop, 1 or 0 |
+	| other_3BStreet0 | int | did other player 3bet before flop, 1 or 0 |
+	| other_4BStreet0 | int | did other player 4bet before flop, 1 or 0 |
+	| street1Seen(/2/3/4) | int | did player see flop/street4 (.. etc) |
+	| sawShowdown | int | did player see showdown |
+	| street1Aggr | int | number of hands where player raised flop/street4 |
+	| street2Aggr | int | number of hands where player raised turn/street5 |
+	| street3Aggr | int | number of hands where player raised river/street6 |
+	| street4Aggr | int | number of hands where player raised street7 |
+	| otherRaisedStreet0 | int | number of hands where someone else raised pre-flop/street3 |
+	| otherRaisedStreet1 | int | number of hands where someone else raised flop/street4 |
+	| otherRaisedStreet2 | int | number of hands where someone else raised turn/street5 |
+	| otherRaisedStreet3 | int | number of hands where someone else raised river/street6 |
+	| otherRaisedStreet4 | int | number of hands where someone else raised street7 |
+	| foldToOtherRaisedStreet0 | int | number of hands where someone else raised pre-flop/street3 and the player folded |
+	| foldToOtherRaisedStreet1 | int | number of hands where someone else raised flop/street4 and the player folded |
+	| foldToOtherRaisedStreet2 | int | number of hands where someone else raised turn/street5 and the player folded |
+	| foldToOtherRaisedStreet3 | int | number of hands where someone else raised river/street6 and the player folded |
+	| foldToOtherRaisedStreet4 | int | number of hands where someone else raised street7 and the player folded |
+	| stealAttemptChance | int | Player was in CO, BTN or SB and nobody has called yet |
+	| stealAttempted | int | Player took a chance per the above condition |
+	| foldBbToStealChance | int | Somebody tried to steal BB from player |
+	| foldedBbToSteal | int | Player folded BB to steal attempt |
+	| foldSbToStealChance | int | Somebody tried to steal SB from player |
+	| foldedSbToSteal | int | Player folded SB to steal attempt |
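A small sketch (hypothetical helper names, not patch code) of the steal-attempt condition the steal rows above describe:

    STEAL_POSITIONS = ('CO', 'BTN', 'SB')   # cut-off, button, small blind

    def steal_attempt_chance(position, callers_so_far):
        """True when the player is in CO, BTN or SB and nobody has called yet."""
        return position in STEAL_POSITIONS and callers_so_far == 0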
+	| street1CBChance | int | Player had chance to make continuation bet on flop/street4 |
+	| street1CBDone | int | Player used chance to make continuation bet on flop/street4 |
+	| street2CBChance | int | Player had chance to make continuation bet on turn/street5 |
+	| street2CBDone | int | Player used chance to make continuation bet on turn/street5 |
+	| street3CBChance | int | Player had chance to make continuation bet on river/street6 |
+	| street3CBDone | int | Player used chance to make continuation bet on river/street6 |
+	| street4CBChance | int | Player had chance to make continuation bet on street7 |
+	| street4CBDone | int | Player used chance to make continuation bet on street7 |
+
+	| foldToStreet1CBChance | int | Player had chance to fold to continuation bet on this street |
+	| foldToStreet1CBDone | int | Player used chance to fold to continuation bet on this street |
+	| foldToStreet2CBChance | int | Player had chance to fold to continuation bet on this street |
+	| foldToStreet2CBDone | int | Player used chance to fold to continuation bet on this street |
+	| foldToStreet3CBChance | int | Player had chance to fold to continuation bet on this street |
+	| foldToStreet3CBDone | int | Player used chance to fold to continuation bet on this street |
+	| foldToStreet4CBChance | int | Player had chance to fold to continuation bet on this street |
+	| foldToStreet4CBDone | int | Player used chance to fold to continuation bet on this street |
+
+	| street1CheckCallRaiseChance | int | How often player had the chance to do a check-raise or a call-raise on this street |
+	| street1CheckCallRaiseDone | int | How often player used the chance to do a check-raise or a call-raise on this street |
+	| street2CheckCallRaiseChance | int | How often player had the chance to do a check-raise or a call-raise on this street |
+	| street2CheckCallRaiseDone | int | How often player used the chance to do a check-raise or a call-raise on this street |
+	| street3CheckCallRaiseChance | int | How often player had the chance to do a check-raise or a call-raise on this street |
+	| street3CheckCallRaiseDone | int | How often player used the chance to do a check-raise or a call-raise on this street |
+	| street4CheckCallRaiseChance | int | How often player had the chance to do a check-raise or a call-raise on this street |
+	| street4CheckCallRaiseDone | int | How often player used the chance to do a check-raise or a call-raise on this street |
+
+	| street0Calls | int | Number of times player called on this street |
+	| street1Calls | int | Number of times player called on this street |
+	| street2Calls | int | Number of times player called on this street |
+	| street3Calls | int | Number of times player called on this street |
+	| street4Calls | int | Number of times player called on this street |
+
+	| street0Bets | int | Number of times player bet on this street |
+	| street1Bets | int | Number of times player bet on this street |
+	| street2Bets | int | Number of times player bet on this street |
+	| street3Bets | int | Number of times player bet on this street |
+	| street4Bets | int | Number of times player bet on this street |
+
+	| street0Raises | int | Number of times player raised on this street |
+	| street1Raises | int | Number of times player raised on this street |
+	| street2Raises | int | Number of times player raised on this street |
+	| street3Raises | int | Number of times player raised on this street |
+	| street4Raises | int | Number of times player raised on this street |
+	| actionString | int | Experimental - the idea is to store the action on this street as a string, e.g. kkBrcfC, with the player's own choices in upper case and other players' in lower case. k=check, b=bet, c=call, r=raise. (Perhaps NL would miss out bet sizes for this?) It would then be possible to do complex ad-hoc queries using queries like: actionString like '%B%r%C%' |
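To make the intended kind of ad-hoc query concrete, a self-contained sketch: the actionString column does not exist yet, the table and data below are made up, and sqlite3 merely stands in for the real backend.

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute("PRAGMA case_sensitive_like = ON")   # keep the upper/lower-case distinction
    conn.execute("CREATE TABLE HandsPlayers (handId INTEGER, actionString TEXT)")
    conn.execute("INSERT INTO HandsPlayers VALUES (1, 'kkBrcfC'), (2, 'kkcC')")
    rows = conn.execute(
        "SELECT handId FROM HandsPlayers WHERE actionString LIKE '%B%r%C%'"
    ).fetchall()
    print(rows)   # [(1,)] - only the hand where the player Bet, was raised, then Called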
 
 
 Table HudCache
@@ -444,12 +820,23 @@
 	| smallint |
-	| References TourneyTypes.id |
 	| HDs | int | number of hands this player played in this gametype with this number of seats |
+	| wonWhenSeenStreet1(/2/3/4) | float | How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split. To be completely clear, this stores a hand count, NOT a money amount. (/2/3/4: same for turn/street5, river/street6, street7) |
+	| wonAtSD | float | As wonWhenSeenStreet1, but for showdown. |
 	
 	| street0VPI |
@@ -463,14 +850,24 @@
 	| int | number of hands where player raised before flop |
-	| street0_3B4BChance | int | number of hands where player had chance to 3B or 4B |
+	| street0_3BChance | int | number of hands where player had chance to 3B before flop |
-	| street0_3B4BDone | int | number of hands where player 3bet/4bet before flop |
+	| street0_3BDone | int | number of hands where player 3bet before flop |
+	| street0_4BChance | int | number of hands where player had chance to 4B before flop |
+	| street0_4BDone | int | number of hands where player 4bet before flop |
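For example, a HUD stat such as pre-flop 3-bet frequency would be derived from these HudCache counters roughly like this (a sketch, not the actual fpdb code):

    def pct(done, chance):
        """done/chance as a percentage, or None when the player never had the chance."""
        return 100.0 * done / chance if chance else None

    # e.g. street0_3BDone = 6 over street0_3BChance = 40 gives a 15.0% 3-bet
    print(pct(6, 40))   # 15.0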
 	
@@ -517,6 +914,11 @@
 	| street1Seen |
 	| int | number of hands where player raised street7 |
+	| otherRaisedStreet0 | int | number of hands where someone else raised pre-flop/street3 |
 	
 	| otherRaisedStreet1 |
@@ -537,6 +939,11 @@
 	| int | int | number of hands where someone else raised street7 |
+	| foldToOtherRaisedStreet0 | int | number of hands where someone else raised pre-flop/street3 and the player folded |
 	
 	| foldToOtherRaisedStreet1 |
@@ -557,18 +964,6 @@
 	| int | int | number of hands where someone else raised street7 and the player folded |
-	| wonWhenSeenStreet1 | float | How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split. To be completely clear, this stores a hand count, NOT a money amount. |
-	| wonAtSD | float | As wonWhenSeenStreet1, but for showdown. |
 	
 	| stealAttemptChance |
@@ -729,6 +1124,84 @@
 	| int | How often player used the chance to do a check-raise or a call-raise on this street |
 	
+	| street0Calls | int | Number of times player called on this street |
+	| street1Calls | int | Number of times player called on this street |
+	| street2Calls | int | Number of times player called on this street |
+	| street3Calls | int | Number of times player called on this street |
+	| street4Calls | int | Number of times player called on this street |
+
+	| street0Bets | int | Number of times player bet on this street |
+	| street1Bets | int | Number of times player bet on this street |
+	| street2Bets | int | Number of times player bet on this street |
+	| street3Bets | int | Number of times player bet on this street |
+	| street4Bets | int | Number of times player bet on this street |
+
+	| street0Raises | int | Number of times player raised on this street |
+	| street1Raises | int | Number of times player raised on this street |
+	| street2Raises | int | Number of times player raised on this street |
+	| street3Raises | int | Number of times player raised on this street |
+	| street4Raises | int | Number of times player raised on this street |
 
 
 Table HandsActions
@@ -926,5 +1399,32 @@
+Possible Changes
+
+	| Table | Comment |
+	| BoardCards | Remove as these attributes are now stored on Hands |
+	| HandsActions | Remove if/when these attributes are stored on Hands or elsewhere |
+	| HandsPlayers | Move tourneyTypeId field to Hands table. |
+	| Comments | Comment fields on various tables should probably be moved to a single comment table. The aim should be, where possible, to reduce tables to a list of fixed-length not-null columns and keep the larger, sparser comment columns in a dedicated table. (May not be possible or practical, but something to aim at.) |