From 45a303eb25afc1c311a441089e6043a1028224b0 Mon Sep 17 00:00:00 2001
From: sqlcoder
Date: Sun, 7 Jun 2009 20:07:18 +0100
Subject: [PATCH 1/6] make global lock work (do nothing) if hands table doesn't
exist
---
pyfpdb/fpdb.py | 7 +++--
pyfpdb/fpdb_db.py | 59 +++++++++++++++++++++++++------------------
pyfpdb/fpdb_simple.py | 47 +++++++++++++++++++---------------
3 files changed, 63 insertions(+), 50 deletions(-)
diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py
index 645ddef5..8d514b90 100755
--- a/pyfpdb/fpdb.py
+++ b/pyfpdb/fpdb.py
@@ -181,7 +181,7 @@ class fpdb:
def dia_load_profile(self, widget, data=None):
"""Dialogue to select a file to load a profile from"""
- if self.obtain_global_lock():
+ if self.obtain_global_lock() == 0: # returns 0 if successful
try:
chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
action=gtk.FILE_CHOOSER_ACTION_OPEN,
@@ -201,7 +201,7 @@ class fpdb:
def dia_recreate_tables(self, widget, data=None):
"""Dialogue that asks user to confirm that he wants to delete and recreate the tables"""
- if self.obtain_global_lock():
+ if self.obtain_global_lock() in (0,2): # returns 0 if successful, 2 if Hands table does not exist
lock_released = False
try:
@@ -406,7 +406,7 @@ class fpdb:
self.settings['db-databaseName'],
self.settings['db-user'],
self.settings['db-password'])
- return fpdb_simple.get_global_lock(self.fdb_lock)
+ return self.fdb_lock.get_global_lock()
#end def obtain_global_lock
def quit(self, widget):
@@ -455,7 +455,6 @@ class fpdb:
ps_tab=new_ps_thread.get_vbox()
self.add_and_display_tab(ps_tab, "Positional Stats")
-
def tab_main_help(self, widget, data=None):
"""Displays a tab with the main fpdb help screen"""
#print "start of tab_main_help"
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index ad599b13..dd7f5afe 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -17,6 +17,8 @@
import os
import re
+import sys
+
import fpdb_simple
import FpdbSQLQueries
@@ -96,7 +98,7 @@ class fpdb_db:
try:
self.cursor.execute("SELECT * FROM Settings")
settings=self.cursor.fetchone()
- if settings[0]!=119:
+ if settings[0]!=118:
print "outdated or too new database version - please recreate tables"
self.wrongDbVersion=True
except:# _mysql_exceptions.ProgrammingError:
@@ -201,14 +203,10 @@ class fpdb_db:
#end def get_db_info
def fillDefaultData(self):
- self.cursor.execute("INSERT INTO Settings VALUES (119);")
+ self.cursor.execute("INSERT INTO Settings VALUES (118);")
self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Full Tilt Poker', 'USD');")
self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'PokerStars', 'USD');")
self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Everleaf', 'USD');")
- self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Carbon', 'USD');")
- self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'OnGame', 'USD');")
- self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'UltimateBet', 'USD');")
- self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Betfair', 'USD');")
self.cursor.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
#end def fillDefaultData
@@ -222,22 +220,33 @@ class fpdb_db:
print "Finished recreating tables"
#end def recreate_tables
- def getSqlPlayerIDs(names, site_id):
- result = []
- notfound = []
- self.cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
- tmp = dict(self.cursor.fetchall())
- for n in names:
- if n not in tmp:
- notfound.append(n)
- else:
- result.append(tmp[n])
- if notfound:
- cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
- cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
- tmp = cursor.fetchall()
- for n in tmp:
- result.append(n[0])
-
- #We proabably want to cache this
- return result
+ # Currently uses an exclusive lock on the Hands table as a global lock
+ # Return values are Unix style, 0 for success, positive integers for errors
+ # 1 = generic error
+ # 2 = hands table does not exist (error message is suppressed)
+ def get_global_lock(self):
+ if self.backend == self.MYSQL_INNODB:
+ try:
+ self.cursor.execute( "lock tables Hands write" )
+ except:
+ # Table 'fpdb.hands' doesn't exist
+ if str(sys.exc_value).find(".hands' doesn't exist") >= 0:
+ return(2)
+ print "Error! failed to obtain global lock. Close all programs accessing " \
+ + "database (including fpdb) and try again (%s)." \
+ % ( str(sys.exc_value).rstrip('\n'), )
+ return(1)
+ elif self.backend == self.PGSQL:
+ try:
+ self.cursor.execute( "lock table Hands in exclusive mode nowait" )
+ #print "... after lock table, status =", self.cursor.statusmessage
+ except:
+ # relation "hands" does not exist
+ if str(sys.exc_value).find('relation "hands" does not exist') >= 0:
+ return(2)
+ print "Error! failed to obtain global lock. Close all programs accessing " \
+ + "database (including fpdb) and try again (%s)." \
+ % ( str(sys.exc_value).rstrip('\n'), )
+ return(1)
+ return(0)
+#end class fpdb_db
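
A minimal caller sketch (illustrative only; the helper name run_with_global_lock, the work callable and the release step are assumptions, not part of this patch) showing how the Unix-style return codes above are intended to be checked, mirroring the fpdb.py hunks earlier in this patch:

def run_with_global_lock(fdb_lock, work, allow_missing_hands=False):
    # Hypothetical caller sketch: fdb_lock is an fpdb_db instance as used above.
    ret = fdb_lock.get_global_lock()  # 0 = ok, 1 = generic error, 2 = Hands table missing
    if allow_missing_hands:
        ok = ret in (0, 2)            # e.g. recreating tables: a missing Hands table is acceptable
    else:
        ok = (ret == 0)
    if not ok:
        return False
    try:
        work()                        # caller-supplied callable that needs exclusive DB access
    finally:
        pass                          # release the lock here (backend specific, not shown in this sketch)
    return True
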
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index 2241feab..58eb9881 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -16,6 +16,10 @@
#agpl-3.0.txt in the docs folder of the package.
#This file contains simple functions for fpdb
+
+#Aiming to eventually remove this module; functions will move to, e.g.:
+#fpdb_db db create/re-create/management/etc
+#Hands or related files for saving hands to db, etc
import datetime
import time
@@ -28,6 +32,7 @@ PS = 1
FTP = 2
# TODO: these constants are also used in fpdb_save_to_db and others, is there a way to do like C #define, and #include ?
+# answer - yes. These are defined in fpdb_db so are accessible through that class.
MYSQL_INNODB = 2
PGSQL = 3
SQLITE = 4
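
For illustration, a module that holds a connected fpdb_db object can test the backend through the instance's own constants instead of redefining them; the helper below is hypothetical and only assumes the backend attribute and constants shown in this patch series:

def describe_backend(fdb):
    # Hypothetical helper: uses the constants defined on the fpdb_db instance
    # (self.MYSQL_INNODB etc. in fpdb_db.__init__) rather than local copies.
    if fdb.backend == fdb.MYSQL_INNODB:
        return "MySQL/InnoDB"
    elif fdb.backend == fdb.PGSQL:
        return "PostgreSQL"
    elif fdb.backend == fdb.SQLITE:
        return "SQLite"
    return "unknown backend (%s)" % fdb.backend
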
@@ -367,27 +372,6 @@ def analyzeDB(fdb):
fdb.db.commit()
#end def analyzeDB
-def get_global_lock(fdb):
- if fdb.backend == MYSQL_INNODB:
- try:
- fdb.cursor.execute( "lock tables Hands write" )
- except:
- print "Error! failed to obtain global lock. Close all programs accessing " \
- + "database (including fpdb) and try again (%s)." \
- % ( str(sys.exc_value).rstrip('\n'), )
- return(False)
- elif fdb.backend == PGSQL:
- try:
- fdb.cursor.execute( "lock table Hands in exclusive mode nowait" )
- #print "... after lock table, status =", fdb.cursor.statusmessage
- except:
- print "Error! failed to obtain global lock. Close all programs accessing " \
- + "database (including fpdb) and try again (%s)." \
- % ( str(sys.exc_value).rstrip('\n'), )
- return(False)
- return(True)
-
-
class DuplicateError(Exception):
def __init__(self, value):
self.value = value
@@ -1390,6 +1374,27 @@ def recognisePlayerIDs(cursor, names, site_id):
#end def recognisePlayerIDs
+# Here's a version that would work if it weren't for the fact that the output needs to be in the same order as the input.
+# This version could also be improved further using list comprehensions, etc.
+
+#def recognisePlayerIDs(cursor, names, site_id):
+# result = []
+# notfound = []
+# cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
+# tmp = dict(cursor.fetchall())
+# for n in names:
+# if n not in tmp:
+# notfound.append(n)
+# else:
+# result.append(tmp[n])
+# if notfound:
+# cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
+# cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
+# tmp = cursor.fetchall()
+# for n in tmp:
+# result.append(n[0])
+#
+# return result
#recognises the name in the given line and returns its array position in the given array
def recognisePlayerNo(line, names, atype):
From 7e8b80948cc9b7f0628bff720d9017259255d735 Mon Sep 17 00:00:00 2001
From: sqlcoder
Date: Sun, 7 Jun 2009 20:45:09 +0100
Subject: [PATCH 2/6] move index etc functions from fpdb_simple.py into
fpdb_db.py
---
pyfpdb/fpdb_db.py | 334 +++++++++++++++++++++++++++++++++++++++++-
pyfpdb/fpdb_import.py | 28 +++-
pyfpdb/fpdb_simple.py | 332 -----------------------------------------
3 files changed, 355 insertions(+), 339 deletions(-)
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index dd7f5afe..474d694b 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -31,6 +31,110 @@ class fpdb_db:
self.MYSQL_INNODB = 2
self.PGSQL = 3
self.SQLITE = 4
+
+ # Data Structures for index and foreign key creation
+ # drop_code is an int with possible values: 0 - don't drop for bulk import
+ # 1 - drop during bulk import
+ # db differences:
+ # - note that mysql automatically creates indexes on constrained columns when
+ # foreign keys are created, while postgres does not. Hence the much longer list
+ # of indexes is required for postgres.
+ # all primary keys are left on all the time
+ #
+ # table column drop_code
+
+ self.indexes = [
+ [ ] # no db with index 0
+ , [ ] # no db with index 1
+ , [ # indexes for mysql (list index 2)
+ {'tab':'Players', 'col':'name', 'drop':0}
+ , {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
+ , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
+ ]
+ , [ # indexes for postgres (list index 3)
+ {'tab':'Boardcards', 'col':'handId', 'drop':0}
+ , {'tab':'Gametypes', 'col':'siteId', 'drop':0}
+ , {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09
+ , {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
+ , {'tab':'HandsActions', 'col':'handsPlayerId', 'drop':0}
+ , {'tab':'HandsPlayers', 'col':'handId', 'drop':1}
+ , {'tab':'HandsPlayers', 'col':'playerId', 'drop':1}
+ , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
+ , {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
+ , {'tab':'HudCache', 'col':'playerId', 'drop':0}
+ , {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
+ , {'tab':'Players', 'col':'siteId', 'drop':1}
+ , {'tab':'Players', 'col':'name', 'drop':0}
+ , {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
+ , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
+ , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
+ , {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0}
+ , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
+ ]
+ ]
+
+ self.foreignKeys = [
+ [ ] # no db with index 0
+ , [ ] # no db with index 1
+ , [ # foreign keys for mysql
+ {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
+ , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+ , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
+ , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
+ ]
+ , [ # foreign keys for postgres
+ {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
+ , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+ , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
+ , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
+ ]
+ ]
+
+
+ # MySQL Notes:
+ # "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
+ # - creates index handId on .handId
+ # alter table t drop foreign key fk
+ # alter table t add foreign key (fkcol) references tab(rcol)
+ # alter table t add constraint c foreign key (fkcol) references tab(rcol)
+    # (fkcol is used for foreign key name)
+
+ # mysql to list indexes:
+ # SELECT table_name, index_name, non_unique, column_name
+ # FROM INFORMATION_SCHEMA.STATISTICS
+ # WHERE table_name = 'tbl_name'
+ # AND table_schema = 'db_name'
+ # ORDER BY table_name, index_name, seq_in_index
+ #
+ # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
+ # ALTER TABLE tab DROP INDEX idx
+
+ # mysql to list fks:
+ # SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
+ # FROM information_schema.KEY_COLUMN_USAGE
+ # WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
+ # AND REFERENCED_TABLE_NAME is not null
+ # ORDER BY TABLE_NAME, COLUMN_NAME;
+
+ # this may indicate missing object
+ # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
+
+
+ # PG notes:
+
+ # To add a foreign key constraint to a table:
+ # ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
+ # ALTER TABLE tab DROP CONSTRAINT zipchk
+ #
+ # Note: index names must be unique across a schema
+ # CREATE INDEX idx ON tab(col)
+ # DROP INDEX idx
#end def __init__
def do_connect(self, config=None):
@@ -215,11 +319,239 @@ class fpdb_db:
self.drop_tables()
self.create_tables()
- fpdb_simple.createAllIndexes(self)
+ self.createAllIndexes()
self.db.commit()
print "Finished recreating tables"
#end def recreate_tables
+ def prepareBulkImport(self):
+ """Drop some indexes/foreign keys to prepare for bulk import.
+ Currently keeping the standalone indexes as needed to import quickly"""
+ # self is a fpdb_db object including backend, db, cursor, sql variables
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(0) # allow table/index operations to work
+ for fk in self.foreignKeys[self.backend]:
+ if fk['drop'] == 1:
+ if self.backend == self.MYSQL_INNODB:
+ self.cursor.execute("SELECT constraint_name " +
+ "FROM information_schema.KEY_COLUMN_USAGE " +
+ #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+ "WHERE 1=1 " +
+ "AND table_name = %s AND column_name = %s " +
+ "AND referenced_table_name = %s " +
+ "AND referenced_column_name = %s ",
+ (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+ cons = self.cursor.fetchone()
+ #print "preparebulk: cons=", cons
+ if cons:
+ print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
+ try:
+ self.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
+ except:
+ pass
+ elif self.backend == self.PGSQL:
+ # DON'T FORGET TO RECREATE THEM!!
+ print "dropping pg fk", fk['fktab'], fk['fkcol']
+ try:
+ # try to lock table to see if index drop will work:
+ # hmmm, tested by commenting out rollback in grapher. lock seems to work but
+ # then drop still hangs :-( does work in some tests though??
+ # will leave code here for now pending further tests/enhancement ...
+ self.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
+ #print "after lock, status:", self.cursor.statusmessage
+ #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
+ try:
+ self.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
+                        print "dropped pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
+ except:
+ if "does not exist" not in str(sys.exc_value):
+ print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
+ % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
+ except:
+ print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
+ % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
+ else:
+ print "Only MySQL and Postgres supported so far"
+ return -1
+
+ for idx in self.indexes[self.backend]:
+ if idx['drop'] == 1:
+ if self.backend == self.MYSQL_INNODB:
+ print "dropping mysql index ", idx['tab'], idx['col']
+ try:
+                    # apparently nowait is not implemented in mysql so this just hangs if there are locks
+ # preventing the index drop :-(
+ self.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
+ except:
+ pass
+ elif self.backend == self.PGSQL:
+ # DON'T FORGET TO RECREATE THEM!!
+ print "dropping pg index ", idx['tab'], idx['col']
+ try:
+ # try to lock table to see if index drop will work:
+ self.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
+ #print "after lock, status:", self.cursor.statusmessage
+ try:
+ # table locked ok so index drop should work:
+ #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
+ self.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
+ #print "dropped pg index ", idx['tab'], idx['col']
+ except:
+ if "does not exist" not in str(sys.exc_value):
+ print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
+ % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
+ except:
+                    print "warning: index %s_%s_idx not dropped: %s, continuing ..." \
+ % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
+ else:
+ print "Error: Only MySQL and Postgres supported so far"
+ return -1
+
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(1) # go back to normal isolation level
+ self.db.commit() # seems to clear up errors if there were any in postgres
+ #end def prepareBulkImport
+
+ def afterBulkImport(self):
+ """Re-create any dropped indexes/foreign keys after bulk import"""
+ # self is a fpdb_db object including backend, db, cursor, sql variables
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(0) # allow table/index operations to work
+ for fk in self.foreignKeys[self.backend]:
+ if fk['drop'] == 1:
+ if self.backend == self.MYSQL_INNODB:
+ self.cursor.execute("SELECT constraint_name " +
+ "FROM information_schema.KEY_COLUMN_USAGE " +
+ #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+ "WHERE 1=1 " +
+ "AND table_name = %s AND column_name = %s " +
+ "AND referenced_table_name = %s " +
+ "AND referenced_column_name = %s ",
+ (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+ cons = self.cursor.fetchone()
+ print "afterbulk: cons=", cons
+ if cons:
+ pass
+ else:
+ print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+ try:
+ self.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
+ + fk['fkcol'] + ") references " + fk['rtab'] + "("
+ + fk['rcol'] + ")")
+ except:
+ pass
+ elif self.backend == self.PGSQL:
+ print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+ try:
+ self.cursor.execute("alter table " + fk['fktab'] + " add constraint "
+ + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
+ + " foreign key (" + fk['fkcol']
+ + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
+ except:
+ pass
+ else:
+ print "Only MySQL and Postgres supported so far"
+ return -1
+
+ for idx in self.indexes[self.backend]:
+ if idx['drop'] == 1:
+ if self.backend == self.MYSQL_INNODB:
+ print "creating mysql index ", idx['tab'], idx['col']
+ try:
+ self.cursor.execute( "alter table %s add index %s(%s)"
+ , (idx['tab'],idx['col'],idx['col']) )
+ except:
+ pass
+ elif self.backend == self.PGSQL:
+ # pass
+ # mod to use tab_col for index name?
+ print "creating pg index ", idx['tab'], idx['col']
+ try:
+ print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+ self.cursor.execute( "create index %s_%s_idx on %s(%s)"
+ % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+ except:
+ print " ERROR! :-("
+ pass
+ else:
+ print "Only MySQL and Postgres supported so far"
+ return -1
+
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(1) # go back to normal isolation level
+ self.db.commit() # seems to clear up errors if there were any in postgres
+ #end def afterBulkImport
+
+ def createAllIndexes(self):
+ """Create new indexes"""
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(0) # allow table/index operations to work
+ for idx in self.indexes[self.backend]:
+ if self.backend == self.MYSQL_INNODB:
+ print "creating mysql index ", idx['tab'], idx['col']
+ try:
+ self.cursor.execute( "alter table %s add index %s(%s)"
+ , (idx['tab'],idx['col'],idx['col']) )
+ except:
+ pass
+ elif self.backend == self.PGSQL:
+ # mod to use tab_col for index name?
+ print "creating pg index ", idx['tab'], idx['col']
+ try:
+ print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+ self.cursor.execute( "create index %s_%s_idx on %s(%s)"
+ % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+ except:
+ print " ERROR! :-("
+ pass
+ else:
+ print "Only MySQL and Postgres supported so far"
+ return -1
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(1) # go back to normal isolation level
+ #end def createAllIndexes
+
+ def dropAllIndexes(self):
+ """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
+ using list of indexes in indexes data structure"""
+ # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(0) # allow table/index operations to work
+ for idx in self.indexes[self.backend]:
+ if self.backend == self.MYSQL_INNODB:
+ print "dropping mysql index ", idx['tab'], idx['col']
+ try:
+ self.cursor.execute( "alter table %s drop index %s"
+ , (idx['tab'],idx['col']) )
+ except:
+ pass
+ elif self.backend == self.PGSQL:
+ print "dropping pg index ", idx['tab'], idx['col']
+ # mod to use tab_col for index name?
+ try:
+ self.cursor.execute( "drop index %s_%s_idx"
+ % (idx['tab'],idx['col']) )
+ except:
+ pass
+ else:
+ print "Only MySQL and Postgres supported so far"
+ return -1
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(1) # go back to normal isolation level
+ #end def dropAllIndexes
+
+ def analyzeDB(self):
+ """Do whatever the DB can offer to update index/table statistics"""
+ if self.backend == self.PGSQL:
+ self.db.set_isolation_level(0) # allow vacuum to work
+ try:
+ self.cursor.execute("vacuum analyze")
+ except:
+ print "Error during vacuum"
+ self.db.set_isolation_level(1) # go back to normal isolation level
+ self.db.commit()
+ #end def analyzeDB
+
# Currently uses an exclusive lock on the Hands table as a global lock
# Return values are Unix style, 0 for success, positive integers for errors
# 1 = generic error
diff --git a/pyfpdb/fpdb_import.py b/pyfpdb/fpdb_import.py
index 2dbb4807..8c931a49 100644
--- a/pyfpdb/fpdb_import.py
+++ b/pyfpdb/fpdb_import.py
@@ -150,7 +150,9 @@ class Importer:
self.monitor = True
self.dirlist[site] = [dir] + [filter]
+ #print "addImportDirectory: checking files in", dir
for file in os.listdir(dir):
+ #print " adding file ", file
self.addImportFile(os.path.join(dir, file), site, filter)
else:
print "Warning: Attempted to add non-directory: '" + str(dir) + "' as an import directory"
@@ -162,7 +164,7 @@ class Importer:
if self.settings['dropIndexes'] == 'auto':
self.settings['dropIndexes'] = self.calculate_auto()
if self.settings['dropIndexes'] == 'drop':
- fpdb_simple.prepareBulkImport(self.fdb)
+ self.fdb.prepareBulkImport()
totstored = 0
totdups = 0
totpartial = 0
@@ -177,8 +179,8 @@ class Importer:
toterrors += errors
tottime += ttime
if self.settings['dropIndexes'] == 'drop':
- fpdb_simple.afterBulkImport(self.fdb)
- fpdb_simple.analyzeDB(self.fdb)
+ self.fdb.afterBulkImport()
+ self.fdb.analyzeDB(self.fdb)
return (totstored, totdups, totpartial, toterrors, tottime)
# else: import threaded
@@ -203,14 +205,18 @@ class Importer:
#todo: make efficient - always checks for new file, should be able to use mtime of directory
# ^^ May not work on windows
+ #rulog = open('runUpdated.txt', 'a')
+ #rulog.writelines("runUpdated ... ")
for site in self.dirlist:
self.addImportDirectory(self.dirlist[site][0], False, site, self.dirlist[site][1])
for file in self.filelist:
if os.path.exists(file):
stat_info = os.stat(file)
+ #rulog.writelines("path exists ")
try:
lastupdate = self.updated[file]
+ #rulog.writelines("lastupdate = %d, mtime = %d" % (lastupdate,stat_info.st_mtime))
if stat_info.st_mtime > lastupdate:
self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
self.updated[file] = time()
@@ -236,7 +242,8 @@ class Importer:
self.addToDirList = {}
self.removeFromFileList = {}
self.fdb.db.rollback()
-
+ #rulog.writelines(" finished\n")
+ #rulog.close()
# This is now an internal function that should not be called directly.
def import_file_dict(self, file, site, filter):
@@ -282,6 +289,7 @@ class Importer:
starttime = time()
last_read_hand = 0
loc = 0
+ #print "file =", file
if file == "stdin":
inputFile = sys.stdin
else:
@@ -292,10 +300,17 @@ class Importer:
return (0, 0, 0, 1, 0)
try:
loc = self.pos_in_file[file]
+ #size = os.path.getsize(file)
+ #print "loc =", loc, 'size =', size
except:
pass
# Read input file into class and close file
inputFile.seek(loc)
+ #tmplines = inputFile.readlines()
+ #if tmplines == None or tmplines == []:
+ # print "tmplines = ", tmplines
+ #else:
+ # print "tmplines[0] =", tmplines[0]
self.lines = fpdb_simple.removeTrailingEOL(inputFile.readlines())
self.pos_in_file[file] = inputFile.tell()
inputFile.close()
@@ -303,7 +318,8 @@ class Importer:
try: # sometimes we seem to be getting an empty self.lines, in which case, we just want to return.
firstline = self.lines[0]
except:
- print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc)
+ # just skip the debug message and return silently:
+ #print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc)
return (0,0,0,1,0)
if firstline.find("Tournament Summary")!=-1:
@@ -348,6 +364,7 @@ class Importer:
if self.callHud:
#print "call to HUD here. handsId:",handsId
#pipe the Hands.id out to the HUD
+ print "sending hand to hud", handsId, "pipe =", self.caller.pipe_to_hud
self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
except fpdb_simple.DuplicateError:
duplicates += 1
@@ -364,7 +381,6 @@ class Importer:
except (fpdb_simple.FpdbError), fe:
errors += 1
self.printEmailErrorMessage(errors, file, hand)
-
self.fdb.db.rollback()
if self.settings['failOnError']:
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index 58eb9881..72b7b656 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -40,338 +40,6 @@ SQLITE = 4
# config while trying out new hudcache mechanism
use_date_in_hudcache = True
-# Data Structures for index and foreign key creation
-# drop_code is an int with possible values: 0 - don't drop for bulk import
-# 1 - drop during bulk import
-# db differences:
-# - note that mysql automatically creates indexes on constrained columns when
-# foreign keys are created, while postgres does not. Hence the much longer list
-# of indexes is required for postgres.
-# all primary keys are left on all the time
-#
-# table column drop_code
-
-indexes = [
- [ ] # no db with index 0
- , [ ] # no db with index 1
- , [ # indexes for mysql (list index 2)
- {'tab':'Players', 'col':'name', 'drop':0}
- , {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
- , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
- ]
- , [ # indexes for postgres (list index 3)
- {'tab':'Boardcards', 'col':'handId', 'drop':0}
- , {'tab':'Gametypes', 'col':'siteId', 'drop':0}
- , {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09
- , {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
- , {'tab':'HandsActions', 'col':'handsPlayerId', 'drop':0}
- , {'tab':'HandsPlayers', 'col':'handId', 'drop':1}
- , {'tab':'HandsPlayers', 'col':'playerId', 'drop':1}
- , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
- , {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
- , {'tab':'HudCache', 'col':'playerId', 'drop':0}
- , {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
- , {'tab':'Players', 'col':'siteId', 'drop':1}
- , {'tab':'Players', 'col':'name', 'drop':0}
- , {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
- , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
- , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
- , {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0}
- , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
- ]
- ]
-
-foreignKeys = [
- [ ] # no db with index 0
- , [ ] # no db with index 1
- , [ # foreign keys for mysql
- {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
- , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
- , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
- , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
- , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
- , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
- , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
- ]
- , [ # foreign keys for postgres
- {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
- , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
- , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
- , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
- , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
- , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
- , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
- ]
- ]
-
-
-# MySQL Notes:
-# "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
-# - creates index handId on .handId
-# alter table t drop foreign key fk
-# alter table t add foreign key (fkcol) references tab(rcol)
-# alter table t add constraint c foreign key (fkcol) references tab(rcol)
-# (fkcol is used for foreigh key name)
-
-# mysql to list indexes:
-# SELECT table_name, index_name, non_unique, column_name
-# FROM INFORMATION_SCHEMA.STATISTICS
-# WHERE table_name = 'tbl_name'
-# AND table_schema = 'db_name'
-# ORDER BY table_name, index_name, seq_in_index
-#
-# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
-# ALTER TABLE tab DROP INDEX idx
-
-# mysql to list fks:
-# SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
-# FROM information_schema.KEY_COLUMN_USAGE
-# WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
-# AND REFERENCED_TABLE_NAME is not null
-# ORDER BY TABLE_NAME, COLUMN_NAME;
-
-# this may indicate missing object
-# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
-
-
-# PG notes:
-
-# To add a foreign key constraint to a table:
-# ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
-# ALTER TABLE tab DROP CONSTRAINT zipchk
-#
-# Note: index names must be unique across a schema
-# CREATE INDEX idx ON tab(col)
-# DROP INDEX idx
-
-def prepareBulkImport(fdb):
- """Drop some indexes/foreign keys to prepare for bulk import.
- Currently keeping the standalone indexes as needed to import quickly"""
- # fdb is a fpdb_db object including backend, db, cursor, sql variables
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(0) # allow table/index operations to work
- for fk in foreignKeys[fdb.backend]:
- if fk['drop'] == 1:
- if fdb.backend == MYSQL_INNODB:
- fdb.cursor.execute("SELECT constraint_name " +
- "FROM information_schema.KEY_COLUMN_USAGE " +
- #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
- "WHERE 1=1 " +
- "AND table_name = %s AND column_name = %s " +
- "AND referenced_table_name = %s " +
- "AND referenced_column_name = %s ",
- (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
- cons = fdb.cursor.fetchone()
- #print "preparebulk: cons=", cons
- if cons:
- print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
- try:
- fdb.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
- except:
- pass
- elif fdb.backend == PGSQL:
-# DON'T FORGET TO RECREATE THEM!!
- print "dropping pg fk", fk['fktab'], fk['fkcol']
- try:
- # try to lock table to see if index drop will work:
- # hmmm, tested by commenting out rollback in grapher. lock seems to work but
- # then drop still hangs :-( does work in some tests though??
- # will leave code here for now pending further tests/enhancement ...
- fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
- #print "after lock, status:", fdb.cursor.statusmessage
- #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
- try:
- fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
- print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
- except:
- if "does not exist" not in str(sys.exc_value):
- print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
- % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
- except:
- print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
- % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
- else:
- print "Only MySQL and Postgres supported so far"
- return -1
-
- for idx in indexes[fdb.backend]:
- if idx['drop'] == 1:
- if fdb.backend == MYSQL_INNODB:
- print "dropping mysql index ", idx['tab'], idx['col']
- try:
- # apparently nowait is not implemented in mysql so this just hands if there are locks
- # preventing the index drop :-(
- fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
- except:
- pass
- elif fdb.backend == PGSQL:
-# DON'T FORGET TO RECREATE THEM!!
- print "dropping pg index ", idx['tab'], idx['col']
- try:
- # try to lock table to see if index drop will work:
- fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
- #print "after lock, status:", fdb.cursor.statusmessage
- try:
- # table locked ok so index drop should work:
- #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
- fdb.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
- #print "dropped pg index ", idx['tab'], idx['col']
- except:
- if "does not exist" not in str(sys.exc_value):
- print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
- % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
- except:
- print "warning: index %s_%s_idx not dropped %s, continuing ..." \
- % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
- else:
- print "Error: Only MySQL and Postgres supported so far"
- return -1
-
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(1) # go back to normal isolation level
- fdb.db.commit() # seems to clear up errors if there were any in postgres
-#end def prepareBulkImport
-
-def afterBulkImport(fdb):
- """Re-create any dropped indexes/foreign keys after bulk import"""
- # fdb is a fpdb_db object including backend, db, cursor, sql variables
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(0) # allow table/index operations to work
- for fk in foreignKeys[fdb.backend]:
- if fk['drop'] == 1:
- if fdb.backend == MYSQL_INNODB:
- fdb.cursor.execute("SELECT constraint_name " +
- "FROM information_schema.KEY_COLUMN_USAGE " +
- #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
- "WHERE 1=1 " +
- "AND table_name = %s AND column_name = %s " +
- "AND referenced_table_name = %s " +
- "AND referenced_column_name = %s ",
- (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
- cons = fdb.cursor.fetchone()
- print "afterbulk: cons=", cons
- if cons:
- pass
- else:
- print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
- try:
- fdb.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
- + fk['fkcol'] + ") references " + fk['rtab'] + "("
- + fk['rcol'] + ")")
- except:
- pass
- elif fdb.backend == PGSQL:
- print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
- try:
- fdb.cursor.execute("alter table " + fk['fktab'] + " add constraint "
- + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
- + " foreign key (" + fk['fkcol']
- + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
- except:
- pass
- else:
- print "Only MySQL and Postgres supported so far"
- return -1
-
- for idx in indexes[fdb.backend]:
- if idx['drop'] == 1:
- if fdb.backend == MYSQL_INNODB:
- print "creating mysql index ", idx['tab'], idx['col']
- try:
- fdb.cursor.execute( "alter table %s add index %s(%s)"
- , (idx['tab'],idx['col'],idx['col']) )
- except:
- pass
- elif fdb.backend == PGSQL:
-# pass
- # mod to use tab_col for index name?
- print "creating pg index ", idx['tab'], idx['col']
- try:
- print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
- fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
- % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
- except:
- print " ERROR! :-("
- pass
- else:
- print "Only MySQL and Postgres supported so far"
- return -1
-
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(1) # go back to normal isolation level
- fdb.db.commit() # seems to clear up errors if there were any in postgres
-#end def afterBulkImport
-
-def createAllIndexes(fdb):
- """Create new indexes"""
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(0) # allow table/index operations to work
- for idx in indexes[fdb.backend]:
- if fdb.backend == MYSQL_INNODB:
- print "creating mysql index ", idx['tab'], idx['col']
- try:
- fdb.cursor.execute( "alter table %s add index %s(%s)"
- , (idx['tab'],idx['col'],idx['col']) )
- except:
- pass
- elif fdb.backend == PGSQL:
- # mod to use tab_col for index name?
- print "creating pg index ", idx['tab'], idx['col']
- try:
- print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
- fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
- % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
- except:
- print " ERROR! :-("
- pass
- else:
- print "Only MySQL and Postgres supported so far"
- return -1
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(1) # go back to normal isolation level
-#end def createAllIndexes
-
-def dropAllIndexes(fdb):
- """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
- using list of indexes in indexes data structure"""
- # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(0) # allow table/index operations to work
- for idx in indexes[fdb.backend]:
- if fdb.backend == MYSQL_INNODB:
- print "dropping mysql index ", idx['tab'], idx['col']
- try:
- fdb.cursor.execute( "alter table %s drop index %s"
- , (idx['tab'],idx['col']) )
- except:
- pass
- elif fdb.backend == PGSQL:
- print "dropping pg index ", idx['tab'], idx['col']
- # mod to use tab_col for index name?
- try:
- fdb.cursor.execute( "drop index %s_%s_idx"
- % (idx['tab'],idx['col']) )
- except:
- pass
- else:
- print "Only MySQL and Postgres supported so far"
- return -1
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(1) # go back to normal isolation level
-#end def dropAllIndexes
-
-def analyzeDB(fdb):
- """Do whatever the DB can offer to update index/table statistics"""
- if fdb.backend == PGSQL:
- fdb.db.set_isolation_level(0) # allow vacuum to work
- try:
- fdb.cursor.execute("vacuum analyze")
- except:
- print "Error during vacuum"
- fdb.db.set_isolation_level(1) # go back to normal isolation level
- fdb.db.commit()
-#end def analyzeDB
-
class DuplicateError(Exception):
def __init__(self, value):
self.value = value
From e662279acd5b0312a71c6e70856b96b0b04de5c4 Mon Sep 17 00:00:00 2001
From: sqlcoder
Date: Sun, 7 Jun 2009 20:49:35 +0100
Subject: [PATCH 3/6] remove parameter to analyzeDB() (error in previous
release)
---
pyfpdb/fpdb_import.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyfpdb/fpdb_import.py b/pyfpdb/fpdb_import.py
index 8c931a49..a1e2a835 100644
--- a/pyfpdb/fpdb_import.py
+++ b/pyfpdb/fpdb_import.py
@@ -180,7 +180,7 @@ class Importer:
tottime += ttime
if self.settings['dropIndexes'] == 'drop':
self.fdb.afterBulkImport()
- self.fdb.analyzeDB(self.fdb)
+ self.fdb.analyzeDB()
return (totstored, totdups, totpartial, toterrors, tottime)
# else: import threaded
From c6f3595b93b45dd4d8d17565df2963633aa4919a Mon Sep 17 00:00:00 2001
From: sqlcoder
Date: Sun, 7 Jun 2009 21:55:49 +0100
Subject: [PATCH 4/6] update with latest handsplayers/hudcache changes
---
docs/tabledesign.html | 588 ++++++++++++++++++++++++++++++++++++++----
1 file changed, 544 insertions(+), 44 deletions(-)
diff --git a/docs/tabledesign.html b/docs/tabledesign.html
index e148b9cf..86898528 100644
--- a/docs/tabledesign.html
+++ b/docs/tabledesign.html
@@ -312,11 +312,13 @@ The program itself is licensed under AGPLv3, see agpl-3.0.txt
Table HandsPlayers
-cardX: can be 1 through 20, one for each card. In holdem only 1-2 of these are used, in omaha 1-4, in stud/razz 1-7, in single draw 1-10, in tripple draw all 20 and in badugi 1-16 (4*4).
-For the draw games: the first 5 (badugi: 4) cards are the initial cards, the next 5 (badugi: 4) are after the first draw, etc.
-Example 1: If a player gets 2-6 spades for his first five cards and decides to throw away the 4 and then gets a 7 of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 4, 5, 6, 2, 3, 5, 6, 7
-Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and decides to throw away the 2 and the 3 and then gets a Q and K of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 5, 8, J, 5, 8, J, Q, K.
+cardX: can be 1 through 20, one for each card. In holdem only 1-2 of these are used, in omaha 1-4, in stud/razz 1-7, in single draw games 1-10 are used, and in badugi 1-16 (4*4) are used.
+For the draw games: the first 5 (badugi: 4) cards are the initial cards, the next 5 (badugi: 4) are after the first draw. If a player keeps some cards then those cards' spaces are filled with "k", short for "kept".
+Example 1: If a player gets 2-6 spades for his first five cards and decides to throw away the 4 and then gets a 7 of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 4, 5, 6, k, k, 7, k, k
+Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and decides to throw away the 2 and the 3 and then gets a Q and K of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 5, 8, J, Q, K, k, k, k
+Note that "k" is written in the position where the kept card sat in the previous round, so in example 2, where the player kept the last 3 cards, the last 3 fields after the first draw (i.e. card8-10Value) are replaced with k.
I did not separate this into an extra table because I felt the lost space is not sufficiently large. Also the benefit for searching is far less relevant.
+ToDo: Original plan was to implement the many flags from hudcache as booleans - need to try this out as it will save space and may therefore be quicker.
Field Name |
@@ -353,33 +355,24 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
smallint |
The seat in which the person was sitting - necessary for HUD |
+
+ card1(..7) |
+ smallint |
+ 0 = none/unknown, 1-13 = 2h..Ah, 14-26 = 2d..Ad, 27-39 = 2c..Ac, 40-52 = 2s..As |
+
+
+ startCards |
+ smallint |
+ int representing Holdem starting cards. Hand is stored as an int 13 * x + y where x and y
+are in range 0..12, and (x+2) and (y+2) represent the rank of each card (2=2 .. 14=Ace).
+If x > y the hand is suited, if x < y it is unsuited.
+Omaha and other games may need to use this as a key into another table. (to be decided ...) |
+
ante |
int |
note: for cash this could be boolean, but in tourneys you may enter a hand with less than the full ante |
-
- cardXValue |
- smallint |
- 2-10=2-10, J=11, Q=12, K=13, A=14 (even in razz), unknown/no card=x
- see note above table |
-
-
- cardXSuit |
- char(1) |
- h=hearts, s=spades, d=diamonds, c=clubs, unknown/no card=x |
-
-
- cardXDiscarded |
- boolean |
- Whether the card was discarded (this only applies to draw games, X can be 1 through 15 since the final cards can obviously not be discarded). |
-
-
- DrawnX |
- smallint |
- X can be 1 through 3.
- This field denotes how many cards the player has drawn on each draw. |
-
winnings |
int |
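
To make the card1(..7) and startCards encodings described in this hunk concrete, here is a small illustrative sketch; the helper names, the rank/suit string representation and the worked values are assumptions for illustration, only the formulas come from the table text above:

RANKS = '23456789TJQKA'   # rank 2..14 maps to index 0..12
SUITS = 'hdcs'            # hearts 1-13, diamonds 14-26, clubs 27-39, spades 40-52

def encode_card(rank, suit):
    # 0 = none/unknown, otherwise 1..52 as documented for card1(..7)
    if rank is None:
        return 0
    return SUITS.index(suit) * 13 + RANKS.index(rank) + 1

def encode_start_cards(rank1, rank2, suited):
    # startCards = 13 * x + y with x, y in 0..12 and card rank = index + 2;
    # x > y encodes suited, x < y unsuited, x == y a pair
    a, b = RANKS.index(rank1), RANKS.index(rank2)
    hi, lo = max(a, b), min(a, b)
    if suited:
        x, y = hi, lo             # x > y  -> suited
    else:
        x, y = lo, hi             # x < y  -> unsuited
    return 13 * x + y

# e.g. encode_card('A', 'h') == 13 and encode_card('2', 's') == 40
# encode_start_cards('A', 'K', True)  == 13*12 + 11 == 167  (AKs)
# encode_start_cards('A', 'K', False) == 13*11 + 12 == 155  (AKo)
# encode_start_cards('Q', 'Q', False) == 13*10 + 10 == 140  (QQ, a pair)
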
@@ -388,7 +381,12 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
rake |
int |
- rake for this player for this hand |
+ rake for this player for this hand (i.e. final pot(s) size = winnings + rake) |
+
+
+ totalProfit |
+ int |
+ profit for this player for this hand ( i.e. winnings - (ante + bets) ) |
comment |
@@ -405,6 +403,384 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
bigint |
references TourneysPlayers.id |
+
+ tourneyTypeId |
+ bigint |
+ references TourneyTypes.id (maybe this should be on Hands?) |
+
+
+ wonWhenSeenStreet1(..4) |
+ float |
+ How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split.
+ To be completely clear, this stores a hand count, NOT a money amount.
+ (2/3/4: Same for turn/street5, river/street6, street7) |
+
+
+ wonAtSD |
+ float |
+ As wonWhenSeenStreet1, but for showdown. |
+
+
+ street0VPI |
+ int |
+ did player pay to see flop, 1 or 0 |
+
+
+ street0Aggr |
+ int |
+ did player raise before flop, 1 or 0 |
+
+
+ street0_3BChance |
+ int |
+ did player have chance to 3B, 1 or 0 |
+
+
+ street0_3BDone |
+ int |
+ did player 3bet before flop, 1 or 0 |
+
+
+ street0_4BChance |
+ int |
+ did player have chance to 4B, 1 or 0 |
+
+
+ street0_4BDone |
+ int |
+ did player 4bet before flop, 1 or 0 |
+
+
+ other_3BStreet0 |
+ int |
+ did other player 3bet before flop, 1 or 0 |
+
+
+ other_4BStreet0 |
+ int |
+ did other player 4bet before flop, 1 or 0 |
+
+
+ street1Seen(/2/3/4) |
+ int |
+ did player see flop/street4 (.. etc) |
+
+
+ sawShowdown |
+ int |
+ did player see showdown |
+
+
+ street1Aggr |
+ int |
+ number of hands where player raised flop/street4 |
+
+
+ street2Aggr |
+ int |
+ number of hands where player raised turn/street5 |
+
+
+ street3Aggr |
+ int |
+ number of hands where player raised river/street6 |
+
+
+ street4Aggr |
+ int |
+ number of hands where player raised street7 |
+
+
+ otherRaisedStreet0 |
+ int |
+ number of hands where someone else raised pre-flop/street3 |
+
+
+ otherRaisedStreet1 |
+ int |
+ number of hands where someone else raised flop/street4 |
+
+
+ otherRaisedStreet2 |
+ int |
+ number of hands where someone else raised turn/street5 |
+
+
+ otherRaisedStreet3 |
+ int |
+ number of hands where someone else raised river/street6 |
+
+
+ otherRaisedStreet4 |
+ int |
+ number of hands where someone else raised street7 |
+
+
+ foldToOtherRaisedStreet0 |
+ int |
+ number of hands where someone else raised pre-flop/street3 and the player folded |
+
+
+ foldToOtherRaisedStreet1 |
+ int |
+ number of hands where someone else raised flop/street4 and the player folded |
+
+
+ foldToOtherRaisedStreet2 |
+ int |
+ number of hands where someone else raised Turn/street5 and the player folded |
+
+
+ foldToOtherRaisedStreet3 |
+ int |
+ number of hands where someone else raised River/street6 and the player folded |
+
+
+ foldToOtherRaisedStreet4 |
+ int |
+ number of hands where someone else raised street7 and the player folded |
+
+
+ stealAttemptChance |
+ int |
+ Player was in CO, BTN or SB and nobody has called yet |
+
+
+ stealAttempted |
+ int |
+ Player took a chance per the above condition |
+
+
+ foldBbToStealChance |
+ int |
+ Somebody tried to steal BB from player |
+
+
+ foldedBbToSteal |
+ int |
+ Player folded BB to steal attempt |
+
+
+ foldSbToStealChance |
+ int |
+ Somebody tried to steal SB from player |
+
+
+ foldedSbToSteal |
+ int |
+ Player folded SB to steal attempt |
+
+
+
+ street1CBChance |
+ int |
+ Player had chance to make continuation bet on flop/street4 |
+
+
+ street1CBDone |
+ int |
+ Player used chance to make continuation bet on flop/street4 |
+
+
+ street2CBChance |
+ int |
+ Player had chance to make continuation bet on turn/street5 |
+
+
+ street2CBDone |
+ int |
+ Player used chance to make continuation bet on turn/street5 |
+
+
+ street3CBChance |
+ int |
+ Player had chance to make continuation bet on river/street6 |
+
+
+ street3CBDone |
+ int |
+ Player used chance to make continuation bet on river/street6 |
+
+
+ street4CBChance |
+ int |
+ Player had chance to make continuation bet on street7 |
+
+
+ street4CBDone |
+ int |
+ Player used chance to make continuation bet on street7 |
+
+
+
+ foldToStreet1CBChance |
+ int |
+ Player had chance to fold to continuation bet on this street |
+
+
+ foldToStreet1CBDone |
+ int |
+ Player used chance to fold to continuation bet on this street |
+
+
+ foldToStreet2CBChance |
+ int |
+ Player had chance to fold to continuation bet on this street |
+
+
+ foldToStreet2CBDone |
+ int |
+ Player used chance to fold to continuation bet on this street |
+
+
+ foldToStreet3CBChance |
+ int |
+ Player had chance to fold to continuation bet on this street |
+
+
+ foldToStreet3CBDone |
+ int |
+ Player used chance to fold to continuation bet on this street |
+
+
+ foldToStreet4CBChance |
+ int |
+ Player had chance to fold to continuation bet on this street |
+
+
+ foldToStreet4CBDone |
+ int |
+ Player used chance to fold to continuation bet on this street |
+
+
+
+ street1CheckCallRaiseChance |
+ int |
+ How often player had the chance to do a check-raise or a call-raise on this street |
+
+
+ street1CheckCallRaiseDone |
+ int |
+ How often player used the chance to do a check-raise or a call-raise on this street |
+
+
+ street2CheckCallRaiseChance |
+ int |
+ How often player had the chance to do a check-raise or a call-raise on this street |
+
+
+ street2CheckCallRaiseDone |
+ int |
+ How often player used the chance to do a check-raise or a call-raise on this street |
+
+
+ street3CheckCallRaiseChance |
+ int |
+ How often player had the chance to do a check-raise or a call-raise on this street |
+
+
+ street3CheckCallRaiseDone |
+ int |
+ How often player used the chance to do a check-raise or a call-raise on this street |
+
+
+ street4CheckCallRaiseChance |
+ int |
+ How often player had the chance to do a check-raise or a call-raise on this street |
+
+
+ street4CheckCallRaiseDone |
+ int |
+ How often player used the chance to do a check-raise or a call-raise on this street |
+
+
+
+ street0Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street1Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street2Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street3Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street4Calls |
+ int |
+ Number of times player called on this street |
+
+
+
+ street0Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street1Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street2Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street3Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street4Bets |
+ int |
+ Number of times player bet on this street |
+
+
+
+ street0Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street1Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street2Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street3Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street4Raises |
+ int |
+ Number of times player raised on this street |
+
+
+
+ actionString |
+ int |
+ Experimental - idea is to store the action on this street as a string: e.g. kkBrcfC, with
+ player's own choices in upper case and other players in lower case. k=check, b=bet, c=call,
+ r=raise. (Perhaps NL would miss out bet sizes for this?) It would then be possible to do complex
+ ad-hoc queries such as: actionString like '%B%r%C%'
+ |
+
Table HudCache
@@ -444,12 +820,23 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
smallint |
References TourneyTypes.id |
-
HDs |
int |
number of hands this player played in this gametype with this number of seats |
+
+ wonWhenSeenStreet1(/2/3/4) |
+ float |
+ How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split.
+ To be completely clear, this stores a hand count, NOT a money amount.
+ (/2/3/4: Same for turn/street5, river/street6, street7) |
+
+
+ wonAtSD |
+ float |
+ As wonWhenSeenStreet1, but for showdown. |
+
street0VPI |
int |
@@ -463,14 +850,24 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
number of hands where player raised before flop |
- street0_3B4BChance |
+ street0_3BChance |
int |
- number of hands where player had chance to 3B or 4B |
+ number of hands where player had chance to 3B before flop |
- street0_3B4BDone |
+ street0_3BDone |
int |
- number of hands where player 3bet/4bet before flop |
+ number of hands where player 3bet before flop |
+
+
+ street0_4BChance |
+ int |
+ number of hands where player had chance to 4B before flop |
+
+
+ street0_4BDone |
+ int |
+ number of hands where player 4bet before flop |
street1Seen |
@@ -517,6 +914,11 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
int |
number of hands where player raised street7 |
+
+ otherRaisedStreet0 |
+ int |
+ number of hands where someone else raised pre-flop/street3 |
+
otherRaisedStreet1 |
int |
@@ -537,6 +939,11 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
int |
number of hands where someone else raised street7 |
+
+ foldToOtherRaisedStreet0 |
+ int |
+ number of hands where someone else raised pre-flop/street3 and the player folded |
+
foldToOtherRaisedStreet1 |
int |
@@ -557,18 +964,6 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
int |
number of hands where someone else raised street7 and the player folded |
-
- wonWhenSeenStreet1 |
- float |
- How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split.
- To be completely clear, this stores a hand count, NOT a money amount. |
-
-
- wonAtSD |
- float |
- As wonWhenSeenStreet1, but for showdown. |
-
-
stealAttemptChance |
int |
@@ -729,6 +1124,84 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
How often player used the chance to do a check-raise or a call-raise on this street |
+
+ street0Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street1Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street2Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street3Calls |
+ int |
+ Number of times player called on this street |
+
+
+ street4Calls |
+ int |
+ Number of times player called on this street |
+
+
+
+ street0Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street1Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street2Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street3Bets |
+ int |
+ Number of times player bet on this street |
+
+
+ street4Bets |
+ int |
+ Number of times player bet on this street |
+
+
+
+ street0Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street1Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street2Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street3Raises |
+ int |
+ Number of times player raised on this street |
+
+
+ street4Raises |
+ int |
+ Number of times player raised on this street |
+
+
Table HandsActions
@@ -926,5 +1399,32 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
|
+
+Possible Changes
+
+
+ Table |
+ Comment |
+
+
+ BoardCards |
+ Remove as these attributes are now stored on Hands |
+
+
+ HandsActions |
+ Remove if/when these attributes are stored on Hands or elsewhere |
+
+
+ HandsPlayers |
+ Move tourneyTypeId field to Hands table. |
+
+
+ Comments |
+ Comment fields on various tables should probably be moved to a single comment table. The aim
+ should be, where possible, to reduce tables to fixed-length not-null columns and to keep
+ the larger, sparser comment columns in a dedicated table. (May not be possible or practical, but
+ something to aim at.) |
+
+