#!/usr/bin/python

#Copyright 2008 Steffen Jobbagy-Felso
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as published by
#the Free Software Foundation, version 3 of the License.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU Affero General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
#In the "official" distribution you can find the license in
#agpl-3.0.txt in the docs folder of the package.

import os
import re
import sys
import logging
import datetime   # used by storeHand() below
from time import time, strftime

import fpdb_simple
import FpdbSQLQueries

class fpdb_db:
    MYSQL_INNODB = 2
    PGSQL = 3
    SQLITE = 4

    def __init__(self):
        """Simple constructor, doesn't really do anything"""
        self.db = None
        self.cursor = None
        self.sql = {}

        # Data Structures for index and foreign key creation
        # drop_code is an int with possible values:
        #   0 - don't drop for bulk import
        #   1 - drop during bulk import
        # db differences:
        # - note that mysql automatically creates indexes on constrained columns when
        #   foreign keys are created, while postgres does not. Hence the much longer
        #   list of indexes is required for postgres.
        # all primary keys are left on all the time
        #
        #             table     column           drop_code
        self.indexes = [
            [ ] # no db with index 0
            , [ ] # no db with index 1
            , [ # indexes for mysql (list index 2)
                {'tab':'Players',  'col':'name',          'drop':0}
                , {'tab':'Hands',    'col':'siteHandNo',    'drop':0}
                , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
            ]
            , [ # indexes for postgres (list index 3)
                {'tab':'Boardcards',      'col':'handId',            'drop':0}
                , {'tab':'Gametypes',       'col':'siteId',            'drop':0}
                , {'tab':'Hands',           'col':'gametypeId',        'drop':0} # mct 22/3/09
                , {'tab':'Hands',           'col':'siteHandNo',        'drop':0}
                , {'tab':'HandsActions',    'col':'handsPlayerId',     'drop':0}
                , {'tab':'HandsPlayers',    'col':'handId',            'drop':1}
                , {'tab':'HandsPlayers',    'col':'playerId',          'drop':1}
                , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
                , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
                , {'tab':'HudCache',        'col':'playerId',          'drop':0}
                , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
                , {'tab':'Players',         'col':'siteId',            'drop':1}
                , {'tab':'Players',         'col':'name',              'drop':0}
                , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
                , {'tab':'Tourneys',        'col':'siteTourneyNo',     'drop':0}
                , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
                , {'tab':'TourneysPlayers', 'col':'tourneyId',         'drop':0}
                , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
            ]
            , [ # indexes for sqlite (list index 4)
            ]
        ]

        self.foreignKeys = [
            [ ] # no db with index 0
            , [ ] # no db with index 1
            , [ # foreign keys for mysql
                {'fktab':'Hands',          'fkcol':'gametypeId',    'rtab':'Gametypes',    'rcol':'id', 'drop':1}
                , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',        'rcol':'id', 'drop':1}
                , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',      'rcol':'id', 'drop':1}
                , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
                , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',    'rcol':'id', 'drop':1}
                , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',      'rcol':'id', 'drop':0}
                , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
            ]
            , [ # foreign keys for postgres
                {'fktab':'Hands',          'fkcol':'gametypeId',    'rtab':'Gametypes',    'rcol':'id', 'drop':1}
                , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',        'rcol':'id', 'drop':1}
                , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',      'rcol':'id', 'drop':1}
                , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
                , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',    'rcol':'id', 'drop':1}
                , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',      'rcol':'id', 'drop':0}
                , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
            ]
        ]
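
        # For illustration (hypothetical note, not in the original code): both lists
        # are indexed by the backend constants defined above, e.g.
        #   self.indexes[fpdb_db.PGSQL]            -> postgres index list
        #   self.foreignKeys[fpdb_db.MYSQL_INNODB] -> mysql foreign key list
        # prepareBulkImport()/afterBulkImport() below iterate them as
        #   for fk in self.foreignKeys[self.backend]: ...
        # and only act on entries whose 'drop' value is 1.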

        # MySQL Notes:
        #   "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
        #                                               - creates index handId on <thistable>.handId
        #   alter table t drop foreign key fk
        #   alter table t add foreign key (fkcol) references tab(rcol)
        #   alter table t add constraint c foreign key (fkcol) references tab(rcol)
        #   (fkcol is used for the foreign key name)

        # mysql, to list indexes:
        #   SELECT table_name, index_name, non_unique, column_name
        #   FROM INFORMATION_SCHEMA.STATISTICS
        #   WHERE table_name = 'tbl_name'
        #   AND table_schema = 'db_name'
        #   ORDER BY table_name, index_name, seq_in_index
        #
        #   ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
        #   ALTER TABLE tab DROP INDEX idx

        # mysql, to list fks:
        #   SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
        #   FROM information_schema.KEY_COLUMN_USAGE
        #   WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
        #   AND REFERENCED_TABLE_NAME is not null
        #   ORDER BY TABLE_NAME, COLUMN_NAME;

        # this error may indicate a missing object:
        #   _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")

        # PG notes:

        # To add a foreign key constraint to a table:
        #   ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
        #   ALTER TABLE tab DROP CONSTRAINT zipchk
        #
        # Note: index names must be unique across a schema
        #   CREATE INDEX idx ON tab(col)
        #   DROP INDEX idx
    #end def __init__
2008-12-18 00:03:17 +01:00
|
|
|
|
|
|
|
def do_connect(self, config=None):
|
|
|
|
"""Connects a database using information in config"""
|
|
|
|
if config is None:
|
|
|
|
raise FpdbError('Configuration not defined')
|
|
|
|
|
|
|
|
self.settings = {}
|
2009-03-17 01:52:50 +01:00
|
|
|
self.settings['os'] = "linuxmac" if os.name != "nt" else "windows"
|
2008-12-18 00:03:17 +01:00
|
|
|
|
2009-07-14 01:04:10 +02:00
|
|
|
db = config.get_db_parameters()
|
|
|
|
self.connect(backend=db['db-backend'],
|
|
|
|
host=db['db-host'],
|
|
|
|
database=db['db-databaseName'],
|
|
|
|
user=db['db-user'],
|
|
|
|
password=db['db-password'])
|
2008-12-18 00:03:17 +01:00
|
|
|
#end def do_connect
|
2008-12-06 13:40:04 +01:00
|
|
|
|
2008-12-06 14:07:37 +01:00
|
|
|
    def connect(self, backend=None, host=None, database=None,
                user=None, password=None):
        """Connects a database with the given parameters"""
        if backend is None:
            raise fpdb_simple.FpdbError('Database backend not defined')
        self.backend = backend
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        if backend == fpdb_db.MYSQL_INNODB:
            import MySQLdb
            try:
                self.db = MySQLdb.connect(host = host, user = user, passwd = password, db = database, use_unicode=True)
            except:
                raise fpdb_simple.FpdbError("MySQL connection failed")
        elif backend == fpdb_db.PGSQL:
            import psycopg2
            import psycopg2.extensions
            psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
            # If the DB connection is made over TCP, then the variables
            # host, user and password are required.
            # For local domain-socket connections, only the DB name is
            # needed, and everything else is in fact undefined and/or
            # flat out wrong.
            # sqlcoder: This database-only connect failed in my windows setup??
            # Modified it to try the 4 parameter style if the first connect fails - does this work everywhere?
            connected = False
            if self.host == "localhost" or self.host == "127.0.0.1":
                try:
                    self.db = psycopg2.connect(database = database)
                    connected = True
                except:
                    pass
                    #msg = "PostgreSQL direct connection to database (%s) failed, trying with user ..." % (database,)
                    #print msg
                    #raise fpdb_simple.FpdbError(msg)
            if not connected:
                try:
                    self.db = psycopg2.connect(host = host,
                                               user = user,
                                               password = password,
                                               database = database)
                except:
                    msg = "PostgreSQL connection to database (%s) user (%s) failed." % (database, user)
                    print msg
                    raise fpdb_simple.FpdbError(msg)
        elif backend == fpdb_db.SQLITE:
            logging.info("Connecting to SQLite:%(database)s" % {'database':database})
            import sqlite3
            self.db = sqlite3.connect(database, detect_types=sqlite3.PARSE_DECLTYPES)
            sqlite3.register_converter("bool", lambda x: bool(int(x)))
            sqlite3.register_adapter(bool, lambda x: "1" if x else "0")
        else:
            raise fpdb_simple.FpdbError("unrecognised database backend: " + str(backend))
        self.cursor = self.db.cursor()
        # Set up query dictionary as early in the connection process as we can.
        self.sql = FpdbSQLQueries.FpdbSQLQueries(self.get_backend_name())
        self.cursor.execute(self.sql.query['set tx level'])
        self.wrongDbVersion = False
        try:
            self.cursor.execute("SELECT * FROM Settings")
            settings = self.cursor.fetchone()
            if settings[0] != 118:
                print "outdated or too new database version - please recreate tables"
                self.wrongDbVersion = True
        except: # _mysql_exceptions.ProgrammingError:
            print "failed to read settings table - please recreate tables"
            self.wrongDbVersion = True
    #end def connect

    def disconnect(self, due_to_error=False):
        """Disconnects the DB"""
        if due_to_error:
            self.db.rollback()
        else:
            self.db.commit()
        self.cursor.close()
        self.db.close()
    #end def disconnect

    def reconnect(self, due_to_error=False):
        """Reconnects the DB"""
        #print "started fpdb_db.reconnect"
        self.disconnect(due_to_error)
        self.connect(self.backend, self.host, self.database, self.user, self.password)

    def create_tables(self):
        #todo: should detect and fail gracefully if tables already exist.
        logging.debug(self.sql.query['createSettingsTable'])
        self.cursor.execute(self.sql.query['createSettingsTable'])
        logging.debug(self.sql.query['createSitesTable'])
        self.cursor.execute(self.sql.query['createSitesTable'])
        self.cursor.execute(self.sql.query['createGametypesTable'])
        self.cursor.execute(self.sql.query['createPlayersTable'])
        self.cursor.execute(self.sql.query['createAutoratesTable'])
        self.cursor.execute(self.sql.query['createHandsTable'])
        self.cursor.execute(self.sql.query['createBoardCardsTable'])
        self.cursor.execute(self.sql.query['createTourneyTypesTable'])
        self.cursor.execute(self.sql.query['createTourneysTable'])
        self.cursor.execute(self.sql.query['createTourneysPlayersTable'])
        self.cursor.execute(self.sql.query['createHandsPlayersTable'])
        self.cursor.execute(self.sql.query['createHandsActionsTable'])
        self.cursor.execute(self.sql.query['createHudCacheTable'])
        #self.cursor.execute(self.sql.query['addTourneyIndex'])
        #self.cursor.execute(self.sql.query['addHandsIndex'])
        #self.cursor.execute(self.sql.query['addPlayersIndex'])
        self.fillDefaultData()
        self.db.commit()
    #end def create_tables

    def drop_tables(self):
        """Drops the fpdb tables from the current db"""

        if(self.get_backend_name() == 'MySQL InnoDB'):
            #Databases with FOREIGN KEY support need this switched off before you can drop tables
            self.drop_referential_integrity()

            # Query the DB to see what tables exist
            self.cursor.execute(self.sql.query['list_tables'])
            for table in self.cursor:
                self.cursor.execute(self.sql.query['drop_table'] + table[0])
        elif(self.get_backend_name() == 'PostgreSQL'):
            self.db.commit()# I have no idea why this makes the query work--REB 07OCT2008
            self.cursor.execute(self.sql.query['list_tables'])
            tables = self.cursor.fetchall()
            for table in tables:
                self.cursor.execute(self.sql.query['drop_table'] + table[0] + ' cascade')
        elif(self.get_backend_name() == 'SQLite'):
            self.cursor.execute(self.sql.query['list_tables'])
            for table in self.cursor.fetchall():
                logging.debug(self.sql.query['drop_table'] + table[0])
                self.cursor.execute(self.sql.query['drop_table'] + table[0])

        self.db.commit()
    #end def drop_tables

    def drop_referential_integrity(self):
        """Update all tables to remove foreign keys"""

        self.cursor.execute(self.sql.query['list_tables'])
        result = self.cursor.fetchall()

        for i in range(len(result)):
            self.cursor.execute("SHOW CREATE TABLE " + result[i][0])
            inner = self.cursor.fetchall()

            for j in range(len(inner)):
                # inner[j][0] - Table name
                # inner[j][1] - CREATE TABLE parameters
                # Searching for CONSTRAINT `tablename_ibfk_1`
                for m in re.finditer('(ibfk_[0-9]+)', inner[j][1]):
                    key = "`" + inner[j][0] + "_" + m.group() + "`"
                    self.cursor.execute("ALTER TABLE " + inner[j][0] + " DROP FOREIGN KEY " + key)
            self.db.commit()
    #end drop_referential_integrity

    def get_backend_name(self):
        """Returns the name of the currently used backend"""
        if self.backend == 2:
            return "MySQL InnoDB"
        elif self.backend == 3:
            return "PostgreSQL"
        elif self.backend == 4:
            return "SQLite"
        else:
            raise fpdb_simple.FpdbError("invalid backend")
    #end def get_backend_name

    def get_db_info(self):
        return (self.host, self.database, self.user, self.password)
    #end def get_db_info

    def fillDefaultData(self):
        logging.debug("INSERT INTO Settings (version) VALUES (118);")
        self.cursor.execute("INSERT INTO Settings (version) VALUES (118);")
        self.cursor.execute("INSERT INTO Sites (name,currency) VALUES ('Full Tilt Poker', 'USD')")
        self.cursor.execute("INSERT INTO Sites (name,currency) VALUES ('PokerStars', 'USD')")
        self.cursor.execute("INSERT INTO Sites (name,currency) VALUES ('Everleaf', 'USD')")
        # note: the '?' placeholder below is the sqlite paramstyle; MySQLdb and psycopg2 expect '%s'
        self.cursor.execute("""INSERT INTO TourneyTypes
                               (siteId,buyin,fee,knockout,rebuyOrAddon) VALUES
                               (1,0,0,0,?)""", (False,) )
    #end def fillDefaultData

    def recreate_tables(self):
        """(Re-)creates the tables of the current DB"""

        self.drop_tables()
        self.create_tables()
        self.createAllIndexes()
        self.db.commit()
        print "Finished recreating tables"
    #end def recreate_tables

    def prepareBulkImport(self):
        """Drop some indexes/foreign keys to prepare for bulk import.
           Currently keeping the standalone indexes as needed to import quickly"""
        stime = time()
        if self.backend == self.PGSQL:
            self.db.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    self.cursor.execute("SELECT constraint_name " +
                                        "FROM information_schema.KEY_COLUMN_USAGE " +
                                        #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                                        "WHERE 1=1 " +
                                        "AND table_name = %s AND column_name = %s " +
                                        "AND referenced_table_name = %s " +
                                        "AND referenced_column_name = %s ",
                                        (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = self.cursor.fetchone()
                    #print "preparebulk: cons=", cons
                    if cons:
                        print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
                        try:
                            self.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                        except:
                            pass
                elif self.backend == self.PGSQL:
                    # DON'T FORGET TO RECREATE THEM!!
                    print "dropping pg fk", fk['fktab'], fk['fkcol']
                    try:
                        # try to lock table to see if index drop will work:
                        # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                        # then drop still hangs :-(  does work in some tests though??
                        # will leave code here for now pending further tests/enhancement ...
                        self.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                        #print "after lock, status:", self.cursor.statusmessage
                        #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                        try:
                            self.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                            print "dropped pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
                                      % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                    except:
                        print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
                              % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n'))
                else:
                    print "Only MySQL and Postgres supported so far"
                    return -1

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print "dropping mysql index ", idx['tab'], idx['col']
                    try:
                        # apparently nowait is not implemented in mysql so this just hangs if there are locks
                        # preventing the index drop :-(
                        self.cursor.execute( "alter table %s drop index %s", (idx['tab'], idx['col']) )
                    except:
                        pass
                elif self.backend == self.PGSQL:
                    # DON'T FORGET TO RECREATE THEM!!
                    print "dropping pg index ", idx['tab'], idx['col']
                    try:
                        # try to lock table to see if index drop will work:
                        self.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
                        #print "after lock, status:", self.cursor.statusmessage
                        try:
                            # table locked ok so index drop should work:
                            #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
                            self.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'], idx['col']) )
                            #print "dropped pg index ", idx['tab'], idx['col']
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
                                      % (idx['tab'], idx['col'], str(sys.exc_value).rstrip('\n'))
                    except:
                        print "warning: index %s_%s_idx not dropped %s, continuing ..." \
                              % (idx['tab'], idx['col'], str(sys.exc_value).rstrip('\n'))
                else:
                    print "Error: Only MySQL and Postgres supported so far"
                    return -1

        if self.backend == self.PGSQL:
            self.db.set_isolation_level(1)   # go back to normal isolation level
        self.db.commit()   # seems to clear up errors if there were any in postgres
        ptime = time() - stime
        print "prepare import took", ptime, "seconds"
    #end def prepareBulkImport

    def afterBulkImport(self):
        """Re-create any dropped indexes/foreign keys after bulk import"""
        stime = time()
        if self.backend == self.PGSQL:
            self.db.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    self.cursor.execute("SELECT constraint_name " +
                                        "FROM information_schema.KEY_COLUMN_USAGE " +
                                        #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                                        "WHERE 1=1 " +
                                        "AND table_name = %s AND column_name = %s " +
                                        "AND referenced_table_name = %s " +
                                        "AND referenced_column_name = %s ",
                                        (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = self.cursor.fetchone()
                    print "afterbulk: cons=", cons
                    if cons:
                        pass
                    else:
                        print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                        try:
                            self.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
                                                + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                                + fk['rcol'] + ")")
                        except:
                            pass
                elif self.backend == self.PGSQL:
                    print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                    try:
                        self.cursor.execute("alter table " + fk['fktab'] + " add constraint "
                                            + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                                            + " foreign key (" + fk['fkcol']
                                            + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                    except:
                        pass
                else:
                    print "Only MySQL and Postgres supported so far"
                    return -1

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print "creating mysql index ", idx['tab'], idx['col']
                    try:
                        self.cursor.execute( "alter table %s add index %s(%s)"
                                           , (idx['tab'], idx['col'], idx['col']) )
                    except:
                        pass
                elif self.backend == self.PGSQL:
                    # pass
                    # mod to use tab_col for index name?
                    print "creating pg index ", idx['tab'], idx['col']
                    try:
                        print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        self.cursor.execute( "create index %s_%s_idx on %s(%s)"
                                             % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
                    except:
                        print "  ERROR! :-("
                        pass
                else:
                    print "Only MySQL and Postgres supported so far"
                    return -1

        if self.backend == self.PGSQL:
            self.db.set_isolation_level(1)   # go back to normal isolation level
        self.db.commit()   # seems to clear up errors if there were any in postgres
        atime = time() - stime
        print "after import took", atime, "seconds"
    #end def afterBulkImport

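    # The two methods above are intended to bracket a bulk import. A minimal,
    # hypothetical calling sequence (not part of the original module) might look
    # like this, assuming `db` is a connected fpdb_db instance and import_hands()
    # is whatever routine feeds hands into the database:
    #
    #   db.prepareBulkImport()     # drop the 'drop':1 indexes/foreign keys
    #   try:
    #       import_hands(db)       # hypothetical bulk loader
    #   finally:
    #       db.afterBulkImport()   # re-create what prepareBulkImport() dropped
    #       db.analyzeDB()         # refresh planner statistics (PostgreSQL)
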
    def createAllIndexes(self):
        """Create new indexes"""
        if self.backend == self.PGSQL:
            self.db.set_isolation_level(0)   # allow table/index operations to work
        for idx in self.indexes[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                print "creating mysql index ", idx['tab'], idx['col']
                try:
                    self.cursor.execute( "alter table %s add index %s(%s)"
                                       , (idx['tab'], idx['col'], idx['col']) )
                except:
                    pass
            elif self.backend == self.PGSQL:
                # mod to use tab_col for index name?
                print "creating pg index ", idx['tab'], idx['col']
                try:
                    print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                    self.cursor.execute( "create index %s_%s_idx on %s(%s)"
                                         % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
                except:
                    print "  ERROR! :-("
                    pass
            else:
                print "Only MySQL and Postgres supported so far"
                return -1
        if self.backend == self.PGSQL:
            self.db.set_isolation_level(1)   # go back to normal isolation level
    #end def createAllIndexes

    def dropAllIndexes(self):
        """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
           using list of indexes in indexes data structure"""
        # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
        if self.backend == self.PGSQL:
            self.db.set_isolation_level(0)   # allow table/index operations to work
        for idx in self.indexes[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                print "dropping mysql index ", idx['tab'], idx['col']
                try:
                    self.cursor.execute( "alter table %s drop index %s"
                                       , (idx['tab'], idx['col']) )
                except:
                    pass
            elif self.backend == self.PGSQL:
                print "dropping pg index ", idx['tab'], idx['col']
                # mod to use tab_col for index name?
                try:
                    self.cursor.execute( "drop index %s_%s_idx"
                                         % (idx['tab'], idx['col']) )
                except:
                    pass
            else:
                print "Only MySQL and Postgres supported so far"
                return -1
        if self.backend == self.PGSQL:
            self.db.set_isolation_level(1)   # go back to normal isolation level
    #end def dropAllIndexes

    def analyzeDB(self):
        """Do whatever the DB can offer to update index/table statistics"""
        stime = time()
        if self.backend == self.PGSQL:
            self.db.set_isolation_level(0)   # allow vacuum to work
            try:
                self.cursor.execute("vacuum analyze")
            except:
                print "Error during vacuum"
            self.db.set_isolation_level(1)   # go back to normal isolation level
        self.db.commit()
        atime = time() - stime
        print "analyze took", atime, "seconds"
    #end def analyzeDB

    # Currently uses an exclusive lock on the Hands table as a global lock
    # Return values are Unix style, 0 for success, positive integers for errors
    #   1 = generic error
    #   2 = Hands table does not exist (error message is suppressed)
    def get_global_lock(self):
        if self.backend == self.MYSQL_INNODB:
            try:
                self.cursor.execute( "lock tables Hands write" )
            except:
                # Table 'fpdb.hands' doesn't exist
                if str(sys.exc_value).find(".Hands' doesn't exist") >= 0:
                    return(2)
                print "Error! failed to obtain global lock. Close all programs accessing " \
                      + "database (including fpdb) and try again (%s)." \
                      % ( str(sys.exc_value).rstrip('\n'), )
                return(1)
        elif self.backend == self.PGSQL:
            try:
                self.cursor.execute( "lock table Hands in exclusive mode nowait" )
                #print "... after lock table, status =", self.cursor.statusmessage
            except:
                # relation "hands" does not exist
                if str(sys.exc_value).find('relation "hands" does not exist') >= 0:
                    return(2)
                print "Error! failed to obtain global lock. Close all programs accessing " \
                      + "database (including fpdb) and try again (%s)." \
                      % ( str(sys.exc_value).rstrip('\n'), )
                return(1)
        return(0)

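    # A minimal, hypothetical caller of get_global_lock() (not part of the original
    # module), illustrating the Unix-style return codes documented above:
    #
    #   result = db.get_global_lock()
    #   if result == 2:
    #       pass                     # Hands table missing - nothing to lock yet
    #   elif result != 0:
    #       raise fpdb_simple.FpdbError("could not get global lock")
    #   # ... do work while holding the lock, then commit/rollback to release it
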
    def getLastInsertId(self):
        if self.backend == self.MYSQL_INNODB:
            ret = self.db.insert_id()
            if ret < 1 or ret > 999999999:
                print "getLastInsertId(): problem fetching insert_id? ret=", ret
                ret = -1
        elif self.backend == self.PGSQL:
            # some options:
            #   currval(hands_id_seq) - use name of implicit seq here
            #   lastval() - still needs sequences set up?
            #   insert ... returning  is useful syntax (but postgres specific?)
            #   see rules (fancy trigger type things)
            self.cursor.execute ("SELECT lastval()")
            row = self.cursor.fetchone()
            if not row:
                print "getLastInsertId(): problem fetching lastval? row=", row
                ret = -1
            else:
                ret = row[0]
        elif self.backend == fpdb_db.SQLITE:
            # don't know how to do this in sqlite
            print "getLastInsertId(): not coded for sqlite yet"
            ret = -1
        else:
            print "getLastInsertId(): unknown backend ", self.backend
            ret = -1
        return ret

    def storeHand(self, p):
        #stores into table hands:
        # Note: as in the original code, hudCache is not defined in this method or
        # passed in - the statistics values below are expected to come from a
        # hudCache dict built elsewhere before this is called.
        self.cursor.execute ("""INSERT INTO Hands (
            tablename,
            sitehandno,
            gametypeid,
            handstart,
            importtime,
            seats,
            maxseats,
            boardcard1,
            boardcard2,
            boardcard3,
            boardcard4,
            boardcard5,
            -- texture,
            playersVpi,
            playersAtStreet1,
            playersAtStreet2,
            playersAtStreet3,
            playersAtStreet4,
            playersAtShowdown,
            street0Raises,
            street1Raises,
            street2Raises,
            street3Raises,
            street4Raises
            -- street1Pot,
            -- street2Pot,
            -- street3Pot,
            -- street4Pot,
            -- showdownPot
            )
            VALUES
            (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
             %s, %s, %s, %s, %s, %s, %s)""",
            (
                p['tablename'],
                p['sitehandno'],
                p['gametypeid'],
                p['handStart'],
                datetime.datetime.today(),
                len(p['names']),
                p['maxSeats'],
                p['boardcard1'],
                p['boardcard2'],
                p['boardcard3'],
                p['boardcard4'],
                p['boardcard5'],
                hudCache['playersVpi'],
                hudCache['playersAtStreet1'],
                hudCache['playersAtStreet2'],
                hudCache['playersAtStreet3'],
                hudCache['playersAtStreet4'],
                hudCache['playersAtShowdown'],
                hudCache['street0Raises'],
                hudCache['street1Raises'],
                hudCache['street2Raises'],
                hudCache['street3Raises'],
                hudCache['street4Raises']
                # street*Pot / showdownPot values removed: their columns are
                # commented out of the INSERT above, so the 23 values match the
                # 23 placeholders.
            )
        )
        #return getLastInsertId(backend, conn, cursor)
#end class fpdb_db
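
# A minimal, hypothetical usage sketch (not part of the original module). It assumes
# fpdb's Configuration module is importable and provides Config(), as the rest of
# fpdb assumes when it calls do_connect(); adjust before relying on it.
if __name__ == "__main__":
    import Configuration                        # assumption: fpdb's own config loader
    db = fpdb_db()
    try:
        db.do_connect(Configuration.Config())   # reads db-backend/host/user/... from the config
        print "connected to", db.get_backend_name(), "db info:", db.get_db_info()
    finally:
        try:
            db.disconnect()
        except:
            pass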