#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Database.py

Create and manage the database objects.
"""
#    Copyright 2008-2010, Ray E. Barker
#
#    This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program; if not, write to the Free Software
#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

########################################################################
# TODO:  - rebuild indexes / vacuum option
#        - check speed of get_stats_from_hand() - add log info
#        - check size of db, seems big? (mysql)
#        - investigate size of mysql db (200K for just 7K hands? 2GB for 140K hands?)

#    postmaster -D /var/lib/pgsql/data
#    Standard Library modules
import os
import sys
import traceback
from datetime import datetime, date, time, timedelta
from time import time, strftime, sleep
from decimal import Decimal
import string
import re
import Queue
import codecs
import math

import logging
# logging has been set up in fpdb.py or HUD_main.py, use their settings:
log = logging.getLogger("db")
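
# The line above assumes the application (fpdb.py or HUD_main.py) configured logging
# before this module is imported.  A minimal sketch of what a standalone script could
# do instead, using only the stdlib (nothing fpdb-specific is assumed):
#
#   import logging
#   logging.basicConfig(level=logging.DEBUG,
#                       format="%(asctime)s %(name)s %(levelname)s %(message)s")
#   log = logging.getLogger("db")
#   log.debug("db logger ready")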

#    pyGTK modules


#    FreePokerTools modules
import SQL
import Card
import Charset
from Exceptions import *
import Configuration

#    Other library modules
try:
    import sqlalchemy.pool as pool
    use_pool = True
except ImportError:
    log.info("Not using sqlalchemy connection pool.")
    use_pool = False

try:
    from numpy import var
    use_numpy = True
except ImportError:
    log.info("Not using numpy to define variance in sqlite.")
    use_numpy = False


DB_VERSION = 127

# Variance created as sqlite has a bunch of undefined aggregate functions.

class VARIANCE:
    def __init__(self):
        self.store = []

    def step(self, value):
        self.store.append(value)

    def finalize(self):
        return float(var(self.store))

class sqlitemath:
    def mod(self, a, b):
        return a % b

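# Illustrative sketch (not executed here): how the VARIANCE aggregate and the
# sqlitemath helper above can be hooked into a sqlite3 connection and then used
# from SQL.  Assumes numpy is importable so VARIANCE.finalize() works.
#
#   import sqlite3
#   conn = sqlite3.connect(":memory:")
#   conn.create_aggregate("variance", 1, VARIANCE)    # exposes variance(col) in SQL
#   conn.create_function("mod", 2, sqlitemath().mod)  # exposes mod(a, b) in SQL
#   cur = conn.cursor()
#   cur.execute("create table t(x float)")
#   cur.executemany("insert into t values (?)", [(1.0,), (2.0,), (3.0,)])
#   cur.execute("select variance(x), mod(7, 3) from t")
#   print cur.fetchone()    # -> (0.666..., 1)
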
class Database:

    MYSQL_INNODB = 2
    PGSQL = 3
    SQLITE = 4

    hero_hudstart_def = '1999-12-31'      # default for length of Hero's stats in HUD
    villain_hudstart_def = '1999-12-31'   # default for length of Villain's stats in HUD

    # Data Structures for index and foreign key creation
    # drop_code is an int with possible values:  0 - don't drop for bulk import
    #                                            1 - drop during bulk import
    # db differences:
    # - note that mysql automatically creates indexes on constrained columns when
    #   foreign keys are created, while postgres does not. Hence the much longer list
    #   of indexes is required for postgres.
    # all primary keys are left on all the time
    #
    #             table     column           drop_code
    indexes = [
                [ ] # no db with index 0
              , [ ] # no db with index 1
              , [ # indexes for mysql (list index 2) (foreign keys not here, in next data structure)
                #  {'tab':'Players',  'col':'name',          'drop':0}  unique indexes not dropped
                #  {'tab':'Hands',    'col':'siteHandNo',    'drop':0}  unique indexes not dropped
                #, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}  unique indexes not dropped
                ]
              , [ # indexes for postgres (list index 3)
                  {'tab':'Gametypes',       'col':'siteId',            'drop':0}
                , {'tab':'Hands',           'col':'gametypeId',        'drop':0} # mct 22/3/09
                #, {'tab':'Hands',          'col':'siteHandNo',        'drop':0}  unique indexes not dropped
                , {'tab':'HandsActions',    'col':'handsPlayerId',     'drop':0}
                , {'tab':'HandsPlayers',    'col':'handId',            'drop':1}
                , {'tab':'HandsPlayers',    'col':'playerId',          'drop':1}
                , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
                , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
                , {'tab':'HudCache',        'col':'playerId',          'drop':0}
                , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
                , {'tab':'Players',         'col':'siteId',            'drop':1}
                #, {'tab':'Players',        'col':'name',              'drop':0}  unique indexes not dropped
                , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
                #, {'tab':'Tourneys',       'col':'siteTourneyNo',     'drop':0}  unique indexes not dropped
                , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
                #, {'tab':'TourneysPlayers','col':'tourneyId',         'drop':0}  unique indexes not dropped
                , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
                ]
              , [ # indexes for sqlite (list index 4)
                  {'tab':'Hands',           'col':'gametypeId',        'drop':0}
                , {'tab':'HandsPlayers',    'col':'handId',            'drop':0}
                , {'tab':'HandsPlayers',    'col':'playerId',          'drop':0}
                , {'tab':'HandsPlayers',    'col':'tourneyTypeId',     'drop':0}
                , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
                , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
                , {'tab':'HudCache',        'col':'playerId',          'drop':0}
                , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
                , {'tab':'Players',         'col':'siteId',            'drop':1}
                , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
                , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
                , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
                ]
              ]
    foreignKeys = [
                    [ ] # no db with index 0
                  , [ ] # no db with index 1
                  , [ # foreign keys for mysql (index 2)
                      {'fktab':'Hands',        'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers', 'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers', 'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers', 'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers', 'fkcol':'tourneysPlayersId', 'rtab':'TourneysPlayers', 'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions', 'fkcol':'handsPlayerId',     'rtab':'HandsPlayers',    'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',     'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',     'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
                    , {'fktab':'HudCache',     'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
                    ]
                  , [ # foreign keys for postgres (index 3)
                      {'fktab':'Hands',        'fkcol':'gametypeId',    'rtab':'Gametypes',    'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',        'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',      'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',    'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',      'rcol':'id', 'drop':0}
                    , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
                    ]
                  , [ # no foreign keys in sqlite (index 4)
                    ]
                  ]

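    # Illustrative sketch (not part of the class logic): prepareBulkImport() and
    # afterBulkImport() further down walk these two structures; only entries with
    # 'drop':1 are dropped and later recreated around a bulk import.  For the
    # postgres backend (list index 3), for example, the generated DDL looks like:
    #
    #   for idx in Database.indexes[3]:
    #       if idx['drop'] == 1:
    #           print "drop index if exists %s_%s_idx" % (idx['tab'], idx['col'])
    #   # -> drop index if exists HandsPlayers_handId_idx   (etc.)
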
    # MySQL Notes:
    #    "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
    #                                                - creates index handId on <thistable>.handId
    # alter table t drop foreign key fk
    # alter table t add foreign key (fkcol) references tab(rcol)
    # alter table t add constraint c foreign key (fkcol) references tab(rcol)
    # (fkcol is used for the foreign key name)

    # mysql to list indexes: (CG - "LIST INDEXES" should work too)
    #   SELECT table_name, index_name, non_unique, column_name
    #   FROM INFORMATION_SCHEMA.STATISTICS
    #     WHERE table_name = 'tbl_name'
    #     AND table_schema = 'db_name'
    #   ORDER BY table_name, index_name, seq_in_index
    #
    # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
    # ALTER TABLE tab DROP INDEX idx

    # mysql to list fks:
    #   SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
    #   FROM information_schema.KEY_COLUMN_USAGE
    #   WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
    #   AND REFERENCED_TABLE_NAME is not null
    #   ORDER BY TABLE_NAME, COLUMN_NAME;

    # this may indicate missing object
    # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")


    # PG notes:

    # To add a foreign key constraint to a table:
    #  ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
    #  ALTER TABLE tab DROP CONSTRAINT zipchk
    #
    #  Note: index names must be unique across a schema
    #  CREATE INDEX idx ON tab(col)
    #  DROP INDEX idx
    #  SELECT * FROM PG_INDEXES

    # SQLite notes:

    # To add an index:
    # create index indexname on tablename (col);
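    # For reference only (not used by this module): existing SQLite indexes can be
    # inspected with pragmas, e.g.
    #   PRAGMA index_list('HandsPlayers');
    #   PRAGMA index_info('HandsPlayers_handId_idx');
    # (the index name here assumes the <table>_<col>_idx convention used elsewhere in
    # this file; adjust to whatever names are actually present in the database.)
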
    def __init__(self, c, sql = None, autoconnect = True):
        #log = Configuration.get_logger("logging.conf", "db", log_dir=c.dir_log)
        log.debug("Creating Database instance, sql = %s" % sql)
        self.config = c
        self.__connected = False
        self.settings = {}
        self.settings['os'] = "linuxmac" if os.name != "nt" else "windows"
        db_params = c.get_db_parameters()
        self.import_options = c.get_import_parameters()
        self.backend = db_params['db-backend']
        self.db_server = db_params['db-server']
        self.database = db_params['db-databaseName']
        self.host = db_params['db-host']
        self.db_path = ''

        # where possible avoid creating new SQL instance by using the global one passed in
        if sql is None:
            self.sql = SQL.Sql(db_server = self.db_server)
        else:
            self.sql = sql

        if autoconnect:
            # connect to db
            self.do_connect(c)

            if self.backend == self.PGSQL:
                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
                #ISOLATION_LEVEL_AUTOCOMMIT     = 0
                #ISOLATION_LEVEL_READ_COMMITTED = 1
                #ISOLATION_LEVEL_SERIALIZABLE   = 2

            if self.backend == self.SQLITE and self.database == ':memory:' and self.wrongDbVersion:
                log.info("sqlite/:memory: - creating")
                self.recreate_tables()
                self.wrongDbVersion = False

            self.pcache      = None     # PlayerId cache
            self.cachemiss   = 0        # Delete me later - using to count player cache misses
            self.cachehit    = 0        # Delete me later - using to count player cache hits

            # config while trying out new hudcache mechanism
            self.use_date_in_hudcache = True

            #self.hud_hero_style = 'T'  # Duplicate set of vars just for hero - not used yet.
            #self.hud_hero_hands = 2000 # Idea is that you might want all-time stats for others
            #self.hud_hero_days  = 30   # but last T days or last H hands for yourself

            # vars for hand ids or dates fetched according to above config:
            self.hand_1day_ago = 0             # max hand id more than 24 hrs earlier than now
            self.date_ndays_ago = 'd000000'    # date N days ago ('d' + YYMMDD)
            self.h_date_ndays_ago = 'd000000'  # date N days ago ('d' + YYMMDD) for hero
            self.date_nhands_ago = {}          # dates N hands ago per player - not used yet

            self.saveActions = False if self.import_options['saveActions'] == False else True

            self.connection.rollback()  # make sure any locks taken so far are released
    #end def __init__
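
    # Typical construction (sketch only - assumes a valid fpdb config is found by
    # Configuration.Config(); with the default autoconnect=True the instance
    # connects and runs the version check immediately):
    #
    #   import Configuration, Database
    #   c  = Configuration.Config()
    #   db = Database.Database(c)          # or Database.Database(c, autoconnect=False)
    #   if db.wrongDbVersion:
    #       print "database version mismatch - tables need recreating"
    #   print db.get_backend_name(), db.getHandCount()
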
    # could be used by hud to change hud style
    def set_hud_style(self, style):
        self.hud_style = style

    def do_connect(self, c):
        if c is None:
            raise FpdbError('Configuration not defined')

        db = c.get_db_parameters()
        try:
            self.connect(backend=db['db-backend'],
                         host=db['db-host'],
                         database=db['db-databaseName'],
                         user=db['db-user'],
                         password=db['db-password'])
        except:
            # error during connect
            self.__connected = False
            raise

        db_params = c.get_db_parameters()
        self.import_options = c.get_import_parameters()
        self.backend = db_params['db-backend']
        self.db_server = db_params['db-server']
        self.database = db_params['db-databaseName']
        self.host = db_params['db-host']
        self.__connected = True

    def connect(self, backend=None, host=None, database=None,
                user=None, password=None, create=False):
        """Connects a database with the given parameters"""
        if backend is None:
            raise FpdbError('Database backend not defined')
        self.backend = backend
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connection = None
        self.cursor = None

        if backend == Database.MYSQL_INNODB:
            import MySQLdb
            if use_pool:
                MySQLdb = pool.manage(MySQLdb, pool_size=5)
            try:
                self.connection = MySQLdb.connect(host=host, user=user, passwd=password, db=database, use_unicode=True)
                #TODO: Add port option
            except MySQLdb.Error, ex:
                if ex.args[0] == 1045:
                    raise FpdbMySQLAccessDenied(ex.args[0], ex.args[1])
                elif ex.args[0] == 2002 or ex.args[0] == 2003: # 2002 is no unix socket, 2003 is no tcp socket
                    raise FpdbMySQLNoDatabase(ex.args[0], ex.args[1])
                else:
                    print "*** WARNING UNKNOWN MYSQL ERROR", ex
        elif backend == Database.PGSQL:
            import psycopg2
            import psycopg2.extensions
            if use_pool:
                psycopg2 = pool.manage(psycopg2, pool_size=5)
            psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
            # If DB connection is made over TCP, then the variables
            # host, user and password are required.
            # For local domain-socket connections, only DB name is
            # needed, and everything else is in fact undefined and/or
            # flat out wrong.
            # sqlcoder: This database-only connect failed in my windows setup??
            # Modified it to try the 4 parameter style if the first connect fails - does this work everywhere?
            connected = False
            if self.host == "localhost" or self.host == "127.0.0.1":
                try:
                    self.connection = psycopg2.connect(database = database)
                    connected = True
                except:
                    # direct connection failed so try user/pass/... version
                    pass
            if not connected:
                try:
                    self.connection = psycopg2.connect(host = host,
                                                       user = user,
                                                       password = password,
                                                       database = database)
                except Exception, ex:
                    if 'Connection refused' in ex.args[0]:
                        # meaning eg. db not running
                        raise FpdbPostgresqlNoDatabase(errmsg = ex.args[0])
                    elif 'password authentication' in ex.args[0]:
                        raise FpdbPostgresqlAccessDenied(errmsg = ex.args[0])
                    else:
                        msg = ex.args[0]
                        print msg
                        raise FpdbError(msg)
        elif backend == Database.SQLITE:
            create = True
            import sqlite3
            if use_pool:
                sqlite3 = pool.manage(sqlite3, pool_size=1)
            #else:
            #    log.warning("SQLite won't work well without 'sqlalchemy' installed.")

            if database != ":memory:":
                if not os.path.isdir(self.config.dir_database) and create:
                    print "Creating directory: '%s'" % (self.config.dir_database)
                    log.info("Creating directory: '%s'" % (self.config.dir_database))
                    os.mkdir(self.config.dir_database)
                database = os.path.join(self.config.dir_database, database)
            self.db_path = database
            log.info("Connecting to SQLite: %(database)s" % {'database':self.db_path})
            if os.path.exists(database) or create:
                self.connection = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES )
                sqlite3.register_converter("bool", lambda x: bool(int(x)))
                sqlite3.register_adapter(bool, lambda x: "1" if x else "0")
                self.connection.create_function("floor", 1, math.floor)
                tmp = sqlitemath()
                self.connection.create_function("mod", 2, tmp.mod)
                if use_numpy:
                    self.connection.create_aggregate("variance", 1, VARIANCE)
                else:
                    log.warning("Some database functions will not work without NumPy support")
                self.cursor = self.connection.cursor()
                self.cursor.execute('PRAGMA temp_store=2')  # use memory for temp tables/indexes
                self.cursor.execute('PRAGMA synchronous=0') # don't wait for file writes to finish
            else:
                raise FpdbError("sqlite database "+database+" does not exist")
        else:
            raise FpdbError("unrecognised database backend:"+str(backend))

        self.cursor = self.connection.cursor()
        self.cursor.execute(self.sql.query['set tx level'])
        self.check_version(database=database, create=create)

    def check_version(self, database, create):
        self.wrongDbVersion = False
        try:
            self.cursor.execute("SELECT * FROM Settings")
            settings = self.cursor.fetchone()
            if settings[0] != DB_VERSION:
                log.error("outdated or too new database version (%s) - please recreate tables"
                          % (settings[0]))
                self.wrongDbVersion = True
        except:# _mysql_exceptions.ProgrammingError:
            if database != ":memory:":
                if create:
                    print "Failed to read settings table - recreating tables"
                    log.info("failed to read settings table - recreating tables")
                    self.recreate_tables()
                    self.check_version(database=database, create=False)
                else:
                    print "Failed to read settings table - please recreate tables"
                    log.info("failed to read settings table - please recreate tables")
                    self.wrongDbVersion = True
            else:
                self.wrongDbVersion = True
    #end def check_version
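
    # Sketch of how a caller might react to the version check above (assumes the
    # caller is willing to wipe and rebuild the schema - recreate_tables() drops data):
    #
    #   db = Database.Database(config)
    #   if db.wrongDbVersion:
    #       # Settings table missing, or it holds a version != DB_VERSION (127 here)
    #       db.recreate_tables()                             # drops and rebuilds all fpdb tables
    #       db.check_version(db.database, create=False)      # re-check; wrongDbVersion should now be False
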
    def commit(self):
        if self.backend != self.SQLITE:
            self.connection.commit()
        else:
            # sqlite commits can fail because of shared locks on the database (SQLITE_BUSY)
            # re-try commit if it fails in case this happened
            maxtimes = 5
            pause = 1
            ok = False
            for i in xrange(maxtimes):
                try:
                    ret = self.connection.commit()
                    log.debug("commit finished ok, i = "+str(i))
                    ok = True
                except:
                    log.debug("commit "+str(i)+" failed: info=" + str(sys.exc_info())
                              + " value=" + str(sys.exc_value))
                    sleep(pause)
                if ok: break
            if not ok:
                log.debug("commit failed")
                raise FpdbError('sqlite commit failed')

    def rollback(self):
        self.connection.rollback()

    def connected(self):
        return self.__connected

    def get_cursor(self):
        return self.connection.cursor()

    def close_connection(self):
        self.connection.close()

    def disconnect(self, due_to_error=False):
        """Disconnects the DB (rolls back if param is true, otherwise commits)"""
        if due_to_error:
            self.connection.rollback()
        else:
            self.connection.commit()
        self.cursor.close()
        self.connection.close()

    def reconnect(self, due_to_error=False):
        """Reconnects the DB"""
        #print "started reconnect"
        self.disconnect(due_to_error)
        self.connect(self.backend, self.host, self.database, self.user, self.password)

    def get_backend_name(self):
        """Returns the name of the currently used backend"""
        if self.backend==2:
            return "MySQL InnoDB"
        elif self.backend==3:
            return "PostgreSQL"
        elif self.backend==4:
            return "SQLite"
        else:
            raise FpdbError("invalid backend")

    def get_db_info(self):
        return (self.host, self.database, self.user, self.password)

    def get_table_name(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_table_name'], (hand_id, ))
        row = c.fetchone()
        return row

    def get_table_info(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_table_name'], (hand_id, ))
        row = c.fetchone()
        l = list(row)
        if row[3] == "ring":   # cash game
            l.append(None)
            l.append(None)
            return l
        else:    # tournament
            tour_no, tab_no = re.split(" ", row[0])
            l.append(tour_no)
            l.append(tab_no)
            return l

    def get_last_hand(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_last_hand'])
        row = c.fetchone()
        return row[0]

    def get_xml(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_xml'], (hand_id,))
        row = c.fetchone()
        return row[0]

    def get_recent_hands(self, last_hand):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_recent_hands'], {'last_hand': last_hand})
        return c.fetchall()

    def get_hand_info(self, new_hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_hand_info'], new_hand_id)
        return c.fetchall()

    def getHandCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getHandCount'])
        return c.fetchone()[0]
    #end def getHandCount

    def getTourneyCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyCount'])
        return c.fetchone()[0]
    #end def getTourneyCount

    def getTourneyTypeCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyTypeCount'])
        return c.fetchone()[0]
    #end def getTourneyTypeCount

    def get_actual_seat(self, hand_id, name):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_actual_seat'], (hand_id, name))
        row = c.fetchone()
        return row[0]

    def get_cards(self, hand):
        """Get and return the cards for each player in the hand."""
        cards = {} # dict of cards, the key is the seat number,
                   # the value is a tuple of the players cards
                   # example: {1: (0, 0, 20, 21, 22, 0 , 0)}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_cards'], [hand])
        for row in c.fetchall():
            cards[row[0]] = row[1:]
        return cards

    def get_common_cards(self, hand):
        """Get and return the community cards for the specified hand."""
        cards = {}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_common_cards'], [hand])
        # row = c.fetchone()
        cards['common'] = c.fetchone()
        return cards

    def get_action_from_hand(self, hand_no):
        action = [ [], [], [], [], [] ]
        c = self.connection.cursor()
        c.execute(self.sql.query['get_action_from_hand'], (hand_no,))
        for row in c.fetchall():
            street = row[0]
            act = row[1:]
            action[street].append(act)
        return action

    def get_winners_from_hand(self, hand):
        """Returns a hash of winners:amount won, given a hand number."""
        winners = {}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_winners_from_hand'], (hand,))
        for row in c.fetchall():
            winners[row[0]] = row[1]
        return winners

    def init_hud_stat_vars(self, hud_days, h_hud_days):
        """Initialise variables used by Hud to fetch stats:
           self.hand_1day_ago     handId of latest hand played more than a day ago
           self.date_ndays_ago    date n days ago
           self.h_date_ndays_ago  date n days ago for hero (different n)
        """

        self.hand_1day_ago = 1
        try:
            c = self.get_cursor()
            c.execute(self.sql.query['get_hand_1day_ago'])
            row = c.fetchone()
        except: # TODO: what error is a database error?!
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print "*** Database Error: " + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
        else:
            if row and row[0]:
                self.hand_1day_ago = int(row[0])

        d = timedelta(days=hud_days)
        now = datetime.utcnow() - d
        self.date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)

        d = timedelta(days=h_hud_days)
        now = datetime.utcnow() - d
        self.h_date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)
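
    # Worked example of the keys computed above (sketch): with hud_days=30 and a
    # current UTC date of 2010-07-10, the cutoff is 2010-06-10 and date_ndays_ago
    # becomes 'd100610' - i.e. 'd' + YY + MM + DD of the cutoff - which is then
    # compared against the zero-padded styleKey strings kept in HudCache.
    #
    #   d = timedelta(days=30)
    #   now = datetime.utcnow() - d
    #   "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)   # -> 'd100610'
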
    def init_player_hud_stat_vars(self, playerid):
        # not sure if this is workable, to be continued ...
        try:
            # self.date_nhands_ago is used for fetching stats for last n hands (hud_style = 'H')
            # This option not used yet - needs to be called for each player :-(
            self.date_nhands_ago[str(playerid)] = 'd000000'

            # should use aggregated version of query if appropriate
            c = self.get_cursor()
            c.execute(self.sql.query['get_date_nhands_ago'], (self.hud_hands, playerid))
            row = c.fetchone()
            if row and row[0]:
                self.date_nhands_ago[str(playerid)] = row[0]
            c.close()
            print "Database: date n hands ago = " + self.date_nhands_ago[str(playerid)] + "(playerid "+str(playerid)+")"
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print "*** Database Error: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])

    # is get_stats_from_hand slow?
    def get_stats_from_hand( self, hand, type   # type is "ring" or "tour"
                           , hud_params = {'hud_style':'A', 'agg_bb_mult':1000
                                          ,'seats_style':'A', 'seats_cust_nums':['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
                                          ,'h_hud_style':'S', 'h_agg_bb_mult':1000
                                          ,'h_seats_style':'A', 'h_seats_cust_nums':['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
                                          }
                           , hero_id = -1
                           , num_seats = 6
                           ):
        hud_style   = hud_params['hud_style']
        agg_bb_mult = hud_params['agg_bb_mult']
        seats_style = hud_params['seats_style']
        seats_cust_nums = hud_params['seats_cust_nums']
        h_hud_style   = hud_params['h_hud_style']
        h_agg_bb_mult = hud_params['h_agg_bb_mult']
        h_seats_style = hud_params['h_seats_style']
        h_seats_cust_nums = hud_params['h_seats_cust_nums']

        stat_dict = {}

        if seats_style == 'A':
            seats_min, seats_max = 0, 10
        elif seats_style == 'C':
            seats_min, seats_max = seats_cust_nums[num_seats][0], seats_cust_nums[num_seats][1]
        elif seats_style == 'E':
            seats_min, seats_max = num_seats, num_seats
        else:
            seats_min, seats_max = 0, 10
            print "bad seats_style value:", seats_style

        if h_seats_style == 'A':
            h_seats_min, h_seats_max = 0, 10
        elif h_seats_style == 'C':
            h_seats_min, h_seats_max = h_seats_cust_nums[num_seats][0], h_seats_cust_nums[num_seats][1]
        elif h_seats_style == 'E':
            h_seats_min, h_seats_max = num_seats, num_seats
        else:
            h_seats_min, h_seats_max = 0, 10
            print "bad h_seats_style value:", h_seats_style
        log.info("opp seats style %s %d %d hero seats style %s %d %d"
                 % (seats_style, seats_min, seats_max
                   ,h_seats_style, h_seats_min, h_seats_max) )

        if hud_style == 'S' or h_hud_style == 'S':
            self.get_stats_from_hand_session(hand, stat_dict, hero_id
                                            ,hud_style, seats_min, seats_max
                                            ,h_hud_style, h_seats_min, h_seats_max)

            if hud_style == 'S' and h_hud_style == 'S':
                return stat_dict

        if hud_style == 'T':
            stylekey = self.date_ndays_ago
        elif hud_style == 'A':
            stylekey = '0000000'  # all stylekey values should be higher than this
        elif hud_style == 'S':
            stylekey = 'zzzzzzz'  # all stylekey values should be lower than this
        else:
            stylekey = '0000000'
            log.info('hud_style: %s' % hud_style)

        #elif hud_style == 'H':
        #    stylekey = date_nhands_ago  needs array by player here ...

        if h_hud_style == 'T':
            h_stylekey = self.h_date_ndays_ago
        elif h_hud_style == 'A':
            h_stylekey = '0000000'  # all stylekey values should be higher than this
        elif h_hud_style == 'S':
            h_stylekey = 'zzzzzzz'  # all stylekey values should be lower than this
        else:
            h_stylekey = '0000000'
            log.info('h_hud_style: %s' % h_hud_style)

        #elif h_hud_style == 'H':
        #    h_stylekey = date_nhands_ago  needs array by player here ...

        query = 'get_stats_from_hand_aggregated'
        subs = (hand
               ,hero_id, stylekey, agg_bb_mult, agg_bb_mult, seats_min, seats_max       # hero params
               ,hero_id, h_stylekey, h_agg_bb_mult, h_agg_bb_mult, h_seats_min, h_seats_max)  # villain params

        #print "get stats: hud style =", hud_style, "query =", query, "subs =", subs
        c = self.connection.cursor()

        # now get the stats
        c.execute(self.sql.query[query], subs)
        #for row in c.fetchall():   # needs "explain query plan" in sql statement
        #    print "query plan: ", row
        colnames = [desc[0] for desc in c.description]
        for row in c.fetchall():
            playerid = row[0]
            if (playerid == hero_id and h_hud_style != 'S') or (playerid != hero_id and hud_style != 'S'):
                t_dict = {}
                for name, val in zip(colnames, row):
                    t_dict[name.lower()] = val
                #    print t_dict
                stat_dict[t_dict['player_id']] = t_dict

        return stat_dict
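
    # Sketch of typical HUD-side usage of get_stats_from_hand() (parameter values
    # are illustrative only):
    #
    #   stat_dict = db.get_stats_from_hand(hand_id, "ring", hud_params,
    #                                      hero_id=hero_id, num_seats=6)
    #   for player_id, stats in stat_dict.iteritems():
    #       # keys are the lower-cased column names returned by the aggregated query,
    #       # e.g. stats.get('screen_name') plus whatever stat columns the SQL selects
    #       print player_id, stats.get('screen_name')
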
    # uses query on handsplayers instead of hudcache to get stats on just this session
    def get_stats_from_hand_session(self, hand, stat_dict, hero_id
                                   ,hud_style, seats_min, seats_max
                                   ,h_hud_style, h_seats_min, h_seats_max):
        """Get stats for just this session (currently defined as any play in the last 24 hours - to
           be improved at some point ...)
           h_hud_style and hud_style params indicate whether to get stats for hero and/or others
           - only fetch hero's stats if h_hud_style == 'S',
             and only fetch others' stats if hud_style == 'S'
           seats_min/max params give seats limits, only include stats if between these values
        """

        query = self.sql.query['get_stats_from_hand_session']
        if self.db_server == 'mysql':
            query = query.replace("<signed>", 'signed ')
        else:
            query = query.replace("<signed>", '')

        subs = (self.hand_1day_ago, hand, hero_id, seats_min, seats_max
               , hero_id, h_seats_min, h_seats_max)
        c = self.get_cursor()

        # now get the stats
        #print "sess_stats: subs =", subs, "subs[0] =", subs[0]
        c.execute(query, subs)
        colnames = [desc[0] for desc in c.description]
        n = 0

        row = c.fetchone()
        if colnames[0].lower() == 'player_id':

            # Loop through stats adding them to appropriate stat_dict:
            while row:
                playerid = row[0]
                seats = row[1]
                if (playerid == hero_id and h_hud_style == 'S') or (playerid != hero_id and hud_style == 'S'):
                    for name, val in zip(colnames, row):
                        if not playerid in stat_dict:
                            stat_dict[playerid] = {}
                            stat_dict[playerid][name.lower()] = val
                        elif not name.lower() in stat_dict[playerid]:
                            stat_dict[playerid][name.lower()] = val
                        elif name.lower() not in ('hand_id', 'player_id', 'seat', 'screen_name', 'seats'):
                            #print "DEBUG: stat_dict[%s][%s]: %s" %(playerid, name.lower(), val)
                            stat_dict[playerid][name.lower()] += val
                    n += 1
                    if n >= 10000: break  # todo: don't think this is needed so set nice and high
                                          # prevents infinite loop so leave for now - comment out or remove?
                row = c.fetchone()
        else:
            log.error("ERROR: query %s result does not have player_id as first column" % (query,))

        #print "   %d rows fetched, len(stat_dict) = %d" % (n, len(stat_dict))

        #print "session stat_dict =", stat_dict
        #return stat_dict

    def get_player_id(self, config, siteName, playerName):
        c = self.connection.cursor()
        siteNameUtf = Charset.to_utf8(siteName)
        playerNameUtf = Charset.to_utf8(playerName)
        #print "db.get_player_id siteName",siteName,"playerName",playerName
        c.execute(self.sql.query['get_player_id'], (playerNameUtf, siteNameUtf))
        row = c.fetchone()
        if row:
            return row[0]
        else:
            return None

    def get_player_names(self, config, site_id=None, like_player_name="%"):
        """Fetch player names from players. Use site_id and like_player_name if provided"""

        if site_id is None:
            site_id = -1
        c = self.get_cursor()
        p_name = Charset.to_utf8(like_player_name)
        c.execute(self.sql.query['get_player_names'], (p_name, site_id, site_id))
        rows = c.fetchall()
        return rows

    def get_site_id(self, site):
        c = self.get_cursor()
        c.execute(self.sql.query['getSiteId'], (site,))
        result = c.fetchall()
        return result

    def get_last_insert_id(self, cursor=None):
        ret = None
        try:
            if self.backend == self.MYSQL_INNODB:
                ret = self.connection.insert_id()
                if ret < 1 or ret > 999999999:
                    log.warning("getLastInsertId(): problem fetching insert_id? ret=%d" % ret)
                    ret = -1
            elif self.backend == self.PGSQL:
                # some options:
                #   currval(hands_id_seq) - use name of implicit seq here
                #   lastval() - still needs sequences set up?
                #   insert ... returning  is useful syntax (but postgres specific?)
                #   see rules (fancy trigger type things)
                c = self.get_cursor()
                ret = c.execute ("SELECT lastval()")
                row = c.fetchone()
                if not row:
                    log.warning("getLastInsertId(): problem fetching lastval? row=%s" % (row,))
                    ret = -1
                else:
                    ret = row[0]
            elif self.backend == self.SQLITE:
                ret = cursor.lastrowid
            else:
                log.error("getLastInsertId(): unknown backend: %d" % self.backend)
                ret = -1
        except:
            ret = -1
            err = traceback.extract_tb(sys.exc_info()[2])
            print "*** Database get_last_insert_id error: " + str(sys.exc_info()[1])
            print "\n".join( [e[0]+':'+str(e[1])+" "+e[2] for e in err] )
            raise
        return ret
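
    # The "insert ... returning" option mentioned above avoids the extra SELECT
    # lastval() round-trip on PostgreSQL; a psycopg2 sketch (column values shown
    # are illustrative only):
    #
    #   c = self.get_cursor()
    #   c.execute("INSERT INTO Players (name, siteId) VALUES (%s, %s) RETURNING id",
    #             (name, site_id))
    #   new_id = c.fetchone()[0]
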
    def prepareBulkImport(self):
        """Drop some indexes/foreign keys to prepare for bulk import.
           Currently keeping the standalone indexes as needed to import quickly"""
        stime = time()
        c = self.get_cursor()
        # sc: don't think autocommit=0 is needed, should already be in that mode
        if self.backend == self.MYSQL_INNODB:
            c.execute("SET foreign_key_checks=0")
            c.execute("SET autocommit=0")
            return
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "preparebulk find fk: cons=", cons
                    if cons:
                        print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
                        try:
                            c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                        except:
                            print "    drop failed: " + str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    #    DON'T FORGET TO RECREATE THEM!!
                    print "dropping pg fk", fk['fktab'], fk['fkcol']
                    try:
                        # try to lock table to see if index drop will work:
                        # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                        # then drop still hangs :-(  does work in some tests though??
                        # will leave code here for now pending further tests/enhancement ...
                        c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                        #print "after lock, status:", c.statusmessage
                        #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                        try:
                            c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                            print "dropped pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
                                      % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                    except:
                        print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
                              % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
                else:
                    return -1

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print "dropping mysql index ", idx['tab'], idx['col']
                    try:
                        # apparently nowait is not implemented in mysql so this just hangs if there are locks
                        # preventing the index drop :-(
                        c.execute( "alter table %s drop index %s;", (idx['tab'],idx['col']) )
                    except:
                        print "    drop index failed: " + str(sys.exc_info())
                        # ALTER TABLE `fpdb`.`handsplayers` DROP INDEX `playerId`;
                        # using: 'HandsPlayers' drop index 'playerId'
                elif self.backend == self.PGSQL:
                    #    DON'T FORGET TO RECREATE THEM!!
                    print "dropping pg index ", idx['tab'], idx['col']
                    try:
                        # try to lock table to see if index drop will work:
                        c.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
                        #print "after lock, status:", c.statusmessage
                        try:
                            # table locked ok so index drop should work:
                            #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
                            c.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
                            #print "dropped  pg index ", idx['tab'], idx['col']
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
                                      % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
                    except:
                        print "warning: index %s_%s_idx not dropped %s, continuing ..." \
                              % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
                else:
                    return -1

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit() # seems to clear up errors if there were any in postgres
        ptime = time() - stime
        print "prepare import took", ptime, "seconds"
    #end def prepareBulkImport

    def afterBulkImport(self):
        """Re-create any dropped indexes/foreign keys after bulk import"""
        stime = time()

        c = self.get_cursor()
        if self.backend == self.MYSQL_INNODB:
            c.execute("SET foreign_key_checks=1")
            c.execute("SET autocommit=1")
            return

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "afterbulk: cons=", cons
                    if cons:
                        pass
                    else:
                        print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                        try:
                            c.execute("alter table " + fk['fktab'] + " add foreign key ("
                                      + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                      + fk['rcol'] + ")")
                        except:
                            print "    create fk failed: " + str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                    try:
                        c.execute("alter table " + fk['fktab'] + " add constraint "
                                  + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                                  + " foreign key (" + fk['fkcol']
                                  + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                    except:
                        print "    create fk failed: " + str(sys.exc_info())
                else:
                    return -1

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print "creating mysql index ", idx['tab'], idx['col']
                    try:
                        s = "alter table %s add index %s(%s)" % (idx['tab'],idx['col'],idx['col'])
                        c.execute(s)
                    except:
                        print "    create index failed: " + str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    # pass
                    # mod to use tab_col for index name?
                    print "creating pg index ", idx['tab'], idx['col']
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        c.execute(s)
                    except:
                        print "    create index failed: " + str(sys.exc_info())
                else:
                    return -1

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()   # seems to clear up errors if there were any in postgres
        atime = time() - stime
        print "After import took", atime, "seconds"
    #end def afterBulkImport
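
    # Sketch of how the two methods above are meant to bracket a large import
    # (the importer call itself is illustrative, not a real function in this file):
    #
    #   db.prepareBulkImport()      # drop the 'drop':1 indexes / foreign keys
    #   try:
    #       import_all_hands(db)    # whatever does the heavy inserting
    #   finally:
    #       db.afterBulkImport()    # recreate them
    #       db.commit()
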
    def drop_referential_integrity(self):
        """Update all tables to remove foreign keys"""

        c = self.get_cursor()
        c.execute(self.sql.query['list_tables'])
        result = c.fetchall()

        for i in range(len(result)):
            c.execute("SHOW CREATE TABLE " + result[i][0])
            inner = c.fetchall()

            for j in range(len(inner)):
                # result[i][0] - Table name
                # result[i][1] - CREATE TABLE parameters
                # Searching for CONSTRAINT `tablename_ibfk_1`
                for m in re.finditer('(ibfk_[0-9]+)', inner[j][1]):
                    key = "`" + inner[j][0] + "_" + m.group() + "`"
                    c.execute("ALTER TABLE " + inner[j][0] + " DROP FOREIGN KEY " + key)
                self.commit()
    #end def drop_referential_integrity

    def recreate_tables(self):
        """(Re-)creates the tables of the current DB"""

        self.drop_tables()
        self.create_tables()
        self.createAllIndexes()
        self.commit()
        print "Finished recreating tables"
        log.info("Finished recreating tables")
    #end def recreate_tables

def create_tables(self):
|
|
|
|
#todo: should detect and fail gracefully if tables already exist.
|
|
|
|
try:
|
2009-08-12 02:46:39 +02:00
|
|
|
log.debug(self.sql.query['createSettingsTable'])
|
2009-07-29 00:58:10 +02:00
|
|
|
c = self.get_cursor()
|
|
|
|
c.execute(self.sql.query['createSettingsTable'])
|
2009-09-26 00:10:58 +02:00
|
|
|
|
2010-06-04 09:25:56 +02:00
|
|
|
log.debug("Creating tables")
|
2009-07-29 00:58:10 +02:00
|
|
|
c.execute(self.sql.query['createSitesTable'])
|
|
|
|
c.execute(self.sql.query['createGametypesTable'])
|
|
|
|
c.execute(self.sql.query['createPlayersTable'])
|
|
|
|
c.execute(self.sql.query['createAutoratesTable'])
|
|
|
|
c.execute(self.sql.query['createHandsTable'])
|
|
|
|
c.execute(self.sql.query['createTourneyTypesTable'])
|
|
|
|
c.execute(self.sql.query['createTourneysTable'])
|
|
|
|
c.execute(self.sql.query['createTourneysPlayersTable'])
|
|
|
|
c.execute(self.sql.query['createHandsPlayersTable'])
|
|
|
|
c.execute(self.sql.query['createHandsActionsTable'])
|
|
|
|
c.execute(self.sql.query['createHudCacheTable'])
|
2009-09-26 00:10:58 +02:00
|
|
|
|
2010-06-04 09:25:56 +02:00
|
|
|
# Create unique indexes:
|
|
|
|
log.debug("Creating unique indexes")
|
2009-09-24 00:03:34 +02:00
|
|
|
c.execute(self.sql.query['addTourneyIndex'])
|
2009-09-26 00:10:58 +02:00
|
|
|
c.execute(self.sql.query['addHandsIndex'])
|
|
|
|
c.execute(self.sql.query['addPlayersIndex'])
|
|
|
|
c.execute(self.sql.query['addTPlayersIndex'])
|
|
|
|
c.execute(self.sql.query['addTTypesIndex'])
|
|
|
|
|
2009-07-29 00:58:10 +02:00
|
|
|
self.fillDefaultData()
|
|
|
|
self.commit()
|
|
|
|
except:
|
2009-08-04 23:06:03 +02:00
|
|
|
#print "Error creating tables: ", str(sys.exc_value)
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "***Error creating tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
|
2009-07-29 00:58:10 +02:00
|
|
|
self.rollback()
|
2009-08-04 23:06:03 +02:00
|
|
|
raise
|
2009-07-29 00:58:10 +02:00
|
|
|
#end def create_tables
|
|
|
|
|
|
|
|
def drop_tables(self):
|
|
|
|
"""Drops the fpdb tables from the current db"""
|
|
|
|
try:
|
2009-07-29 07:17:51 +02:00
|
|
|
c = self.get_cursor()
|
2009-07-29 00:58:10 +02:00
|
|
|
except:
|
2009-09-04 13:49:46 +02:00
|
|
|
print "*** Error unable to get cursor"
|
|
|
|
else:
|
|
|
|
backend = self.get_backend_name()
|
|
|
|
if backend == 'MySQL InnoDB': # what happens if someone is using MyISAM?
|
|
|
|
try:
|
|
|
|
self.drop_referential_integrity() # needed to drop tables with foreign keys
|
|
|
|
c.execute(self.sql.query['list_tables'])
|
|
|
|
tables = c.fetchall()
|
|
|
|
for table in tables:
|
|
|
|
c.execute(self.sql.query['drop_table'] + table[0])
|
|
|
|
except:
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "***Error dropping tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
|
|
|
|
self.rollback()
|
|
|
|
elif backend == 'PostgreSQL':
|
|
|
|
try:
|
|
|
|
self.commit()
|
|
|
|
c.execute(self.sql.query['list_tables'])
|
|
|
|
tables = c.fetchall()
|
|
|
|
for table in tables:
|
|
|
|
c.execute(self.sql.query['drop_table'] + table[0] + ' cascade')
|
|
|
|
except:
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "***Error dropping tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
|
|
|
|
self.rollback()
|
|
|
|
elif backend == 'SQLite':
|
|
|
|
try:
|
|
|
|
c.execute(self.sql.query['list_tables'])
|
|
|
|
for table in c.fetchall():
|
|
|
|
log.debug(self.sql.query['drop_table'] + table[0])
|
|
|
|
c.execute(self.sql.query['drop_table'] + table[0])
|
|
|
|
except:
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "***Error dropping tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
|
|
|
|
self.rollback()
|
|
|
|
try:
|
|
|
|
self.commit()
|
|
|
|
except:
|
|
|
|
print "*** Error in committing table drop"
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "***Error dropping tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
|
|
|
|
self.rollback()
|
2009-07-29 00:58:10 +02:00
|
|
|
#end def drop_tables
|
|
|
|
|
|
|
|
def createAllIndexes(self):
|
|
|
|
"""Create new indexes"""
|
|
|
|
|
|
|
|
try:
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(0) # allow table/index operations to work
|
|
|
|
for idx in self.indexes[self.backend]:
|
|
|
|
if self.backend == self.MYSQL_INNODB:
|
2010-06-04 08:37:46 +02:00
|
|
|
print "Creating mysql index %s %s" %(idx['tab'], idx['col'])
|
|
|
|
log.debug("Creating sqlite index %s %s" %(idx['tab'], idx['col']))
|
2009-07-29 00:58:10 +02:00
|
|
|
try:
|
2009-08-03 02:30:51 +02:00
|
|
|
s = "create index %s on %s(%s)" % (idx['col'],idx['tab'],idx['col'])
|
|
|
|
self.get_cursor().execute(s)
|
2009-07-29 00:58:10 +02:00
|
|
|
except:
|
2009-08-03 02:30:51 +02:00
|
|
|
print " create idx failed: " + str(sys.exc_info())
|
2009-07-29 00:58:10 +02:00
|
|
|
elif self.backend == self.PGSQL:
|
|
|
|
# mod to use tab_col for index name?
|
2010-06-04 08:37:46 +02:00
|
|
|
print "Creating pg index %s %s" %(idx['tab'], idx['col'])
|
|
|
|
log.debug("Creating sqlite index %s %s" %(idx['tab'], idx['col']))
|
2009-07-29 00:58:10 +02:00
|
|
|
try:
|
2009-08-03 02:30:51 +02:00
|
|
|
s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
|
|
|
|
self.get_cursor().execute(s)
|
2009-07-29 00:58:10 +02:00
|
|
|
except:
|
2009-08-03 02:30:51 +02:00
|
|
|
print " create idx failed: " + str(sys.exc_info())
|
2009-08-12 02:46:39 +02:00
|
|
|
elif self.backend == self.SQLITE:
|
2010-06-04 08:37:46 +02:00
|
|
|
print "Creating sqlite index %s %s" %(idx['tab'], idx['col'])
|
|
|
|
log.debug("Creating sqlite index %s %s" %(idx['tab'], idx['col']))
|
2009-08-12 02:46:39 +02:00
|
|
|
try:
|
|
|
|
s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
|
|
|
|
self.get_cursor().execute(s)
|
|
|
|
except:
|
|
|
|
log.debug("Create idx failed: " + str(sys.exc_info()))
|
2009-07-29 00:58:10 +02:00
|
|
|
else:
|
2010-06-04 08:37:46 +02:00
|
|
|
print "Unknown database: MySQL, Postgres and SQLite supported"
|
2009-07-29 00:58:10 +02:00
|
|
|
return -1
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(1) # go back to normal isolation level
|
|
|
|
except:
|
|
|
|
print "Error creating indexes: " + str(sys.exc_value)
|
2009-08-12 02:46:39 +02:00
|
|
|
raise FpdbError( "Error creating indexes " + str(sys.exc_value) )
|
2009-07-29 00:58:10 +02:00
|
|
|
#end def createAllIndexes
|
|
|
|
|
|
|
|
def dropAllIndexes(self):
|
|
|
|
"""Drop all standalone indexes (i.e. not including primary keys or foreign keys)
|
|
|
|
using list of indexes in indexes data structure"""
|
|
|
|
# maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(0) # allow table/index operations to work
|
|
|
|
for idx in self.indexes[self.backend]:
|
|
|
|
if self.backend == self.MYSQL_INNODB:
|
|
|
|
print "dropping mysql index ", idx['tab'], idx['col']
|
|
|
|
try:
|
|
|
|
self.get_cursor().execute( "alter table %s drop index %s"
|
2009-11-30 00:02:45 +01:00
|
|
|
% (idx['tab'], idx['col']) )
|
2009-07-29 00:58:10 +02:00
|
|
|
except:
|
2009-11-30 00:02:45 +01:00
|
|
|
print " drop idx failed: " + str(sys.exc_info())
|
2009-07-29 00:58:10 +02:00
|
|
|
elif self.backend == self.PGSQL:
|
|
|
|
print "dropping pg index ", idx['tab'], idx['col']
|
|
|
|
# mod to use tab_col for index name?
|
|
|
|
try:
|
|
|
|
self.get_cursor().execute( "drop index %s_%s_idx"
|
|
|
|
% (idx['tab'],idx['col']) )
|
|
|
|
except:
|
2009-11-30 00:02:45 +01:00
|
|
|
print " drop idx failed: " + str(sys.exc_info())
|
2010-06-04 08:37:46 +02:00
|
|
|
elif self.backend == self.SQLITE:
|
|
|
|
print "Dropping sqlite index ", idx['tab'], idx['col']
|
|
|
|
try:
|
|
|
|
self.get_cursor().execute( "drop index %s_%s_idx"
|
|
|
|
% (idx['tab'],idx['col']) )
|
|
|
|
except:
|
|
|
|
print " drop idx failed: " + str(sys.exc_info())
|
2009-07-29 00:58:10 +02:00
|
|
|
else:
|
2010-06-04 08:37:46 +02:00
|
|
|
print "Only MySQL, Postgres and SQLITE supported, what are you trying to use?"
|
2009-07-29 00:58:10 +02:00
|
|
|
return -1
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(1) # go back to normal isolation level
|
|
|
|
#end def dropAllIndexes
|
2009-11-30 00:02:45 +01:00
|
|
|
|
|
|
|
def createAllForeignKeys(self):
|
|
|
|
"""Create foreign keys"""
|
|
|
|
|
|
|
|
try:
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(0) # allow table/index operations to work
|
|
|
|
c = self.get_cursor()
|
|
|
|
except:
|
|
|
|
print " set_isolation_level failed: " + str(sys.exc_info())
|
|
|
|
|
|
|
|
for fk in self.foreignKeys[self.backend]:
|
|
|
|
if self.backend == self.MYSQL_INNODB:
|
|
|
|
c.execute("SELECT constraint_name " +
|
|
|
|
"FROM information_schema.KEY_COLUMN_USAGE " +
|
|
|
|
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
|
|
|
|
"WHERE 1=1 " +
|
|
|
|
"AND table_name = %s AND column_name = %s " +
|
|
|
|
"AND referenced_table_name = %s " +
|
|
|
|
"AND referenced_column_name = %s ",
|
|
|
|
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
|
|
|
|
cons = c.fetchone()
|
|
|
|
#print "afterbulk: cons=", cons
|
|
|
|
if cons:
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
|
|
|
|
try:
|
|
|
|
c.execute("alter table " + fk['fktab'] + " add foreign key ("
|
|
|
|
+ fk['fkcol'] + ") references " + fk['rtab'] + "("
|
|
|
|
+ fk['rcol'] + ")")
|
|
|
|
except:
|
|
|
|
print " create fk failed: " + str(sys.exc_info())
|
|
|
|
elif self.backend == self.PGSQL:
|
|
|
|
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
|
|
|
|
try:
|
|
|
|
c.execute("alter table " + fk['fktab'] + " add constraint "
|
|
|
|
+ fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
|
|
|
|
+ " foreign key (" + fk['fkcol']
|
|
|
|
+ ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
|
|
|
|
except:
|
|
|
|
print " create fk failed: " + str(sys.exc_info())
|
|
|
|
else:
|
|
|
|
print "Only MySQL and Postgres supported so far"
|
|
|
|
|
|
|
|
try:
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(1) # go back to normal isolation level
|
|
|
|
except:
|
|
|
|
print " set_isolation_level failed: " + str(sys.exc_info())
|
|
|
|
#end def createAllForeignKeys
|
|
|
|
|
|
|
|
def dropAllForeignKeys(self):
|
|
|
|
"""Drop all standalone indexes (i.e. not including primary keys or foreign keys)
|
|
|
|
using the list of foreign keys in the foreignKeys data structure"""
|
|
|
|
# maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(0) # allow table/index operations to work
|
|
|
|
c = self.get_cursor()
|
|
|
|
|
|
|
|
for fk in self.foreignKeys[self.backend]:
|
|
|
|
if self.backend == self.MYSQL_INNODB:
|
|
|
|
c.execute("SELECT constraint_name " +
|
|
|
|
"FROM information_schema.KEY_COLUMN_USAGE " +
|
|
|
|
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
|
|
|
|
"WHERE 1=1 " +
|
|
|
|
"AND table_name = %s AND column_name = %s " +
|
|
|
|
"AND referenced_table_name = %s " +
|
|
|
|
"AND referenced_column_name = %s ",
|
|
|
|
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
|
|
|
|
cons = c.fetchone()
|
|
|
|
#print "preparebulk find fk: cons=", cons
|
|
|
|
if cons:
|
|
|
|
print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
|
|
|
|
try:
|
|
|
|
c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
|
|
|
|
except:
|
|
|
|
print " drop failed: " + str(sys.exc_info())
|
|
|
|
elif self.backend == self.PGSQL:
|
|
|
|
# DON'T FORGET TO RECREATE THEM!!
|
|
|
|
print "dropping pg fk", fk['fktab'], fk['fkcol']
|
|
|
|
try:
|
|
|
|
# try to lock table to see if index drop will work:
|
|
|
|
# hmmm, tested by commenting out rollback in grapher. lock seems to work but
|
|
|
|
# then drop still hangs :-( does work in some tests though??
|
|
|
|
# will leave code here for now pending further tests/enhancement ...
|
|
|
|
c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
|
|
|
|
#print "after lock, status:", c.statusmessage
|
|
|
|
#print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
|
|
|
|
try:
|
|
|
|
c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
|
|
|
|
print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
|
|
|
|
except:
|
|
|
|
if "does not exist" not in str(sys.exc_value):
|
|
|
|
print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
|
|
|
|
% (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
|
|
|
|
except:
|
|
|
|
print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
|
|
|
|
% (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
|
|
|
|
else:
|
|
|
|
print "Only MySQL and Postgres supported so far"
|
|
|
|
|
|
|
|
if self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(1) # go back to normal isolation level
|
|
|
|
#end def dropAllForeignKeys
|
|
|
|
|
2009-07-29 00:58:10 +02:00
|
|
|
|
|
|
|
def fillDefaultData(self):
|
2009-08-05 18:58:25 +02:00
|
|
|
c = self.get_cursor()
|
2010-01-27 01:48:02 +01:00
|
|
|
c.execute("INSERT INTO Settings (version) VALUES (%s);" % (DB_VERSION))
|
2010-06-20 17:34:58 +02:00
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('Full Tilt Poker', 'FT')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('PokerStars', 'PS')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('Everleaf', 'EV')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('Win2day', 'W2')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('OnGame', 'OG')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('UltimateBet', 'UB')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('Betfair', 'BF')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('Absolute', 'AB')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('PartyPoker', 'PP')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('Partouche', 'PA')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('Carbon', 'CA')")
|
|
|
|
c.execute("INSERT INTO Sites (name,code) VALUES ('PKR', 'PK')")
|
2009-08-04 23:06:03 +02:00
|
|
|
if self.backend == self.SQLITE:
|
2010-06-26 12:24:30 +02:00
|
|
|
c.execute("""INSERT INTO TourneyTypes (id, siteId, currency, buyin, fee, buyInChips, maxSeats, knockout,
|
2010-07-02 19:39:02 +02:00
|
|
|
rebuy, addOn, speed, shootout, matrix)
|
|
|
|
VALUES (NULL, 1, 'USD', 0, 0, 0, 0, 0, 0, 0, NULL, 0, 0);""")
|
2009-09-19 10:44:06 +02:00
|
|
|
elif self.backend == self.PGSQL:
|
2010-06-25 11:02:01 +02:00
|
|
|
c.execute("""insert into TourneyTypes(siteId, currency, buyin, fee, buyInChips, maxSeats, knockout
|
2010-07-02 19:39:02 +02:00
|
|
|
,rebuy, addOn, speed, shootout, matrix)
|
|
|
|
values (1, 'USD', 0, 0, 0, 0, False, False, False, null, False, False);""")
|
2009-09-28 01:44:55 +02:00
|
|
|
elif self.backend == self.MYSQL_INNODB:
|
2010-06-25 11:02:01 +02:00
|
|
|
c.execute("""insert into TourneyTypes(id, siteId, currency, buyin, fee, buyInChips, maxSeats, knockout
|
2010-07-02 19:39:02 +02:00
|
|
|
,rebuy, addOn, speed, shootout, matrix)
|
|
|
|
values (DEFAULT, 1, 'USD', 0, 0, 0, 0, False, False, False, null, False, False);""")
|
2009-07-29 00:58:10 +02:00
|
|
|
#end def fillDefaultData
|
|
|
|
|
2009-11-30 00:02:45 +01:00
|
|
|
def rebuild_indexes(self, start=None):
|
|
|
|
self.dropAllIndexes()
|
|
|
|
self.createAllIndexes()
|
|
|
|
self.dropAllForeignKeys()
|
|
|
|
self.createAllForeignKeys()
|
2010-06-25 11:02:01 +02:00
|
|
|
#end def rebuild_indexes
|
2009-11-30 00:02:45 +01:00
|
|
|
|
2009-11-30 22:43:29 +01:00
|
|
|
def rebuild_hudcache(self, h_start=None, v_start=None):
|
2009-07-29 00:58:10 +02:00
|
|
|
"""clears hudcache and rebuilds from the individual handsplayers records"""
|
|
|
|
|
|
|
|
try:
|
|
|
|
stime = time()
|
2009-09-27 14:36:45 +02:00
|
|
|
# derive list of program owner's player ids
|
|
|
|
self.hero = {} # name of program owner indexed by site id
|
|
|
|
self.hero_ids = {'dummy':-53, 'dummy2':-52} # playerid of owner indexed by site id
|
|
|
|
# make sure at least two values in list
|
|
|
|
# so that the generated tuple doesn't come out as
|
|
|
|
# () or (1,) style
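# (illustrative: str(tuple([42])) gives '(42,)', which is not valid inside a SQL
#  "in (...)" clause, whereas str(tuple([-53, -52, 42])) gives '(-53, -52, 42)' which is)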
|
2009-09-27 00:32:44 +02:00
|
|
|
for site in self.config.get_supported_sites():
|
|
|
|
result = self.get_site_id(site)
|
|
|
|
if result:
|
|
|
|
site_id = result[0][0]
|
|
|
|
self.hero[site_id] = self.config.supported_sites[site].screen_name
|
2009-09-27 03:06:07 +02:00
|
|
|
p_id = self.get_player_id(self.config, site, self.hero[site_id])
|
|
|
|
if p_id:
|
2009-09-27 14:36:45 +02:00
|
|
|
self.hero_ids[site_id] = int(p_id)
|
|
|
|
|
2009-11-30 22:43:29 +01:00
|
|
|
if h_start is None:
|
|
|
|
h_start = self.hero_hudstart_def
|
|
|
|
if v_start is None:
|
|
|
|
v_start = self.villain_hudstart_def
|
2009-09-27 03:06:07 +02:00
|
|
|
if self.hero_ids == {}:
|
|
|
|
where = ""
|
|
|
|
else:
|
2009-11-30 22:43:29 +01:00
|
|
|
where = "where ( hp.playerId not in " + str(tuple(self.hero_ids.values())) \
|
|
|
|
+ " and h.handStart > '" + v_start + "')" \
|
|
|
|
+ " or ( hp.playerId in " + str(tuple(self.hero_ids.values())) \
|
|
|
|
+ " and h.handStart > '" + h_start + "')"
|
2009-09-27 00:32:44 +02:00
|
|
|
rebuild_sql = self.sql.query['rebuildHudCache'].replace('<where_clause>', where)
|
|
|
|
|
2009-08-07 01:04:44 +02:00
|
|
|
self.get_cursor().execute(self.sql.query['clearHudCache'])
|
2009-09-27 00:32:44 +02:00
|
|
|
self.get_cursor().execute(rebuild_sql)
|
2009-07-29 00:58:10 +02:00
|
|
|
self.commit()
|
|
|
|
print "Rebuild hudcache took %.1f seconds" % (time() - stime,)
|
|
|
|
except:
|
2009-08-07 01:04:44 +02:00
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
2009-07-29 00:58:10 +02:00
|
|
|
print "Error rebuilding hudcache:", str(sys.exc_value)
|
2009-08-07 01:04:44 +02:00
|
|
|
print err
|
2009-07-21 23:26:23 +02:00
|
|
|
#end def rebuild_hudcache
|
|
|
|
|
2009-09-27 02:42:26 +02:00
|
|
|
def get_hero_hudcache_start(self):
|
|
|
|
"""fetches earliest stylekey from hudcache for one of hero's player ids"""
|
|
|
|
|
|
|
|
try:
|
2009-09-27 14:36:45 +02:00
|
|
|
# derive list of program owner's player ids
|
|
|
|
self.hero = {} # name of program owner indexed by site id
|
|
|
|
self.hero_ids = {'dummy':-53, 'dummy2':-52} # playerid of owner indexed by site id
|
|
|
|
# make sure at least two values in list
|
|
|
|
# so that the generated tuple doesn't come out as
|
|
|
|
# () or (1,) style
|
2009-09-27 02:42:26 +02:00
|
|
|
for site in self.config.get_supported_sites():
|
|
|
|
result = self.get_site_id(site)
|
|
|
|
if result:
|
|
|
|
site_id = result[0][0]
|
|
|
|
self.hero[site_id] = self.config.supported_sites[site].screen_name
|
2009-09-27 14:36:45 +02:00
|
|
|
p_id = self.get_player_id(self.config, site, self.hero[site_id])
|
|
|
|
if p_id:
|
|
|
|
self.hero_ids[site_id] = int(p_id)
|
2009-09-27 02:42:26 +02:00
|
|
|
|
|
|
|
q = self.sql.query['get_hero_hudcache_start'].replace("<playerid_list>", str(tuple(self.hero_ids.values())))
|
|
|
|
c = self.get_cursor()
|
|
|
|
c.execute(q)
|
|
|
|
tmp = c.fetchone()
|
2009-09-27 14:36:45 +02:00
|
|
|
if tmp == (None,):
|
2009-09-27 02:42:26 +02:00
|
|
|
return self.hero_hudstart_def
|
|
|
|
else:
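# styleKeys look like 'dYYMMDD'; e.g. 'd100708' is converted back to '2010-07-08' below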
|
|
|
|
return "20"+tmp[0][1:3] + "-" + tmp[0][3:5] + "-" + tmp[0][5:7]
|
|
|
|
except:
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "Error rebuilding hudcache:", str(sys.exc_value)
|
|
|
|
print err
|
|
|
|
#end def get_hero_hudcache_start
|
|
|
|
|
2009-07-21 23:26:23 +02:00
|
|
|
|
|
|
|
def analyzeDB(self):
|
|
|
|
"""Do whatever the DB can offer to update index/table statistics"""
|
|
|
|
stime = time()
|
|
|
|
if self.backend == self.MYSQL_INNODB:
|
|
|
|
try:
|
2009-07-26 02:42:09 +02:00
|
|
|
self.get_cursor().execute(self.sql.query['analyze'])
|
2009-07-21 23:26:23 +02:00
|
|
|
except:
|
2009-07-26 02:42:09 +02:00
|
|
|
print "Error during analyze:", str(sys.exc_value)
|
2009-07-21 23:26:23 +02:00
|
|
|
elif self.backend == self.PGSQL:
|
2009-11-30 00:02:45 +01:00
|
|
|
self.connection.set_isolation_level(0) # allow analyze to work
|
2009-07-21 23:26:23 +02:00
|
|
|
try:
|
2009-07-26 02:42:09 +02:00
|
|
|
self.get_cursor().execute(self.sql.query['analyze'])
|
2009-07-21 23:26:23 +02:00
|
|
|
except:
|
|
|
|
print "Error during analyze:", str(sys.exc_value)
|
|
|
|
self.connection.set_isolation_level(1) # go back to normal isolation level
|
|
|
|
self.commit()
|
|
|
|
atime = time() - stime
|
|
|
|
print "Analyze took %.1f seconds" % (atime,)
|
|
|
|
#end def analyzeDB
|
|
|
|
|
2009-11-30 00:02:45 +01:00
|
|
|
def vacuumDB(self):
|
|
|
|
"""Do whatever the DB can offer to update index/table statistics"""
|
|
|
|
stime = time()
|
|
|
|
if self.backend == self.MYSQL_INNODB:
|
|
|
|
try:
|
|
|
|
self.get_cursor().execute(self.sql.query['vacuum'])
|
|
|
|
except:
|
|
|
|
print "Error during vacuum:", str(sys.exc_value)
|
|
|
|
elif self.backend == self.PGSQL:
|
|
|
|
self.connection.set_isolation_level(0) # allow vacuum to work
|
|
|
|
try:
|
|
|
|
self.get_cursor().execute(self.sql.query['vacuum'])
|
|
|
|
except:
|
|
|
|
print "Error during vacuum:", str(sys.exc_value)
|
|
|
|
self.connection.set_isolation_level(1) # go back to normal isolation level
|
|
|
|
self.commit()
|
|
|
|
atime = time() - stime
|
|
|
|
print "Vacuum took %.1f seconds" % (atime,)
|
|
|
|
#end def vacuumDB
|
2009-07-26 02:42:09 +02:00
|
|
|
|
|
|
|
# Start of Hand Writing routines. Idea is to provide a mixture of routines to store Hand data
|
|
|
|
# however the calling prog requires. Main aims:
|
|
|
|
# - existing static routines from fpdb_simple just modified
|
2009-07-28 22:19:31 +02:00
|
|
|
|
|
|
|
def lock_for_insert(self):
|
|
|
|
"""Lock tables in MySQL to try to speed inserts up"""
|
|
|
|
try:
|
|
|
|
self.get_cursor().execute(self.sql.query['lockForInsert'])
|
|
|
|
except:
|
2010-01-27 00:54:04 +01:00
|
|
|
print "Error during lock_for_insert:", str(sys.exc_value)
|
2009-07-28 22:19:31 +02:00
|
|
|
#end def lock_for_insert
|
2009-07-29 07:17:51 +02:00
|
|
|
|
2009-09-19 13:24:07 +02:00
|
|
|
###########################
|
|
|
|
# NEWIMPORT CODE
|
|
|
|
###########################
|
|
|
|
|
2009-08-06 21:31:46 +02:00
|
|
|
def storeHand(self, p):
|
|
|
|
#stores into table hands:
|
2009-10-14 06:05:17 +02:00
|
|
|
q = self.sql.query['store_hand']
|
2009-08-08 10:15:36 +02:00
|
|
|
|
|
|
|
q = q.replace('%s', self.sql.query['placeholder'])
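# swap the generic '%s' markers for this backend's own placeholder
# (presumably '?' for sqlite, '%s' for MySQL/Postgres)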
|
2009-10-13 11:42:02 +02:00
|
|
|
|
2009-11-30 00:02:45 +01:00
|
|
|
c = self.get_cursor()
|
2009-11-24 09:39:59 +01:00
|
|
|
|
|
|
|
c.execute(q, (
|
2009-08-08 10:15:36 +02:00
|
|
|
p['tableName'],
|
2009-08-08 19:59:44 +02:00
|
|
|
p['gameTypeId'],
|
2009-08-08 10:15:36 +02:00
|
|
|
p['siteHandNo'],
|
2010-01-26 23:46:48 +01:00
|
|
|
0, # tourneyId: 0 means not a tourney hand
|
2009-08-06 21:31:46 +02:00
|
|
|
p['handStart'],
|
2009-08-08 10:15:36 +02:00
|
|
|
datetime.today(), #importtime
|
2009-08-08 19:59:44 +02:00
|
|
|
p['seats'],
|
2009-10-06 06:08:20 +02:00
|
|
|
p['maxSeats'],
|
2009-10-13 08:04:19 +02:00
|
|
|
p['texture'],
|
2009-10-08 12:07:54 +02:00
|
|
|
p['playersVpi'],
|
2009-08-06 21:31:46 +02:00
|
|
|
p['boardcard1'],
|
|
|
|
p['boardcard2'],
|
|
|
|
p['boardcard3'],
|
|
|
|
p['boardcard4'],
|
2009-08-12 16:21:54 +02:00
|
|
|
p['boardcard5'],
|
2009-10-08 12:13:13 +02:00
|
|
|
p['playersAtStreet1'],
|
|
|
|
p['playersAtStreet2'],
|
|
|
|
p['playersAtStreet3'],
|
|
|
|
p['playersAtStreet4'],
|
|
|
|
p['playersAtShowdown'],
|
2009-10-13 08:04:19 +02:00
|
|
|
p['street0Raises'],
|
|
|
|
p['street1Raises'],
|
|
|
|
p['street2Raises'],
|
|
|
|
p['street3Raises'],
|
|
|
|
p['street4Raises'],
|
2009-08-12 16:21:54 +02:00
|
|
|
p['street1Pot'],
|
|
|
|
p['street2Pot'],
|
|
|
|
p['street3Pot'],
|
|
|
|
p['street4Pot'],
|
|
|
|
p['showdownPot']
|
|
|
|
))
|
2009-11-24 09:39:59 +01:00
|
|
|
return self.get_last_insert_id(c)
|
2009-08-06 21:31:46 +02:00
|
|
|
# def storeHand
|
|
|
|
|
2009-10-14 14:20:17 +02:00
|
|
|
def storeHandsPlayers(self, hid, pids, pdata):
|
|
|
|
#print "DEBUG: %s %s %s" %(hid, pids, pdata)
|
|
|
|
inserts = []
|
|
|
|
for p in pdata:
|
|
|
|
inserts.append( (hid,
|
|
|
|
pids[p],
|
|
|
|
pdata[p]['startCash'],
|
2009-10-14 17:55:51 +02:00
|
|
|
pdata[p]['seatNo'],
|
2010-07-02 22:17:15 +02:00
|
|
|
pdata[p]['sitout'],
|
2009-11-22 17:05:23 +01:00
|
|
|
pdata[p]['card1'],
|
|
|
|
pdata[p]['card2'],
|
|
|
|
pdata[p]['card3'],
|
|
|
|
pdata[p]['card4'],
|
2009-11-22 17:20:51 +01:00
|
|
|
pdata[p]['card5'],
|
|
|
|
pdata[p]['card6'],
|
|
|
|
pdata[p]['card7'],
|
2009-11-06 11:39:59 +01:00
|
|
|
pdata[p]['winnings'],
|
2009-11-24 09:39:59 +01:00
|
|
|
pdata[p]['rake'],
|
2009-11-24 10:29:54 +01:00
|
|
|
pdata[p]['totalProfit'],
|
2009-11-06 12:30:50 +01:00
|
|
|
pdata[p]['street0VPI'],
|
2009-11-06 12:13:52 +01:00
|
|
|
pdata[p]['street1Seen'],
|
|
|
|
pdata[p]['street2Seen'],
|
|
|
|
pdata[p]['street3Seen'],
|
|
|
|
pdata[p]['street4Seen'],
|
2009-11-24 16:14:43 +01:00
|
|
|
pdata[p]['sawShowdown'],
|
2009-11-24 16:23:21 +01:00
|
|
|
pdata[p]['wonAtSD'],
|
2009-10-14 17:55:51 +02:00
|
|
|
pdata[p]['street0Aggr'],
|
|
|
|
pdata[p]['street1Aggr'],
|
|
|
|
pdata[p]['street2Aggr'],
|
|
|
|
pdata[p]['street3Aggr'],
|
2009-11-24 14:58:27 +01:00
|
|
|
pdata[p]['street4Aggr'],
|
2009-11-25 07:59:29 +01:00
|
|
|
pdata[p]['street1CBChance'],
|
|
|
|
pdata[p]['street2CBChance'],
|
|
|
|
pdata[p]['street3CBChance'],
|
|
|
|
pdata[p]['street4CBChance'],
|
|
|
|
pdata[p]['street1CBDone'],
|
|
|
|
pdata[p]['street2CBDone'],
|
|
|
|
pdata[p]['street3CBDone'],
|
|
|
|
pdata[p]['street4CBDone'],
|
2009-11-24 17:57:14 +01:00
|
|
|
pdata[p]['wonWhenSeenStreet1'],
|
|
|
|
pdata[p]['street0Calls'],
|
|
|
|
pdata[p]['street1Calls'],
|
|
|
|
pdata[p]['street2Calls'],
|
|
|
|
pdata[p]['street3Calls'],
|
|
|
|
pdata[p]['street4Calls'],
|
|
|
|
pdata[p]['street0Bets'],
|
|
|
|
pdata[p]['street1Bets'],
|
|
|
|
pdata[p]['street2Bets'],
|
|
|
|
pdata[p]['street3Bets'],
|
|
|
|
pdata[p]['street4Bets'],
|
2009-12-14 10:52:08 +01:00
|
|
|
pdata[p]['position'],
|
|
|
|
pdata[p]['tourneyTypeId'],
|
|
|
|
pdata[p]['startCards'],
|
|
|
|
pdata[p]['street0_3BChance'],
|
|
|
|
pdata[p]['street0_3BDone'],
|
|
|
|
pdata[p]['otherRaisedStreet1'],
|
|
|
|
pdata[p]['otherRaisedStreet2'],
|
|
|
|
pdata[p]['otherRaisedStreet3'],
|
|
|
|
pdata[p]['otherRaisedStreet4'],
|
|
|
|
pdata[p]['foldToOtherRaisedStreet1'],
|
|
|
|
pdata[p]['foldToOtherRaisedStreet2'],
|
|
|
|
pdata[p]['foldToOtherRaisedStreet3'],
|
|
|
|
pdata[p]['foldToOtherRaisedStreet4'],
|
|
|
|
pdata[p]['stealAttemptChance'],
|
|
|
|
pdata[p]['stealAttempted'],
|
|
|
|
pdata[p]['foldBbToStealChance'],
|
|
|
|
pdata[p]['foldedBbToSteal'],
|
|
|
|
pdata[p]['foldSbToStealChance'],
|
|
|
|
pdata[p]['foldedSbToSteal'],
|
|
|
|
pdata[p]['foldToStreet1CBChance'],
|
|
|
|
pdata[p]['foldToStreet1CBDone'],
|
|
|
|
pdata[p]['foldToStreet2CBChance'],
|
|
|
|
pdata[p]['foldToStreet2CBDone'],
|
|
|
|
pdata[p]['foldToStreet3CBChance'],
|
|
|
|
pdata[p]['foldToStreet3CBDone'],
|
|
|
|
pdata[p]['foldToStreet4CBChance'],
|
|
|
|
pdata[p]['foldToStreet4CBDone'],
|
|
|
|
pdata[p]['street1CheckCallRaiseChance'],
|
|
|
|
pdata[p]['street1CheckCallRaiseDone'],
|
|
|
|
pdata[p]['street2CheckCallRaiseChance'],
|
|
|
|
pdata[p]['street2CheckCallRaiseDone'],
|
|
|
|
pdata[p]['street3CheckCallRaiseChance'],
|
|
|
|
pdata[p]['street3CheckCallRaiseDone'],
|
2009-12-18 03:27:43 +01:00
|
|
|
pdata[p]['street4CheckCallRaiseChance'],
|
2010-06-11 20:33:08 +02:00
|
|
|
pdata[p]['street4CheckCallRaiseDone'],
|
|
|
|
pdata[p]['street0Raises'],
|
|
|
|
pdata[p]['street1Raises'],
|
|
|
|
pdata[p]['street2Raises'],
|
|
|
|
pdata[p]['street3Raises'],
|
|
|
|
pdata[p]['street4Raises']
|
2009-10-14 14:20:17 +02:00
|
|
|
) )
|
|
|
|
|
2009-12-14 11:01:24 +01:00
|
|
|
q = self.sql.query['store_hands_players']
|
2009-10-13 11:42:02 +02:00
|
|
|
q = q.replace('%s', self.sql.query['placeholder'])
|
|
|
|
|
2009-10-14 14:20:17 +02:00
|
|
|
#print "DEBUG: inserts: %s" %inserts
|
2009-11-24 09:39:59 +01:00
|
|
|
#print "DEBUG: q: %s" % q
|
2009-11-30 00:02:45 +01:00
|
|
|
c = self.get_cursor()
|
2009-11-24 09:39:59 +01:00
|
|
|
c.executemany(q, inserts)
|
2009-10-13 11:42:02 +02:00
|
|
|
|
2009-12-23 16:14:34 +01:00
|
|
|
def storeHudCache(self, gid, pids, starttime, pdata):
|
|
|
|
"""Update cached statistics. If update fails because no record exists, do an insert."""
|
2009-07-26 02:42:09 +02:00
|
|
|
|
2009-12-23 16:14:34 +01:00
|
|
|
if self.use_date_in_hudcache:
|
|
|
|
styleKey = datetime.strftime(starttime, 'd%y%m%d')
|
|
|
|
#styleKey = "d%02d%02d%02d" % (hand_start_time.year-2000, hand_start_time.month, hand_start_time.day)
|
|
|
|
else:
|
|
|
|
# hard-code styleKey as 'A000000' (all-time cache, no key) for now
|
|
|
|
styleKey = 'A000000'
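# e.g. with use_date_in_hudcache a hand starting on 2010-07-08 gets styleKey 'd100708'
# from the branch above; otherwise every row shares the all-time key 'A000000'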
|
|
|
|
|
|
|
|
update_hudcache = self.sql.query['update_hudcache']
|
|
|
|
update_hudcache = update_hudcache.replace('%s', self.sql.query['placeholder'])
|
|
|
|
insert_hudcache = self.sql.query['insert_hudcache']
|
|
|
|
insert_hudcache = insert_hudcache.replace('%s', self.sql.query['placeholder'])
|
2009-07-26 02:42:09 +02:00
|
|
|
|
2009-12-23 16:14:34 +01:00
|
|
|
#print "DEBUG: %s %s %s" %(hid, pids, pdata)
|
|
|
|
inserts = []
|
|
|
|
for p in pdata:
|
2010-06-11 20:33:08 +02:00
|
|
|
line = [0]*76
|
2009-12-23 16:14:34 +01:00
|
|
|
|
|
|
|
line[0] = 1 # HDs
|
|
|
|
if pdata[p]['street0VPI']: line[1] = 1
|
|
|
|
if pdata[p]['street0Aggr']: line[2] = 1
|
|
|
|
if pdata[p]['street0_3BChance']: line[3] = 1
|
|
|
|
if pdata[p]['street0_3BDone']: line[4] = 1
|
|
|
|
if pdata[p]['street1Seen']: line[5] = 1
|
|
|
|
if pdata[p]['street2Seen']: line[6] = 1
|
|
|
|
if pdata[p]['street3Seen']: line[7] = 1
|
|
|
|
if pdata[p]['street4Seen']: line[8] = 1
|
|
|
|
if pdata[p]['sawShowdown']: line[9] = 1
|
|
|
|
if pdata[p]['street1Aggr']: line[10] = 1
|
|
|
|
if pdata[p]['street2Aggr']: line[11] = 1
|
|
|
|
if pdata[p]['street3Aggr']: line[12] = 1
|
|
|
|
if pdata[p]['street4Aggr']: line[13] = 1
|
|
|
|
if pdata[p]['otherRaisedStreet1']: line[14] = 1
|
|
|
|
if pdata[p]['otherRaisedStreet2']: line[15] = 1
|
|
|
|
if pdata[p]['otherRaisedStreet3']: line[16] = 1
|
|
|
|
if pdata[p]['otherRaisedStreet4']: line[17] = 1
|
|
|
|
if pdata[p]['foldToOtherRaisedStreet1']: line[18] = 1
|
|
|
|
if pdata[p]['foldToOtherRaisedStreet2']: line[19] = 1
|
|
|
|
if pdata[p]['foldToOtherRaisedStreet3']: line[20] = 1
|
|
|
|
if pdata[p]['foldToOtherRaisedStreet4']: line[21] = 1
|
|
|
|
line[22] = pdata[p]['wonWhenSeenStreet1']
|
|
|
|
line[23] = pdata[p]['wonAtSD']
|
|
|
|
if pdata[p]['stealAttemptChance']: line[24] = 1
|
|
|
|
if pdata[p]['stealAttempted']: line[25] = 1
|
|
|
|
if pdata[p]['foldBbToStealChance']: line[26] = 1
|
|
|
|
if pdata[p]['foldedBbToSteal']: line[27] = 1
|
|
|
|
if pdata[p]['foldSbToStealChance']: line[28] = 1
|
|
|
|
if pdata[p]['foldedSbToSteal']: line[29] = 1
|
|
|
|
if pdata[p]['street1CBChance']: line[30] = 1
|
|
|
|
if pdata[p]['street1CBDone']: line[31] = 1
|
|
|
|
if pdata[p]['street2CBChance']: line[32] = 1
|
|
|
|
if pdata[p]['street2CBDone']: line[33] = 1
|
|
|
|
if pdata[p]['street3CBChance']: line[34] = 1
|
|
|
|
if pdata[p]['street3CBDone']: line[35] = 1
|
|
|
|
if pdata[p]['street4CBChance']: line[36] = 1
|
|
|
|
if pdata[p]['street4CBDone']: line[37] = 1
|
|
|
|
if pdata[p]['foldToStreet1CBChance']: line[38] = 1
|
|
|
|
if pdata[p]['foldToStreet1CBDone']: line[39] = 1
|
|
|
|
if pdata[p]['foldToStreet2CBChance']: line[40] = 1
|
|
|
|
if pdata[p]['foldToStreet2CBDone']: line[41] = 1
|
|
|
|
if pdata[p]['foldToStreet3CBChance']: line[42] = 1
|
|
|
|
if pdata[p]['foldToStreet3CBDone']: line[43] = 1
|
|
|
|
if pdata[p]['foldToStreet4CBChance']: line[44] = 1
|
|
|
|
if pdata[p]['foldToStreet4CBDone']: line[45] = 1
|
|
|
|
line[46] = pdata[p]['totalProfit']
|
|
|
|
if pdata[p]['street1CheckCallRaiseChance']: line[47] = 1
|
|
|
|
if pdata[p]['street1CheckCallRaiseDone']: line[48] = 1
|
|
|
|
if pdata[p]['street2CheckCallRaiseChance']: line[49] = 1
|
|
|
|
if pdata[p]['street2CheckCallRaiseDone']: line[50] = 1
|
|
|
|
if pdata[p]['street3CheckCallRaiseChance']: line[51] = 1
|
|
|
|
if pdata[p]['street3CheckCallRaiseDone']: line[52] = 1
|
|
|
|
if pdata[p]['street4CheckCallRaiseChance']: line[53] = 1
|
|
|
|
if pdata[p]['street4CheckCallRaiseDone']: line[54] = 1
|
2010-06-10 21:00:30 +02:00
|
|
|
if pdata[p]['street0Calls']: line[55] = 1
|
|
|
|
if pdata[p]['street1Calls']: line[56] = 1
|
|
|
|
if pdata[p]['street2Calls']: line[57] = 1
|
|
|
|
if pdata[p]['street3Calls']: line[58] = 1
|
|
|
|
if pdata[p]['street4Calls']: line[59] = 1
|
|
|
|
if pdata[p]['street0Bets']: line[60] = 1
|
|
|
|
if pdata[p]['street1Bets']: line[61] = 1
|
|
|
|
if pdata[p]['street2Bets']: line[62] = 1
|
|
|
|
if pdata[p]['street3Bets']: line[63] = 1
|
|
|
|
if pdata[p]['street4Bets']: line[64] = 1
|
2010-06-11 20:33:08 +02:00
|
|
|
if pdata[p]['street0Raises']: line[65] = 1
|
|
|
|
if pdata[p]['street1Raises']: line[66] = 1
|
|
|
|
if pdata[p]['street2Raises']: line[67] = 1
|
|
|
|
if pdata[p]['street3Raises']: line[68] = 1
|
|
|
|
if pdata[p]['street4Raises']: line[69] = 1
|
|
|
|
|
|
|
|
line[70] = gid # gametypeId
|
|
|
|
line[71] = pids[p] # playerId
|
|
|
|
line[72] = len(pids) # activeSeats
|
2010-01-15 14:24:37 +01:00
|
|
|
pos = {'B':'B', 'S':'S', 0:'D', 1:'C', 2:'M', 3:'M', 4:'M', 5:'E', 6:'E', 7:'E', 8:'E', 9:'E' }
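# collapse seat positions into HUD buckets; reading of the letters assumed here:
# B/S = big/small blind, D = dealer/button, C = cutoff, M = middle, E = early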
|
2010-06-11 20:33:08 +02:00
|
|
|
line[73] = pos[pdata[p]['position']]
|
|
|
|
line[74] = pdata[p]['tourneyTypeId']
|
|
|
|
line[75] = styleKey # styleKey
|
2009-12-23 16:14:34 +01:00
|
|
|
inserts.append(line)
|
|
|
|
|
|
|
|
|
|
|
|
cursor = self.get_cursor()
|
|
|
|
|
|
|
|
for row in inserts:
|
|
|
|
# Try to do the update first:
|
|
|
|
num = cursor.execute(update_hudcache, row)
|
2009-12-23 18:12:39 +01:00
|
|
|
#print "DEBUG: values: %s" % row[-6:]
|
|
|
|
# Test statusmessage to see if update worked, do insert if not
|
|
|
|
# num is a cursor in sqlite
|
2009-12-23 16:14:34 +01:00
|
|
|
if ((self.backend == self.PGSQL and cursor.statusmessage != "UPDATE 1")
|
|
|
|
or (self.backend == self.MYSQL_INNODB and num == 0)
|
|
|
|
or (self.backend == self.SQLITE and num.rowcount == 0)):
|
|
|
|
#move the last 6 items in WHERE clause of row from the end of the array
|
|
|
|
# to the beginning for the INSERT statement
|
2009-12-23 18:12:39 +01:00
|
|
|
#print "DEBUG: using INSERT: %s" % num
|
2009-12-23 16:14:34 +01:00
|
|
|
row = row[-6:] + row[:-6]
|
|
|
|
num = cursor.execute(insert_hudcache, row)
|
2009-12-23 18:12:39 +01:00
|
|
|
#print "DEBUG: Successfully(?: %s) updated HudCacho using INSERT" % num
|
2009-12-23 16:14:34 +01:00
|
|
|
else:
|
2009-12-23 18:12:39 +01:00
|
|
|
#print "DEBUG: Successfully updated HudCacho using UPDATE"
|
2009-12-23 16:14:34 +01:00
|
|
|
pass
|
2009-07-26 02:42:09 +02:00
|
|
|
|
2009-12-23 06:56:18 +01:00
|
|
|
def isDuplicate(self, gametypeID, siteHandNo):
|
|
|
|
dup = False
|
|
|
|
c = self.get_cursor()
|
|
|
|
c.execute(self.sql.query['isAlreadyInDB'], (gametypeID, siteHandNo))
|
|
|
|
result = c.fetchall()
|
|
|
|
if len(result) > 0:
|
|
|
|
dup = True
|
|
|
|
return dup
|
|
|
|
|
|
|
|
def getGameTypeId(self, siteid, game):
|
|
|
|
c = self.get_cursor()
|
|
|
|
#FIXME: Fixed for NL at the moment
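# blind sizes are converted to integer units before the lookup,
# e.g. Decimal('0.25') * 100 -> 25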
|
|
|
|
c.execute(self.sql.query['getGametypeNL'], (siteid, game['type'], game['category'], game['limitType'],
|
|
|
|
int(Decimal(game['sb'])*100), int(Decimal(game['bb'])*100)))
|
|
|
|
tmp = c.fetchone()
|
|
|
|
if (tmp == None):
|
|
|
|
hilo = "h"
|
|
|
|
if game['category'] in ['studhilo', 'omahahilo']:
|
|
|
|
hilo = "s"
|
|
|
|
elif game['category'] in ['razz','27_3draw','badugi']:
|
|
|
|
hilo = "l"
|
2010-06-25 11:02:01 +02:00
|
|
|
tmp = self.insertGameTypes( (siteid, 'USD', game['type'], game['base'], game['category'], game['limitType'], hilo,
|
2009-12-23 06:56:18 +01:00
|
|
|
int(Decimal(game['sb'])*100), int(Decimal(game['bb'])*100), 0, 0) )
|
2010-06-25 11:02:01 +02:00
|
|
|
#FIXME: recognise currency
|
2009-12-23 06:56:18 +01:00
|
|
|
return tmp[0]
|
|
|
|
|
|
|
|
def getSqlPlayerIDs(self, pnames, siteid):
|
|
|
|
result = {}
|
|
|
|
if(self.pcache == None):
|
2010-02-03 16:48:39 +01:00
|
|
|
self.pcache = LambdaDict(lambda key:self.insertPlayer(key[0], key[1]))
|
2009-07-26 02:42:09 +02:00
|
|
|
|
2009-12-23 06:56:18 +01:00
|
|
|
for player in pnames:
|
2010-02-03 16:48:39 +01:00
|
|
|
result[player] = self.pcache[(player,siteid)]
|
2009-12-23 06:56:18 +01:00
|
|
|
# NOTE: Using the LambdaDict does the same thing as:
|
|
|
|
#if player in self.pcache:
|
|
|
|
# #print "DEBUG: cachehit"
|
|
|
|
# pass
|
|
|
|
#else:
|
|
|
|
# self.pcache[player] = self.insertPlayer(player, siteid)
|
|
|
|
#result[player] = self.pcache[player]
|
|
|
|
|
|
|
|
return result
|
|
|
|
|
|
|
|
def insertPlayer(self, name, site_id):
|
|
|
|
result = None
|
2010-01-29 23:29:14 +01:00
|
|
|
_name = Charset.to_db_utf8(name)
|
2009-12-23 06:56:18 +01:00
|
|
|
c = self.get_cursor()
|
|
|
|
q = "SELECT name, id FROM Players WHERE siteid=%s and name=%s"
|
|
|
|
q = q.replace('%s', self.sql.query['placeholder'])
|
|
|
|
|
|
|
|
#NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
|
|
|
|
#Usage:
|
|
|
|
# INSERT INTO `tags` (`tag`, `count`)
|
|
|
|
# VALUES ($tag, 1)
|
|
|
|
# ON DUPLICATE KEY UPDATE `count`=`count`+1;
|
|
|
|
|
|
|
|
|
|
|
|
#print "DEBUG: name: %s site: %s" %(name, site_id)
|
|
|
|
|
2009-12-24 08:52:47 +01:00
|
|
|
c.execute (q, (site_id, _name))
|
2009-12-23 06:56:18 +01:00
|
|
|
|
|
|
|
tmp = c.fetchone()
|
|
|
|
if (tmp == None): #new player
|
|
|
|
c.execute ("INSERT INTO Players (name, siteId) VALUES (%s, %s)".replace('%s',self.sql.query['placeholder'])
|
2009-12-24 08:52:47 +01:00
|
|
|
,(_name, site_id))
|
2009-12-23 06:56:18 +01:00
|
|
|
#Get last id might be faster here.
|
|
|
|
#c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
|
|
|
|
result = self.get_last_insert_id(c)
|
|
|
|
else:
|
|
|
|
result = tmp[1]
|
|
|
|
return result
|
|
|
|
|
|
|
|
def insertGameTypes(self, row):
|
|
|
|
c = self.get_cursor()
|
|
|
|
c.execute( self.sql.query['insertGameTypes'], row )
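# wrap the new id in a list so callers (e.g. getGameTypeId) can index it like a fetchone() row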
|
|
|
|
return [self.get_last_insert_id(c)]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#################################
|
|
|
|
# Finish of NEWIMPORT CODE
|
|
|
|
#################################
|
|
|
|
|
2009-07-31 22:24:21 +02:00
|
|
|
# read HandToWrite objects from q and insert into database
|
|
|
|
def insert_queue_hands(self, q, maxwait=10, commitEachHand=True):
|
|
|
|
n,fails,maxTries,firstWait = 0,0,4,0.1
|
|
|
|
sendFinal = False
|
|
|
|
t0 = time()
|
|
|
|
while True:
|
|
|
|
try:
|
|
|
|
h = q.get(True) # (True,maxWait) has probs if 1st part of import is all dups
|
|
|
|
except Queue.Empty:
|
|
|
|
# Queue.Empty exception is thrown if q stays empty for the timeout period
|
|
|
|
# testing q.empty() would also be possible - but there's no point if we already catch Queue.Empty
|
|
|
|
# maybe increment a counter and only break after a few times?
|
|
|
|
# could also test threading.active_count() or look through threading.enumerate()
|
|
|
|
# so break immediately if no threads, but count up to X exceptions if a writer
|
|
|
|
# thread is still alive???
|
|
|
|
print "queue empty too long - writer stopping ..."
|
|
|
|
break
|
|
|
|
except:
|
|
|
|
print "writer stopping, error reading queue: " + str(sys.exc_info())
|
|
|
|
break
|
|
|
|
#print "got hand", str(h.get_finished())
|
|
|
|
|
|
|
|
tries,wait,again = 0,firstWait,True
|
|
|
|
while again:
|
|
|
|
try:
|
|
|
|
again = False # set this immediately to avoid infinite loops!
|
|
|
|
if h.get_finished():
|
|
|
|
# all items on queue processed
|
|
|
|
sendFinal = True
|
|
|
|
else:
|
|
|
|
self.store_the_hand(h)
|
|
|
|
# optional commit, could be every hand / every N hands / every time a
|
|
|
|
# commit message received?? mark flag to indicate if commits outstanding
|
|
|
|
if commitEachHand:
|
|
|
|
self.commit()
|
|
|
|
n = n + 1
|
|
|
|
except:
|
|
|
|
#print "iqh store error", sys.exc_value # debug
|
|
|
|
self.rollback()
|
|
|
|
if re.search('deadlock', str(sys.exc_info()[1]), re.I):
|
|
|
|
# deadlocks only a problem if hudcache is being updated
|
|
|
|
tries = tries + 1
|
|
|
|
if tries < maxTries and wait < 5: # wait < 5 just to make sure
|
|
|
|
print "deadlock detected - trying again ..."
|
2009-08-01 01:06:07 +02:00
|
|
|
sleep(wait)
|
2009-07-31 22:24:21 +02:00
|
|
|
wait = wait + wait
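# doubling the wait gives a simple exponential back-off: roughly 0.1, 0.2, 0.4, 0.8 seconds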
|
|
|
|
again = True
|
|
|
|
else:
|
|
|
|
print "too many deadlocks - failed to store hand " + h.get_siteHandNo()
|
|
|
|
if not again:
|
|
|
|
fails = fails + 1
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "***Error storing hand: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
|
|
|
|
# finished trying to store hand
|
|
|
|
|
|
|
|
# always reduce q count, whether or not this hand was saved ok
|
|
|
|
q.task_done()
|
|
|
|
# while True loop
|
|
|
|
|
|
|
|
self.commit()
|
|
|
|
if sendFinal:
|
|
|
|
q.task_done()
|
|
|
|
print "db writer finished: stored %d hands (%d fails) in %.1f seconds" % (n, fails, time()-t0)
|
|
|
|
# end def insert_queue_hands():
|
|
|
|
|
|
|
|
|
|
|
|
def send_finish_msg(self, q):
|
|
|
|
try:
|
|
|
|
h = HandToWrite(True)
|
|
|
|
q.put(h)
|
|
|
|
except:
|
|
|
|
err = traceback.extract_tb(sys.exc_info()[2])[-1]
|
|
|
|
print "***Error sending finish: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
|
|
|
|
# end def send_finish_msg():
|
|
|
|
|
2010-07-08 23:46:54 +02:00
|
|
|
def createOrUpdateTourneyType(self, hand):#note: this method is used on Hand and TourneySummary objects
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyTypeId = 1
|
|
|
|
|
2009-09-01 02:35:52 +02:00
|
|
|
# Check if the Tourney already exists; if so, retrieve its TTypeId and verify the ttype values still match
|
|
|
|
cursor = self.get_cursor()
|
|
|
|
cursor.execute (self.sql.query['getTourneyTypeIdByTourneyNo'].replace('%s', self.sql.query['placeholder']),
|
2010-07-07 04:01:40 +02:00
|
|
|
(hand.tourNo, hand.siteId)
|
2009-09-01 02:35:52 +02:00
|
|
|
)
|
|
|
|
result=cursor.fetchone()
|
|
|
|
|
|
|
|
expectedValues = { 1 : "buyin", 2 : "fee", 4 : "isKO", 5 : "isRebuy", 6 : "speed",
|
2010-07-02 19:39:02 +02:00
|
|
|
7 : "isShootout", 8 : "isMatrix" }
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyTypeIdMatch = True
|
2009-09-01 02:35:52 +02:00
|
|
|
|
|
|
|
try:
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyTypeId = result[0]
|
|
|
|
log.debug("Tourney found in db with Tourney_Type_ID = %d" % tourneyTypeId)
|
2009-09-01 02:35:52 +02:00
|
|
|
for ev in expectedValues :
|
2010-07-07 04:01:40 +02:00
|
|
|
if ( getattr( hand, expectedValues.get(ev) ) != result[ev] ):
|
|
|
|
log.debug("TypeId mismatch : wrong %s : Tourney=%s / db=%s" % (expectedValues.get(ev), getattr( hand, expectedValues.get(ev)), result[ev]) )
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyTypeIdMatch = False
|
2009-09-01 02:35:52 +02:00
|
|
|
#break
|
|
|
|
except:
|
|
|
|
# Tourney not found : a TourneyTypeId has to be found or created for that specific tourney
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyTypeIdMatch = False
|
2009-09-01 02:35:52 +02:00
|
|
|
|
2010-07-05 11:48:26 +02:00
|
|
|
if tourneyTypeIdMatch == False :
|
|
|
|
# Check for an existing TTypeId that matches tourney info, if not found create it
|
2009-09-01 02:35:52 +02:00
|
|
|
cursor.execute (self.sql.query['getTourneyTypeId'].replace('%s', self.sql.query['placeholder']),
|
2010-07-07 04:01:40 +02:00
|
|
|
(hand.siteId, hand.buyinCurrency, hand.buyin, hand.fee, hand.isKO,
|
|
|
|
hand.isRebuy, hand.isAddOn, hand.speed, hand.isShootout, hand.isMatrix)
|
2009-09-01 02:35:52 +02:00
|
|
|
)
|
|
|
|
result=cursor.fetchone()
|
2009-08-31 01:30:28 +02:00
|
|
|
|
2009-09-01 02:35:52 +02:00
|
|
|
try:
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyTypeId = result[0]
|
2009-09-01 02:35:52 +02:00
|
|
|
except TypeError: #this means we need to create a new entry
|
2010-07-05 11:48:26 +02:00
|
|
|
cursor.execute (self.sql.query['insertTourneyType'].replace('%s', self.sql.query['placeholder']),
|
2010-07-07 04:01:40 +02:00
|
|
|
(hand.siteId, hand.buyinCurrency, hand.buyin, hand.fee, hand.buyInChips,
|
|
|
|
hand.isKO, hand.isRebuy,
|
|
|
|
hand.isAddOn, hand.speed, hand.isShootout, hand.isMatrix)
|
2009-09-01 02:35:52 +02:00
|
|
|
)
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyTypeId = self.get_last_insert_id(cursor)
|
|
|
|
return tourneyTypeId
|
2010-07-06 23:43:49 +02:00
|
|
|
#end def createOrUpdateTourneyType
|
2010-07-05 11:48:26 +02:00
|
|
|
|
2010-07-08 23:46:54 +02:00
|
|
|
def createOrUpdateTourney(self, hand, source):#note: this method is used on Hand and TourneySummary objects
|
2010-07-05 11:48:26 +02:00
|
|
|
cursor = self.get_cursor()
|
|
|
|
cursor.execute (self.sql.query['getTourneyIdByTourneyNo'].replace('%s', self.sql.query['placeholder']),
|
2010-07-07 04:01:40 +02:00
|
|
|
(hand.siteId, hand.tourNo))
|
2010-07-05 11:48:26 +02:00
|
|
|
result=cursor.fetchone()
|
2009-09-01 02:35:52 +02:00
|
|
|
|
2010-07-07 00:52:44 +02:00
|
|
|
if result != None and len(result)==1:
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyId = result[0]
|
2010-07-06 23:52:59 +02:00
|
|
|
else:
|
2010-07-08 23:46:54 +02:00
|
|
|
if source=="HHC":
|
|
|
|
cursor.execute (self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']),
|
2010-07-07 04:01:40 +02:00
|
|
|
(hand.tourneyTypeId, hand.tourNo, None, None,
|
2010-07-08 23:46:54 +02:00
|
|
|
hand.startTime, None, None, None, None, None))
|
|
|
|
elif source=="TS":
|
|
|
|
cursor.execute (self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']),
|
|
|
|
(hand.tourneyTypeId, hand.tourNo, hand.entries, hand.prizepool,
|
|
|
|
hand.startTime, hand.endTime, hand.tourneyName, hand.matrixIdProcessed, hand.totalRebuyCount, hand.totalAddOnCount))
|
|
|
|
else:
|
|
|
|
raise FpdbParseError("invalid source in Database.createOrUpdateTourney")
|
2010-07-05 11:48:26 +02:00
|
|
|
tourneyId = self.get_last_insert_id(cursor)
|
|
|
|
return tourneyId
|
2010-07-06 23:43:49 +02:00
|
|
|
#end def createOrUpdateTourney
|
2009-08-31 01:30:28 +02:00
|
|
|
|
2010-07-08 23:46:54 +02:00
|
|
|
def createOrUpdateTourneysPlayers(self, hand, source):#note: this method is used on Hand and TourneySummary objects
|
2010-07-07 00:52:44 +02:00
|
|
|
tourneysPlayersIds=[]
|
|
|
|
for player in hand.players:
|
2010-07-08 23:46:54 +02:00
|
|
|
if source=="TS": #TODO remove this horrible hack
|
2010-07-08 19:46:25 +02:00
|
|
|
playerId = hand.dbid_pids[player]
|
2010-07-08 23:46:54 +02:00
|
|
|
elif source=="HHC":
|
2010-07-08 19:46:25 +02:00
|
|
|
playerId = hand.dbid_pids[player[1]]
|
2010-07-08 23:46:54 +02:00
|
|
|
else:
|
|
|
|
raise FpdbParseError("invalid source in Database.createOrUpdateTourneysPlayers")
|
2010-07-07 00:52:44 +02:00
|
|
|
|
|
|
|
cursor = self.get_cursor()
|
|
|
|
cursor.execute (self.sql.query['getTourneysPlayersId'].replace('%s', self.sql.query['placeholder']),
|
2010-07-07 04:01:40 +02:00
|
|
|
(hand.tourneyId, playerId))
|
2010-07-07 00:52:44 +02:00
|
|
|
result=cursor.fetchone()
|
|
|
|
|
|
|
|
if result != None and len(result)==1:
|
|
|
|
tourneysPlayersIds.append(result[0])
|
|
|
|
else:
|
2010-07-09 01:07:22 +02:00
|
|
|
if source=="HHC":
|
|
|
|
cursor.execute (self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']),
|
|
|
|
(hand.tourneyId, playerId, None, None, None, None, None, None))
|
|
|
|
elif source=="TS":
|
|
|
|
#print "all values: tourneyId",hand.tourneyId, "playerId",playerId, "rank",hand.ranks[player], "winnings",hand.winnings[player], "winCurr",hand.winningsCurrency[player], hand.rebuyCounts[player], hand.addOnCounts[player], hand.koCounts[player]
|
2010-07-11 01:55:15 +02:00
|
|
|
if hand.ranks[player]:
|
|
|
|
cursor.execute (self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']),
|
|
|
|
(hand.tourneyId, playerId, int(hand.ranks[player]), int(hand.winnings[player]), hand.winningsCurrency[player],
|
|
|
|
hand.rebuyCounts[player], hand.addOnCounts[player], hand.koCounts[player]))
|
|
|
|
else:
|
|
|
|
cursor.execute (self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']),
|
|
|
|
(hand.tourneyId, playerId, None, None, None,
|
|
|
|
hand.rebuyCounts[player], hand.addOnCounts[player], hand.koCounts[player]))
|
2010-07-07 00:52:44 +02:00
|
|
|
tourneysPlayersIds.append(self.get_last_insert_id(cursor))
|
|
|
|
return tourneysPlayersIds
|
2010-07-06 23:43:49 +02:00
|
|
|
#end def createOrUpdateTourneysPlayers
|
2010-07-10 05:19:50 +02:00
|
|
|
|
|
|
|
def getTourneyTypesIds(self):
|
|
|
|
c = self.connection.cursor()
|
|
|
|
c.execute(self.sql.query['getTourneyTypesIds'])
|
|
|
|
result = c.fetchall()
|
|
|
|
print "DB.getTourneyTypesIds result:",result
|
|
|
|
return result
|
|
|
|
#end def getTourneyTypesIds
|
2010-07-05 11:48:26 +02:00
|
|
|
#end class Database
|
2009-07-31 22:24:21 +02:00
|
|
|
|
2009-07-29 07:17:51 +02:00
|
|
|
# Class used to hold all the data needed to write a hand to the db
|
|
|
|
# mainParser() in fpdb_parse_logic.py creates one of these and then passes it to
|
2009-07-31 22:24:21 +02:00
|
|
|
# self.insert_queue_hands()
|
2009-07-29 07:17:51 +02:00
|
|
|
|
|
|
|
class HandToWrite:
|
|
|
|
|
|
|
|
def __init__(self, finished = False): # db_name and game not used any more
|
|
|
|
try:
|
|
|
|
self.finished = finished
|
|
|
|
self.config = None
|
|
|
|
self.settings = None
|
|
|
|
self.base = None
|
|
|
|
self.category = None
|
|
|
|
self.siteTourneyNo = None
|
|
|
|
self.buyin = None
|
|
|
|
self.fee = None
|
|
|
|
self.knockout = None
|
|
|
|
self.entries = None
|
|
|
|
self.prizepool = None
|
|
|
|
self.tourneyStartTime = None
|
|
|
|
self.isTourney = None
|
|
|
|
self.tourneyTypeId = None
|
|
|
|
self.siteID = None
|
|
|
|
self.siteHandNo = None
|
|
|
|
self.gametypeID = None
|
|
|
|
self.handStartTime = None
|
|
|
|
self.names = None
|
|
|
|
self.playerIDs = None
|
|
|
|
self.startCashes = None
|
|
|
|
self.positions = None
|
|
|
|
self.antes = None
|
|
|
|
self.cardValues = None
|
|
|
|
self.cardSuits = None
|
|
|
|
self.boardValues = None
|
|
|
|
self.boardSuits = None
|
|
|
|
self.winnings = None
|
|
|
|
self.rakes = None
|
|
|
|
self.actionTypes = None
|
|
|
|
self.allIns = None
|
|
|
|
self.actionAmounts = None
|
|
|
|
self.actionNos = None
|
|
|
|
self.hudImportData = None
|
|
|
|
self.maxSeats = None
|
|
|
|
self.tableName = None
|
|
|
|
self.seatNos = None
|
|
|
|
except:
|
2009-07-29 08:57:18 +02:00
|
|
|
print "htw.init error: " + str(sys.exc_info())
|
2009-07-29 07:17:51 +02:00
|
|
|
raise
|
|
|
|
# end def __init__
|
|
|
|
|
|
|
|
def set_all( self, config, settings, base, category, siteTourneyNo, buyin
|
|
|
|
, fee, knockout, entries, prizepool, tourneyStartTime
|
|
|
|
, isTourney, tourneyTypeId, siteID, siteHandNo
|
|
|
|
, gametypeID, handStartTime, names, playerIDs, startCashes
|
|
|
|
, positions, antes, cardValues, cardSuits, boardValues, boardSuits
|
|
|
|
, winnings, rakes, actionTypes, allIns, actionAmounts
|
|
|
|
, actionNos, hudImportData, maxSeats, tableName, seatNos):
|
|
|
|
|
|
|
|
try:
|
|
|
|
self.config = config
|
|
|
|
self.settings = settings
|
|
|
|
self.base = base
|
|
|
|
self.category = category
|
|
|
|
self.siteTourneyNo = siteTourneyNo
|
|
|
|
self.buyin = buyin
|
|
|
|
self.fee = fee
|
|
|
|
self.knockout = knockout
|
|
|
|
self.entries = entries
|
|
|
|
self.prizepool = prizepool
|
|
|
|
self.tourneyStartTime = tourneyStartTime
|
|
|
|
self.isTourney = isTourney
|
|
|
|
self.tourneyTypeId = tourneyTypeId
|
|
|
|
self.siteID = siteID
|
|
|
|
self.siteHandNo = siteHandNo
|
|
|
|
self.gametypeID = gametypeID
|
|
|
|
self.handStartTime = handStartTime
|
|
|
|
self.names = names
|
|
|
|
self.playerIDs = playerIDs
|
|
|
|
self.startCashes = startCashes
|
|
|
|
self.positions = positions
|
|
|
|
self.antes = antes
|
|
|
|
self.cardValues = cardValues
|
|
|
|
self.cardSuits = cardSuits
|
|
|
|
self.boardValues = boardValues
|
|
|
|
self.boardSuits = boardSuits
|
|
|
|
self.winnings = winnings
|
|
|
|
self.rakes = rakes
|
|
|
|
self.actionTypes = actionTypes
|
|
|
|
self.allIns = allIns
|
|
|
|
self.actionAmounts = actionAmounts
|
|
|
|
self.actionNos = actionNos
|
|
|
|
self.hudImportData = hudImportData
|
|
|
|
self.maxSeats = maxSeats
|
|
|
|
self.tableName = tableName
|
|
|
|
self.seatNos = seatNos
|
|
|
|
except:
|
2009-07-29 08:57:18 +02:00
|
|
|
print "htw.set_all error: " + str(sys.exc_info())
|
2009-07-29 07:17:51 +02:00
|
|
|
raise
|
|
|
|
# end def set_hand
|
|
|
|
|
|
|
|
def get_finished(self):
|
|
|
|
return( self.finished )
|
|
|
|
# end def get_finished
|
|
|
|
|
|
|
|
def get_siteHandNo(self):
|
|
|
|
return( self.siteHandNo )
|
|
|
|
# end def get_siteHandNo
|
|
|
|
|
|
|
|
|
2008-08-19 00:53:25 +02:00
|
|
|
if __name__=="__main__":
|
|
|
|
c = Configuration.Config()
|
2010-02-25 21:43:15 +01:00
|
|
|
sql = SQL.Sql(db_server = 'sqlite')
|
2008-08-19 00:53:25 +02:00
|
|
|
|
2009-11-09 04:53:10 +01:00
|
|
|
db_connection = Database(c) # mysql fpdb holdem
|
2008-10-04 22:43:50 +02:00
|
|
|
# db_connection = Database(c, 'fpdb-p', 'test') # mysql fpdb holdem
|
2008-08-19 00:53:25 +02:00
|
|
|
# db_connection = Database(c, 'PTrackSv2', 'razz') # mysql razz
|
|
|
|
# db_connection = Database(c, 'ptracks', 'razz') # postgres
|
|
|
|
print "database connection object = ", db_connection.connection
|
2009-11-26 22:28:05 +01:00
|
|
|
# db_connection.recreate_tables()
|
|
|
|
db_connection.dropAllIndexes()
|
|
|
|
db_connection.createAllIndexes()
|
2009-08-04 23:06:03 +02:00
|
|
|
|
2008-08-19 00:53:25 +02:00
|
|
|
h = db_connection.get_last_hand()
|
|
|
|
print "last hand = ", h
|
|
|
|
|
|
|
|
hero = db_connection.get_player_id(c, 'PokerStars', 'nutOmatic')
|
2009-05-21 22:27:44 +02:00
|
|
|
if hero:
|
|
|
|
print "nutOmatic is id_player = %d" % hero
|
2008-08-19 00:53:25 +02:00
|
|
|
|
2010-02-25 21:43:15 +01:00
|
|
|
# example of displaying query plan in sqlite:
|
|
|
|
if db_connection.backend == 4:
|
|
|
|
print
|
|
|
|
c = db_connection.get_cursor()
|
|
|
|
c.execute('explain query plan '+sql.query['get_table_name'], (h, ))
|
|
|
|
for row in c.fetchall():
|
|
|
|
print "query plan: ", row
|
|
|
|
print
|
|
|
|
|
|
|
|
t0 = time()
|
2009-09-30 00:34:52 +02:00
|
|
|
stat_dict = db_connection.get_stats_from_hand(h, "ring")
|
2010-02-25 21:43:15 +01:00
|
|
|
t1 = time()
|
2008-09-15 22:31:55 +02:00
|
|
|
for p in stat_dict.keys():
|
|
|
|
print p, " ", stat_dict[p]
|
|
|
|
|
2009-06-01 03:25:36 +02:00
|
|
|
print "cards =", db_connection.get_cards(u'1')
|
2008-09-15 22:31:55 +02:00
|
|
|
db_connection.close_connection()
|
2010-02-25 21:43:15 +01:00
|
|
|
|
|
|
|
print "get_stats took: %4.3f seconds" % (t1-t0)
|
2008-09-15 22:31:55 +02:00
|
|
|
|
|
|
|
print "press enter to continue"
|
|
|
|
sys.stdin.readline()
|
2009-08-02 06:19:33 +02:00
|
|
|
|
|
|
|
#Code borrowed from http://push.cx/2008/caching-dictionaries-in-python-vs-ruby
|
|
|
|
class LambdaDict(dict):
|
|
|
|
def __init__(self, l):
|
|
|
|
super(LambdaDict, self).__init__()
|
|
|
|
self.l = l
|
|
|
|
|
|
|
|
def __getitem__(self, key):
|
|
|
|
if key in self:
|
|
|
|
return self.get(key)
|
|
|
|
else:
|
|
|
|
self.__setitem__(key, self.l(key))
|
|
|
|
return self.get(key)
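# A minimal usage sketch of LambdaDict (illustrative only, not part of fpdb):
#   squares = LambdaDict(lambda k: k * k)
#   squares[4]   # key missing: calls the lambda, caches 16, returns it
#   squares[4]   # key present: returns the cached 16 without calling the lambda again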
|