Conflicts:

pyfpdb/fpdb_import.py
Eric Blade 2009-08-29 06:54:20 -05:00
commit bd7f3de3d7
13 changed files with 183 additions and 118 deletions

View File

@@ -33,6 +33,11 @@ import shutil
 import xml.dom.minidom
 from xml.dom.minidom import Node
+import logging, logging.config
+logging.config.fileConfig(os.path.join(sys.path[0],"logging.conf"))
+log = logging.getLogger("config")
+log.debug("config logger initialised")
+
 def fix_tf(x, default = True):
     # The xml parser doesn't translate "True" to True. Therefore, we never get
     # True or False from the parser only "True" or "False". So translate the
@@ -201,6 +206,9 @@ class Database:
         self.db_user = node.getAttribute("db_user")
         self.db_type = node.getAttribute("db_type")
         self.db_pass = node.getAttribute("db_pass")
+        self.db_selected = fix_tf(node.getAttribute("default"),"False")
+        log.debug("Database db_name:'%(name)s' db_server:'%(server)s' db_ip:'%(ip)s' db_user:'%(user)s' db_type:'%(type)s' db_pass (not logged) selected:'%(sel)s'" \
+            % { 'name':self.db_name, 'server':self.db_server, 'ip':self.db_ip, 'user':self.db_user, 'type':self.db_type, 'sel':self.db_selected} )

     def __str__(self):
         temp = 'Database = ' + self.db_name + '\n'
@@ -208,7 +216,7 @@ class Database:
             if key.startswith('__'): continue
             value = getattr(self, key)
             if callable(value): continue
-            temp = temp + ' ' + key + " = " + value + "\n"
+            temp = temp + ' ' + key + " = " + repr(value) + "\n"
         return temp

 class Aux_window:
@@ -279,11 +287,10 @@ class Tv:
                 (self.combinedStealFold, self.combined2B3B, self.combinedPostflop) )

 class Config:
-    def __init__(self, file = None, dbname = 'fpdb'):
+    def __init__(self, file = None, dbname = ''):
         # "file" is a path to an xml file with the fpdb/HUD configuration
         # we check the existence of "file" and try to recover if it doesn't exist
-        self.dbname = dbname

         self.default_config_path = self.get_default_config_path()
         if file != None: # configuration file path has been passed
@@ -309,10 +316,10 @@ class Config:
         # Parse even if there was no real config file found and we are using the example
         # If using the example, we'll edit it later
         try:
-            print "Reading configuration file %s\n" % (file)
+            log.info("Reading configuration file %s" % (file))
             doc = xml.dom.minidom.parse(file)
         except:
-            print "Error parsing %s. See error log file." % (file)
+            log.error("Error parsing %s. See error log file." % (file))
             traceback.print_exc(file=sys.stderr)
             print "press enter to continue"
             sys.stdin.readline()
@@ -338,9 +345,21 @@ class Config:
             self.supported_games[game.game_name] = game

         # s_dbs = doc.getElementsByTagName("supported_databases")
+        if dbname and dbname in self.supported_databases:
+            self.db_selected = dbname
         for db_node in doc.getElementsByTagName("database"):
-            db = Database(node = db_node)
-            self.supported_databases[db.db_name] = db
+            try:
+                db = Database(node = db_node)
+                if db.db_name in self.supported_databases:
+                    raise FpdbError("Database names must be unique")
+                # If there is only one Database node, or none are marked default, the first is selected
+                if len(self.supported_databases) == 0:
+                    self.db_selected = db.db_name
+                self.supported_databases[db.db_name] = db
+                if db.db_selected:
+                    self.db_selected = db.db_name
+            except:
+                raise

         # s_dbs = doc.getElementsByTagName("mucked_windows")
         for aw_node in doc.getElementsByTagName("aw"):
@@ -507,7 +526,7 @@ class Config:
     def get_db_parameters(self):
         db = {}
-        name = self.dbname
+        name = self.db_selected
         try: db['db-databaseName'] = name
         except: pass
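
The selection logic added above keys off the new default attribute on each <database> node: the first node parsed becomes db_selected, and any node whose default attribute is true overrides it. A minimal standalone sketch of that behaviour (the XML snippet and the fix_tf body below are illustrative, not copied from the shipped HUD_config.xml):

import xml.dom.minidom

def fix_tf(x, default = False):
    # minidom hands back the strings "True"/"False", never booleans
    if str(x).lower() in ("1", "true"):  return True
    if str(x).lower() in ("0", "false"): return False
    return default

doc = xml.dom.minidom.parseString("""
<supported_databases>
    <database db_name="fpdb-sqlite" db_server="sqlite" default="False"/>
    <database db_name="fpdb"        db_server="mysql"  default="True"/>
</supported_databases>""")

db_selected = None
for i, node in enumerate(doc.getElementsByTagName("database")):
    if i == 0 or fix_tf(node.getAttribute("default"), False):
        db_selected = node.getAttribute("db_name")   # first node, or a node marked default, wins
print "selected database:", db_selected              # -> fpdb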

View File

@@ -24,6 +24,7 @@ Create and manage the database objects.
 #    postmaster -D /var/lib/pgsql/data

 # Standard Library modules
+import os
 import sys
 import traceback
 from datetime import datetime, date, time, timedelta
@@ -31,7 +32,6 @@ from time import time, strftime, sleep
 from decimal import Decimal
 import string
 import re
-import logging
 import Queue

 # pyGTK modules
@@ -42,6 +42,11 @@ import fpdb_simple
 import Configuration
 import SQL
 import Card
+from Exceptions import *
+
+import logging, logging.config
+logging.config.fileConfig(os.path.join(sys.path[0],"logging.conf"))
+log = logging.getLogger('db')

 class Database:
@@ -93,6 +98,14 @@ class Database:
                   , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
                   ]
                 , [ # indexes for sqlite (list index 4)
+                    {'tab':'Players', 'col':'name', 'drop':0}
+                  , {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
+                  , {'tab':'Hands', 'col':'gametypeId', 'drop':0}
+                  , {'tab':'HandsPlayers', 'col':'handId', 'drop':0}
+                  , {'tab':'HandsPlayers', 'col':'playerId', 'drop':0}
+                  , {'tab':'HandsPlayers', 'col':'tourneyTypeId', 'drop':0}
+                  , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
+                  , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
                   ]
                 ]
@@ -161,8 +174,14 @@ class Database:
     # CREATE INDEX idx ON tab(col)
     # DROP INDEX idx

+    # SQLite notes:
+    # To add an index:
+    # create index indexname on tablename (col);
+
     def __init__(self, c, db_name = None, game = None, sql = None): # db_name and game not used any more
-        print "\ncreating Database instance, sql =", sql
+        log.info("Creating Database instance, sql = %s" % sql)
         self.fdb = fpdb_db.fpdb_db()   # sets self.fdb.db self.fdb.cursor and self.fdb.sql
         self.fdb.do_connect(c)
         self.connection = self.fdb.db
@@ -179,12 +198,17 @@ class Database:
         #ISOLATION_LEVEL_READ_COMMITTED = 1
         #ISOLATION_LEVEL_SERIALIZABLE = 2

         # where possible avoid creating new SQL instance by using the global one passed in
         if sql == None:
             self.sql = SQL.Sql(type = self.type, db_server = db_params['db-server'])
         else:
             self.sql = sql

+        if self.backend == self.SQLITE and db_params['db-databaseName'] == ':memory:' and self.fdb.wrongDbVersion:
+            log.info("sqlite/:memory: - creating")
+            self.recreate_tables()
+
         self.pcache = None    # PlayerId cache
         self.cachemiss = 0    # Delete me later - using to count player cache misses
         self.cachehit = 0     # Delete me later - using to count player cache hits
@@ -244,7 +268,7 @@ class Database:
         elif self.backend==4:
             return "SQLite"
         else:
-            raise fpdb_simple.FpdbError("invalid backend")
+            raise FpdbError("invalid backend")

     def get_table_name(self, hand_id):
         c = self.connection.cursor()
@@ -439,7 +463,7 @@ class Database:
                 if colnames[0].lower() == 'player_id':
                     playerid = row[0]
                 else:
-                    print "ERROR: query %s result does not have player_id as first column" % (query,)
+                    log.error("ERROR: query %s result does not have player_id as first column" % (query,))
                     break

                 for name, val in zip(colnames, row):
@@ -480,7 +504,7 @@ class Database:
             if self.backend == self.MYSQL_INNODB:
                 ret = self.connection.insert_id()
                 if ret < 1 or ret > 999999999:
-                    print "getLastInsertId(): problem fetching insert_id? ret=", ret
+                    log.warning("getLastInsertId(): problem fetching insert_id? ret=%d" % ret)
                     ret = -1
             elif self.backend == self.PGSQL:
                 # some options:
@@ -492,14 +516,14 @@ class Database:
                 ret = c.execute ("SELECT lastval()")
                 row = c.fetchone()
                 if not row:
-                    print "getLastInsertId(%s): problem fetching lastval? row=" % seq, row
+                    log.warning("getLastInsertId(%s): problem fetching lastval? row=%d" % (seq, row))
                     ret = -1
                 else:
                     ret = row[0]
             elif self.backend == self.SQLITE:
                 ret = cursor.lastrowid
             else:
-                print "getLastInsertId(): unknown backend ", self.backend
+                log.error("getLastInsertId(): unknown backend: %d" % self.backend)
                 ret = -1
         except:
             ret = -1
@@ -823,16 +847,16 @@ class Database:
         self.create_tables()
         self.createAllIndexes()
         self.commit()
-        print "Finished recreating tables"
+        log.info("Finished recreating tables")
     #end def recreate_tables

     def create_tables(self):
         #todo: should detect and fail gracefully if tables already exist.
         try:
-            logging.debug(self.sql.query['createSettingsTable'])
+            log.debug(self.sql.query['createSettingsTable'])
             c = self.get_cursor()
             c.execute(self.sql.query['createSettingsTable'])
-            logging.debug(self.sql.query['createSitesTable'])
+            log.debug(self.sql.query['createSitesTable'])
             c.execute(self.sql.query['createSitesTable'])
             c.execute(self.sql.query['createGametypesTable'])
             c.execute(self.sql.query['createPlayersTable'])
@@ -879,7 +903,7 @@ class Database:
             elif(self.get_backend_name() == 'SQLite'):
                 c.execute(self.sql.query['list_tables'])
                 for table in c.fetchall():
-                    logging.debug(self.sql.query['drop_table'] + table[0])
+                    log.debug(self.sql.query['drop_table'] + table[0])
                     c.execute(self.sql.query['drop_table'] + table[0])

         self.commit()
@@ -912,14 +936,21 @@ class Database:
                         self.get_cursor().execute(s)
                     except:
                         print " create idx failed: " + str(sys.exc_info())
+                elif self.backend == self.SQLITE:
+                    log.debug("Creating sqlite index %s %s" % (idx['tab'], idx['col']))
+                    try:
+                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+                        self.get_cursor().execute(s)
+                    except:
+                        log.debug("Create idx failed: " + str(sys.exc_info()))
                 else:
-                    print "Only MySQL and Postgres supported so far"
+                    print "Only MySQL, Postgres and SQLite supported so far"
                     return -1
             if self.backend == self.PGSQL:
                 self.connection.set_isolation_level(1)   # go back to normal isolation level
         except:
             print "Error creating indexes: " + str(sys.exc_value)
-            raise fpdb_simple.FpdbError( "Error creating indexes " + str(sys.exc_value) )
+            raise FpdbError( "Error creating indexes " + str(sys.exc_value) )
     #end def createAllIndexes

     def dropAllIndexes(self):
@@ -1107,7 +1138,7 @@ class Database:
                         , h.allIns, h.actionAmounts, h.actionNos, h.hudImportData, h.maxSeats
                         , h.tableName, h.seatNos)
                 else:
-                    raise fpdb_simple.FpdbError("unrecognised category")
+                    raise FpdbError("unrecognised category")
             else:
                 if h.base == "hold":
                     result = self.ring_holdem_omaha(
@@ -1125,7 +1156,7 @@ class Database:
                         , h.actionAmounts, h.actionNos, h.hudImportData, h.maxSeats, h.tableName
                         , h.seatNos)
                 else:
-                    raise fpdb_simple.FpdbError("unrecognised category")
+                    raise FpdbError("unrecognised category")
         except:
             print "Error storing hand: " + str(sys.exc_value)
             self.rollback()
@@ -1245,7 +1276,7 @@ class Database:
             ret = self.get_last_insert_id(c)
         except:
             ret = -1
-            raise fpdb_simple.FpdbError( "storeHands error: " + str(sys.exc_value) )
+            raise FpdbError( "storeHands error: " + str(sys.exc_value) )

         return ret
     #end def storeHands
@@ -1280,7 +1311,7 @@ class Database:
                 card3 = Card.cardFromValueSuit(card_values[i][2], card_suits[i][2])
                 card4 = Card.cardFromValueSuit(card_values[i][3], card_suits[i][3])
             else:
-                raise fpdb_simple.FpdbError("invalid category")
+                raise FpdbError("invalid category")

             inserts.append( (
                 hands_id, player_ids[i], start_cashes[i], positions[i], 1, # tourneytypeid
@@ -1339,7 +1370,7 @@ class Database:
                 ,inserts )
             result.append( self.get_last_insert_id(c) ) # wrong? not used currently
         except:
-            raise fpdb_simple.FpdbError( "store_hands_players_holdem_omaha error: " + str(sys.exc_value) )
+            raise FpdbError( "store_hands_players_holdem_omaha error: " + str(sys.exc_value) )

         return result
     #end def store_hands_players_holdem_omaha
@@ -1377,7 +1408,7 @@ class Database:
                 #result.append(cursor.fetchall()[0][0])
                 result.append( self.get_last_insert_id(c) )
         except:
-            raise fpdb_simple.FpdbError( "store_hands_players_stud error: " + str(sys.exc_value) )
+            raise FpdbError( "store_hands_players_stud error: " + str(sys.exc_value) )

         return result
     #end def store_hands_players_stud
@@ -1470,7 +1501,7 @@ class Database:
             #cursor.execute("SELECT id FROM HandsPlayers WHERE handId=%s AND playerId+0=%s", (hands_id, player_ids[i]))
             #result.append(cursor.fetchall()[0][0])
         except:
-            raise fpdb_simple.FpdbError( "store_hands_players_holdem_omaha_tourney error: " + str(sys.exc_value) )
+            raise FpdbError( "store_hands_players_holdem_omaha_tourney error: " + str(sys.exc_value) )

         return result
     #end def store_hands_players_holdem_omaha_tourney
@@ -1500,7 +1531,7 @@ class Database:
                 #result.append(cursor.fetchall()[0][0])
                 result.append( self.get_last_insert_id(c) )
         except:
-            raise fpdb_simple.FpdbError( "store_hands_players_stud_tourney error: " + str(sys.exc_value) )
+            raise FpdbError( "store_hands_players_stud_tourney error: " + str(sys.exc_value) )

         return result
     #end def store_hands_players_stud_tourney
@@ -1697,7 +1728,7 @@ class Database:
                 #    print "todo: implement storeHudCache for stud base"
         except:
-            raise fpdb_simple.FpdbError( "storeHudCache error: " + str(sys.exc_value) )
+            raise FpdbError( "storeHudCache error: " + str(sys.exc_value) )

     #end def storeHudCache
@@ -1720,7 +1751,7 @@ class Database:
             tmp=cursor.fetchone()
             #print "created new tourneys.id:",tmp
         except:
-            raise fpdb_simple.FpdbError( "store_tourneys error: " + str(sys.exc_value) )
+            raise FpdbError( "store_tourneys error: " + str(sys.exc_value) )

         return tmp[0]
     #end def store_tourneys
@@ -1753,7 +1784,7 @@ class Database:
                 #print "created new tourneys_players.id:",tmp
                 result.append(tmp[0])
         except:
-            raise fpdb_simple.FpdbError( "store_tourneys_players error: " + str(sys.exc_value) )
+            raise FpdbError( "store_tourneys_players error: " + str(sys.exc_value) )

         return result
     #end def store_tourneys_players
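
The new SQLite branch in createAllIndexes() builds its DDL by string substitution rather than using a query from SQL.py. A minimal sketch of the same statement against a throwaway in-memory database (the table layout here is illustrative; only the index-name pattern matches the code above):

import sqlite3

conn = sqlite3.connect(":memory:")
c = conn.cursor()
c.execute("create table Players (id integer primary key, name text)")

idx = {'tab':'Players', 'col':'name'}   # same shape as the entries in the sqlite index list above
s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
c.execute(s)                            # -> create index Players_name_idx on Players(name)

# second column of PRAGMA index_list is the index name
print [row[1] for row in c.execute("PRAGMA index_list('Players')")]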

View File

@@ -1,7 +1,18 @@
-class FPDBError(Exception):
-    pass
+class FpdbError(Exception):
+    def __init__(self, value):
+        self.value = value
+    def __str__(self):
+        return repr(self.value)

-class FpdbParseError(FPDBError):
-    def __init__(self,hid=None):
+class FpdbParseError(FpdbError):
+    def __init__(self,value='',hid=''):
+        self.value = value
         self.hid = hid
+    def __str__(self):
+        if hid:
+            return repr("HID:"+hid+", "+self.value)
+        else:
+            return repr(self.value)
+
+class DuplicateError(FpdbError):
+    pass
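
With the classes centralised here, callers import them from Exceptions instead of fpdb_simple. A small usage sketch (assuming pyfpdb/ is on the import path; the parse function is hypothetical):

from Exceptions import FpdbError, FpdbParseError

def parse_hand(text, hid):
    if not text:
        raise FpdbParseError("empty hand text", hid = hid)
    return text.splitlines()

try:
    parse_hand("", "12345678")
except FpdbParseError, e:          # Python 2 syntax, as used throughout the code base
    print "parse failed, hand", e.hid, "-", e.value
except FpdbError, e:
    print "fpdb error:", e.value

Note that, as committed, FpdbParseError.__str__ reads hid rather than self.hid, so calling str() on a parse error would raise a NameError; the sketch above reads the attributes directly instead.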

View File

@@ -319,6 +319,7 @@ def main(argv=None):
 # importer.setDropIndexes("auto")
     importer.setDropIndexes("don't drop")
     importer.setFailOnError(options.failOnError)
+    importer.setThreads(-1)
     importer.addBulkImportImportFileOrDir(os.path.expanduser(options.filename), site=options.filtername)
     importer.setCallHud(False)
     importer.runImport()

View File

@@ -24,6 +24,7 @@ import fpdb_simple
 import fpdb_import
 import fpdb_db
+from Exceptions import *

 class GuiTableViewer (threading.Thread):
@@ -73,7 +74,7 @@ class GuiTableViewer (threading.Thread):
             tmp+=("WtSD", "W$wsF", "W$SD")
         else:
-            raise fpdb_simple.FpdbError("reimplement stud")
+            raise FpdbError("reimplement stud")
         arr.append(tmp)

         #then the data rows
@@ -93,7 +94,7 @@ class GuiTableViewer (threading.Thread):
         elif seatCount==2 or seatCount==3:
             minSeats,maxSeats=seatCount,seatCount
         else:
-            fpdb_simple.FpdbError("invalid seatCount")
+            FpdbError("invalid seatCount")
         self.cursor.execute("SELECT * FROM HudCache WHERE gametypeId=%s AND playerId=%s AND activeSeats>=%s AND activeSeats<=%s", (self.gametype_id, self.player_ids[player][0], minSeats, maxSeats))
         rows=self.cursor.fetchall()

View File

@@ -1,4 +1,5 @@
 #!/usr/bin/python
+# -*- coding: utf-8 -*-

 #Copyright 2008 Carl Gherardi
 #This program is free software: you can redistribute it and/or modify
@@ -672,7 +673,7 @@ class HoldemOmahaHand(Hand):
             tmp5 = 0
         return (tmp1,tmp2,tmp3,tmp4,tmp5)

-    def writeHTMLHand(self, fh=sys.__stdout__):
+    def writeHTMLHand(self):
         from nevow import tags as T
         from nevow import flat
         players_who_act_preflop = (([x[0] for x in self.actions['PREFLOP']]+[x[0] for x in self.actions['BLINDSANTES']]))

View File

@@ -37,7 +37,7 @@ import gettext
 gettext.install('fpdb')

 import logging, logging.config
-logging.config.fileConfig("logging.conf")
+logging.config.fileConfig(os.path.join(sys.path[0],"logging.conf"))
 log = logging.getLogger("parser")

 class HandHistoryConverter():

View File

@@ -76,6 +76,7 @@ import SQL
 import Database
 import FpdbSQLQueries
 import Configuration
+from Exceptions import *

 VERSION = "0.11"
@@ -98,7 +99,7 @@ class fpdb:
         for i in self.tab_names: #todo: check this is valid
             if i==new_tab_name:
                 return # we depend on this to not create duplicate tabs, there's no reason to raise an error here?
-                # raise fpdb_simple.FpdbError("duplicate tab_name not permitted")
+                # raise FpdbError("duplicate tab_name not permitted")

         self.tabs.append(new_tab)
         self.tab_names.append(new_tab_name)
@@ -120,7 +121,7 @@ class fpdb:
                 break
         if tab_no == -1:
-            raise fpdb_simple.FpdbError("invalid tab_no")
+            raise FpdbError("invalid tab_no")
         else:
             self.main_vbox.remove(self.current_tab)
             #self.current_tab.destroy()

View File

@@ -20,9 +20,12 @@ import re
 import sys
 import logging
 from time import time, strftime
+import sqlalchemy.pool as pool

 import fpdb_simple
 import FpdbSQLQueries
+from Exceptions import *

 class fpdb_db:
     MYSQL_INNODB = 2
@@ -63,13 +66,15 @@ class fpdb_db:
         self.database=database
         if backend==fpdb_db.MYSQL_INNODB:
             import MySQLdb
+            MySQLdb = pool.manage(MySQLdb, pool_size=5)
             try:
                 self.db = MySQLdb.connect(host = host, user = user, passwd = password, db = database, use_unicode=True)
             except:
-                raise fpdb_simple.FpdbError("MySQL connection failed")
+                raise FpdbError("MySQL connection failed")
         elif backend==fpdb_db.PGSQL:
             import psycopg2
             import psycopg2.extensions
+            psycopg2 = pool.manage(psycopg2, pool_size=5)
             psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
             # If DB connection is made over TCP, then the variables
             # host, user and password are required
@@ -87,7 +92,7 @@ class fpdb_db:
                 pass
                 #msg = "PostgreSQL direct connection to database (%s) failed, trying with user ..." % (database,)
                 #print msg
-                #raise fpdb_simple.FpdbError(msg)
+                #raise FpdbError(msg)
             if not connected:
                 try:
                     self.db = psycopg2.connect(host = host,
@@ -97,16 +102,16 @@ class fpdb_db:
                 except:
                     msg = "PostgreSQL connection to database (%s) user (%s) failed." % (database, user)
                     print msg
-                    raise fpdb_simple.FpdbError(msg)
+                    raise FpdbError(msg)
         elif backend==fpdb_db.SQLITE:
             logging.info("Connecting to SQLite:%(database)s" % {'database':database})
             import sqlite3
+            sqlite3 = pool.manage(sqlite3, pool_size=1)
             self.db = sqlite3.connect(database,detect_types=sqlite3.PARSE_DECLTYPES)
             sqlite3.register_converter("bool", lambda x: bool(int(x)))
             sqlite3.register_adapter(bool, lambda x: "1" if x else "0")
         else:
-            raise fpdb_simple.FpdbError("unrecognised database backend:"+backend)
+            raise FpdbError("unrecognised database backend:"+backend)
         self.cursor=self.db.cursor()
         # Set up query dictionary as early in the connection process as we can.
         self.sql = FpdbSQLQueries.FpdbSQLQueries(self.get_backend_name())
@@ -148,7 +153,7 @@ class fpdb_db:
         elif self.backend==4:
             return "SQLite"
         else:
-            raise fpdb_simple.FpdbError("invalid backend")
+            raise FpdbError("invalid backend")
     #end def get_backend_name

     def get_db_info(self):
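
pool.manage() comes from SQLAlchemy's old DBAPI-proxy layer: it wraps a DB-API module so that module.connect() hands out connections from a QueuePool instead of opening a new one each time. (The API was current in the SQLAlchemy releases of this era and has since been removed from modern versions.) A minimal sketch of the pattern the three branches above apply to MySQLdb, psycopg2 and sqlite3; the file path is just an example:

import sqlite3
import sqlalchemy.pool as pool

sqlite3 = pool.manage(sqlite3, pool_size=1)     # rebind the module name to the pooling proxy

conn = sqlite3.connect("/tmp/fpdb-example.db")  # checked out of the pool
conn.close()                                    # returned to the pool, not really closed
conn = sqlite3.connect("/tmp/fpdb-example.db")  # the pooled connection is reused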

View File

@@ -22,7 +22,6 @@
 import os # todo: remove this once import_dir is in fpdb_import
 import sys
 from time import time, strftime, sleep
-import logging
 import traceback
 import math
 import datetime
@@ -39,21 +38,26 @@ import Database
 import fpdb_parse_logic
 import Configuration

+import logging, logging.config
+logging.config.fileConfig(os.path.join(sys.path[0],"logging.conf"))
+log = logging.getLogger('importer')

 # database interface modules
 try:
     import MySQLdb
     mysqlLibFound=True
+    log.debug("Import module: MySQLdb")
 except:
-    pass
+    log.debug("Import module: MySQLdb not found")

 try:
     import psycopg2
     pgsqlLibFound=True
     import psycopg2.extensions
     psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
+    log.debug("Import module: pyscopg2")
 except:
-    pass
+    log.debug("Import module: pyscopg2 not found")

 class Importer:
@@ -153,9 +157,9 @@ class Importer:
                     self.siteIds[site] = result[0][0]
                 else:
                     if len(result) == 0:
-                        print "[ERROR] Database ID for %s not found" % site
+                        log.error("Database ID for %s not found" % site)
                     else:
-                        print "[ERROR] More than 1 Database ID found for %s - Multiple currencies not implemented yet" % site
+                        log.error("[ERROR] More than 1 Database ID found for %s - Multiple currencies not implemented yet" % site)

     # Called from GuiBulkImport to add a file or directory.
@@ -168,7 +172,7 @@ class Importer:
         if os.path.isdir(inputPath):
             for subdir in os.walk(inputPath):
                 for file in subdir[2]:
-                    self.addImportFile(os.path.join(inputPath, subdir[0], file), site=site, filter=filter)
+                    self.addImportFile(os.path.join(subdir[0], file), site=site, filter=filter)
         else:
             self.addImportFile(inputPath, site=site, filter=filter)

     #Add a directory of files to filelist
@@ -189,7 +193,7 @@ class Importer:
                 #print " adding file ", file
                 self.addImportFile(os.path.join(dir, file), site, filter)
         else:
-            print "Warning: Attempted to add non-directory: '" + str(dir) + "' as an import directory"
+            log.warning("Attempted to add non-directory: '" + str(dir) + "' as an import directory")

     def runImport(self):
         """"Run full import on self.filelist. This is called from GuiBulkImport.py"""
@@ -199,7 +203,7 @@ class Importer:
         # Initial setup
         start = datetime.datetime.now()
         starttime = time()
-        print "Started at", start, "--", len(self.filelist), "files to import.", self.settings['dropIndexes']
+        log.info("Started at %s -- %d files to import. indexes: %s" % (start, len(self.filelist), self.settings['dropIndexes']))
         if self.settings['dropIndexes'] == 'auto':
             self.settings['dropIndexes'] = self.calculate_auto2(self.database, 12.0, 500.0)
         if 'dropHudCache' in self.settings and self.settings['dropHudCache'] == 'auto':
@@ -208,7 +212,7 @@ class Importer:
         if self.settings['dropIndexes'] == 'drop':
             self.database.prepareBulkImport()
         else:
-            print "No need to drop indexes."
+            log.debug("No need to drop indexes.")
         #print "dropInd =", self.settings['dropIndexes'], " dropHudCache =", self.settings['dropHudCache']

         if self.settings['threads'] <= 0:
@@ -384,9 +388,9 @@ class Importer:
         # Load filter, process file, pass returned filename to import_fpdb_file
         if self.settings['threads'] > 0 and self.writeq != None:
-            print "\nConverting " + file + " (" + str(q.qsize()) + ")"
+            log.info("Converting " + file + " (" + str(q.qsize()) + ")")
         else:
-            print "\nConverting " + file
+            log.info("Converting " + file)
         hhbase = self.config.get_import_parameters().get("hhArchiveBase")
         hhbase = os.path.expanduser(hhbase)
         hhdir = os.path.join(hhbase,site)
@@ -396,43 +400,27 @@ class Importer:
         out_path = os.path.join(hhdir, "x"+strftime("%d-%m-%y")+os.path.basename(file))
         filter_name = filter.replace("ToFpdb", "")
         mod = __import__(filter)
         obj = getattr(mod, filter_name, None)
         if callable(obj):
             hhc = obj(in_path = file, out_path = out_path, index = 0) # Index into file 0 until changeover
-            if hhc.getParsedObjectType() == "HH":
-                if(hhc.getStatus() and self.NEWIMPORT == False):
-                    (stored, duplicates, partial, errors, ttime) = self.import_fpdb_file(db, out_path, site, q)
-                elif (hhc.getStatus() and self.NEWIMPORT == True):
-                    #This code doesn't do anything yet
-                    handlist = hhc.getProcessedHands()
-                    self.pos_in_file[file] = hhc.getLastCharacterRead()
-                    for hand in handlist:
-                        #hand.prepInsert()
-                        hand.insert(self.database)
-                else:
-                    # conversion didn't work
-                    # TODO: appropriate response?
-                    return (0, 0, 0, 1, 0, -1)
-            elif hhc.getParsedObjectType() == "Summary":
-                if(hhc.getStatus()):
-                    tourney = hhc.getTourney()
-                    #print tourney
-                    #tourney.prepInsert()
-                    (stored, duplicates, partial, errors, ttime) = tourney.insert(self.database)
-                    return (stored, duplicates, partial, errors, ttime)
-                else:
-                    # conversion didn't work
-                    # Could just be the parsing of a non summary file (classic HH file)
-                    return (0, 0, 0, 0, 0)
-            else:
-                print "Unknown objects parsed by HHC :'%s'" %(hhc.getObjectTypeRead())
-                return (0, 0, 0, 1, 0, -1)
+            if(hhc.getStatus() and self.NEWIMPORT == False):
+                (stored, duplicates, partial, errors, ttime) = self.import_fpdb_file(db, out_path, site, q)
+            elif (hhc.getStatus() and self.NEWIMPORT == True):
+                #This code doesn't do anything yet
+                handlist = hhc.getProcessedHands()
+                self.pos_in_file[file] = hhc.getLastCharacterRead()
+                for hand in handlist:
+                    #hand.prepInsert()
+                    hand.insert(self.database)
+            else:
+                # conversion didn't work
+                # TODO: appropriate response?
+                return (0, 0, 0, 1, 0, -1)
         else:
-            print "Unknown filter filter_name:'%s' in filter:'%s'" %(filter_name, filter)
+            log.warning("Unknown filter filter_name:'%s' in filter:'%s'" %(filter_name, filter))
             return (0, 0, 0, 1, 0, -1)

         #This will barf if conv.getStatus != True
@@ -475,7 +463,7 @@ class Importer:
         db.commit()
         ttime = time() - starttime
         if q == None:
-            print "\rTotal stored:", stored, " duplicates:", duplicates, "errors:", errors, " time:", ttime
+            log.info("Total stored: %(stored)d\tduplicates:%(duplicates)d\terrors:%(errors)d\ttime:%(ttime)s" % locals())
             if not stored:
                 if duplicates:
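
For reference, the converter named by filter is loaded dynamically in the hunk above: __import__(filter) imports a module such as EverleafToFpdb, and getattr() then pulls the converter class (Everleaf) out of it. A stripped-down sketch of that plug-in lookup (the raised exception is illustrative; the real code returns an error tuple instead):

def load_filter(filter):
    filter_name = filter.replace("ToFpdb", "")   # e.g. "EverleafToFpdb" -> "Everleaf"
    mod = __import__(filter)                     # import the converter module by name
    obj = getattr(mod, filter_name, None)        # the converter class defined inside it
    if callable(obj):
        return obj
    raise ImportError("Unknown filter filter_name:'%s' in filter:'%s'" % (filter_name, filter))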

View File

@@ -22,6 +22,7 @@ import sys
 import fpdb_simple
 import Database
 from time import time, strftime
+from Exceptions import *

 #parses a holdem hand
@@ -126,7 +127,7 @@ def mainParser(settings, siteID, category, hand, config, db = None, writeq = Non
         elif lineTypes[i]=="table":
             tableResult=fpdb_simple.parseTableLine(base, line)
         else:
-            raise fpdb_simple.FpdbError("unrecognised lineType:"+lineTypes[i])
+            raise FpdbError("unrecognised lineType:"+lineTypes[i])
     maxSeats = tableResult['maxSeats']
     tableName = tableResult['tableName']

View File

@@ -25,6 +25,7 @@ import datetime
 import time
 import re
 import sys
+from Exceptions import *
 import locale
 import Card
@@ -40,18 +41,6 @@ SQLITE = 4

 LOCALE_ENCODING = locale.getdefaultlocale()[1]

-class DuplicateError(Exception):
-    def __init__(self, value):
-        self.value = value
-    def __str__(self):
-        return repr(self.value)
-
-class FpdbError(Exception):
-    def __init__(self, value):
-        self.value = value
-    def __str__(self):
-        return repr(self.value)
-
 #returns an array of the total money paid. intending to add rebuys/addons here
 def calcPayin(count, buyin, fee):
     return [buyin + fee for i in xrange(count)]
@@ -227,7 +216,7 @@ def fillCardArrays(player_count, base, category, card_values, card_suits):
     elif base=="stud":
         cardCount = 7
     else:
-        raise fpdb_simple.FpdbError("invalid category:", category)
+        raise FpdbError("invalid category:", category)

     for i in xrange(player_count):
         while (len(card_values[i]) < cardCount):

View File

@@ -1,5 +1,5 @@
 [loggers]
-keys=root,parser
+keys=root,parser,importer,config,db

 [handlers]
 keys=consoleHandler,fileHandler
@@ -13,20 +13,37 @@ handlers=consoleHandler,fileHandler
 [logger_parser]
 level=INFO
+# set to NOTSET or DEBUG to see everything the parser does
 handlers=consoleHandler,fileHandler
 qualname=parser
 propagate=0

+[logger_importer]
+level=DEBUG
+handlers=consoleHandler,fileHandler
+qualname=importer
+propagate=0
+
+[logger_config]
+level=DEBUG
+handlers=consoleHandler,fileHandler
+qualname=config
+propagate=0
+
+[logger_db]
+level=DEBUG
+handlers=consoleHandler,fileHandler
+qualname=db
+propagate=0
+
 [handler_consoleHandler]
 class=StreamHandler
-level=INFO
+level=DEBUG
 formatter=stderrFormatter
 args=(sys.stderr,)

 [handler_fileHandler]
 class=FileHandler
-level=INFO
+level=DEBUG
 formatter=fileFormatter
 args=('logging.out', 'a')
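
The new qualnames line up with the getLogger() calls added in Configuration.py ("config"), Database.py ("db") and fpdb_import.py ("importer"), alongside the existing "parser" logger. A minimal standalone sketch of how a module picks one of them up, assuming logging.conf sits next to the script:

import os, sys
import logging, logging.config

logging.config.fileConfig(os.path.join(sys.path[0], "logging.conf"))
log = logging.getLogger("importer")     # must match a qualname declared under [loggers]

log.debug("goes to stderr and logging.out")   # passes: this logger and both handlers are now at DEBUG
log.info("higher levels pass as well")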