Merge branch 'master' of git://git.assembla.com/fpdb-sql.git
commit 2c0f280165
@@ -481,12 +481,19 @@ class Config:
         print "\nReading configuration file %s\n" % file
         try:
             doc = xml.dom.minidom.parse(file)
+            self.file_error = None
         except:
             log.error("Error parsing %s. See error log file." % (file))
             traceback.print_exc(file=sys.stderr)
-            print "press enter to continue"
-            sys.stdin.readline()
-            sys.exit()
+            self.file_error = sys.exc_info()[1]
+            # we could add a parameter to decide whether to return or read a line and exit?
+            return
+            #print "press enter to continue"
+            #sys.stdin.readline()
+            #sys.exit()
+            #ExpatError: not well-formed (invalid token): line 511, column 4
+            #sys.exc_info = (<class 'xml.parsers.expat.ExpatError'>, ExpatError('not well-formed (invalid token): line 511,
+            # column 4',), <traceback object at 0x024503A0>)

         self.doc = doc
         self.supported_sites = {}
@@ -688,18 +695,8 @@ class Config:
         try: db['db-server'] = self.supported_databases[name].db_server
         except: pass

-        if self.supported_databases[name].db_server == DATABASE_TYPE_MYSQL:
-            db['db-backend'] = 2
-        elif self.supported_databases[name].db_server == DATABASE_TYPE_POSTGRESQL:
-            db['db-backend'] = 3
-        elif self.supported_databases[name].db_server == DATABASE_TYPE_SQLITE:
-            db['db-backend'] = 4
-            # sqlcoder: this assignment fixes unicode problems for me with sqlite (windows, cp1252)
-            # feel free to remove or improve this if you understand the problems
-            # better than me (not hard!)
-            Charset.not_needed1, Charset.not_needed2, Charset.not_needed3 = True, True, True
-        else:
-            raise ValueError('Unsupported database backend: %s' % self.supported_databases[name].db_server)
+        db['db-backend'] = self.get_backend(self.supported_databases[name].db_server)

         return db

     def set_db_parameters(self, db_name = 'fpdb', db_ip = None, db_user = None,
@@ -718,6 +715,23 @@ class Config:
         if db_server is not None: self.supported_databases[db_name].dp_server = db_server
         if db_type is not None: self.supported_databases[db_name].dp_type = db_type
         return

+    def get_backend(self, name):
+        """Returns the number of the currently used backend"""
+        if name == DATABASE_TYPE_MYSQL:
+            ret = 2
+        elif name == DATABASE_TYPE_POSTGRESQL:
+            ret = 3
+        elif name == DATABASE_TYPE_SQLITE:
+            ret = 4
+            # sqlcoder: this assignment fixes unicode problems for me with sqlite (windows, cp1252)
+            # feel free to remove or improve this if you understand the problems
+            # better than me (not hard!)
+            Charset.not_needed1, Charset.not_needed2, Charset.not_needed3 = True, True, True
+        else:
+            raise ValueError('Unsupported database backend: %s' % self.supported_databases[name].db_server)
+
+        return ret
+
     def getDefaultSite(self):
         "Returns first enabled site or None"
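A minimal usage sketch of the new Config.get_backend() helper follows; it is not part of the commit, the config file name is illustrative only, and DATABASE_TYPE_* refers to the string constants this module already defines:

    config = Config(file="HUD_config.xml")                   # illustrative file name
    assert config.get_backend(DATABASE_TYPE_MYSQL) == 2
    assert config.get_backend(DATABASE_TYPE_POSTGRESQL) == 3
    assert config.get_backend(DATABASE_TYPE_SQLITE) == 4     # also sets the Charset workaround flags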
@@ -142,14 +142,18 @@ class Database:
                     , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
                     ]
                   , [ # indexes for sqlite (list index 4)
-                      # {'tab':'Players', 'col':'name', 'drop':0} unique indexes not dropped
-                      # {'tab':'Hands', 'col':'siteHandNo', 'drop':0} unique indexes not dropped
                       {'tab':'Hands', 'col':'gametypeId', 'drop':0}
                     , {'tab':'HandsPlayers', 'col':'handId', 'drop':0}
                     , {'tab':'HandsPlayers', 'col':'playerId', 'drop':0}
                     , {'tab':'HandsPlayers', 'col':'tourneyTypeId', 'drop':0}
                     , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
-                      #, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} unique indexes not dropped
+                    , {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
+                    , {'tab':'HudCache', 'col':'playerId', 'drop':0}
+                    , {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
+                    , {'tab':'Players', 'col':'siteId', 'drop':1}
+                    , {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
+                    , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
+                    , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
                     ]
                   ]

@@ -226,7 +230,7 @@ class Database:
     # create index indexname on tablename (col);


-    def __init__(self, c, sql = None):
+    def __init__(self, c, sql = None, autoconnect = True):
         #log = Configuration.get_logger("logging.conf", "db", log_dir=c.dir_log)
         log.debug("Creating Database instance, sql = %s" % sql)
         self.config = c
@@ -247,41 +251,42 @@ class Database:
         else:
             self.sql = sql

-        # connect to db
-        self.do_connect(c)
-
-        if self.backend == self.PGSQL:
-            from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
-            #ISOLATION_LEVEL_AUTOCOMMIT = 0
-            #ISOLATION_LEVEL_READ_COMMITTED = 1
-            #ISOLATION_LEVEL_SERIALIZABLE = 2
+        if autoconnect:
+            # connect to db
+            self.do_connect(c)
+
+            if self.backend == self.PGSQL:
+                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
+                #ISOLATION_LEVEL_AUTOCOMMIT = 0
+                #ISOLATION_LEVEL_READ_COMMITTED = 1
+                #ISOLATION_LEVEL_SERIALIZABLE = 2

         if self.backend == self.SQLITE and self.database == ':memory:' and self.wrongDbVersion:
             log.info("sqlite/:memory: - creating")
             self.recreate_tables()
             self.wrongDbVersion = False

         self.pcache = None        # PlayerId cache
         self.cachemiss = 0        # Delete me later - using to count player cache misses
         self.cachehit = 0         # Delete me later - using to count player cache hits

         # config while trying out new hudcache mechanism
         self.use_date_in_hudcache = True

         #self.hud_hero_style = 'T'   # Duplicate set of vars just for hero - not used yet.
         #self.hud_hero_hands = 2000  # Idea is that you might want all-time stats for others
         #self.hud_hero_days = 30     # but last T days or last H hands for yourself

         # vars for hand ids or dates fetched according to above config:
         self.hand_1day_ago = 0              # max hand id more than 24 hrs earlier than now
         self.date_ndays_ago = 'd000000'     # date N days ago ('d' + YYMMDD)
         self.h_date_ndays_ago = 'd000000'   # date N days ago ('d' + YYMMDD) for hero
         self.date_nhands_ago = {}           # dates N hands ago per player - not used yet

         self.saveActions = False if self.import_options['saveActions'] == False else True

         self.connection.rollback()  # make sure any locks taken so far are released
     #end def __init__

     # could be used by hud to change hud style
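A minimal sketch of how the new autoconnect flag is meant to combine with the create flag added to connect() below; this is not part of the commit, the sqlite file name is illustrative, and backend number 4 follows the numbering visible elsewhere in this diff:

    db = Database(config, sql=None, autoconnect=False)    # skip do_connect() during __init__
    db.connect(backend=4, host=None, database="fpdb.db3",
               user=None, password=None, create=True)     # 4 = sqlite; create dir/db if missing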
@@ -313,7 +318,7 @@ class Database:
         self.__connected = True

     def connect(self, backend=None, host=None, database=None,
-                user=None, password=None):
+                user=None, password=None, create=False):
         """Connects a database with the given parameters"""
         if backend is None:
             raise FpdbError('Database backend not defined')
@@ -384,32 +389,35 @@ class Database:
             # log.warning("SQLite won't work well without 'sqlalchemy' installed.")

             if database != ":memory:":
-                if not os.path.isdir(self.config.dir_database):
+                if not os.path.isdir(self.config.dir_database) and create:
                     print "Creating directory: '%s'" % (self.config.dir_database)
                     log.info("Creating directory: '%s'" % (self.config.dir_database))
                     os.mkdir(self.config.dir_database)
                 database = os.path.join(self.config.dir_database, database)
             self.db_path = database
             log.info("Connecting to SQLite: %(database)s" % {'database':self.db_path})
-            self.connection = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES )
-            sqlite3.register_converter("bool", lambda x: bool(int(x)))
-            sqlite3.register_adapter(bool, lambda x: "1" if x else "0")
-            self.connection.create_function("floor", 1, math.floor)
-            tmp = sqlitemath()
-            self.connection.create_function("mod", 2, tmp.mod)
-            if use_numpy:
-                self.connection.create_aggregate("variance", 1, VARIANCE)
-            else:
-                log.warning("Some database functions will not work without NumPy support")
-            self.cursor = self.connection.cursor()
-            self.cursor.execute('PRAGMA temp_store=2')  # use memory for temp tables/indexes
-            self.cursor.execute('PRAGMA synchronous=0') # don't wait for file writes to finish
+            if os.path.exists(database) or create:
+                self.connection = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES )
+                sqlite3.register_converter("bool", lambda x: bool(int(x)))
+                sqlite3.register_adapter(bool, lambda x: "1" if x else "0")
+                self.connection.create_function("floor", 1, math.floor)
+                tmp = sqlitemath()
+                self.connection.create_function("mod", 2, tmp.mod)
+                if use_numpy:
+                    self.connection.create_aggregate("variance", 1, VARIANCE)
+                else:
+                    log.warning("Some database functions will not work without NumPy support")
+                self.cursor = self.connection.cursor()
+                self.cursor.execute('PRAGMA temp_store=2')  # use memory for temp tables/indexes
+                self.cursor.execute('PRAGMA synchronous=0') # don't wait for file writes to finish
+            else:
+                raise FpdbError("sqlite database "+database+" does not exist")
         else:
-            raise FpdbError("unrecognised database backend:"+backend)
+            raise FpdbError("unrecognised database backend:"+str(backend))

         self.cursor = self.connection.cursor()
         self.cursor.execute(self.sql.query['set tx level'])
-        self.check_version(database=database, create=True)
+        self.check_version(database=database, create=create)

     def check_version(self, database, create):
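The bool adapter/converter pair registered in the SQLite branch above can be exercised standalone with only the standard library; this illustration is not part of the commit and the table is made up:

    import sqlite3

    sqlite3.register_converter("bool", lambda x: bool(int(x)))
    sqlite3.register_adapter(bool, lambda x: "1" if x else "0")

    conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
    conn.execute("create table t (flag bool)")
    conn.execute("insert into t values (?)", (True,))
    print(conn.execute("select flag from t").fetchone()[0])   # -> True, as a Python bool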
@@ -721,6 +729,8 @@ class Database:

         # now get the stats
         c.execute(self.sql.query[query], subs)
+        #for row in c.fetchall():   # needs "explain query plan" in sql statement
+        #    print "query plan: ", row
         colnames = [desc[0] for desc in c.description]
         for row in c.fetchall():
             playerid = row[0]
@@ -2096,6 +2106,7 @@ class HandToWrite:

 if __name__=="__main__":
     c = Configuration.Config()
+    sql = SQL.Sql(db_server = 'sqlite')

     db_connection = Database(c) # mysql fpdb holdem
 #    db_connection = Database(c, 'fpdb-p', 'test') # mysql fpdb holdem
@@ -2113,12 +2124,25 @@ if __name__=="__main__":
     if hero:
         print "nutOmatic is id_player = %d" % hero

+    # example of displaying query plan in sqlite:
+    if db_connection.backend == 4:
+        print
+        c = db_connection.get_cursor()
+        c.execute('explain query plan '+sql.query['get_table_name'], (h, ))
+        for row in c.fetchall():
+            print "query plan: ", row
+        print
+
+    t0 = time()
     stat_dict = db_connection.get_stats_from_hand(h, "ring")
+    t1 = time()
     for p in stat_dict.keys():
         print p, " ", stat_dict[p]

     print "cards =", db_connection.get_cards(u'1')
     db_connection.close_connection

+    print "get_stats took: %4.3f seconds" % (t1-t0)
+
     print "press enter to continue"
     sys.stdin.readline()
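For reference, SQLite's "explain query plan" used above can be tried standalone; this snippet is not part of the commit, and the table and query are made up for illustration:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("create table Hands (id integer primary key, siteHandNo text)")
    c = conn.cursor()
    c.execute("explain query plan select * from Hands where siteHandNo = ?", ("1-2-3",))
    for row in c.fetchall():
        print(row)   # each row describes one step of the query plan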
@@ -125,6 +125,7 @@ class Hand(object):
         # currency symbol for this hand
         self.sym = self.SYMBOL[self.gametype['currency']] # save typing! delete this attr when done
         self.pot.setSym(self.sym)
+        self.is_duplicate = False  # i.e. don't update hudcache if true

     def __str__(self):
         vars = ( ("BB", self.bb),
@@ -236,6 +237,7 @@ db: a connected Database object"""
             # TourneysPlayers
         else:
             log.info("Hand.insert(): hid #: %s is a duplicate" % hh['siteHandNo'])
+            self.is_duplicate = True  # i.e. don't update hudcache
             raise FpdbHandDuplicate(hh['siteHandNo'])

     def updateHudCache(self, db):
@@ -675,6 +677,7 @@ class HoldemOmahaHand(Hand):
             if self.maxseats is None:
                 self.maxseats = hhc.guessMaxSeats(self)
             hhc.readOther(self)
+            #print "\nHand:\n"+str(self)
         elif builtFrom == "DB":
             if handid is not None:
                 self.select(handid) # Will need a handId
@@ -1346,6 +1346,7 @@ class Sql:

             # same as above except stats are aggregated for all blind/limit levels
             self.query['get_stats_from_hand_aggregated'] = """
+                /* explain query plan */
                 SELECT hc.playerId                      AS player_id,
                        max(case when hc.gametypeId = h.gametypeId
                                 then hp.seatNo
@@ -97,6 +97,7 @@ except:

 import GuiPrefs
 import GuiLogView
+import GuiDatabase
 import GuiBulkImport
 import GuiPlayerStats
 import GuiPositionalStats
@@ -288,10 +289,31 @@ class fpdb:

         dia.destroy()

-    def dia_create_del_database(self, widget, data=None):
-        self.warning_box("Unimplemented: Create/Delete Database")
-        self.obtain_global_lock()
-        self.release_global_lock()
+    def dia_maintain_dbs(self, widget, data=None):
+        self.warning_box("Unimplemented: Maintain Databases")
+        return
+        if len(self.tab_names) == 1:
+            if self.obtain_global_lock():  # returns true if successful
+                # only main tab has been opened, open dialog
+                dia = gtk.Dialog("Maintain Databases",
+                                 self.window,
+                                 gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
+                                 (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT,
+                                  gtk.STOCK_SAVE, gtk.RESPONSE_ACCEPT))
+                dia.set_default_size(700, 320)
+
+                prefs = GuiDatabase.GuiDatabase(self.config, self.window, dia)
+                response = dia.run()
+                if response == gtk.RESPONSE_ACCEPT:
+                    # save updated config
+                    self.config.save()
+
+            self.release_global_lock()
+
+            dia.destroy()
+        else:
+            self.warning_box("Cannot open Database Maintenance window because "
+                             + "other windows have been opened. Re-start fpdb to use this option.")

     def dia_create_del_user(self, widget, data=None):
         self.warning_box("Unimplemented: Create/Delete user")
@@ -620,7 +642,7 @@ class fpdb:
                 <menuitem action="tableviewer"/>
               </menu>
               <menu action="database">
-                <menuitem action="createdb"/>
+                <menuitem action="maintaindbs"/>
                 <menuitem action="createuser"/>
                 <menuitem action="createtabs"/>
                 <menuitem action="rebuildhudcache"/>
@@ -663,7 +685,7 @@ class fpdb:
             ('sessionreplay', None, '_Session Replayer (todo)', None, 'Session Replayer (todo)', self.not_implemented),
             ('tableviewer', None, 'Poker_table Viewer (mostly obselete)', None, 'Poker_table Viewer (mostly obselete)', self.tab_table_viewer),
             ('database', None, '_Database'),
-            ('createdb', None, 'Create or Delete _Database (todo)', None, 'Create or Delete Database', self.dia_create_del_database),
+            ('maintaindbs', None, '_Maintain Databases (todo)', None, 'Maintain Databases', self.dia_maintain_dbs),
             ('createuser', None, 'Create or Delete _User (todo)', None, 'Create or Delete User', self.dia_create_del_user),
             ('createtabs', None, 'Create or Recreate _Tables', None, 'Create or Recreate Tables ', self.dia_recreate_tables),
             ('rebuildhudcache', None, 'Rebuild HUD Cache', None, 'Rebuild HUD Cache', self.dia_recreate_hudcache),
@@ -685,9 +707,15 @@ class fpdb:
         window.add_accel_group(accel_group)
         return menubar

-    def load_profile(self):
+    def load_profile(self, create_db = False):
         """Loads profile from the provided path name."""
         self.config = Configuration.Config(file=options.config, dbname=options.dbname)
+        if self.config.file_error:
+            self.warning_box( "There is an error in your config file\n" + self.config.file
+                              + "\n\nError is: " + str(self.config.file_error)
+                            , diatitle="CONFIG FILE ERROR" )
+            exit()
+
         log = Configuration.get_logger("logging.conf", "fpdb", log_dir=self.config.dir_log)
         print "Logfile is " + os.path.join(self.config.dir_log, self.config.log_file) + "\n"
         if self.config.example_copy:
@@ -905,7 +933,7 @@ This program is licensed under the AGPL3, see docs"""+os.sep+"agpl-3.0.txt")
         self.tab_main_help(None, None)

         self.window.show()
-        self.load_profile()
+        self.load_profile(create_db = True)

         if not options.errorsToConsole:
             fileName = os.path.join(self.config.dir_log, 'fpdb-errors.txt')
@@ -456,7 +456,7 @@ class Importer:
         # FIXME: Need to test for bulk import that isn't rebuilding the cache
         if self.callHud:
             for hand in handlist:
-                if hand is not None:
+                if hand is not None and not hand.is_duplicate:
                     hand.updateHudCache(self.database)
             self.database.commit()

@@ -73,6 +73,7 @@ from distutils.core import setup
 import py2exe
 import glob
 import matplotlib
+import shutil
 from datetime import date


@@ -111,7 +112,7 @@ def test_and_remove(top):
 # remove build and dist dirs if they exist
 test_and_remove('dist')
 test_and_remove('build')
-test_and_remove('gfx')
+#test_and_remove('gfx')


 today = date.today().strftime('%Y%m%d')
@@ -174,3 +175,36 @@ dest = dest.replace('\\', '\\\\')
 os.rename( 'pyfpdb', dest )


+print "Enter directory name for GTK 2.14 (e.g. c:\code\gtk_2.14.7-20090119)\n: ",  # the comma means no newline
+gtk_dir = sys.stdin.readline().rstrip()
+
+print "\ncopying files and dirs from ", gtk_dir, "to", dest.replace('\\\\', '\\'), "..."
+src = os.path.join(gtk_dir, 'bin', 'libgdk-win32-2.0-0.dll')
+src = src.replace('\\', '\\\\')
+shutil.copy( src, dest )
+
+src = os.path.join(gtk_dir, 'bin', 'libgobject-2.0-0.dll')
+src = src.replace('\\', '\\\\')
+shutil.copy( src, dest )
+
+src_dir = os.path.join(gtk_dir, 'etc')
+src_dir = src_dir.replace('\\', '\\\\')
+dest_dir = os.path.join(dest, 'etc')
+dest_dir = dest_dir.replace('\\', '\\\\')
+shutil.copytree( src_dir, dest_dir )
+
+src_dir = os.path.join(gtk_dir, 'lib')
+src_dir = src_dir.replace('\\', '\\\\')
+dest_dir = os.path.join(dest, 'lib')
+dest_dir = dest_dir.replace('\\', '\\\\')
+shutil.copytree( src_dir, dest_dir )
+
+src_dir = os.path.join(gtk_dir, 'share')
+src_dir = src_dir.replace('\\', '\\\\')
+dest_dir = os.path.join(dest, 'share')
+dest_dir = dest_dir.replace('\\', '\\\\')
+shutil.copytree( src_dir, dest_dir )