Merge branch 'master' of git://git.assembla.com/fpdb-sql

Worros 2009-07-19 11:29:18 +08:00
commit e113a1e692
7 changed files with 322 additions and 92 deletions

View File

@@ -64,32 +64,47 @@ class Database:
# Future values may also include:
# H=Hands (last n hands)
self.hud_hands = 1000 # Max number of hands from each player to use for hud stats
self.hud_days = 90 # Max number of days from each player to use for hud stats
self.hud_days = 30 # Max number of days from each player to use for hud stats
self.hud_session_gap = 30 # Gap (minutes) between hands that indicates a change of session
# (hands every 2 mins for 1 hour = one session; if followed
# by a 40 minute gap and then more hands on the same table,
# that is a new session)
cur = self.connection.cursor()
self.cursor = self.fdb.cursor
if self.fdb.wrongDbVersion == False:
# self.hand_1day_ago used to fetch stats for current session (i.e. if hud_style = 'S')
self.hand_1day_ago = 0
cur.execute(self.sql.query['get_hand_1day_ago'])
row = cur.fetchone()
self.cursor.execute(self.sql.query['get_hand_1day_ago'])
row = self.cursor.fetchone()
if row and row[0]:
self.hand_1day_ago = row[0]
#print "hand 1day ago =", self.hand_1day_ago
# self.date_ndays_ago used if hud_style = 'T'
d = timedelta(days=self.hud_days)
now = datetime.utcnow() - d
self.date_ndays_ago = "d%02d%02d%02d" % (now.year-2000, now.month, now.day)
self.hand_nhands_ago = 0 # todo
#cur.execute(self.sql.query['get_table_name'], (hand_id, ))
#row = cur.fetchone()
# self.hand_nhands_ago is used for fetching stats for last n hands (hud_style = 'H')
# This option not used yet
self.hand_nhands_ago = 0
# should use aggregated version of query if appropriate
self.cursor.execute(self.sql.query['get_hand_nhands_ago'], (self.hud_hands,self.hud_hands))
row = self.cursor.fetchone()
if row and row[0]:
self.hand_nhands_ago = row[0]
print "hand n hands ago =", self.hand_nhands_ago
#self.cursor.execute(self.sql.query['get_table_name'], (hand_id, ))
#row = self.cursor.fetchone()
else:
print "Bailing on DB query, not sure it exists yet"
self.saveActions = False if self.import_options['saveActions'] == False else True
# could be used by hud to change hud style
def set_hud_style(self, style):
self.hud_style = style
def do_connect(self, c):
self.fdb.do_connect(c)

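The three cutoffs computed above decide which hands feed the HUD stats: hand_1day_ago backs the session-based style (hud_style 'S'), date_ndays_ago the last-hud_days-days style ('T'), and hand_nhands_ago the last-hud_hands-hands style ('H'). A minimal standalone sketch of the same calculation, assuming a DB-API cursor and the query names used in this diff; stat_cutoffs is a hypothetical helper, not a method of the Database class.

from datetime import datetime, timedelta

def stat_cutoffs(cursor, queries, hud_days=30, hud_hands=1000):
    cutoffs = {'hand_1day_ago': 0, 'hand_nhands_ago': 0}

    # hud_style 'S': reference hand id for the current session
    cursor.execute(queries['get_hand_1day_ago'])
    row = cursor.fetchone()
    if row and row[0]:
        cutoffs['hand_1day_ago'] = row[0]

    # hud_style 'T': date string marking the start of the last hud_days days
    start = datetime.utcnow() - timedelta(days=hud_days)
    cutoffs['date_ndays_ago'] = "d%02d%02d%02d" % (start.year - 2000, start.month, start.day)

    # hud_style 'H': hand id roughly hud_hands hands back (the query takes the limit twice)
    cursor.execute(queries['get_hand_nhands_ago'], (hud_hands, hud_hands))
    row = cursor.fetchone()
    if row and row[0]:
        cutoffs['hand_nhands_ago'] = row[0]

    return cutoffs
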
View File

@@ -1064,6 +1064,7 @@ class FpdbSQLQueries:
<orderbyseats>
,case <position> when 'B' then 'B'
when 'S' then 'S'
when '0' then 'Y'
else 'Z'||<position>
end
<orderbyhgameTypeId>

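The added "when '0' then 'Y'" branch gives the button (position '0') its own sort key, so in the ORDER BY it falls after the blinds ('B', 'S') and before the remaining positions, which still sort as 'Z' plus the position. The same mapping in Python, purely for illustration; position_sort_key is not part of the codebase.

def position_sort_key(position):
    # blinds keep their letters, the button becomes 'Y',
    # everything else gets a 'Z' prefix so it sorts last
    if position == 'B':
        return 'B'
    if position == 'S':
        return 'S'
    if position == '0':
        return 'Y'
    return 'Z' + position
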
View File

@@ -154,31 +154,40 @@ class GuiAutoImport (threading.Thread):
# results to the same pipe. This means that self.path should be a list of dirs
# to watch.
if widget.get_active(): # toggled on
self.doAutoImportBool = True
widget.set_label(u' _Stop Autoimport ')
if self.pipe_to_hud is None:
if os.name == 'nt':
command = "python HUD_main.py" + " " + self.settings['cl_options']
bs = 0 # windows is not happy with line buffering here
self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
universal_newlines=True)
else:
command = os.path.join(sys.path[0], 'HUD_main.py')
cl = [command, ] + string.split(self.settings['cl_options'])
self.pipe_to_hud = subprocess.Popen(cl, bufsize = 1, stdin = subprocess.PIPE,
universal_newlines=True)
# - Does the lock acquisition need to be more sophisticated for multiple dirs?
# (see comment above about what to do if pipe already open)
# - Ideally we want to release the lock if the auto-import is killed by some
# kind of exception - is this possible?
if self.settings['global_lock'].acquire(False): # returns false immediately if lock not acquired
print "\nGlobal lock taken ..."
self.doAutoImportBool = True
widget.set_label(u' _Stop Autoimport ')
if self.pipe_to_hud is None:
if os.name == 'nt':
command = "python HUD_main.py" + " " + self.settings['cl_options']
bs = 0 # windows is not happy with line buffering here
self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
universal_newlines=True)
else:
command = os.path.join(sys.path[0], 'HUD_main.py')
cl = [command, ] + string.split(self.settings['cl_options'])
self.pipe_to_hud = subprocess.Popen(cl, bufsize = 1, stdin = subprocess.PIPE,
universal_newlines=True)
# Add directories to importer object.
for site in self.input_settings:
self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
self.do_import()
# Add directories to importer object.
for site in self.input_settings:
self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
self.do_import()
interval=int(self.intervalEntry.get_text())
gobject.timeout_add(interval*1000, self.do_import)
interval=int(self.intervalEntry.get_text())
gobject.timeout_add(interval*1000, self.do_import)
else:
print "auto-import aborted - global lock not available"
else: # toggled off
self.settings['global_lock'].release()
self.doAutoImportBool = False # do_import will return this and stop the gobject callback timer
print "Stopping autoimport"
print "Stopping autoimport - global lock released."
if self.pipe_to_hud.poll() is not None:
print "HUD already terminated"
else:
@@ -186,8 +195,6 @@ class GuiAutoImport (threading.Thread):
self.pipe_to_hud.communicate('\n') # waits for process to terminate
self.pipe_to_hud = None
self.startButton.set_label(u' _Start Autoimport ')
#end def GuiAutoImport.startClicked

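The reworked toggle handler only starts auto-importing once it wins the shared lock: acquire(False) returns immediately rather than blocking, and the toggle-off branch releases the lock again when the HUD is shut down. A stripped-down sketch of that pattern, assuming settings['global_lock'] is the threading.Lock the main window now shares through its settings dict; the function names are illustrative.

import threading

settings = {'global_lock': threading.Lock()}

def start_autoimport():
    # non-blocking: returns False at once if another tab already holds the lock
    if not settings['global_lock'].acquire(False):
        print("auto-import aborted - global lock not available")
        return False
    print("Global lock taken ...")
    return True

def stop_autoimport():
    settings['global_lock'].release()
    print("Stopping autoimport - global lock released.")
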
View File

@@ -49,37 +49,47 @@ class GuiBulkImport():
self.importer.RunImportThreaded()
def load_clicked(self, widget, data=None):
# get the dir to import from the chooser
self.inputFile = self.chooser.get_filename()
# Does the lock acquisition need to be more sophisticated for multiple dirs?
# (see comment above about what to do if pipe already open)
if self.settings['global_lock'].acquire(False): # returns false immediately if lock not acquired
try:
print "\nGlobal lock taken ..."
# get the dir to import from the chooser
self.inputFile = self.chooser.get_filename()
# get the import settings from the gui and save in the importer
self.importer.setHandCount(int(self.spin_hands.get_text()))
self.importer.setMinPrint(int(self.spin_hands.get_text()))
self.importer.setQuiet(self.chk_st_st.get_active())
self.importer.setFailOnError(self.chk_fail.get_active())
self.importer.setThreads(int(self.spin_threads.get_text()))
self.importer.setHandsInDB(self.n_hands_in_db)
cb_model = self.cb_dropindexes.get_model()
cb_index = self.cb_dropindexes.get_active()
if cb_index:
self.importer.setDropIndexes(cb_model[cb_index][0])
# get the import settings from the gui and save in the importer
self.importer.setHandCount(int(self.spin_hands.get_text()))
self.importer.setMinPrint(int(self.spin_hands.get_text()))
self.importer.setQuiet(self.chk_st_st.get_active())
self.importer.setFailOnError(self.chk_fail.get_active())
self.importer.setThreads(int(self.spin_threads.get_text()))
self.importer.setHandsInDB(self.n_hands_in_db)
cb_model = self.cb_dropindexes.get_model()
cb_index = self.cb_dropindexes.get_active()
if cb_index:
self.importer.setDropIndexes(cb_model[cb_index][0])
else:
self.importer.setDropIndexes("auto")
sitename = self.cbfilter.get_model()[self.cbfilter.get_active()][0]
self.lab_info.set_text("Importing")
self.importer.addBulkImportImportFileOrDir(self.inputFile, site = sitename)
self.importer.setCallHud(False)
starttime = time()
(stored, dups, partial, errs, ttime) = self.importer.runImport()
ttime = time() - starttime
if ttime == 0:
ttime = 1
print 'GuiBulkImport.import_dir done: Stored: %d \tDuplicates: %d \tPartial: %d \tErrors: %d in %s seconds - %d/sec'\
% (stored, dups, partial, errs, ttime, stored / ttime)
self.importer.clearFileList()
self.lab_info.set_text("Import finished")
except:
pass
self.settings['global_lock'].release()
else:
self.importer.setDropIndexes("auto")
sitename = self.cbfilter.get_model()[self.cbfilter.get_active()][0]
self.lab_info.set_text("Importing")
self.importer.addBulkImportImportFileOrDir(self.inputFile, site = sitename)
self.importer.setCallHud(False)
starttime = time()
(stored, dups, partial, errs, ttime) = self.importer.runImport()
ttime = time() - starttime
if ttime == 0:
ttime = 1
print 'GuiBulkImport.import_dir done: Stored: %d \tDuplicates: %d \tPartial: %d \tErrors: %d in %s seconds - %d/sec'\
% (stored, dups, partial, errs, ttime, stored / ttime)
self.importer.clearFileList()
self.lab_info.set_text("Import finished")
print "bulk-import aborted - global lock not available"
def get_vbox(self):
"""returns the vbox of this thread"""
@@ -88,8 +98,7 @@ class GuiBulkImport():
def __init__(self, settings, config):
self.settings = settings
self.config = config
self.importer = fpdb_import.Importer(self, self.settings,
config)
self.importer = fpdb_import.Importer(self, self.settings, config)
self.vbox = gtk.VBox(False, 0)
self.vbox.show()

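load_clicked now follows the same guard: the import only runs if the lock is free, and the bare try/except around the body keeps a failed import from leaving the lock held. A sketch of an equivalent guard written with try/finally, which gives the same release guarantee without swallowing the traceback; run_bulk_import is a placeholder for the importer calls above.

def load_clicked_guarded(settings, run_bulk_import):
    # returns without importing if another tab already holds the lock
    if not settings['global_lock'].acquire(False):
        print("bulk-import aborted - global lock not available")
        return
    print("Global lock taken ...")
    try:
        run_bulk_import()            # stand-in for the setHandCount/runImport sequence
    finally:
        settings['global_lock'].release()
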
View File

@@ -20,28 +20,29 @@ import pygtk
pygtk.require('2.0')
import gtk
import os
import sys
from time import time, strftime
import Card
import fpdb_import
import Database
import fpdb_db
import Filters
import FpdbSQLQueries
class GuiPlayerStats (threading.Thread):
def __init__(self, config, querylist, mainwin, debug=True):
self.debug=debug
self.conf=config
self.main_window=mainwin
self.debug = debug
self.conf = config
self.main_window = mainwin
self.sql = querylist
self.MYSQL_INNODB = 2
self.PGSQL = 3
self.SQLITE = 4
# create new db connection to avoid conflicts with other threads
self.db = fpdb_db.fpdb_db()
self.db.do_connect(self.conf)
self.cursor=self.db.cursor
self.sql = querylist
self.db = Database.Database(self.conf, sql=self.sql)
self.cursor = self.db.cursor
settings = {}
settings.update(config.get_db_parameters())
@@ -216,7 +217,7 @@ class GuiPlayerStats (threading.Thread):
flags = [True]
self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, groups, dates)
self.db.db.commit()
self.db.commit()
print "Stats page displayed in %4.2f seconds" % (time() - starttime)
#end def fillStatsFrame(self, vbox):
@@ -280,8 +281,10 @@ class GuiPlayerStats (threading.Thread):
if column[colalias] == 'plposition':
if value == 'B':
value = 'BB'
if value == 'S':
elif value == 'S':
value = 'SB'
elif value == '0':
value = 'Btn'
else:
if column[colalias] == 'game':
if holecards:
@@ -379,7 +382,8 @@ class GuiPlayerStats (threading.Thread):
# Group by position?
if groups['posn']:
query = query.replace("<position>", 'hp.position')
#query = query.replace("<position>", "case hp.position when '0' then 'Btn' else hp.position end")
query = query.replace("<position>", "hp.position")
# set flag in self.columns to show posn column
[x for x in self.columns if x[0] == 'plposition'][0][1] = True
else:

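The new elif branches control how the plposition column is rendered: raw query values 'B', 'S' and '0' become the labels 'BB', 'SB' and 'Btn', and anything else is shown unchanged. The same mapping as a small lookup table, for illustration only.

POSITION_LABELS = {'B': 'BB', 'S': 'SB', '0': 'Btn'}

def display_position(value):
    # values outside the table (numeric positions) are displayed as-is
    return POSITION_LABELS.get(value, value)
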
View File

@@ -571,6 +571,183 @@ class Sql:
from Hands
where handStart < now() at time zone 'UTC' - interval '1 day'"""
#if db_server == 'mysql':
self.query['get_hand_nhands_ago'] = """
select coalesce(greatest(max(id),%s)-%s,0)
from Hands"""
# used in GuiPlayerStats:
self.query['getPlayerId'] = """SELECT id from Players where name = %s"""
# used in Filters:
self.query['getSiteId'] = """SELECT id from Sites where name = %s"""
self.query['getGames'] = """SELECT DISTINCT category from Gametypes"""
self.query['getLimits'] = """SELECT DISTINCT bigBlind from Gametypes ORDER by bigBlind DESC"""
if db_server == 'mysql':
self.query['playerDetailedStats'] = """
select <hgameTypeId> AS hgametypeid
,gt.base
,gt.category
,upper(gt.limitType) AS limittype
,s.name
,min(gt.bigBlind) AS minbigblind
,max(gt.bigBlind) AS maxbigblind
/*,<hcgametypeId> AS gtid*/
,<position> AS plposition
,count(1) AS n
,100.0*sum(cast(hp.street0VPI as <signed>integer))/count(1) AS vpip
,100.0*sum(cast(hp.street0Aggr as <signed>integer))/count(1) AS pfr
,case when sum(cast(hp.street0_3Bchance as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street0_3Bdone as <signed>integer))/sum(cast(hp.street0_3Bchance as <signed>integer))
end AS pf3
,case when sum(cast(hp.stealattemptchance as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.stealattempted as <signed>integer))/sum(cast(hp.stealattemptchance as <signed>integer))
end AS steals
,100.0*sum(cast(hp.street1Seen as <signed>integer))/count(1) AS saw_f
,100.0*sum(cast(hp.sawShowdown as <signed>integer))/count(1) AS sawsd
,case when sum(cast(hp.street1Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.sawShowdown as <signed>integer))/sum(cast(hp.street1Seen as <signed>integer))
end AS wtsdwsf
,case when sum(cast(hp.sawShowdown as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.wonAtSD as <signed>integer))/sum(cast(hp.sawShowdown as <signed>integer))
end AS wmsd
,case when sum(cast(hp.street1Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street1Aggr as <signed>integer))/sum(cast(hp.street1Seen as <signed>integer))
end AS flafq
,case when sum(cast(hp.street2Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street2Aggr as <signed>integer))/sum(cast(hp.street2Seen as <signed>integer))
end AS tuafq
,case when sum(cast(hp.street3Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street3Aggr as <signed>integer))/sum(cast(hp.street3Seen as <signed>integer))
end AS rvafq
,case when sum(cast(hp.street1Seen as <signed>integer))+sum(cast(hp.street2Seen as <signed>integer))+sum(cast(hp.street3Seen as <signed>integer)) = 0 then -999
else 100.0*(sum(cast(hp.street1Aggr as <signed>integer))+sum(cast(hp.street2Aggr as <signed>integer))+sum(cast(hp.street3Aggr as <signed>integer)))
/(sum(cast(hp.street1Seen as <signed>integer))+sum(cast(hp.street2Seen as <signed>integer))+sum(cast(hp.street3Seen as <signed>integer)))
end AS pofafq
,sum(hp.totalProfit)/100.0 AS net
,sum(hp.rake)/100.0 AS rake
,100.0*avg(hp.totalProfit/(gt.bigBlind+0.0)) AS bbper100
,avg(hp.totalProfit)/100.0 AS profitperhand
,100.0*avg((hp.totalProfit+hp.rake)/(gt.bigBlind+0.0)) AS bb100xr
,avg((hp.totalProfit+hp.rake)/100.0) AS profhndxr
,avg(h.seats+0.0) AS avgseats
,variance(hp.totalProfit/100.0) AS variance
from HandsPlayers hp
inner join Hands h on (h.id = hp.handId)
inner join Gametypes gt on (gt.Id = h.gameTypeId)
inner join Sites s on (s.Id = gt.siteId)
where hp.playerId in <player_test>
and hp.tourneysPlayersId IS NULL
and h.seats <seats_test>
<flagtest>
<gtbigBlind_test>
and date_format(h.handStart, '%Y-%m-%d') <datestest>
group by hgameTypeId
,hp.playerId
,gt.base
,gt.category
<groupbyseats>
,plposition
,upper(gt.limitType)
,s.name
order by hp.playerId
,gt.base
,gt.category
<orderbyseats>
,case <position> when 'B' then 'B'
when 'S' then 'S'
else concat('Z', <position>)
end
<orderbyhgameTypeId>
,maxbigblind desc
,upper(gt.limitType)
,s.name
"""
else: # assume postgresql
self.query['playerDetailedStats'] = """
select <hgameTypeId> AS hgametypeid
,gt.base
,gt.category
,upper(gt.limitType) AS limittype
,s.name
,min(gt.bigBlind) AS minbigblind
,max(gt.bigBlind) AS maxbigblind
/*,<hcgametypeId> AS gtid*/
,<position> AS plposition
,count(1) AS n
,100.0*sum(cast(hp.street0VPI as <signed>integer))/count(1) AS vpip
,100.0*sum(cast(hp.street0Aggr as <signed>integer))/count(1) AS pfr
,case when sum(cast(hp.street0_3Bchance as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street0_3Bdone as <signed>integer))/sum(cast(hp.street0_3Bchance as <signed>integer))
end AS pf3
,case when sum(cast(hp.stealattemptchance as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.stealattempted as <signed>integer))/sum(cast(hp.stealattemptchance as <signed>integer))
end AS steals
,100.0*sum(cast(hp.street1Seen as <signed>integer))/count(1) AS saw_f
,100.0*sum(cast(hp.sawShowdown as <signed>integer))/count(1) AS sawsd
,case when sum(cast(hp.street1Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.sawShowdown as <signed>integer))/sum(cast(hp.street1Seen as <signed>integer))
end AS wtsdwsf
,case when sum(cast(hp.sawShowdown as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.wonAtSD as <signed>integer))/sum(cast(hp.sawShowdown as <signed>integer))
end AS wmsd
,case when sum(cast(hp.street1Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street1Aggr as <signed>integer))/sum(cast(hp.street1Seen as <signed>integer))
end AS flafq
,case when sum(cast(hp.street2Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street2Aggr as <signed>integer))/sum(cast(hp.street2Seen as <signed>integer))
end AS tuafq
,case when sum(cast(hp.street3Seen as <signed>integer)) = 0 then -999
else 100.0*sum(cast(hp.street3Aggr as <signed>integer))/sum(cast(hp.street3Seen as <signed>integer))
end AS rvafq
,case when sum(cast(hp.street1Seen as <signed>integer))+sum(cast(hp.street2Seen as <signed>integer))+sum(cast(hp.street3Seen as <signed>integer)) = 0 then -999
else 100.0*(sum(cast(hp.street1Aggr as <signed>integer))+sum(cast(hp.street2Aggr as <signed>integer))+sum(cast(hp.street3Aggr as <signed>integer)))
/(sum(cast(hp.street1Seen as <signed>integer))+sum(cast(hp.street2Seen as <signed>integer))+sum(cast(hp.street3Seen as <signed>integer)))
end AS pofafq
,sum(hp.totalProfit)/100.0 AS net
,sum(hp.rake)/100.0 AS rake
,100.0*avg(hp.totalProfit/(gt.bigBlind+0.0)) AS bbper100
,avg(hp.totalProfit)/100.0 AS profitperhand
,100.0*avg((hp.totalProfit+hp.rake)/(gt.bigBlind+0.0)) AS bb100xr
,avg((hp.totalProfit+hp.rake)/100.0) AS profhndxr
,avg(h.seats+0.0) AS avgseats
,variance(hp.totalProfit/100.0) AS variance
from HandsPlayers hp
inner join Hands h on (h.id = hp.handId)
inner join Gametypes gt on (gt.Id = h.gameTypeId)
inner join Sites s on (s.Id = gt.siteId)
where hp.playerId in <player_test>
and hp.tourneysPlayersId IS NULL
and h.seats <seats_test>
<flagtest>
<gtbigBlind_test>
and to_char(h.handStart, 'YYYY-MM-DD') <datestest>
group by hgameTypeId
,hp.playerId
,gt.base
,gt.category
<groupbyseats>
,plposition
,upper(gt.limitType)
,s.name
order by hp.playerId
,gt.base
,gt.category
<orderbyseats>
,case <position> when 'B' then 'B'
when 'S' then 'S'
when '0' then 'Y'
else 'Z'||<position>
end
<orderbyhgameTypeId>
,maxbigblind desc
,upper(gt.limitType)
,s.name
"""
#elif(self.dbname == 'SQLite'):
# self.query['playerDetailedStats'] = """ """
if __name__ == "__main__":
# just print the default queries and exit
s = Sql(game = 'razz', type = 'ptracks')

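playerDetailedStats is a template rather than a finished statement: tokens such as <position>, <player_test>, <seats_test> and <datestest> are substituted by GuiPlayerStats with query.replace before execution (the GuiPlayerStats hunk above shows <position> being set to hp.position when grouping by position). A sketch of that substitution step; prepare_query is a hypothetical helper and the example values are made up, not the ones the GUI actually builds.

def prepare_query(template, substitutions):
    # plain string substitution, mirroring the query.replace calls in GuiPlayerStats
    query = template
    for token, value in substitutions.items():
        query = query.replace(token, value)
    return query

# Example only: sql is assumed to be an instance of the Sql class above,
# and the filter fragments are placeholders rather than real GUI output.
query = prepare_query(sql.query['playerDetailedStats'], {
    '<position>': 'hp.position',
    '<player_test>': '(1)',
    '<seats_test>': 'between 2 and 10',
    '<datestest>': ">= '2009-01-01'",
})
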
View File

@@ -17,6 +17,7 @@
import os
import sys
import threading
import Options
import string
cl_options = string.join(sys.argv[1:])
@@ -116,11 +117,13 @@ class fpdb:
def dia_create_del_database(self, widget, data=None):
print "todo: implement dia_create_del_database"
self.obtain_global_lock()
self.release_global_lock()
#end def dia_create_del_database
def dia_create_del_user(self, widget, data=None):
print "todo: implement dia_create_del_user"
self.obtain_global_lock()
self.release_global_lock()
#end def dia_create_del_user
def dia_database_stats(self, widget, data=None):
@@ -137,16 +140,19 @@ class fpdb:
def dia_delete_db_parts(self, widget, data=None):
print "todo: implement dia_delete_db_parts"
self.obtain_global_lock()
self.release_global_lock()
#end def dia_delete_db_parts
def dia_edit_profile(self, widget=None, data=None, create_default=False, path=None):
print "todo: implement dia_edit_profile"
self.obtain_global_lock()
self.release_global_lock()
#end def dia_edit_profile
def dia_export_db(self, widget, data=None):
print "todo: implement dia_export_db"
self.obtain_global_lock()
self.release_global_lock()
#end def dia_export_db
def dia_get_db_root_credentials(self):
@@ -172,6 +178,7 @@ class fpdb:
def dia_import_db(self, widget, data=None):
print "todo: implement dia_import_db"
self.obtain_global_lock()
self.release_global_lock()
#end def dia_import_db
def dia_licensing(self, widget, data=None):
@@ -180,7 +187,7 @@ class fpdb:
def dia_load_profile(self, widget, data=None):
"""Dialogue to select a file to load a profile from"""
if self.obtain_global_lock() == 0: # returns 0 if successful
if self.obtain_global_lock(): # returns true if successful
#try:
# chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
# action=gtk.FILE_CHOOSER_ACTION_OPEN,
@@ -195,15 +202,18 @@ class fpdb:
# print 'User cancelled loading profile'
#except:
# pass
self.load_profile()
try:
self.load_profile()
except:
pass
self.release_global_lock()
#end def dia_load_profile
def dia_recreate_tables(self, widget, data=None):
"""Dialogue that asks user to confirm that he wants to delete and recreate the tables"""
if self.obtain_global_lock() in (0,2): # returns 0 if successful, 2 if Hands table does not exist
if self.obtain_global_lock(): # returns true if successful
lock_released = False
#lock_released = False
try:
dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING,
buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables")
@@ -214,27 +224,28 @@ class fpdb:
response = dia_confirm.run()
dia_confirm.destroy()
if response == gtk.RESPONSE_YES:
if self.db.fdb.backend == self.fdb_lock.fdb.MYSQL_INNODB:
#if self.db.fdb.backend == self.fdb_lock.fdb.MYSQL_INNODB:
# mysql requires locks on all tables or none - easier to release this lock
# than lock all the other tables
# ToDo: lock all other tables so that lock doesn't have to be released
self.release_global_lock()
lock_released = True
self.db.fdb.recreate_tables()
else:
# self.release_global_lock()
# lock_released = True
self.db.fdb.recreate_tables()
#else:
# for other dbs use same connection as holds global lock
self.fdb_lock.fdb.recreate_tables()
# self.fdb_lock.fdb.recreate_tables()
elif response == gtk.RESPONSE_NO:
print 'User cancelled recreating tables'
except:
pass
if not lock_released:
self.release_global_lock()
#if not lock_released:
self.release_global_lock()
#end def dia_recreate_tables
def dia_regression_test(self, widget, data=None):
print "todo: implement dia_regression_test"
self.obtain_global_lock()
self.release_global_lock()
#end def dia_regression_test
def dia_save_profile(self, widget, data=None):
@@ -353,6 +364,7 @@ class fpdb:
"""Loads profile from the provided path name."""
self.config = Configuration.Config(file=options.config, dbname=options.dbname)
self.settings = {}
self.settings['global_lock'] = self.lock
if (os.sep=="/"):
self.settings['os']="linuxmac"
else:
@@ -405,10 +417,15 @@ class fpdb:
#end def not_implemented
def obtain_global_lock(self):
print "\nTaking global lock ..."
self.fdb_lock = Database.Database(self.config, sql = self.sql)
self.fdb_lock.do_connect(self.config)
return self.fdb_lock.fdb.get_global_lock()
ret = self.lock.acquire(False) # will return false if lock is already held
if ret:
print "\nGlobal lock taken ..."
else:
print "\nFailed to get global lock."
return ret
# need to release it later:
# self.lock.release()
#end def obtain_global_lock
def quit(self, widget, data=None):
@@ -419,8 +436,7 @@ class fpdb:
#end def quit_clicked
def release_global_lock(self):
self.fdb_lock.fdb.db.rollback()
self.fdb_lock.fdb.disconnect()
self.lock.release()
print "Global lock released.\n"
#end def release_global_lock
@@ -446,7 +462,7 @@ class fpdb:
#end def tab_bulk_import
def tab_player_stats(self, widget, data=None):
new_ps_thread=GuiPlayerStats.GuiPlayerStats(self.config, self.querydict, self.window)
new_ps_thread=GuiPlayerStats.GuiPlayerStats(self.config, self.sql, self.window)
self.threads.append(new_ps_thread)
ps_tab=new_ps_thread.get_vbox()
self.add_and_display_tab(ps_tab, "Player Stats")
@@ -487,6 +503,7 @@ This program is licensed under the AGPL3, see docs"""+os.sep+"agpl-3.0.txt")
def __init__(self):
self.threads = []
self.lock = threading.Lock()
self.db = None
self.status_bar = None
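
The net effect of the fpdb.py changes: the global lock is no longer taken through a second database connection (the old fdb_lock / get_global_lock route) but is an in-process threading.Lock created in __init__ and handed to the tabs via settings['global_lock']. A condensed sketch of the new obtain/release pair; LockOwner is a stand-in name for the fpdb application class.

import threading

class LockOwner(object):
    def __init__(self):
        self.lock = threading.Lock()

    def obtain_global_lock(self):
        print("Taking global lock ...")
        ret = self.lock.acquire(False)    # False -> do not block if already held
        if ret:
            print("Global lock taken ...")
        else:
            print("Failed to get global lock.")
        return ret

    def release_global_lock(self):
        self.lock.release()
        print("Global lock released.")

One consequence of this design: a threading.Lock only serializes threads inside a single fpdb process, so unlike a lock taken through the database connection it cannot coordinate two fpdb processes pointed at the same database.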