diff --git a/pyfpdb/Configuration.py b/pyfpdb/Configuration.py
index 83648577..2859c213 100755
--- a/pyfpdb/Configuration.py
+++ b/pyfpdb/Configuration.py
@@ -688,7 +688,7 @@ class Config:
except: hui['agg_bb_mult'] = 1
try: hui['seats_style'] = self.ui.seats_style
- except: hui['seats_style'] = 'C' # A / C / E, use A(ll) / C(ustom) / E(xact) seat numbers
+ except: hui['seats_style'] = 'A' # A / C / E, use A(ll) / C(ustom) / E(xact) seat numbers
try: hui['seats_cust_nums'] = self.ui.seats_cust_nums
except: hui['seats_cust_nums'] = ['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
@@ -711,7 +711,7 @@ class Config:
except: hui['h_agg_bb_mult'] = 1
try: hui['h_seats_style'] = self.ui.h_seats_style
- except: hui['h_seats_style'] = 'E' # A / C / E, use A(ll) / C(ustom) / E(xact) seat numbers
+ except: hui['h_seats_style'] = 'A' # A / C / E, use A(ll) / C(ustom) / E(xact) seat numbers
try: hui['h_seats_cust_nums'] = self.ui.h_seats_cust_nums
except: hui['h_seats_cust_nums'] = ['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
diff --git a/pyfpdb/Database.py b/pyfpdb/Database.py
index 775e1475..cd6ad298 100755
--- a/pyfpdb/Database.py
+++ b/pyfpdb/Database.py
@@ -21,6 +21,11 @@ Create and manage the database objects.
########################################################################
+# ToDo: - rebuild indexes / vacuum option
+# - check speed of get_stats_from_hand() - add log info
+# - check size of db, seems big? (mysql)
+# - investigate size of mysql db (200K for just 7K hands? 2GB for 140K hands?)
+
# postmaster -D /var/lib/pgsql/data
# Standard Library modules
@@ -69,14 +74,9 @@ class Database:
indexes = [
[ ] # no db with index 0
, [ ] # no db with index 1
- , [ # indexes for mysql (list index 2)
+ , [ # indexes for mysql (list index 2) (foreign keys not here, in next data structure)
# {'tab':'Players', 'col':'name', 'drop':0} unique indexes not dropped
# {'tab':'Hands', 'col':'siteHandNo', 'drop':0} unique indexes not dropped
- {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09
- , {'tab':'HandsPlayers', 'col':'handId', 'drop':0} # not needed, handled by fk
- , {'tab':'HandsPlayers', 'col':'playerId', 'drop':0} # not needed, handled by fk
- , {'tab':'HandsPlayers', 'col':'tourneyTypeId', 'drop':0}
- , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
#, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} unique indexes not dropped
]
, [ # indexes for postgres (list index 3)
@@ -117,6 +117,8 @@ class Database:
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsPlayers', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
+ , {'fktab':'HandsPlayers', 'fkcol':'tourneysPlayersId','rtab':'TourneysPlayers','rcol':'id', 'drop':1}
, {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
@@ -431,6 +433,7 @@ class Database:
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "*** Database Error: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
+ # is get_stats_from_hand slow?
def get_stats_from_hand( self, hand, type # type is "ring" or "tour"
, hud_params = {'hud_style':'A', 'agg_bb_mult':1000
,'seats_style':'A', 'seats_cust_nums':['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
@@ -1165,9 +1168,9 @@ class Database:
print "dropping mysql index ", idx['tab'], idx['col']
try:
self.get_cursor().execute( "alter table %s drop index %s"
- , (idx['tab'],idx['col']) )
+ , (idx['tab'], idx['col']) )
except:
- pass
+ print " drop idx failed: " + str(sys.exc_info())
elif self.backend == self.PGSQL:
print "dropping pg index ", idx['tab'], idx['col']
# mod to use tab_col for index name?
@@ -1175,13 +1178,119 @@ class Database:
self.get_cursor().execute( "drop index %s_%s_idx"
% (idx['tab'],idx['col']) )
except:
- pass
+ print " drop idx failed: " + str(sys.exc_info())
else:
print "Only MySQL and Postgres supported so far"
return -1
if self.backend == self.PGSQL:
self.connection.set_isolation_level(1) # go back to normal isolation level
#end def dropAllIndexes
+
+ def createAllForeignKeys(self):
+ """Create foreign keys"""
+
+ try:
+ if self.backend == self.PGSQL:
+ self.connection.set_isolation_level(0) # allow table/index operations to work
+ c = self.get_cursor()
+ except:
+ print " set_isolation_level failed: " + str(sys.exc_info())
+
+ for fk in self.foreignKeys[self.backend]:
+ if self.backend == self.MYSQL_INNODB:
+ c.execute("SELECT constraint_name " +
+ "FROM information_schema.KEY_COLUMN_USAGE " +
+ #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+ "WHERE 1=1 " +
+ "AND table_name = %s AND column_name = %s " +
+ "AND referenced_table_name = %s " +
+ "AND referenced_column_name = %s ",
+ (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+ cons = c.fetchone()
+ #print "afterbulk: cons=", cons
+ if cons:
+ pass
+ else:
+ print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+ try:
+ c.execute("alter table " + fk['fktab'] + " add foreign key ("
+ + fk['fkcol'] + ") references " + fk['rtab'] + "("
+ + fk['rcol'] + ")")
+ except:
+ print " create fk failed: " + str(sys.exc_info())
+ elif self.backend == self.PGSQL:
+ print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+ try:
+ c.execute("alter table " + fk['fktab'] + " add constraint "
+ + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
+ + " foreign key (" + fk['fkcol']
+ + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
+ except:
+ print " create fk failed: " + str(sys.exc_info())
+ else:
+ print "Only MySQL and Postgres supported so far"
+
+ try:
+ if self.backend == self.PGSQL:
+ self.connection.set_isolation_level(1) # go back to normal isolation level
+ except:
+ print " set_isolation_level failed: " + str(sys.exc_info())
+ #end def createAllForeignKeys
+
+ def dropAllForeignKeys(self):
+        """Drop all foreign key constraints listed in the foreignKeys data
+           structure (does not touch primary keys or standalone indexes)"""
+ # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
+ if self.backend == self.PGSQL:
+ self.connection.set_isolation_level(0) # allow table/index operations to work
+ c = self.get_cursor()
+
+ for fk in self.foreignKeys[self.backend]:
+ if self.backend == self.MYSQL_INNODB:
+ c.execute("SELECT constraint_name " +
+ "FROM information_schema.KEY_COLUMN_USAGE " +
+ #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+ "WHERE 1=1 " +
+ "AND table_name = %s AND column_name = %s " +
+ "AND referenced_table_name = %s " +
+ "AND referenced_column_name = %s ",
+ (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+ cons = c.fetchone()
+ #print "preparebulk find fk: cons=", cons
+ if cons:
+ print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
+ try:
+ c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
+ except:
+ print " drop failed: " + str(sys.exc_info())
+ elif self.backend == self.PGSQL:
+# DON'T FORGET TO RECREATE THEM!!
+ print "dropping pg fk", fk['fktab'], fk['fkcol']
+ try:
+ # try to lock table to see if index drop will work:
+ # hmmm, tested by commenting out rollback in grapher. lock seems to work but
+ # then drop still hangs :-( does work in some tests though??
+ # will leave code here for now pending further tests/enhancement ...
+ c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
+ #print "after lock, status:", c.statusmessage
+ #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
+ try:
+ c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
+ print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
+ except:
+ if "does not exist" not in str(sys.exc_value):
+ print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
+ % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
+ except:
+ print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
+ % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
+ else:
+ print "Only MySQL and Postgres supported so far"
+
+ if self.backend == self.PGSQL:
+ self.connection.set_isolation_level(1) # go back to normal isolation level
+ #end def dropAllForeignKeys
+
def fillDefaultData(self):
c = self.get_cursor()
@@ -1209,6 +1318,12 @@ class Database:
#end def fillDefaultData
+ def rebuild_indexes(self, start=None):
+ self.dropAllIndexes()
+ self.createAllIndexes()
+ self.dropAllForeignKeys()
+ self.createAllForeignKeys()
+
def rebuild_hudcache(self, start=None):
"""clears hudcache and rebuilds from the individual handsplayers records"""
@@ -1291,7 +1406,7 @@ class Database:
except:
print "Error during analyze:", str(sys.exc_value)
elif self.backend == self.PGSQL:
- self.connection.set_isolation_level(0) # allow vacuum to work
+ self.connection.set_isolation_level(0) # allow analyze to work
try:
self.get_cursor().execute(self.sql.query['analyze'])
except:
@@ -1302,6 +1417,25 @@ class Database:
print "Analyze took %.1f seconds" % (atime,)
#end def analyzeDB
+ def vacuumDB(self):
+        """Do whatever the DB can offer to reclaim space / defragment tables (optimize table / vacuum)"""
+ stime = time()
+ if self.backend == self.MYSQL_INNODB:
+ try:
+ self.get_cursor().execute(self.sql.query['vacuum'])
+ except:
+ print "Error during vacuum:", str(sys.exc_value)
+ elif self.backend == self.PGSQL:
+ self.connection.set_isolation_level(0) # allow vacuum to work
+ try:
+ self.get_cursor().execute(self.sql.query['vacuum'])
+ except:
+ print "Error during vacuum:", str(sys.exc_value)
+ self.connection.set_isolation_level(1) # go back to normal isolation level
+ self.commit()
+ atime = time() - stime
+ print "Vacuum took %.1f seconds" % (atime,)
+    #end def vacuumDB
# Start of Hand Writing routines. Idea is to provide a mixture of routines to store Hand data
# however the calling prog requires. Main aims:
@@ -1383,7 +1517,7 @@ class Database:
q = q.replace('%s', self.sql.query['placeholder'])
- c = self.connection.cursor()
+ c = self.get_cursor()
c.execute(q, (
p['tableName'],
@@ -1569,7 +1703,7 @@ class Database:
#print "DEBUG: inserts: %s" %inserts
#print "DEBUG: q: %s" % q
- c = self.connection.cursor()
+ c = self.get_cursor()
c.executemany(q, inserts)
def storeHudCacheNew(self, gid, pid, hc):
diff --git a/pyfpdb/GuiAutoImport.py b/pyfpdb/GuiAutoImport.py
index c154d4c2..517293ec 100755
--- a/pyfpdb/GuiAutoImport.py
+++ b/pyfpdb/GuiAutoImport.py
@@ -71,22 +71,24 @@ class GuiAutoImport (threading.Thread):
self.intervalLabel = gtk.Label("Time between imports in seconds:")
self.intervalLabel.set_alignment(xalign=1.0, yalign=0.5)
- vbox1.pack_start(self.intervalLabel, True, True, 0)
+ vbox1.pack_start(self.intervalLabel, False, True, 0)
hbox = gtk.HBox(False, 0)
- vbox2.pack_start(hbox, True, True, 0)
+ vbox2.pack_start(hbox, False, True, 0)
self.intervalEntry = gtk.Entry()
self.intervalEntry.set_text(str(self.config.get_import_parameters().get("interval")))
hbox.pack_start(self.intervalEntry, False, False, 0)
lbl1 = gtk.Label()
- hbox.pack_start(lbl1, expand=True, fill=True)
+ hbox.pack_start(lbl1, expand=False, fill=True)
lbl = gtk.Label('')
- vbox1.pack_start(lbl, expand=True, fill=True)
+ vbox1.pack_start(lbl, expand=False, fill=True)
lbl = gtk.Label('')
- vbox2.pack_start(lbl, expand=True, fill=True)
+ vbox2.pack_start(lbl, expand=False, fill=True)
self.addSites(vbox1, vbox2)
+ self.textbuffer = gtk.TextBuffer()
+ self.textview = gtk.TextView(self.textbuffer)
hbox = gtk.HBox(False, 0)
self.mainVBox.pack_start(hbox, expand=True, padding=3)
@@ -102,13 +104,27 @@ class GuiAutoImport (threading.Thread):
self.startButton.connect("clicked", self.startClicked, "start clicked")
hbox.pack_start(self.startButton, expand=False, fill=False)
+
lbl2 = gtk.Label()
hbox.pack_start(lbl2, expand=True, fill=False)
hbox = gtk.HBox(False, 0)
hbox.show()
+
self.mainVBox.pack_start(hbox, expand=True, padding=3)
+
+ scrolledwindow = gtk.ScrolledWindow()
+ scrolledwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
+ self.mainVBox.pack_end(scrolledwindow, expand=True)
+ scrolledwindow.add(self.textview)
+
self.mainVBox.show_all()
+ self.addText("AutoImport Ready.")
+
+ def addText(self, text):
+ end_iter = self.textbuffer.get_end_iter()
+ self.textbuffer.insert(end_iter, text)
+ self.textview.scroll_to_mark(self.textbuffer.get_insert(), 0)
#end of GuiAutoImport.__init__
@@ -139,8 +155,9 @@ class GuiAutoImport (threading.Thread):
if self.doAutoImportBool:
self.startButton.set_label(u' I M P O R T I N G ')
self.importer.runUpdated()
- sys.stdout.write(".")
- sys.stdout.flush()
+ self.addText(".")
+ #sys.stdout.write(".")
+ #sys.stdout.flush()
gobject.timeout_add(1000, self.reset_startbutton)
return True
return False
@@ -172,7 +189,7 @@ class GuiAutoImport (threading.Thread):
# - Ideally we want to release the lock if the auto-import is killed by some
# kind of exception - is this possible?
if self.settings['global_lock'].acquire(False): # returns false immediately if lock not acquired
- print "\nGlobal lock taken ..."
+ self.addText("\nGlobal lock taken ... Auto Import Started.\n")
self.doAutoImportBool = True
widget.set_label(u' _Stop Autoimport ')
if self.pipe_to_hud is None:
@@ -190,12 +207,11 @@ class GuiAutoImport (threading.Thread):
universal_newlines=True)
except:
err = traceback.extract_tb(sys.exc_info()[2])[-1]
- print "*** GuiAutoImport Error opening pipe: " + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
+ self.addText( "\n*** GuiAutoImport Error opening pipe: " + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1]))
else:
for site in self.input_settings:
self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
- print " * Add", site, " import directory", str(self.input_settings[site][0])
- print "+Import directory - Site: " + site + " dir: " + str(self.input_settings[site][0])
+ self.addText("\n * Add "+ site+ " import directory "+ str(self.input_settings[site][0]))
self.do_import()
interval = int(self.intervalEntry.get_text())
if self.importtimer != 0:
@@ -203,14 +219,14 @@ class GuiAutoImport (threading.Thread):
self.importtimer = gobject.timeout_add(interval * 1000, self.do_import)
else:
- print "auto-import aborted - global lock not available"
+ self.addText("\nauto-import aborted - global lock not available")
else: # toggled off
gobject.source_remove(self.importtimer)
self.settings['global_lock'].release()
self.doAutoImportBool = False # do_import will return this and stop the gobject callback timer
- print "Stopping autoimport - global lock released."
+ self.addText("\nStopping autoimport - global lock released.")
if self.pipe_to_hud.poll() is not None:
- print " * Stop Autoimport: HUD already terminated"
+ self.addText("\n * Stop Autoimport: HUD already terminated")
else:
#print >>self.pipe_to_hud.stdin, "\n"
self.pipe_to_hud.communicate('\n') # waits for process to terminate
diff --git a/pyfpdb/HUD_main.py b/pyfpdb/HUD_main.py
index 4dea5ad7..7e2d5fa6 100755
--- a/pyfpdb/HUD_main.py
+++ b/pyfpdb/HUD_main.py
@@ -105,10 +105,7 @@ class HUD_main(object):
def idle_func():
gtk.gdk.threads_enter()
- try: # TODO: seriously need to decrease the scope of this block.. what are we expecting to error?
- # TODO: The purpose of this try/finally block is to make darn sure that threads_leave()
- # TODO: gets called. If there is an exception and threads_leave() doesn't get called we
- # TODO: lock up. REB
+ try:
table.gdkhandle = gtk.gdk.window_foreign_new(table.number)
newlabel = gtk.Label("%s - %s" % (table.site, table_name))
self.vb.add(newlabel)
@@ -122,9 +119,12 @@ class HUD_main(object):
m.update_gui(new_hand_id)
self.hud_dict[table_name].update(new_hand_id, self.config)
self.hud_dict[table_name].reposition_windows()
+ except:
+ print "*** Exception in HUD_main::idle_func() *** "
+ traceback.print_stack()
finally:
gtk.gdk.threads_leave()
- return False
+ return False
self.hud_dict[table_name] = Hud.Hud(self, table, max, poker_game, self.config, self.db_connection)
self.hud_dict[table_name].table_name = table_name
@@ -168,7 +168,7 @@ class HUD_main(object):
pass
finally:
gtk.gdk.threads_leave()
- return False
+ return False
gobject.idle_add(idle_func)
diff --git a/pyfpdb/Hud.py b/pyfpdb/Hud.py
index 8306c364..c9420a8a 100644
--- a/pyfpdb/Hud.py
+++ b/pyfpdb/Hud.py
@@ -482,7 +482,7 @@ class Hud:
try:
# throws "invalid window handle" in WinXP (sometimes?)
s.window.destroy()
- except:
+ except: # TODO: what exception?
pass
self.stat_windows = {}
# also kill any aux windows
diff --git a/pyfpdb/SQL.py b/pyfpdb/SQL.py
index d98d9fbe..e38bc122 100644
--- a/pyfpdb/SQL.py
+++ b/pyfpdb/SQL.py
@@ -461,7 +461,7 @@ class Sql:
totalProfit INT,
comment text,
commentTs DATETIME,
- tourneysPlayersId BIGINT UNSIGNED,
+ tourneysPlayersId BIGINT UNSIGNED, FOREIGN KEY (tourneysPlayersId) REFERENCES TourneysPlayers(id),
tourneyTypeId SMALLINT UNSIGNED NOT NULL DEFAULT 1, FOREIGN KEY (tourneyTypeId) REFERENCES TourneyTypes(id),
wonWhenSeenStreet1 FLOAT,
@@ -551,9 +551,7 @@ class Sql:
street3Raises TINYINT,
street4Raises TINYINT,
- actionString VARCHAR(15),
-
- FOREIGN KEY (tourneysPlayersId) REFERENCES TourneysPlayers(id))
+ actionString VARCHAR(15))
ENGINE=INNODB"""
elif db_server == 'postgresql':
self.query['createHandsPlayersTable'] = """CREATE TABLE HandsPlayers (
@@ -3100,8 +3098,10 @@ class Sql:
analyze table Autorates, GameTypes, Hands, HandsPlayers, HudCache, Players
, Settings, Sites, Tourneys, TourneysPlayers, TourneyTypes
"""
- else: # assume postgres
- self.query['analyze'] = "vacuum analyze"
+ elif db_server == 'postgresql':
+ self.query['analyze'] = "analyze"
+ elif db_server == 'sqlite':
+ self.query['analyze'] = "analyze"
if db_server == 'mysql':
self.query['lockForInsert'] = """
@@ -3109,8 +3109,20 @@ class Sql:
, HudCache write, GameTypes write, Sites write, Tourneys write
, TourneysPlayers write, TourneyTypes write, Autorates write
"""
- else: # assume postgres
+ elif db_server == 'postgresql':
self.query['lockForInsert'] = ""
+ elif db_server == 'sqlite':
+ self.query['lockForInsert'] = ""
+
+ if db_server == 'mysql':
+ self.query['vacuum'] = """optimize table Hands, HandsPlayers, HandsActions, Players
+ , HudCache, GameTypes, Sites, Tourneys
+ , TourneysPlayers, TourneyTypes, Autorates
+ """
+ elif db_server == 'postgresql':
+ self.query['vacuum'] = """ vacuum """
+ elif db_server == 'sqlite':
+ self.query['vacuum'] = """ vacuum """
self.query['getGametypeFL'] = """SELECT id
FROM Gametypes
diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py
index 872c6672..d5f4faec 100755
--- a/pyfpdb/fpdb.py
+++ b/pyfpdb/fpdb.py
@@ -159,10 +159,10 @@ class fpdb:
def add_icon_to_button(self, button):
iconBox = gtk.HBox(False, 0)
image = gtk.Image()
- image.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_MENU)
+ image.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_SMALL_TOOLBAR)
gtk.Button.set_relief(button, gtk.RELIEF_NONE)
settings = gtk.Widget.get_settings(button);
- (w,h) = gtk.icon_size_lookup_for_settings(settings, gtk.ICON_SIZE_MENU);
+ (w,h) = gtk.icon_size_lookup_for_settings(settings, gtk.ICON_SIZE_SMALL_TOOLBAR);
gtk.Widget.set_size_request (button, w + 4, h + 4);
image.show()
iconBox.pack_start(image, True, False, 0)
@@ -357,6 +357,27 @@ class fpdb:
self.release_global_lock()
+ def dia_rebuild_indexes(self, widget, data=None):
+ if self.obtain_global_lock():
+ self.dia_confirm = gtk.MessageDialog(parent=None
+ ,flags=0
+ ,type=gtk.MESSAGE_WARNING
+ ,buttons=(gtk.BUTTONS_YES_NO)
+ ,message_format="Confirm rebuilding database indexes")
+ diastring = "Please confirm that you want to rebuild the database indexes."
+ self.dia_confirm.format_secondary_text(diastring)
+
+ response = self.dia_confirm.run()
+ self.dia_confirm.destroy()
+ if response == gtk.RESPONSE_YES:
+ self.db.rebuild_indexes()
+ self.db.vacuumDB()
+ self.db.analyzeDB()
+ elif response == gtk.RESPONSE_NO:
+ print 'User cancelled rebuilding db indexes'
+
+ self.release_global_lock()
+
def __calendar_dialog(self, widget, entry):
self.dia_confirm.set_modal(False)
d = gtk.Window(gtk.WINDOW_TOPLEVEL)
@@ -451,6 +472,7 @@ class fpdb:
+