commit eec9c94342
merge from carl

@@ -75,6 +75,7 @@ class Everleaf(HandHistoryConverter):
 self.rexx.setPlayerInfoRegex('Seat (?P<SEAT>[0-9]+): (?P<PNAME>.*) \(\s+(\$ (?P<CASH>[.0-9]+) USD|new player|All-in) \)')
 self.rexx.setPostSbRegex('.*\n(?P<PNAME>.*): posts small blind \[\$? (?P<SB>[.0-9]+)')
 self.rexx.setPostBbRegex('.*\n(?P<PNAME>.*): posts big blind \[\$? (?P<BB>[.0-9]+)')
+self.rexx.setPostBothRegex('.*\n(?P<PNAME>.*): posts small \& big blinds \[\$? (?P<SBBB>[.0-9]+)')
 # mct : what about posting small & big blinds simultaneously?
 self.rexx.setHeroCardsRegex('.*\nDealt\sto\s(?P<PNAME>.*)\s\[ (?P<HOLE1>\S\S), (?P<HOLE2>\S\S) \]')
 self.rexx.setActionStepRegex('.*\n(?P<PNAME>.*)(?P<ATYPE>: bets| checks| raises| calls| folds)(\s\[\$ (?P<BET>[.\d]+) USD\])?')
@@ -131,7 +132,8 @@ class Everleaf(HandHistoryConverter):
 r"(\*\* Dealing Turn \*\* \[ \S\S \](?P<TURN>.+(?=\*\* Dealing River \*\*)|.+))?"
 r"(\*\* Dealing River \*\* \[ \S\S \](?P<RIVER>.+))?", hand.string,re.DOTALL)

-hand.streets = m
+hand.addStreets(m)


 def readCommunityCards(self, hand):
 # currently regex in wrong place pls fix my brain's fried
@@ -146,15 +148,13 @@ class Everleaf(HandHistoryConverter):
 def readBlinds(self, hand):
 try:
 m = self.rexx.small_blind_re.search(hand.string)
-hand.addBlind(m.group('PNAME'), m.group('SB'))
-#hand.posted = [m.group('PNAME')]
-except:
-hand.addBlind(None, 0)
-#hand.posted = ["FpdbNBP"]
-m = self.rexx.big_blind_re.finditer(hand.string)
-for a in m:
-hand.addBlind(a.group('PNAME'), a.group('BB'))
-#hand.posted = hand.posted + [a.group('PNAME')]
+hand.addBlind(m.group('PNAME'), 'small blind', m.group('SB'))
+except: # no small blind
+hand.addBlind(None, None, None)
+for a in self.rexx.big_blind_re.finditer(hand.string):
+hand.addBlind(a.group('PNAME'), 'big blind', a.group('BB'))
+for a in self.rexx.both_blinds_re.finditer(hand.string):
+hand.addBlind(a.group('PNAME'), 'small & big blinds', a.group('SBBB'))

 def readHeroCards(self, hand):
 m = self.rexx.hero_cards_re.search(hand.string)
@@ -167,7 +167,6 @@ class Everleaf(HandHistoryConverter):

 def readAction(self, hand, street):
 m = self.rexx.action_re.finditer(hand.streets.group(street))
-hand.actions[street] = []
 for action in m:
 if action.group('ATYPE') == ' raises':
 hand.addRaiseTo( street, action.group('PNAME'), action.group('BET') )
@@ -182,6 +181,7 @@ class Everleaf(HandHistoryConverter):
 else:
 print "DEBUG: unimplemented readAction: %s %s" %(action.group('PNAME'),action.group('ATYPE'),)
 #hand.actions[street] += [[action.group('PNAME'), action.group('ATYPE')]]
+# TODO: Everleaf does not record uncalled bets.


 def readShowdownActions(self, hand):
@@ -193,18 +193,13 @@ class Everleaf(HandHistoryConverter):
 hand.addShownCards(cards, shows.group('PNAME'))

 def readCollectPot(self,hand):
-m = self.rexx.collect_pot_re.search(hand.string)
-if m is not None:
+for m in self.rexx.collect_pot_re.finditer(hand.string):
 if m.group('HAND') is not None:
 re_card = re.compile('(?P<CARD>[0-9tjqka][schd])') # copied from earlier
 cards = set([hand.card(card.group('CARD')) for card in re_card.finditer(m.group('HAND'))])
 hand.addShownCards(cards=None, player=m.group('PNAME'), holeandboard=cards)
 hand.addCollectPot(player=m.group('PNAME'),pot=m.group('POT'))
-else:
-print "WARNING: Unusual, no one collected; can happen if it's folded to big blind with a dead small blind."

-def getRake(self, hand):
-hand.rake = hand.totalpot * Decimal('0.05') # probably not quite right

 if __name__ == "__main__":
 c = Configuration.Config()
@@ -694,7 +694,7 @@ class FpdbSQLQueries:
 AS BBlPer100
 ,hprof2.profitperhand AS Profitperhand
 */
-,hprof2.variance as Variance
+,format(hprof2.variance,2) AS Variance
 FROM
 (select /* stats from hudcache */
 gt.base
@@ -793,7 +793,7 @@ class FpdbSQLQueries:
 AS BBper100
 ,hprof2.profitperhand AS Profitperhand
 */
-,hprof2.variance as Variance
+,round(hprof2.variance,2) AS Variance
 FROM
 (select gt.base
 ,gt.category
@@ -825,10 +825,10 @@ class FpdbSQLQueries:
 else to_char(100.0*(sum(street1Aggr)+sum(street2Aggr)+sum(street3Aggr))
 /(sum(street1Seen)+sum(street2Seen)+sum(street3Seen)),'90D0')
 end AS PoFAFq
-,to_char(sum(totalProfit)/100.0,'9G999G990D00') AS Net
+,round(sum(totalProfit)/100.0,2) AS Net
 ,to_char((sum(totalProfit)/(gt.bigBlind+0.0)) / (sum(HDs)/100.0), '990D00')
 AS BBper100
-,to_char(sum(totalProfit) / (sum(HDs)+0.0), '990D0000') AS Profitperhand
+,to_char(sum(totalProfit/100.0) / (sum(HDs)+0.0), '990D0000') AS Profitperhand
 from Gametypes gt
 inner join Sites s on s.Id = gt.siteId
 inner join HudCache hc on hc.gameTypeId = gt.Id
@@ -29,166 +29,176 @@ import fpdb_import


 class GuiAutoImport (threading.Thread):
 def __init__(self, settings, config):
 """Constructor for GuiAutoImport"""
 self.settings=settings
 self.config=config

 imp = self.config.get_import_parameters()

 print "Import parameters"
 print imp

 self.input_settings = {}
+self.pipe_to_hud = None

 self.importer = fpdb_import.Importer(self,self.settings, self.config)
 self.importer.setCallHud(True)
 self.importer.setMinPrint(30)
 self.importer.setQuiet(False)
 self.importer.setFailOnError(False)
 self.importer.setHandCount(0)
 # self.importer.setWatchTime()

 self.server=settings['db-host']
 self.user=settings['db-user']
 self.password=settings['db-password']
 self.database=settings['db-databaseName']

 self.mainVBox=gtk.VBox(False,1)
 self.mainVBox.show()

 self.settingsHBox = gtk.HBox(False, 0)
 self.mainVBox.pack_start(self.settingsHBox, False, True, 0)
 self.settingsHBox.show()

 self.intervalLabel = gtk.Label("Interval (ie. break) between imports in seconds:")
 self.settingsHBox.pack_start(self.intervalLabel)
 self.intervalLabel.show()

 self.intervalEntry=gtk.Entry()
 self.intervalEntry.set_text(str(self.config.get_import_parameters().get("interval")))
 self.settingsHBox.pack_start(self.intervalEntry)
 self.intervalEntry.show()

 self.addSites(self.mainVBox)

-self.startButton=gtk.Button("Start Autoimport")
-self.startButton.connect("clicked", self.startClicked, "start clicked")
-self.mainVBox.add(self.startButton)
-self.startButton.show()
+self.doAutoImportBool = False
+self.startButton=gtk.ToggleButton("Start Autoimport")
+self.startButton.connect("clicked", self.startClicked, "start clicked")
+self.mainVBox.add(self.startButton)
+self.startButton.show()


 #end of GuiAutoImport.__init__
 def browseClicked(self, widget, data):
 """runs when user clicks one of the browse buttons in the auto import tab"""
 current_path=data[1].get_text()

 dia_chooser = gtk.FileChooserDialog(title="Please choose the path that you want to auto import",
 action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
 buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
 #dia_chooser.set_current_folder(pathname)
 dia_chooser.set_filename(current_path)
 #dia_chooser.set_select_multiple(select_multiple) #not in tv, but want this in bulk import

 response = dia_chooser.run()
 if response == gtk.RESPONSE_OK:
 #print dia_chooser.get_filename(), 'selected'
 data[1].set_text(dia_chooser.get_filename())
 self.input_settings[data[0]][0] = dia_chooser.get_filename()
 elif response == gtk.RESPONSE_CANCEL:
 print 'Closed, no files selected'
 dia_chooser.destroy()
 #end def GuiAutoImport.browseClicked

 def do_import(self):
 """Callback for timer to do an import iteration."""
 self.importer.runUpdated()
 print "GuiAutoImport.import_dir done"
-return True
+return self.doAutoImportBool

-def startClicked(self, widget, data):
-"""runs when user clicks start on auto import tab"""

-# Check to see if we have an open file handle to the HUD and open one if we do not.
-# bufsize = 1 means unbuffered
-# We need to close this file handle sometime.
+def startClicked(self, widget, data):
+"""runs when user clicks start on auto import tab"""

-# TODO: Allow for importing from multiple dirs - REB 29AUG2008
-# As presently written this function does nothing if there is already a pipe open.
-# That is not correct. It should open another dir for importing while piping the
-# results to the same pipe. This means that self.path should be a a list of dirs
-# to watch.
-try: #uhhh, I don't this this is the best way to check for the existence of an attr
-getattr(self, "pipe_to_hud")
-except AttributeError:
-if os.name == 'nt':
-command = "python HUD_main.py" + " %s" % (self.database)
-bs = 0 # windows is not happy with line buffing here
-self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
-universal_newlines=True)
-else:
-command = self.config.execution_path('HUD_main.py')
-bs = 1
-self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
-universal_newlines=True)
-# self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
-# universal_newlines=True)
-# command = command + " %s" % (self.database)
-# print "command = ", command
-# self.pipe_to_hud = os.popen(command, 'w')
+# Check to see if we have an open file handle to the HUD and open one if we do not.
+# bufsize = 1 means unbuffered
+# We need to close this file handle sometime.

-# Add directories to importer object.
-for site in self.input_settings:
-self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
-print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
-self.do_import()
-interval=int(self.intervalEntry.get_text())
-gobject.timeout_add(interval*1000, self.do_import)
-#end def GuiAutoImport.startClicked
+# TODO: Allow for importing from multiple dirs - REB 29AUG2008
+# As presently written this function does nothing if there is already a pipe open.
+# That is not correct. It should open another dir for importing while piping the
+# results to the same pipe. This means that self.path should be a a list of dirs
+# to watch.
+if widget.get_active(): # toggled on
+self.doAutoImportBool = True
+widget.set_label(u'Stop Autoimport')
+if self.pipe_to_hud is None:
+if os.name == 'nt':
+command = "python HUD_main.py" + " %s" % (self.database)
+bs = 0 # windows is not happy with line buffing here
+self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
+universal_newlines=True)
+else:
+command = self.config.execution_path('HUD_main.py')
+bs = 1
+self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
+universal_newlines=True)
+# self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
+# universal_newlines=True)
+# command = command + " %s" % (self.database)
+# print "command = ", command
+# self.pipe_to_hud = os.popen(command, 'w')

-def get_vbox(self):
-"""returns the vbox of this thread"""
-return self.mainVBox
-#end def get_vbox
+# Add directories to importer object.
+for site in self.input_settings:
+self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
+print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
+self.do_import()

-#Create the site line given required info and setup callbacks
-#enabling and disabling sites from this interface not possible
-#expects a box to layout the line horizontally
-def createSiteLine(self, hbox, site, iconpath, hhpath, filter_name, active = True):
-label = gtk.Label(site + " auto-import:")
-hbox.pack_start(label, False, False, 0)
-label.show()
+interval=int(self.intervalEntry.get_text())
+gobject.timeout_add(interval*1000, self.do_import)
+else: # toggled off
+self.doAutoImportBool = False # do_import will return this and stop the gobject callback timer
+#TODO: other clean up, such as killing HUD
+print "Stopping autoimport"
+self.pipe_to_hud.communicate('\n') # waits for process to terminate
+self.pipe_to_hud = None
+widget.set_label(u'Start Autoimport')
+#end def GuiAutoImport.startClicked

-dirPath=gtk.Entry()
-dirPath.set_text(hhpath)
-hbox.pack_start(dirPath, False, True, 0)
-dirPath.show()
+def get_vbox(self):
+"""returns the vbox of this thread"""
+return self.mainVBox
+#end def get_vbox

-browseButton=gtk.Button("Browse...")
-browseButton.connect("clicked", self.browseClicked, [site] + [dirPath])
-hbox.pack_start(browseButton, False, False, 0)
-browseButton.show()
+#Create the site line given required info and setup callbacks
+#enabling and disabling sites from this interface not possible
+#expects a box to layout the line horizontally
+def createSiteLine(self, hbox, site, iconpath, hhpath, filter_name, active = True):
+label = gtk.Label(site + " auto-import:")
+hbox.pack_start(label, False, False, 0)
+label.show()

-label = gtk.Label(site + " filter:")
-hbox.pack_start(label, False, False, 0)
-label.show()
+dirPath=gtk.Entry()
+dirPath.set_text(hhpath)
+hbox.pack_start(dirPath, False, True, 0)
+dirPath.show()

-filter=gtk.Entry()
-filter.set_text(filter_name)
-hbox.pack_start(filter, False, True, 0)
-filter.show()
+browseButton=gtk.Button("Browse...")
+browseButton.connect("clicked", self.browseClicked, [site] + [dirPath])
+hbox.pack_start(browseButton, False, False, 0)
+browseButton.show()

-def addSites(self, vbox):
-for site in self.config.supported_sites.keys():
-pathHBox = gtk.HBox(False, 0)
-vbox.pack_start(pathHBox, False, True, 0)
-pathHBox.show()
+label = gtk.Label(site + " filter:")
+hbox.pack_start(label, False, False, 0)
+label.show()

-paths = self.config.get_default_paths(site)
-params = self.config.get_site_parameters(site)
-self.createSiteLine(pathHBox, site, False, paths['hud-defaultPath'], params['converter'], params['enabled'])
-self.input_settings[site] = [paths['hud-defaultPath']] + [params['converter']]
+filter=gtk.Entry()
+filter.set_text(filter_name)
+hbox.pack_start(filter, False, True, 0)
+filter.show()

+def addSites(self, vbox):
+for site in self.config.supported_sites.keys():
+pathHBox = gtk.HBox(False, 0)
+vbox.pack_start(pathHBox, False, True, 0)
+pathHBox.show()

+paths = self.config.get_default_paths(site)
+params = self.config.get_site_parameters(site)
+self.createSiteLine(pathHBox, site, False, paths['hud-defaultPath'], params['converter'], params['enabled'])
+self.input_settings[site] = [paths['hud-defaultPath']] + [params['converter']]

 if __name__== "__main__":
 def destroy(*args): # call back for terminating the main eventloop
@@ -60,7 +60,7 @@ class GuiPlayerStats (threading.Thread):
 vbox.add(self.stats_table)

 # Create header row
-titles = ("Game", "Hands", "VPIP", "PFR", "saw_f", "sawsd", "wtsdwsf", "wmsd", "FlAFq", "TuAFq", "RvAFq", "PFAFq", "Net($)", "BB/100", "$/hand", "Variance")
+titles = ("Game", "Hands", "VPIP", "PFR", "Saw_F", "SawSD", "WtSDwsF", "W$SD", "FlAFq", "TuAFq", "RvAFq", "PoFAFq", "Net($)", "BB/100", "$/hand", "Variance")

 col = 0
 row = 0
@@ -71,14 +71,17 @@ class GuiPlayerStats (threading.Thread):
 col +=1

 for row in range(rows-1):
+if(row%2 == 0):
+bgcolor = "white"
+else:
+bgcolor = "lightgrey"
 for col in range(cols):
-if(row%2 == 0):
-bgcolor = "white"
-else:
-bgcolor = "lightgrey"
 eb = gtk.EventBox()
 eb.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(bgcolor))
-l = gtk.Label(result[row][col])
+if result[row][col]:
+l = gtk.Label(result[row][col])
+else:
+l = gtk.Label(' ')
 if col == 0:
 l.set_alignment(xalign=0.0, yalign=0.5)
 else:
@@ -127,7 +130,6 @@ class GuiPlayerStats (threading.Thread):

 def __set_hero_name(self, w, site):
 self.heroes[site] = w.get_text()
-print "DEBUG: settings heroes[%s]: %s"%(site, self.heroes[site])

 def __init__(self, db, config, querylist, debug=True):
 self.debug=debug
@@ -87,6 +87,8 @@ class Hand:

 self.action = []
 self.totalpot = None
+self.totalcollected = None

 self.rake = None

 self.bets = {}
@@ -109,6 +111,17 @@ If a player has None chips he won't be added."""
 self.bets[street][name] = []


+def addStreets(self, match):
+# go through m and initialise actions to empty list for each street.
+if match is not None:
+self.streets = match
+for street in match.groupdict():
+if match.group(street) is not None:
+self.actions[street] = []
+
+else:
+print "empty markStreets match" # better to raise exception and put process hand in a try block

 def addHoleCards(self, cards, player):
 """\
 Assigns observed holecards to a player.
@@ -161,11 +174,16 @@ For when a player shows cards for any reason (for showdown or out of choice).
 c = c.replace(k,v)
 return c

-def addBlind(self, player, amount):
+def addBlind(self, player, blindtype, amount):
 # if player is None, it's a missing small blind.
 if player is not None:
 self.bets['PREFLOP'][player].append(Decimal(amount))
-self.lastBet['PREFLOP'] = Decimal(amount)
+self.actions['PREFLOP'] += [(player, 'posts', blindtype, amount)]
+if blindtype == 'big blind':
+self.lastBet['PREFLOP'] = Decimal(amount)
+elif blindtype == 'small & big blinds':
+# extra small blind is 'dead'
+self.lastBet['PREFLOP'] = Decimal(self.bb)
 self.posted += [player]


@@ -175,7 +193,7 @@ For when a player shows cards for any reason (for showdown or out of choice).
 if amount is not None:
 self.bets[street][player].append(Decimal(amount))
 #self.lastBet[street] = Decimal(amount)
-self.actions[street] += [[player, 'calls', amount]]
+self.actions[street] += [(player, 'calls', amount)]

 def addRaiseTo(self, street, player, amountTo):
 """\
@@ -192,21 +210,22 @@ Add a raise on [street] by [player] to [amountTo]
 self.lastBet[street] = Decimal(amountTo)
 amountBy = Decimal(amountTo) - amountToCall
 self.bets[street][player].append(amountBy+amountToCall)
-self.actions[street] += [[player, 'raises', amountBy, amountTo]]
+self.actions[street] += [(player, 'raises', amountBy, amountTo, amountToCall)]

 def addBet(self, street, player, amount):
 self.checkPlayerExists(player)
 self.bets[street][player].append(Decimal(amount))
-self.actions[street] += [[player, 'bets', amount]]
+self.actions[street] += [(player, 'bets', amount)]
+self.lastBet[street] = Decimal(amount)

 def addFold(self, street, player):
 self.checkPlayerExists(player)
 self.folded.add(player)
-self.actions[street] += [[player, 'folds']]
+self.actions[street] += [(player, 'folds')]

 def addCheck(self, street, player):
 self.checkPlayerExists(player)
-self.actions[street] += [[player, 'checks']]
+self.actions[street] += [(player, 'checks')]

 def addCollectPot(self,player, pot):
 self.checkPlayerExists(player)
@@ -231,6 +250,51 @@ Known bug: doesn't take into account side pots"""
 #print street, self.bets[street][player]
 self.totalpot += reduce(operator.add, self.bets[street][player], 0)

+print "conventional totalpot:", self.totalpot
+self.totalpot = 0
+
+print self.actions
+for street in self.actions:
+uncalled = 0
+calls = [0]
+for act in self.actions[street]:
+if act[1] == 'bets': # [name, 'bets', amount]
+self.totalpot += Decimal(act[2])
+uncalled = Decimal(act[2]) # only the last bet or raise can be uncalled
+calls = [0]
+print "uncalled: ", uncalled
+elif act[1] == 'raises': # [name, 'raises', amountby, amountto, amountcalled]
+print "calls %s and raises %s to %s" % (act[4],act[2],act[3])
+self.totalpot += Decimal(act[2]) + Decimal(act[4])
+calls = [0]
+uncalled = Decimal(act[2])
+print "uncalled: ", uncalled
+elif act[1] == 'calls': # [name, 'calls', amount]
+self.totalpot += Decimal(act[2])
+calls = calls + [Decimal(act[2])]
+print "calls:", calls
+if act[1] == ('posts'):
+self.totalpot += Decimal(act[3])
+uncalled = Decimal(act[3])
+if uncalled > 0 and max(calls+[0]) < uncalled:
+
+print "returning some bet, calls:", calls
+print "returned: %.2f from %.2f" % ((uncalled - max(calls)), self.totalpot,)
+self.totalpot -= (uncalled - max(calls))
+print "new totalpot:", self.totalpot
+
+if self.totalcollected is None:
+self.totalcollected = 0;
+for amount in self.collected.values():
+self.totalcollected += Decimal(amount)
+
+# TODO: Some sites (Everleaf) don't record uncalled bets. Figure out if a bet is uncalled and subtract it from self.totalcollected.
+# remember that portions of bets may be uncalled, so:
+# bet followed by no call is an uncalled bet
+# bet x followed by call y where y < x has x-y uncalled (and second player all in)
+

 def getGameTypeAsString(self):
 """\
 Map the tuple self.gametype onto the pokerstars string describing it
@@ -309,7 +373,8 @@ Map the tuple self.gametype onto the pokerstars string describing it
 print "what do they show"

 print "*** SUMMARY ***"
-print "Total pot $%s | Rake $%.2f)" % (self.totalpot, self.rake) # TODO side pots
+print "Total pot $%s | Rake $%.2f" % (self.totalcollected, self.rake) # TODO: side pots

 board = []
 for s in self.board.values():
 board += s
@@ -161,7 +161,8 @@ class HandHistoryConverter:
 def readPlayerStacks(self, hand): abstract

 # Needs to return a MatchObject with group names identifying the streets into the Hand object
-# that is, pulls the chunks of preflop, flop, turn and river text into hand.streets MatchObject.
+# so groups are called by street names 'PREFLOP', 'FLOP', 'STREET2' etc
+# blinds are done seperately
 def markStreets(self, hand): abstract

 #Needs to return a list in the format
@@ -173,11 +174,13 @@ class HandHistoryConverter:
 def readCollectPot(self, hand): abstract

 # Some sites don't report the rake. This will be called at the end of the hand after the pot total has been calculated
-# so that an inheriting class can calculate it for the specific site if need be.
-def getRake(self, hand): abstract
+# an inheriting class can calculate it for the specific site if need be.
+def getRake(self, hand):
+hand.rake = hand.totalpot - hand.totalcollected # * Decimal('0.05') # probably not quite right


 def sanityCheck(self):
-sane = True
+sane = False
 base_w = False
 #Check if hhbase exists and is writable
 #Note: Will not try to create the base HH directory
@@ -108,9 +108,9 @@ class fpdb_db:
 self.cursor.execute(self.sql.query['createHandsPlayersTable'])
 self.cursor.execute(self.sql.query['createHandsActionsTable'])
 self.cursor.execute(self.sql.query['createHudCacheTable'])
-self.cursor.execute(self.sql.query['addTourneyIndex'])
-self.cursor.execute(self.sql.query['addHandsIndex'])
-self.cursor.execute(self.sql.query['addPlayersIndex'])
+#self.cursor.execute(self.sql.query['addTourneyIndex'])
+#self.cursor.execute(self.sql.query['addHandsIndex'])
+#self.cursor.execute(self.sql.query['addPlayersIndex'])
 self.fillDefaultData()
 self.db.commit()
 #end def disconnect
@@ -185,6 +185,7 @@ class fpdb_db:

 self.drop_tables()
 self.create_tables()
+fpdb_simple.createAllIndexes(self)
 self.db.commit()
 print "Finished recreating tables"
 #end def recreate_tables
@@ -116,8 +116,11 @@ class Importer:

 #Run full import on filelist
 def runImport(self):
+fpdb_simple.prepareBulkImport(self.fdb)
 for file in self.filelist:
 self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
+fpdb_simple.afterBulkImport(self.fdb)
+fpdb_simple.analyzeDB(self.fdb)

 #Run import on updated files, then store latest update time.
 def runUpdated(self):
@@ -22,6 +22,11 @@ from time import time

 import fpdb_simple

+saveActions=True # set this to False to avoid storing action data
+# Pros: speeds up imports
+# Cons: no action data is saved, so you need to keep the hand histories
+# variance not available on stats page
+
 #stores a stud/razz hand into the database
 def ring_stud(backend, db, cursor, base, category, site_hand_no, gametype_id, hand_start_time
 ,names, player_ids, start_cashes, antes, card_values, card_suits, winnings, rakes
@@ -39,8 +44,9 @@ def ring_stud(backend, db, cursor, base, category, site_hand_no, gametype_id, ha

 fpdb_simple.storeHudCache(cursor, base, category, gametype_id, player_ids, hudImportData)

-fpdb_simple.storeActions(cursor, hands_players_ids, action_types
-,allIns, action_amounts, actionNos)
+if saveActions:
+fpdb_simple.storeActions(cursor, hands_players_ids, action_types
+,allIns, action_amounts, actionNos)
 return hands_id
 #end def ring_stud
@@ -66,10 +72,10 @@ def ring_holdem_omaha(backend, db, cursor, base, category, site_hand_no, gametyp
 t5 = time()
 fpdb_simple.store_board_cards(cursor, hands_id, board_values, board_suits)
 t6 = time()
-fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
+if saveActions:
+fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
 t7 = time()
-print "cards=%4.3f board=%4.3f hands=%4.3f plyrs=%4.3f hudcache=%4.3f board=%4.3f actions=%4.3f" \
-% (t1-t0, t2-t1, t3-t2, t4-t3, t5-t4, t6-t5, t7-t6)
+#print "fills=(%4.3f) saves=(%4.3f,%4.3f,%4.3f,%4.3f)" % (t2-t0, t3-t2, t4-t3, t5-t4, t6-t5)
 return hands_id
 #end def ring_holdem_omaha
@@ -98,7 +104,8 @@ def tourney_holdem_omaha(backend, db, cursor, base, category, siteTourneyNo, buy

 fpdb_simple.store_board_cards(cursor, hands_id, board_values, board_suits)

-fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
+if saveActions:
+fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
 return hands_id
 #end def tourney_holdem_omaha
@@ -122,6 +129,7 @@ def tourney_stud(backend, db, cursor, base, category, siteTourneyNo, buyin, fee,

 fpdb_simple.storeHudCache(cursor, base, category, gametypeId, playerIds, hudImportData)

-fpdb_simple.storeActions(cursor, hands_players_ids, actionTypes, allIns, actionAmounts, actionNos)
+if saveActions:
+fpdb_simple.storeActions(cursor, hands_players_ids, actionTypes, allIns, actionAmounts, actionNos)
 return hands_id
 #end def tourney_stud
@@ -27,6 +27,307 @@ MYSQL_INNODB=2
 PGSQL=3
 SQLITE=4

+# Data Structures for index and foreign key creation
+# drop_code is an int with possible values: 0 - don't drop for bulk import
+# 1 - drop during bulk import
+# db differences:
+# - note that mysql automatically creates indexes on constrained columns when
+# foreign keys are created, while postgres does not. Hence the much longer list
+# of indexes is required for postgres.
+# all primary keys are left on all the time
+#
+# table column drop_code
+
+indexes = [
+[ ] # no db with index 0
+, [ ] # no db with index 1
+, [ # indexes for mysql (list index 2)
+{'tab':'Players', 'col':'name', 'drop':0}
+, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
+, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
+]
+, [ # indexes for postgres (list index 3)
+{'tab':'Boardcards', 'col':'handId', 'drop':0}
+, {'tab':'Gametypes', 'col':'siteId', 'drop':0}
+, {'tab':'Hands', 'col':'gametypeId', 'drop':1}
+, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
+, {'tab':'HandsActions', 'col':'handplayerId', 'drop':0}
+, {'tab':'HandsPlayers', 'col':'handId', 'drop':1}
+, {'tab':'HandsPlayers', 'col':'playerId', 'drop':1}
+, {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
+, {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
+, {'tab':'HudCache', 'col':'playerId', 'drop':0}
+, {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
+, {'tab':'Players', 'col':'siteId', 'drop':1}
+, {'tab':'Players', 'col':'name', 'drop':0}
+, {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
+, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
+, {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
+, {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0}
+, {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
+]
+]
+
+foreignKeys = [
+[ ] # no db with index 0
+, [ ] # no db with index 1
+, [ # foreign keys for mysql
+{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
+, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
+, {'fktab':'HandsActions', 'fkcol':'handPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
+, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
+, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
+]
+, [ # foreign keys for postgres
+{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
+, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
+, {'fktab':'HandsActions', 'fkcol':'handPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
+, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
+, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
+, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
+]
+]
+
+
+# MySQL Notes:
+# "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
+# - creates index handId on <thistable>.handId
+# alter table t drop foreign key fk
+# alter table t add foreign key (fkcol) references tab(rcol)
+# alter table t add constraint c foreign key (fkcol) references tab(rcol)
+# (fkcol is used for foreigh key name)
+
+# mysql to list indexes:
+# SELECT table_name, index_name, non_unique, column_name
+# FROM INFORMATION_SCHEMA.STATISTICS
+# WHERE table_name = 'tbl_name'
+# AND table_schema = 'db_name'
+# ORDER BY table_name, index_name, seq_in_index
+#
+# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
+# ALTER TABLE tab DROP INDEX idx
+
+# mysql to list fks:
+# SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
+# FROM information_schema.KEY_COLUMN_USAGE
+# WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
+# AND REFERENCED_TABLE_NAME is not null
+# ORDER BY TABLE_NAME, COLUMN_NAME;
+
+# this may indicate missing object
+# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
+
+
+# PG notes:
+
+# To add a foreign key constraint to a table:
+# ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
+# ALTER TABLE tab DROP CONSTRAINT zipchk
+#
+# Note: index names must be unique across a schema
+# CREATE INDEX idx ON tab(col)
+# DROP INDEX idx
+
+def prepareBulkImport(fdb):
+"""Drop some indexes/foreign keys to prepare for bulk import.
+Currently keeping the standalone indexes as needed to import quickly"""
+# fdb is a fpdb_db object including backend, db, cursor, sql variables
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(0) # allow table/index operations to work
+for fk in foreignKeys[fdb.backend]:
+if fk['drop'] == 1:
+if fdb.backend == MYSQL_INNODB:
+fdb.cursor.execute("SELECT constraint_name " +
+"FROM information_schema.KEY_COLUMN_USAGE " +
+#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+"WHERE 1=1 " +
+"AND table_name = %s AND column_name = %s " +
+"AND referenced_table_name = %s " +
+"AND referenced_column_name = %s ",
+(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+cons = fdb.cursor.fetchone()
+print "preparebulk: cons=", cons
+if cons:
+print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
+try:
+fdb.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
+except:
+pass
+elif fdb.backend == PGSQL:
+print "dropping pg fk", fk['fktab'], fk['fkcol']
+try:
+fdb.cursor.execute("alter table " + fk['fktab'] + " drop constraint "
++ fk['fktab'] + '_' + fk['fkcol'] + '_fkey')
+except:
+pass
+else:
+print "Only MySQL and Postgres supported so far"
+return -1
+
+for idx in indexes[fdb.backend]:
+if idx['drop'] == 1:
+if fdb.backend == MYSQL_INNODB:
+print "dropping mysql index ", idx['tab'], idx['col']
+try:
+fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
+except:
+pass
+elif fdb.backend == PGSQL:
+print "dropping pg index ", idx['tab'], idx['col']
+# mod to use tab_col for index name?
+try:
+fdb.cursor.execute( "drop index %s_%s_idx" % (idx['tab'],idx['col']) )
+except:
+pass
+else:
+print "Only MySQL and Postgres supported so far"
+return -1
+
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(1) # go back to normal isolation level
+fdb.db.commit() # seems to clear up errors if there were any in postgres
+#end def prepareBulkImport
+
+def afterBulkImport(fdb):
+"""Re-create any dropped indexes/foreign keys after bulk import"""
+# fdb is a fpdb_db object including backend, db, cursor, sql variables
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(0) # allow table/index operations to work
+for fk in foreignKeys[fdb.backend]:
+if fk['drop'] == 1:
+if fdb.backend == MYSQL_INNODB:
+fdb.cursor.execute("SELECT constraint_name " +
+"FROM information_schema.KEY_COLUMN_USAGE " +
+#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+"WHERE 1=1 " +
+"AND table_name = %s AND column_name = %s " +
+"AND referenced_table_name = %s " +
+"AND referenced_column_name = %s ",
+(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+cons = fdb.cursor.fetchone()
+print "afterbulk: cons=", cons
+if cons:
+pass
+else:
+print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+try:
+fdb.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
++ fk['fkcol'] + ") references " + fk['rtab'] + "("
++ fk['rcol'] + ")")
+except:
+pass
+elif fdb.backend == PGSQL:
+print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+try:
+fdb.cursor.execute("alter table " + fk['fktab'] + " add constraint "
++ fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
++ " foreign key (" + fk['fkcol']
++ ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
+except:
+pass
+else:
+print "Only MySQL and Postgres supported so far"
+return -1
+
+for idx in indexes[fdb.backend]:
+if idx['drop'] == 1:
+if fdb.backend == MYSQL_INNODB:
+print "creating mysql index ", idx['tab'], idx['col']
+try:
+fdb.cursor.execute( "alter table %s add index %s(%s)"
+, (idx['tab'],idx['col'],idx['col']) )
+except:
+pass
+elif fdb.backend == PGSQL:
+# mod to use tab_col for index name?
+print "creating pg index ", idx['tab'], idx['col']
+try:
+print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
+% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+except:
+print " ERROR! :-("
+pass
+else:
+print "Only MySQL and Postgres supported so far"
+return -1
+
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(1) # go back to normal isolation level
+fdb.db.commit() # seems to clear up errors if there were any in postgres
+#end def afterBulkImport
+
+def createAllIndexes(fdb):
+"""Create new indexes"""
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(0) # allow table/index operations to work
+for idx in indexes[fdb.backend]:
+if fdb.backend == MYSQL_INNODB:
+print "creating mysql index ", idx['tab'], idx['col']
+try:
+fdb.cursor.execute( "alter table %s add index %s(%s)"
+, (idx['tab'],idx['col'],idx['col']) )
+except:
+pass
+elif fdb.backend == PGSQL:
+# mod to use tab_col for index name?
+print "creating pg index ", idx['tab'], idx['col']
+try:
+print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
+% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+except:
+print " ERROR! :-("
+pass
+else:
+print "Only MySQL and Postgres supported so far"
+return -1
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(1) # go back to normal isolation level
+#end def createAllIndexes
+
+def dropAllIndexes(fdb):
+"""Drop all standalone indexes (i.e. not including primary keys or foreign keys)
+using list of indexes in indexes data structure"""
+# maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(0) # allow table/index operations to work
+for idx in indexes[fdb.backend]:
+if fdb.backend == MYSQL_INNODB:
+print "dropping mysql index ", idx['tab'], idx['col']
+try:
+fdb.cursor.execute( "alter table %s drop index %s"
+, (idx['tab'],idx['col']) )
+except:
+pass
+elif fdb.backend == PGSQL:
+print "dropping pg index ", idx['tab'], idx['col']
+# mod to use tab_col for index name?
+try:
+fdb.cursor.execute( "drop index %s_%s_idx"
+% (idx['tab'],idx['col']) )
+except:
+pass
+else:
+print "Only MySQL and Postgres supported so far"
+return -1
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(1) # go back to normal isolation level
+#end def dropAllIndexes
+
+def analyzeDB(fdb):
+"""Do whatever the DB can offer to update index/table statistics"""
+if fdb.backend == PGSQL:
+fdb.db.set_isolation_level(0) # allow vacuum to work
+try:
+fdb.cursor.execute("vacuum analyze")
+except:
+print "Error during vacuum"
+fdb.db.set_isolation_level(1) # go back to normal isolation level
+#end def analyzeDB

 class DuplicateError(Exception):
 def __init__(self, value):
@@ -39,7 +340,7 @@ class FpdbError(Exception):
 self.value = value
 def __str__(self):
 return repr(self.value)

 # gets value for last auto-increment key generated
 # returns -1 if a problem occurs
 def getLastInsertId(backend, conn, cursor):
@@ -90,7 +391,7 @@ def checkPositions(positions):
 pass

 ### RHH modified to allow for "position 9" here (pos==9 is when you're a dead hand before the BB
-if (pos!="B" and pos!="S" and pos!=0 and pos!=1 and pos!=2 and pos!=3 and pos!=4 and pos!=5 and pos!=6 and pos!=7 and pos != 8 and pos!=9):
+if (pos!="B" and pos!="S" and pos!=0 and pos!=1 and pos!=2 and pos!=3 and pos!=4 and pos!=5 and pos!=6 and pos!=7 and pos!=9):
 raise FpdbError("invalid position found in checkPositions. i: "+str(i)+" position: "+str(pos))
 #end def fpdb_simple.checkPositions

@@ -2224,4 +2525,4 @@ def store_tourneys_players(cursor, tourney_id, player_ids, payin_amounts, ranks,
 #print "created new tourneys_players.id:",tmp
 result.append(tmp[0])
 return result
 #end def store_tourneys_players