diff --git a/pyfpdb/EverleafToFpdb.py b/pyfpdb/EverleafToFpdb.py
index 68e4ce5b..188e4ade 100755
--- a/pyfpdb/EverleafToFpdb.py
+++ b/pyfpdb/EverleafToFpdb.py
@@ -75,6 +75,7 @@ class Everleaf(HandHistoryConverter):
         self.rexx.setPlayerInfoRegex('Seat (?P[0-9]+): (?P.*) \(\s+(\$ (?P[.0-9]+) USD|new player|All-in) \)')
         self.rexx.setPostSbRegex('.*\n(?P<PNAME>.*): posts small blind \[\$? (?P<SB>[.0-9]+)')
         self.rexx.setPostBbRegex('.*\n(?P<PNAME>.*): posts big blind \[\$? (?P<BB>[.0-9]+)')
+        self.rexx.setPostBothRegex('.*\n(?P<PNAME>.*): posts small \& big blinds \[\$? (?P<SBBB>[.0-9]+)') # mct : what about posting small & big blinds simultaneously?
         self.rexx.setHeroCardsRegex('.*\nDealt\sto\s(?P.*)\s\[ (?P\S\S), (?P\S\S) \]')
         self.rexx.setActionStepRegex('.*\n(?P<PNAME>.*)(?P<ATYPE>: bets| checks| raises| calls| folds)(\s\[\$ (?P<BET>[.\d]+) USD\])?')
@@ -131,7 +132,8 @@ class Everleaf(HandHistoryConverter):
                        r"(\*\* Dealing Turn \*\* \[ \S\S \](?P<TURN>.+(?=\*\* Dealing River \*\*)|.+))?"
                        r"(\*\* Dealing River \*\* \[ \S\S \](?P<RIVER>.+))?", hand.string,re.DOTALL)
-        hand.streets = m
+        hand.addStreets(m)
+
 
     def readCommunityCards(self, hand): # currently regex in wrong place pls fix my brain's fried
 
@@ -146,15 +148,13 @@ class Everleaf(HandHistoryConverter):
     def readBlinds(self, hand):
         try:
             m = self.rexx.small_blind_re.search(hand.string)
-            hand.addBlind(m.group('PNAME'), m.group('SB'))
-            #hand.posted = [m.group('PNAME')]
-        except:
-            hand.addBlind(None, 0)
-            #hand.posted = ["FpdbNBP"]
-        m = self.rexx.big_blind_re.finditer(hand.string)
-        for a in m:
-            hand.addBlind(a.group('PNAME'), a.group('BB'))
-            #hand.posted = hand.posted + [a.group('PNAME')]
+            hand.addBlind(m.group('PNAME'), 'small blind', m.group('SB'))
+        except: # no small blind
+            hand.addBlind(None, None, None)
+        for a in self.rexx.big_blind_re.finditer(hand.string):
+            hand.addBlind(a.group('PNAME'), 'big blind', a.group('BB'))
+        for a in self.rexx.both_blinds_re.finditer(hand.string):
+            hand.addBlind(a.group('PNAME'), 'small & big blinds', a.group('SBBB'))
 
     def readHeroCards(self, hand):
         m = self.rexx.hero_cards_re.search(hand.string)
@@ -167,7 +167,6 @@ class Everleaf(HandHistoryConverter):
 
     def readAction(self, hand, street):
         m = self.rexx.action_re.finditer(hand.streets.group(street))
-        hand.actions[street] = []
         for action in m:
             if action.group('ATYPE') == ' raises':
                 hand.addRaiseTo( street, action.group('PNAME'), action.group('BET') )
@@ -182,6 +181,7 @@ class Everleaf(HandHistoryConverter):
             else:
                 print "DEBUG: unimplemented readAction: %s %s" %(action.group('PNAME'),action.group('ATYPE'),)
             #hand.actions[street] += [[action.group('PNAME'), action.group('ATYPE')]]
+        # TODO: Everleaf does not record uncalled bets.
 
     def readShowdownActions(self, hand):
 
@@ -193,18 +193,13 @@
             hand.addShownCards(cards, shows.group('PNAME'))
 
     def readCollectPot(self,hand):
-        m = self.rexx.collect_pot_re.search(hand.string)
-        if m is not None:
+        for m in self.rexx.collect_pot_re.finditer(hand.string):
             if m.group('HAND') is not None:
                 re_card = re.compile('(?P<CARD>[0-9tjqka][schd])') # copied from earlier
                 cards = set([hand.card(card.group('CARD')) for card in re_card.finditer(m.group('HAND'))])
                 hand.addShownCards(cards=None, player=m.group('PNAME'), holeandboard=cards)
             hand.addCollectPot(player=m.group('PNAME'),pot=m.group('POT'))
-        else:
-            print "WARNING: Unusual, no one collected; can happen if it's folded to big blind with a dead small blind."
-    def getRake(self, hand):
-        hand.rake = hand.totalpot * Decimal('0.05') # probably not quite right
 
 if __name__ == "__main__":
     c = Configuration.Config()
diff --git a/pyfpdb/FpdbSQLQueries.py b/pyfpdb/FpdbSQLQueries.py
index 13899707..d233f2d2 100644
--- a/pyfpdb/FpdbSQLQueries.py
+++ b/pyfpdb/FpdbSQLQueries.py
@@ -694,7 +694,7 @@ class FpdbSQLQueries:
                          AS BBlPer100
                         ,hprof2.profitperhand AS Profitperhand
                         */
-                        ,hprof2.variance as Variance
+                        ,format(hprof2.variance,2) AS Variance
                 FROM
                     (select /* stats from hudcache */
                             gt.base
@@ -793,7 +793,7 @@ class FpdbSQLQueries:
                          AS BBper100
                         ,hprof2.profitperhand AS Profitperhand
                         */
-                        ,hprof2.variance as Variance
+                        ,round(hprof2.variance,2) AS Variance
                 FROM
                     (select gt.base
                            ,gt.category
@@ -825,10 +825,10 @@ class FpdbSQLQueries:
                        else to_char(100.0*(sum(street1Aggr)+sum(street2Aggr)+sum(street3Aggr))
                                /(sum(street1Seen)+sum(street2Seen)+sum(street3Seen)),'90D0')
                   end AS PoFAFq
-                 ,to_char(sum(totalProfit)/100.0,'9G999G990D00') AS Net
+                 ,round(sum(totalProfit)/100.0,2) AS Net
                  ,to_char((sum(totalProfit)/(gt.bigBlind+0.0)) / (sum(HDs)/100.0), '990D00')
                          AS BBper100
-                 ,to_char(sum(totalProfit) / (sum(HDs)+0.0), '990D0000') AS Profitperhand
+                 ,to_char(sum(totalProfit/100.0) / (sum(HDs)+0.0), '990D0000') AS Profitperhand
              from Gametypes gt
                   inner join Sites s on s.Id = gt.siteId
                   inner join HudCache hc on hc.gameTypeId = gt.Id
diff --git a/pyfpdb/GuiAutoImport.py b/pyfpdb/GuiAutoImport.py
index ffe46bb1..30fe7e67 100644
--- a/pyfpdb/GuiAutoImport.py
+++ b/pyfpdb/GuiAutoImport.py
@@ -29,166 +29,176 @@ import fpdb_import
 
 class GuiAutoImport (threading.Thread):
-    def __init__(self, settings, config):
-        """Constructor for GuiAutoImport"""
-        self.settings=settings
-        self.config=config
+    def __init__(self, settings, config):
+        """Constructor for GuiAutoImport"""
+        self.settings=settings
+        self.config=config
 
-        imp = self.config.get_import_parameters()
+        imp = self.config.get_import_parameters()
 
-        print "Import parameters"
-        print imp
+        print "Import parameters"
+        print imp
 
-        self.input_settings = {}
+        self.input_settings = {}
+        self.pipe_to_hud = None
 
-        self.importer = fpdb_import.Importer(self,self.settings, self.config)
-        self.importer.setCallHud(True)
-        self.importer.setMinPrint(30)
-        self.importer.setQuiet(False)
-        self.importer.setFailOnError(False)
-        self.importer.setHandCount(0)
-#        self.importer.setWatchTime()
-
-        self.server=settings['db-host']
-        self.user=settings['db-user']
-        self.password=settings['db-password']
-        self.database=settings['db-databaseName']
+        self.importer = fpdb_import.Importer(self,self.settings, self.config)
+        self.importer.setCallHud(True)
+        self.importer.setMinPrint(30)
+        self.importer.setQuiet(False)
+        self.importer.setFailOnError(False)
+        self.importer.setHandCount(0)
+#        self.importer.setWatchTime()
+
+        self.server=settings['db-host']
+        self.user=settings['db-user']
+        self.password=settings['db-password']
+        self.database=settings['db-databaseName']
 
-        self.mainVBox=gtk.VBox(False,1)
-        self.mainVBox.show()
+        self.mainVBox=gtk.VBox(False,1)
+        self.mainVBox.show()
 
-        self.settingsHBox = gtk.HBox(False, 0)
-        self.mainVBox.pack_start(self.settingsHBox, False, True, 0)
-        self.settingsHBox.show()
+        self.settingsHBox = gtk.HBox(False, 0)
+        self.mainVBox.pack_start(self.settingsHBox, False, True, 0)
+        self.settingsHBox.show()
 
-        self.intervalLabel = gtk.Label("Interval (ie. break) between imports in seconds:")
-        self.settingsHBox.pack_start(self.intervalLabel)
-        self.intervalLabel.show()
+        self.intervalLabel = gtk.Label("Interval (ie. break) between imports in seconds:")
+        self.settingsHBox.pack_start(self.intervalLabel)
+        self.intervalLabel.show()
 
-        self.intervalEntry=gtk.Entry()
-        self.intervalEntry.set_text(str(self.config.get_import_parameters().get("interval")))
-        self.settingsHBox.pack_start(self.intervalEntry)
-        self.intervalEntry.show()
+        self.intervalEntry=gtk.Entry()
+        self.intervalEntry.set_text(str(self.config.get_import_parameters().get("interval")))
+        self.settingsHBox.pack_start(self.intervalEntry)
+        self.intervalEntry.show()
 
-        self.addSites(self.mainVBox)
+        self.addSites(self.mainVBox)
 
-        self.startButton=gtk.Button("Start Autoimport")
-        self.startButton.connect("clicked", self.startClicked, "start clicked")
-        self.mainVBox.add(self.startButton)
-        self.startButton.show()
+        self.doAutoImportBool = False
+        self.startButton=gtk.ToggleButton("Start Autoimport")
+        self.startButton.connect("clicked", self.startClicked, "start clicked")
+        self.mainVBox.add(self.startButton)
+        self.startButton.show()
 
-    #end of GuiAutoImport.__init__
-    def browseClicked(self, widget, data):
-        """runs when user clicks one of the browse buttons in the auto import tab"""
-        current_path=data[1].get_text()
+    #end of GuiAutoImport.__init__
+    def browseClicked(self, widget, data):
+        """runs when user clicks one of the browse buttons in the auto import tab"""
+        current_path=data[1].get_text()
 
-        dia_chooser = gtk.FileChooserDialog(title="Please choose the path that you want to auto import",
-                action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
-                buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
-        #dia_chooser.set_current_folder(pathname)
-        dia_chooser.set_filename(current_path)
-        #dia_chooser.set_select_multiple(select_multiple) #not in tv, but want this in bulk import
+        dia_chooser = gtk.FileChooserDialog(title="Please choose the path that you want to auto import",
+                action=gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER,
+                buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
+        #dia_chooser.set_current_folder(pathname)
+        dia_chooser.set_filename(current_path)
+        #dia_chooser.set_select_multiple(select_multiple) #not in tv, but want this in bulk import
 
-        response = dia_chooser.run()
-        if response == gtk.RESPONSE_OK:
-            #print dia_chooser.get_filename(), 'selected'
-            data[1].set_text(dia_chooser.get_filename())
-            self.input_settings[data[0]][0] = dia_chooser.get_filename()
-        elif response == gtk.RESPONSE_CANCEL:
-            print 'Closed, no files selected'
-        dia_chooser.destroy()
-    #end def GuiAutoImport.browseClicked
+        response = dia_chooser.run()
+        if response == gtk.RESPONSE_OK:
+            #print dia_chooser.get_filename(), 'selected'
+            data[1].set_text(dia_chooser.get_filename())
+            self.input_settings[data[0]][0] = dia_chooser.get_filename()
+        elif response == gtk.RESPONSE_CANCEL:
+            print 'Closed, no files selected'
+        dia_chooser.destroy()
+    #end def GuiAutoImport.browseClicked
 
-    def do_import(self):
-        """Callback for timer to do an import iteration."""
-        self.importer.runUpdated()
-        print "GuiAutoImport.import_dir done"
-        return True
-
-    def startClicked(self, widget, data):
-        """runs when user clicks start on auto import tab"""
+    def do_import(self):
+        """Callback for timer to do an import iteration."""
+        self.importer.runUpdated()
+        print "GuiAutoImport.import_dir done"
+        return self.doAutoImportBool
 
-# Check to see if we have an open file handle to the HUD and open one if we do not.
-# bufsize = 1 means unbuffered
-# We need to close this file handle sometime.
+    def startClicked(self, widget, data):
+        """runs when user clicks start on auto import tab"""
 
-# TODO: Allow for importing from multiple dirs - REB 29AUG2008
-# As presently written this function does nothing if there is already a pipe open.
-# That is not correct. It should open another dir for importing while piping the
-# results to the same pipe. This means that self.path should be a a list of dirs
-# to watch.
-        try: #uhhh, I don't this this is the best way to check for the existence of an attr
-            getattr(self, "pipe_to_hud")
-        except AttributeError:
-            if os.name == 'nt':
-                command = "python HUD_main.py" + " %s" % (self.database)
-                bs = 0 # windows is not happy with line buffing here
-                self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
-                                                    universal_newlines=True)
-            else:
-                command = self.config.execution_path('HUD_main.py')
-                bs = 1
-                self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
-                                                    universal_newlines=True)
-#            self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
-#                                                universal_newlines=True)
-#            command = command + " %s" % (self.database)
-#            print "command = ", command
-#            self.pipe_to_hud = os.popen(command, 'w')
 
+# Check to see if we have an open file handle to the HUD and open one if we do not.
+# bufsize = 1 means unbuffered
+# We need to close this file handle sometime.
 
-# Add directories to importer object.
-        for site in self.input_settings:
-            self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
-            print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
-        self.do_import()
-
-        interval=int(self.intervalEntry.get_text())
-        gobject.timeout_add(interval*1000, self.do_import)
-    #end def GuiAutoImport.startClicked
 
+# TODO: Allow for importing from multiple dirs - REB 29AUG2008
+# As presently written this function does nothing if there is already a pipe open.
+# That is not correct. It should open another dir for importing while piping the
+# results to the same pipe. This means that self.path should be a list of dirs
+# to watch.
+        if widget.get_active(): # toggled on
+            self.doAutoImportBool = True
+            widget.set_label(u'Stop Autoimport')
+            if self.pipe_to_hud is None:
+                if os.name == 'nt':
+                    command = "python HUD_main.py" + " %s" % (self.database)
+                    bs = 0 # windows is not happy with line buffering here
+                    self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
+                                                        universal_newlines=True)
+                else:
+                    command = self.config.execution_path('HUD_main.py')
+                    bs = 1
+                    self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
+                                                        universal_newlines=True)
+                # self.pipe_to_hud = subprocess.Popen((command, self.database), bufsize = bs, stdin = subprocess.PIPE,
+                #                                     universal_newlines=True)
+                # command = command + " %s" % (self.database)
+                # print "command = ", command
+                # self.pipe_to_hud = os.popen(command, 'w')
 
-    def get_vbox(self):
-        """returns the vbox of this thread"""
-        return self.mainVBox
-    #end def get_vbox
+            # Add directories to importer object.
+            for site in self.input_settings:
+                self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
+                print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
+            self.do_import()
 
-    #Create the site line given required info and setup callbacks
-    #enabling and disabling sites from this interface not possible
-    #expects a box to layout the line horizontally
-    def createSiteLine(self, hbox, site, iconpath, hhpath, filter_name, active = True):
-        label = gtk.Label(site + " auto-import:")
-        hbox.pack_start(label, False, False, 0)
-        label.show()
+            interval=int(self.intervalEntry.get_text())
+            gobject.timeout_add(interval*1000, self.do_import)
+        else: # toggled off
+            self.doAutoImportBool = False # do_import will return this and stop the gobject callback timer
+            #TODO: other clean up, such as killing HUD
+            print "Stopping autoimport"
+            self.pipe_to_hud.communicate('\n') # waits for process to terminate
+            self.pipe_to_hud = None
+            widget.set_label(u'Start Autoimport')
+    #end def GuiAutoImport.startClicked
 
-        dirPath=gtk.Entry()
-        dirPath.set_text(hhpath)
-        hbox.pack_start(dirPath, False, True, 0)
-        dirPath.show()
+    def get_vbox(self):
+        """returns the vbox of this thread"""
+        return self.mainVBox
+    #end def get_vbox
 
-        browseButton=gtk.Button("Browse...")
-        browseButton.connect("clicked", self.browseClicked, [site] + [dirPath])
-        hbox.pack_start(browseButton, False, False, 0)
-        browseButton.show()
+    #Create the site line given required info and setup callbacks
+    #enabling and disabling sites from this interface not possible
+    #expects a box to layout the line horizontally
+    def createSiteLine(self, hbox, site, iconpath, hhpath, filter_name, active = True):
+        label = gtk.Label(site + " auto-import:")
+        hbox.pack_start(label, False, False, 0)
+        label.show()
 
-        label = gtk.Label(site + " filter:")
-        hbox.pack_start(label, False, False, 0)
-        label.show()
+        dirPath=gtk.Entry()
+        dirPath.set_text(hhpath)
+        hbox.pack_start(dirPath, False, True, 0)
+        dirPath.show()
 
-        filter=gtk.Entry()
-        filter.set_text(filter_name)
-        hbox.pack_start(filter, False, True, 0)
-        filter.show()
+        browseButton=gtk.Button("Browse...")
+        browseButton.connect("clicked", self.browseClicked, [site] + [dirPath])
+        hbox.pack_start(browseButton, False, False, 0)
+        browseButton.show()
 
-    def addSites(self, vbox):
-        for site in self.config.supported_sites.keys():
-            pathHBox = gtk.HBox(False, 0)
-            vbox.pack_start(pathHBox, False, True, 0)
-            pathHBox.show()
+        label = gtk.Label(site + " filter:")
+        hbox.pack_start(label, False, False, 0)
+        label.show()
 
-            paths = self.config.get_default_paths(site)
-            params = self.config.get_site_parameters(site)
-            self.createSiteLine(pathHBox, site, False, paths['hud-defaultPath'], params['converter'], params['enabled'])
-            self.input_settings[site] = [paths['hud-defaultPath']] + [params['converter']]
+        filter=gtk.Entry()
+        filter.set_text(filter_name)
+        hbox.pack_start(filter, False, True, 0)
+        filter.show()
+
+    def addSites(self, vbox):
+        for site in self.config.supported_sites.keys():
+            pathHBox = gtk.HBox(False, 0)
+            vbox.pack_start(pathHBox, False, True, 0)
+            pathHBox.show()
+
+            paths = self.config.get_default_paths(site)
+            params = self.config.get_site_parameters(site)
+            self.createSiteLine(pathHBox, site, False, paths['hud-defaultPath'], params['converter'], params['enabled'])
+            self.input_settings[site] = [paths['hud-defaultPath']] + [params['converter']]
 
 if __name__== "__main__":
     def destroy(*args):             # call back for terminating the main eventloop
diff --git a/pyfpdb/GuiPlayerStats.py b/pyfpdb/GuiPlayerStats.py
index 8220e204..5a53813f 100644
--- a/pyfpdb/GuiPlayerStats.py
+++ b/pyfpdb/GuiPlayerStats.py
@@ -60,7 +60,7 @@ class GuiPlayerStats (threading.Thread):
         vbox.add(self.stats_table)
 
         # Create header row
-        titles = ("Game", "Hands", "VPIP", "PFR", "saw_f", "sawsd", "wtsdwsf", "wmsd", "FlAFq", "TuAFq", "RvAFq", "PFAFq", "Net($)", "BB/100", "$/hand", "Variance")
+        titles = ("Game", "Hands", "VPIP", "PFR", "Saw_F", "SawSD", "WtSDwsF", "W$SD", "FlAFq", "TuAFq", "RvAFq", "PoFAFq", "Net($)", "BB/100", "$/hand", "Variance")
 
         col = 0
         row = 0
@@ -71,14 +71,17 @@ class GuiPlayerStats (threading.Thread):
             col +=1
 
         for row in range(rows-1):
+            if(row%2 == 0):
+                bgcolor = "white"
+            else:
+                bgcolor = "lightgrey"
             for col in range(cols):
-                if(row%2 == 0):
-                    bgcolor = "white"
-                else:
-                    bgcolor = "lightgrey"
                 eb = gtk.EventBox()
                 eb.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(bgcolor))
-                l = gtk.Label(result[row][col])
+                if result[row][col]:
+                    l = gtk.Label(result[row][col])
+                else:
+                    l = gtk.Label(' ')
                 if col == 0:
                     l.set_alignment(xalign=0.0, yalign=0.5)
                 else:
@@ -127,7 +130,6 @@ class GuiPlayerStats (threading.Thread):
 
     def __set_hero_name(self, w, site):
         self.heroes[site] = w.get_text()
-        print "DEBUG: settings heroes[%s]: %s"%(site, self.heroes[site])
 
     def __init__(self, db, config, querylist, debug=True):
         self.debug=debug
diff --git a/pyfpdb/Hand.py b/pyfpdb/Hand.py
index 44914048..d9af1a42 100644
--- a/pyfpdb/Hand.py
+++ b/pyfpdb/Hand.py
@@ -87,6 +87,8 @@ class Hand:
 
         self.action = []
         self.totalpot = None
+        self.totalcollected = None
+        self.rake = None
 
         self.bets = {}
@@ -109,6 +111,17 @@ If a player has None chips he won't be added."""
             self.bets[street][name] = []
 
+    def addStreets(self, match):
+        # go through m and initialise actions to empty list for each street.
+        if match is not None:
+            self.streets = match
+            for street in match.groupdict():
+                if match.group(street) is not None:
+                    self.actions[street] = []
+
+        else:
+            print "empty markStreets match" # better to raise exception and put process hand in a try block
+
     def addHoleCards(self, cards, player):
         """\
 Assigns observed holecards to a player.
@@ -161,11 +174,16 @@ For when a player shows cards for any reason (for showdown or out of choice).
             c = c.replace(k,v)
         return c
 
-    def addBlind(self, player, amount):
+    def addBlind(self, player, blindtype, amount):
         # if player is None, it's a missing small blind.
         if player is not None:
             self.bets['PREFLOP'][player].append(Decimal(amount))
-            self.lastBet['PREFLOP'] = Decimal(amount)
+            self.actions['PREFLOP'] += [(player, 'posts', blindtype, amount)]
+            if blindtype == 'big blind':
+                self.lastBet['PREFLOP'] = Decimal(amount)
+            elif blindtype == 'small & big blinds':
+                # extra small blind is 'dead'
+                self.lastBet['PREFLOP'] = Decimal(self.bb)
         self.posted += [player]
 
@@ -175,7 +193,7 @@ For when a player shows cards for any reason (for showdown or out of choice).
         if amount is not None:
             self.bets[street][player].append(Decimal(amount))
             #self.lastBet[street] = Decimal(amount)
-            self.actions[street] += [[player, 'calls', amount]]
+            self.actions[street] += [(player, 'calls', amount)]
 
     def addRaiseTo(self, street, player, amountTo):
         """\
@@ -192,21 +210,22 @@ Add a raise on [street] by [player] to [amountTo]
         self.lastBet[street] = Decimal(amountTo)
         amountBy = Decimal(amountTo) - amountToCall
         self.bets[street][player].append(amountBy+amountToCall)
-        self.actions[street] += [[player, 'raises', amountBy, amountTo]]
+        self.actions[street] += [(player, 'raises', amountBy, amountTo, amountToCall)]
 
     def addBet(self, street, player, amount):
         self.checkPlayerExists(player)
         self.bets[street][player].append(Decimal(amount))
-        self.actions[street] += [[player, 'bets', amount]]
+        self.actions[street] += [(player, 'bets', amount)]
+        self.lastBet[street] = Decimal(amount)
 
     def addFold(self, street, player):
         self.checkPlayerExists(player)
         self.folded.add(player)
-        self.actions[street] += [[player, 'folds']]
+        self.actions[street] += [(player, 'folds')]
 
     def addCheck(self, street, player):
         self.checkPlayerExists(player)
-        self.actions[street] += [[player, 'checks']]
+        self.actions[street] += [(player, 'checks')]
 
     def addCollectPot(self,player, pot):
         self.checkPlayerExists(player)
@@ -231,6 +250,51 @@ Known bug: doesn't take into account side pots"""
                     #print street, self.bets[street][player]
                     self.totalpot += reduce(operator.add, self.bets[street][player], 0)
 
+        print "conventional totalpot:", self.totalpot
+        self.totalpot = 0
+
+        print self.actions
+        for street in self.actions:
+            uncalled = 0
+            calls = [0]
+            for act in self.actions[street]:
+                if act[1] == 'bets': # [name, 'bets', amount]
+                    self.totalpot += Decimal(act[2])
+                    uncalled = Decimal(act[2]) # only the last bet or raise can be uncalled
+                    calls = [0]
+                    print "uncalled: ", uncalled
+                elif act[1] == 'raises': # [name, 'raises', amountby, amountto, amountcalled]
+                    print "calls %s and raises %s to %s" % (act[4],act[2],act[3])
+                    self.totalpot += Decimal(act[2]) + Decimal(act[4])
+                    calls = [0]
+                    uncalled = Decimal(act[2])
+                    print "uncalled: ", uncalled
+                elif act[1] == 'calls': # [name, 'calls', amount]
+                    self.totalpot += Decimal(act[2])
+                    calls = calls + [Decimal(act[2])]
+                    print "calls:", calls
+                if act[1] == ('posts'):
+                    self.totalpot += Decimal(act[3])
+                    uncalled = Decimal(act[3])
+            if uncalled > 0 and max(calls+[0]) < uncalled:
+
+                print "returning some bet, calls:", calls
+                print "returned: %.2f from %.2f" % ((uncalled - max(calls)), self.totalpot,)
+                self.totalpot -= (uncalled - max(calls))
+                print "new totalpot:", self.totalpot
+
+        if self.totalcollected is None:
+            self.totalcollected = 0;
+            for amount in self.collected.values():
+                self.totalcollected += Decimal(amount)
+
+        # TODO: Some sites (Everleaf) don't record uncalled bets. Figure out if a bet is uncalled and subtract it from self.totalcollected.
+        # remember that portions of bets may be uncalled, so:
+        #   bet followed by no call is an uncalled bet
+        #   bet x followed by call y where y < x has x-y uncalled (and second player all in)
+
+
 
     def getGameTypeAsString(self):
         """\
 Map the tuple self.gametype onto the pokerstars string describing it
@@ -309,7 +373,8 @@ Map the tuple self.gametype onto the pokerstars string describing it
             print "what do they show"
 
         print "*** SUMMARY ***"
-        print "Total pot $%s | Rake $%.2f)" % (self.totalpot, self.rake) # TODO side pots
+        print "Total pot $%s | Rake $%.2f" % (self.totalcollected, self.rake) # TODO: side pots
+
         board = []
         for s in self.board.values():
             board += s
diff --git a/pyfpdb/HandHistoryConverter.py b/pyfpdb/HandHistoryConverter.py
index e941af33..68d00e96 100644
--- a/pyfpdb/HandHistoryConverter.py
+++ b/pyfpdb/HandHistoryConverter.py
@@ -161,7 +161,8 @@ class HandHistoryConverter:
     def readPlayerStacks(self, hand): abstract
 
     # Needs to return a MatchObject with group names identifying the streets into the Hand object
-    # that is, pulls the chunks of preflop, flop, turn and river text into hand.streets MatchObject.
+    # so groups are called by street names 'PREFLOP', 'FLOP', 'STREET2' etc
+    # blinds are done separately
     def markStreets(self, hand): abstract
 
     #Needs to return a list in the format
@@ -173,11 +174,13 @@ class HandHistoryConverter:
     def readCollectPot(self, hand): abstract
 
     # Some sites don't report the rake. This will be called at the end of the hand after the pot total has been calculated
-    # so that an inheriting class can calculate it for the specific site if need be.
-    def getRake(self, hand): abstract
+    # an inheriting class can calculate it for the specific site if need be.
+    def getRake(self, hand):
+        hand.rake = hand.totalpot - hand.totalcollected #  * Decimal('0.05') # probably not quite right
+
     def sanityCheck(self):
-        sane = True
+        sane = False
         base_w = False
         #Check if hhbase exists and is writable
         #Note: Will not try to create the base HH directory
diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py
index e1c61c6d..e51538ee 100644
--- a/pyfpdb/fpdb_db.py
+++ b/pyfpdb/fpdb_db.py
@@ -108,9 +108,9 @@ class fpdb_db:
         self.cursor.execute(self.sql.query['createHandsPlayersTable'])
         self.cursor.execute(self.sql.query['createHandsActionsTable'])
         self.cursor.execute(self.sql.query['createHudCacheTable'])
-        self.cursor.execute(self.sql.query['addTourneyIndex'])
-        self.cursor.execute(self.sql.query['addHandsIndex'])
-        self.cursor.execute(self.sql.query['addPlayersIndex'])
+        #self.cursor.execute(self.sql.query['addTourneyIndex'])
+        #self.cursor.execute(self.sql.query['addHandsIndex'])
+        #self.cursor.execute(self.sql.query['addPlayersIndex'])
         self.fillDefaultData()
         self.db.commit()
     #end def disconnect
@@ -185,6 +185,7 @@ class fpdb_db:
 
         self.drop_tables()
         self.create_tables()
+        fpdb_simple.createAllIndexes(self)
         self.db.commit()
         print "Finished recreating tables"
     #end def recreate_tables
diff --git a/pyfpdb/fpdb_import.py b/pyfpdb/fpdb_import.py
index 306c0880..2d35a97a 100644
--- a/pyfpdb/fpdb_import.py
+++ b/pyfpdb/fpdb_import.py
@@ -116,8 +116,11 @@ class Importer:
 
     #Run full import on filelist
     def runImport(self):
+        fpdb_simple.prepareBulkImport(self.fdb)
         for file in self.filelist:
             self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
+        fpdb_simple.afterBulkImport(self.fdb)
+        fpdb_simple.analyzeDB(self.fdb)
 
     #Run import on updated files, then store latest update time.
     def runUpdated(self):
diff --git a/pyfpdb/fpdb_save_to_db.py b/pyfpdb/fpdb_save_to_db.py
index 3a7989ca..c8d5b208 100644
--- a/pyfpdb/fpdb_save_to_db.py
+++ b/pyfpdb/fpdb_save_to_db.py
@@ -22,6 +22,11 @@
 from time import time
 import fpdb_simple
 
+saveActions=True # set this to False to avoid storing action data
+                 # Pros: speeds up imports
+                 # Cons: no action data is saved, so you need to keep the hand histories
+                 #       variance not available on stats page
+
 #stores a stud/razz hand into the database
 def ring_stud(backend, db, cursor, base, category, site_hand_no, gametype_id, hand_start_time
              ,names, player_ids, start_cashes, antes, card_values, card_suits, winnings, rakes
@@ -39,8 +44,9 @@ def ring_stud(backend, db, cursor, base, category, site_hand_no, gametype_id, ha
 
     fpdb_simple.storeHudCache(cursor, base, category, gametype_id, player_ids, hudImportData)
 
-    fpdb_simple.storeActions(cursor, hands_players_ids, action_types
-                            ,allIns, action_amounts, actionNos)
+    if saveActions:
+        fpdb_simple.storeActions(cursor, hands_players_ids, action_types
+                                ,allIns, action_amounts, actionNos)
     return hands_id
 #end def ring_stud
 
@@ -66,10 +72,10 @@ def ring_holdem_omaha(backend, db, cursor, base, category, site_hand_no, gametyp
     t5 = time()
     fpdb_simple.store_board_cards(cursor, hands_id, board_values, board_suits)
     t6 = time()
-    fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
+    if saveActions:
+        fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
     t7 = time()
-    print "cards=%4.3f board=%4.3f hands=%4.3f plyrs=%4.3f hudcache=%4.3f board=%4.3f actions=%4.3f" \
-        % (t1-t0, t2-t1, t3-t2, t4-t3, t5-t4, t6-t5, t7-t6)
+    #print "fills=(%4.3f) saves=(%4.3f,%4.3f,%4.3f,%4.3f)" % (t2-t0, t3-t2, t4-t3, t5-t4, t6-t5)
     return hands_id
 #end def ring_holdem_omaha
 
@@ -98,7 +104,8 @@ def tourney_holdem_omaha(backend, db, cursor, base, category, siteTourneyNo, buy
 
     fpdb_simple.store_board_cards(cursor, hands_id, board_values, board_suits)
 
-    fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
+    if saveActions:
+        fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
     return hands_id
 #end def tourney_holdem_omaha
 
@@ -122,6 +129,7 @@ def tourney_stud(backend, db, cursor, base, category, siteTourneyNo, buyin, fee,
 
     fpdb_simple.storeHudCache(cursor, base, category, gametypeId, playerIds, hudImportData)
 
-    fpdb_simple.storeActions(cursor, hands_players_ids, actionTypes, allIns, actionAmounts, actionNos)
+    if saveActions:
+        fpdb_simple.storeActions(cursor, hands_players_ids, actionTypes, allIns, actionAmounts, actionNos)
     return hands_id
 #end def tourney_stud
diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py
index 6e70b699..625ce642 100644
--- a/pyfpdb/fpdb_simple.py
+++ b/pyfpdb/fpdb_simple.py
@@ -27,6 +27,307 @@
 MYSQL_INNODB=2
 PGSQL=3
 SQLITE=4
 
+# Data Structures for index and foreign key creation
+# drop_code is an int with possible values:  0 - don't drop for bulk import
+#                                            1 - drop during bulk import
+# db differences:
+# - note that mysql automatically creates indexes on constrained columns when
+#   foreign keys are created, while postgres does not. Hence the much longer list
+#   of indexes is required for postgres.
+# all primary keys are left on all the time
+#
+#                 table     column           drop_code
+
+indexes = [
+      [ ] # no db with index 0
+    , [ ] # no db with index 1
+    , [ # indexes for mysql (list index 2)
+        {'tab':'Players',  'col':'name', 'drop':0}
+      , {'tab':'Hands',    'col':'siteHandNo', 'drop':0}
+      , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
+      ]
+    , [ # indexes for postgres (list index 3)
+        {'tab':'Boardcards',      'col':'handId', 'drop':0}
+      , {'tab':'Gametypes',       'col':'siteId', 'drop':0}
+      , {'tab':'Hands',           'col':'gametypeId', 'drop':1}
+      , {'tab':'Hands',           'col':'siteHandNo', 'drop':0}
+      , {'tab':'HandsActions',    'col':'handplayerId', 'drop':0}
+      , {'tab':'HandsPlayers',    'col':'handId', 'drop':1}
+      , {'tab':'HandsPlayers',    'col':'playerId', 'drop':1}
+      , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
+      , {'tab':'HudCache',        'col':'gametypeId', 'drop':1}
+      , {'tab':'HudCache',        'col':'playerId', 'drop':0}
+      , {'tab':'HudCache',        'col':'tourneyTypeId', 'drop':0}
+      , {'tab':'Players',         'col':'siteId', 'drop':1}
+      , {'tab':'Players',         'col':'name', 'drop':0}
+      , {'tab':'Tourneys',        'col':'tourneyTypeId', 'drop':1}
+      , {'tab':'Tourneys',        'col':'siteTourneyNo', 'drop':0}
+      , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
+      , {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0}
+      , {'tab':'TourneyTypes',    'col':'siteId', 'drop':0}
+      ]
+    ]
+
+foreignKeys = [
+      [ ] # no db with index 0
+    , [ ] # no db with index 1
+    , [ # foreign keys for mysql
+        {'fktab':'Hands',        'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+      , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',         'rcol':'id', 'drop':1}
+      , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':1}
+      , {'fktab':'HandsActions', 'fkcol':'handPlayerId',  'rtab':'HandsPlayers',  'rcol':'id', 'drop':1}
+      , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+      , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':0}
+      , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes',  'rcol':'id', 'drop':1}
+      ]
+    , [ # foreign keys for postgres
+        {'fktab':'Hands',        'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+      , {'fktab':'HandsPlayers', 'fkcol':'handId',        'rtab':'Hands',         'rcol':'id', 'drop':1}
+      , {'fktab':'HandsPlayers', 'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':1}
+      , {'fktab':'HandsActions', 'fkcol':'handPlayerId',  'rtab':'HandsPlayers',  'rcol':'id', 'drop':1}
+      , {'fktab':'HudCache',     'fkcol':'gametypeId',    'rtab':'Gametypes',     'rcol':'id', 'drop':1}
+      , {'fktab':'HudCache',     'fkcol':'playerId',      'rtab':'Players',       'rcol':'id', 'drop':0}
+      , {'fktab':'HudCache',     'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes',  'rcol':'id', 'drop':1}
+      ]
+    ]
+
+
+# MySQL Notes:
+#    "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
+#                                                - creates index handId on <thistable>.handId
+# alter table t drop foreign key fk
+# alter table t add foreign key (fkcol) references tab(rcol)
+# alter table t add constraint c foreign key (fkcol) references tab(rcol)
+# (fkcol is used for foreign key name)
+
+# mysql to list indexes:
+#   SELECT table_name, index_name, non_unique, column_name
+#   FROM INFORMATION_SCHEMA.STATISTICS
+#     WHERE table_name = 'tbl_name'
+#     AND table_schema = 'db_name'
+#   ORDER BY table_name, index_name, seq_in_index
+#
+# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
+# ALTER TABLE tab DROP INDEX idx
+
+# mysql to list fks:
+#   SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
+#   FROM information_schema.KEY_COLUMN_USAGE
+#   WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
+#   AND REFERENCED_TABLE_NAME is not null
+#   ORDER BY TABLE_NAME, COLUMN_NAME;
+
+# this may indicate missing object
+# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
+
+
+# PG notes:
+
+#  To add a foreign key constraint to a table:
+#  ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
+#  ALTER TABLE tab DROP CONSTRAINT zipchk
+#
+#  Note: index names must be unique across a schema
+#  CREATE INDEX idx ON tab(col)
+#  DROP INDEX idx
+
+def prepareBulkImport(fdb):
+    """Drop some indexes/foreign keys to prepare for bulk import.
+       Currently keeping the standalone indexes as needed to import quickly"""
+    # fdb is a fpdb_db object including backend, db, cursor, sql variables
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(0)   # allow table/index operations to work
+    for fk in foreignKeys[fdb.backend]:
+        if fk['drop'] == 1:
+            if fdb.backend == MYSQL_INNODB:
+                fdb.cursor.execute("SELECT constraint_name " +
+                                   "FROM information_schema.KEY_COLUMN_USAGE " +
+                                   #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+                                   "WHERE 1=1 " +
+                                   "AND table_name = %s AND column_name = %s " +
+                                   "AND referenced_table_name = %s " +
+                                   "AND referenced_column_name = %s ",
+                                   (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+                cons = fdb.cursor.fetchone()
+                print "preparebulk: cons=", cons
+                if cons:
+                    print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
+                    try:
+                        fdb.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
+                    except:
+                        pass
+            elif fdb.backend == PGSQL:
+                print "dropping pg fk", fk['fktab'], fk['fkcol']
+                try:
+                    fdb.cursor.execute("alter table " + fk['fktab'] + " drop constraint "
+                                       + fk['fktab'] + '_' + fk['fkcol'] + '_fkey')
+                except:
+                    pass
+            else:
+                print "Only MySQL and Postgres supported so far"
+                return -1
+
+    for idx in indexes[fdb.backend]:
+        if idx['drop'] == 1:
+            if fdb.backend == MYSQL_INNODB:
+                print "dropping mysql index ", idx['tab'], idx['col']
+                try:
+                    fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
+                except:
+                    pass
+            elif fdb.backend == PGSQL:
+                print "dropping pg index ", idx['tab'], idx['col']
+                # mod to use tab_col for index name?
+                try:
+                    fdb.cursor.execute( "drop index %s_%s_idx" % (idx['tab'],idx['col']) )
+                except:
+                    pass
+            else:
+                print "Only MySQL and Postgres supported so far"
+                return -1
+
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(1)   # go back to normal isolation level
+    fdb.db.commit() # seems to clear up errors if there were any in postgres
+#end def prepareBulkImport
+
+def afterBulkImport(fdb):
+    """Re-create any dropped indexes/foreign keys after bulk import"""
+    # fdb is a fpdb_db object including backend, db, cursor, sql variables
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(0)   # allow table/index operations to work
+    for fk in foreignKeys[fdb.backend]:
+        if fk['drop'] == 1:
+            if fdb.backend == MYSQL_INNODB:
+                fdb.cursor.execute("SELECT constraint_name " +
+                                   "FROM information_schema.KEY_COLUMN_USAGE " +
+                                   #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
+                                   "WHERE 1=1 " +
+                                   "AND table_name = %s AND column_name = %s " +
+                                   "AND referenced_table_name = %s " +
+                                   "AND referenced_column_name = %s ",
+                                   (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
+                cons = fdb.cursor.fetchone()
+                print "afterbulk: cons=", cons
+                if cons:
+                    pass
+                else:
+                    print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+                    try:
+                        fdb.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
+                                           + fk['fkcol'] + ") references " + fk['rtab'] + "("
+                                           + fk['rcol'] + ")")
+                    except:
+                        pass
+            elif fdb.backend == PGSQL:
+                print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
+                try:
+                    fdb.cursor.execute("alter table " + fk['fktab'] + " add constraint "
+                                       + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
+                                       + " foreign key (" + fk['fkcol']
+                                       + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
+                except:
+                    pass
+            else:
+                print "Only MySQL and Postgres supported so far"
+                return -1
+
+    for idx in indexes[fdb.backend]:
+        if idx['drop'] == 1:
+            if fdb.backend == MYSQL_INNODB:
+                print "creating mysql index ", idx['tab'], idx['col']
+                try:
+                    fdb.cursor.execute( "alter table %s add index %s(%s)"
+                                      , (idx['tab'],idx['col'],idx['col']) )
+                except:
+                    pass
+            elif fdb.backend == PGSQL:
+                # mod to use tab_col for index name?
+                print "creating pg index ", idx['tab'], idx['col']
+                try:
+                    print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+                    fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
+                                      % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+                except:
+                    print "   ERROR! :-("
+                    pass
+            else:
+                print "Only MySQL and Postgres supported so far"
+                return -1
+
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(1)   # go back to normal isolation level
+    fdb.db.commit()   # seems to clear up errors if there were any in postgres
+#end def afterBulkImport
+
+def createAllIndexes(fdb):
+    """Create new indexes"""
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(0)   # allow table/index operations to work
+    for idx in indexes[fdb.backend]:
+        if fdb.backend == MYSQL_INNODB:
+            print "creating mysql index ", idx['tab'], idx['col']
+            try:
+                fdb.cursor.execute( "alter table %s add index %s(%s)"
+                                  , (idx['tab'],idx['col'],idx['col']) )
+            except:
+                pass
+        elif fdb.backend == PGSQL:
+            # mod to use tab_col for index name?
+            print "creating pg index ", idx['tab'], idx['col']
+            try:
+                print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
+                fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
+                                  % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
+            except:
+                print "   ERROR! :-("
+                pass
+        else:
+            print "Only MySQL and Postgres supported so far"
+            return -1
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(1)   # go back to normal isolation level
+#end def createAllIndexes
+
+def dropAllIndexes(fdb):
+    """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
+       using list of indexes in indexes data structure"""
+    # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(0)   # allow table/index operations to work
+    for idx in indexes[fdb.backend]:
+        if fdb.backend == MYSQL_INNODB:
+            print "dropping mysql index ", idx['tab'], idx['col']
+            try:
+                fdb.cursor.execute( "alter table %s drop index %s"
+                                  , (idx['tab'],idx['col']) )
+            except:
+                pass
+        elif fdb.backend == PGSQL:
+            print "dropping pg index ", idx['tab'], idx['col']
+            # mod to use tab_col for index name?
+            try:
+                fdb.cursor.execute( "drop index %s_%s_idx"
+                                  % (idx['tab'],idx['col']) )
+            except:
+                pass
+        else:
+            print "Only MySQL and Postgres supported so far"
+            return -1
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(1)   # go back to normal isolation level
+#end def dropAllIndexes
+
+def analyzeDB(fdb):
+    """Do whatever the DB can offer to update index/table statistics"""
+    if fdb.backend == PGSQL:
+        fdb.db.set_isolation_level(0)   # allow vacuum to work
+        try:
+            fdb.cursor.execute("vacuum analyze")
+        except:
+            print "Error during vacuum"
+        fdb.db.set_isolation_level(1)   # go back to normal isolation level
+#end def analyzeDB
 
 class DuplicateError(Exception):
     def __init__(self, value):
@@ -39,7 +340,7 @@ class FpdbError(Exception):
         self.value = value
     def __str__(self):
         return repr(self.value)
-    
+
 # gets value for last auto-increment key generated
 # returns -1 if a problem occurs
 def getLastInsertId(backend, conn, cursor):
@@ -90,7 +391,7 @@ def checkPositions(positions):
             pass
 
     ### RHH modified to allow for "position 9" here (pos==9 is when you're a dead hand before the BB
-    if (pos!="B" and pos!="S" and pos!=0 and pos!=1 and pos!=2 and pos!=3 and pos!=4 and pos!=5 and pos!=6 and pos!=7 and pos != 8 and pos!=9):
+    if (pos!="B" and pos!="S" and pos!=0 and pos!=1 and pos!=2 and pos!=3 and pos!=4 and pos!=5 and pos!=6 and pos!=7 and pos!=9):
         raise FpdbError("invalid position found in checkPositions. i: "+str(i)+" position: "+str(pos))
 #end def fpdb_simple.checkPositions
 
@@ -2224,4 +2525,4 @@ def store_tourneys_players(cursor, tourney_id, player_ids, payin_amounts, ranks,
             #print "created new tourneys_players.id:",tmp
         result.append(tmp[0])
     return result
-#end def store_tourneys_players
+#end def store_tourneys_players
\ No newline at end of file