merge from carl
commit eec9c94342
@@ -75,6 +75,7 @@ class Everleaf(HandHistoryConverter):
self.rexx.setPlayerInfoRegex('Seat (?P<SEAT>[0-9]+): (?P<PNAME>.*) \(\s+(\$ (?P<CASH>[.0-9]+) USD|new player|All-in) \)')
self.rexx.setPostSbRegex('.*\n(?P<PNAME>.*): posts small blind \[\$? (?P<SB>[.0-9]+)')
self.rexx.setPostBbRegex('.*\n(?P<PNAME>.*): posts big blind \[\$? (?P<BB>[.0-9]+)')
self.rexx.setPostBothRegex('.*\n(?P<PNAME>.*): posts small \& big blinds \[\$? (?P<SBBB>[.0-9]+)')
# mct : what about posting small & big blinds simultaneously?
self.rexx.setHeroCardsRegex('.*\nDealt\sto\s(?P<PNAME>.*)\s\[ (?P<HOLE1>\S\S), (?P<HOLE2>\S\S) \]')
self.rexx.setActionStepRegex('.*\n(?P<PNAME>.*)(?P<ATYPE>: bets| checks| raises| calls| folds)(\s\[\$ (?P<BET>[.\d]+) USD\])?')

@@ -131,7 +132,8 @@ class Everleaf(HandHistoryConverter):
r"(\*\* Dealing Turn \*\* \[ \S\S \](?P<TURN>.+(?=\*\* Dealing River \*\*)|.+))?"
r"(\*\* Dealing River \*\* \[ \S\S \](?P<RIVER>.+))?", hand.string,re.DOTALL)

hand.streets = m
hand.addStreets(m)

def readCommunityCards(self, hand):
# currently regex in wrong place pls fix my brain's fried

@@ -146,15 +148,13 @@ class Everleaf(HandHistoryConverter):
def readBlinds(self, hand):
try:
m = self.rexx.small_blind_re.search(hand.string)
hand.addBlind(m.group('PNAME'), m.group('SB'))
#hand.posted = [m.group('PNAME')]
except:
hand.addBlind(None, 0)
#hand.posted = ["FpdbNBP"]
m = self.rexx.big_blind_re.finditer(hand.string)
for a in m:
hand.addBlind(a.group('PNAME'), a.group('BB'))
#hand.posted = hand.posted + [a.group('PNAME')]
hand.addBlind(m.group('PNAME'), 'small blind', m.group('SB'))
except: # no small blind
hand.addBlind(None, None, None)
for a in self.rexx.big_blind_re.finditer(hand.string):
hand.addBlind(a.group('PNAME'), 'big blind', a.group('BB'))
for a in self.rexx.both_blinds_re.finditer(hand.string):
hand.addBlind(a.group('PNAME'), 'small & big blinds', a.group('SBBB'))

def readHeroCards(self, hand):
m = self.rexx.hero_cards_re.search(hand.string)

@@ -167,7 +167,6 @@ class Everleaf(HandHistoryConverter):

def readAction(self, hand, street):
m = self.rexx.action_re.finditer(hand.streets.group(street))
hand.actions[street] = []
for action in m:
if action.group('ATYPE') == ' raises':
hand.addRaiseTo( street, action.group('PNAME'), action.group('BET') )

@@ -182,6 +181,7 @@ class Everleaf(HandHistoryConverter):
else:
print "DEBUG: unimplemented readAction: %s %s" %(action.group('PNAME'),action.group('ATYPE'),)
#hand.actions[street] += [[action.group('PNAME'), action.group('ATYPE')]]
# TODO: Everleaf does not record uncalled bets.

def readShowdownActions(self, hand):

@@ -193,18 +193,13 @@ class Everleaf(HandHistoryConverter):
hand.addShownCards(cards, shows.group('PNAME'))

def readCollectPot(self,hand):
m = self.rexx.collect_pot_re.search(hand.string)
if m is not None:
for m in self.rexx.collect_pot_re.finditer(hand.string):
if m.group('HAND') is not None:
re_card = re.compile('(?P<CARD>[0-9tjqka][schd])') # copied from earlier
cards = set([hand.card(card.group('CARD')) for card in re_card.finditer(m.group('HAND'))])
hand.addShownCards(cards=None, player=m.group('PNAME'), holeandboard=cards)
hand.addCollectPot(player=m.group('PNAME'),pot=m.group('POT'))
else:
print "WARNING: Unusual, no one collected; can happen if it's folded to big blind with a dead small blind."

def getRake(self, hand):
hand.rake = hand.totalpot * Decimal('0.05') # probably not quite right

if __name__ == "__main__":
c = Configuration.Config()
@@ -694,7 +694,7 @@ class FpdbSQLQueries:
AS BBlPer100
,hprof2.profitperhand AS Profitperhand
*/
,hprof2.variance as Variance
,format(hprof2.variance,2) AS Variance
FROM
(select /* stats from hudcache */
gt.base

@@ -793,7 +793,7 @@ class FpdbSQLQueries:
AS BBper100
,hprof2.profitperhand AS Profitperhand
*/
,hprof2.variance as Variance
,round(hprof2.variance,2) AS Variance
FROM
(select gt.base
,gt.category

@@ -825,10 +825,10 @@ class FpdbSQLQueries:
else to_char(100.0*(sum(street1Aggr)+sum(street2Aggr)+sum(street3Aggr))
/(sum(street1Seen)+sum(street2Seen)+sum(street3Seen)),'90D0')
end AS PoFAFq
,to_char(sum(totalProfit)/100.0,'9G999G990D00') AS Net
,round(sum(totalProfit)/100.0,2) AS Net
,to_char((sum(totalProfit)/(gt.bigBlind+0.0)) / (sum(HDs)/100.0), '990D00')
AS BBper100
,to_char(sum(totalProfit) / (sum(HDs)+0.0), '990D0000') AS Profitperhand
,to_char(sum(totalProfit/100.0) / (sum(HDs)+0.0), '990D0000') AS Profitperhand
from Gametypes gt
inner join Sites s on s.Id = gt.siteId
inner join HudCache hc on hc.gameTypeId = gt.Id
@@ -40,6 +40,7 @@ class GuiAutoImport (threading.Thread):
print imp

self.input_settings = {}
self.pipe_to_hud = None

self.importer = fpdb_import.Importer(self,self.settings, self.config)
self.importer.setCallHud(True)

@@ -72,7 +73,8 @@ class GuiAutoImport (threading.Thread):

self.addSites(self.mainVBox)

self.startButton=gtk.Button("Start Autoimport")
self.doAutoImportBool = False
self.startButton=gtk.ToggleButton("Start Autoimport")
self.startButton.connect("clicked", self.startClicked, "start clicked")
self.mainVBox.add(self.startButton)
self.startButton.show()

@@ -104,7 +106,7 @@ class GuiAutoImport (threading.Thread):
"""Callback for timer to do an import iteration."""
self.importer.runUpdated()
print "GuiAutoImport.import_dir done"
return True
return self.doAutoImportBool

def startClicked(self, widget, data):
"""runs when user clicks start on auto import tab"""

@@ -118,9 +120,10 @@ class GuiAutoImport (threading.Thread):
# That is not correct. It should open another dir for importing while piping the
# results to the same pipe. This means that self.path should be a a list of dirs
# to watch.
try: #uhhh, I don't this this is the best way to check for the existence of an attr
getattr(self, "pipe_to_hud")
except AttributeError:
if widget.get_active(): # toggled on
self.doAutoImportBool = True
widget.set_label(u'Stop Autoimport')
if self.pipe_to_hud is None:
if os.name == 'nt':
command = "python HUD_main.py" + " %s" % (self.database)
bs = 0 # windows is not happy with line buffing here

@@ -145,6 +148,13 @@ class GuiAutoImport (threading.Thread):

interval=int(self.intervalEntry.get_text())
gobject.timeout_add(interval*1000, self.do_import)
else: # toggled off
self.doAutoImportBool = False # do_import will return this and stop the gobject callback timer
#TODO: other clean up, such as killing HUD
print "Stopping autoimport"
self.pipe_to_hud.communicate('\n') # waits for process to terminate
self.pipe_to_hud = None
widget.set_label(u'Start Autoimport')
#end def GuiAutoImport.startClicked

def get_vbox(self):
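Note on the GuiAutoImport hunks above: gobject.timeout_add keeps invoking its callback for as long as the callback returns True, so having do_import return self.doAutoImportBool is what actually stops the polling once the toggle button is switched off. A minimal sketch of that pattern, assuming pygtk's gobject module is available (the Poller class and its method names are illustrative, not part of the patch):

    import gobject

    class Poller:
        def __init__(self):
            self.running = False

        def start(self, interval_secs):
            self.running = True
            gobject.timeout_add(interval_secs * 1000, self.tick)

        def stop(self):
            # tick() returns False on its next call, which removes the timeout source
            self.running = False

        def tick(self):
            print "one import pass"      # do the periodic work here
            return self.running          # True keeps the timer alive, False cancels it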
@@ -60,7 +60,7 @@ class GuiPlayerStats (threading.Thread):
vbox.add(self.stats_table)

# Create header row
titles = ("Game", "Hands", "VPIP", "PFR", "saw_f", "sawsd", "wtsdwsf", "wmsd", "FlAFq", "TuAFq", "RvAFq", "PFAFq", "Net($)", "BB/100", "$/hand", "Variance")
titles = ("Game", "Hands", "VPIP", "PFR", "Saw_F", "SawSD", "WtSDwsF", "W$SD", "FlAFq", "TuAFq", "RvAFq", "PoFAFq", "Net($)", "BB/100", "$/hand", "Variance")

col = 0
row = 0

@@ -71,14 +71,17 @@ class GuiPlayerStats (threading.Thread):
col +=1

for row in range(rows-1):
for col in range(cols):
if(row%2 == 0):
bgcolor = "white"
else:
bgcolor = "lightgrey"
for col in range(cols):
eb = gtk.EventBox()
eb.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(bgcolor))
if result[row][col]:
l = gtk.Label(result[row][col])
else:
l = gtk.Label(' ')
if col == 0:
l.set_alignment(xalign=0.0, yalign=0.5)
else:

@@ -127,7 +130,6 @@ class GuiPlayerStats (threading.Thread):

def __set_hero_name(self, w, site):
self.heroes[site] = w.get_text()
print "DEBUG: settings heroes[%s]: %s"%(site, self.heroes[site])

def __init__(self, db, config, querylist, debug=True):
self.debug=debug
@@ -87,6 +87,8 @@ class Hand:

self.action = []
self.totalpot = None
self.totalcollected = None

self.rake = None

self.bets = {}

@@ -109,6 +111,17 @@ If a player has None chips he won't be added."""
self.bets[street][name] = []

def addStreets(self, match):
# go through m and initialise actions to empty list for each street.
if match is not None:
self.streets = match
for street in match.groupdict():
if match.group(street) is not None:
self.actions[street] = []

else:
print "empty markStreets match" # better to raise exception and put process hand in a try block

def addHoleCards(self, cards, player):
"""\
Assigns observed holecards to a player.

@@ -161,11 +174,16 @@ For when a player shows cards for any reason (for showdown or out of choice).
c = c.replace(k,v)
return c

def addBlind(self, player, amount):
def addBlind(self, player, blindtype, amount):
# if player is None, it's a missing small blind.
if player is not None:
self.bets['PREFLOP'][player].append(Decimal(amount))
self.actions['PREFLOP'] += [(player, 'posts', blindtype, amount)]
if blindtype == 'big blind':
self.lastBet['PREFLOP'] = Decimal(amount)
elif blindtype == 'small & big blinds':
# extra small blind is 'dead'
self.lastBet['PREFLOP'] = Decimal(self.bb)
self.posted += [player]

@@ -175,7 +193,7 @@ For when a player shows cards for any reason (for showdown or out of choice).
if amount is not None:
self.bets[street][player].append(Decimal(amount))
#self.lastBet[street] = Decimal(amount)
self.actions[street] += [[player, 'calls', amount]]
self.actions[street] += [(player, 'calls', amount)]

def addRaiseTo(self, street, player, amountTo):
"""\

@@ -192,21 +210,22 @@ Add a raise on [street] by [player] to [amountTo]
self.lastBet[street] = Decimal(amountTo)
amountBy = Decimal(amountTo) - amountToCall
self.bets[street][player].append(amountBy+amountToCall)
self.actions[street] += [[player, 'raises', amountBy, amountTo]]
self.actions[street] += [(player, 'raises', amountBy, amountTo, amountToCall)]

def addBet(self, street, player, amount):
self.checkPlayerExists(player)
self.bets[street][player].append(Decimal(amount))
self.actions[street] += [[player, 'bets', amount]]
self.actions[street] += [(player, 'bets', amount)]
self.lastBet[street] = Decimal(amount)

def addFold(self, street, player):
self.checkPlayerExists(player)
self.folded.add(player)
self.actions[street] += [[player, 'folds']]
self.actions[street] += [(player, 'folds')]

def addCheck(self, street, player):
self.checkPlayerExists(player)
self.actions[street] += [[player, 'checks']]
self.actions[street] += [(player, 'checks')]

def addCollectPot(self,player, pot):
self.checkPlayerExists(player)
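With the Hand changes above, each entry in hand.actions[street] is now a tuple whose first two fields are the player and the verb, and blinds are recorded as 'posts' plus a blindtype. A purely illustrative snapshot (player names and amounts are made up; amounts are shown as plain strings for brevity, though addRaiseTo stores its "by" and "called" fields as Decimals):

    # hand.actions['PREFLOP'] after addBlind/addCall/addRaiseTo/addFold might look like:
    [ ('alice', 'posts', 'small blind', '0.05'),
      ('bob',   'posts', 'big blind',   '0.10'),
      ('carol', 'calls',  '0.10'),
      ('alice', 'raises', '0.25', '0.35', '0.10'),   # (amount by, raised to, amount called)
      ('bob',   'folds') ]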
@@ -231,6 +250,51 @@ Known bug: doesn't take into account side pots"""
#print street, self.bets[street][player]
self.totalpot += reduce(operator.add, self.bets[street][player], 0)

print "conventional totalpot:", self.totalpot
self.totalpot = 0

print self.actions
for street in self.actions:
uncalled = 0
calls = [0]
for act in self.actions[street]:
if act[1] == 'bets': # [name, 'bets', amount]
self.totalpot += Decimal(act[2])
uncalled = Decimal(act[2]) # only the last bet or raise can be uncalled
calls = [0]
print "uncalled: ", uncalled
elif act[1] == 'raises': # [name, 'raises', amountby, amountto, amountcalled]
print "calls %s and raises %s to %s" % (act[4],act[2],act[3])
self.totalpot += Decimal(act[2]) + Decimal(act[4])
calls = [0]
uncalled = Decimal(act[2])
print "uncalled: ", uncalled
elif act[1] == 'calls': # [name, 'calls', amount]
self.totalpot += Decimal(act[2])
calls = calls + [Decimal(act[2])]
print "calls:", calls
if act[1] == ('posts'):
self.totalpot += Decimal(act[3])
uncalled = Decimal(act[3])
if uncalled > 0 and max(calls+[0]) < uncalled:

print "returning some bet, calls:", calls
print "returned: %.2f from %.2f" % ((uncalled - max(calls)), self.totalpot,)
self.totalpot -= (uncalled - max(calls))
print "new totalpot:", self.totalpot

if self.totalcollected is None:
self.totalcollected = 0;
for amount in self.collected.values():
self.totalcollected += Decimal(amount)

# TODO: Some sites (Everleaf) don't record uncalled bets. Figure out if a bet is uncalled and subtract it from self.totalcollected.
# remember that portions of bets may be uncalled, so:
# bet followed by no call is an uncalled bet
# bet x followed by call y where y < x has x-y uncalled (and second player all in)

def getGameTypeAsString(self):
"""\
Map the tuple self.gametype onto the pokerstars string describing it

@@ -309,7 +373,8 @@ Map the tuple self.gametype onto the pokerstars string describing it
print "what do they show"

print "*** SUMMARY ***"
print "Total pot $%s | Rake $%.2f)" % (self.totalpot, self.rake) # TODO side pots
print "Total pot $%s | Rake $%.2f" % (self.totalcollected, self.rake) # TODO: side pots

board = []
for s in self.board.values():
board += s
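The totalpot recalculation above sums every recorded amount per street and then, at the end of each street, refunds the uncalled part of the last bet or raise (uncalled minus the largest call). A small self-contained check of that rule with made-up amounts - A bets 10, B calls 4 all-in, so 6 should come back and the pot ends at 8:

    from decimal import Decimal

    actions = [('A', 'bets', '10'), ('B', 'calls', '4')]   # B is all-in short
    totalpot, uncalled, calls = Decimal(0), Decimal(0), [Decimal(0)]
    for act in actions:
        if act[1] == 'bets':
            totalpot += Decimal(act[2])
            uncalled = Decimal(act[2])                     # last bet may go uncalled
            calls = [Decimal(0)]
        elif act[1] == 'calls':
            totalpot += Decimal(act[2])
            calls = calls + [Decimal(act[2])]
    if uncalled > 0 and max(calls) < uncalled:
        totalpot -= (uncalled - max(calls))                # 14 - 6
    print totalpot                                         # prints 8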
@@ -161,7 +161,8 @@ class HandHistoryConverter:
def readPlayerStacks(self, hand): abstract

# Needs to return a MatchObject with group names identifying the streets into the Hand object
# that is, pulls the chunks of preflop, flop, turn and river text into hand.streets MatchObject.
# so groups are called by street names 'PREFLOP', 'FLOP', 'STREET2' etc
# blinds are done seperately
def markStreets(self, hand): abstract

#Needs to return a list in the format

@@ -173,11 +174,13 @@ class HandHistoryConverter:
def readCollectPot(self, hand): abstract

# Some sites don't report the rake. This will be called at the end of the hand after the pot total has been calculated
# so that an inheriting class can calculate it for the specific site if need be.
def getRake(self, hand): abstract
# an inheriting class can calculate it for the specific site if need be.
def getRake(self, hand):
hand.rake = hand.totalpot - hand.totalcollected # * Decimal('0.05') # probably not quite right

def sanityCheck(self):
sane = True
sane = False
base_w = False
#Check if hhbase exists and is writable
#Note: Will not try to create the base HH directory
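The base-class getRake above now falls back to treating the rake as whatever part of the pot was not paid back out, i.e. hand.totalpot - hand.totalcollected; converters for sites that do report the rake (or, like Everleaf, need a different estimate) can still override it. A trivial illustration with made-up figures:

    from decimal import Decimal
    rake = Decimal('2.00') - Decimal('1.90')   # $2.00 pot, $1.90 collected -> rake of 0.10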
@@ -108,9 +108,9 @@ class fpdb_db:
self.cursor.execute(self.sql.query['createHandsPlayersTable'])
self.cursor.execute(self.sql.query['createHandsActionsTable'])
self.cursor.execute(self.sql.query['createHudCacheTable'])
self.cursor.execute(self.sql.query['addTourneyIndex'])
self.cursor.execute(self.sql.query['addHandsIndex'])
self.cursor.execute(self.sql.query['addPlayersIndex'])
#self.cursor.execute(self.sql.query['addTourneyIndex'])
#self.cursor.execute(self.sql.query['addHandsIndex'])
#self.cursor.execute(self.sql.query['addPlayersIndex'])
self.fillDefaultData()
self.db.commit()
#end def disconnect

@@ -185,6 +185,7 @@ class fpdb_db:

self.drop_tables()
self.create_tables()
fpdb_simple.createAllIndexes(self)
self.db.commit()
print "Finished recreating tables"
#end def recreate_tables
@@ -116,8 +116,11 @@ class Importer:

#Run full import on filelist
def runImport(self):
fpdb_simple.prepareBulkImport(self.fdb)
for file in self.filelist:
self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
fpdb_simple.afterBulkImport(self.fdb)
fpdb_simple.analyzeDB(self.fdb)

#Run import on updated files, then store latest update time.
def runUpdated(self):
@@ -22,6 +22,11 @@ from time import time

import fpdb_simple

saveActions=True # set this to False to avoid storing action data
# Pros: speeds up imports
# Cons: no action data is saved, so you need to keep the hand histories
# variance not available on stats page

#stores a stud/razz hand into the database
def ring_stud(backend, db, cursor, base, category, site_hand_no, gametype_id, hand_start_time
,names, player_ids, start_cashes, antes, card_values, card_suits, winnings, rakes

@@ -39,6 +44,7 @@ def ring_stud(backend, db, cursor, base, category, site_hand_no, gametype_id, ha

fpdb_simple.storeHudCache(cursor, base, category, gametype_id, player_ids, hudImportData)

if saveActions:
fpdb_simple.storeActions(cursor, hands_players_ids, action_types
,allIns, action_amounts, actionNos)
return hands_id

@@ -66,10 +72,10 @@ def ring_holdem_omaha(backend, db, cursor, base, category, site_hand_no, gametyp
t5 = time()
fpdb_simple.store_board_cards(cursor, hands_id, board_values, board_suits)
t6 = time()
if saveActions:
fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
t7 = time()
print "cards=%4.3f board=%4.3f hands=%4.3f plyrs=%4.3f hudcache=%4.3f board=%4.3f actions=%4.3f" \
% (t1-t0, t2-t1, t3-t2, t4-t3, t5-t4, t6-t5, t7-t6)
#print "fills=(%4.3f) saves=(%4.3f,%4.3f,%4.3f,%4.3f)" % (t2-t0, t3-t2, t4-t3, t5-t4, t6-t5)
return hands_id
#end def ring_holdem_omaha

@@ -98,6 +104,7 @@ def tourney_holdem_omaha(backend, db, cursor, base, category, siteTourneyNo, buy

fpdb_simple.store_board_cards(cursor, hands_id, board_values, board_suits)

if saveActions:
fpdb_simple.storeActions(cursor, hands_players_ids, action_types, allIns, action_amounts, actionNos)
return hands_id
#end def tourney_holdem_omaha

@@ -122,6 +129,7 @@ def tourney_stud(backend, db, cursor, base, category, siteTourneyNo, buyin, fee,

fpdb_simple.storeHudCache(cursor, base, category, gametypeId, playerIds, hudImportData)

if saveActions:
fpdb_simple.storeActions(cursor, hands_players_ids, actionTypes, allIns, actionAmounts, actionNos)
return hands_id
#end def tourney_stud
@@ -27,6 +27,307 @@ MYSQL_INNODB=2
PGSQL=3
SQLITE=4

# Data Structures for index and foreign key creation
# drop_code is an int with possible values: 0 - don't drop for bulk import
# 1 - drop during bulk import
# db differences:
# - note that mysql automatically creates indexes on constrained columns when
# foreign keys are created, while postgres does not. Hence the much longer list
# of indexes is required for postgres.
# all primary keys are left on all the time
#
# table column drop_code

indexes = [
[ ] # no db with index 0
, [ ] # no db with index 1
, [ # indexes for mysql (list index 2)
{'tab':'Players', 'col':'name', 'drop':0}
, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
]
, [ # indexes for postgres (list index 3)
{'tab':'Boardcards', 'col':'handId', 'drop':0}
, {'tab':'Gametypes', 'col':'siteId', 'drop':0}
, {'tab':'Hands', 'col':'gametypeId', 'drop':1}
, {'tab':'Hands', 'col':'siteHandNo', 'drop':0}
, {'tab':'HandsActions', 'col':'handplayerId', 'drop':0}
, {'tab':'HandsPlayers', 'col':'handId', 'drop':1}
, {'tab':'HandsPlayers', 'col':'playerId', 'drop':1}
, {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
, {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
, {'tab':'HudCache', 'col':'playerId', 'drop':0}
, {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'Players', 'col':'siteId', 'drop':1}
, {'tab':'Players', 'col':'name', 'drop':0}
, {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
, {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
, {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0}
, {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
]
]

foreignKeys = [
[ ] # no db with index 0
, [ ] # no db with index 1
, [ # foreign keys for mysql
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
, {'fktab':'HandsActions', 'fkcol':'handPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
, [ # foreign keys for postgres
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
, {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
, {'fktab':'HandsActions', 'fkcol':'handPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
]

# MySQL Notes:
# "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
# - creates index handId on <thistable>.handId
# alter table t drop foreign key fk
# alter table t add foreign key (fkcol) references tab(rcol)
# alter table t add constraint c foreign key (fkcol) references tab(rcol)
# (fkcol is used for foreigh key name)

# mysql to list indexes:
# SELECT table_name, index_name, non_unique, column_name
# FROM INFORMATION_SCHEMA.STATISTICS
# WHERE table_name = 'tbl_name'
# AND table_schema = 'db_name'
# ORDER BY table_name, index_name, seq_in_index
#
# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
# ALTER TABLE tab DROP INDEX idx

# mysql to list fks:
# SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
# FROM information_schema.KEY_COLUMN_USAGE
# WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
# AND REFERENCED_TABLE_NAME is not null
# ORDER BY TABLE_NAME, COLUMN_NAME;

# this may indicate missing object
# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")

# PG notes:

# To add a foreign key constraint to a table:
# ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
# ALTER TABLE tab DROP CONSTRAINT zipchk
#
# Note: index names must be unique across a schema
# CREATE INDEX idx ON tab(col)
# DROP INDEX idx
def prepareBulkImport(fdb):
"""Drop some indexes/foreign keys to prepare for bulk import.
Currently keeping the standalone indexes as needed to import quickly"""
# fdb is a fpdb_db object including backend, db, cursor, sql variables
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for fk in foreignKeys[fdb.backend]:
if fk['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
fdb.cursor.execute("SELECT constraint_name " +
"FROM information_schema.KEY_COLUMN_USAGE " +
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
"WHERE 1=1 " +
"AND table_name = %s AND column_name = %s " +
"AND referenced_table_name = %s " +
"AND referenced_column_name = %s ",
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
cons = fdb.cursor.fetchone()
print "preparebulk: cons=", cons
if cons:
print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
try:
fdb.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
except:
pass
elif fdb.backend == PGSQL:
print "dropping pg fk", fk['fktab'], fk['fkcol']
try:
fdb.cursor.execute("alter table " + fk['fktab'] + " drop constraint "
+ fk['fktab'] + '_' + fk['fkcol'] + '_fkey')
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1

for idx in indexes[fdb.backend]:
if idx['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
print "dropping mysql index ", idx['tab'], idx['col']
try:
fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
print "dropping pg index ", idx['tab'], idx['col']
# mod to use tab_col for index name?
try:
fdb.cursor.execute( "drop index %s_%s_idx" % (idx['tab'],idx['col']) )
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1

if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
fdb.db.commit() # seems to clear up errors if there were any in postgres
#end def prepareBulkImport

def afterBulkImport(fdb):
"""Re-create any dropped indexes/foreign keys after bulk import"""
# fdb is a fpdb_db object including backend, db, cursor, sql variables
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for fk in foreignKeys[fdb.backend]:
if fk['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
fdb.cursor.execute("SELECT constraint_name " +
"FROM information_schema.KEY_COLUMN_USAGE " +
#"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
"WHERE 1=1 " +
"AND table_name = %s AND column_name = %s " +
"AND referenced_table_name = %s " +
"AND referenced_column_name = %s ",
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
cons = fdb.cursor.fetchone()
print "afterbulk: cons=", cons
if cons:
pass
else:
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
try:
fdb.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
+ fk['fkcol'] + ") references " + fk['rtab'] + "("
+ fk['rcol'] + ")")
except:
pass
elif fdb.backend == PGSQL:
print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
try:
fdb.cursor.execute("alter table " + fk['fktab'] + " add constraint "
+ fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
+ " foreign key (" + fk['fkcol']
+ ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1

for idx in indexes[fdb.backend]:
if idx['drop'] == 1:
if fdb.backend == MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
fdb.cursor.execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
except:
print " ERROR! :-("
pass
else:
print "Only MySQL and Postgres supported so far"
return -1

if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
fdb.db.commit() # seems to clear up errors if there were any in postgres
#end def afterBulkImport

def createAllIndexes(fdb):
"""Create new indexes"""
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for idx in indexes[fdb.backend]:
if fdb.backend == MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
fdb.cursor.execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
except:
print " ERROR! :-("
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
#end def createAllIndexes

def dropAllIndexes(fdb):
"""Drop all standalone indexes (i.e. not including primary keys or foreign keys)
using list of indexes in indexes data structure"""
# maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow table/index operations to work
for idx in indexes[fdb.backend]:
if fdb.backend == MYSQL_INNODB:
print "dropping mysql index ", idx['tab'], idx['col']
try:
fdb.cursor.execute( "alter table %s drop index %s"
, (idx['tab'],idx['col']) )
except:
pass
elif fdb.backend == PGSQL:
print "dropping pg index ", idx['tab'], idx['col']
# mod to use tab_col for index name?
try:
fdb.cursor.execute( "drop index %s_%s_idx"
% (idx['tab'],idx['col']) )
except:
pass
else:
print "Only MySQL and Postgres supported so far"
return -1
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(1) # go back to normal isolation level
#end def dropAllIndexes

def analyzeDB(fdb):
"""Do whatever the DB can offer to update index/table statistics"""
if fdb.backend == PGSQL:
fdb.db.set_isolation_level(0) # allow vacuum to work
try:
fdb.cursor.execute("vacuum analyze")
except:
print "Error during vacuum"
fdb.db.set_isolation_level(1) # go back to normal isolation level
#end def analyzeDB

class DuplicateError(Exception):
def __init__(self, value):
@@ -90,7 +391,7 @@ def checkPositions(positions):
pass

### RHH modified to allow for "position 9" here (pos==9 is when you're a dead hand before the BB
if (pos!="B" and pos!="S" and pos!=0 and pos!=1 and pos!=2 and pos!=3 and pos!=4 and pos!=5 and pos!=6 and pos!=7 and pos != 8 and pos!=9):
if (pos!="B" and pos!="S" and pos!=0 and pos!=1 and pos!=2 and pos!=3 and pos!=4 and pos!=5 and pos!=6 and pos!=7 and pos!=9):
raise FpdbError("invalid position found in checkPositions. i: "+str(i)+" position: "+str(pos))
#end def fpdb_simple.checkPositions
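The index and foreign-key helpers added above are meant to bracket a bulk import, which is how runImport in fpdb_import now uses them: drop the expensive constraints, load the files, re-create whatever was dropped, then refresh planner statistics. A condensed restatement of that call sequence (fdb is the fpdb_db wrapper object; filelist maps filename to a (site, filter) pair as in fpdb_import.Importer):

    import fpdb_simple

    def bulk_import(importer, fdb, filelist):
        fpdb_simple.prepareBulkImport(fdb)      # drop the 'drop':1 FKs/indexes first
        for f in filelist:
            importer.import_file_dict(f, filelist[f][0], filelist[f][1])
        fpdb_simple.afterBulkImport(fdb)        # re-create whatever was dropped
        fpdb_simple.analyzeDB(fdb)              # vacuum analyze / update table stats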