Merge branch 'master' of git://git.assembla.com/fpdboz
commit 95a7ab8e48
@@ -43,6 +43,7 @@ follow : whether to tail -f the input"""
 logging.info("Initialising Betfair converter class")
 self.filetype = "text"
 self.codepage = "cp1252"
+self.siteId = 7 # Needs to match id entry in Sites database
 if autostart:
 self.start()

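The same one-line addition recurs in each converter below (Carbon, Everleaf, Full Tilt, OnGame, PokerStars, UltimateBet): a hard-coded siteId that has to agree with the Sites rows inserted by fillDefaultData later in this commit. A minimal sketch of that correspondence, assuming ids are assigned in insertion order starting at 1 (the dict itself is illustrative, not part of the code):

# Illustrative only: siteId values set by this commit, matching the order of the
# Sites inserts added to fillDefaultData further down in this diff.
SITE_IDS = {
    "Full Tilt Poker": 1,
    "PokerStars":      2,
    "Everleaf":        3,
    "Carbon":          4,
    "OnGame":          5,
    "UltimateBet":     6,
    "Betfair":         7,
}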
@@ -54,6 +54,7 @@ class CarbonPoker(HandHistoryConverter):
 print "Initialising Carbon Poker converter class"
 HandHistoryConverter.__init__(self, config, filename, "Carbon") # Call super class init
 self.setFileType("xml")
+self.siteId = 4 # Needs to match id entry in Sites database

 def readSupportedGames(self):
 pass
@@ -49,6 +49,7 @@ debugging: if False, pass on partially supported game types. If true, have a go
 logging.info("Initialising Everleaf converter class")
 self.filetype = "text"
 self.codepage = "cp1252"
+self.siteId = 3 # Needs to match id entry in Sites database
 self.debugging = debugging
 if autostart:
 self.start()
@@ -900,7 +900,7 @@ class FpdbSQLQueries:
 GROUP BY h.handStart, hp.handId, hp.totalProfit
 ORDER BY h.handStart"""

-if self.dbname in ['MySQL InnoDB', 'PostgreSQL']:
+if self.dbname in ['MySQL InnoDB']:
 self.query['playerDetailedStats'] = """
 select <hgameTypeId> AS hgametypeid
 ,gt.base
@@ -949,14 +949,89 @@ class FpdbSQLQueries:
 ,avg(h.seats+0.0) AS avgseats
 ,variance(hp.totalProfit/100.0) AS variance
 from HandsPlayers hp
 inner join Hands h on (h.id = hp.handId)
 inner join Gametypes gt on (gt.Id = h.gameTypeId)
 inner join Sites s on (s.Id = gt.siteId)
 where hp.playerId in <player_test>
 and hp.tourneysPlayersId IS NULL
 and h.seats <seats_test>
 <flagtest>
 <gtbigBlind_test>
+and date_format(h.handStart, '%Y-%m-%d') <datestest>
+group by hgameTypeId
+,hp.playerId
+,gt.base
+,gt.category
+<groupbyseats>
+,upper(gt.limitType)
+,s.name
+order by hp.playerId
+,gt.base
+,gt.category
+<orderbyseats>
+<orderbyhgameTypeId>
+,maxbigblind desc
+,upper(gt.limitType)
+,s.name
+"""
+elif self.dbname in ['PostgreSQL']:
+self.query['playerDetailedStats'] = """
+select <hgameTypeId> AS hgametypeid
+,gt.base
+,gt.category
+,upper(gt.limitType) AS limittype
+,s.name
+,min(gt.bigBlind) AS minbigblind
+,max(gt.bigBlind) AS maxbigblind
+/*,<hcgametypeId> AS gtid*/
+,count(1) AS n
+,100.0*sum(cast(hp.street0VPI as <signed>integer))/count(1) AS vpip
+,100.0*sum(cast(hp.street0Aggr as <signed>integer))/count(1) AS pfr
+,case when sum(cast(hp.street0_3Bchance as <signed>integer)) = 0 then -999
+else 100.0*sum(cast(hp.street0_3Bdone as <signed>integer))/sum(cast(hp.street0_3Bchance as <signed>integer))
+end AS pf3
+,case when sum(cast(hp.stealattemptchance as <signed>integer)) = 0 then -999
+else 100.0*sum(cast(hp.stealattempted as <signed>integer))/sum(cast(hp.stealattemptchance as <signed>integer))
+end AS steals
+,100.0*sum(cast(hp.street1Seen as <signed>integer))/count(1) AS saw_f
+,100.0*sum(cast(hp.sawShowdown as <signed>integer))/count(1) AS sawsd
+,case when sum(cast(hp.street1Seen as <signed>integer)) = 0 then -999
+else 100.0*sum(cast(hp.sawShowdown as <signed>integer))/sum(cast(hp.street1Seen as <signed>integer))
+end AS wtsdwsf
+,case when sum(cast(hp.sawShowdown as <signed>integer)) = 0 then -999
+else 100.0*sum(cast(hp.wonAtSD as <signed>integer))/sum(cast(hp.sawShowdown as <signed>integer))
+end AS wmsd
+,case when sum(cast(hp.street1Seen as <signed>integer)) = 0 then -999
+else 100.0*sum(cast(hp.street1Aggr as <signed>integer))/sum(cast(hp.street1Seen as <signed>integer))
+end AS flafq
+,case when sum(cast(hp.street2Seen as <signed>integer)) = 0 then -999
+else 100.0*sum(cast(hp.street2Aggr as <signed>integer))/sum(cast(hp.street2Seen as <signed>integer))
+end AS tuafq
+,case when sum(cast(hp.street3Seen as <signed>integer)) = 0 then -999
+else 100.0*sum(cast(hp.street3Aggr as <signed>integer))/sum(cast(hp.street3Seen as <signed>integer))
+end AS rvafq
+,case when sum(cast(hp.street1Seen as <signed>integer))+sum(cast(hp.street2Seen as <signed>integer))+sum(cast(hp.street3Seen as <signed>integer)) = 0 then -999
+else 100.0*(sum(cast(hp.street1Aggr as <signed>integer))+sum(cast(hp.street2Aggr as <signed>integer))+sum(cast(hp.street3Aggr as <signed>integer)))
+/(sum(cast(hp.street1Seen as <signed>integer))+sum(cast(hp.street2Seen as <signed>integer))+sum(cast(hp.street3Seen as <signed>integer)))
+end AS pofafq
+,sum(hp.totalProfit)/100.0 AS net
+,sum(hp.rake)/100.0 AS rake
+,100.0*avg(hp.totalProfit/(gt.bigBlind+0.0)) AS bbper100
+,avg(hp.totalProfit)/100.0 AS profitperhand
+,100.0*avg((hp.totalProfit+hp.rake)/(gt.bigBlind+0.0)) AS bb100xr
+,avg((hp.totalProfit+hp.rake)/100.0) AS profhndxr
+,avg(h.seats+0.0) AS avgseats
+,variance(hp.totalProfit/100.0) AS variance
+from HandsPlayers hp
+inner join Hands h on (h.id = hp.handId)
+inner join Gametypes gt on (gt.Id = h.gameTypeId)
+inner join Sites s on (s.Id = gt.siteId)
+where hp.playerId in <player_test>
+and hp.tourneysPlayersId IS NULL
+and h.seats <seats_test>
+<flagtest>
+<gtbigBlind_test>
+and to_char(h.handStart, 'YYYY-MM-DD') <datestest>
 group by hgameTypeId
 ,hp.playerId
 ,gt.base
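Two points in the hunk above: the playerDetailedStats query is now duplicated per backend because the new date filter needs dialect-specific syntax (date_format(...) on MySQL, to_char(...) on PostgreSQL), and every ratio statistic guards its denominator, returning -999 instead of dividing by zero when there is no sample. A minimal sketch of that guard pattern, pulled out as a standalone Python string (illustrative only; the real expressions live inside the query above and use the <signed> placeholder):

# Illustrative only: the divide-by-zero guard used for each ratio stat, here the
# "went to showdown when saw flop" column (wtsdwsf).
wtsdwsf_expr = """
    case when sum(cast(hp.street1Seen as integer)) = 0 then -999
         else 100.0*sum(cast(hp.sawShowdown as integer))
                   /sum(cast(hp.street1Seen as integer))
    end AS wtsdwsf
"""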
@@ -45,6 +45,7 @@ follow : whether to tail -f the input"""
 logging.info("Initialising Fulltilt converter class")
 self.filetype = "text"
 self.codepage = "cp1252"
+self.siteId = 1 # Needs to match id entry in Sites database
 if autostart:
 self.start()

@@ -60,7 +60,7 @@ class GuiPlayerStats (threading.Thread):
 "LimitSep" : True,
 "Seats" : True,
 "SeatSep" : True,
-"Dates" : False,
+"Dates" : True,
 "Groups" : True,
 "Button1" : True,
 "Button2" : True
@@ -93,8 +93,9 @@ class GuiPlayerStats (threading.Thread):
 , ("rvafq", True, "RvAFq", 1.0, "%3.1f")
 , ("pofafq", False, "PoFAFq", 1.0, "%3.1f")
 , ("net", True, "Net($)", 1.0, "%6.2f")
-, ("bbper100", True, "BB/100", 1.0, "%4.2f")
+, ("bbper100", True, "bb/100", 1.0, "%4.2f")
 , ("rake", True, "Rake($)", 1.0, "%6.2f")
+, ("bb100xr", True, "bbxr/100", 1.0, "%4.2f")
 , ("variance", True, "Variance", 1.0, "%5.2f")
 ]

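Each entry in the stats column list above is an (alias, show, heading, xalign, format) tuple; a later hunk in this diff unpacks the positions as colalias,colshow,colheading,colxalign,colformat = 0,1,2,3,4. A small illustrative sketch of how one descriptor might drive rendering (the sample value is made up):

# Illustrative only: interpreting one column descriptor from the list above.
colalias, colshow, colheading, colxalign, colformat = 0, 1, 2, 3, 4
col = ("bb100xr", True, "bbxr/100", 1.0, "%4.2f")   # the column added in this commit
value = 3.14159                                     # hypothetical query result
if col[colshow]:                                    # only columns flagged True are shown
    print "%s: %s" % (col[colheading], col[colformat] % value)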
@@ -155,6 +156,7 @@ class GuiPlayerStats (threading.Thread):
 siteids = self.filters.getSiteIds()
 limits = self.filters.getLimits()
 seats = self.filters.getSeats()
+dates = self.filters.getDates()
 sitenos = []
 playerids = []

@@ -178,16 +180,16 @@ class GuiPlayerStats (threading.Thread):
 print "No limits found"
 return

-self.createStatsTable(vbox, playerids, sitenos, limits, seats)
+self.createStatsTable(vbox, playerids, sitenos, limits, seats, dates)

-def createStatsTable(self, vbox, playerids, sitenos, limits, seats):
+def createStatsTable(self, vbox, playerids, sitenos, limits, seats, dates):
 starttime = time()

 # Display summary table at top of page
 # 3rd parameter passes extra flags, currently includes:
 # holecards - whether to display card breakdown (True/False)
 flags = [False]
-self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats)
+self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates)

 # Separator
 sep = gtk.HSeparator()
@@ -210,13 +212,13 @@ class GuiPlayerStats (threading.Thread):

 # Detailed table
 flags = [True]
-self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats)
+self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates)

 self.db.db.commit()
 print "Stats page displayed in %4.2f seconds" % (time() - starttime)
 #end def fillStatsFrame(self, vbox):

-def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats):
+def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats, dates):
 row = 0
 sqlrow = 0
 colalias,colshow,colheading,colxalign,colformat = 0,1,2,3,4
|
@ -229,7 +231,7 @@ class GuiPlayerStats (threading.Thread):
|
||||||
self.stats_table.show()
|
self.stats_table.show()
|
||||||
|
|
||||||
tmp = self.sql.query[query]
|
tmp = self.sql.query[query]
|
||||||
tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats)
|
tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats, dates)
|
||||||
self.cursor.execute(tmp)
|
self.cursor.execute(tmp)
|
||||||
result = self.cursor.fetchall()
|
result = self.cursor.fetchall()
|
||||||
colnames = [desc[0].lower() for desc in self.cursor.description]
|
colnames = [desc[0].lower() for desc in self.cursor.description]
|
||||||
|
@ -311,7 +313,7 @@ class GuiPlayerStats (threading.Thread):
|
||||||
|
|
||||||
#end def addTable(self, query, vars, playerids, sitenos, limits, seats):
|
#end def addTable(self, query, vars, playerids, sitenos, limits, seats):
|
||||||
|
|
||||||
def refineQuery(self, query, flags, playerids, sitenos, limits, seats):
|
def refineQuery(self, query, flags, playerids, sitenos, limits, seats, dates):
|
||||||
if not flags: holecards = False
|
if not flags: holecards = False
|
||||||
else: holecards = flags[0]
|
else: holecards = flags[0]
|
||||||
|
|
||||||
|
@@ -371,6 +373,9 @@ class GuiPlayerStats (threading.Thread):
 else:
 query = query.replace("<signed>", '')

+# Filter on dates
+query = query.replace("<datestest>", " between '" + dates[0] + "' and '" + dates[1] + "'")
+
 #print "query =\n", query
 return(query)
 #end def refineQuery(self, query, playerids, sitenos, limits):
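The refineQuery change above is what fills the <datestest> placeholder used by the playerDetailedStats queries. A minimal sketch of the substitution, assuming dates arrives from the new self.filters.getDates() call as a pair of 'YYYY-MM-DD' strings (an assumption; the sample dates are invented):

# Illustrative only: how <datestest> is expanded.
template = "and date_format(h.handStart, '%Y-%m-%d') <datestest>"   # MySQL variant from the query above
dates = ('2008-01-01', '2008-12-31')                                # hypothetical date filter
clause = template.replace("<datestest>",
                          " between '" + dates[0] + "' and '" + dates[1] + "'")
# clause == "and date_format(h.handStart, '%Y-%m-%d')  between '2008-01-01' and '2008-12-31'"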
@@ -438,3 +443,6 @@ class GuiPlayerStats (threading.Thread):
 detailDialog.destroy()


+
+
+
@@ -87,7 +87,7 @@ class GuiPositionalStats (threading.Thread):
 )
 self.posnheads = ( "Game", "Seats", "Posn", "VPIP", "PFR", "PF3", "Steals"
 , "Saw_F", "SawSD", "WtSDwsF", "W$SD", "FlAFq", "TuAFq", "RvAFq"
-, "PoFAFq", "Net($)", "BB/100", "$/hand", "Variance", "Hds"
+, "PoFAFq", "Net($)", "bb/100", "$/hand", "Variance", "Hds"
 )

 self.fillStatsFrame(self.stats_frame)
@@ -36,6 +36,7 @@ class Hand:
 self.sitename = sitename
 self.stats = DerivedStats.DerivedStats(self)
 self.gametype = gametype
+self.starttime = 0
 self.handText = handText
 self.handid = 0
 self.tablename = "Slartibartfast"
@@ -86,10 +87,60 @@ Should not commit, and do minimal selects. Callers may want to cache commits
 db: a connected fpdb_db object"""
 # TODO:
 # Players - base playerid and siteid tuple
+sqlids = db.getSqlPlayerIDs([p[1] for p in self.players], self.siteId)
 # HudCache data to come from DerivedStats class
 # HandsActions - all actions for all players for all streets - self.actions
-# BoardCards - ?
+# BoardCards - Skip - no longer necessary?
 # Hands - Summary information of hand indexed by handId - gameinfo
+# self.tablename = tableName
+# self.handid = siteHandNo
+# gametypeId SMALLINT UNSIGNED NOT NULL, FOREIGN KEY (gametypeId) REFERENCES Gametypes(id),
+#
+# self.starttime = handStart
+# importTime DATETIME NOT NULL,
+#
+# seats TINYINT NOT NULL,
+#
+# self.maxseats = maxSeats
+# boardcard1 smallint, /* 0=none, 1-13=2-Ah 14-26=2-Ad 27-39=2-Ac 40-52=2-As */
+# boardcard2 smallint,
+# boardcard3 smallint,
+# boardcard4 smallint,
+# boardcard5 smallint,
+# texture smallint,
+# playersVpi SMALLINT NOT NULL, /* num of players vpi */
+# Needs to be recorded
+# playersAtStreet1 SMALLINT NOT NULL, /* num of players seeing flop/street4 */
+# Needs to be recorded
+# playersAtStreet2 SMALLINT NOT NULL,
+# Needs to be recorded
+# playersAtStreet3 SMALLINT NOT NULL,
+# Needs to be recorded
+# playersAtStreet4 SMALLINT NOT NULL,
+# Needs to be recorded
+# playersAtShowdown SMALLINT NOT NULL,
+# Needs to be recorded
+# street0Raises TINYINT NOT NULL, /* num small bets paid to see flop/street4, including blind */
+# Needs to be recorded
+# street1Raises TINYINT NOT NULL, /* num small bets paid to see turn/street5 */
+# Needs to be recorded
+# street2Raises TINYINT NOT NULL, /* num big bets paid to see river/street6 */
+# Needs to be recorded
+# street3Raises TINYINT NOT NULL, /* num big bets paid to see sd/street7 */
+# Needs to be recorded
+# street4Raises TINYINT NOT NULL, /* num big bets paid to see showdown */
+# Needs to be recorded
+# street1Pot INT, /* pot size at flop/street4 */
+# Needs to be recorded
+# street2Pot INT, /* pot size at turn/street5 */
+# Needs to be recorded
+# street3Pot INT, /* pot size at river/street6 */
+# Needs to be recorded
+# street4Pot INT, /* pot size at sd/street7 */
+# Needs to be recorded
+# showdownPot INT, /* pot size at sd/street7 */
+# comment TEXT,
+# commentTs DATETIME
 # HandsPlayers - ? ... Do we fix winnings?
 # Tourneys ?
 # TourneysPlayers
@@ -72,6 +72,7 @@ class OnGame(HandHistoryConverter):
 HandHistoryConverter.__init__(self, config, file, sitename="OnGame") # Call super class init.
 self.sitename = "OnGame"
 self.setFileType("text", "cp1252")
+self.siteId = 5 # Needs to match id entry in Sites database
 #self.rexx.setGameInfoRegex('.*Blinds \$?(?P<SB>[.0-9]+)/\$?(?P<BB>[.0-9]+)')
 self.rexx.setSplitHandRegex('\n\n\n+')

@@ -44,6 +44,7 @@ follow : whether to tail -f the input"""
 logging.info("Initialising PokerStars converter class")
 self.filetype = "text"
 self.codepage = "cp1252"
+self.siteId = 2 # Needs to match id entry in Sites database
 if autostart:
 self.start()

@@ -564,13 +564,13 @@ class Sql:
 if db_server == 'mysql':
 self.query['get_hand_1day_ago'] = """
 select coalesce(max(id),0)
-from hands
-where handstart < date_sub(utc_timestamp(), interval '1' day)"""
+from Hands
+where handStart < date_sub(utc_timestamp(), interval '1' day)"""
 else: # assume postgresql
 self.query['get_hand_1day_ago'] = """
 select coalesce(max(id),0)
-from hands
-where handstart < now() at time zone 'UTC' - interval '1 day'"""
+from Hands
+where handStart < now() at time zone 'UTC' - interval '1 day'"""

 if __name__== "__main__":
 # just print the default queries and exit
@@ -42,6 +42,7 @@ follow : whether to tail -f the input"""
 logging.info("Initialising UltimateBetconverter class")
 self.filetype = "text"
 self.codepage = "cp1252"
+self.siteId = 6 # Needs to match id entry in Sites database
 if autostart:
 self.start()

@@ -181,35 +181,54 @@ class fpdb:

 def dia_load_profile(self, widget, data=None):
 """Dialogue to select a file to load a profile from"""
-self.obtain_global_lock()
-chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
-action=gtk.FILE_CHOOSER_ACTION_OPEN,
-buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
-chooser.set_filename(self.profile)
+if self.obtain_global_lock():
+try:
+chooser = gtk.FileChooserDialog(title="Please select a profile file to load",
+action=gtk.FILE_CHOOSER_ACTION_OPEN,
+buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK))
+chooser.set_filename(self.profile)

 response = chooser.run()
 chooser.destroy()
 if response == gtk.RESPONSE_OK:
 self.load_profile(chooser.get_filename())
 elif response == gtk.RESPONSE_CANCEL:
 print 'User cancelled loading profile'
+except:
+pass
+self.release_global_lock()
 #end def dia_load_profile

 def dia_recreate_tables(self, widget, data=None):
 """Dialogue that asks user to confirm that he wants to delete and recreate the tables"""
-self.obtain_global_lock()
+if self.obtain_global_lock():

-dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING,
-buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables")
-diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted."
-dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted
+lock_released = False
+try:
+dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING,
+buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm deleting and recreating tables")
+diastring = "Please confirm that you want to (re-)create the tables. If there already are tables in the database "+self.db.database+" on "+self.db.host+" they will be deleted."
+dia_confirm.format_secondary_text(diastring)#todo: make above string with bold for db, host and deleted

 response = dia_confirm.run()
 dia_confirm.destroy()
 if response == gtk.RESPONSE_YES:
-self.db.recreate_tables()
-elif response == gtk.RESPONSE_NO:
-print 'User cancelled recreating tables'
+if self.db.backend == self.fdb_lock.MYSQL_INNODB:
+# mysql requires locks on all tables or none - easier to release this lock
+# than lock all the other tables
+# ToDo: lock all other tables so that lock doesn't have to be released
+self.release_global_lock()
+lock_released = True
+self.db.recreate_tables()
+else:
+# for other dbs use same connection as holds global lock
+self.fdb_lock.recreate_tables()
+elif response == gtk.RESPONSE_NO:
+print 'User cancelled recreating tables'
+except:
+pass
+if not lock_released:
+self.release_global_lock()
 #end def dia_recreate_tables

 def dia_regression_test(self, widget, data=None):
@@ -291,7 +310,7 @@ class fpdb:

 # Create actions
 actiongroup.add_actions([('main', None, '_Main'),
-('Quit', gtk.STOCK_QUIT, '_Quit me!', None, 'Quit the Program', self.quit),
+('Quit', gtk.STOCK_QUIT, '_Quit', None, 'Quit the Program', self.quit),
 ('LoadProf', None, '_Load Profile (broken)', '<control>L', 'Load your profile', self.dia_load_profile),
 ('EditProf', None, '_Edit Profile (todo)', '<control>E', 'Edit your profile', self.dia_edit_profile),
 ('SaveProf', None, '_Save Profile (todo)', '<control>S', 'Save your profile', self.dia_save_profile),
@@ -380,7 +399,14 @@ class fpdb:
 #end def not_implemented

 def obtain_global_lock(self):
-print "todo: implement obtain_global_lock (users: pls ignore this)"
+print "\nTaking global lock ..."
+self.fdb_lock = fpdb_db.fpdb_db()
+self.fdb_lock.connect(self.settings['db-backend'],
+self.settings['db-host'],
+self.settings['db-databaseName'],
+self.settings['db-user'],
+self.settings['db-password'])
+return fpdb_simple.get_global_lock(self.fdb_lock)
 #end def obtain_global_lock

 def quit(self, widget, data):
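With obtain_global_lock now returning a real success flag (it opens a second fpdb_db connection and delegates to fpdb_simple.get_global_lock, added further down in this diff), callers follow the pattern visible in dia_load_profile above: proceed only if the lock was taken, then release it. A condensed sketch of that calling convention (do_guarded_work is a made-up placeholder):

# Illustrative only: the calling convention the dialogs use in this commit.
if self.obtain_global_lock():      # False means another program holds the lock
    try:
        do_guarded_work()          # hypothetical work done under the lock
    except:
        pass
    self.release_global_lock()     # rolls back and disconnects the lock connection (next hunk)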
@@ -391,7 +417,9 @@ class fpdb:
 #end def quit_cliecked

 def release_global_lock(self):
-print "todo: implement release_global_lock"
+self.fdb_lock.db.rollback()
+self.fdb_lock.disconnect()
+print "Global lock released."
 #end def release_global_lock

 def tab_abbreviations(self, widget, data=None):
@@ -96,7 +96,7 @@ class fpdb_db:
 try:
 self.cursor.execute("SELECT * FROM Settings")
 settings=self.cursor.fetchone()
-if settings[0]!=118:
+if settings[0]!=119:
 print "outdated or too new database version - please recreate tables"
 self.wrongDbVersion=True
 except:# _mysql_exceptions.ProgrammingError:
@@ -201,10 +201,14 @@ class fpdb_db:
 #end def get_db_info

 def fillDefaultData(self):
-self.cursor.execute("INSERT INTO Settings VALUES (118);")
+self.cursor.execute("INSERT INTO Settings VALUES (119);")
 self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Full Tilt Poker', 'USD');")
 self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'PokerStars', 'USD');")
 self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Everleaf', 'USD');")
+self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Carbon', 'USD');")
+self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'OnGame', 'USD');")
+self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'UltimateBet', 'USD');")
+self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Betfair', 'USD');")
 self.cursor.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
 #end def fillDefaultData

@@ -217,4 +221,23 @@ class fpdb_db:
 self.db.commit()
 print "Finished recreating tables"
 #end def recreate_tables
 #end class fpdb_db
+
+def getSqlPlayerIDs(names, site_id):
+result = []
+notfound = []
+self.cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
+tmp = dict(self.cursor.fetchall())
+for n in names:
+if n not in tmp:
+notfound.append(n)
+else:
+result.append(tmp[n])
+if notfound:
+cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
+cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
+tmp = cursor.fetchall()
+for n in tmp:
+result.append(n[0])
+
+#We proabably want to cache this
+return result
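The Hand.insert hunk earlier in this commit calls this as a method on the connected db object: sqlids = db.getSqlPlayerIDs([p[1] for p in self.players], self.siteId). As added here the function has no self parameter and mixes self.cursor with a bare cursor, so it presumably still needs to be attached to fpdb_db and tidied up. A minimal sketch of the intended call, assuming self.players holds tuples whose second element is the player name (the sample data are invented):

# Illustrative only: resolving player names to Players.id values during import.
players = [(1, 'PlayerA', 1000), (2, 'PlayerB', 1500)]   # hypothetical self.players
names = [p[1] for p in players]                          # ['PlayerA', 'PlayerB']
sqlids = db.getSqlPlayerIDs(names, 7)                    # 7 = Betfair siteId in this commit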
@@ -20,6 +20,7 @@
 import datetime
 import time
 import re
+import sys

 import Card

@@ -156,7 +157,7 @@ def prepareBulkImport(fdb):
 "AND referenced_column_name = %s ",
 (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
 cons = fdb.cursor.fetchone()
-print "preparebulk: cons=", cons
+#print "preparebulk: cons=", cons
 if cons:
 print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
 try:
@@ -165,13 +166,25 @@ def prepareBulkImport(fdb):
 pass
 elif fdb.backend == PGSQL:
 # DON'T FORGET TO RECREATE THEM!!
-#print "dropping pg fk", fk['fktab'], fk['fkcol']
+print "dropping pg fk", fk['fktab'], fk['fkcol']
 try:
-#print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
-fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
-print "dropped pg fk pg fk %s_%s_fkey" % (fk['fktab'], fk['fkcol'])
+# try to lock table to see if index drop will work:
+# hmmm, tested by commenting out rollback in grapher. lock seems to work but
+# then drop still hangs :-( does work in some tests though??
+# will leave code here for now pending further tests/enhancement ...
+fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
+#print "after lock, status:", fdb.cursor.statusmessage
+#print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
+try:
+fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
+print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
+except:
+if "does not exist" not in str(sys.exc_value):
+print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
+% (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
 except:
-print "! failed drop pg fk %s_%s_fkey" % (fk['fktab'], fk['fkcol'])
+print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
+% (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
 else:
 print "Only MySQL and Postgres supported so far"
 return -1
@@ -181,22 +194,32 @@ def prepareBulkImport(fdb):
 if fdb.backend == MYSQL_INNODB:
 print "dropping mysql index ", idx['tab'], idx['col']
 try:
+# apparently nowait is not implemented in mysql so this just hands if there are locks
+# preventing the index drop :-(
 fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
 except:
 pass
 elif fdb.backend == PGSQL:
 # DON'T FORGET TO RECREATE THEM!!
-#print "Index dropping disabled for postgresql."
-#print "dropping pg index ", idx['tab'], idx['col']
-# mod to use tab_col for index name?
+print "dropping pg index ", idx['tab'], idx['col']
 try:
-fdb.cursor.execute( "drop index %s_%s_idx" % (idx['tab'],idx['col']) )
-print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
-#print "dropped pg index ", idx['tab'], idx['col']
+# try to lock table to see if index drop will work:
+fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
+#print "after lock, status:", fdb.cursor.statusmessage
+try:
+# table locked ok so index drop should work:
+#print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
+fdb.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
+#print "dropped pg index ", idx['tab'], idx['col']
+except:
+if "does not exist" not in str(sys.exc_value):
+print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
+% (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
 except:
-print "! failed drop index %s_%s_idx" % (idx['tab'],idx['col'])
+print "warning: index %s_%s_idx not dropped %s, continuing ..." \
+% (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
 else:
-print "Only MySQL and Postgres supported so far"
+print "Error: Only MySQL and Postgres supported so far"
 return -1

 if fdb.backend == PGSQL:
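Both PostgreSQL branches above now probe with lock table ... in exclusive mode nowait before attempting the DDL drop, so a busy table fails fast instead of hanging the bulk-import preparation, and "does not exist" errors are ignored as harmless. A compact sketch of that probe-then-drop idea outside the surrounding loop (the table and index names are placeholders):

# Illustrative only: take an exclusive lock with nowait as a probe, then drop,
# tolerating "does not exist"; fdb.cursor is the cursor used throughout this file.
try:
    fdb.cursor.execute("lock table %s in exclusive mode nowait" % ("Hands",))
    try:
        fdb.cursor.execute("drop index if exists %s_%s_idx" % ("Hands", "gametypeId"))
    except:
        if "does not exist" not in str(sys.exc_value):
            print "warning: index drop failed:", str(sys.exc_value).rstrip('\n')
except:
    print "warning: could not lock table, skipping index drop"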
@@ -344,6 +367,27 @@ def analyzeDB(fdb):
 fdb.db.commit()
 #end def analyzeDB

+def get_global_lock(fdb):
+if fdb.backend == MYSQL_INNODB:
+try:
+fdb.cursor.execute( "lock tables Hands write" )
+except:
+print "Error! failed to obtain global lock. Close all programs accessing " \
++ "database (including fpdb) and try again (%s)." \
+% ( str(sys.exc_value).rstrip('\n'), )
+return(False)
+elif fdb.backend == PGSQL:
+try:
+fdb.cursor.execute( "lock table Hands in exclusive mode nowait" )
+#print "... after lock table, status =", fdb.cursor.statusmessage
+except:
+print "Error! failed to obtain global lock. Close all programs accessing " \
++ "database (including fpdb) and try again (%s)." \
+% ( str(sys.exc_value).rstrip('\n'), )
+return(False)
+return(True)
+
+
 class DuplicateError(Exception):
 def __init__(self, value):
 self.value = value
@@ -1346,27 +1390,6 @@ def recognisePlayerIDs(cursor, names, site_id):
 #end def recognisePlayerIDs


-# Here's a version that would work if it wasn't for the fact that it needs to have the output in the same order as input
-# this version could also be improved upon using list comprehensions, etc
-
-#def recognisePlayerIDs(cursor, names, site_id):
-# result = []
-# notfound = []
-# cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
-# tmp = dict(cursor.fetchall())
-# for n in names:
-# if n not in tmp:
-# notfound.append(n)
-# else:
-# result.append(tmp[n])
-# if notfound:
-# cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
-# cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
-# tmp = cursor.fetchall()
-# for n in tmp:
-# result.append(n[0])
-#
-# return result

 #recognises the name in the given line and returns its array position in the given array
 def recognisePlayerNo(line, names, atype):