Merge branch 'master' of git://git.assembla.com/free_poker_tools

This commit is contained in:
Mika Bostrom 2009-08-09 14:27:05 +03:00
commit b1281385ec
21 changed files with 3574 additions and 2872 deletions

View File

@ -94,29 +94,32 @@ def cardFromValueSuit(value, suit):
elif suit == 's': return(value+38)
else: return(0)
def valueSuitFromCard(card):
""" Function to convert a card stored in the database (int 0-52) into value
and suit like 9s, 4c etc """
if card < 0 or card > 52 or not card:
return('')
else:
return( ['', '2h', '3h', '4h', '5h', '6h', '7h', '8h', '9h', 'Th', 'Jh', 'Qh', 'Kh', 'Ah'
suitFromCardList = ['', '2h', '3h', '4h', '5h', '6h', '7h', '8h', '9h', 'Th', 'Jh', 'Qh', 'Kh', 'Ah'
, '2d', '3d', '4d', '5d', '6d', '7d', '8d', '9d', 'Td', 'Jd', 'Qd', 'Kd', 'Ad'
, '2c', '3c', '4c', '5c', '6c', '7c', '8c', '9c', 'Tc', 'Jc', 'Qc', 'Kc', 'Ac'
, '2s', '3s', '4s', '5s', '6s', '7s', '8s', '9s', 'Ts', 'Js', 'Qs', 'Ks', 'As'
][card] )
]
def valueSuitFromCard(card):
""" Function to convert a card stored in the database (int 0-52) into value
and suit like 9s, 4c etc """
global suitFromCardList
if card < 0 or card > 52 or not card:
return('')
else:
return suitFromCardList[card]
def encodeCard(cardString):
"""Take a card string (Ah) and convert it to the db card code (1)."""
try:
return {'2h': 1, '3h': 2, '4h': 3, '5h': 4, '6h': 5, '7h': 6, '8h': 7, '9h': 8, 'Th': 9, 'Jh': 10, 'Qh': 11, 'Kh': 12, 'Ah': 13,
encodeCardList = {'2h': 1, '3h': 2, '4h': 3, '5h': 4, '6h': 5, '7h': 6, '8h': 7, '9h': 8, 'Th': 9, 'Jh': 10, 'Qh': 11, 'Kh': 12, 'Ah': 13,
'2d': 14, '3d': 15, '4d': 16, '5d': 17, '6d': 18, '7d': 19, '8d': 20, '9d': 21, 'Td': 22, 'Jd': 23, 'Qd': 24, 'Kd': 25, 'Ad': 26,
'2c': 27, '3c': 28, '4c': 29, '5c': 30, '6c': 31, '7c': 32, '8c': 33, '9c': 34, 'Tc': 35, 'Jc': 36, 'Qc': 37, 'Kc': 38, 'Ac': 39,
'2s': 40, '3s': 41, '4s': 42, '5s': 43, '6s': 44, '7s': 45, '8s': 46, '9s': 47, 'Ts': 48, 'Js': 49, 'Qs': 50, 'Ks': 51, 'As': 52,
' ': 0
}[cardString]
except:
return 0 # everything that isn't known is an unknown!
}
def encodeCard(cardString):
"""Take a card string (Ah) and convert it to the db card code (1)."""
global encodeCardList
if cardString not in encodeCardList: return 0
return encodeCardList[cardString]
if __name__ == '__main__':
print "fpdb card encoding(same as pokersource)"

View File

@ -27,10 +27,11 @@ Create and manage the database objects.
import sys
import traceback
from datetime import datetime, date, time, timedelta
from time import time, strftime
from time import time, strftime, sleep
import string
import re
import logging
import Queue
# pyGTK modules
@ -67,7 +68,7 @@ class Database:
, {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09
, {'tab':'HandsPlayers', 'col':'handId', 'drop':0} # not needed, handled by fk
, {'tab':'HandsPlayers', 'col':'playerId', 'drop':0} # not needed, handled by fk
, {'tab':'HandsPlayers', 'col':'tourneysTypeId', 'drop':0}
, {'tab':'HandsPlayers', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}
]
@ -181,57 +182,24 @@ class Database:
else:
self.sql = sql
self.pcache = None # PlayerId cache
self.cachemiss = 0 # Delete me later - using to count player cache misses
self.cachehit = 0 # Delete me later - using to count player cache hits
# config while trying out new hudcache mechanism
self.use_date_in_hudcache = True
# To add to config:
self.hud_session_gap = 30 # Gap (minutes) between hands that indicates a change of session
# (hands every 2 mins for 1 hour = one session, if followed
# by a 40 minute gap and then more hands on same table that is
# a new session)
self.hud_style = 'T' # A=All-time
# S=Session
# T=timed (last n days)
# Future values may also include:
# H=Hands (last n hands)
self.hud_hands = 2000 # Max number of hands from each player to use for hud stats
self.hud_days = 30 # Max number of days from each player to use for hud stats
#self.hud_hero_style = 'T' # Duplicate set of vars just for hero - not used yet.
#self.hud_hero_hands = 2000 # Idea is that you might want all-time stats for others
#self.hud_hero_days = 30 # but last T days or last H hands for yourself
self.hud_hero_style = 'T' # Duplicate set of vars just for hero
self.hud_hero_hands = 2000
self.hud_hero_days = 30
# vars for hand ids or dates fetched according to above config:
self.hand_1day_ago = 0 # max hand id more than 24 hrs earlier than now
self.date_ndays_ago = 'd000000' # date N days ago ('d' + YYMMDD)
self.date_nhands_ago = {} # dates N hands ago per player - not used yet
self.cursor = self.fdb.cursor
if self.fdb.wrongDbVersion == False:
# self.hand_1day_ago used to fetch stats for current session (i.e. if hud_style = 'S')
self.hand_1day_ago = 0
self.cursor.execute(self.sql.query['get_hand_1day_ago'])
row = self.cursor.fetchone()
if row and row[0]:
self.hand_1day_ago = row[0]
#print "hand 1day ago =", self.hand_1day_ago
# self.date_ndays_ago used if hud_style = 'T'
d = timedelta(days=self.hud_days)
now = datetime.utcnow() - d
self.date_ndays_ago = "d%02d%02d%02d" % (now.year-2000, now.month, now.day)
# self.hand_nhands_ago is used for fetching stats for last n hands (hud_style = 'H')
# This option not used yet
self.hand_nhands_ago = 0
# should use aggregated version of query if appropriate
self.cursor.execute(self.sql.query['get_hand_nhands_ago'], (self.hud_hands,self.hud_hands))
row = self.cursor.fetchone()
if row and row[0]:
self.hand_nhands_ago = row[0]
print "hand n hands ago =", self.hand_nhands_ago
#self.cursor.execute(self.sql.query['get_table_name'], (hand_id, ))
#row = self.cursor.fetchone()
else:
print "Bailing on DB query, not sure it exists yet"
self.saveActions = False if self.import_options['saveActions'] == False else True
self.connection.rollback() # make sure any locks taken so far are released
@ -364,22 +332,67 @@ class Database:
winners[row[0]] = row[1]
return winners
def get_stats_from_hand(self, hand, aggregate = False):
if self.hud_style == 'S':
def init_hud_stat_vars(self, hud_days):
"""Initialise variables used by Hud to fetch stats."""
try:
# self.hand_1day_ago used to fetch stats for current session (i.e. if hud_style = 'S')
self.hand_1day_ago = 1
c = self.get_cursor()
c.execute(self.sql.query['get_hand_1day_ago'])
row = c.fetchone()
if row and row[0]:
self.hand_1day_ago = row[0]
#print "hand 1day ago =", self.hand_1day_ago
# self.date_ndays_ago used if hud_style = 'T'
d = timedelta(days=hud_days)
now = datetime.utcnow() - d
self.date_ndays_ago = "d%02d%02d%02d" % (now.year-2000, now.month, now.day)
except:
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
def init_player_hud_stat_vars(self, playerid):
# not sure if this is workable, to be continued ...
try:
# self.date_nhands_ago is used for fetching stats for last n hands (hud_style = 'H')
# This option not used yet - needs to be called for each player :-(
self.date_nhands_ago[str(playerid)] = 'd000000'
# should use aggregated version of query if appropriate
c.execute(self.sql.query['get_date_nhands_ago'], (self.hud_hands, playerid))
row = c.fetchone()
if row and row[0]:
self.date_nhands_ago[str(playerid)] = row[0]
c.close()
print "date n hands ago = " + self.date_nhands_ago[str(playerid)] + "(playerid "+str(playerid)+")"
except:
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
def get_stats_from_hand(self, hand, aggregate = False, hud_style = 'A', agg_bb_mult = 100):
if hud_style == 'S':
return( self.get_stats_from_hand_session(hand) )
else: # self.hud_style == A
else: # hud_style == A
if hud_style == 'T':
stylekey = self.date_ndays_ago
#elif hud_style == 'H':
# stylekey = date_nhands_ago needs array by player here ...
else: # assume A (all-time)
stylekey = '0000000' # all stylekey values should be higher than this
if aggregate:
query = 'get_stats_from_hand_aggregated'
subs = (hand, stylekey, agg_bb_mult, agg_bb_mult)
else:
query = 'get_stats_from_hand'
if self.hud_style == 'T':
stylekey = self.date_ndays_ago
else: # assume A (all-time)
stylekey = '0000000' # all stylekey values should be higher than this
subs = (hand, stylekey)
subs = (hand, hand, stylekey)
#print "get stats: hud style =", self.hud_style, "subs =", subs
#print "get stats: hud style =", hud_style, "query =", query, "subs =", subs
c = self.connection.cursor()
# now get the stats
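A small illustration (mine, not from the patch) of the stylekey scheme used above: the 'T' style key is just 'd' + YYMMDD, so a plain string comparison works because every dated key sorts above the all-time key '0000000'.
from datetime import datetime, timedelta

hud_days = 30
cutoff = datetime.utcnow() - timedelta(days=hud_days)
stylekey = "d%02d%02d%02d" % (cutoff.year - 2000, cutoff.month, cutoff.day)
print stylekey               # e.g. 'd090710' for a cutoff of 2009-07-10
print stylekey > '0000000'   # True - any dated key sorts above the all-time key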
@ -398,17 +411,14 @@ class Database:
# uses query on handsplayers instead of hudcache to get stats on just this session
def get_stats_from_hand_session(self, hand):
if self.hud_style == 'S':
query = self.sql.query['get_stats_from_hand_session']
if self.db_server == 'mysql':
query = query.replace("<signed>", 'signed ')
else:
query = query.replace("<signed>", '')
else: # self.hud_style == A
return None
query = self.sql.query['get_stats_from_hand_session']
if self.db_server == 'mysql':
query = query.replace("<signed>", 'signed ')
else:
query = query.replace("<signed>", '')
subs = (self.hand_1day_ago, hand)
c = self.connection.cursor()
c = self.get_cursor()
# now get the stats
#print "sess_stats: subs =", subs, "subs[0] =", subs[0]
@ -442,7 +452,7 @@ class Database:
def get_player_id(self, config, site, player_name):
c = self.connection.cursor()
c.execute(self.sql.query['get_player_id'], {'player': player_name, 'site': site})
c.execute(self.sql.query['get_player_id'], (player_name, site))
row = c.fetchone()
if row:
return row[0]
@ -469,23 +479,19 @@ class Database:
,action_types, allIns, action_amounts, actionNos, hudImportData, maxSeats, tableName
,seatNos):
try:
fpdb_simple.fillCardArrays(len(names), base, category, card_values, card_suits)
fpdb_simple.fillCardArrays(len(names), base, category, card_values, card_suits)
hands_id = self.storeHands(self.backend, site_hand_no, gametype_id
,hand_start_time, names, tableName, maxSeats, hudImportData
,(None, None, None, None, None), (None, None, None, None, None))
hands_id = self.storeHands(self.backend, site_hand_no, gametype_id
,hand_start_time, names, tableName, maxSeats, hudImportData
,(None, None, None, None, None), (None, None, None, None, None))
#print "before calling store_hands_players_stud, antes:", antes
hands_players_ids = self.store_hands_players_stud(self.backend, hands_id, player_ids
,start_cashes, antes, card_values
,card_suits, winnings, rakes, seatNos)
#print "before calling store_hands_players_stud, antes:", antes
hands_players_ids = self.store_hands_players_stud(self.backend, hands_id, player_ids
,start_cashes, antes, card_values
,card_suits, winnings, rakes, seatNos)
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametype_id, hand_start_time, player_ids, hudImportData)
except:
print "ring_stud error: " + str(sys.exc_value) # in case exception doesn't get printed
raise fpdb_simple.FpdbError("ring_stud error: " + str(sys.exc_value))
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametype_id, hand_start_time, player_ids, hudImportData)
return hands_id
#end def ring_stud
@ -496,30 +502,26 @@ class Database:
,action_amounts, actionNos, hudImportData, maxSeats, tableName, seatNos):
"""stores a holdem/omaha hand into the database"""
try:
t0 = time()
#print "in ring_holdem_omaha"
fpdb_simple.fillCardArrays(len(names), base, category, card_values, card_suits)
t1 = time()
fpdb_simple.fill_board_cards(board_values, board_suits)
t2 = time()
t0 = time()
#print "in ring_holdem_omaha"
fpdb_simple.fillCardArrays(len(names), base, category, card_values, card_suits)
t1 = time()
fpdb_simple.fill_board_cards(board_values, board_suits)
t2 = time()
hands_id = self.storeHands(self.backend, site_hand_no, gametype_id
,hand_start_time, names, tableName, maxSeats
,hudImportData, board_values, board_suits)
#TEMPORARY CALL! - Just until all functions are migrated
t3 = time()
hands_players_ids = self.store_hands_players_holdem_omaha(
self.backend, category, hands_id, player_ids, start_cashes
, positions, card_values, card_suits, winnings, rakes, seatNos, hudImportData)
t4 = time()
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametype_id, hand_start_time, player_ids, hudImportData)
t5 = time()
#print "fills=(%4.3f) saves=(%4.3f,%4.3f,%4.3f)" % (t2-t0, t3-t2, t4-t3, t5-t4)
except:
print "ring_holdem_omaha error: " + str(sys.exc_value) # in case exception doesn't get printed
raise fpdb_simple.FpdbError("ring_holdem_omaha error: " + str(sys.exc_value))
hands_id = self.storeHands(self.backend, site_hand_no, gametype_id
,hand_start_time, names, tableName, maxSeats
,hudImportData, board_values, board_suits)
#TEMPORARY CALL! - Just until all functions are migrated
t3 = time()
hands_players_ids = self.store_hands_players_holdem_omaha(
self.backend, category, hands_id, player_ids, start_cashes
, positions, card_values, card_suits, winnings, rakes, seatNos, hudImportData)
t4 = time()
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametype_id, hand_start_time, player_ids, hudImportData)
t5 = time()
#print "fills=(%4.3f) saves=(%4.3f,%4.3f,%4.3f)" % (t2-t0, t3-t2, t4-t3, t5-t4)
return hands_id
#end def ring_holdem_omaha
@ -532,28 +534,24 @@ class Database:
,actionNos, hudImportData, maxSeats, tableName, seatNos):
"""stores a tourney holdem/omaha hand into the database"""
try:
fpdb_simple.fillCardArrays(len(names), base, category, card_values, card_suits)
fpdb_simple.fill_board_cards(board_values, board_suits)
fpdb_simple.fillCardArrays(len(names), base, category, card_values, card_suits)
fpdb_simple.fill_board_cards(board_values, board_suits)
tourney_id = self.store_tourneys(tourneyTypeId, siteTourneyNo, entries, prizepool, tourney_start)
tourneys_players_ids = self.store_tourneys_players(tourney_id, player_ids, payin_amounts, ranks, winnings)
tourney_id = self.store_tourneys(tourneyTypeId, siteTourneyNo, entries, prizepool, tourney_start)
tourneys_players_ids = self.store_tourneys_players(tourney_id, player_ids, payin_amounts, ranks, winnings)
hands_id = self.storeHands(self.backend, site_hand_no, gametype_id
,hand_start_time, names, tableName, maxSeats
,hudImportData, board_values, board_suits)
hands_id = self.storeHands(self.backend, site_hand_no, gametype_id
,hand_start_time, names, tableName, maxSeats
,hudImportData, board_values, board_suits)
hands_players_ids = self.store_hands_players_holdem_omaha_tourney(
self.backend, category, hands_id, player_ids, start_cashes, positions
, card_values, card_suits, winnings, rakes, seatNos, tourneys_players_ids
, hudImportData)
hands_players_ids = self.store_hands_players_holdem_omaha_tourney(
self.backend, category, hands_id, player_ids, start_cashes, positions
, card_values, card_suits, winnings, rakes, seatNos, tourneys_players_ids
, hudImportData)
#print "tourney holdem, backend=%d" % backend
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametype_id, hand_start_time, player_ids, hudImportData)
except:
print "tourney_holdem_omaha error: " + str(sys.exc_value) # in case exception doesn't get printed
raise fpdb_simple.FpdbError("tourney_holdem_omaha error: " + str(sys.exc_value))
#print "tourney holdem, backend=%d" % backend
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametype_id, hand_start_time, player_ids, hudImportData)
return hands_id
#end def tourney_holdem_omaha
@ -565,26 +563,22 @@ class Database:
,actionNos, hudImportData, maxSeats, tableName, seatNos):
#stores a tourney stud/razz hand into the database
try:
fpdb_simple.fillCardArrays(len(names), base, category, cardValues, cardSuits)
fpdb_simple.fillCardArrays(len(names), base, category, cardValues, cardSuits)
tourney_id = self.store_tourneys(tourneyTypeId, siteTourneyNo, entries, prizepool, tourneyStartTime)
tourney_id = self.store_tourneys(tourneyTypeId, siteTourneyNo, entries, prizepool, tourneyStartTime)
tourneys_players_ids = self.store_tourneys_players(tourney_id, playerIds, payin_amounts, ranks, winnings)
tourneys_players_ids = self.store_tourneys_players(tourney_id, playerIds, payin_amounts, ranks, winnings)
hands_id = self.storeHands( self.backend, siteHandNo, gametypeId
, handStartTime, names, tableName, maxSeats
, hudImportData, board_values, board_suits )
hands_id = self.storeHands( self.backend, siteHandNo, gametypeId
, handStartTime, names, tableName, maxSeats
, hudImportData, board_values, board_suits )
hands_players_ids = self.store_hands_players_stud_tourney(self.backend, hands_id
, playerIds, startCashes, antes, cardValues, cardSuits
, winnings, rakes, seatNos, tourneys_players_ids)
hands_players_ids = self.store_hands_players_stud_tourney(self.backend, hands_id
, playerIds, startCashes, antes, cardValues, cardSuits
, winnings, rakes, seatNos, tourneys_players_ids)
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametypeId, hand_start_time, playerIds, hudImportData)
except:
print "tourney_stud error: " + str(sys.exc_value) # in case exception doesn't get printed
raise fpdb_simple.FpdbError("tourney_stud error: " + str(sys.exc_value))
if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
self.storeHudCache(self.backend, base, category, gametypeId, hand_start_time, playerIds, hudImportData)
return hands_id
#end def tourney_stud
@ -613,7 +607,7 @@ class Database:
"AND referenced_column_name = %s ",
(fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
cons = c.fetchone()
print "preparebulk find fk: cons=", cons
#print "preparebulk find fk: cons=", cons
if cons:
print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
try:
@ -740,8 +734,8 @@ class Database:
if self.backend == self.MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
c.execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
s = "alter table %s add index %s(%s)" % (idx['tab'],idx['col'],idx['col'])
c.execute(s)
except:
print " create fk failed: " + str(sys.exc_info())
elif self.backend == self.PGSQL:
@ -749,9 +743,8 @@ class Database:
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
c.execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
c.execute(s)
except:
print " create index failed: " + str(sys.exc_info())
else:
@ -820,9 +813,11 @@ class Database:
self.fillDefaultData()
self.commit()
except:
print "Error creating tables: ", str(sys.exc_value)
#print "Error creating tables: ", str(sys.exc_value)
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error creating tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
self.rollback()
raise fpdb_simple.FpdbError( "Error creating tables " + str(sys.exc_value) )
raise
#end def disconnect
def drop_tables(self):
@ -852,8 +847,9 @@ class Database:
self.commit()
except:
print "Error dropping tables: " + str(sys.exc_value)
raise fpdb_simple.FpdbError( "Error dropping tables " + str(sys.exc_value) )
print "***Error dropping tables: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
self.rollback()
raise
#end def drop_tables
def createAllIndexes(self):
@ -866,20 +862,18 @@ class Database:
if self.backend == self.MYSQL_INNODB:
print "creating mysql index ", idx['tab'], idx['col']
try:
self.get_cursor().execute( "alter table %s add index %s(%s)"
, (idx['tab'],idx['col'],idx['col']) )
s = "create index %s on %s(%s)" % (idx['col'],idx['tab'],idx['col'])
self.get_cursor().execute(s)
except:
pass
print " create idx failed: " + str(sys.exc_info())
elif self.backend == self.PGSQL:
# mod to use tab_col for index name?
print "creating pg index ", idx['tab'], idx['col']
try:
print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
self.get_cursor().execute( "create index %s_%s_idx on %s(%s)"
% (idx['tab'], idx['col'], idx['tab'], idx['col']) )
s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
self.get_cursor().execute(s)
except:
print " ERROR! :-("
pass
print " create idx failed: " + str(sys.exc_info())
else:
print "Only MySQL and Postgres supported so far"
return -1
@ -926,7 +920,10 @@ class Database:
c.execute("INSERT INTO Sites (name,currency) VALUES ('PokerStars', 'USD')")
c.execute("INSERT INTO Sites (name,currency) VALUES ('Everleaf', 'USD')")
c.execute("INSERT INTO Sites (name,currency) VALUES ('Win2day', 'USD')")
c.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
if self.backend == self.SQLITE:
c.execute("INSERT INTO TourneyTypes VALUES (NULL, 1, 0, 0, 0, 0);")
else:
c.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);")
#c.execute("""INSERT INTO TourneyTypes
# (siteId,buyin,fee,knockout,rebuyOrAddon) VALUES
# (1,0,0,0,?)""",(False,) )
@ -979,6 +976,35 @@ class Database:
print "Error during fdb.lock_for_insert:", str(sys.exc_value)
#end def lock_for_insert
def getSqlPlayerIDs(self, pnames, siteid):
result = {}
if(self.pcache == None):
self.pcache = LambdaDict(lambda key:self.insertPlayer(key, siteid))
for player in pnames:
result[player] = self.pcache[player]
# NOTE: Using the LambdaDict does the same thing as:
#if player in self.pcache:
# #print "DEBUG: cachehit"
# pass
#else:
# self.pcache[player] = self.insertPlayer(player, siteid)
#result[player] = self.pcache[player]
return result
def insertPlayer(self, name, site_id):
result = None
c = self.get_cursor()
c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
tmp=c.fetchall()
if (len(tmp)==0): #new player
c.execute ("INSERT INTO Players (name, siteId) VALUES (%s, %s)", (name, site_id))
#Get last id might be faster here.
c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
tmp=c.fetchall()
return tmp[0][0]
def store_the_hand(self, h):
"""Take a HandToWrite object and store it in the db"""
@ -994,7 +1020,7 @@ class Database:
result = self.tourney_holdem_omaha(
h.config, h.settings, h.base, h.category, h.siteTourneyNo, h.buyin
, h.fee, h.knockout, h.entries, h.prizepool, h.tourneyStartTime
, h.payin_amounts, h.ranks, h.tourneyTypeId, h.siteID, h.siteHandNo
, payin_amounts, ranks, h.tourneyTypeId, h.siteID, h.siteHandNo
, h.gametypeID, h.handStartTime, h.names, h.playerIDs, h.startCashes
, h.positions, h.cardValues, h.cardSuits, h.boardValues, h.boardSuits
, h.winnings, h.rakes, h.actionTypes, h.allIns, h.actionAmounts
@ -1003,13 +1029,13 @@ class Database:
result = self.tourney_stud(
h.config, h.settings, h.base, h.category, h.siteTourneyNo
, h.buyin, h.fee, h.knockout, h.entries, h.prizepool, h.tourneyStartTime
, h.payin_amounts, h.ranks, h.tourneyTypeId, h.siteID, h.siteHandNo
, payin_amounts, ranks, h.tourneyTypeId, h.siteID, h.siteHandNo
, h.gametypeID, h.handStartTime, h.names, h.playerIDs, h.startCashes
, h.antes, h.cardValues, h.cardSuits, h.winnings, h.rakes, h.actionTypes
, h.allIns, h.actionAmounts, h.actionNos, h.hudImportData, h.maxSeats
, h.tableName, h.seatNos)
else:
raise fpself.simple.Fpself.rror("unrecognised category")
raise fpdb_simple.FpdbError("unrecognised category")
else:
if h.base == "hold":
result = self.ring_holdem_omaha(
@ -1027,11 +1053,13 @@ class Database:
, h.actionAmounts, h.actionNos, h.hudImportData, h.maxSeats, h.tableName
, h.seatNos)
else:
raise fpself.simple.Fpself.rror ("unrecognised category")
self.commit()
raise fpdb_simple.FpdbError("unrecognised category")
except:
print "Error storing hand: " + str(sys.exc_value)
self.rollback()
# re-raise the exception so that the calling routine can decide what to do:
# (e.g. a write thread might try again)
raise
return result
#end def store_the_hand
@ -1576,8 +1604,85 @@ class Database:
#end def store_tourneys_players
# read HandToWrite objects from q and insert into database
def insert_queue_hands(self, q, maxwait=10, commitEachHand=True):
n,fails,maxTries,firstWait = 0,0,4,0.1
sendFinal = False
t0 = time()
while True:
try:
h = q.get(True) # (True,maxWait) has probs if 1st part of import is all dups
except Queue.Empty:
# Queue.Empty is raised if q stayed empty for the get() timeout
# (testing q.empty() instead is also possible - no point in checking for both)
# maybe increment a counter and only break after a few times?
# could also test threading.active_count() or look through threading.enumerate()
# so break immediately if no threads, but count up to X exceptions if a writer
# thread is still alive???
print "queue empty too long - writer stopping ..."
break
except:
print "writer stopping, error reading queue: " + str(sys.exc_info())
break
#print "got hand", str(h.get_finished())
tries,wait,again = 0,firstWait,True
while again:
try:
again = False # set this immediately to avoid infinite loops!
if h.get_finished():
# all items on queue processed
sendFinal = True
else:
self.store_the_hand(h)
# optional commit, could be every hand / every N hands / every time a
# commit message received?? mark flag to indicate if commits outstanding
if commitEachHand:
self.commit()
n = n + 1
except:
#print "iqh store error", sys.exc_value # debug
self.rollback()
if re.search('deadlock', str(sys.exc_info()[1]), re.I):
# deadlocks only a problem if hudcache is being updated
tries = tries + 1
if tries < maxTries and wait < 5: # wait < 5 just to make sure
print "deadlock detected - trying again ..."
sleep(wait)
wait = wait + wait
again = True
else:
print "too many deadlocks - failed to store hand " + h.get_siteHandNo()
if not again:
fails = fails + 1
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error storing hand: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
# finished trying to store hand
# always reduce q count, whether or not this hand was saved ok
q.task_done()
# while True loop
self.commit()
if sendFinal:
q.task_done()
print "db writer finished: stored %d hands (%d fails) in %.1f seconds" % (n, fails, time()-t0)
# end def insert_queue_hands():
def send_finish_msg(self, q):
try:
h = HandToWrite(True)
q.put(h)
except:
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error sending finish: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
# end def send_finish_msg():
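For context, a hypothetical wiring sketch of how a producer thread and insert_queue_hands/send_finish_msg above fit together; db (an open Database) and parsed_hands (a list of HandToWrite objects) are assumptions, not names from this commit.
import Queue, threading

q = Queue.Queue()
writer = threading.Thread(target=db.insert_queue_hands, args=(q,))
writer.setDaemon(True)       # don't let a stuck writer block interpreter exit
writer.start()

for h in parsed_hands:       # HandToWrite objects produced by the parser (assumed)
    q.put(h)
db.send_finish_msg(q)        # enqueues the HandToWrite(finished=True) sentinel
q.join()                     # insert_queue_hands() calls q.task_done() for each item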
# Class used to hold all the data needed to write a hand to the db
# mainParser() in fpdb_parse_logic.py creates one of these and then passes it to
# self.insert_queue_hands()
class HandToWrite:
@ -1676,49 +1781,6 @@ class HandToWrite:
raise
# end def set_hand
def set_ring_holdem_omaha( self, config, settings, base, category, siteHandNo
, gametypeID, handStartTime, names, playerIDs
, startCashes, positions, cardValues, cardSuits
, boardValues, boardSuits, winnings, rakes
, actionTypes, allIns, actionAmounts, actionNos
, hudImportData, maxSeats, tableName, seatNos ):
self.config = config
self.settings = settings
self.base = base
self.category = category
self.siteHandNo = siteHandNo
self.gametypeID = gametypeID
self.handStartTime = handStartTime
self.names = names
self.playerIDs = playerIDs
self.startCashes = startCashes
self.positions = positions
self.cardValues = cardValues
self.cardSuits = cardSuits
self.boardValues = boardValues
self.boardSuits = boardSuits
self.winnings = winnings
self.rakes = rakes
self.actionTypes = actionTypes
self.allIns = allIns
self.actionAmounts = actionAmounts
self.actionNos = actionNos
self.hudImportData = hudImportData
self.maxSeats = maxSeats
self.tableName = tableName
self.seatNos = seatNos
# end def set_ring_holdem_omaha
def send_ring_holdem_omaha(self, db):
result = db.ring_holdem_omaha(
self.config, self.settings, self.base, self.category, self.siteHandNo
, self.gametypeID, self.handStartTime, self.names, self.playerIDs
, self.startCashes, self.positions, self.cardValues, self.cardSuits
, self.boardValues, self.boardSuits, self.winnings, self.rakes
, self.actionTypes, self.allIns, self.actionAmounts, self.actionNos
, self.hudImportData, self.maxSeats, self.tableName, self.seatNos)
# end def send_ring_holdem_omaha
def get_finished(self):
return( self.finished )
# end def get_finished
@ -1738,6 +1800,8 @@ if __name__=="__main__":
print "database connection object = ", db_connection.connection
print "database type = ", db_connection.type
db_connection.recreate_tables()
h = db_connection.get_last_hand()
print "last hand = ", h
@ -1759,3 +1823,17 @@ if __name__=="__main__":
print "press enter to continue"
sys.stdin.readline()
#Code borrowed from http://push.cx/2008/caching-dictionaries-in-python-vs-ruby
class LambdaDict(dict):
def __init__(self, l):
super(LambdaDict, self).__init__()
self.l = l
def __getitem__(self, key):
if key in self:
return self.get(key)
else:
self.__setitem__(key, self.l(key))
return self.get(key)
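A tiny demo (mine, not from the commit) of the caching behaviour: the factory only runs on a miss, which is what keeps the pcache player-id lookups above cheap for repeat players.
calls = []
def factory(key):
    calls.append(key)
    return key.upper()

cache = LambdaDict(factory)
print cache['villain1']    # 'VILLAIN1' - factory called on the miss
print cache['villain1']    # 'VILLAIN1' - served from the dict, factory not called again
print calls                # ['villain1']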

View File

@ -15,6 +15,9 @@
#In the "official" distribution you can find the license in
#agpl-3.0.txt in the docs folder of the package.
#fpdb modules
import Card
class DerivedStats():
def __init__(self, hand):
self.hand = hand
@ -88,6 +91,70 @@ class DerivedStats():
self.street3CheckCallRaiseDone = 0
self.street4CheckCallRaiseChance = 0
self.street4CheckCallRaiseDone = 0
self.hands = {}
self.handsplayers = {}
def getStats():
pass
def getStats(self, hand):
for player in hand.players:
self.handsplayers[player[1]] = {}
self.assembleHands(self.hand)
self.assembleHandsPlayers(self.hand)
print "hands =", self.hands
print "handsplayers =", self.handsplayers
def assembleHands(self, hand):
self.hands['tableName'] = hand.tablename
self.hands['siteHandNo'] = hand.handid
self.hands['gametypeId'] = None # Leave None, handled later after checking db
self.hands['handStart'] = hand.starttime # format this!
self.hands['importTime'] = None
self.hands['seats'] = self.countPlayers(hand)
self.hands['maxSeats'] = hand.maxseats
self.hands['boardcard1'] = None
self.hands['boardcard2'] = None
self.hands['boardcard3'] = None
self.hands['boardcard4'] = None
self.hands['boardcard5'] = None
boardCard = 1
for street in hand.communityStreets:
for card in hand.board[street]:
self.hands['boardcard%s' % str(boardCard)] = Card.encodeCard(card)
boardCard += 1
def assembleHandsPlayers(self, hand):
self.vpip(self.hand)
for i, street in enumerate(hand.actionStreets[1:]):
self.aggr(self.hand, i)
def vpip(self, hand):
vpipers = set()
for act in hand.actions[hand.actionStreets[1]]:
if act[1] in ('calls','bets', 'raises'):
vpipers.add(act[0])
for player in hand.players:
if player[1] in vpipers:
self.handsplayers[player[1]]['vpip'] = True
else:
self.handsplayers[player[1]]['vpip'] = False
self.hands['playersVpi'] = len(vpipers)
def aggr(self, hand, i):
aggrers = set()
for act in hand.actions[hand.actionStreets[i]]:
if act[1] in ('completes', 'raises'):
aggrers.add(act[0])
for player in hand.players:
if player[1] in aggrers:
self.handsplayers[player[1]]['street%sAggr' % i] = True
else:
self.handsplayers[player[1]]['street%sAggr' % i] = False
def countPlayers(self, hand):
pass

View File

@ -31,10 +31,11 @@ import Configuration
import string
class GuiAutoImport (threading.Thread):
def __init__(self, settings, config):
def __init__(self, settings, config, sql):
"""Constructor for GuiAutoImport"""
self.settings=settings
self.config=config
self.sql = sql
imp = self.config.get_import_parameters()
@ -44,7 +45,7 @@ class GuiAutoImport (threading.Thread):
self.input_settings = {}
self.pipe_to_hud = None
self.importer = fpdb_import.Importer(self,self.settings, self.config)
self.importer = fpdb_import.Importer(self, self.settings, self.config, self.sql)
self.importer.setCallHud(True)
self.importer.setMinPrint(settings['minPrint'])
self.importer.setQuiet(False)
@ -159,29 +160,33 @@ class GuiAutoImport (threading.Thread):
# - Ideally we want to release the lock if the auto-import is killed by some
# kind of exception - is this possible?
if self.settings['global_lock'].acquire(False): # returns false immediately if lock not acquired
print "\nGlobal lock taken ..."
self.doAutoImportBool = True
widget.set_label(u' _Stop Autoimport ')
if self.pipe_to_hud is None:
if os.name == 'nt':
command = "python HUD_main.py" + " " + self.settings['cl_options']
bs = 0 # windows is not happy with line buffering here
self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
universal_newlines=True)
else:
command = os.path.join(sys.path[0], 'HUD_main.py')
cl = [command, ] + string.split(self.settings['cl_options'])
self.pipe_to_hud = subprocess.Popen(cl, bufsize = 1, stdin = subprocess.PIPE,
universal_newlines=True)
try:
print "\nGlobal lock taken ..."
self.doAutoImportBool = True
widget.set_label(u' _Stop Autoimport ')
if self.pipe_to_hud is None:
if os.name == 'nt':
command = "python HUD_main.py" + " " + self.settings['cl_options']
bs = 0 # windows is not happy with line buffering here
self.pipe_to_hud = subprocess.Popen(command, bufsize = bs, stdin = subprocess.PIPE,
universal_newlines=True)
else:
command = os.path.join(sys.path[0], 'HUD_main.py')
cl = [command, ] + string.split(self.settings['cl_options'])
self.pipe_to_hud = subprocess.Popen(cl, bufsize = 1, stdin = subprocess.PIPE,
universal_newlines=True)
# Add directories to importer object.
for site in self.input_settings:
self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
self.do_import()
# Add directories to importer object.
for site in self.input_settings:
self.importer.addImportDirectory(self.input_settings[site][0], True, site, self.input_settings[site][1])
print "Adding import directories - Site: " + site + " dir: "+ str(self.input_settings[site][0])
self.do_import()
interval=int(self.intervalEntry.get_text())
gobject.timeout_add(interval*1000, self.do_import)
interval=int(self.intervalEntry.get_text())
gobject.timeout_add(interval*1000, self.do_import)
except:
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
else:
print "auto-import aborted - global lock not available"
else: # toggled off

View File

@ -21,6 +21,7 @@ import os
import sys
from time import time
from optparse import OptionParser
import traceback
# pyGTK modules
import pygtk
@ -34,6 +35,9 @@ import Configuration
class GuiBulkImport():
# CONFIGURATION - update these as preferred:
allowThreads = True # set to True to try out the threads field
# not used
def import_dir(self):
"""imports a directory, non-recursive. todo: move this to fpdb_import so CLI can use it"""
@ -67,12 +71,19 @@ class GuiBulkImport():
self.importer.setHandsInDB(self.n_hands_in_db)
cb_model = self.cb_dropindexes.get_model()
cb_index = self.cb_dropindexes.get_active()
cb_hmodel = self.cb_drophudcache.get_model()
cb_hindex = self.cb_drophudcache.get_active()
self.lab_info.set_text("Importing") # doesn't display :-(
if cb_index:
self.importer.setDropIndexes(cb_model[cb_index][0])
else:
self.importer.setDropIndexes("auto")
if cb_hindex:
self.importer.setDropHudCache(cb_hmodel[cb_hindex][0])
else:
self.importer.setDropHudCache("auto")
sitename = self.cbfilter.get_model()[self.cbfilter.get_active()][0]
self.lab_info.set_text("Importing")
self.importer.addBulkImportImportFileOrDir(self.inputFile, site = sitename)
self.importer.setCallHud(False)
@ -81,14 +92,21 @@ class GuiBulkImport():
ttime = time() - starttime
if ttime == 0:
ttime = 1
print 'GuiBulkImport.load done: Stored: %d \tDuplicates: %d \tPartial: %d \tErrors: %d in %s seconds - %d/sec'\
% (stored, dups, partial, errs, ttime, stored / ttime)
print 'GuiBulkImport.load done: Stored: %d \tDuplicates: %d \tPartial: %d \tErrors: %d in %s seconds - %.0f/sec'\
% (stored, dups, partial, errs, ttime, (stored+0.0) / ttime)
self.importer.clearFileList()
if self.n_hands_in_db == 0 and stored > 0:
self.cb_dropindexes.set_sensitive(True)
self.cb_dropindexes.set_active(0)
self.lab_drop.set_sensitive(True)
self.cb_drophudcache.set_sensitive(True)
self.cb_drophudcache.set_active(0)
self.lab_hdrop.set_sensitive(True)
self.lab_info.set_text("Import finished")
except:
print "bulkimport.loadclicked error: "+str(sys.exc_value)
pass
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "***Error: "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
self.settings['global_lock'].release()
else:
print "bulk-import aborted - global lock not available"
@ -111,7 +129,7 @@ class GuiBulkImport():
self.chooser.show()
# Table widget to hold the settings
self.table = gtk.Table(rows = 3, columns = 5, homogeneous = False)
self.table = gtk.Table(rows = 5, columns = 5, homogeneous = False)
self.vbox.add(self.table)
self.table.show()
@ -126,6 +144,7 @@ class GuiBulkImport():
self.table.attach(self.lab_status, 1, 2, 0, 1, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_status.show()
self.lab_status.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_status.set_alignment(1.0, 0.5)
# spin button - status
status_adj = gtk.Adjustment(value=100, lower=0, upper=300, step_incr=10, page_incr=1, page_size=0) #not sure what upper value should be!
@ -137,15 +156,18 @@ class GuiBulkImport():
self.lab_threads = gtk.Label("Number of threads:")
self.table.attach(self.lab_threads, 3, 4, 0, 1, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_threads.show()
self.lab_threads.set_sensitive(False)
if not self.allowThreads:
self.lab_threads.set_sensitive(False)
self.lab_threads.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_threads.set_alignment(1.0, 0.5)
# spin button - threads
threads_adj = gtk.Adjustment(value=0, lower=0, upper=10, step_incr=1, page_incr=1, page_size=0) #not sure what upper value should be!
threads_adj = gtk.Adjustment(value=0, lower=0, upper=32, step_incr=1, page_incr=1, page_size=0) #not sure what upper value should be!
self.spin_threads = gtk.SpinButton(adjustment=threads_adj, climb_rate=0.0, digits=0)
self.table.attach(self.spin_threads, 4, 5, 0, 1, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.spin_threads, 4, 5, 0, 1, xpadding = 10, ypadding = 0, yoptions=gtk.SHRINK)
self.spin_threads.show()
self.spin_threads.set_sensitive(False)
if not self.allowThreads:
self.spin_threads.set_sensitive(False)
# checkbox - fail on error?
self.chk_fail = gtk.CheckButton('Fail on error')
@ -157,6 +179,7 @@ class GuiBulkImport():
self.table.attach(self.lab_hands, 1, 2, 1, 2, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_hands.show()
self.lab_hands.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_hands.set_alignment(1.0, 0.5)
# spin button - hands to import
hands_adj = gtk.Adjustment(value=0, lower=0, upper=10, step_incr=1, page_incr=1, page_size=0) #not sure what upper value should be!
@ -169,6 +192,7 @@ class GuiBulkImport():
self.table.attach(self.lab_drop, 3, 4, 1, 2, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_drop.show()
self.lab_drop.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_drop.set_alignment(1.0, 0.5)
# ComboBox - drop indexes
self.cb_dropindexes = gtk.combo_box_new_text()
@ -181,9 +205,10 @@ class GuiBulkImport():
# label - filter
self.lab_filter = gtk.Label("Site filter:")
self.table.attach(self.lab_filter, 2, 3, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.lab_filter, 1, 2, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_filter.show()
self.lab_filter.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_filter.set_alignment(1.0, 0.5)
# ComboBox - filter
self.cbfilter = gtk.combo_box_new_text()
@ -191,21 +216,42 @@ class GuiBulkImport():
print w
self.cbfilter.append_text(w)
self.cbfilter.set_active(0)
self.table.attach(self.cbfilter, 3, 4, 2, 3, xpadding = 10, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.cbfilter, 2, 3, 2, 3, xpadding = 10, ypadding = 1, yoptions=gtk.SHRINK)
self.cbfilter.show()
# label - info
self.lab_info = gtk.Label()
self.table.attach(self.lab_info, 0, 4, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_info.show()
# label - drop hudcache
self.lab_hdrop = gtk.Label("Drop HudCache:")
self.table.attach(self.lab_hdrop, 3, 4, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_hdrop.show()
self.lab_hdrop.set_justify(gtk.JUSTIFY_RIGHT)
self.lab_hdrop.set_alignment(1.0, 0.5)
# ComboBox - drop hudcache
self.cb_drophudcache = gtk.combo_box_new_text()
self.cb_drophudcache.append_text('auto')
self.cb_drophudcache.append_text("don't drop")
self.cb_drophudcache.append_text('drop')
self.cb_drophudcache.set_active(0)
self.table.attach(self.cb_drophudcache, 4, 5, 2, 3, xpadding = 10, ypadding = 0, yoptions=gtk.SHRINK)
self.cb_drophudcache.show()
# button - Import
self.load_button = gtk.Button('Import') # todo: rename variables to import too
self.load_button.connect('clicked', self.load_clicked,
'Import clicked')
self.table.attach(self.load_button, 4, 5, 2, 3, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.table.attach(self.load_button, 2, 3, 4, 5, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.load_button.show()
# label - spacer (keeps rows 3 & 5 apart)
self.lab_spacer = gtk.Label()
self.table.attach(self.lab_spacer, 3, 5, 3, 4, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_spacer.show()
# label - info
self.lab_info = gtk.Label()
self.table.attach(self.lab_info, 3, 5, 4, 5, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
self.lab_info.show()
# see how many hands are in the db and adjust accordingly
tcursor = self.importer.database.cursor
tcursor.execute("Select count(1) from Hands")
@ -217,6 +263,9 @@ class GuiBulkImport():
self.cb_dropindexes.set_active(2)
self.cb_dropindexes.set_sensitive(False)
self.lab_drop.set_sensitive(False)
self.cb_drophudcache.set_active(2)
self.cb_drophudcache.set_sensitive(False)
self.lab_hdrop.set_sensitive(False)
def main(argv=None):
"""main can also be called in the python interpreter, by supplying the command line as the argument."""

View File

@ -210,7 +210,7 @@
<game cols="3" db="fpdb" game_name="holdem" rows="2" aux="mucked">
<stat click="tog_decorate" col="0" popup="default" row="0" stat_name="vpip" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="0" stat_name="pfr" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq_1" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq1" tip="tip1"> </stat>
<stat click="tog_decorate" col="0" popup="default" row="1" stat_name="n" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="1" stat_name="wtsd" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="1" stat_name="wmsd" tip="tip1"> </stat>
@ -219,7 +219,7 @@
<game cols="3" db="fpdb" game_name="razz" rows="2" aux="stud_mucked">
<stat click="tog_decorate" col="0" popup="default" row="0" stat_name="vpip" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="0" stat_name="pfr" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq_1" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq1" tip="tip1"> </stat>
<stat click="tog_decorate" col="0" popup="default" row="1" stat_name="n" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="1" stat_name="wtsd" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="1" stat_name="wmsd" tip="tip1"> </stat>
@ -228,7 +228,7 @@
<game cols="3" db="fpdb" game_name="omahahi" rows="2" aux="mucked">
<stat click="tog_decorate" col="0" popup="default" row="0" stat_name="vpip" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="0" stat_name="pfr" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq_1" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq1" tip="tip1"> </stat>
<stat click="tog_decorate" col="0" popup="default" row="1" stat_name="n" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="1" stat_name="wtsd" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="1" stat_name="wmsd" tip="tip1"> </stat>
@ -237,7 +237,7 @@
<game cols="3" db="fpdb" game_name="omahahilo" rows="2" aux="mucked">
<stat click="tog_decorate" col="0" popup="default" row="0" stat_name="vpip" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="0" stat_name="pfr" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq_1" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq1" tip="tip1"> </stat>
<stat click="tog_decorate" col="0" popup="default" row="1" stat_name="n" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="1" stat_name="wtsd" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="1" stat_name="wmsd" tip="tip1"> </stat>
@ -246,7 +246,7 @@
<game cols="3" db="fpdb" game_name="studhi" rows="2" aux="stud_mucked">
<stat click="tog_decorate" col="0" popup="default" row="0" stat_name="vpip" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="0" stat_name="pfr" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq_1" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq1" tip="tip1"> </stat>
<stat click="tog_decorate" col="0" popup="default" row="1" stat_name="n" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="1" stat_name="wtsd" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="1" stat_name="wmsd" tip="tip1"> </stat>
@ -255,7 +255,7 @@
<game cols="3" db="fpdb" game_name="studhilo" rows="2" aux="stud_mucked">
<stat click="tog_decorate" col="0" popup="default" row="0" stat_name="vpip" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="0" stat_name="pfr" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq_1" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="0" stat_name="ffreq1" tip="tip1"> </stat>
<stat click="tog_decorate" col="0" popup="default" row="1" stat_name="n" tip="tip1"> </stat>
<stat click="tog_decorate" col="1" popup="default" row="1" stat_name="wtsd" tip="tip1"> </stat>
<stat click="tog_decorate" col="2" popup="default" row="1" stat_name="wmsd" tip="tip1"> </stat>
@ -274,18 +274,18 @@
<pu_stat pu_stat_name="wmsd"> </pu_stat>
<pu_stat pu_stat_name="wtsd"> </pu_stat>
<pu_stat pu_stat_name="WMsF"> </pu_stat>
<pu_stat pu_stat_name="a_freq_1"> </pu_stat>
<pu_stat pu_stat_name="a_freq_2"> </pu_stat>
<pu_stat pu_stat_name="a_freq_3"> </pu_stat>
<pu_stat pu_stat_name="a_freq_4"> </pu_stat>
<pu_stat pu_stat_name="cb_1"> </pu_stat>
<pu_stat pu_stat_name="cb_2"> </pu_stat>
<pu_stat pu_stat_name="cb_3"> </pu_stat>
<pu_stat pu_stat_name="cb_4"> </pu_stat>
<pu_stat pu_stat_name="ffreq_1"> </pu_stat>
<pu_stat pu_stat_name="ffreq_2"> </pu_stat>
<pu_stat pu_stat_name="ffreq_3"> </pu_stat>
<pu_stat pu_stat_name="ffreq_4"> </pu_stat>
<pu_stat pu_stat_name="a_freq1"> </pu_stat>
<pu_stat pu_stat_name="a_freq2"> </pu_stat>
<pu_stat pu_stat_name="a_freq3"> </pu_stat>
<pu_stat pu_stat_name="a_freq4"> </pu_stat>
<pu_stat pu_stat_name="cb1"> </pu_stat>
<pu_stat pu_stat_name="cb2"> </pu_stat>
<pu_stat pu_stat_name="cb3"> </pu_stat>
<pu_stat pu_stat_name="cb4"> </pu_stat>
<pu_stat pu_stat_name="ffreq1"> </pu_stat>
<pu_stat pu_stat_name="ffreq2"> </pu_stat>
<pu_stat pu_stat_name="ffreq3"> </pu_stat>
<pu_stat pu_stat_name="ffreq4"> </pu_stat>
</pu>
</popup_windows>

View File

@ -31,6 +31,7 @@ Main for FreePokerTools HUD.
import sys
import os
import Options
import traceback
(options, sys.argv) = Options.fpdb_options()
@ -55,7 +56,23 @@ import Database
import Tables
import Hud
aggregate_stats = {"ring": False, "tour": False} # config file!
# To add to config:
aggregate_stats = {"ring": False, "tour": False} # uses agg_bb_mult
hud_style = 'A' # A=All-time
# S=Session
# T=timed (last n days - set hud_days to required value)
# Future values may also include:
# H=Hands (last n hands)
hud_days = 90 # Max number of days from each player to use for hud stats
agg_bb_mult = 100 # 1 = no aggregation. When aggregating stats across levels larger blinds
# must be < (agg_bb_mult * smaller blinds) to be aggregated
# ie. 100 will aggregate almost everything, 2 will probably agg just the
# next higher and lower levels into the current one, try 3/10/30/100
hud_session_gap = 30 # Gap (minutes) between hands that indicates a change of session
# (hands every 2 mins for 1 hour = one session, if followed
# by a 40 minute gap and then more hands on same table that is
# a new session)
#hud_hands = 0 # Max number of hands from each player to use for hud stats (not used)
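A sketch of the aggregation rule described in the agg_bb_mult comment (my reading of it, not code from this commit): two limits share a HUD stat line when the larger big blind is less than agg_bb_mult times the smaller one.
def same_agg_bucket(bb_a, bb_b, agg_bb_mult):
    lo, hi = min(bb_a, bb_b), max(bb_a, bb_b)
    return hi < agg_bb_mult * lo

print same_agg_bucket(0.50, 1.00, 3)     # True  - adjacent levels merge with mult 3
print same_agg_bucket(0.50, 2.00, 3)     # False - too far apart
print same_agg_bucket(0.50, 25.00, 100)  # True  - mult 100 merges almost everything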
class HUD_main(object):
"""A main() object to own both the read_stdin thread and the gui."""
@ -144,6 +161,7 @@ class HUD_main(object):
# need their own access to the database, but should open their own
# if it is required.
self.db_connection = Database.Database(self.config, self.db_name, 'temp')
self.db_connection.init_hud_stat_vars(hud_days)
tourny_finder = re.compile('(\d+) (\d+)')
while 1: # wait for a new hand number on stdin
@ -156,14 +174,18 @@ class HUD_main(object):
# if there is a db error, complain, skip hand, and proceed
try:
(table_name, max, poker_game, type) = self.db_connection.get_table_name(new_hand_id)
stat_dict = self.db_connection.get_stats_from_hand(new_hand_id, aggregate = aggregate_stats[type])
stat_dict = self.db_connection.get_stats_from_hand(new_hand_id, aggregate_stats[type]
,hud_style, agg_bb_mult)
cards = self.db_connection.get_cards(new_hand_id)
comm_cards = self.db_connection.get_common_cards(new_hand_id)
if comm_cards != {}: # stud!
cards['common'] = comm_cards['common']
except Exception, err:
print "db error: skipping ", new_hand_id, err
sys.stderr.write("Database error %s in hand %d. Skipping.\n" % (err, int(new_hand_id)))
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print "db error: skipping "+str(new_hand_id)+" "+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
if new_hand_id: # new_hand_id is none if we had an error prior to the store
sys.stderr.write("Database error %s in hand %d. Skipping.\n" % (err, int(new_hand_id)))
continue
if type == "tour": # hand is from a tournament

View File

@ -40,10 +40,12 @@ class Hand(object):
LCS = {'H':'h', 'D':'d', 'C':'c', 'S':'s'}
SYMBOL = {'USD': '$', 'EUR': u'$', 'T$': '', 'play': ''}
MS = {'horse' : 'HORSE', '8game' : '8-Game', 'hose' : 'HOSE', 'ha': 'HA'}
SITEIDS = {'Fulltilt':1, 'PokerStars':2, 'Everleaf':3, 'Win2day':4, 'OnGame':5, 'UltimateBet':6, 'Betfair':7}
def __init__(self, sitename, gametype, handText, builtFrom = "HHC"):
self.sitename = sitename
self.siteId = self.SITEIDS[sitename]
self.stats = DerivedStats.DerivedStats(self)
self.gametype = gametype
self.starttime = 0

View File

@ -204,7 +204,9 @@ which it expects to find at self.re_TailSplitHands -- see for e.g. Everleaf.py.
logging.info("Unsupported game type: %s" % gametype)
if hand:
# uncomment these to calculate some stats
# print hand
# hand.stats.getStats(hand)
hand.writeHand(self.out_fh)
return hand
else:

View File

@ -32,6 +32,9 @@ def fpdb_options():
parser.add_option("-c", "--configFile",
dest="config", default=None,
help="Specifies a configuration file.")
parser.add_option("-r", "--rerunPython",
action="store_true",
help="Indicates program was restarted with a different path (only allowed once).")
(options, sys.argv) = parser.parse_args()
return (options, sys.argv)

View File

@ -29,9 +29,11 @@ class PokerStars(HandHistoryConverter):
############################################################
# Class Variables
mixes = { 'HORSE': 'horse', '8-Game': '8game', 'HOSE': 'hose'} # Legal mixed games
sym = {'USD': "\$", 'CAD': "\$", 'T$': "", "EUR": "\x80", "GBP": "\xa3"} # ADD Euro, Sterling, etc HERE
substitutions = {
'LEGAL_ISO' : "USD|EUR|GBP|CAD", # legal ISO currency codes
'LS' : "\$" # legal currency symbols
'LS' : "\$|\x80|\xa3" # legal currency symbols ADD Euro, Sterling, etc HERE
}
# Static regexes
@ -48,25 +50,32 @@ class PokerStars(HandHistoryConverter):
(-\sLevel\s(?P<LEVEL>[IVXLC]+)\s)?
\(? # open paren of the stakes
(?P<CURRENCY>%(LS)s|)?
(?P<SB>[.0-9]+)/%(LS)s?
(?P<SB>[.0-9]+)/(%(LS)s)?
(?P<BB>[.0-9]+)
\s?(?P<ISO>%(LEGAL_ISO)s)?
\)\s-\s # close paren of the stakes
(?P<DATETIME>.*$)""" % substitutions,
re.MULTILINE|re.VERBOSE)
re_PlayerInfo = re.compile("""
^Seat\s(?P<SEAT>[0-9]+):\s
(?P<PNAME>.*)\s
\((%(LS)s)?(?P<CASH>[.0-9]+)\sin\schips\)""" % substitutions,
re.MULTILINE|re.VERBOSE)
re_HandInfo = re.compile("""
^Table\s\'(?P<TABLE>[-\ a-zA-Z\d]+)\'\s
((?P<MAX>\d+)-max\s)?
(?P<PLAY>\(Play\sMoney\)\s)?
(Seat\s\#(?P<BUTTON>\d+)\sis\sthe\sbutton)?""",
re.MULTILINE|re.VERBOSE)
re_SplitHands = re.compile('\n\n+')
re_TailSplitHands = re.compile('(\n\n\n+)')
re_HandInfo = re.compile("""^Table\s\'(?P<TABLE>[-\ a-zA-Z\d]+)\'\s
((?P<MAX>\d+)-max\s)?
(?P<PLAY>\(Play\sMoney\)\s)?
(Seat\s\#(?P<BUTTON>\d+)\sis\sthe\sbutton)?""",
re.MULTILINE|re.VERBOSE)
re_Button = re.compile('Seat #(?P<BUTTON>\d+) is the button', re.MULTILINE)
re_PlayerInfo = re.compile('^Seat (?P<SEAT>[0-9]+): (?P<PNAME>.*) \(\$?(?P<CASH>[.0-9]+) in chips\)', re.MULTILINE)
re_Board = re.compile(r"\[(?P<CARDS>.+)\]")
# self.re_setHandInfoRegex('.*#(?P<HID>[0-9]+): Table (?P<TABLE>[ a-zA-Z]+) - \$?(?P<SB>[.0-9]+)/\$?(?P<BB>[.0-9]+) - (?P<GAMETYPE>.*) - (?P<HR>[0-9]+):(?P<MIN>[0-9]+) ET - (?P<YEAR>[0-9]+)/(?P<MON>[0-9]+)/(?P<DAY>[0-9]+)Table (?P<TABLE>[ a-zA-Z]+)\nSeat (?P<BUTTON>[0-9]+)')
mixes = { 'HORSE': 'horse', '8-Game': '8game', 'HOSE': 'hose'}
def __init__(self, in_path = '-', out_path = '-', follow = False, autostart=True, index=0):
"""\
@ -91,19 +100,21 @@ follow : whether to tail -f the input"""
# They still identify the hero.
self.compiledPlayers = players
player_re = "(?P<PNAME>" + "|".join(map(re.escape, players)) + ")"
subst = {'PLYR': player_re, 'CUR': self.sym[hand.gametype['currency']]}
logging.debug("player_re: " + player_re)
self.re_PostSB = re.compile(r"^%s: posts small blind \$?(?P<SB>[.0-9]+)" % player_re, re.MULTILINE)
self.re_PostBB = re.compile(r"^%s: posts big blind \$?(?P<BB>[.0-9]+)" % player_re, re.MULTILINE)
self.re_Antes = re.compile(r"^%s: posts the ante \$?(?P<ANTE>[.0-9]+)" % player_re, re.MULTILINE)
self.re_BringIn = re.compile(r"^%s: brings[- ]in( low|) for \$?(?P<BRINGIN>[.0-9]+)" % player_re, re.MULTILINE)
self.re_PostBoth = re.compile(r"^%s: posts small \& big blinds \[\$? (?P<SBBB>[.0-9]+)" % player_re, re.MULTILINE)
self.re_HeroCards = re.compile(r"^Dealt to %s(?: \[(?P<OLDCARDS>.+?)\])?( \[(?P<NEWCARDS>.+?)\])" % player_re, re.MULTILINE)
self.re_Action = re.compile(r"""^%s:(?P<ATYPE>\sbets|\schecks|\sraises|\scalls|\sfolds|\sdiscards|\sstands\spat)
(\s\$?(?P<BET>[.\d]+))?(\sto\s\$?(?P<BETTO>[.\d]+))? # the number discarded goes in <BET>
(\scards?(\s\[(?P<DISCARDED>.+?)\])?)?"""
% player_re, re.MULTILINE|re.VERBOSE)
self.re_PostSB = re.compile(r"^%(PLYR)s: posts small blind %(CUR)s(?P<SB>[.0-9]+)" % subst, re.MULTILINE)
self.re_PostBB = re.compile(r"^%(PLYR)s: posts big blind %(CUR)s(?P<BB>[.0-9]+)" % subst, re.MULTILINE)
self.re_Antes = re.compile(r"^%(PLYR)s: posts the ante %(CUR)s(?P<ANTE>[.0-9]+)" % subst, re.MULTILINE)
self.re_BringIn = re.compile(r"^%(PLYR)s: brings[- ]in( low|) for %(CUR)s(?P<BRINGIN>[.0-9]+)" % subst, re.MULTILINE)
self.re_PostBoth = re.compile(r"^%(PLYR)s: posts small \& big blinds \[%(CUR)s (?P<SBBB>[.0-9]+)" % subst, re.MULTILINE)
self.re_HeroCards = re.compile(r"^Dealt to %(PLYR)s(?: \[(?P<OLDCARDS>.+?)\])?( \[(?P<NEWCARDS>.+?)\])" % subst, re.MULTILINE)
self.re_Action = re.compile(r"""
^%(PLYR)s:(?P<ATYPE>\sbets|\schecks|\sraises|\scalls|\sfolds|\sdiscards|\sstands\spat)
(\s%(CUR)s(?P<BET>[.\d]+))?(\sto\s%(CUR)s(?P<BETTO>[.\d]+))? # the number discarded goes in <BET>
(\scards?(\s\[(?P<DISCARDED>.+?)\])?)?"""
% subst, re.MULTILINE|re.VERBOSE)
self.re_ShowdownAction = re.compile(r"^%s: shows \[(?P<CARDS>.*)\]" % player_re, re.MULTILINE)
self.re_CollectPot = re.compile(r"Seat (?P<SEAT>[0-9]+): %s (\(button\) |\(small blind\) |\(big blind\) )?(collected|showed \[.*\] and won) \(\$?(?P<POT>[.\d]+)\)(, mucked| with.*|)" % player_re, re.MULTILINE)
self.re_CollectPot = re.compile(r"Seat (?P<SEAT>[0-9]+): %(PLYR)s (\(button\) |\(small blind\) |\(big blind\) )?(collected|showed \[.*\] and won) \(%(CUR)s(?P<POT>[.\d]+)\)(, mucked| with.*|)" % subst, re.MULTILINE)
self.re_sitsOut = re.compile("^%s sits out" % player_re, re.MULTILINE)
self.re_ShownCards = re.compile("^Seat (?P<SEAT>[0-9]+): %s (\(.*\) )?(?P<SHOWED>showed|mucked) \[(?P<CARDS>.*)\].*" % player_re, re.MULTILINE)
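A toy run (mine, not from the patch) of the %(PLYR)s/%(CUR)s substitution used to build the regexes above:
import re

players = ['Hero', 'villain$1']
player_re = "(?P<PNAME>" + "|".join(map(re.escape, players)) + ")"
subst = {'PLYR': player_re, 'CUR': "\$"}   # '$' for a cash game; tournaments use ''
re_PostSB = re.compile(r"^%(PLYR)s: posts small blind %(CUR)s(?P<SB>[.0-9]+)" % subst, re.MULTILINE)

m = re_PostSB.search("Hero: posts small blind $0.25")
print m.group('PNAME'), m.group('SB')      # Hero 0.25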

File diff suppressed because it is too large

View File

@@ -17,6 +17,33 @@
import os
import sys
import re
# if path is set to use an old version of python look for a new one:
# (does this work in linux?)
if os.name == 'nt' and sys.version[0:3] not in ('2.5', '2.6') and '-r' not in sys.argv:
#print "old path =", os.environ['PATH']
dirs = re.split(os.pathsep, os.environ['PATH'])
# remove any trailing / or \ chars from dirs:
dirs = [re.sub('[\\/]$','',p) for p in dirs]
# remove any dirs containing 'python' apart from those ending in 'python25', 'python26' or 'python':
dirs = [p for p in dirs if not re.search('python', p, re.I) or re.search('python25$', p, re.I) or re.search('python26$', p, re.I)]
tmppath = ";".join(dirs)
#print "new path =", tmppath
if re.search('python', tmppath, re.I):
os.environ['PATH'] = tmppath
print "Python " + sys.version[0:3] + ' - press return to continue\n'
sys.stdin.readline()
os.execvpe('python.exe', ('python.exe', 'fpdb.py', '-r'), os.environ) # first arg is ignored (name of program being run)
else:
print "\npython 2.5 not found, please install python 2.5 or 2.6 for fpdb\n"
sys.exit(1)
else:
pass
#print "debug - not changing path"
print "Python " + sys.version[0:3] + '...\n'
import threading
import Options
import string
@@ -204,10 +231,10 @@ class fpdb:
# print 'User cancelled loading profile'
#except:
# pass
try:
self.load_profile()
except:
pass
#try:
self.load_profile()
#except:
# pass
self.release_global_lock()
#end def dia_load_profile
@@ -246,19 +273,16 @@ class fpdb:
def dia_recreate_hudcache(self, widget, data=None):
if self.obtain_global_lock():
try:
dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING, buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm recreating HUD cache")
diastring = "Please confirm that you want to re-create the HUD cache."
dia_confirm.format_secondary_text(diastring)
response = dia_confirm.run()
dia_confirm.destroy()
if response == gtk.RESPONSE_YES:
self.db.rebuild_hudcache()
elif response == gtk.RESPONSE_NO:
print 'User cancelled rebuilding hud cache'
except:
pass
dia_confirm = gtk.MessageDialog(parent=None, flags=0, type=gtk.MESSAGE_WARNING, buttons=(gtk.BUTTONS_YES_NO), message_format="Confirm recreating HUD cache")
diastring = "Please confirm that you want to re-create the HUD cache."
dia_confirm.format_secondary_text(diastring)
response = dia_confirm.run()
dia_confirm.destroy()
if response == gtk.RESPONSE_YES:
self.db.rebuild_hudcache()
elif response == gtk.RESPONSE_NO:
print 'User cancelled rebuilding hud cache'
self.release_global_lock()
@@ -467,7 +491,7 @@ class fpdb:
def tab_auto_import(self, widget, data=None):
"""opens the auto import tab"""
new_aimp_thread=GuiAutoImport.GuiAutoImport(self.settings, self.config)
new_aimp_thread=GuiAutoImport.GuiAutoImport(self.settings, self.config, self.sql)
self.threads.append(new_aimp_thread)
aimp_tab=new_aimp_thread.get_vbox()
self.add_and_display_tab(aimp_tab, "Auto Import")

View File

@@ -64,7 +64,7 @@ class fpdb_db:
if backend==fpdb_db.MYSQL_INNODB:
import MySQLdb
try:
self.db = MySQLdb.connect(host = host, user = user, passwd = password, db = database, use_unicode=True)
self.db = MySQLdb.connect(host = host, user = user, passwd = password, db = database, use_unicode=True, charset="utf8")
except:
raise fpdb_simple.FpdbError("MySQL connection failed")
elif backend==fpdb_db.PGSQL:
@@ -155,7 +155,7 @@ class fpdb_db:
return (self.host, self.database, self.user, self.password)
#end def get_db_info
def getLastInsertId(self):
def getLastInsertId(self, cursor=None):
try:
if self.backend == self.MYSQL_INNODB:
ret = self.db.insert_id()
@@ -177,9 +177,7 @@
else:
ret = row[0]
elif self.backend == fpdb_db.SQLITE:
# don't know how to do this in sqlite
print "getLastInsertId(): not coded for sqlite yet"
ret = -1
ret = cursor.lastrowid
else:
print "getLastInsertId(): unknown backend ", self.backend
ret = -1
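
For the SQLITE branch the last auto-increment key now comes off the cursor rather than the connection. A minimal self-contained illustration of that (toy schema, not the real Hands table):

import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute("CREATE TABLE Hands (id INTEGER PRIMARY KEY AUTOINCREMENT, siteHandNo TEXT)")
cur.execute("INSERT INTO Hands (siteHandNo) VALUES ('12345')")
print cur.lastrowid        # -> 1, which is what getLastInsertId(cursor) returns for sqlite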

View File

@@ -21,12 +21,15 @@
import os # todo: remove this once import_dir is in fpdb_import
import sys
from time import time, strftime
from time import time, strftime, sleep
import logging
import traceback
import math
import datetime
import re
import Queue
from collections import deque # using Queue for now
import threading
# fpdb/FreePokerTools modules
@@ -42,11 +45,11 @@ try:
mysqlLibFound=True
except:
pass
try:
import psycopg2
pgsqlLibFound=True
import psycopg2.extensions
import psycopg2.extensions
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
except:
@@ -61,27 +64,36 @@ class Importer:
self.config = config
self.sql = sql
self.database = None # database will be the main db interface eventually
self.filelist = {}
self.dirlist = {}
self.siteIds = {}
self.addToDirList = {}
self.removeFromFileList = {} # to remove deleted files
self.monitor = False
self.updated = {} #Time last import was run {file:mtime}
self.updatedsize = {}
self.updatedtime = {}
self.lines = None
self.faobs = None # File as one big string
self.pos_in_file = {} # dict to remember how far we have read in the file
#Set defaults
self.callHud = self.config.get_import_parameters().get("callFpdbHud")
# CONFIGURATION OPTIONS
self.settings.setdefault("minPrint", 30)
self.settings.setdefault("handCount", 0)
self.database = Database.Database(self.config, sql = self.sql) # includes .connection and .sql variables
#self.settings.setdefault("allowHudcacheRebuild", True) # NOT USED NOW
#self.settings.setdefault("forceThreads", 2) # NOT USED NOW
self.settings.setdefault("writeQSize", 1000) # no need to change
self.settings.setdefault("writeQMaxWait", 10) # not used
self.writeq = None
self.database = Database.Database(self.config, sql = self.sql)
self.writerdbs = []
self.settings.setdefault("threads", 1) # value set by GuiBulkImport
for i in xrange(self.settings['threads']):
self.writerdbs.append( Database.Database(self.config, sql = self.sql) )
self.NEWIMPORT = False
self.allow_hudcache_rebuild = False
#Set functions
def setCallHud(self, value):
@@ -104,16 +116,27 @@ class Importer:
def setThreads(self, value):
self.settings['threads'] = value
if self.settings["threads"] > len(self.writerdbs):
for i in xrange(self.settings['threads'] - len(self.writerdbs)):
self.writerdbs.append( Database.Database(self.config, sql = self.sql) )
def setDropIndexes(self, value):
self.settings['dropIndexes'] = value
def setDropHudCache(self, value):
self.settings['dropHudCache'] = value
# def setWatchTime(self):
# self.updated = time()
def clearFileList(self):
self.filelist = {}
def closeDBs(self):
self.database.disconnect()
for i in xrange(len(self.writerdbs)):
self.writerdbs[i].disconnect()
#Add an individual file to filelist
def addImportFile(self, filename, site = "default", filter = "passthrough"):
#TODO: test it is a valid file -> put that in config!!
@@ -164,61 +187,109 @@ class Importer:
print "Warning: Attempted to add non-directory: '" + str(dir) + "' as an import directory"
def runImport(self):
""""Run full import on self.filelist."""
""""Run full import on self.filelist. This is called from GuiBulkImport.py"""
#if self.settings['forceThreads'] > 0: # use forceThreads until threading enabled in GuiBulkImport
# self.setThreads(self.settings['forceThreads'])
# Initial setup
start = datetime.datetime.now()
starttime = time()
print "Started at", start, "--", len(self.filelist), "files to import.", self.settings['dropIndexes']
if self.settings['dropIndexes'] == 'auto':
self.settings['dropIndexes'] = self.calculate_auto2(12.0, 500.0)
if self.allow_hudcache_rebuild:
self.settings['dropHudCache'] = self.calculate_auto2(25.0, 500.0) # returns "drop"/"don't drop"
self.settings['dropIndexes'] = self.calculate_auto2(self.database, 12.0, 500.0)
if self.settings['dropHudCache'] == 'auto':
self.settings['dropHudCache'] = self.calculate_auto2(self.database, 25.0, 500.0) # returns "drop"/"don't drop"
if self.settings['dropIndexes'] == 'drop':
self.database.prepareBulkImport()
else:
print "No need to drop indexes."
#print "dropInd =", self.settings['dropIndexes'], " dropHudCache =", self.settings['dropHudCache']
if self.settings['threads'] <= 0:
(totstored, totdups, totpartial, toterrors) = self.importFiles(self.database, None)
else:
# create queue (will probably change to deque at some point):
self.writeq = Queue.Queue( self.settings['writeQSize'] )
# start separate thread(s) to read hands from queue and write to db:
for i in xrange(self.settings['threads']):
t = threading.Thread( target=self.writerdbs[i].insert_queue_hands
, args=(self.writeq, self.settings["writeQMaxWait"])
, name="dbwriter-"+str(i) )
t.setDaemon(True)
t.start()
# read hands and write to q:
(totstored, totdups, totpartial, toterrors) = self.importFiles(self.database, self.writeq)
if self.writeq.empty():
print "writers finished already"
pass
else:
print "waiting for writers to finish ..."
#for t in threading.enumerate():
# print " "+str(t)
#self.writeq.join()
#using empty() might be more reliable:
while not self.writeq.empty() and len(threading.enumerate()) > 1:
sleep(0.5)
print " ... writers finished"
# Tidying up after import
if self.settings['dropIndexes'] == 'drop':
self.database.afterBulkImport()
else:
print "No need to rebuild indexes."
if self.settings['dropHudCache'] == 'drop':
self.database.rebuild_hudcache()
else:
print "No need to rebuild hudcache."
self.database.analyzeDB()
endtime = time()
return (totstored, totdups, totpartial, toterrors, endtime-starttime)
# end def runImport
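
The threaded path above is a plain producer/consumer: the import thread puts parsed hands on a Queue and each dbwriter thread drains it until it receives a finish message. A stripped-down sketch of that pattern (the real insert_queue_hands also does the database work, and the finish message is whatever send_finish_msg puts on the queue; None is just a stand-in here):

import Queue, threading

FINISH = None                                   # stand-in for the real finish message

def writer(q):
    while True:
        hand = q.get()
        if hand is FINISH:                      # finish message -> this writer stops
            break
        print "would insert hand:", hand

q = Queue.Queue(1000)                           # cf. settings['writeQSize']
writers = [threading.Thread(target=writer, args=(q,), name="dbwriter-%d" % i) for i in xrange(2)]
for t in writers:
    t.setDaemon(True)
    t.start()

for hand in ["hand one", "hand two", "hand three"]:   # normally the parsed hand data
    q.put(hand)
for _ in writers:                               # one finish message per writer thread
    q.put(FINISH)
for t in writers:
    t.join()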
def importFiles(self, db, q):
""""Read filenames in self.filelist and pass to import_file_dict().
Uses a separate database connection if created as a thread (caller
passes None or no param as db)."""
totstored = 0
totdups = 0
totpartial = 0
toterrors = 0
tottime = 0
# if threads <= 1: do this bit
for file in self.filelist:
(stored, duplicates, partial, errors, ttime) = self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
(stored, duplicates, partial, errors, ttime) = self.import_file_dict(db, file
,self.filelist[file][0], self.filelist[file][1], q)
totstored += stored
totdups += duplicates
totpartial += partial
toterrors += errors
tottime += ttime
if self.settings['dropIndexes'] == 'drop':
self.database.afterBulkImport()
else:
print "No need to rebuild indexes."
if self.allow_hudcache_rebuild and self.settings['dropHudCache'] == 'drop':
self.database.rebuild_hudcache()
else:
print "No need to rebuild hudcache."
self.database.analyzeDB()
return (totstored, totdups, totpartial, toterrors, tottime)
# else: import threaded
def calculate_auto(self):
for i in xrange( self.settings['threads'] ):
print "sending finish msg qlen =", q.qsize()
db.send_finish_msg(q)
return (totstored, totdups, totpartial, toterrors)
# end def importFiles
# not used currently
def calculate_auto(self, db):
"""An heuristic to determine a reasonable value of drop/don't drop"""
if len(self.filelist) == 1: return "don't drop"
if 'handsInDB' not in self.settings:
try:
tmpcursor = self.database.get_cursor()
tmpcursor = db.get_cursor()
tmpcursor.execute("Select count(1) from Hands;")
self.settings['handsInDB'] = tmpcursor.fetchone()[0]
except:
pass # if this fails we're probably doomed anyway
if self.settings['handsInDB'] < 5000: return "drop"
if len(self.filelist) < 50: return "don't drop"
if len(self.filelist) < 50: return "don't drop"
if self.settings['handsInDB'] > 50000: return "don't drop"
return "drop"
def calculate_auto2(self, scale, increment):
def calculate_auto2(self, db, scale, increment):
"""A second heuristic to determine a reasonable value of drop/don't drop
This one adds up size of files to import to guess number of hands in them
Example values of scale and increment params might be 10 and 500 meaning
@@ -231,7 +302,7 @@ class Importer:
# get number of hands in db
if 'handsInDB' not in self.settings:
try:
tmpcursor = self.database.get_cursor()
tmpcursor = db.get_cursor()
tmpcursor.execute("Select count(1) from Hands;")
self.settings['handsInDB'] = tmpcursor.fetchone()[0]
except:
@@ -244,7 +315,7 @@ class Importer:
stat_info = os.stat(file)
total_size += stat_info.st_size
# if hands_in_db is zero or very low, we want to drop indexes, otherwise compare
# if hands_in_db is zero or very low, we want to drop indexes, otherwise compare
# import size with db size somehow:
ret = "don't drop"
if self.settings['handsInDB'] < scale * (total_size/size_per_hand) + increment:
@@ -253,7 +324,7 @@ class Importer:
# size_per_hand, "inc =", increment, "return:", ret
return ret
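
A worked example of the comparison above, using made-up numbers (size_per_hand is assumed to be roughly 1300 bytes here; the real constant is set earlier in calculate_auto2):

size_per_hand = 1300.0
hands_in_db   = 20000                       # rows already in Hands
total_size    = 5 * 1024 * 1024             # 5MB of history files queued for import
scale, increment = 12.0, 500.0              # the values passed in from runImport

estimated_new = total_size / size_per_hand              # ~4033 hands to import
if hands_in_db < scale * estimated_new + increment:     # 20000 < ~48900, so drop
    print "drop"
else:
    print "don't drop"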
#Run import on updated files, then store latest update time.
#Run import on updated files, then store latest update time. Called from GuiAutoImport.py
def runUpdated(self):
#Check for new files in monitored directories
#todo: make efficient - always checks for new file, should be able to use mtime of directory
@@ -268,15 +339,18 @@ class Importer:
if os.path.exists(file):
stat_info = os.stat(file)
#rulog.writelines("path exists ")
if file in self.updated:
if stat_info.st_size > self.updated[file]:
self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1])
self.updated[file] = stat_info.st_size
if file in self.updatedsize: # we should be able to assume that if we're in size, we're in time as well
if stat_info.st_size > self.updatedsize[file] or stat_info.st_mtime > self.updatedtime[file]:
self.import_file_dict(self.database, file, self.filelist[file][0], self.filelist[file][1], None)
self.updatedsize[file] = stat_info.st_size
self.updatedtime[file] = time()
else:
if os.path.isdir(file) or (time() - stat_info.st_mtime) < 60:
self.updated[file] = 0
self.updatedsize[file] = 0
self.updatedtime[file] = 0
else:
self.updated[file] = stat_info.st_size
self.updatedsize[file] = stat_info.st_size
self.updatedtime[file] = time()
else:
self.removeFromFileList[file] = True
self.addToDirList = filter(lambda x: self.addImportDirectory(x, True, self.addToDirList[x][0], self.addToDirList[x][1]), self.addToDirList)
@@ -284,7 +358,7 @@ class Importer:
for file in self.removeFromFileList:
if file in self.filelist:
del self.filelist[file]
self.addToDirList = {}
self.removeFromFileList = {}
self.database.rollback()
@@ -292,16 +366,21 @@ class Importer:
#rulog.close()
# This is now an internal function that should not be called directly.
def import_file_dict(self, file, site, filter):
def import_file_dict(self, db, file, site, filter, q=None):
#print "import_file_dict"
if os.path.isdir(file):
self.addToDirList[file] = [site] + [filter]
return
conv = None
(stored, duplicates, partial, errors, ttime) = (0, 0, 0, 0, 0)
# Load filter, process file, pass returned filename to import_fpdb_file
print "\nConverting %s" % file
if self.writeq != None:
print "\nConverting " + file + " (" + str(q.qsize()) + ")"
else:
print "\nConverting " + file
hhbase = self.config.get_import_parameters().get("hhArchiveBase")
hhbase = os.path.expanduser(hhbase)
hhdir = os.path.join(hhbase,site)
@@ -315,34 +394,34 @@ class Importer:
mod = __import__(filter)
obj = getattr(mod, filter_name, None)
if callable(obj):
conv = obj(in_path = file, out_path = out_path, index = 0) # Index into file 0 until changeover
if(conv.getStatus() and self.NEWIMPORT == False):
(stored, duplicates, partial, errors, ttime) = self.import_fpdb_file(out_path, site)
elif (conv.getStatus() and self.NEWIMPORT == True):
hhc = obj(in_path = file, out_path = out_path, index = 0) # Index into file 0 until changeover
if(hhc.getStatus() and self.NEWIMPORT == False):
(stored, duplicates, partial, errors, ttime) = self.import_fpdb_file(db, out_path, site, q)
elif (hhc.getStatus() and self.NEWIMPORT == True):
#This code doesn't do anything yet
handlist = hhc.getProcessedHands()
self.pos_in_file[file] = hhc.getLastCharacterRead()
for hand in handlist:
hand.prepInsert()
hand.insert()
#hand.prepInsert()
hand.insert(self.database)
else:
# conversion didn't work
# TODO: appropriate response?
return (0, 0, 0, 1, 0)
return (0, 0, 0, 1, 0)
else:
print "Unknown filter filter_name:'%s' in filter:'%s'" %(filter_name, filter)
return
return (0, 0, 0, 1, 0)
#This will barf if hhc.getStatus != True
return (stored, duplicates, partial, errors, ttime)
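
The converter above is loaded dynamically: the module named by the filter is imported and the class of (almost) the same name is looked up and instantiated. A minimal illustration of that __import__/getattr trick, using a stdlib module as a stand-in so it runs anywhere (the real filter / filter_name values come from the site configuration):

mod_name, cls_name = "decimal", "Decimal"   # stand-ins for filter / filter_name
mod = __import__(mod_name)
obj = getattr(mod, cls_name, None)
if callable(obj):
    inst = obj("1.50")                      # the real code passes in_path/out_path/index instead
    print inst                              # -> 1.50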
def import_fpdb_file(self, file, site):
#print "import_fpdb_file"
def import_fpdb_file(self, db, file, site, q):
starttime = time()
last_read_hand = 0
loc = 0
(stored, duplicates, partial, errors, ttime) = (0, 0, 0, 0, 0)
# print "file =", file
if file == "stdin":
inputFile = sys.stdin
@@ -369,20 +448,46 @@ class Importer:
self.pos_in_file[file] = inputFile.tell()
inputFile.close()
#self.database.lock_for_insert() # should be ok when using one thread
(stored, duplicates, partial, errors, ttime, handsId) = self.import_fpdb_lines(db, self.lines, starttime, file, site, q)
db.commit()
ttime = time() - starttime
if q == None:
print "\rTotal stored:", stored, " duplicates:", duplicates, "errors:", errors, " time:", ttime
if not stored:
if duplicates:
for line_no in xrange(len(self.lines)):
if self.lines[line_no].find("Game #")!=-1:
final_game_line=self.lines[line_no]
handsId=fpdb_simple.parseSiteHandNo(final_game_line)
else:
print "failed to read a single hand from file:", inputFile
handsId=0
#todo: this will cause return of an unstored hand number if the last hand was an error
self.handsId=handsId
return (stored, duplicates, partial, errors, ttime)
# end def import_fpdb_file
def import_fpdb_lines(self, db, lines, starttime, file, site, q = None):
"""Import an fpdb hand history held in the list lines, could be one hand or many"""
#db.lock_for_insert() # should be ok when using one thread, but doesn't help??
try: # sometimes we seem to be getting an empty self.lines, in which case, we just want to return.
firstline = self.lines[0]
firstline = lines[0]
except:
# just skip the debug message and return silently:
#print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc)
return (0,0,0,1,0)
#print "DEBUG: import_fpdb_file: failed on lines[0]: '%s' '%s' '%s' '%s' " %( file, site, lines, loc)
return (0,0,0,1,0,0)
if firstline.find("Tournament Summary")!=-1:
print "TODO: implement importing tournament summaries"
#self.faobs = readfile(inputFile)
#self.parseTourneyHistory()
return (0,0,0,1,0)
return (0,0,0,1,0,0)
category=fpdb_simple.recogniseCategory(firstline)
@@ -391,16 +496,18 @@ class Importer:
duplicates = 0 #counter
partial = 0 #counter
errors = 0 #counter
ttime = 0
handsId = 0
for i in xrange (len(self.lines)):
if (len(self.lines[i])<2): #Wierd way to detect for '\r\n' or '\n'
for i in xrange (len(lines)):
if (len(lines[i])<2): #Weird way to detect '\r\n' or '\n'
endpos=i
hand=self.lines[startpos:endpos]
hand=lines[startpos:endpos]
if (len(hand[0])<2):
hand=hand[1:]
if (len(hand)<3):
pass
#TODO: This is ugly - we didn't actually find the start of the
@@ -412,10 +519,10 @@ class Importer:
self.hand=hand
try:
handsId = fpdb_parse_logic.mainParser( self.settings
, self.siteIds[site], category, hand
, self.config, self.database )
self.database.commit()
handsId = fpdb_parse_logic.mainParser( self.settings, self.siteIds[site]
, category, hand, self.config
, db, q )
db.commit()
stored += 1
if self.callHud:
@@ -425,29 +532,29 @@ class Importer:
self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
except fpdb_simple.DuplicateError:
duplicates += 1
self.database.rollback()
db.rollback()
except (ValueError), fe:
errors += 1
self.printEmailErrorMessage(errors, file, hand)
if (self.settings['failOnError']):
self.database.commit() #dont remove this, in case hand processing was cancelled.
db.commit() #don't remove this, in case hand processing was cancelled.
raise
else:
self.database.rollback()
db.rollback()
except (fpdb_simple.FpdbError), fe:
errors += 1
self.printEmailErrorMessage(errors, file, hand)
self.database.rollback()
db.rollback()
if self.settings['failOnError']:
self.database.commit() #dont remove this, in case hand processing was cancelled.
db.commit() #don't remove this, in case hand processing was cancelled.
raise
if self.settings['minPrint']:
if not ((stored+duplicates+errors) % self.settings['minPrint']):
print "stored:", stored, "duplicates:", duplicates, "errors:", errors
print "stored:", stored, " duplicates:", duplicates, "errors:", errors
if self.settings['handCount']:
if ((stored+duplicates+errors) >= self.settings['handCount']):
if not self.settings['quiet']:
@@ -455,22 +562,8 @@ class Importer:
print "Total stored:", stored, "duplicates:", duplicates, "errors:", errors, " time:", (time() - starttime)
sys.exit(0)
startpos = endpos
ttime = time() - starttime
print "\rTotal stored:", stored, "duplicates:", duplicates, "errors:", errors, " time:", ttime
if not stored:
if duplicates:
for line_no in xrange(len(self.lines)):
if self.lines[line_no].find("Game #")!=-1:
final_game_line=self.lines[line_no]
handsId=fpdb_simple.parseSiteHandNo(final_game_line)
else:
print "failed to read a single hand from file:", inputFile
handsId=0
#todo: this will cause return of an unstored hand number if the last hand was error
self.database.commit()
self.handsId=handsId
return (stored, duplicates, partial, errors, ttime)
return (stored, duplicates, partial, errors, ttime, handsId)
# end def import_fpdb_lines
def printEmailErrorMessage(self, errors, filename, line):
traceback.print_exc(file=sys.stderr)

View File

@@ -25,13 +25,12 @@ from time import time, strftime
#parses a holdem hand
def mainParser(settings, siteID, category, hand, config, db = None):
#print "mainparser"
# fdb is not used now - to be removed ...
def mainParser(settings, siteID, category, hand, config, db = None, writeq = None):
t0 = time()
#print "mainparser"
backend = settings['db-backend']
# Ideally a db connection is passed in; if not, create one using the sql list if supplied, otherwise start from scratch
if db == None:
db = Database.Database(c = config, sql = None)
category = fpdb_simple.recogniseCategory(hand[0])
@@ -80,7 +79,6 @@ def mainParser(settings, siteID, category, hand, config, db = None):
rebuyOrAddon = -1
tourneyTypeId = 1
fpdb_simple.isAlreadyInDB(db.get_cursor(), gametypeID, siteHandNo)
hand = fpdb_simple.filterCrap(hand, isTourney)
@@ -182,7 +180,13 @@ def mainParser(settings, siteID, category, hand, config, db = None):
, positions, antes, cardValues, cardSuits, boardValues, boardSuits
, winnings, rakes, actionTypes, allIns, actionAmounts
, actionNos, hudImportData, maxSeats, tableName, seatNos)
result = db.store_the_hand(htw)
# save hand in db via direct call or via q if in a thread
if writeq == None:
result = db.store_the_hand(htw)
else:
writeq.put(htw)
result = -999 # meaning unknown
t9 = time()
#print "parse and save=(%4.3f)" % (t9-t0)

View File

@@ -48,37 +48,6 @@ class FpdbError(Exception):
self.value = value
def __str__(self):
return repr(self.value)
# gets value for last auto-increment key generated
# returns -1 if a problem occurs
def getLastInsertId(backend, conn, cursor):
if backend == MYSQL_INNODB:
ret = conn.insert_id()
if ret < 1 or ret > 999999999:
print "getLastInsertId(): problem fetching insert_id? ret=", ret
ret = -1
elif backend == PGSQL:
# some options:
# currval(hands_id_seq) - use name of implicit seq here
# lastval() - still needs sequences set up?
# insert ... returning is useful syntax (but postgres specific?)
# see rules (fancy trigger type things)
cursor.execute ("SELECT lastval()")
row = cursor.fetchone()
if not row:
print "getLastInsertId(%s): problem fetching lastval? row=" % seq, row
ret = -1
else:
ret = row[0]
elif backend == SQLITE:
# don't know how to do this in sqlite
print "getLastInsertId(): not coded for sqlite yet"
ret = -1
else:
print "getLastInsertId(): unknown backend ", backend
ret = -1
return ret
#end def getLastInsertId
#returns an array of the total money paid. intending to add rebuys/addons here
def calcPayin(count, buyin, fee):

1
pyfpdb/makeexe.bat Normal file
View File

@@ -0,0 +1 @@
python makeexe.py py2exe

10
pyfpdb/makeexe.py Normal file
View File

@@ -0,0 +1,10 @@
from distutils.core import setup
import py2exe
opts = {
'py2exe': {
'includes': "pango,atk,gobject",
}
}
setup(name='Free Poker Database', version='0.12', console=[{"script":"fpdb.py"}], options=opts)

47
pyfpdb/windows_make_bats.py Executable file
View File

@@ -0,0 +1,47 @@
# create .bat scripts in windows to try out different gtk dirs
try:
import os
import sys
import re
if os.name != 'nt':
print "\nThis script is only for windows\n"
exit()
dirs = re.split(os.pathsep, os.environ['PATH'])
# remove any trailing / or \ chars from dirs:
dirs = [re.sub('[\\/]$','',p) for p in dirs]
# remove any dirs containing 'python' apart from those ending in 'python25', 'python26' or 'python':
dirs = [p for p in dirs if not re.search('python', p, re.I) or re.search('python25$', p, re.I) or re.search('python26$', p, re.I)]
# find gtk dirs:
gtkdirs = [p for p in dirs if re.search('gtk', p, re.I)]
lines = [ '@echo off\n\n'
, '<path goes here>'
, 'python fpdb.py\n\n'
, 'pause\n\n'
]
if gtkdirs:
i = 1
for gpath in gtkdirs: # enumerate converts the \\ into \
tmpdirs = [p for p in dirs if not re.search('gtk', p, re.I) or p == gpath]
tmppath = ";".join(tmpdirs)
lines[1] = 'PATH=' + tmppath + '\n\n'
bat = open('run_fpdb'+str(i)+'.bat', 'w')
bat.writelines(lines)
bat.close()
i = i + 1
else:
print "\nno gtk directories found in your path - install gtk or edit the path manually\n"
except SystemExit:
pass
except:
print "Error:", str(sys.exc_info())
pass
# sys.stdin.readline()
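
For a machine with a single GTK runtime on the PATH this writes one run_fpdb1.bat that looks roughly like the following (the directory names are invented for the example; your PATH entries will differ):

@echo off

PATH=C:\Python25;C:\WINDOWS\system32;C:\Program Files\GTK2-Runtime\bin

python fpdb.py

pause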

42
utils/fix_table_desc.py Normal file
View File

@@ -0,0 +1,42 @@
#!/usr/bin/python
import re
desc = """
+-------------+---------------------+------+-----+---------+----------------+
| Field | Type | Null | Key | Default | Extra |
+-------------+---------------------+------+-----+---------+----------------+
| id | bigint(20) unsigned | NO | PRI | NULL | auto_increment |
| tourneyId | int(10) unsigned | NO | MUL | NULL | |
| playerId | int(10) unsigned | NO | MUL | NULL | |
| payinAmount | int(11) | NO | | NULL | |
| rank | int(11) | NO | | NULL | |
| winnings | int(11) | NO | | NULL | |
| comment | text | YES | | NULL | |
| commentTs | datetime | YES | | NULL | |
+-------------+---------------------+------+-----+---------+----------------+
"""
table = """
{| border="1"
|+TourneysPlayers Table
"""
# get rid of the vertical spacing and clean up
desc = re.sub("[\+\-]+", "", desc)
desc = re.sub("^\n+", "", desc) # there's probably a better way
desc = re.sub("\n\n", "\n", desc)
# the first line is the header info
temp, desc = re.split("\n", desc, 1)
temp = re.sub("\|", "!", temp)
temp = re.sub(" !", " !!", temp)
table += temp + " Comments\n"
# the rest is the body of the table
for line in re.split("\n", desc):
line = re.sub(" \|", " ||", line)
table += "|+\n" + line + "\n"
table += "|}\n"
print table