This commit includes a set of updates which:

* implement the new SessionsCache table
    - The SessionsCache table can be used to track overall or game-specific sessions
    - The totalProfit field is summed by gametypeId for cash games, allowing for multiple currencies
    - Tournament profit (cashes minus buy-ins) is also recorded in totalProfit and is grouped by tourneyId (see the sketch after this list)
* change the sequence and methodology surrounding the import of hands
    - fpdb_import.py now calls a dedicated Hand.py method for each table
    - Hands, SessionsCache and HudCache records are themselves 'cached' to allow for a 'bulk insert' at EOF (see the bulk-insert sketch after the TODO list)
    - the import is reorganized to allow for efficient locking in multi-connection environments (a condensed flow sketch follows the changed-file summary)
* change the name of the index created by addTPlayersIndex (so that it is unique) to work around a bug in MySQL 5.5
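
For orientation, here is a minimal sketch of how totalProfit is derived for the two game types; only the field names are taken from the diff below, the function and variable names are illustrative:

    # Illustrative sketch only, not the fpdb API; field names follow the diff below.
    def session_profit(game_type, hand_player=None, winnings=0, buyin=0, fee=0):
        if game_type == 'ring':
            # cash games: per-hand profit; session rows are kept per gametypeId,
            # so profits in different currencies are never summed together
            return hand_player['totalProfit']
        elif game_type == 'summary':
            # tournament summaries: total cashes minus the cost of entry
            return winnings - (buyin + fee)
        return 0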

TODO
* A 'rebuild_sessionsCache' method is still required
* Further commits are expected to fix bugs created during the porting of this code
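
The 'cache then bulk insert' behaviour above reduces to the standard DB-API pattern sketched here; parsed_hands, build_row, insert_sql, cursor and connection are hypothetical stand-ins rather than fpdb names:

    # Minimal sketch of the per-file caching pattern, assuming a DB-API cursor/connection.
    def bulk_store(parsed_hands, build_row, insert_sql, cursor, connection):
        bulk = []                                     # rows cached while the file is parsed
        for i, hand in enumerate(parsed_hands):
            bulk.append(build_row(hand))              # no INSERT issued per hand
            doinsert = (i == len(parsed_hands) - 1)   # only True at EOF
            if doinsert:
                cursor.executemany(insert_sql, bulk)  # one bulk insert per file
                connection.commit()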
Chaz Littlejohn 2011-03-22 19:16:22 +00:00
parent 169f4bca32
commit 66e1cc3704
8 changed files with 696 additions and 459 deletions
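
The reorganized per-file flow is easiest to follow in condensed form; the sketch below paraphrases the reworked import_file_dict() from fpdb_import.py further down (duplicate handling, the HUD pipe and the lock placeholders are omitted):

    def import_file(db, handlist, tz):
        """Condensed paraphrase of the new import_file_dict(); see the diff below."""
        # pass 1: shared lookups that need the database lock (players, gametypes, tourneys)
        for hand in handlist:
            hand.prepInsert(db)
            db.commit()
        # pass 2: pure stat computation, no database access
        for hand in handlist:
            hand.assembleHand()
        # pass 3: cache one row set per hand; doinsert fires the bulk inserts on the last hand
        hbulk, hcbulk, sc, gsc = [], [], {'bk': []}, {'bk': []}
        hid = db.nextHandId()
        for i, hand in enumerate(handlist):
            doinsert = (i == len(handlist) - 1)
            hid = hand.getHandId(db, hid)             # raises FpdbHandDuplicate for repeats
            sc, gsc = hand.updateSessionsCache(db, sc, gsc, tz, doinsert)
            hbulk = hand.insertHands(db, hbulk, doinsert)
            hcbulk = hand.updateHudCache(db, hcbulk, doinsert)
        db.commit()
        # HandsPlayers and HandsActions rows are cached and bulk inserted the same way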

View File

@@ -73,7 +73,7 @@ except ImportError:
use_numpy = False
DB_VERSION = 150
DB_VERSION = 151
# Variance created as sqlite has a bunch of undefined aggregate functions.
@@ -134,6 +134,9 @@ class Database:
, {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
, {'tab':'HudCache', 'col':'playerId', 'drop':0}
, {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'SessionsCache', 'col':'gametypeId', 'drop':1}
, {'tab':'SessionsCache', 'col':'playerId', 'drop':0}
, {'tab':'SessionsCache', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'Players', 'col':'siteId', 'drop':1}
#, {'tab':'Players', 'col':'name', 'drop':0} unique indexes not dropped
, {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
@@ -157,6 +160,9 @@ class Database:
, {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
, {'tab':'HudCache', 'col':'playerId', 'drop':0}
, {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'SessionsCache', 'col':'gametypeId', 'drop':1}
, {'tab':'SessionsCache', 'col':'playerId', 'drop':0}
, {'tab':'SessionsCache', 'col':'tourneyTypeId', 'drop':0}
, {'tab':'Players', 'col':'siteId', 'drop':1}
, {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
, {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
@@ -182,6 +188,9 @@ class Database:
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
, {'fktab':'SessionsCache','fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'SessionsCache','fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'SessionsCache','fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
, [ # foreign keys for postgres (index 3)
{'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
@@ -193,6 +202,9 @@ class Database:
, {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
, {'fktab':'SessionsCache','fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
, {'fktab':'SessionsCache','fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
, {'fktab':'SessionsCache','fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
]
, [ # no foreign keys in sqlite (index 4)
]
@@ -1253,6 +1265,12 @@ class Database:
c.execute(self.sql.query['createRawHands'])
c.execute(self.sql.query['createRawTourneys'])
# Create sessionscache indexes
log.debug("Creating SessionsCache indexes")
c.execute(self.sql.query['addSessionIdIndex'])
c.execute(self.sql.query['addHandsSessionIdIndex'])
c.execute(self.sql.query['addHandsGameSessionIdIndex'])
# Create unique indexes:
log.debug("Creating unique indexes")
c.execute(self.sql.query['addTourneyIndex'])
@@ -1713,70 +1731,64 @@ class Database:
# NEWIMPORT CODE
###########################
def storeHand(self, p, printdata = False):
if printdata:
print _("######## Hands ##########")
import pprint
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(p)
print _("###### End Hands ########")
# Tablename can have odd characters
p['tableName'] = Charset.to_db_utf8(p['tableName'])
def storeHand(self, hdata, hbulk, doinsert = False):
#stores into table hands:
q = self.sql.query['store_hand']
# Tablename can have odd characters
hdata['tableName'] = Charset.to_db_utf8(hdata['tableName'])
q = q.replace('%s', self.sql.query['placeholder'])
c = self.get_cursor()
c.execute(q, (
p['tableName'],
p['siteHandNo'],
p['tourneyId'],
p['gametypeId'],
p['sessionId'],
p['startTime'],
hbulk.append( [ hdata['tableName'],
hdata['siteHandNo'],
hdata['tourneyId'],
hdata['gameTypeId'],
hdata['sessionId'],
hdata['gameSessionId'],
hdata['startTime'],
datetime.utcnow(), #importtime
p['seats'],
p['maxSeats'],
p['texture'],
p['playersVpi'],
p['boardcard1'],
p['boardcard2'],
p['boardcard3'],
p['boardcard4'],
p['boardcard5'],
p['playersAtStreet1'],
p['playersAtStreet2'],
p['playersAtStreet3'],
p['playersAtStreet4'],
p['playersAtShowdown'],
p['street0Raises'],
p['street1Raises'],
p['street2Raises'],
p['street3Raises'],
p['street4Raises'],
p['street1Pot'],
p['street2Pot'],
p['street3Pot'],
p['street4Pot'],
p['showdownPot']
))
return self.get_last_insert_id(c)
# def storeHand
hdata['seats'],
hdata['maxSeats'],
hdata['texture'],
hdata['playersVpi'],
hdata['boardcard1'],
hdata['boardcard2'],
hdata['boardcard3'],
hdata['boardcard4'],
hdata['boardcard5'],
hdata['playersAtStreet1'],
hdata['playersAtStreet2'],
hdata['playersAtStreet3'],
hdata['playersAtStreet4'],
hdata['playersAtShowdown'],
hdata['street0Raises'],
hdata['street1Raises'],
hdata['street2Raises'],
hdata['street3Raises'],
hdata['street4Raises'],
hdata['street1Pot'],
hdata['street2Pot'],
hdata['street3Pot'],
hdata['street4Pot'],
hdata['showdownPot'],
hdata['id']
])
def storeHandsPlayers(self, hid, pids, pdata, hp_bulk = None, insert = False, printdata = False):
if doinsert:
for h in hbulk:
id = h.pop()
if hdata['sc'] and hdata['gsc']:
h[4] = hdata['sc'][id]['id']
h[5] = hdata['gsc'][id]['id']
q = self.sql.query['store_hand']
q = q.replace('%s', self.sql.query['placeholder'])
c = self.get_cursor()
c.executemany(q, hbulk)
self.commit()
return hbulk
def storeHandsPlayers(self, hid, pids, pdata, hpbulk, doinsert = False):
#print "DEBUG: %s %s %s" %(hid, pids, pdata)
if printdata:
import pprint
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(pdata)
inserts = []
for p in pdata:
inserts.append( (hid,
hpbulk.append( ( hid,
pids[p],
pdata[p]['startCash'],
pdata[p]['seatNo'],
@@ -1882,27 +1894,18 @@ class Database:
pdata[p]['street4Raises']
) )
if insert:
hp_bulk += inserts
if doinsert:
q = self.sql.query['store_hands_players']
q = q.replace('%s', self.sql.query['placeholder'])
c = self.get_cursor()
c.executemany(q, hp_bulk)
c.executemany(q, hpbulk)
return hpbulk
return inserts
def storeHandsActions(self, hid, pids, adata, ha_bulk = None, insert = False, printdata = False):
def storeHandsActions(self, hid, pids, adata, habulk, doinsert = False):
#print "DEBUG: %s %s %s" %(hid, pids, adata)
# This can be used to generate test data. Currently unused
#if printdata:
# import pprint
# pp = pprint.PrettyPrinter(indent=4)
# pp.pprint(adata)
inserts = []
for a in adata:
inserts.append( (hid,
habulk.append( (hid,
pids[adata[a]['player']],
adata[a]['street'],
adata[a]['actionNo'],
@@ -1916,16 +1919,14 @@ class Database:
adata[a]['allIn']
) )
if insert:
ha_bulk += inserts
if doinsert:
q = self.sql.query['store_hands_actions']
q = q.replace('%s', self.sql.query['placeholder'])
c = self.get_cursor()
c.executemany(q, ha_bulk)
c.executemany(q, habulk)
return habulk
return inserts
def storeHudCache(self, gid, pids, starttime, pdata):
def storeHudCache(self, gid, pids, starttime, pdata, hcbulk, doinsert = False):
"""Update cached statistics. If update fails because no record exists, do an insert."""
tz = datetime.utcnow() - datetime.today()
@@ -2042,168 +2043,358 @@ class Database:
line.append(pdata[p]['street3Raises'])
line.append(pdata[p]['street4Raises'])
line.append(gid) # gametypeId
line.append(pids[p]) # playerId
line.append(len(pids)) # activeSeats
hc['gametypeId'] = gid
hc['playerId'] = pids[p]
hc['activeSeats'] = len(pids)
pos = {'B':'B', 'S':'S', 0:'D', 1:'C', 2:'M', 3:'M', 4:'M', 5:'E', 6:'E', 7:'E', 8:'E', 9:'E' }
line.append(pos[pdata[p]['position']])
line.append(pdata[p]['tourneyTypeId'])
line.append(styleKey) # styleKey
inserts.append(line)
hc['position'] = pos[pdata[p]['position']]
hc['tourneyTypeId'] = pdata[p]['tourneyTypeId']
hc['styleKey'] = styleKey
hc['line'] = line
hc['game'] = [hc['gametypeId']
,hc['playerId']
,hc['activeSeats']
,hc['position']
,hc['tourneyTypeId']
,hc['styleKey']]
hcs.append(hc)
for h in hcs:
match = False
for b in hcbulk:
if h['game']==b['game']:
b['line'] = [sum(l) for l in zip(b['line'], h['line'])]
match = True
if not match: hcbulk.append(h)
cursor = self.get_cursor()
for row in inserts:
#convert all True/False values to numeric 0/1
# needed because columns in hudcache are not BOOL they are INT
# and are being summed if an existing hudcache entry exists
# psycopg2 module does not automatically convert these to numeric.
# mantis bug #93
for ind in range(len(row)):
if row[ind] == True: row[ind] = 1
if row[ind] == False: row[ind] = 0
# Try to do the update first:
num = cursor.execute(update_hudcache, row)
#print "DEBUG: values: %s" % row[-6:]
# Test statusmessage to see if update worked, do insert if not
# num is a cursor in sqlite
if ((self.backend == self.PGSQL and cursor.statusmessage != "UPDATE 1")
if doinsert:
inserts = []
exists = []
updates = []
for hc in hcbulk:
row = hc['line'] + hc['game']
if hc['game'] in exists:
updates.append(row)
continue
c = self.get_cursor()
num = c.execute(update_hudcache, row)
# Try to do the update first. Do insert if it did not work
if ((self.backend == self.PGSQL and c.statusmessage != "UPDATE 1")
or (self.backend == self.MYSQL_INNODB and num == 0)
or (self.backend == self.SQLITE and num.rowcount == 0)):
#move the last 6 items in WHERE clause of row from the end of the array
# to the beginning for the INSERT statement
#print "DEBUG: using INSERT: %s" % num
row = row[-6:] + row[:-6]
num = cursor.execute(insert_hudcache, row)
inserts.append(hc['game'] + hc['line'])
#row = hc['game'] + hc['line']
#num = c.execute(insert_hudcache, row)
#print "DEBUG: Successfully(?: %s) updated HudCacho using INSERT" % num
else:
exists.append(hc['game'])
#print "DEBUG: Successfully updated HudCacho using UPDATE"
pass
if inserts: c.executemany(insert_hudcache, inserts)
if updates: c.executemany(update_hudcache, updates)
def storeSessionsCache(self, pids, startTime, game, pdata):
return hcbulk
def prepSessionsCache(self, hid, pids, startTime, sc, heros, doinsert = False):
"""Update cached sessions. If no record exists, do an insert"""
THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))
select_sessionscache = self.sql.query['select_sessionscache']
select_sessionscache = select_sessionscache.replace('%s', self.sql.query['placeholder'])
select_sessionscache_mid = self.sql.query['select_sessionscache_mid']
select_sessionscache_mid = select_sessionscache_mid.replace('%s', self.sql.query['placeholder'])
select_sessionscache_start = self.sql.query['select_sessionscache_start']
select_sessionscache_start = select_sessionscache_start.replace('%s', self.sql.query['placeholder'])
select_prepSC = self.sql.query['select_prepSC'].replace('%s', self.sql.query['placeholder'])
update_Hands_sid = self.sql.query['update_Hands_sid'].replace('%s', self.sql.query['placeholder'])
update_SC_sid = self.sql.query['update_SC_sid'].replace('%s', self.sql.query['placeholder'])
update_prepSC = self.sql.query['update_prepSC'].replace('%s', self.sql.query['placeholder'])
update_sessionscache_mid = self.sql.query['update_sessionscache_mid']
update_sessionscache_mid = update_sessionscache_mid.replace('%s', self.sql.query['placeholder'])
update_sessionscache_start = self.sql.query['update_sessionscache_start']
update_sessionscache_start = update_sessionscache_start.replace('%s', self.sql.query['placeholder'])
update_sessionscache_end = self.sql.query['update_sessionscache_end']
update_sessionscache_end = update_sessionscache_end.replace('%s', self.sql.query['placeholder'])
#print "DEBUG: %s %s %s" %(hid, pids, pdata)
hand = {}
for p, id in pids.iteritems():
if id in heros:
hand['startTime'] = startTime.replace(tzinfo=None)
hand['ids'] = []
insert_sessionscache = self.sql.query['insert_sessionscache']
insert_sessionscache = insert_sessionscache.replace('%s', self.sql.query['placeholder'])
merge_sessionscache = self.sql.query['merge_sessionscache']
merge_sessionscache = merge_sessionscache.replace('%s', self.sql.query['placeholder'])
delete_sessions = self.sql.query['delete_sessions']
delete_sessions = delete_sessions.replace('%s', self.sql.query['placeholder'])
if hand:
id = []
lower = hand['startTime']-THRESHOLD
upper = hand['startTime']+THRESHOLD
for i in range(len(sc['bk'])):
if ((lower <= sc['bk'][i]['sessionEnd'])
and (upper >= sc['bk'][i]['sessionStart'])):
if ((hand['startTime'] <= sc['bk'][i]['sessionEnd'])
and (hand['startTime'] >= sc['bk'][i]['sessionStart'])):
id.append(i)
elif hand['startTime'] < sc['bk'][i]['sessionStart']:
sc['bk'][i]['sessionStart'] = hand['startTime']
id.append(i)
elif hand['startTime'] > sc['bk'][i]['sessionEnd']:
sc['bk'][i]['sessionEnd'] = hand['startTime']
id.append(i)
if len(id) == 1:
id = id[0]
sc['bk'][id]['ids'].append(hid)
elif len(id) == 2:
if sc['bk'][id[0]]['sessionStart'] < sc['bk'][id[1]]['sessionStart']:
sc['bk'][id[0]]['sessionEnd'] = sc['bk'][id[1]]['sessionEnd']
else:
sc['bk'][id[0]]['sessionStart'] = sc['bk'][id[1]]['sessionStart']
sc['bk'].pop(id[1])
id = id[0]
sc['bk'][id]['ids'].append(hid)
elif len(id) == 0:
hand['id'] = None
hand['sessionStart'] = hand['startTime']
hand['sessionEnd'] = hand['startTime']
id = len(sc['bk'])
hand['ids'].append(hid)
sc['bk'].append(hand)
update_hands_sessionid = self.sql.query['update_hands_sessionid']
update_hands_sessionid = update_hands_sessionid.replace('%s', self.sql.query['placeholder'])
if doinsert:
c = self.get_cursor()
c.execute("SELECT max(sessionId) FROM SessionsCache")
id = c.fetchone()[0]
if id: sid = id
else: sid = 0
for i in range(len(sc['bk'])):
lower = sc['bk'][i]['sessionStart'] - THRESHOLD
upper = sc['bk'][i]['sessionEnd'] + THRESHOLD
c.execute(select_prepSC, (lower, upper))
r = self.fetchallDict(c)
num = len(r)
if (num == 1):
start, end, update = r[0]['sessionStart'], r[0]['sessionEnd'], False
if sc['bk'][i]['sessionStart'] < start:
start, update = sc['bk'][i]['sessionStart'], True
if sc['bk'][i]['sessionEnd'] > end:
end, update = sc['bk'][i]['sessionEnd'], True
if update:
c.execute(update_prepSC, [start, end, r[0]['id']])
for h in sc['bk'][i]['ids']:
sc[h] = {'id': r[0]['id'], 'data': [start, end]}
elif (num > 1):
start, end, merge, merge_h, merge_sc = None, None, [], [], []
sid += 1
r.append(sc['bk'][i])
for n in r:
if start:
if start > n['sessionStart']:
start = n['sessionStart']
else: start = n['sessionStart']
if end:
if end < n['sessionEnd']:
end = n['sessionEnd']
else: end = n['sessionEnd']
for n in r:
if n['id']:
if n['id'] in merge: continue
merge.append(n['id'])
merge_h.append([sid, n['id']])
merge_sc.append([start, end, sid, n['id']])
c.executemany(update_Hands_sid, merge_h)
c.executemany(update_SC_sid, merge_sc)
for k, v in sc.iteritems():
if k!='bk' and v['id'] in merge:
sc[k]['id'] = sid
for h in sc['bk'][i]['ids']:
sc[h] = {'id': sid, 'data': [start, end]}
elif (num == 0):
sid += 1
start = sc['bk'][i]['sessionStart']
end = sc['bk'][i]['sessionEnd']
for h in sc['bk'][i]['ids']:
sc[h] = {'id': sid, 'data': [start, end]}
return sc
def storeSessionsCache(self, hid, pids, startTime, game, gid, pdata, sc, gsc, tz, heros, doinsert = False):
"""Update cached sessions. If no record exists, do an insert"""
THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))
local = startTime + timedelta(hours=int(tz))
date = "d%02d%02d%02d" % (local.year - 2000, local.month, local.day)
select_SC = self.sql.query['select_SC'].replace('%s', self.sql.query['placeholder'])
update_SC = self.sql.query['update_SC'].replace('%s', self.sql.query['placeholder'])
insert_SC = self.sql.query['insert_SC'].replace('%s', self.sql.query['placeholder'])
delete_SC = self.sql.query['delete_SC'].replace('%s', self.sql.query['placeholder'])
update_Hands_gsid = self.sql.query['update_Hands_gsid'].replace('%s', self.sql.query['placeholder'])
#print "DEBUG: %s %s %s" %(hid, pids, pdata)
hand = {}
for p, id in pids.iteritems():
if id in heros:
hand['hands'] = 0
hand['totalProfit'] = 0
hand['playerId'] = id
hand['gametypeId'] = None
hand['date'] = date
hand['startTime'] = startTime.replace(tzinfo=None)
hand['hid'] = hid
hand['tourneys'] = 0
hand['tourneyTypeId'] = None
hand['ids'] = []
if (game['type']=='summary'):
hand['type'] = 'tour'
hand['tourneys'] = 1
hand['tourneyTypeId'] = pdata.tourneyTypeId
hand['totalProfit'] = pdata.winnings[p] - (pdata.buyin + pdata.fee)
elif (game['type']=='ring'):
hand['type'] = game['type']
hand['hands'] = 1
hand['gametypeId'] = gid
hand['totalProfit'] = pdata[p]['totalProfit']
elif (game['type']=='tour'):
hand['type'] = game['type']
hand['hands'] = 1
hand['tourneyTypeId'] = pdata[p]['tourneyTypeId']
if hand:
id = []
lower = hand['startTime']-THRESHOLD
upper = hand['startTime']+THRESHOLD
for i in range(len(gsc['bk'])):
if ((hand['date'] == gsc['bk'][i]['date'])
and (hand['gametypeId'] == gsc['bk'][i]['gametypeId'])
and (hand['playerId'] == gsc['bk'][i]['playerId'])
and (hand['tourneyTypeId'] == gsc['bk'][i]['tourneyTypeId'])):
if ((lower <= gsc['bk'][i]['gameEnd'])
and (upper >= gsc['bk'][i]['gameStart'])):
if ((hand['startTime'] <= gsc['bk'][i]['gameEnd'])
and (hand['startTime'] >= gsc['bk'][i]['gameStart'])):
gsc['bk'][i]['hands'] += hand['hands']
gsc['bk'][i]['tourneys'] += hand['tourneys']
gsc['bk'][i]['totalProfit'] += hand['totalProfit']
elif hand['startTime'] < gsc['bk'][i]['gameStart']:
gsc['bk'][i]['hands'] += hand['hands']
gsc['bk'][i]['tourneys'] += hand['tourneys']
gsc['bk'][i]['totalProfit'] += hand['totalProfit']
gsc['bk'][i]['gameStart'] = hand['startTime']
elif hand['startTime'] > gsc['bk'][i]['gameEnd']:
gsc['bk'][i]['hands'] += hand['hands']
gsc['bk'][i]['tourneys'] += hand['tourneys']
gsc['bk'][i]['totalProfit'] += hand['totalProfit']
gsc['bk'][i]['gameEnd'] = hand['startTime']
id.append(i)
if len(id) == 1:
gsc['bk'][id[0]]['ids'].append(hid)
elif len(id) == 2:
if gsc['bk'][id[0]]['gameStart'] < gsc['bk'][id[1]]['gameStart']:
gsc['bk'][id[0]]['gameEnd'] = gsc['bk'][id[1]]['gameEnd']
else: gsc['bk'][id[0]]['gameStart'] = gsc['bk'][id[1]]['gameStart']
gsc['bk'][id[0]]['hands'] += hand['hands']
gsc['bk'][id[0]]['tourneys'] += hand['tourneys']
gsc['bk'][id[0]]['totalProfit'] += hand['totalProfit']
gsc['bk'].pop(id[1])
gsc['bk'][id[0]]['ids'].append(hid)
elif len(id) == 0:
hand['gameStart'] = hand['startTime']
hand['gameEnd'] = hand['startTime']
id = len(gsc['bk'])
hand['ids'].append(hid)
gsc['bk'].append(hand)
if doinsert:
c = self.get_cursor()
for i in range(len(gsc['bk'])):
hid = gsc['bk'][i]['hid']
sid, start, end = sc[hid]['id'], sc[hid]['data'][0], sc[hid]['data'][1]
lower = gsc['bk'][i]['gameStart'] - THRESHOLD
upper = gsc['bk'][i]['gameEnd'] + THRESHOLD
game = [gsc['bk'][i]['date']
,gsc['bk'][i]['type']
,gsc['bk'][i]['gametypeId']
,gsc['bk'][i]['tourneyTypeId']
,gsc['bk'][i]['playerId']]
row = [lower, upper] + game
c.execute(select_SC, row)
r = self.fetchallDict(c)
num = len(r)
if (num == 1):
gstart, gend = r[0]['gameStart'], r[0]['gameEnd']
if gsc['bk'][i]['gameStart'] < gstart:
gstart = gsc['bk'][i]['gameStart']
if gsc['bk'][i]['gameEnd'] > gend:
gend = gsc['bk'][i]['gameEnd']
row = [start, end, gstart, gend
,gsc['bk'][i]['hands']
,gsc['bk'][i]['tourneys']
,gsc['bk'][i]['totalProfit']
,r[0]['id']]
c.execute(update_SC, row)
for h in gsc['bk'][i]['ids']: gsc[h] = {'id': r[0]['id']}
elif (num > 1):
gstart, gend, hands, tourneys, totalProfit, delete, merge = None, None, 0, 0, 0, [], []
for n in r: delete.append(n['id'])
delete.sort()
for d in delete: c.execute(delete_SC, d)
r.append(gsc['bk'][i])
for n in r:
if gstart:
if gstart > n['gameStart']:
gstart = n['gameStart']
else: gstart = n['gameStart']
if gend:
if gend < n['gameEnd']:
gend = n['gameEnd']
else: gend = n['gameEnd']
hands += n['hands']
tourneys += n['tourneys']
totalProfit += n['totalProfit']
row = [start, end, gstart, gend, sid] + game + [hands, tourneys, totalProfit]
c.execute(insert_SC, row)
gsid = self.get_last_insert_id(c)
for h in gsc['bk'][i]['ids']: gsc[h] = {'id': gsid}
for m in delete: merge.append([gsid, m])
c.executemany(update_Hands_gsid, merge)
elif (num == 0):
gstart = gsc['bk'][i]['gameStart']
gend = gsc['bk'][i]['gameEnd']
hands = gsc['bk'][i]['hands']
tourneys = gsc['bk'][i]['tourneys']
totalProfit = gsc['bk'][i]['totalProfit']
row = [start, end, gstart, gend, sid] + game + [hands, tourneys, totalProfit]
c.execute(insert_SC, row)
gsid = self.get_last_insert_id(c)
for h in gsc['bk'][i]['ids']: gsc[h] = {'id': gsid}
else:
# Something bad happened
pass
self.commit()
return gsc
def getHeroIds(self, pids, sitename):
#Grab playerIds using hero names in HUD_Config.xml
try:
# derive list of program owner's player ids
self.hero = {} # name of program owner indexed by site id
self.hero_ids = []
hero = {} # name of program owner indexed by site id
hero_ids = []
# make sure at least two values in list
# so that tuple generation doesn't use
# () or (1,) style
for site in self.config.get_supported_sites():
result = self.get_site_id(site)
if result:
site_id = result[0][0]
self.hero[site_id] = self.config.supported_sites[site].screen_name
p_id = self.get_player_id(self.config, site, self.hero[site_id])
if p_id:
self.hero_ids.append(int(p_id))
hero = self.config.supported_sites[site].screen_name
for n, v in pids.iteritems():
if n == hero and sitename == site:
hero_ids.append(v)
except:
err = traceback.extract_tb(sys.exc_info()[2])[-1]
print _("Error aquiring hero ids:"), str(sys.exc_value)
print err
#print _("Error aquiring hero ids:"), str(sys.exc_value)
return hero_ids
inserts = []
for p in pdata:
if pids[p] in self.hero_ids:
line = [0]*5
if (game['type']=='ring'): line[0] = 1 # count ring hands
if (game['type']=='tour'): line[1] = 1 # count tour hands
if (game['type']=='ring' and game['currency']=='USD'): line[2] = pdata[p]['totalProfit'] #sum of ring profit in USD
if (game['type']=='ring' and game['currency']=='EUR'): line[3] = pdata[p]['totalProfit'] #sum of ring profit in EUR
line[4] = startTime
inserts.append(line)
cursor = self.get_cursor()
id = None
for row in inserts:
threshold = []
threshold.append(row[-1]-THRESHOLD)
threshold.append(row[-1]+THRESHOLD)
cursor.execute(select_sessionscache, threshold)
session_records = cursor.fetchall()
num = len(session_records)
if (num == 1):
id = session_records[0][0] #grab the sessionId
# Try to do the update first:
#print "DEBUG: found 1 record to update"
update_mid = row + row[-1:]
cursor.execute(select_sessionscache_mid, update_mid[-2:])
mid = len(cursor.fetchall())
if (mid == 0):
update_startend = row[-1:] + row + threshold
cursor.execute(select_sessionscache_start, update_startend[-3:])
start = len(cursor.fetchall())
if (start == 0):
#print "DEBUG:", start, " start record found. Update stats and start time"
cursor.execute(update_sessionscache_end, update_startend)
else:
#print "DEBUG: 1 end record found. Update stats and end time time"
cursor.execute(update_sessionscache_start, update_startend)
else:
#print "DEBUG: update stats mid-session"
cursor.execute(update_sessionscache_mid, update_mid)
elif (num > 1):
session_ids = [session_records[0][0], session_records[1][0]]
session_ids.sort()
# Multiple matches found - merge them into one session and update:
# - Obtain the session start and end times for the new combined session
cursor.execute(merge_sessionscache, session_ids)
merge = cursor.fetchone()
# - Delete the old records
for id in session_ids:
cursor.execute(delete_sessions, id)
# - Insert the new updated record
cursor.execute(insert_sessionscache, merge)
# - Obtain the new sessionId and write over the old ids in Hands
id = self.get_last_insert_id(cursor) #grab the sessionId
update_hands = [id] + session_ids
cursor.execute(update_hands_sessionid, update_hands)
# - Update the newly combined record in SessionsCache with data from this hand
update_mid = row + row[-1:]
cursor.execute(update_sessionscache_mid, update_mid)
elif (num == 0):
# No matches found, insert new session:
insert = row + row[-1:]
insert = insert[-2:] + insert[:-2]
#print "DEBUG: No matches found. Insert record", insert
cursor.execute(insert_sessionscache, insert)
id = self.get_last_insert_id(cursor) #grab the sessionId
else:
# Something bad happened
pass
def fetchallDict(self, cursor):
data = cursor.fetchall()
if not data: return []
desc = cursor.description
results = [0]*len(data)
for i in range(len(data)):
results[i] = {}
for n in range(len(desc)):
name = desc[n][0]
results[i][name] = data[i][n]
return results
def nextHandId(self):
c = self.get_cursor()
c.execute("SELECT max(id) FROM Hands")
id = c.fetchone()[0]
if not id: id = 0
id += 1
return id
def isDuplicate(self, gametypeID, siteHandNo):

View File

@@ -123,6 +123,7 @@ class DerivedStats():
self.hands['siteHandNo'] = hand.handid
self.hands['gametypeId'] = None # Leave None, handled later after checking db
self.hands['sessionId'] = None # Leave None, added later if caching sessions
self.hands['gameSessionId'] = None # Leave None, added later if caching sessions
self.hands['startTime'] = hand.startTime # format this!
self.hands['importTime'] = None
self.hands['seats'] = self.countPlayers(hand)

View File

@ -25,7 +25,6 @@ import datetime
from Exceptions import FpdbParseError
from HandHistoryConverter import *
import PokerStarsToFpdb
from TourneySummary import *
class FullTiltPokerSummary(TourneySummary):

View File

@@ -223,13 +223,18 @@ class SummaryImporter:
print "Found %s summaries" %(len(summaryTexts))
errors = 0
imported = 0
####Lock Placeholder####
for j, summaryText in enumerate(summaryTexts, start=1):
sc, gsc = {'bk': []}, {'bk': []}
doinsert = len(summaryTexts)==j
try:
conv = obj(db=None, config=self.config, siteName=site, summaryText=summaryText, builtFrom = "IMAP")
conv = obj(db=self.database, config=self.config, siteName=site, summaryText=summaryText, builtFrom = "IMAP")
sc, gsc = conv.updateSessionsCache(sc, gsc, self.tz, doinsert)
except FpdbParseError, e:
errors += 1
print _("Finished importing %s/%s tournament summaries") %(j, len(summaryTexts))
imported = j
####Lock Placeholder####
return (imported - errors, errors)
def clearFileList(self):

View File

@@ -57,6 +57,7 @@ class Hand(object):
#log.debug( _("Hand.init(): handText is ") + str(handText) )
self.config = config
self.saveActions = self.config.get_import_parameters().get('saveActions')
self.callHud = self.config.get_import_parameters().get("callFpdbHud")
self.cacheSessions = self.config.get_import_parameters().get("cacheSessions")
#log = Configuration.get_logger("logging.conf", "db", log_dir=self.config.dir_log)
self.sitename = sitename
@@ -227,83 +228,77 @@ dealt whether they were seen in a 'dealt to' line
self.holecards[street][player] = [open, closed]
def prepInsert(self, db, printtest = False):
def prepInsert(self, db):
#####
# Players, Gametypes, TourneyTypes are all shared functions that are needed for additional tables
# These functions are intended for prep insert eventually
#####
# Players - base playerid and siteid tuple
self.dbid_pids = db.getSqlPlayerIDs([p[1] for p in self.players], self.siteId)
#Gametypes
hilo = "h"
if self.gametype['category'] in ['studhilo', 'omahahilo']:
hilo = "s"
elif self.gametype['category'] in ['razz','27_3draw','badugi', '27_1draw']:
hilo = "l"
self.gametyperow = (self.siteId, self.gametype['currency'], self.gametype['type'], self.gametype['base'],
self.gametype['category'], self.gametype['limitType'], hilo,
int(Decimal(self.gametype['sb'])*100), int(Decimal(self.gametype['bb'])*100),
int(Decimal(self.gametype['bb'])*100), int(Decimal(self.gametype['bb'])*200))
# Note: the above data is calculated in db.getGameTypeId
# Only being calculated above so we can grab the testdata
self.dbid_gt = db.getGameTypeId(self.siteId, self.gametype, printdata = printtest)
self.dbid_gt = db.getGameTypeId(self.siteId, self.gametype)
if self.tourNo!=None:
self.tourneyTypeId = db.createTourneyType(self)
db.commit()
self.tourneyId = db.createOrUpdateTourney(self, "HHC")
db.commit()
self.tourneysPlayersIds = db.createOrUpdateTourneysPlayers(self, "HHC")
db.commit()
#end def prepInsert
#db.commit() #commit these transactions'
def insert(self, db, hp_data = None, ha_data = None, insert_data=False, printtest = False):
def assembleHand(self):
self.stats.getStats(self)
self.hands = self.stats.getHands()
self.handsplayers = self.stats.getHandsPlayers()
def getHandId(self, db, id):
if db.isDuplicate(self.dbid_gt, self.hands['siteHandNo']):
#log.info(_("Hand.insert(): hid #: %s is a duplicate") % hh['siteHandNo'])
self.is_duplicate = True # i.e. don't update hudcache
next = id
raise FpdbHandDuplicate(self.hands['siteHandNo'])
else:
self.dbid_hands = id
self.hands['id'] = self.dbid_hands
next = id +1
return next
def insertHands(self, db, hbulk, doinsert = False):
""" Function to insert Hand into database
Should not commit, and do minimal selects. Callers may want to cache commits
db: a connected Database object"""
self.hands['gameTypeId'] = self.dbid_gt
self.hands['seats'] = len(self.dbid_pids)
hbulk = db.storeHand(self.hands, hbulk, doinsert)
return hbulk
def insertHandsPlayers(self, db, hpbulk, doinsert = False):
""" Function to inserts HandsPlayers into database"""
hpbulk = db.storeHandsPlayers(self.dbid_hands, self.dbid_pids, self.handsplayers, hpbulk, doinsert)
return hpbulk
self.stats.getStats(self)
def insertHandsActions(self, db, habulk, doinsert = False):
""" Function to inserts HandsActions into database"""
handsactions = self.stats.getHandsActions()
habulk = db.storeHandsActions(self.dbid_hands, self.dbid_pids, handsactions, habulk, doinsert)
return habulk
#####
# End prep functions
#####
hh = self.stats.getHands()
hp_inserts, ha_inserts = [], []
if not db.isDuplicate(self.dbid_gt, hh['siteHandNo']):
# Hands - Summary information of hand indexed by handId - gameinfo
hh['gametypeId'] = self.dbid_gt
# seats TINYINT NOT NULL,
hh['seats'] = len(self.dbid_pids)
hp = self.stats.getHandsPlayers()
def updateHudCache(self, db, hcbulk, doinsert = False):
""" Function to update the HudCache"""
if self.callHud:
hcbulk = db.storeHudCache(self.dbid_gt, self.dbid_pids, self.startTime, self.handsplayers, hcbulk, doinsert)
return hcbulk
def updateSessionsCache(self, db, sc, gsc, tz, doinsert = False):
""" Function to update the SessionsCache"""
if self.cacheSessions:
hh['sessionId'] = db.storeSessionsCache(self.dbid_pids, self.startTime, self.gametype, hp)
self.dbid_hands = db.storeHand(hh, printdata = printtest)
hp_inserts = db.storeHandsPlayers(self.dbid_hands, self.dbid_pids, hp,
insert=insert_data, hp_bulk = hp_data, printdata = printtest)
if self.saveActions:
ha_inserts = db.storeHandsActions(self.dbid_hands, self.dbid_pids, self.stats.getHandsActions(),
insert=insert_data, ha_bulk = ha_data, printdata = printtest)
self.heros = db.getHeroIds(self.dbid_pids, self.sitename)
sc = db.prepSessionsCache(self.dbid_hands, self.dbid_pids, self.startTime, sc, self.heros, doinsert)
gsc = db.storeSessionsCache(self.dbid_hands, self.dbid_pids, self.startTime, self.gametype
,self.dbid_gt, self.handsplayers, sc, gsc, tz, self.heros, doinsert)
if doinsert:
self.hands['sc'] = sc
self.hands['gsc'] = gsc
else:
log.info(_("Hand.insert(): hid #: %s is a duplicate") % hh['siteHandNo'])
self.is_duplicate = True # i.e. don't update hudcache
raise FpdbHandDuplicate(hh['siteHandNo'])
return hp_inserts, ha_inserts
def updateHudCache(self, db):
db.storeHudCache(self.dbid_gt, self.dbid_pids, self.startTime, self.stats.getHandsPlayers())
def updateSessionsCache(self, db):
db.storeSessionsCache(self.dbid_pids, self.startTime, self.gametype, self.stats.getHandsPlayers())
self.hands['sc'] = None
self.hands['gsc'] = None
return sc, gsc
def select(self, db, handId):
""" Function to create Hand object from database """

View File

@@ -347,6 +347,7 @@ class Sql:
tourneyId INT UNSIGNED,
gametypeId SMALLINT UNSIGNED NOT NULL, FOREIGN KEY (gametypeId) REFERENCES Gametypes(id),
sessionId INT UNSIGNED,
gameSessionId INT UNSIGNED,
startTime DATETIME NOT NULL,
importTime DATETIME NOT NULL,
seats TINYINT NOT NULL,
@@ -385,6 +386,7 @@ class Sql:
tourneyId INT,
gametypeId INT NOT NULL, FOREIGN KEY (gametypeId) REFERENCES Gametypes(id),
sessionId INT,
gameSessionId INT,
startTime timestamp without time zone NOT NULL,
importTime timestamp without time zone NOT NULL,
seats SMALLINT NOT NULL,
@@ -422,6 +424,7 @@ class Sql:
tourneyId INT,
gametypeId INT NOT NULL,
sessionId INT,
gameSessionId INT,
startTime REAL NOT NULL,
importTime REAL NOT NULL,
seats INT NOT NULL,
@@ -1430,33 +1433,62 @@ class Sql:
id BIGINT UNSIGNED AUTO_INCREMENT NOT NULL, PRIMARY KEY (id),
sessionStart DATETIME NOT NULL,
sessionEnd DATETIME NOT NULL,
ringHDs INT NOT NULL,
tourHDs INT NOT NULL,
ringProfitUSD INT NOT NULL,
ringProfitEUR INT NOT NULL)
gameStart DATETIME NOT NULL,
gameEnd DATETIME NOT NULL,
sessionId BIGINT,
date CHAR(7) NOT NULL, /* 1st char is style (A/T/H/S), other 6 are the key */
type char(7) NOT NULL,
gametypeId SMALLINT UNSIGNED, FOREIGN KEY (gametypeId) REFERENCES Gametypes(id),
tourneyTypeId SMALLINT UNSIGNED, FOREIGN KEY (tourneyTypeId) REFERENCES TourneyTypes(id),
playerId INT UNSIGNED NOT NULL, FOREIGN KEY (playerId) REFERENCES Players(id),
hands INT NOT NULL,
tourneys INT NOT NULL,
totalProfit INT)
ENGINE=INNODB
"""
ENGINE=INNODB"""
elif db_server == 'postgresql':
self.query['createSessionsCacheTable'] = """CREATE TABLE SessionsCache (
id BIGSERIAL, PRIMARY KEY (id),
sessionStart REAL NOT NULL,
sessionEnd REAL NOT NULL,
ringHDs INT NOT NULL,
tourHDs INT NOT NULL,
ringProfitUSD INT NOT NULL,
ringProfitEUR INT NOT NULL)
gameStart REAL NOT NULL,
gameEnd REAL NOT NULL,
sessionId INT,
date CHAR(7) NOT NULL, /* 1st char is style (A/T/H/S), other 6 are the key */
type char(7),
gametypeId INT, FOREIGN KEY (gametypeId) REFERENCES Gametypes(id),
tourneyTypeId INT, FOREIGN KEY (tourneyTypeId) REFERENCES TourneyTypes(id),
playerId INT, FOREIGN KEY (playerId) REFERENCES Players(id),
hands INT,
tourneys INT,
totalProfit INT)
"""
elif db_server == 'sqlite':
self.query['createSessionsCacheTable'] = """CREATE TABLE SessionsCache (
id INTEGER PRIMARY KEY,
sessionStart REAL NOT NULL,
sessionEnd REAL NOT NULL,
ringHDs INT NOT NULL,
tourHDs INT NOT NULL,
ringProfitUSD INT NOT NULL,
ringProfitEUR INT NOT NULL)
gameStart REAL NOT NULL,
gameEnd REAL NOT NULL,
sessionId INT,
date TEXT NOT NULL, /* 1st char is style (A/T/H/S), other 6 are the key */
type TEXT,
gametypeId INT,
tourneyTypeId INT,
playerId INT,
hands INT,
tourneys INT,
totalProfit INT)
"""
self.query['addSessionIdIndex'] = """CREATE INDEX index_SessionId ON SessionsCache (sessionId)"""
self.query['addHandsSessionIdIndex'] = """CREATE INDEX index_handsSessionId ON Hands (sessionId)"""
self.query['addHandsGameSessionIdIndex'] = """CREATE INDEX index_handsGameSessionId ON Hands (gameSessionId)"""
if db_server == 'mysql':
self.query['addTourneyIndex'] = """ALTER TABLE Tourneys ADD UNIQUE INDEX siteTourneyNo(siteTourneyNo, tourneyTypeId)"""
elif db_server == 'postgresql':
@@ -1479,7 +1511,7 @@ class Sql:
self.query['addPlayersIndex'] = """CREATE UNIQUE INDEX name ON Players (name, siteId)"""
if db_server == 'mysql':
self.query['addTPlayersIndex'] = """ALTER TABLE TourneysPlayers ADD UNIQUE INDEX tourneyId(tourneyId, playerId)"""
self.query['addTPlayersIndex'] = """ALTER TABLE TourneysPlayers ADD UNIQUE INDEX _tourneyId(tourneyId, playerId)"""
elif db_server == 'postgresql':
self.query['addTPlayersIndex'] = """CREATE UNIQUE INDEX tourneyId ON TourneysPlayers (tourneyId, playerId)"""
elif db_server == 'sqlite':
@@ -4142,7 +4174,7 @@ class Sql:
"""
self.query['insert_hudcache'] = """
INSERT INTO HudCache (
insert into HudCache (
gametypeId,
playerId,
activeSeats,
@@ -4237,7 +4269,7 @@ class Sql:
street2Raises,
street3Raises,
street4Raises)
VALUES (%s, %s, %s, %s, %s,
values (%s, %s, %s, %s, %s,
%s, %s, %s, %s, %s,
%s, %s, %s, %s, %s,
%s, %s, %s, %s, %s,
@@ -4364,95 +4396,97 @@ class Sql:
# Queries to rebuild/modify sessionscache
####################################
self.query['select_sessionscache'] = """
self.query['select_prepSC'] = """
SELECT sessionId as id,
sessionStart,
sessionEnd,
count(sessionId) as count
FROM SessionsCache
WHERE sessionEnd>=%s
AND sessionStart<=%s
GROUP BY sessionId, sessionStart, sessionEnd"""
self.query['update_prepSC'] = """
UPDATE SessionsCache SET
sessionStart=%s,
sessionEnd=%s
WHERE sessionId=%s"""
self.query['update_SC'] = """
UPDATE SessionsCache SET
sessionStart=%s,
sessionEnd=%s,
gameStart=%s,
gameEnd=%s,
hands=hands+%s,
tourneys=tourneys+%s,
totalProfit=totalProfit+%s
WHERE id=%s"""
self.query['select_SC'] = """
SELECT id,
sessionStart,
sessionEnd,
ringHDs,
tourHDs,
ringProfitUSD,
ringProfitEUR
gameStart,
gameEnd,
sessionId,
date,
type,
gametypeId,
tourneyTypeId,
playerId,
hands,
tourneys,
totalProfit
FROM SessionsCache
WHERE sessionEnd>=%s
AND sessionStart<=%s"""
WHERE gameEnd>=%s
AND gameStart<=%s
AND date=%s
AND type=%s
AND (case when gametypeId is NULL then 1 else
(case when gametypeId=%s then 1 else 0 end) end)=1
AND (case when tourneyTypeId is NULL then 1 else
(case when tourneyTypeId=%s then 1 else 0 end) end)=1
AND playerId=%s"""
self.query['select_sessionscache_mid'] = """
SELECT sessionStart,
sessionEnd,
ringHDs,
tourHDs,
ringProfitUSD,
ringProfitEUR
FROM SessionsCache
WHERE sessionEnd>=%s
AND sessionStart<=%s"""
self.query['select_sessionscache_start'] = """
SELECT sessionStart,
sessionEnd,
ringHDs,
tourHDs,
ringProfitUSD,
ringProfitEUR
FROM SessionsCache
WHERE sessionStart>%s
AND sessionEnd>=%s
AND sessionStart<=%s"""
self.query['update_sessionscache_mid'] = """
UPDATE SessionsCache SET
ringHDs=ringHDs+%s,
tourHDs=tourHDs+%s,
ringProfitUSD=ringProfitUSD+%s,
ringProfitEUR=ringProfitEUR+%s
WHERE sessionStart<=%s
AND sessionEnd>=%s"""
self.query['update_sessionscache_start'] = """
UPDATE SessionsCache SET
sessionStart=%s,
ringHDs=ringHDs+%s,
tourHDs=tourHDs+%s,
ringProfitUSD=ringProfitUSD+%s,
ringProfitEUR=ringProfitEUR+%s
WHERE sessionStart>%s
AND sessionEnd>=%s
AND sessionStart<=%s"""
self.query['update_sessionscache_end'] = """
UPDATE SessionsCache SET
sessionEnd=%s,
ringHDs=ringHDs+%s,
tourHDs=tourHDs+%s,
ringProfitUSD=ringProfitUSD+%s,
ringProfitEUR=ringProfitEUR+%s
WHERE sessionEnd<%s
AND sessionEnd>=%s
AND sessionStart<=%s"""
self.query['insert_sessionscache'] = """
INSERT INTO SessionsCache (
self.query['insert_SC'] = """
insert into SessionsCache (
sessionStart,
sessionEnd,
ringHDs,
tourHDs,
ringProfitUSD,
ringProfitEUR)
VALUES (%s, %s, %s, %s, %s, %s)"""
gameStart,
gameEnd,
sessionId,
date,
type,
gametypeId,
tourneyTypeId,
playerId,
hands,
tourneys,
totalProfit)
values (%s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s)"""
self.query['merge_sessionscache'] = """
SELECT min(sessionStart), max(sessionEnd), sum(ringHDs), sum(tourHDs), sum(ringProfitUSD), sum(ringProfitEUR)
FROM SessionsCache
WHERE (case when id=%s or id=%s then 1 else 0 end)=1"""
self.query['update_Hands_gsid'] = """
UPDATE Hands SET
gameSessionId=%s
WHERE gameSessionId=%s"""
self.query['delete_sessions'] = """
DELETE FROM SessionsCache
WHERE id=%s"""
self.query['update_hands_sessionid'] = """
self.query['update_Hands_sid'] = """
UPDATE Hands SET
sessionId=%s
WHERE (case when sessionId=%s or sessionId=%s then 1 else 0 end)=1"""
WHERE sessionId=%s"""
self.query['update_SC_sid'] = """
UPDATE SessionsCache SET
sessionStart=%s,
sessionEnd=%s,
sessionId=%s
WHERE sessionId=%s"""
self.query['delete_SC'] = """
DELETE FROM SessionsCache
WHERE id=%s"""
####################################
# Database management queries
@@ -4652,6 +4686,7 @@ class Sql:
tourneyId,
gametypeid,
sessionId,
gameSessionId,
startTime,
importtime,
seats,
@@ -4682,7 +4717,7 @@ class Sql:
values
(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s)"""
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
self.query['store_hands_players'] = """insert into HandsPlayers (

View File

@@ -288,6 +288,15 @@ winnings (int) the money the player ended the tourney with (can be 0, or -1 i
print "checkPlayerExists", player, "fail"
raise FpdbParseError
def updateSessionsCache(self, sc, gsc, tz, doinsert):
self.heros = self.db.getHeroIds(self.dbid_pids, self.siteName)
sc = self.db.prepSessionsCache(self.tourNo, self.dbid_pids, self.startTime, sc , self.heros, doinsert)
gsc = self.db.storeSessionsCache(self.tourNo, self.dbid_pids, self.startTime, {'type': 'summary'}
, None, self, sc, gsc, tz, self.heros, doinsert)
return sc, gsc
def writeSummary(self, fh=sys.__stdout__):
print >>fh, "Override me"

View File

@@ -242,7 +242,7 @@ class Importer:
#print "dropInd =", self.settings['dropIndexes'], " dropHudCache =", self.settings['dropHudCache']
if self.settings['threads'] <= 0:
(totstored, totdups, totpartial, toterrors) = self.importFiles(self.database, None)
(totstored, totdups, totpartial, toterrors) = self.importFiles(None)
else:
# create queue (will probably change to deque at some point):
self.writeq = Queue.Queue( self.settings['writeQSize'] )
@@ -254,7 +254,7 @@ class Importer:
t.setDaemon(True)
t.start()
# read hands and write to q:
(totstored, totdups, totpartial, toterrors) = self.importFiles(self.database, self.writeq)
(totstored, totdups, totpartial, toterrors) = self.importFiles(self.writeq)
if self.writeq.empty():
print _("writers finished already")
@@ -286,7 +286,7 @@ class Importer:
return (totstored, totdups, totpartial, toterrors, endtime-starttime)
# end def runImport
def importFiles(self, db, q):
def importFiles(self, q):
""""Read filenames in self.filelist and pass to import_file_dict().
Uses a separate database connection if created as a thread (caller
passes None or no param as db)."""
@@ -304,7 +304,7 @@ class Importer:
ProgressDialog.progress_update()
(stored, duplicates, partial, errors, ttime) = self.import_file_dict(db, file
(stored, duplicates, partial, errors, ttime) = self.import_file_dict(file
,self.filelist[file][0], self.filelist[file][1], q)
totstored += stored
totdups += duplicates
@@ -395,7 +395,7 @@ class Importer:
self.caller.addText("\n"+os.path.basename(file))
except KeyError: # TODO: What error happens here?
pass
(stored, duplicates, partial, errors, ttime) = self.import_file_dict(self.database, file, self.filelist[file][0], self.filelist[file][1], None)
(stored, duplicates, partial, errors, ttime) = self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1], None)
try:
if not os.path.isdir(file): # Note: This assumes that whatever calls us has an "addText" func
self.caller.addText(" %d stored, %d duplicates, %d partial, %d errors (time = %f)" % (stored, duplicates, partial, errors, ttime))
@@ -426,68 +426,70 @@ class Importer:
#rulog.close()
# This is now an internal function that should not be called directly.
def import_file_dict(self, db, file, site, filter, q=None):
#print "import_file_dict"
def import_file_dict(self, file, site, filter, q=None):
if os.path.isdir(file):
self.addToDirList[file] = [site] + [filter]
return (0,0,0,0,0)
conv = None
(stored, duplicates, partial, errors, ttime) = (0, 0, 0, 0, time())
# sc: is there any need to decode this? maybe easier to skip it than guess at the encoding?
#file = file.decode("utf-8") #(Configuration.LOCALE_ENCODING)
# Load filter, process file, pass returned filename to import_fpdb_file
if self.settings['threads'] > 0 and self.writeq is not None:
log.info((_("Converting %s") % file) + " (" + str(q.qsize()) + ")")
else:
log.info(_("Converting %s") % file)
else: log.info(_("Converting %s") % file)
filter_name = filter.replace("ToFpdb", "")
mod = __import__(filter)
obj = getattr(mod, filter_name, None)
if callable(obj):
idx = 0
if file in self.pos_in_file:
idx = self.pos_in_file[file]
else:
self.pos_in_file[file] = 0
hhc = obj( self.config, in_path = file, index = idx, starsArchive = self.settings['starsArchive'], ftpArchive = self.settings['ftpArchive'], sitename = site )
if file in self.pos_in_file: idx = self.pos_in_file[file]
else: self.pos_in_file[file], idx = 0, 0
hhc = obj( self.config, in_path = file, index = idx
,starsArchive = self.settings['starsArchive']
,ftpArchive = self.settings['ftpArchive']
,sitename = site )
if hhc.getStatus():
handlist = hhc.getProcessedHands()
self.pos_in_file[file] = hhc.getLastCharacterRead()
to_hud = []
hp_bulk = []
ha_bulk = []
i = 0
(hbulk, hpbulk, habulk, hcbulk, phands, ihands) = ([], [], [], [], [], [])
sc, gsc = {'bk': []}, {'bk': []}
####Lock Placeholder####
for hand in handlist:
i += 1
if hand is not None:
hand.prepInsert(self.database, printtest = self.settings['testData'])
hand.prepInsert(self.database)
self.database.commit()
phands.append(hand)
####Lock Placeholder####
for hand in phands:
hand.assembleHand()
####Lock Placeholder####
id = self.database.nextHandId()
for i in range(len(phands)):
doinsert = len(phands)==i+1
hand = phands[i]
try:
hp_inserts, ha_inserts = hand.insert(self.database, hp_data = hp_bulk,
ha_data = ha_bulk, insert_data = len(handlist)==i,
printtest = self.settings['testData'])
hp_bulk += hp_inserts
ha_bulk += ha_inserts
id = hand.getHandId(self.database, id)
sc, gsc = hand.updateSessionsCache(self.database, sc, gsc, self.tz, doinsert)
hbulk = hand.insertHands(self.database, hbulk, doinsert)
hcbulk = hand.updateHudCache(self.database, hcbulk, doinsert)
ihands.append(hand)
to_hud.append(id)
except Exceptions.FpdbHandDuplicate:
duplicates += 1
else:
if self.callHud and hand.dbid_hands != 0:
to_hud.append(hand.dbid_hands)
else: # TODO: Treat empty as an error, or just ignore?
log.error(_("Hand processed but empty"))
self.database.commit()
####Lock Placeholder####
# Call hudcache update if not in bulk import mode
# FIXME: Need to test for bulk import that isn't rebuilding the cache
if self.callHud:
for hand in handlist:
if hand is not None and not hand.is_duplicate:
hand.updateHudCache(self.database)
for i in range(len(ihands)):
doinsert = len(ihands)==i+1
hand = ihands[i]
hpbulk = hand.insertHandsPlayers(self.database, hpbulk, doinsert)
habulk = hand.insertHandsActions(self.database, habulk, doinsert)
self.database.commit()
#pipe the Hands.id out to the HUD