Finish the hudcache rebuild code, which speeds up bulk imports nicely; it can be turned off permanently via allow_hudcache_rebuild in fpdb_import.py. Also move some more code into Database.py, plus cosmetic tidying.
This commit is contained in: parent ab413faab9, commit f69281e2fd
@@ -27,7 +27,12 @@ Create and manage the database objects.
 import sys
 import traceback
 from datetime import datetime, date, time, timedelta
+from time import time, strftime
 import string
+from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
+#ISOLATION_LEVEL_AUTOCOMMIT = 0
+#ISOLATION_LEVEL_READ_COMMITTED = 1
+#ISOLATION_LEVEL_SERIALIZABLE = 2

 # pyGTK modules

@@ -39,6 +44,11 @@ import SQL
 import Card

 class Database:
+
+    MYSQL_INNODB = 2
+    PGSQL = 3
+    SQLITE = 4
+
     def __init__(self, c, db_name = None, game = None, sql = None): # db_name and game not used any more
         print "\ncreating Database instance, sql =", sql
         self.fdb = fpdb_db.fpdb_db()   # sets self.fdb.db self.fdb.cursor and self.fdb.sql
@@ -55,7 +65,6 @@ class Database:
             self.sql = SQL.Sql(type = self.type, db_server = db_params['db-server'])
         else:
             self.sql = sql
-        self.connection.rollback()

         # To add to config:
         self.hud_style = 'T'   # A=All-time
@@ -99,8 +108,11 @@ class Database:
             #row = self.cursor.fetchone()
         else:
             print "Bailing on DB query, not sure it exists yet"

         self.saveActions = False if self.import_options['saveActions'] == False else True
+
+        self.connection.rollback()   # make sure any locks taken so far are released
+
     # could be used by hud to change hud style
     def set_hud_style(self, style):
         self.hud_style = style
@@ -114,6 +126,9 @@ class Database:
     def rollback(self):
         self.fdb.db.rollback()

+    def get_cursor(self):
+        return self.connection.cursor()
+
     def close_connection(self):
         self.connection.close()

@@ -331,7 +346,7 @@ class Database:
                                         ,start_cashes, antes, card_values
                                         ,card_suits, winnings, rakes, seatNos)

-            if 'updateHudCache' not in settings or settings['updateHudCache'] != 'drop':
+            if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
                 fpdb_simple.storeHudCache(self.backend, cursor, base, category, gametype_id, hand_start_time, player_ids, hudImportData)

             if self.saveActions:
@@ -362,7 +377,7 @@ class Database:
                                         , positions, card_values, card_suits, winnings, rakes, seatNos, hudImportData)
             t4 = time()
             #print "ring holdem, backend=%d" % backend
-            if 'updateHudCache' not in settings or settings['updateHudCache'] != 'drop':
+            if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
                 fpdb_simple.storeHudCache(self.backend, cursor, base, category, gametype_id, hand_start_time, player_ids, hudImportData)
             t5 = time()
             t6 = time()
@@ -396,7 +411,7 @@ class Database:
                                         , card_values, card_suits, winnings, rakes, seatNos, tourneys_players_ids)

             #print "tourney holdem, backend=%d" % backend
-            if 'updateHudCache' not in settings or settings['updateHudCache'] != 'drop':
+            if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
                 fpdb_simple.storeHudCache(self.backend, cursor, base, category, gametype_id, hand_start_time, player_ids, hudImportData)

             if self.saveActions:
@@ -423,7 +438,7 @@ class Database:
                                         , playerIds, startCashes, antes, cardValues, cardSuits
                                         , winnings, rakes, seatNos, tourneys_players_ids)

-            if 'updateHudCache' not in settings or settings['updateHudCache'] != 'drop':
+            if 'dropHudCache' not in settings or settings['dropHudCache'] != 'drop':
                 fpdb_simple.storeHudCache(self.backend, cursor, base, category, gametypeId, hand_start_time, playerIds, hudImportData)

             if self.saveActions:
@@ -431,6 +446,38 @@ class Database:
         return hands_id
     #end def tourney_stud

+    def rebuild_hudcache(self):
+        """clears hudcache and rebuilds from the individual handsplayers records"""
+
+        stime = time()
+        self.connection.cursor().execute(self.sql.query['clearHudCache'])
+        self.connection.cursor().execute(self.sql.query['rebuildHudCache'])
+        self.commit()
+        print "Rebuild hudcache took %.1f seconds" % (time() - stime,)
+    #end def rebuild_hudcache
+
+
+    def analyzeDB(self):
+        """Do whatever the DB can offer to update index/table statistics"""
+        stime = time()
+        if self.backend == self.MYSQL_INNODB:
+            try:
+                self.cursor.execute(self.sql.query['analyze'])
+            except:
+                print "Error during analyze"
+        elif self.backend == self.PGSQL:
+            self.connection.set_isolation_level(0)   # allow vacuum to work
+            try:
+                self.cursor = self.get_cursor()
+                self.cursor.execute(self.sql.query['analyze'])
+            except:
+                print "Error during analyze:", str(sys.exc_value)
+            self.connection.set_isolation_level(1)   # go back to normal isolation level
+        self.commit()
+        atime = time() - stime
+        print "Analyze took %.1f seconds" % (atime,)
+    #end def analyzeDB
+
 if __name__=="__main__":
     c = Configuration.Config()

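The idea behind rebuild_hudcache() is to skip per-hand HudCache updates during a large import and repopulate the whole cache afterwards with one aggregate query over HandsPlayers (presumably the rebuildHudCache query whose column list is touched further down in SQL.py). A rough standalone sketch of that pattern, assuming `conn` is an open DB-API connection and `queries` carries the clearHudCache / rebuildHudCache strings from SQL.py:

    # sketch only, not the committed code
    from time import time

    def rebuild_hudcache_sketch(conn, queries):
        stime = time()
        cur = conn.cursor()
        cur.execute(queries['clearHudCache'])     # assumed to empty the HudCache table
        cur.execute(queries['rebuildHudCache'])   # INSERT ... SELECT ... FROM HandsPlayers GROUP BY ...
        conn.commit()
        print "Rebuild hudcache took %.1f seconds" % (time() - stime,)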
@@ -205,10 +205,11 @@ class GuiBulkImport():
         self.load_button.show()

         # see how many hands are in the db and adjust accordingly
-        tcursor = self.importer.fdb.db.cursor()
+        tcursor = self.importer.database.cursor
         tcursor.execute("Select count(1) from Hands")
         row = tcursor.fetchone()
         tcursor.close()
+        self.importer.database.rollback()
         self.n_hands_in_db = row[0]
         if self.n_hands_in_db == 0:
             self.cb_dropindexes.set_active(2)
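Since the GUI now reuses the importer's shared Database cursor instead of opening its own through fpdb_db, the extra rollback() matters: the count SELECT leaves the shared connection inside an open transaction, and rolling it back makes sure nothing is held while the GUI sits idle. A minimal sketch of the pattern (names assumed, not the committed code):

    # count hands through the shared Database object, then release the implicit transaction
    cursor = importer.database.cursor
    cursor.execute("SELECT count(1) FROM Hands")
    n_hands_in_db = cursor.fetchone()[0]
    importer.database.rollback()   # don't leave a transaction open while the GUI is idle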
pyfpdb/SQL.py (112 changed lines)
@@ -1344,58 +1344,58 @@ class Sql:
       ,count(1)
       ,sum(wonWhenSeenStreet1)
       ,sum(wonAtSD)
-      ,sum(CAST(street0VPI as integer))
-      ,sum(CAST(street0Aggr as integer))
-      ,sum(CAST(street0_3BChance as integer))
-      ,sum(CAST(street0_3BDone as integer))
-      ,sum(CAST(street1Seen as integer))
-      ,sum(CAST(street2Seen as integer))
-      ,sum(CAST(street3Seen as integer))
-      ,sum(CAST(street4Seen as integer))
-      ,sum(CAST(sawShowdown as integer))
-      ,sum(CAST(street1Aggr as integer))
-      ,sum(CAST(street2Aggr as integer))
-      ,sum(CAST(street3Aggr as integer))
-      ,sum(CAST(street4Aggr as integer))
-      ,sum(CAST(otherRaisedStreet1 as integer))
-      ,sum(CAST(otherRaisedStreet2 as integer))
-      ,sum(CAST(otherRaisedStreet3 as integer))
-      ,sum(CAST(otherRaisedStreet4 as integer))
-      ,sum(CAST(foldToOtherRaisedStreet1 as integer))
-      ,sum(CAST(foldToOtherRaisedStreet2 as integer))
-      ,sum(CAST(foldToOtherRaisedStreet3 as integer))
-      ,sum(CAST(foldToOtherRaisedStreet4 as integer))
-      ,sum(CAST(stealAttemptChance as integer))
-      ,sum(CAST(stealAttempted as integer))
-      ,sum(CAST(foldBbToStealChance as integer))
-      ,sum(CAST(foldedBbToSteal as integer))
-      ,sum(CAST(foldSbToStealChance as integer))
-      ,sum(CAST(foldedSbToSteal as integer))
-      ,sum(CAST(street1CBChance as integer))
-      ,sum(CAST(street1CBDone as integer))
-      ,sum(CAST(street2CBChance as integer))
-      ,sum(CAST(street2CBDone as integer))
-      ,sum(CAST(street3CBChance as integer))
-      ,sum(CAST(street3CBDone as integer))
-      ,sum(CAST(street4CBChance as integer))
-      ,sum(CAST(street4CBDone as integer))
-      ,sum(CAST(foldToStreet1CBChance as integer))
-      ,sum(CAST(foldToStreet1CBDone as integer))
-      ,sum(CAST(foldToStreet2CBChance as integer))
-      ,sum(CAST(foldToStreet2CBDone as integer))
-      ,sum(CAST(foldToStreet3CBChance as integer))
-      ,sum(CAST(foldToStreet3CBDone as integer))
-      ,sum(CAST(foldToStreet4CBChance as integer))
-      ,sum(CAST(foldToStreet4CBDone as integer))
-      ,sum(CAST(totalProfit as integer))
-      ,sum(CAST(street1CheckCallRaiseChance as integer))
-      ,sum(CAST(street1CheckCallRaiseDone as integer))
-      ,sum(CAST(street2CheckCallRaiseChance as integer))
-      ,sum(CAST(street2CheckCallRaiseDone as integer))
-      ,sum(CAST(street3CheckCallRaiseChance as integer))
-      ,sum(CAST(street3CheckCallRaiseDone as integer))
-      ,sum(CAST(street4CheckCallRaiseChance as integer))
-      ,sum(CAST(street4CheckCallRaiseDone as integer))
+      ,sum(street0VPI)
+      ,sum(street0Aggr)
+      ,sum(street0_3BChance)
+      ,sum(street0_3BDone)
+      ,sum(street1Seen)
+      ,sum(street2Seen)
+      ,sum(street3Seen)
+      ,sum(street4Seen)
+      ,sum(sawShowdown)
+      ,sum(street1Aggr)
+      ,sum(street2Aggr)
+      ,sum(street3Aggr)
+      ,sum(street4Aggr)
+      ,sum(otherRaisedStreet1)
+      ,sum(otherRaisedStreet2)
+      ,sum(otherRaisedStreet3)
+      ,sum(otherRaisedStreet4)
+      ,sum(foldToOtherRaisedStreet1)
+      ,sum(foldToOtherRaisedStreet2)
+      ,sum(foldToOtherRaisedStreet3)
+      ,sum(foldToOtherRaisedStreet4)
+      ,sum(stealAttemptChance)
+      ,sum(stealAttempted)
+      ,sum(foldBbToStealChance)
+      ,sum(foldedBbToSteal)
+      ,sum(foldSbToStealChance)
+      ,sum(foldedSbToSteal)
+      ,sum(street1CBChance)
+      ,sum(street1CBDone)
+      ,sum(street2CBChance)
+      ,sum(street2CBDone)
+      ,sum(street3CBChance)
+      ,sum(street3CBDone)
+      ,sum(street4CBChance)
+      ,sum(street4CBDone)
+      ,sum(foldToStreet1CBChance)
+      ,sum(foldToStreet1CBDone)
+      ,sum(foldToStreet2CBChance)
+      ,sum(foldToStreet2CBDone)
+      ,sum(foldToStreet3CBChance)
+      ,sum(foldToStreet3CBDone)
+      ,sum(foldToStreet4CBChance)
+      ,sum(foldToStreet4CBDone)
+      ,sum(totalProfit)
+      ,sum(street1CheckCallRaiseChance)
+      ,sum(street1CheckCallRaiseDone)
+      ,sum(street2CheckCallRaiseChance)
+      ,sum(street2CheckCallRaiseDone)
+      ,sum(street3CheckCallRaiseChance)
+      ,sum(street3CheckCallRaiseDone)
+      ,sum(street4CheckCallRaiseChance)
+      ,sum(street4CheckCallRaiseDone)
       FROM HandsPlayers hp
       INNER JOIN Hands h ON (h.id = hp.handId)
       GROUP BY h.gametypeId
@@ -1554,6 +1554,14 @@ class Sql:
                       ,to_char(h.handStart, 'YYMMDD')
                """

+        if db_server == 'mysql':
+            self.query['analyze'] = """
+            analyze table autorates, gametypes, hands, handsplayers, hudcache, players
+                        , settings, sites, tourneys, tourneysplayers, tourneytypes
+            """
+        else: # assume postgres
+            self.query['analyze'] = "vacuum analyze"
+
 if __name__== "__main__":
     # just print the default queries and exit
     s = Sql(game = 'razz', type = 'ptracks')
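The per-backend 'analyze' query explains the isolation-level juggling in Database.analyzeDB(): PostgreSQL refuses to run VACUUM inside a transaction block, so the connection has to be switched to autocommit around the statement. A minimal psycopg2 sketch of that dance, with a placeholder connection string:

    # sketch, not committed code
    import sys
    import psycopg2
    from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED

    conn = psycopg2.connect("dbname=fpdb")                    # placeholder DSN
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)      # VACUUM cannot run inside a transaction
    try:
        conn.cursor().execute("vacuum analyze")
    except:
        print "Error during analyze:", sys.exc_info()[1]
    conn.set_isolation_level(ISOLATION_LEVEL_READ_COMMITTED)  # back to normal transactional behaviour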
@@ -572,52 +572,6 @@ class fpdb_db:
             self.db.set_isolation_level(1)   # go back to normal isolation level
     #end def dropAllIndexes

-    def analyzeDB(self):
-        """Do whatever the DB can offer to update index/table statistics"""
-        stime = time()
-        if self.backend == self.PGSQL:
-            self.db.set_isolation_level(0)   # allow vacuum to work
-            try:
-                self.cursor.execute("vacuum analyze")
-            except:
-                print "Error during vacuum"
-            self.db.set_isolation_level(1)   # go back to normal isolation level
-        self.db.commit()
-        atime = time() - stime
-        print "analyze took", atime, "seconds"
-    #end def analyzeDB
-
-    # Currently uses an exclusive lock on the Players table as a global lock
-    # ( Changed because Hands is used in Database.init() )
-    # Return values are Unix style, 0 for success, positive integers for errors
-    # 1 = generic error
-    # 2 = players table does not exist (error message is suppressed)
-    def get_global_lock(self):
-        if self.backend == self.MYSQL_INNODB:
-            try:
-                self.cursor.execute( "lock tables Players write" )
-            except:
-                # Table 'fpdb.players' doesn't exist
-                if str(sys.exc_value).find(".Players' doesn't exist") >= 0:
-                    return(2)
-                print "Error! failed to obtain global lock. Close all programs accessing " \
-                      + "database (including fpdb) and try again (%s)." \
-                      % ( str(sys.exc_value).rstrip('\n'), )
-                return(1)
-        elif self.backend == self.PGSQL:
-            try:
-                self.cursor.execute( "lock table Players in exclusive mode nowait" )
-                #print "... after lock table, status =", self.cursor.statusmessage
-            except:
-                # relation "players" does not exist
-                if str(sys.exc_value).find('relation "players" does not exist') >= 0:
-                    return(2)
-                print "Error! failed to obtain global lock. Close all programs accessing " \
-                      + "database (including fpdb) and try again (%s)." \
-                      % ( str(sys.exc_value).rstrip('\n'), )
-                return(1)
-        return(0)
-
     def getLastInsertId(self):
         if self.backend == self.MYSQL_INNODB:
             ret = self.db.insert_id()
@@ -81,9 +81,10 @@ class Importer:
         self.database = Database.Database(self.config)   # includes .connection and .sql variables
         self.fdb = fpdb_db.fpdb_db()   # sets self.fdb.db self.fdb.cursor and self.fdb.sql
         self.fdb.do_connect(self.config)
-        self.fdb.db.rollback()
+        self.fdb.db.rollback()   # make sure all locks are released

         self.NEWIMPORT = False
+        self.allow_hudcache_rebuild = True;

     #Set functions
     def setCallHud(self, value):
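allow_hudcache_rebuild is the switch the commit message points at: while it is True, runImport() asks calculate_auto2() whether to set settings['dropHudCache'] to "drop", i.e. whether to skip storeHudCache() for each hand and rebuild the cache in one pass at the end. To keep the old per-hand behaviour permanently, flip the flag in fpdb_import.py:

    # in Importer.__init__ (fpdb_import.py)
    self.allow_hudcache_rebuild = False   # never defer HudCache updates; storeHudCache runs for every hand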
@@ -168,13 +169,19 @@ class Importer:

     def runImport(self):
         """"Run full import on self.filelist."""
+
         start = datetime.datetime.now()
-        print "started at", start, "--", len(self.filelist), "files to import.", self.settings['dropIndexes']
+        print "Started at", start, "--", len(self.filelist), "files to import.", self.settings['dropIndexes']
         if self.settings['dropIndexes'] == 'auto':
-            self.settings['dropIndexes'] = self.calculate_auto2(10.0, 500.0)
+            self.settings['dropIndexes'] = self.calculate_auto2(12.0, 500.0)
+        if self.allow_hudcache_rebuild:
+            self.settings['dropHudCache'] = self.calculate_auto2(25.0, 500.0)   # returns "drop"/"don't drop"
+
         if self.settings['dropIndexes'] == 'drop':
             self.fdb.prepareBulkImport()
-        #self.settings['updateHudCache'] = self.calculate_auto2(10.0, 500.0)
+        else:
+            print "No need drop indexes."
+        #print "dropInd =", self.settings['dropIndexes'], " dropHudCache =", self.settings['dropHudCache']
         totstored = 0
         totdups = 0
         totpartial = 0
@@ -190,7 +197,13 @@ class Importer:
                 tottime += ttime
         if self.settings['dropIndexes'] == 'drop':
             self.fdb.afterBulkImport()
-            self.fdb.analyzeDB()
+        else:
+            print "No need rebuild indexes."
+        if self.settings['dropHudCache'] == 'drop':
+            self.database.rebuild_hudcache()
+        else:
+            print "No need to rebuild hudcache."
+        self.database.analyzeDB()
         return (totstored, totdups, totpartial, toterrors, tottime)
         # else: import threaded

@@ -237,11 +250,12 @@ class Importer:

         # if hands_in_db is zero or very low, we want to drop indexes, otherwise compare
         # import size with db size somehow:
-        #print "auto2: handsindb =", self.settings['handsInDB'], "total_size =", total_size, "size_per_hand =", \
-        #      size_per_hand, "inc =", increment
+        ret = "don't drop"
         if self.settings['handsInDB'] < scale * (total_size/size_per_hand) + increment:
-            return "drop"
-        return "don't drop"
+            ret = "drop"
+        #print "auto2: handsindb =", self.settings['handsInDB'], "total_size =", total_size, "size_per_hand =", \
+        #      size_per_hand, "inc =", increment, "return:", ret
+        return ret

     #Run import on updated files, then store latest update time.
     def runUpdated(self):
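calculate_auto2(scale, increment) now always falls through to a single return: it answers "drop" when the hands already in the database are few relative to the size of the pending import, and "don't drop" otherwise. A worked example with made-up numbers (size_per_hand is whatever constant the importer uses internally; 500 bytes here is purely illustrative):

    # illustrative numbers only
    hands_in_db   = 10000
    total_size    = 2500000           # bytes of hand history files queued for import
    size_per_hand = 500.0             # assumed average bytes per hand
    scale, increment = 25.0, 500.0    # the dropHudCache call in runImport()

    est_hands = total_size / size_per_hand        # 5000 hands incoming
    threshold = scale * est_hands + increment     # 125500
    ret = "drop" if hands_in_db < threshold else "don't drop"
    print ret                                     # "drop": a one-off rebuild beats per-hand updates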
@@ -309,6 +323,24 @@ class Importer:

             filter_name = filter.replace("ToFpdb", "")

+            # Example code for using threads & queues: (maybe for obj and import_fpdb_file??)
+            #def worker():
+            #    while True:
+            #        item = q.get()
+            #        do_work(item)
+            #        q.task_done()
+            #
+            #q = Queue()
+            #for i in range(num_worker_threads):
+            #    t = Thread(target=worker)
+            #    t.setDaemon(True)
+            #    t.start()
+            #
+            #for item in source():
+            #    q.put(item)
+            #
+            #q.join()   # block until all tasks are done
+
             mod = __import__(filter)
             obj = getattr(mod, filter_name, None)
             if callable(obj):
@@ -317,12 +349,12 @@ class Importer:
                 (stored, duplicates, partial, errors, ttime) = self.import_fpdb_file(out_path, site)
             elif (conv.getStatus() and self.NEWIMPORT == True):
                 #This code doesn't do anything yet
-                handlist = conv.getProcessedHands()
-                self.pos_in_file[file] = conv.getLastCharacterRead()
+                handlist = hhc.getProcessedHands()
+                self.pos_in_file[file] = hhc.getLastCharacterRead()

                 for hand in handlist:
-                    hand.prepInsert(self.fdb)
-                    hand.insert(self.fdb)
+                    hand.prepInsert()
+                    hand.insert()
             else:
                 # conversion didn't work
                 # TODO: appropriate response?