Merge branch 'master' of git://git.assembla.com/fpdb-eric
commit 50f5acbf5c

85  pyfpdb/Configuration.py  Executable file → Normal file
@@ -59,28 +59,44 @@ def get_exec_path():
        return sys.path[0]

def get_config(file_name, fallback = True):
    """Looks in exec dir and in self.default_config_path for a config file."""
    config_path = os.path.join(DIR_SELF, file_name) # look in exec dir
    if os.path.exists(config_path) and os.path.isfile(config_path):
        return config_path # there is a file in the exec dir so we use it
    else:
        config_path = os.path.join(DIR_CONFIG, file_name) # look in config dir
        if os.path.exists(config_path) and os.path.isfile(config_path):
    """Looks in cwd and in self.default_config_path for a config file."""
    exec_dir = get_exec_path()
    config_path = os.path.join(exec_dir, file_name)
    # print "config_path=", config_path
    if os.path.exists(config_path): # there is a file in the cwd
        return config_path # so we use it
    else: # no file in the cwd, look where it should be in the first place
        default_dir = get_default_config_path()
        config_path = os.path.join(default_dir, file_name)
        # print "config path 2=", config_path
        if os.path.exists(config_path):
            return config_path

    # No file found
    if not fallback:
        return False

    # OK, fall back to the .example file, should be in the exec dir
    if os.path.exists(os.path.join(DIR_SELF, file_name + ".example")):
    # OK, fall back to the .example file, should be in the start dir
    if os.path.exists(file_name + ".example"):
        try:
            shutil.copyfile(os.path.join(DIR_SELF, file_name + ".example"), os.path.join(DIR_CONFIG, file_name))
            print "No %s found, using %s.example.\n" % (file_name, file_name)
            print "A %s file has been created. You will probably have to edit it." % os.path.join(DIR_CONFIG, file_name)
            log.error("No %s found, using %s.example.\n" % (file_name, file_name) )
            print ""
            if not os.path.isdir(default_dir):
                msg = "Creating directory: '%s'" % (default_dir)
                print msg
                logging.info(msg)
                os.mkdir(default_dir)
            shutil.copyfile(file_name + ".example", config_path)
            msg = "No %s found in %s or %s\n" % (file_name, exec_dir, default_dir) \
                  + "Config file has been created at %s.\n" % config_path \
                  + "Edit your screen_name and hand history path in the supported_sites "\
                  + "section of the \nPreferences window (Main menu) before trying to import hands."
            print msg
            logging.info(msg)
            file_name = config_path
        except:
            print "No %s found, cannot fall back. Exiting.\n" % file_name
            print "Error copying .example file, cannot fall back. Exiting.\n"
            sys.stderr.write("Error copying .example file, cannot fall back. Exiting.\n")
            sys.stderr.write( str(sys.exc_info()) )
            sys.exit()
    else:
        print "No %s found, cannot fall back. Exiting.\n" % file_name

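For reference, a minimal standalone sketch of the lookup-then-fallback behaviour both versions of get_config() implement: look next to the executable first, then in the per-user config directory, and finally copy the shipped file_name + ".example" into place. The helper name and the two directory arguments are illustrative only, not fpdb's actual API.

import os, shutil, sys

def find_or_create_config(file_name, exec_dir, config_dir):
    # 1) a file beside the executable wins, 2) otherwise the user's config dir
    for d in (exec_dir, config_dir):
        path = os.path.join(d, file_name)
        if os.path.isfile(path):
            return path
    # 3) no config anywhere: copy the shipped .example into the config dir
    example = os.path.join(exec_dir, file_name + ".example")
    if os.path.isfile(example):
        if not os.path.isdir(config_dir):
            os.mkdir(config_dir)
        target = os.path.join(config_dir, file_name)
        shutil.copyfile(example, target)
        print "No %s found, created %s from the .example file" % (file_name, target)
        return target
    sys.exit("No %s found, cannot fall back. Exiting." % file_name)
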
@@ -94,26 +110,18 @@ def get_logger(file_name, config = "config", fallback = False):
    try:
        logging.config.fileConfig(conf)
        log = logging.getLogger(config)
        log.debug("%s logger initialised" % config)
        return log
    except:
        pass

    log = logging.basicConfig()
    log = logging.getLogger()
    log.error("basicConfig logger initialised")
    log.debug("config logger initialised")
    return log

def check_dir(path, create = True):
    """Check if a dir exists, optionally creates if not."""
    if os.path.exists(path):
        if os.path.isdir(path):
            return path
        else:
            return False
    if create:
        print "creating directory %s" % path
    else:
        return False
# find a logging.conf file and set up logging
log = get_logger("logging.conf")

########################################################################
# application wide consts

@@ -121,32 +129,20 @@ def check_dir(path, create = True):
APPLICATION_NAME_SHORT = 'fpdb'
APPLICATION_VERSION = 'xx.xx.xx'

DIR_SELF = get_exec_path()
DIR_CONFIG = check_dir(get_default_config_path())
DIR_DATABASE = check_dir(os.path.join(DIR_CONFIG, 'database'))
DIR_LOG = check_dir(os.path.join(DIR_CONFIG, 'log'))

DATABASE_TYPE_POSTGRESQL = 'postgresql'
DATABASE_TYPE_SQLITE = 'sqlite'
DATABASE_TYPE_MYSQL = 'mysql'
#TODO: should this be a tuple or a dict
DATABASE_TYPES = (
        DATABASE_TYPE_POSTGRESQL,
        DATABASE_TYPE_SQLITE,
        DATABASE_TYPE_MYSQL,
        )

# find a logging.conf file and set up logging
log = get_logger("logging.conf", config = "config")
log.debug("config logger initialised")

# and then log our consts
log.info("DIR SELF = %s" % DIR_SELF)
log.info("DIR CONFIG = %s" % DIR_CONFIG)
log.info("DIR DATABASE = %s" % DIR_DATABASE)
log.info("DIR LOG = %s" % DIR_LOG)
NEWIMPORT = True
LOCALE_ENCODING = locale.getdefaultlocale()[1]
#LOCALE_ENCODING = locale.getdefaultlocale()[1]
LOCALE_ENCODING = locale.getpreferredencoding()
if LOCALE_ENCODING == "US-ASCII":
    print "Default encoding set to US-ASCII, defaulting to CP1252 instead -- If you're not on a Mac, please report this problem."
    LOCALE_ENCODING = "cp1252"

########################################################################
def string_to_bool(string, default=True):

@@ -428,7 +424,8 @@ class Config:
        if file is not None: # config file path passed in
            file = os.path.expanduser(file)
            if not os.path.exists(file):
                log.error("Specified configuration file %s not found. Using defaults." % (file))
                print "Configuration file %s not found. Using defaults." % (file)
                sys.stderr.write("Configuration file %s not found. Using defaults." % (file))
                file = None

        if file is None: file = get_config("HUD_config.xml", True)

@@ -39,41 +39,38 @@ import string
import re
import Queue
import codecs
import logging
import math


# pyGTK modules


# FreePokerTools modules
import SQL
import Card
import Tourney
import Charset
from Exceptions import *
import Configuration
log = Configuration.get_logger("logging.conf","db")


# Other library modules
try:
    import sqlalchemy.pool as pool
    use_pool = True
except ImportError:
    logging.info("Not using sqlalchemy connection pool.")
    log.info("Not using sqlalchemy connection pool.")
    use_pool = False

try:
    from numpy import var
    use_numpy = True
except ImportError:
    logging.info("Not using numpy to define variance in sqlite.")
    log.info("Not using numpy to define variance in sqlite.")
    use_numpy = False


# FreePokerTools modules
import Configuration
import SQL
import Card
import Tourney
import Charset
from Exceptions import *

log = Configuration.get_logger("logging.conf", config = "db")
log.debug("db logger initialized.")
encoder = codecs.lookup('utf-8')

DB_VERSION = 119

@@ -247,13 +244,14 @@ class Database:

        # connect to db
        self.do_connect(c)
        print "connection =", self.connection

        if self.backend == self.PGSQL:
            from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
            #ISOLATION_LEVEL_AUTOCOMMIT = 0
            #ISOLATION_LEVEL_READ_COMMITTED = 1
            #ISOLATION_LEVEL_SERIALIZABLE = 2


        if self.backend == self.SQLITE and self.database == ':memory:' and self.wrongDbVersion:
            log.info("sqlite/:memory: - creating")
            self.recreate_tables()

@@ -374,20 +372,20 @@ class Database:
                print msg
                raise FpdbError(msg)
        elif backend == Database.SQLITE:
            logging.info("Connecting to SQLite: %(database)s" % {'database':database})
            log.info("Connecting to SQLite: %(database)s" % {'database':database})
            import sqlite3
            if use_pool:
                sqlite3 = pool.manage(sqlite3, pool_size=1)
            else:
                logging.warning("SQLite won't work well without 'sqlalchemy' installed.")
                log.warning("SQLite won't work well without 'sqlalchemy' installed.")

            if database != ":memory:":
                if not os.path.isdir(self.config.dir_databases):
                    print "Creating directory: '%s'" % (self.config.dir_databases)
                    logging.info("Creating directory: '%s'" % (self.config.dir_databases))
                    log.info("Creating directory: '%s'" % (self.config.dir_databases))
                    os.mkdir(self.config.dir_databases)
                database = os.path.join(self.config.dir_databases, database)
            logging.info(" sqlite db: " + database)
            log.info(" sqlite db: " + database)
            self.connection = sqlite3.connect(database, detect_types=sqlite3.PARSE_DECLTYPES )
            sqlite3.register_converter("bool", lambda x: bool(int(x)))
            sqlite3.register_adapter(bool, lambda x: "1" if x else "0")

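As an aside, the two register_* calls above are the standard sqlite3 idiom for making bool columns round-trip as Python bools; detect_types=PARSE_DECLTYPES is what makes the converter fire on the declared column type. A small self-contained illustration (the table and column names are made up):

import sqlite3

sqlite3.register_converter("bool", lambda x: bool(int(x)))
sqlite3.register_adapter(bool, lambda x: "1" if x else "0")

conn = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
c = conn.cursor()
c.execute("CREATE TABLE t (flag bool)")
c.execute("INSERT INTO t VALUES (?)", (True,))        # adapter stores "1"
print c.execute("SELECT flag FROM t").fetchone()[0]   # converter returns True, a real bool
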
@@ -397,7 +395,10 @@ class Database:
            if use_numpy:
                self.connection.create_aggregate("variance", 1, VARIANCE)
            else:
                logging.warning("Some database functions will not work without NumPy support")
                log.warning("Some database functions will not work without NumPy support")
            self.cursor = self.connection.cursor()
            self.cursor.execute('PRAGMA temp_store=2')  # use memory for temp tables/indexes
            self.cursor.execute('PRAGMA synchronous=0') # don't wait for file writes to finish
        else:
            raise FpdbError("unrecognised database backend:"+backend)

@@ -412,7 +413,7 @@ class Database:
            self.cursor.execute("SELECT * FROM Settings")
            settings = self.cursor.fetchone()
            if settings[0] != DB_VERSION:
                logging.error("outdated or too new database version (%s) - please recreate tables"
                log.error("outdated or too new database version (%s) - please recreate tables"
                             % (settings[0]))
                self.wrongDbVersion = True
        except:# _mysql_exceptions.ProgrammingError:

@@ -422,11 +423,6 @@ class Database:
                log.info("failed to read settings table - recreating tables")
                self.recreate_tables()
                self.check_version(database=database, create=False)
                if not self.wrongDbVersion:
                    msg = "Edit your screen_name and hand history path in the supported_sites "\
                          +"section of the \nPreferences window (Main menu) before trying to import hands"
                    print "\n%s" % msg
                    log.warning(msg)
            else:
                print "Failed to read settings table - please recreate tables"
                log.info("failed to read settings table - please recreate tables")

@@ -436,7 +432,27 @@ class Database:
    #end def connect

    def commit(self):
        if self.backend != self.SQLITE:
            self.connection.commit()
        else:
            # sqlite commits can fail because of shared locks on the database (SQLITE_BUSY)
            # re-try commit if it fails in case this happened
            maxtimes = 5
            pause = 1
            ok = False
            for i in xrange(maxtimes):
                try:
                    ret = self.connection.commit()
                    log.debug("commit finished ok, i = "+str(i))
                    ok = True
                except:
                    log.debug("commit "+str(i)+" failed: info=" + str(sys.exc_info())
                              + " value=" + str(sys.exc_value))
                    sleep(pause)
                if ok: break
            if not ok:
                log.debug("commit failed")
                raise FpdbError('sqlite commit failed')

    def rollback(self):
        self.connection.rollback()

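The new commit() above is a plain retry-on-transient-failure loop around connection.commit(), added because a concurrent reader can hold a shared lock and make SQLite report SQLITE_BUSY. The same pattern as a compact standalone helper (sketch only; the attempt count, pause and exception handling are illustrative, fpdb's version retries on any exception and raises FpdbError at the end):

import time

def retry(action, attempts=5, pause=1.0):
    # call action() up to `attempts` times, sleeping between failed tries
    for i in xrange(attempts):
        try:
            return action()
        except Exception, e:
            last_error = e
            time.sleep(pause)
    raise last_error

# usage: retry(connection.commit) keeps the retry loop out of the caller
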
@@ -1311,7 +1327,6 @@ class Database:
        c.execute("INSERT INTO Sites (name,currency) VALUES ('Absolute', 'USD')")
        c.execute("INSERT INTO Sites (name,currency) VALUES ('PartyPoker', 'USD')")
        c.execute("INSERT INTO Sites (name,currency) VALUES ('Partouche', 'EUR')")
        c.execute("INSERT INTO Sites (name,currency) VALUES ('Carbon', 'USD')")
        if self.backend == self.SQLITE:
            c.execute("INSERT INTO TourneyTypes (id, siteId, buyin, fee) VALUES (NULL, 1, 0, 0);")
        elif self.backend == self.PGSQL:

@@ -1751,6 +1766,9 @@ class Database:

    def insertPlayer(self, name, site_id):
        result = None
        if self.backend == self.SQLITE:
            _name = name
        else:
            _name = Charset.to_db_utf8(name)
        c = self.get_cursor()
        q = "SELECT name, id FROM Players WHERE siteid=%s and name=%s"

@@ -1907,7 +1925,7 @@ class Database:
    # end def send_finish_msg():

    def tRecogniseTourneyType(self, tourney):
        logging.debug("Database.tRecogniseTourneyType")
        log.debug("Database.tRecogniseTourneyType")
        typeId = 1
        # Check if Tourney exists, and if so retrieve TTypeId : in that case, check values of the ttype
        cursor = self.get_cursor()

@@ -1923,10 +1941,10 @@ class Database:
        try:
            len(result)
            typeId = result[0]
            logging.debug("Tourney found in db with Tourney_Type_ID = %d" % typeId)
            log.debug("Tourney found in db with Tourney_Type_ID = %d" % typeId)
            for ev in expectedValues :
                if ( getattr( tourney, expectedValues.get(ev) ) <> result[ev] ):
                    logging.debug("TypeId mismatch : wrong %s : Tourney=%s / db=%s" % (expectedValues.get(ev), getattr( tourney, expectedValues.get(ev)), result[ev]) )
                    log.debug("TypeId mismatch : wrong %s : Tourney=%s / db=%s" % (expectedValues.get(ev), getattr( tourney, expectedValues.get(ev)), result[ev]) )
                    typeIdMatch = False
                    #break
        except:

@@ -1936,7 +1954,7 @@ class Database:
        if typeIdMatch == False :
            # Check for an existing TTypeId that matches tourney info (buyin/fee, knockout, rebuy, speed, matrix, shootout)
            # if not found create it
            logging.debug("Searching for a TourneyTypeId matching TourneyType data")
            log.debug("Searching for a TourneyTypeId matching TourneyType data")
            cursor.execute (self.sql.query['getTourneyTypeId'].replace('%s', self.sql.query['placeholder']),
                            (tourney.siteId, tourney.buyin, tourney.fee, tourney.isKO,
                             tourney.isRebuy, tourney.speed, tourney.isHU, tourney.isShootout, tourney.isMatrix)

@@ -1946,9 +1964,9 @@ class Database:
            try:
                len(result)
                typeId = result[0]
                logging.debug("Existing Tourney Type Id found : %d" % typeId)
                log.debug("Existing Tourney Type Id found : %d" % typeId)
            except TypeError: #this means we need to create a new entry
                logging.debug("Tourney Type Id not found : create one")
                log.debug("Tourney Type Id not found : create one")
                cursor.execute (self.sql.query['insertTourneyTypes'].replace('%s', self.sql.query['placeholder']),
                                (tourney.siteId, tourney.buyin, tourney.fee, tourney.isKO, tourney.isRebuy,
                                 tourney.speed, tourney.isHU, tourney.isShootout, tourney.isMatrix)

@@ -48,5 +48,11 @@ class FpdbPostgresqlNoDatabase(FpdbDatabaseError):
    def __str__(self):
        return repr(self.value +" " + self.errmsg)

class DuplicateError(FpdbError):
class FpdbHandError(FpdbError):
    pass

class FpdbHandDuplicate(FpdbHandError):
    pass

class FpdbHandPartial(FpdbHandError):
    pass

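For context, the new FpdbHandError / FpdbHandDuplicate / FpdbHandPartial classes tie together the Hand.insert() and Importer changes further down: insert() now raises FpdbHandDuplicate instead of silently passing, and the import loop catches it to count duplicates. A simplified sketch of that flow (not fpdb's real classes or method signatures):

class FpdbError(Exception): pass
class FpdbHandError(FpdbError): pass
class FpdbHandDuplicate(FpdbHandError): pass

def insert_hand(seen_hand_ids, site_hand_no):
    # stand-in for Hand.insert(): refuse to store a hand we already have
    if site_hand_no in seen_hand_ids:
        raise FpdbHandDuplicate(site_hand_no)
    seen_hand_ids.add(site_hand_no)

seen, duplicates = set(), 0
for hand_no in ["1001", "1002", "1001"]:
    try:
        insert_hand(seen, hand_no)
    except FpdbHandDuplicate:
        duplicates += 1
print "%d duplicate(s)" % duplicates   # -> 1 duplicate(s)
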
@@ -194,7 +194,7 @@ class GuiAutoImport (threading.Thread):
            widget.set_label(u' _Stop Autoimport ')
            if self.pipe_to_hud is None:
                if os.name == 'nt':
                    command = "python HUD_main.py " + self.settings['cl_options']
                    command = "python "+sys.path[0]+"\\HUD_main.py " + self.settings['cl_options']
                    bs = 0
                else:
                    command = os.path.join(sys.path[0], 'HUD_main.py')

@@ -212,6 +212,7 @@ class HUD_main(object):

            # get basic info about the new hand from the db
            # if there is a db error, complain, skip hand, and proceed
            log.info("HUD_main.read_stdin: hand processing starting ...")
            try:
                (table_name, max, poker_game, type, site_id, site_name, num_seats, tour_number, tab_number) = \
                    self.db_connection.get_table_info(new_hand_id)

@@ -237,6 +238,7 @@ class HUD_main(object):
                try:
                    self.db_connection.init_hud_stat_vars( self.hud_dict[temp_key].hud_params['hud_days']
                                                         , self.hud_dict[temp_key].hud_params['h_hud_days'])
                    t4 = time.time()
                    stat_dict = self.db_connection.get_stats_from_hand(new_hand_id, type, self.hud_dict[temp_key].hud_params, self.hero_ids[site_id])
                    self.hud_dict[temp_key].stat_dict = stat_dict
                except KeyError: # HUD instance has been killed off, key is stale

@@ -245,6 +247,7 @@ class HUD_main(object):
                    # Unlocks table, copied from end of function
                    self.db_connection.connection.rollback()
                    return
                t5 = time.time()
                cards = self.db_connection.get_cards(new_hand_id)
                comm_cards = self.db_connection.get_common_cards(new_hand_id)
                if comm_cards != {}: # stud!

@@ -257,10 +260,8 @@ class HUD_main(object):
            else:
                # get stats using default params--also get cards
                self.db_connection.init_hud_stat_vars( self.hud_params['hud_days'], self.hud_params['h_hud_days'] )
                t4 = time.time()
                stat_dict = self.db_connection.get_stats_from_hand(new_hand_id, type, self.hud_params
                                                                  ,self.hero_ids[site_id], num_seats)
                t5 = time.time()
                cards = self.db_connection.get_cards(new_hand_id)
                comm_cards = self.db_connection.get_common_cards(new_hand_id)
                if comm_cards != {}: # stud!

@@ -229,8 +229,7 @@ db: a connected Database object"""
            # TourneysPlayers
        else:
            log.info("Hand.insert(): hid #: %s is a duplicate" % hh['siteHandNo'])
            #Raise Duplicate exception?
            pass
            raise FpdbHandDuplicate(hh['siteHandNo'])

    def updateHudCache(self, db):
        db.storeHudCache(self.dbid_gt, self.dbid_pids, self.starttime, self.stats.getHandsPlayers())

@@ -1615,4 +1614,3 @@ ORDER BY


        return h

@@ -902,8 +902,8 @@ This program is licensed under the AGPL3, see docs"""+os.sep+"agpl-3.0.txt")
        self.load_profile()

        self.statusIcon = gtk.StatusIcon()
        if os.path.exists('../gfx/fpdb-cards.png'):
            self.statusIcon.set_from_file('../gfx/fpdb-cards.png')
        if os.path.exists(os.path.join(sys.path[0], '../gfx/fpdb-cards.png')):
            self.statusIcon.set_from_file(os.path.join(sys.path[0], '../gfx/fpdb-cards.png'))
        elif os.path.exists('/usr/share/pixmaps/fpdb-cards.png'):
            self.statusIcon.set_from_file('/usr/share/pixmaps/fpdb-cards.png')
        else:

@@ -365,7 +365,7 @@ class Importer:
                pass
            (stored, duplicates, partial, errors, ttime) = self.import_file_dict(self.database, file, self.filelist[file][0], self.filelist[file][1], None)
            try:
                if not os.path.isdir(file):
                if not os.path.isdir(file): # Note: This assumes that whatever calls us has an "addText" func
                    self.caller.addText(" %d stored, %d duplicates, %d partial, %d errors (time = %f)" % (stored, duplicates, partial, errors, ttime))
            except KeyError: # TODO: Again, what error happens here? fix when we find out ..
                pass

@@ -402,7 +402,7 @@ class Importer:
            return (0,0,0,0,0)

        conv = None
        (stored, duplicates, partial, errors, ttime) = (0, 0, 0, 0, 0)
        (stored, duplicates, partial, errors, ttime) = (0, 0, 0, 0, time())

        file = file.decode(Configuration.LOCALE_ENCODING)

@@ -437,12 +437,15 @@ class Importer:

            for hand in handlist:
                if hand is not None:
                    #try, except duplicates here?
                    hand.prepInsert(self.database)
                    try:
                        hand.insert(self.database)
                    except Exceptions.FpdbHandDuplicate:
                        duplicates += 1
                    else:
                        if self.callHud and hand.dbid_hands != 0:
                            to_hud.append(hand.dbid_hands)
                else:
                else: # TODO: Treat empty as an error, or just ignore?
                    log.error("Hand processed but empty")
            self.database.commit()
            # Call hudcache update if not in bulk import mode

@@ -460,13 +463,16 @@ class Importer:

                errors = getattr(hhc, 'numErrors')
                stored = getattr(hhc, 'numHands')
                stored -= duplicates
            else:
                # conversion didn't work
                # TODO: appropriate response?
                return (0, 0, 0, 1, 0)
                return (0, 0, 0, 1, time() - ttime)
        else:
            log.warning("Unknown filter filter_name:'%s' in filter:'%s'" %(filter_name, filter))
            return (0, 0, 0, 1, 0)
            return (0, 0, 0, 1, time() - ttime)

        ttime = time() - ttime

        #This will barf if conv.getStatus != True
        return (stored, duplicates, partial, errors, ttime)

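One small behavioural note on the Importer hunks above: the variant that initialises ttime to time() and returns time() - ttime on every exit path makes even the error returns report real elapsed time rather than a hard-coded 0. A trimmed illustration of that pattern (not the actual Importer code; the file-extension check is made up):

from time import time

def import_file(path):
    ttime = time()
    if not path.endswith(".txt"):
        return (0, 0, 0, 1, time() - ttime)   # error exit still reports elapsed time
    stored = 1                                # ... real parsing/storing would happen here ...
    return (stored, 0, 0, 0, time() - ttime)
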