Merge git://git.assembla.com/fpdboz
commit 79b8f4616a
@@ -54,5 +54,5 @@ if __name__ == "__main__":
    (options, sys.argv) = parser.parse_args()

    settings={'imp-callFpdbHud':False, 'db-backend':2}
-    importer = fpdb_import.Importer()
-    importer.import_file_dict(options, settings)
+    importer = fpdb_import.Importer(options,settings)
+    importer.import_file_dict()
@@ -50,16 +50,7 @@ class GuiAutoImport (threading.Thread):

    def do_import(self):
        """Callback for timer to do an import iteration."""
-        for file in os.listdir(self.path):
-            if os.path.isdir(file):
-                print "AutoImport is not recursive - please select the final directory in which the history files are"
-            else:
-                self.inputFile = os.path.join(self.path, file)
-                stat_info = os.stat(self.inputFile)
-                if not self.import_files.has_key(self.inputFile) or stat_info.st_mtime > self.import_files[self.inputFile]:
-                    self.importer.import_file_dict(self, self.settings)
-                    self.import_files[self.inputFile] = stat_info.st_mtime
+        self.importer.runUpdated()

        print "GuiAutoImport.import_dir done"
        return True
@@ -96,17 +87,8 @@ class GuiAutoImport (threading.Thread):
            # self.pipe_to_hud = os.popen(command, 'w')
            self.path=self.pathTBuffer.get_text(self.pathTBuffer.get_start_iter(), self.pathTBuffer.get_end_iter())

-            # Iniitally populate the self.import_files dict, which keeps mtimes for the files watched
-            self.import_files = {}
-            for file in os.listdir(self.path):
-                if os.path.isdir(file):
-                    pass # skip subdirs for now
-                else:
-                    inputFile = os.path.join(self.path, file)
-                    stat_info = os.stat(inputFile)
-                    self.import_files[inputFile] = stat_info.st_mtime
+            # Add directory to importer object and set the initial mtime reference.
+            self.importer.addImportDirectory(self.path, True)

            self.do_import()

            interval=int(self.intervalTBuffer.get_text(self.intervalTBuffer.get_start_iter(), self.intervalTBuffer.get_end_iter()))
@@ -121,17 +103,18 @@ class GuiAutoImport (threading.Thread):
    def __init__(self, settings, debug=True):
        """Constructor for GuiAutoImport"""
        self.settings=settings
-        self.importer = fpdb_import.Importer()
+        self.importer = fpdb_import.Importer(self,self.settings)
        self.importer.setCallHud(True)
+        self.importer.setMinPrint(30)
+        self.importer.setQuiet(False)
+        self.importer.setFailOnError(False)
+        self.importer.setHandCount(0)
+        self.importer.setWatchTime()

        self.server=settings['db-host']
        self.user=settings['db-user']
        self.password=settings['db-password']
        self.database=settings['db-databaseName']
-        self.quiet=False
-        self.failOnError=False
-        self.minPrint=30
-        self.handCount=0

        self.mainVBox=gtk.VBox(False,1)
        self.mainVBox.show()
@@ -27,12 +27,8 @@ class GuiBulkImport (threading.Thread):
    def import_dir(self):
        """imports a directory, non-recursive. todo: move this to fpdb_import so CLI can use it"""
        self.path=self.inputFile
-        for file in os.listdir(self.path):
-            if os.path.isdir(file):
-                print "BulkImport is not recursive - please select the final directory in which the history files are"
-            else:
-                self.inputFile=self.path+os.sep+file
-                self.importer.import_file_dict(self, self.settings)
+        self.importer.addImportDirectory(self.path)
+        self.importer.runImport()
        print "GuiBulkImport.import_dir done"

    def load_clicked(self, widget, data=None):
@@ -40,36 +36,36 @@ class GuiBulkImport (threading.Thread):

        self.handCount=self.hand_count_tbuffer.get_text(self.hand_count_tbuffer.get_start_iter(), self.hand_count_tbuffer.get_end_iter())
        if (self.handCount=="unlimited" or self.handCount=="Unlimited"):
-            self.handCount=0
+            self.importer.setHandCount(0)
        else:
-            self.handCount=int(self.handCount)
+            self.importer.setHandCount(int(self.handCount))

        self.errorFile="failed.txt"

        self.minPrint=self.min_print_tbuffer.get_text(self.min_print_tbuffer.get_start_iter(), self.min_print_tbuffer.get_end_iter())
        if (self.minPrint=="never" or self.minPrint=="Never"):
-            self.minPrint=0
+            self.importer.setMinPrint(0)
        else:
-            self.minPrint=int(self.minPrint)
+            self.importer.setMinPrint=int(self.minPrint)

        self.quiet=self.info_tbuffer.get_text(self.info_tbuffer.get_start_iter(), self.info_tbuffer.get_end_iter())
        if (self.quiet=="yes"):
-            self.quiet=False
+            self.importer.setQuiet(False)
        else:
-            self.quiet=True
+            self.importer.setQuiet(True)

        self.failOnError=self.fail_error_tbuffer.get_text(self.fail_error_tbuffer.get_start_iter(), self.fail_error_tbuffer.get_end_iter())
        if (self.failOnError=="no"):
-            self.failOnError=False
+            self.importer.setFailOnError(False)
        else:
-            self.failOnError=True
+            self.importer.setFailOnError(True)

-        self.server, self.database, self.user, self.password=self.db.get_db_info()

        if os.path.isdir(self.inputFile):
            self.import_dir()
        else:
-            self.importer.import_file_dict(self, self.settings)
+            self.importer.addImportFile()
+            self.importer.runImport()
+            self.importer.clearFileList()

    def get_vbox(self):
        """returns the vbox of this thread"""
@@ -83,7 +79,7 @@ class GuiBulkImport (threading.Thread):
    def __init__(self, db, settings):
        self.db=db
        self.settings=settings
-        self.importer = fpdb_import.Importer()
+        self.importer = fpdb_import.Importer(self,self.settings)

        self.vbox=gtk.VBox(False,1)
        self.vbox.show()
@@ -251,13 +251,13 @@ class GuiTableViewer (threading.Thread):
        self.user=self.db.user
        self.password=self.db.password

-        self.quiet=False
-        self.failOnError=False
-        self.minPrint=0
-        self.handCount=0
-        self.importer = fpdb_import.Importer()
+        self.importer = fpdb_import.Importer(self, self.settings)
+        self.importer.setMinPrint(0)
+        self.importer.setQuiet(False)
+        self.importer.setFailOnError(False)
+        self.importer.setHandCount(0)

-        self.last_read_hand_id=self.importer.import_file_dict(self, self.settings)
+        self.last_read_hand_id=self.importer.import_file_dict()
    #end def table_viewer.import_clicked

    def all_clicked(self, widget, data):
@@ -25,51 +25,62 @@ import os
import sys

import fpdb_db
+import fpdb_import
import FpdbSQLQueries

import unittest

class TestSequenceFunctions(unittest.TestCase):

    def setUp(self):
        """Configure MySQL settings/database and establish connection"""
-        self.mysql_settings={ 'db-host':"localhost", 'db-backend':2, 'db-databaseName':"fpdbtest", 'db-user':"fpdb", 'db-password':"fpdb"}
-        self.mysql_db = fpdb_db.fpdb_db()
-        self.mysql_db.connect(self.mysql_settings['db-backend'], self.mysql_settings['db-host'],
-                              self.mysql_settings['db-databaseName'], self.mysql_settings['db-user'],
-                              self.mysql_settings['db-password'])
-        self.mysqldict = FpdbSQLQueries.FpdbSQLQueries('MySQL InnoDB')
+        self.mysql_settings={ 'db-host':"localhost",
+                              'db-backend':2,
+                              'db-databaseName':"fpdbtest",
+                              'db-user':"fpdb",
+                              'db-password':"fpdb"}
+        self.mysql_db = fpdb_db.fpdb_db()
+        self.mysql_db.connect(self.mysql_settings['db-backend'], self.mysql_settings['db-host'],
+                              self.mysql_settings['db-databaseName'], self.mysql_settings['db-user'],
+                              self.mysql_settings['db-password'])
+        self.mysqldict = FpdbSQLQueries.FpdbSQLQueries('MySQL InnoDB')
+        self.mysqlimporter = fpdb_import.Importer(self, self.mysql_settings)

-        """Configure Postgres settings/database and establish connection"""
-        self.pg_settings={ 'db-host':"localhost", 'db-backend':3, 'db-databaseName':"fpdbtest", 'db-user':"fpdb", 'db-password':"fpdb"}
-        self.pg_db = fpdb_db.fpdb_db()
-        self.pg_db.connect(self.pg_settings['db-backend'], self.pg_settings['db-host'],
-                           self.pg_settings['db-databaseName'], self.pg_settings['db-user'],
-                           self.pg_settings['db-password'])
-        self.pgdict = FpdbSQLQueries.FpdbSQLQueries('PostgreSQL')
+#        """Configure Postgres settings/database and establish connection"""
+#        self.pg_settings={ 'db-host':"localhost", 'db-backend':3, 'db-databaseName':"fpdbtest", 'db-user':"fpdb", 'db-password':"fpdb"}
+#        self.pg_db = fpdb_db.fpdb_db()
+#        self.pg_db.connect(self.pg_settings['db-backend'], self.pg_settings['db-host'],
+#                           self.pg_settings['db-databaseName'], self.pg_settings['db-user'],
+#                           self.pg_settings['db-password'])
+#        self.pgdict = FpdbSQLQueries.FpdbSQLQueries('PostgreSQL')


    def testDatabaseConnection(self):
        """Test all supported DBs"""
        self.result = self.mysql_db.cursor.execute(self.mysqldict.query['list_tables'])
-        self.failUnless(self.result==13, "Number of tables in database incorrect. Expected 13 got " + str(self.result))

-        print self.pgdict.query['list_tables']

-        self.result = self.pg_db.cursor.execute(self.pgdict.query['list_tables'])
        self.failUnless(self.result==13, "Number of tables in database incorrect. Expected 13 got " + str(self.result))

+#        self.result = self.pg_db.cursor.execute(self.pgdict.query['list_tables'])
+#        self.failUnless(self.result==13, "Number of tables in database incorrect. Expected 13 got " + str(self.result))

    def testMySQLRecreateTables(self):
        """Test droping then recreating fpdb table schema"""
        self.mysql_db.recreate_tables()
        self.result = self.mysql_db.cursor.execute("SHOW TABLES")
        self.failUnless(self.result==13, "Number of tables in database incorrect. Expected 13 got " + str(self.result))

-    def testPostgresSQLRecreateTables(self):
-        """Test droping then recreating fpdb table schema"""
-        self.pg_db.recreate_tables()
-        self.result = self.pg_db.cursor.execute(self.pgdict.query['list_tables'])
-        self.failUnless(self.result==13, "Number of tables in database incorrect. Expected 13 got " + str(self.result))
+    def testImportHandHistoryFiles(self):
+        """Test import of single HH file"""
+        self.mysqlimporter.addImportFile("regression-test-files/hand-histories/ps-lhe-ring-3hands.txt")
+        self.mysqlimporter.runImport()
+        self.mysqlimporter.addImportDirectory("regression-test-files/hand-histories")
+        self.mysqlimporter.runImport()

+#    def testPostgresSQLRecreateTables(self):
+#        """Test droping then recreating fpdb table schema"""
+#        self.pg_db.recreate_tables()
+#        self.result = self.pg_db.cursor.execute(self.pgdict.query['list_tables'])
+#        self.failUnless(self.result==13, "Number of tables in database incorrect. Expected 13 got " + str(self.result))

if __name__ == '__main__':
    unittest.main()
@@ -40,63 +40,132 @@ from time import time

class Importer:

-    def __init__(self):
+    def __init__(self, caller, settings):
        """Constructor"""
-        self.settings={'imp-callFpdbHud':False}
+        self.settings=settings
+        self.caller=caller
        self.db = None
        self.cursor = None
-        self.options = None
+        self.filelist = []
+        self.dirlist = []
+        self.monitor = False
+        self.updated = 0 #Time last import was run, used as mtime reference
        self.callHud = False
        self.lines = None
        self.pos_in_file = {} # dict to remember how far we have read in the file
+        #Set defaults
+        if not self.settings.has_key('imp-callFpdbHud'):
+            self.settings['imp-callFpdbHud'] = False
+        if not self.settings.has_key('minPrint'):
+            self.settings['minPrint'] = 30
+        self.dbConnect()

-    def dbConnect(self, options, settings):
+    def dbConnect(self):
        #connect to DB
-        if settings['db-backend'] == 2:
+        if self.settings['db-backend'] == 2:
            if not mysqlLibFound:
                raise fpdb_simple.FpdbError("interface library MySQLdb not found but MySQL selected as backend - please install the library or change the config file")
-            self.db = MySQLdb.connect(host = options.server, user = options.user,
-                                      passwd = options.password, db = options.database)
+            self.db = MySQLdb.connect(self.settings['db-host'], self.settings['db-user'],
+                                      self.settings['db-password'], self.settings['db-databaseName'])
-        elif settings['db-backend'] == 3:
+        elif self.settings['db-backend'] == 3:
            if not pgsqlLibFound:
                raise fpdb_simple.FpdbError("interface library psycopg2 not found but PostgreSQL selected as backend - please install the library or change the config file")
-            self.db = psycopg2.connect(host = options.server, user = options.user,
-                                       password = options.password, database = options.database)
+            self.db = psycopg2.connect(self.settings['db-host'], self.settings['db-user'],
+                                       self.settings['db-password'], self.settings['db-databaseName'])
-        elif settings['db-backend'] == 4:
+        elif self.settings['db-backend'] == 4:
            pass
        else:
            pass
        self.cursor = self.db.cursor()

+    #Set functions
    def setCallHud(self, value):
        self.callHud = value

-    def import_file_dict(self, options, settings):
-        self.options=options
+    def setMinPrint(self, value):
+        self.settings['minPrint'] = int(value)

+    def setHandCount(self, value):
+        self.settings['handCount'] = int(value)

+    def setQuiet(self, value):
+        self.settings['quiet'] = value

+    def setFailOnError(self, value):
+        self.settings['failOnError'] = value

+    def setWatchTime(self):
+        self.updated = time()

+    def clearFileList(self):
+        self.filelist = []

+    #Add an individual file to filelist
+    def addImportFile(self, filename):
+        #todo: test it is a valid file
+        self.filelist = self.filelist + [filename]
+        #Remove duplicates
+        self.filelist = list(set(self.filelist))

+    #Add a directory of files to filelist
+    def addImportDirectory(self,dir,monitor = False):
+        #todo: test it is a valid directory
+        if monitor == True:
+            self.monitor = True
+            self.dirlist = self.dirlist + [dir]

+        for file in os.listdir(dir):
+            if os.path.isdir(file):
+                print "BulkImport is not recursive - please select the final directory in which the history files are"
+            else:
+                self.filelist = self.filelist + [os.path.join(dir, file)]
+        #Remove duplicates
+        self.filelist = list(set(self.filelist))

+    #Run full import on filelist
+    def runImport(self):
+        for file in self.filelist:
+            self.import_file_dict(file)

+    #Run import on updated files, then store latest update time.
+    def runUpdated(self):
+        #Check for new files in directory
+        #todo: make efficient - always checks for new file, should be able to use mtime of directory
+        # ^^ May not work on windows
+        for dir in self.dirlist:
+            for file in os.listdir(dir):
+                self.filelist = self.filelist + [dir+os.sep+file]

+        self.filelist = list(set(self.filelist))

+        for file in self.filelist:
+            stat_info = os.stat(file)
+            if stat_info.st_mtime > self.updated:
+                self.import_file_dict(file)
+        self.updated = time()

+    # This is now an internal function that should not be called directly.
+    def import_file_dict(self, file):
        starttime = time()
        last_read_hand=0
        loc = 0
-        if (options.inputFile=="stdin"):
+        if (file=="stdin"):
            inputFile=sys.stdin
        else:
-            inputFile=open(options.inputFile, "rU")
-            try: loc = self.pos_in_file[options.inputFile]
+            inputFile=open(file, "rU")
+            try: loc = self.pos_in_file[file]
            except: pass

-        self.dbConnect(options,settings)

        # Read input file into class and close file
        inputFile.seek(loc)
        self.lines=fpdb_simple.removeTrailingEOL(inputFile.readlines())
-        self.pos_in_file[options.inputFile] = inputFile.tell()
+        self.pos_in_file[file] = inputFile.tell()
        inputFile.close()

        firstline = self.lines[0]

        if firstline.find("Tournament Summary")!=-1:
            print "TODO: implement importing tournament summaries"
-            self.cursor.close()
-            self.db.close()
            return 0

        site=fpdb_simple.recogniseSite(firstline)
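Note (not part of the commit): the hunk above replaces the old options/settings entry point with a file-list API (addImportFile, addImportDirectory, runImport, runUpdated plus the set* helpers). Below is a minimal usage sketch in the spirit of the GUI callers and the test hunk above; the settings values and sample paths are borrowed from that test hunk, and passing None as the caller is an assumption that only holds while the HUD callback stays disabled.

# Sketch: driving the reworked Importer from a script (assumes the MySQL test database above).
import fpdb_import

settings = {'db-backend': 2, 'db-host': "localhost", 'db-databaseName': "fpdbtest",
            'db-user': "fpdb", 'db-password': "fpdb", 'imp-callFpdbHud': False}

importer = fpdb_import.Importer(None, settings)   # caller=None is fine while the HUD callback is off
importer.setMinPrint(0)
importer.setQuiet(False)
importer.setFailOnError(False)
importer.setHandCount(0)

# One-shot bulk import: queue files and/or a directory, then run the whole list.
importer.addImportFile("regression-test-files/hand-histories/ps-lhe-ring-3hands.txt")
importer.addImportDirectory("regression-test-files/hand-histories")
importer.runImport()
importer.clearFileList()

# Auto-import: register the directory with monitor=True and poll runUpdated(),
# which only re-imports files whose mtime is newer than the last recorded run.
importer.setWatchTime()
importer.addImportDirectory("regression-test-files/hand-histories", True)
importer.runUpdated()

GuiBulkImport drives the one-shot path, while GuiAutoImport registers a monitored directory and calls runUpdated() from its timer callback.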
@@ -151,41 +220,36 @@ class Importer:
                stored+=1
                self.db.commit()
                # if settings['imp-callFpdbHud'] and self.callHud and os.sep=='/':
-                if settings['imp-callFpdbHud'] and self.callHud:
+                if self.settings['imp-callFpdbHud'] and self.callHud:
                    #print "call to HUD here. handsId:",handsId
                    #pipe the Hands.id out to the HUD
-                    # options.pipe_to_hud.write("%s" % (handsId) + os.linesep)
-                    options.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
+                    self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
            except fpdb_simple.DuplicateError:
                duplicates+=1
            except (ValueError), fe:
                errors+=1
-                self.printEmailErrorMessage(errors, options.inputFile, hand[0])
+                self.printEmailErrorMessage(errors, file, hand[0])

-                if (options.failOnError):
-                    self.db.commit() #dont remove this, in case hand processing was cancelled this ties up any open ends.
-                    self.cursor.close()
-                    self.db.close()
+                if (self.settings['failOnError']):
+                    self.db.commit() #dont remove this, in case hand processing was cancelled.
                    raise
            except (fpdb_simple.FpdbError), fe:
                errors+=1
-                self.printEmailErrorMessage(errors, options.inputFile, hand[0])
+                self.printEmailErrorMessage(errors, file, hand[0])

                #fe.printStackTrace() #todo: get stacktrace
                self.db.rollback()

-                if (options.failOnError):
-                    self.db.commit() #dont remove this, in case hand processing was cancelled this ties up any open ends.
-                    self.cursor.close()
-                    self.db.close()
+                if (self.settings['failOnError']):
+                    self.db.commit() #dont remove this, in case hand processing was cancelled.
                    raise
-            if (options.minPrint!=0):
-                if ((stored+duplicates+partial+errors)%options.minPrint==0):
+            if (self.settings['minPrint']!=0):
+                if ((stored+duplicates+partial+errors)%self.settings['minPrint']==0):
                    print "stored:", stored, "duplicates:", duplicates, "partial:", partial, "errors:", errors

-            if (options.handCount!=0):
-                if ((stored+duplicates+partial+errors)>=options.handCount):
-                    if (not options.quiet):
+            if (self.settings['handCount']!=0):
+                if ((stored+duplicates+partial+errors)>=self.settings['handCount']):
+                    if (not self.settings['quiet']):
                        print "quitting due to reaching the amount of hands to be imported"
                        print "Total stored:", stored, "duplicates:", duplicates, "partial/damaged:", partial, "errors:", errors, " time:", (time() - starttime)
                    sys.exit(0)
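Note (not part of the commit): with the change above, hand ids are piped to self.caller.pipe_to_hud.stdin rather than to an options attribute, so whatever object is passed as caller must expose a pipe_to_hud with a writable stdin whenever 'imp-callFpdbHud' is set and setCallHud(True) has been called. A rough sketch of such a caller follows, assuming the HUD runs as a child process; the command name is a placeholder, not taken from the commit.

# Hypothetical caller object satisfying what import_file_dict writes to above.
import subprocess
import fpdb_import

class HudCaller:
    def __init__(self, settings):
        # "fpdb_hud.py" is a placeholder command; the real GUI builds its own HUD pipe
        self.pipe_to_hud = subprocess.Popen(["python", "fpdb_hud.py"], bufsize=1,
                                            stdin=subprocess.PIPE,
                                            universal_newlines=True)
        # settings must also contain 'imp-callFpdbHud': True for the write to happen
        self.importer = fpdb_import.Importer(self, settings)
        self.importer.setCallHud(True)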
@@ -203,14 +267,12 @@ class Importer:
            handsId=0
        #todo: this will cause return of an unstored hand number if the last hand was error or partial
        self.db.commit()
-        self.cursor.close()
-        self.db.close()
        return handsId
    #end def import_file_dict

    def printEmailErrorMessage(self, errors, filename, line):
        print "Error No.",errors,", please send the hand causing this to steffen@sycamoretest.info so I can fix it."
-        print "Filename:", self.options.inputFile
+        print "Filename:", filename
        print "Here is the first line so you can identify it. Please mention that the error was a ValueError:"
        print self.hand[0]