Moved the timeout_add that calls do_import outside of the for loop, so it's only created once per autoimport start, not once per site. Fixed addImportFile to ignore files that don't exist (not sure how they'd get there, but just in case). Fixed the exception handler that looked for fpdb_simple.DuplicateError to use its actual location, Exceptions.DuplicateError.
parent 1f9dd8f8e9
commit 715b14f81a
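For context on the first change: the auto-import code should own exactly one gobject timeout source, removing any leftover source before creating a new one and remembering its id so the stop path can cancel it. A minimal sketch of that pattern, assuming PyGTK's gobject module as used elsewhere in fpdb (the AutoImportTimer class and its method names are illustrative only, not part of this commit):

import gobject

class AutoImportTimer:
    """Illustrative wrapper: keeps at most one periodic timer alive."""
    def __init__(self, interval_secs, callback):
        self.interval = interval_secs
        self.callback = callback   # callback should return True to keep the timer firing
        self.timer_id = 0          # 0 means no active timer, matching self.importtimer = 0

    def start(self):
        # drop any timer left over from a previous start before adding a new one
        if self.timer_id != 0:
            gobject.source_remove(self.timer_id)
        self.timer_id = gobject.timeout_add(self.interval * 1000, self.callback)

    def stop(self):
        if self.timer_id != 0:
            gobject.source_remove(self.timer_id)
            self.timer_id = 0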
GuiAutoImport.py
@@ -34,6 +34,7 @@ import string
 class GuiAutoImport (threading.Thread):
     def __init__(self, settings, config, sql):
         """Constructor for GuiAutoImport"""
+        self.importtimer = 0
         self.settings=settings
         self.config=config
         self.sql = sql
@@ -196,11 +197,14 @@ class GuiAutoImport (threading.Thread):
                 self.do_import()

                 interval = int(self.intervalEntry.get_text())
-                gobject.timeout_add(interval*1000, self.do_import)
+                if self.importtimer != 0:
+                    gobject.source_remove(self.importtimer)
+                self.importtimer = gobject.timeout_add(interval*1000, self.do_import)

             else:
                 print "auto-import aborted - global lock not available"
         else: # toggled off
+            gobject.source_remove(self.importtimer)
             self.settings['global_lock'].release()
             self.doAutoImportBool = False # do_import will return this and stop the gobject callback timer
             print "Stopping autoimport - global lock released."
fpdb_import.py
@@ -40,6 +40,7 @@ import fpdb_db
 import Database
 import fpdb_parse_logic
 import Configuration
+import Exceptions

 import logging, logging.config
 logging.config.fileConfig(os.path.join(sys.path[0],"logging.conf"))
@@ -152,6 +153,8 @@ class Importer:
     #Add an individual file to filelist
     def addImportFile(self, filename, site = "default", filter = "passthrough"):
         #TODO: test it is a valid file -> put that in config!!
+        if filename in self.filelist or not os.path.exists(filename):
+            return
         self.filelist[filename] = [site] + [filter]
         if site not in self.siteIds:
             # Get id from Sites table in DB
@@ -353,6 +356,7 @@ class Importer:
                 #rulog.writelines("path exists ")
                 if file in self.updatedsize: # we should be able to assume that if we're in size, we're in time as well
                     if stat_info.st_size > self.updatedsize[file] or stat_info.st_mtime > self.updatedtime[file]:
+                        # print "file",counter," updated", os.path.basename(file), stat_info.st_size, self.updatedsize[file], stat_info.st_mtime, self.updatedtime[file]
                         self.import_file_dict(self.database, file, self.filelist[file][0], self.filelist[file][1], None)
                         self.updatedsize[file] = stat_info.st_size
                         self.updatedtime[file] = time()
@@ -365,6 +369,7 @@ class Importer:
                     self.updatedtime[file] = time()
             else:
                 self.removeFromFileList[file] = True
+
         self.addToDirList = filter(lambda x: self.addImportDirectory(x, True, self.addToDirList[x][0], self.addToDirList[x][1]), self.addToDirList)

         for file in self.removeFromFileList:
@@ -546,7 +551,7 @@ class Importer:
                 #pipe the Hands.id out to the HUD
                 #print "sending hand to hud", handsId, "pipe =", self.caller.pipe_to_hud
                 self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
-            except fpdb_simple.DuplicateError:
+            except Exceptions.DuplicateError:
                 duplicates += 1
                 db.rollback()
             except (ValueError), fe:
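On the last hunk: except clauses match the raised exception's class, so the handler has to reference DuplicateError where it is actually defined; with the class living in the Exceptions module, an except clause naming fpdb_simple.DuplicateError would no longer catch it. A rough sketch of the shape involved, assuming a plain Exception subclass in Exceptions.py (the FpdbError base class and the insert_hand call are illustrative assumptions, not taken from this commit):

# Exceptions.py (sketch)
class FpdbError(Exception):
    pass

class DuplicateError(FpdbError):
    pass

# caller side (sketch)
import Exceptions

def store_hand(db, hand):
    try:
        db.insert_hand(hand)        # hypothetical insert that raises on a duplicate hand
    except Exceptions.DuplicateError:
        db.rollback()               # duplicate hands are counted and skipped, not imported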