moved the timeout_add that calls do_import outside of the for loop, so the timer is only created once per autoimport start, not once per site. fixed addImportFile to ignore files that don't exist (not sure how they'd get there, but just in case). fixed the exception handler that looked for fpdb_simple.DuplicateError to use its actual location, Exceptions.DuplicateError
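The core of the first fix is that gobject.timeout_add returns a source id, which can be handed back to gobject.source_remove before a new timer is registered, so repeated starts never stack callbacks. A minimal sketch of that pattern, using a hypothetical Poller class rather than the real GuiAutoImport:

import gobject

class Poller:
    def __init__(self):
        self.importtimer = 0  # 0 means no timer has been registered yet

    def start(self, interval_secs, callback):
        # drop any timer left over from an earlier start so callbacks never stack
        if self.importtimer != 0:
            gobject.source_remove(self.importtimer)
        self.importtimer = gobject.timeout_add(interval_secs * 1000, callback)

    def stop(self):
        if self.importtimer != 0:
            gobject.source_remove(self.importtimer)
            self.importtimer = 0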

Eric Blade 2009-09-11 00:12:46 -05:00
parent 1f9dd8f8e9
commit 715b14f81a
2 changed files with 12 additions and 3 deletions

View File

@@ -34,6 +34,7 @@ import string
class GuiAutoImport (threading.Thread):
    def __init__(self, settings, config, sql):
        """Constructor for GuiAutoImport"""
        self.importtimer = 0
        self.settings=settings
        self.config=config
        self.sql = sql
@@ -196,11 +197,14 @@
                self.do_import()
                interval = int(self.intervalEntry.get_text())
                gobject.timeout_add(interval*1000, self.do_import)
                if self.importtimer != 0:
                    gobject.source_remove(self.importtimer)
                self.importtimer = gobject.timeout_add(interval*1000, self.do_import)
            else:
                print "auto-import aborted - global lock not available"
        else: # toggled off
            gobject.source_remove(self.importtimer)
            self.settings['global_lock'].release()
            self.doAutoImportBool = False # do_import will return this and stop the gobject callback timer
            print "Stopping autoimport - global lock released."

View File

@@ -40,6 +40,7 @@ import fpdb_db
import Database
import fpdb_parse_logic
import Configuration
import Exceptions
import logging, logging.config
logging.config.fileConfig(os.path.join(sys.path[0],"logging.conf"))
@@ -152,6 +153,8 @@ class Importer:
    #Add an individual file to filelist
    def addImportFile(self, filename, site = "default", filter = "passthrough"):
        #TODO: test it is a valid file -> put that in config!!
        if filename in self.filelist or not os.path.exists(filename):
            return
        self.filelist[filename] = [site] + [filter]
        if site not in self.siteIds:
            # Get id from Sites table in DB
@@ -353,6 +356,7 @@
                #rulog.writelines("path exists ")
                if file in self.updatedsize: # we should be able to assume that if we're in size, we're in time as well
                    if stat_info.st_size > self.updatedsize[file] or stat_info.st_mtime > self.updatedtime[file]:
                        # print "file",counter," updated", os.path.basename(file), stat_info.st_size, self.updatedsize[file], stat_info.st_mtime, self.updatedtime[file]
                        self.import_file_dict(self.database, file, self.filelist[file][0], self.filelist[file][1], None)
                        self.updatedsize[file] = stat_info.st_size
                        self.updatedtime[file] = time()
@@ -365,6 +369,7 @@
                        self.updatedtime[file] = time()
            else:
                self.removeFromFileList[file] = True
        self.addToDirList = filter(lambda x: self.addImportDirectory(x, True, self.addToDirList[x][0], self.addToDirList[x][1]), self.addToDirList)
        for file in self.removeFromFileList:
@@ -546,7 +551,7 @@
                    #pipe the Hands.id out to the HUD
                    #print "sending hand to hud", handsId, "pipe =", self.caller.pipe_to_hud
                    self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep)
            except fpdb_simple.DuplicateError:
            except Exceptions.DuplicateError:
                duplicates += 1
                db.rollback()
            except (ValueError), fe:
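A note on the last hunk: if fpdb_simple no longer defines DuplicateError, the old except clause itself raises an AttributeError the moment a duplicate hand comes through, so duplicates crash the import instead of being counted. A self-contained sketch of the intended behaviour; the class hierarchy, store_hand helper, and sample hands are assumptions for illustration, not fpdb's actual code:

class FpdbError(Exception):
    pass

class DuplicateError(FpdbError):
    # in fpdb this lives in Exceptions.py, hence the import Exceptions above
    pass

def store_hand(hand_text, seen):
    # hypothetical stand-in for the parse-and-insert step in the importer
    if hand_text in seen:
        raise DuplicateError("hand already imported")
    seen.add(hand_text)

seen = set()
duplicates = 0
for hand in ["hand-1", "hand-2", "hand-1"]:
    try:
        store_hand(hand, seen)
    except DuplicateError:
        duplicates += 1  # count it and carry on, mirroring the rollback branch in the diff
print "duplicates:", duplicates  # -> duplicates: 1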