Added timing info for index drop/create. Some small mods to the output during import (obviously, just change any you don't like)

sqlcoder 2009-06-07 22:21:58 +01:00
parent c6f3595b93
commit 0a7c340656
3 changed files with 13 additions and 5 deletions


@@ -132,7 +132,7 @@ Otherwise, finish at eof...
             self.processHand(handText)
         numHands = len(handsList)
         endtime = time.time()
-        print "Processed %d hands in %.3f seconds" % (numHands, endtime - starttime)
+        print "read %d hands in %.3f seconds" % (numHands, endtime - starttime)
         if self.out_fh != sys.stdout:
             self.out_fh.close()
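The hunk above reports throughput with the usual time.time() bracket; starttime is assigned before the parsing loop, outside this hunk. A minimal standalone sketch of the pattern, with illustrative names that are not from the source:

    import time

    def read_hands(handsList, processHand):
        starttime = time.time()     # wall-clock timestamp before the loop
        for handText in handsList:
            processHand(handText)
        endtime = time.time()       # and after it
        # %.3f keeps millisecond resolution in the report
        print "read %d hands in %.3f seconds" % (len(handsList), endtime - starttime)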


@@ -18,6 +18,7 @@
 import os
 import re
 import sys
+from time import time, strftime
 
 import fpdb_simple
 import FpdbSQLQueries
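Note that the new import binds the function rather than the module: after "from time import time", calls are written time() instead of time.time(), and the name time no longer refers to the module within this file. The timing hunks below rely on exactly that form:

    from time import time

    stime = time()      # seconds since the epoch, as a float
    # ... some work ...
    print "took", time() - stime, "seconds"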
@@ -175,7 +176,7 @@ class fpdb_db:
         psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
         # If DB connection is made over TCP, then the variables
         # host, user and password are required
-        print "host=%s user=%s pass=%s." % (host, user, password)
+        # print "host=%s user=%s pass=%s." % (host, user, password)
         if self.host and self.user and self.password:
             try:
                 self.db = psycopg2.connect(host = host,
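The change above stops the password from being echoed in clear text on every TCP connect. If a connect-time debug line is still wanted, one hedged alternative, not part of this commit, is to mask the credential:

    # sketch only: sample values standing in for the real connection variables
    host, user, password = "localhost", "fpdb", "s3cret"
    print "host=%s user=%s pass=%s." % (host, user, "*" * len(password))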
@@ -327,7 +328,7 @@ class fpdb_db:
     def prepareBulkImport(self):
         """Drop some indexes/foreign keys to prepare for bulk import.
           Currently keeping the standalone indexes as needed to import quickly"""
-        # self is a fpdb_db object including backend, db, cursor, sql variables
+        stime = time()
         if self.backend == self.PGSQL:
             self.db.set_isolation_level(0)   # allow table/index operations to work
         for fk in self.foreignKeys[self.backend]:
@@ -410,11 +411,13 @@ class fpdb_db:
         if self.backend == self.PGSQL:
             self.db.set_isolation_level(1)   # go back to normal isolation level
         self.db.commit()   # seems to clear up errors if there were any in postgres
+        ptime = time() - stime
+        print "prepare import took", ptime, "seconds"
     #end def prepareBulkImport
 
     def afterBulkImport(self):
         """Re-create any dropped indexes/foreign keys after bulk import"""
-        # self is a fpdb_db object including backend, db, cursor, sql variables
+        stime = time()
         if self.backend == self.PGSQL:
             self.db.set_isolation_level(0)   # allow table/index operations to work
         for fk in self.foreignKeys[self.backend]:
@@ -480,6 +483,8 @@ class fpdb_db:
         if self.backend == self.PGSQL:
             self.db.set_isolation_level(1)   # go back to normal isolation level
         self.db.commit()   # seems to clear up errors if there were any in postgres
+        atime = time() - stime
+        print "after import took", atime, "seconds"
     #end def afterBulkImport
 
     def createAllIndexes(self):
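prepareBulkImport and afterBulkImport now end with the same stime/elapsed/print boilerplate, and analyzeDB below repeats it again. A sketch of one way to factor that out, purely hypothetical and not part of this commit:

    from time import time

    def timed(label, func):
        """Run func() and report its elapsed wall-clock time under label."""
        stime = time()
        func()
        print label, "took", time() - stime, "seconds"

    # hypothetical usage: timed("prepare import", db.prepareBulkImport)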
@@ -542,6 +547,7 @@ class fpdb_db:
     def analyzeDB(self):
         """Do whatever the DB can offer to update index/table statistics"""
+        stime = time()
         if self.backend == self.PGSQL:
             self.db.set_isolation_level(0)   # allow vacuum to work
             try:
@@ -550,6 +556,8 @@ class fpdb_db:
                 print "Error during vacuum"
             self.db.set_isolation_level(1)   # go back to normal isolation level
         self.db.commit()
+        atime = time() - stime
+        print "analyze took", atime, "seconds"
     #end def analyzeDB
 
     # Currently uses an exclusive lock on the Hands table as a global lock
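PostgreSQL refuses to run VACUUM inside a transaction block, which is why analyzeDB first drops to psycopg2 isolation level 0 (autocommit) and restores level 1 (read committed) afterwards. A minimal sketch of that dance, with placeholder connection parameters and a try/finally so the level is restored even when the vacuum fails:

    import psycopg2

    db = psycopg2.connect(host="localhost", user="fpdb", password="...", database="fpdb")
    cursor = db.cursor()
    db.set_isolation_level(0)       # autocommit: VACUUM cannot run in a transaction
    try:
        cursor.execute("vacuum analyze")
    except:
        print "Error during vacuum"
    finally:
        db.set_isolation_level(1)   # back to level 1 (read committed)
    db.commit()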


@@ -256,7 +256,7 @@ class Importer:
                 conv = None
                 # Load filter, process file, pass returned filename to import_fpdb_file
-                print "converting %s" % file
+                print "\nConverting %s" % file
                 hhbase = self.config.get_import_parameters().get("hhArchiveBase")
                 hhbase = os.path.expanduser(hhbase)
                 hhdir = os.path.join(hhbase, site)