diff --git a/docs/tabledesign.html b/docs/tabledesign.html index e148b9cf..86898528 100644 --- a/docs/tabledesign.html +++ b/docs/tabledesign.html @@ -312,11 +312,13 @@ The program itself is licensed under AGPLv3, see agpl-3.0.txt


Table HandsPlayers

-

cardX: can be 1 through 20, one for each card. In holdem only 1-2 of these are used, in omaha 1-4, in stud/razz 1-7, in single draw 1-10, in tripple draw all 20 and in badugi 1-16 (4*4).

-

For the draw games: the first 5 (badugi: 4) cards are the initial cards, the next 5 (badugi: 4) are after the first draw, etc.
-Example 1: If a player gets 2-6 spades for his first five cards and decides to throw away the 4 and then gets a 7 of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 4, 5, 6, 2, 3, 5, 6, 7
-Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and decides to throw away the 2 and the 3 and then gets a Q and K of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 5, 8, J, 5, 8, J, Q, K.

+

cardX: can be 1 through 20, one for each card. In holdem only 1-2 of these are used, in omaha 1-4, in stud/razz 1-7, in single draw games 1-10 are used, and in badugi 1-16 (4*4) are used.

+

For the draw games: the first 5 (badugi: 4) cards are the initial cards, the next 5 (badugi: 4) are after the first draw. If a player keeps some cards then those cards' spaces are filled with "k", short for "kept".
+Example 1: If a player gets 2-6 spades for his first five cards and decides to throw away the 4 and then gets a 7 of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 4, 5, 6, k, k, 7, k, k
+Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and decides to throw away the 2 and the 3 and then gets a Q and K of spades then the first 10 fields of cardXValue would be as follows: 2, 3, 5, 8, J, Q, K, k, k, k
+Note that the k is written into the slot where the kept card was previously, so in example 2, where the player kept the last 3 cards, the last 3 fields of the first draw (i.e. card8Value-card10Value) are replaced with k.

I did not separate this into an extra table because the space wasted this way is not large enough to justify it, and the benefit for searching would be minor.

+

ToDo: Original plan was to implement the many flags from hudcache as booleans - need to try this out as it will save space and may therefore be quicker.

@@ -353,33 +355,24 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
@@ -388,7 +381,12 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and
@@ -405,6 +403,384 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and

Field Name

smallint

The seat in which the person was sitting - necessary for HUD

card1(..7)

smallint

0=none/unknown, 1-13=2-Ah 14-26=2-Ad 27-39=2-Ac 40-52=2-As

startCards

smallint

int representing Holdem starting cards.
Hand is stored as an int 13 * x + y where x and y are in range 0..12, and (x+2) and (y+2) represent the rank of each card (2=2 .. 14=Ace).
+If x > y then the hand is suited, if x < y then unsuited (x = y is a pair). See the encoding sketch after this table.
+Omaha and other games may need to use this as a key into another table. (to be decided ...)

ante

int

note: for cash this could be boolean, but in tourneys you may enter a hand with less than the full ante

cardXValue

smallint

2-10=2-10, J=11, Q=12, K=13, A=14 (even in razz), unknown/no card=x
- see note above table

cardXSuit

char(1)

h=hearts, s=spades, d=diamonds, c=clubs, unknown/no card=x

cardXDiscarded

boolean

Whether the card was discarded (this only applies to draw games, X can be 1 through 15 since the final cards can obviously not be discarded).

DrawnX

smallint

X can be 1 through 3.
- This field denotes how many cards the player has drawn on each draw.

winnings

int

rake

int

rake for this player for this hand

rake for this player for this hand (i.e. final pot(s) size = winnings + rake)

totalProfit

int

profit for this player for this hand ( i.e. winnings - (ante + bets) )

comment

bigint

references TourneysPlayers.id

tourneyTypeId

bigint

references TourneyTypes.id (maybe this should be on Hands?)

wonWhenSeenStreet1(..4)

float

How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split.
+ To be completely clear, this stores a hand count, NOT a money amount.
+ (2/3/4: Same for turn/street5, river/street6, street7)

wonAtSD

float

As wonWhenSeenStreet1, but for showdown.

street0VPI

int

did player pay to see flop, 1 or 0

street0Aggr

int

did player raise before flop, 1 or 0

street0_3BChance

int

did player have chance to 3B, 1 or 0

street0_3BDone

int

did player 3bet before flop, 1 or 0

street0_4BChance

int

did player have chance to 4B, 1 or 0

street0_4BDone

int

did player 4bet before flop, 1 or 0

other_3BStreet0

int

did other player 3bet before flop, 1 or 0

other_4BStreet0

int

did other player 4bet before flop, 1 or 0

street1Seen(/2/3/4)

int

did player see flop/street4 (.. etc)

sawShowdown

int

did player see showdown

street1Aggr

int

number of hands where player raised flop/street4

street2Aggr

int

number of hands where player raised turn/street5

street3Aggr

int

number of hands where player raised river/street6

street4Aggr

int

number of hands where player raised street7

otherRaisedStreet0

int

number of hands where someone else raised pre-flop/street3

otherRaisedStreet1

int

number of hands where someone else raised flop/street4

otherRaisedStreet2

int

number of hands where someone else raised turn/street5

otherRaisedStreet3

int

number of hands where someone else raised river/street6

otherRaisedStreet4

int

number of hands where someone else raised street7

foldToOtherRaisedStreet0

int

number of hands where someone else raised pre-flop/street3 and the player folded

foldToOtherRaisedStreet1

int

number of hands where someone else raised flop/street4 and the player folded

foldToOtherRaisedStreet2

int

number of hands where someone else raised Turn/street5 and the player folded

foldToOtherRaisedStreet3

int

number of hands where someone else raised River/street6 and the player folded

foldToOtherRaisedStreet4

int

number of hands where someone else raised street7 and the player folded

stealAttemptChance

int

Player was in CO, BTN or SB and nobody has called yet

stealAttempted

int

Player took a chance per the above condition

foldBbToStealChance

int

Somebody tried to steal BB from player

foldedBbToSteal

int

Player folded BB to steal attempt

foldSbToStealChance

int

Somebody tried to steal SB from player

foldedSbToSteal

int

Player folded SB to steal attempt

street1CBChance

int

Player had chance to make continuation bet on flop/street4

street1CBDone

int

Player used chance to make continuation bet on flop/street4

street2CBChance

int

Player had chance to make continuation bet on turn/street5

street2CBDone

int

Player used chance to make continuation bet on turn/street5

street3CBChance

int

Player had chance to make continuation bet on river/street6

street3CBDone

int

Player used chance to make continuation bet on river/street6

street4CBChance

int

Player had chance to make continuation bet on street7

street4CBDone

int

Player used chance to make continuation bet on street7

foldToStreet1CBChance

int

Player had chance to fold to continuation bet on this street

foldToStreet1CBDone

int

Player used chance to fold to continuation bet on this street

foldToStreet2CBChance

int

Player had chance to fold to continuation bet on this street

foldToStreet2CBDone

int

Player used chance to fold to continuation bet on this street

foldToStreet3CBChance

int

Player had chance to fold to continuation bet on this street

foldToStreet3CBDone

int

Player used chance to fold to continuation bet on this street

foldToStreet4CBChance

int

Player had chance to fold to continuation bet on this street

foldToStreet4CBDone

int

Player used chance to fold to continuation bet on this street

street1CheckCallRaiseChance

int

How often player had the chance to do a check-raise or a call-raise on this street

street1CheckCallRaiseDone

int

How often player used the chance to do a check-raise or a call-raise on this street

street2CheckCallRaiseChance

int

How often player had the chance to do a check-raise or a call-raise on this street

street2CheckCallRaiseDone

int

How often player used the chance to do a check-raise or a call-raise on this street

street3CheckCallRaiseChance

int

How often player had the chance to do a check-raise or a call-raise on this street

street3CheckCallRaiseDone

int

How often player used the chance to do a check-raise or a call-raise on this street

street4CheckCallRaiseChance

int

How often player had the chance to do a check-raise or a call-raise on this street

street4CheckCallRaiseDone

int

How often player used the chance to do a check-raise or a call-raise on this street

street0Calls

int

Number of times player called on this street

street1Calls

int

Number of times player called on this street

street2Calls

int

Number of times player called on this street

street3Calls

int

Number of times player called on this street

street4Calls

int

Number of times player called on this street

street0Bets

int

Number of times player bet on this street

street1Bets

int

Number of times player bet on this street

street2Bets

int

Number of times player bet on this street

street3Bets

int

Number of times player bet on this street

street4Bets

int

Number of times player bet on this street

street0Raises

int

Number of times player raised on this street

street1Raises

int

Number of times player raised on this street

street2Raises

int

Number of times player raised on this street

street3Raises

int

Number of times player raised on this street

street4Raises

int

Number of times player raised on this street

actionString

int

Experimental - idea is to store the action on this street as a string: e.g. kkBrcfC, with the player's own choices in upper case and other players' in lower case. k=check, b=bet, c=call, r=raise, f=fold. (Perhaps NL would miss out bet sizes for this?) It would then be possible to do complex ad-hoc queries using expressions like: actionString like '%B%r%C%' (see the sketch below).


Table HudCache

@@ -444,12 +820,23 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and

smallint

References TourneyTypes.id

-

HDs

int

number of hands this player played in this gametype with this number of seats

+ +

wonWhenSeenStreet1(/2/3/4)

+

float

+

How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split.
+ To be completely clear, this stores a hand count, NOT a money amount.
+ (/2/3/4: Same for turn/street5, river/street6, street7 - see the split-pot sketch after this table)

+ + +

wonAtSD

+

float

+

As wonWhenSeenStreet1, but for showdown.

+

street0VPI

int

@@ -463,14 +850,24 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and

number of hands where player raised before flop

-

street0_3B4BChance

+

street0_3BChance

int

-

number of hands where player had chance to 3B or 4B

+

number of hands where player had chance to 3B before flop

-

street0_3B4BDone

+

street0_3BDone

int

-

number of hands where player 3bet/4bet before flop

+

number of hands where player 3bet before flop

+ + +

street0_4BChance

+

int

+

number of hands where player had chance to 4B before flop

+ + +

street0_4BDone

+

int

+

number of hands where player 4bet before flop

street1Seen

@@ -517,6 +914,11 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and

int

number of hands where player raised street7

+ +

otherRaisedStreet0

+

int

+

number of hands where someone else raised pre-flop/street3

+

otherRaisedStreet1

int

@@ -537,6 +939,11 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and

int

number of hands where someone else raised street7

+ +

foldToOtherRaisedStreet0

+

int

+

number of hands where someone else raised pre-flop/street3 and the player folded

+

foldToOtherRaisedStreet1

int

@@ -557,18 +964,6 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and

int

number of hands where someone else raised street7 and the player folded

- -

wonWhenSeenStreet1

-

float

-

How many hands the player won after seeing the flop/street4 - this can be a "partial win" if the pot is split.
- To be completely clear, this stores a hand count, NOT a money amount.

- - -

wonAtSD

-

float

-

As wonWhenSeenStreet1, but for showdown.

- -

stealAttemptChance

int

@@ -729,6 +1124,84 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and

How often player used the chance to do a check-raise or a call-raise on this street

+ +

street0Calls

+

int

+

Number of times player called on this street

+ + +

street1Calls

+

int

+

Number of times player called on this street

+ + +

street2Calls

+

int

+

Number of times player called on this street

+ + +

street3Calls

+

int

+

Number of times player called on this street

+ + +

street4Calls

+

int

+

Number of times player called on this street

+ + + +

street0Bets

+

int

+

Number of times player bet on this street

+ + +

street1Bets

+

int

+

Number of times player bet on this street

+ + +

street2Bets

+

int

+

Number of times player bet on this street

+ + +

street3Bets

+

int

+

Number of times player bet on this street

+ + +

street4Bets

+

int

+

Number of times player bet on this street

+ + + +

street0Raises

+

int

+

Number of times player raised on this street

+ + +

street1Raises

+

int

+

Number of times player raised on this street

+ + +

street2Raises

+

int

+

Number of times player raised on this street

+ + +

street3Raises

+

int

+

Number of times player raised on this street

+ + +

street4Raises

+

int

+

Number of times player raised on this street

+ +

Table HandsActions

@@ -926,5 +1399,32 @@ Example 2: If a player gets 2, 3, 5, 8, J of spades for his first five cards and


+


+

Possible Changes


Table

Comment

BoardCards

Remove as these attributes are now stored on Hands

HandsActions

Remove if/when these attributes are stored on Hands or elsewhere

HandsPlayers

Move tourneyTypeId field to Hands table.

Comments

Comment fields on various tables should probably be moved to a single comment table. The aim should be, where possible, to reduce tables to a list of fixed-length not-null columns and keep the larger, sparser comment columns in a dedicated table. (May not be possible or practical, but something to aim at.)

diff --git a/pyfpdb/Filters.py b/pyfpdb/Filters.py index 3a5f0365..4846b998 100644 --- a/pyfpdb/Filters.py +++ b/pyfpdb/Filters.py @@ -44,6 +44,7 @@ class Filters(threading.Thread): self.games = {} self.limits = {} self.seats = {} + self.groups = {} self.siteid = {} self.heroes = {} self.boxes = {} @@ -52,6 +53,7 @@ class Filters(threading.Thread): self.filterText = {'limitsall':'All', 'limitsnone':'None', 'limitsshow':'Show _Limits' ,'seatsbetween':'Between:', 'seatsand':'And:', 'seatsshow':'Show Number of _Players' ,'limitstitle':'Limits:', 'seatstitle':'Number of Players:' + ,'groupstitle':'Grouping:', 'posnshow':'Show Position Stats:' } # For use in date ranges. @@ -109,6 +111,15 @@ class Filters(threading.Thread): self.fillSeatsFrame(vbox, self.display) seatsFrame.add(vbox) + # Groups + groupsFrame = gtk.Frame() + groupsFrame.show() + vbox = gtk.VBox(False, 0) + self.sbGroups = {} + + self.fillGroupsFrame(vbox, self.display) + groupsFrame.add(vbox) + # Date dateFrame = gtk.Frame("Date:") dateFrame.set_label_align(0.0, 0.0) @@ -131,6 +142,7 @@ class Filters(threading.Thread): self.mainVBox.add(gamesFrame) self.mainVBox.add(limitsFrame) self.mainVBox.add(seatsFrame) + self.mainVBox.add(groupsFrame) self.mainVBox.add(dateFrame) self.mainVBox.add(self.Button1) self.mainVBox.add(self.Button2) @@ -148,6 +160,8 @@ class Filters(threading.Thread): limitsFrame.hide() if "Seats" not in self.display or self.display["Seats"] == False: seatsFrame.hide() + if "Groups" not in self.display or self.display["Groups"] == False: + groupsFrame.hide() if "Dates" not in self.display or self.display["Dates"] == False: dateFrame.hide() if "Button1" not in self.display or self.display["Button1"] == False: @@ -183,6 +197,9 @@ class Filters(threading.Thread): self.seats['to'] = self.sbSeats['to'].get_value_as_int() return self.seats + def getGroups(self): + return self.groups + def getDates(self): return self.__get_dates() @@ -274,6 +291,11 @@ class Filters(threading.Thread): self.seats[seat] = w.get_active() print "self.seats[%s] set to %s" %(seat, self.seats[seat]) + def __set_group_select(self, w, group): + #print "__set_seat_select: seat =", seat, "active =", w.get_active() + self.groups[group] = w.get_active() + print "self.groups[%s] set to %s" %(group, self.groups[group]) + def fillPlayerFrame(self, vbox): for site in self.conf.get_supported_sites(): pathHBox = gtk.HBox(False, 0) @@ -389,10 +411,33 @@ class Filters(threading.Thread): self.sbSeats['show'] = cb self.seats['show'] = False - self.sbSeats['from'] = sb1 self.sbSeats['to'] = sb2 + def fillGroupsFrame(self, vbox, display): + hbox = gtk.HBox(False, 0) + vbox.pack_start(hbox, False, False, 0) + lbl_title = gtk.Label(self.filterText['groupstitle']) + lbl_title.set_alignment(xalign=0.0, yalign=0.5) + hbox.pack_start(lbl_title, expand=True, padding=3) + showb = gtk.Button(label="hide", stock=None, use_underline=True) + showb.set_alignment(xalign=1.0, yalign=0.5) + showb.connect('clicked', self.__toggle_box, 'groups') + hbox.pack_start(showb, expand=False, padding=1) + + vbox1 = gtk.VBox(False, 0) + vbox.pack_start(vbox1, False, False, 0) + self.boxes['groups'] = vbox1 + + hbox = gtk.HBox(False, 0) + vbox1.pack_start(hbox, False, True, 0) + + cb = gtk.CheckButton(self.filterText['posnshow']) + cb.connect('clicked', self.__set_group_select, 'posn') + hbox.pack_start(cb, False, False, 0) + self.sbGroups['posn'] = cb + self.groups['posn'] = False + def fillCardsFrame(self, vbox): hbox1 = gtk.HBox(True,0) hbox1.show() diff --git a/pyfpdb/FpdbSQLQueries.py 
b/pyfpdb/FpdbSQLQueries.py index 4445908e..598c0868 100644 --- a/pyfpdb/FpdbSQLQueries.py +++ b/pyfpdb/FpdbSQLQueries.py @@ -910,6 +910,7 @@ class FpdbSQLQueries: ,min(gt.bigBlind) AS minbigblind ,max(gt.bigBlind) AS maxbigblind /*, AS gtid*/ + , AS plposition ,count(1) AS n ,100.0*sum(cast(hp.street0VPI as integer))/count(1) AS vpip ,100.0*sum(cast(hp.street0Aggr as integer))/count(1) AS pfr @@ -963,12 +964,17 @@ class FpdbSQLQueries: ,gt.base ,gt.category + ,plposition ,upper(gt.limitType) ,s.name order by hp.playerId ,gt.base ,gt.category + ,case when 'B' then 'B' + when 'S' then 'S' + else concat('Z', ) + end ,maxbigblind desc ,upper(gt.limitType) @@ -983,7 +989,8 @@ class FpdbSQLQueries: ,s.name ,min(gt.bigBlind) AS minbigblind ,max(gt.bigBlind) AS maxbigblind - /*, AS gtid*/ + /*, AS gtid*/ + , AS plposition ,count(1) AS n ,100.0*sum(cast(hp.street0VPI as integer))/count(1) AS vpip ,100.0*sum(cast(hp.street0Aggr as integer))/count(1) AS pfr @@ -993,8 +1000,8 @@ class FpdbSQLQueries: ,case when sum(cast(hp.stealattemptchance as integer)) = 0 then -999 else 100.0*sum(cast(hp.stealattempted as integer))/sum(cast(hp.stealattemptchance as integer)) end AS steals - ,100.0*sum(cast(hp.street1Seen as integer))/count(1) AS saw_f - ,100.0*sum(cast(hp.sawShowdown as integer))/count(1) AS sawsd + ,100.0*sum(cast(hp.street1Seen as integer))/count(1) AS saw_f + ,100.0*sum(cast(hp.sawShowdown as integer))/count(1) AS sawsd ,case when sum(cast(hp.street1Seen as integer)) = 0 then -999 else 100.0*sum(cast(hp.sawShowdown as integer))/sum(cast(hp.street1Seen as integer)) end AS wtsdwsf @@ -1037,12 +1044,17 @@ class FpdbSQLQueries: ,gt.base ,gt.category + ,plposition ,upper(gt.limitType) ,s.name order by hp.playerId ,gt.base ,gt.category + ,case when 'B' then 'B' + when 'S' then 'S' + else 'Z'|| + end ,maxbigblind desc ,upper(gt.limitType) diff --git a/pyfpdb/GuiPlayerStats.py b/pyfpdb/GuiPlayerStats.py index 28990279..268aeab3 100644 --- a/pyfpdb/GuiPlayerStats.py +++ b/pyfpdb/GuiPlayerStats.py @@ -76,27 +76,28 @@ class GuiPlayerStats (threading.Thread): # ToDo: create popup to adjust column config # columns to display, keys match column name returned by sql, values in tuple are: # is column displayed, column heading, xalignment, formatting - self.columns = [ ("game", True, "Game", 0.0, "%s") - , ("hand", False, "Hand", 0.0, "%s") # true not allowed for this line - , ("n", True, "Hds", 1.0, "%d") - , ("avgseats", True, "Seats", 1.0, "%3.1f") - , ("vpip", True, "VPIP", 1.0, "%3.1f") - , ("pfr", True, "PFR", 1.0, "%3.1f") - , ("pf3", True, "PF3", 1.0, "%3.1f") - , ("steals", True, "Steals", 1.0, "%3.1f") - , ("saw_f", True, "Saw_F", 1.0, "%3.1f") - , ("sawsd", True, "SawSD", 1.0, "%3.1f") - , ("wtsdwsf", True, "WtSDwsF", 1.0, "%3.1f") - , ("wmsd", True, "W$SD", 1.0, "%3.1f") - , ("flafq", True, "FlAFq", 1.0, "%3.1f") - , ("tuafq", True, "TuAFq", 1.0, "%3.1f") - , ("rvafq", True, "RvAFq", 1.0, "%3.1f") - , ("pofafq", False, "PoFAFq", 1.0, "%3.1f") - , ("net", True, "Net($)", 1.0, "%6.2f") - , ("bbper100", True, "bb/100", 1.0, "%4.2f") - , ("rake", True, "Rake($)", 1.0, "%6.2f") - , ("bb100xr", True, "bbxr/100", 1.0, "%4.2f") - , ("variance", True, "Variance", 1.0, "%5.2f") + self.columns = [ ["game", True, "Game", 0.0, "%s"] + , ["hand", False, "Hand", 0.0, "%s"] # true not allowed for this line + , ["plposition", False, "Posn", 1.0, "%s"] # true not allowed for this line (set in code) + , ["n", True, "Hds", 1.0, "%d"] + , ["avgseats", True, "Seats", 1.0, "%3.1f"] + , ["vpip", True, "VPIP", 1.0, "%3.1f"] 
+ , ["pfr", True, "PFR", 1.0, "%3.1f"] + , ["pf3", True, "PF3", 1.0, "%3.1f"] + , ["steals", True, "Steals", 1.0, "%3.1f"] + , ["saw_f", True, "Saw_F", 1.0, "%3.1f"] + , ["sawsd", True, "SawSD", 1.0, "%3.1f"] + , ["wtsdwsf", True, "WtSDwsF", 1.0, "%3.1f"] + , ["wmsd", True, "W$SD", 1.0, "%3.1f"] + , ["flafq", True, "FlAFq", 1.0, "%3.1f"] + , ["tuafq", True, "TuAFq", 1.0, "%3.1f"] + , ["rvafq", True, "RvAFq", 1.0, "%3.1f"] + , ["pofafq", False, "PoFAFq", 1.0, "%3.1f"] + , ["net", True, "Net($)", 1.0, "%6.2f"] + , ["bbper100", True, "bb/100", 1.0, "%4.2f"] + , ["rake", True, "Rake($)", 1.0, "%6.2f"] + , ["bb100xr", True, "bbxr/100", 1.0, "%4.2f"] + , ["variance", True, "Variance", 1.0, "%5.2f"] ] # Detail filters: This holds the data used in the popup window, extra values are @@ -136,7 +137,7 @@ class GuiPlayerStats (threading.Thread): self.main_hbox.pack_start(self.stats_frame, expand=True, fill=True) # make sure Hand column is not displayed - [x for x in self.columns if x[0] == 'hand'][0][1] == False + [x for x in self.columns if x[0] == 'hand'][0][1] = False def get_vbox(self): """returns the vbox of this thread""" @@ -156,6 +157,7 @@ class GuiPlayerStats (threading.Thread): siteids = self.filters.getSiteIds() limits = self.filters.getLimits() seats = self.filters.getSeats() + groups = self.filters.getGroups() dates = self.filters.getDates() sitenos = [] playerids = [] @@ -180,16 +182,16 @@ class GuiPlayerStats (threading.Thread): print "No limits found" return - self.createStatsTable(vbox, playerids, sitenos, limits, seats, dates) + self.createStatsTable(vbox, playerids, sitenos, limits, seats, groups, dates) - def createStatsTable(self, vbox, playerids, sitenos, limits, seats, dates): + def createStatsTable(self, vbox, playerids, sitenos, limits, seats, groups, dates): starttime = time() # Display summary table at top of page # 3rd parameter passes extra flags, currently includes: # holecards - whether to display card breakdown (True/False) flags = [False] - self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates) + self.addTable(vbox, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, groups, dates) # Separator sep = gtk.HSeparator() @@ -212,13 +214,13 @@ class GuiPlayerStats (threading.Thread): # Detailed table flags = [True] - self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, dates) + self.addTable(vbox1, 'playerDetailedStats', flags, playerids, sitenos, limits, seats, groups, dates) self.db.db.commit() print "Stats page displayed in %4.2f seconds" % (time() - starttime) #end def fillStatsFrame(self, vbox): - def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats, dates): + def addTable(self, vbox, query, flags, playerids, sitenos, limits, seats, groups, dates): row = 0 sqlrow = 0 colalias,colshow,colheading,colxalign,colformat = 0,1,2,3,4 @@ -231,7 +233,7 @@ class GuiPlayerStats (threading.Thread): self.stats_table.show() tmp = self.sql.query[query] - tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats, dates) + tmp = self.refineQuery(tmp, flags, playerids, sitenos, limits, seats, groups, dates) self.cursor.execute(tmp) result = self.cursor.fetchall() colnames = [desc[0].lower() for desc in self.cursor.description] @@ -245,6 +247,8 @@ class GuiPlayerStats (threading.Thread): view.set_grid_lines(gtk.TREE_VIEW_GRID_LINES_BOTH) vbox.pack_start(view, expand=False, padding=3) textcell = gtk.CellRendererText() + textcell50 = gtk.CellRendererText() + 
textcell50.set_property('xalign', 0.5) numcell = gtk.CellRendererText() numcell.set_property('xalign', 1.0) listcols = [] @@ -258,17 +262,18 @@ class GuiPlayerStats (threading.Thread): listcols.append(gtk.TreeViewColumn(s)) view.append_column(listcols[col]) if column[colformat] == '%s': - if col == 1 and holecards: + if column[colxalign] == 0.0: listcols[col].pack_start(textcell, expand=True) + listcols[col].add_attribute(textcell, 'text', col) else: - listcols[col].pack_start(textcell, expand=True) - listcols[col].add_attribute(textcell, 'text', col) + listcols[col].pack_start(textcell50, expand=True) + listcols[col].add_attribute(textcell50, 'text', col) listcols[col].set_expand(True) else: listcols[col].pack_start(numcell, expand=True) listcols[col].add_attribute(numcell, 'text', col) - listcols[col].set_alignment(1.0) listcols[col].set_expand(True) + #listcols[col].set_alignment(column[colxalign]) # no effect? rows = len(result) # +1 for title row @@ -281,6 +286,11 @@ class GuiPlayerStats (threading.Thread): for col,column in enumerate(cols_to_show): if column[colalias] in colnames: value = result[sqlrow][colnames.index(column[colalias])] + if column[colalias] == 'plposition': + if value == 'B': + value = 'BB' + if value == 'S': + value = 'SB' else: if column[colalias] == 'game': if holecards: @@ -313,7 +323,7 @@ class GuiPlayerStats (threading.Thread): #end def addTable(self, query, vars, playerids, sitenos, limits, seats): - def refineQuery(self, query, flags, playerids, sitenos, limits, seats, dates): + def refineQuery(self, query, flags, playerids, sitenos, limits, seats, groups, dates): if not flags: holecards = False else: holecards = flags[0] @@ -376,6 +386,16 @@ class GuiPlayerStats (threading.Thread): # Filter on dates query = query.replace("", " between '" + dates[0] + "' and '" + dates[1] + "'") + # Group by position? + if groups['posn']: + query = query.replace("", 'hp.position') + # set flag in self.columns to show posn column + [x for x in self.columns if x[0] == 'plposition'][0][1] = True + else: + query = query.replace("", "'1'") + # unset flag in self.columns to hide posn column + [x for x in self.columns if x[0] == 'plposition'][0][1] = False + #print "query =\n", query return(query) #end def refineQuery(self, query, playerids, sitenos, limits): diff --git a/pyfpdb/HandHistoryConverter.py b/pyfpdb/HandHistoryConverter.py index 8b9eba63..f5c4c2a5 100644 --- a/pyfpdb/HandHistoryConverter.py +++ b/pyfpdb/HandHistoryConverter.py @@ -132,7 +132,7 @@ Otherwise, finish at eof... 
self.processHand(handText) numHands= len(handsList) endtime = time.time() - print "Processed %d hands in %.3f seconds" % (numHands, endtime - starttime) + print "read %d hands in %.3f seconds" % (numHands, endtime - starttime) if self.out_fh != sys.stdout: self.out_fh.close() diff --git a/pyfpdb/fpdb.py b/pyfpdb/fpdb.py index 645ddef5..8d514b90 100755 --- a/pyfpdb/fpdb.py +++ b/pyfpdb/fpdb.py @@ -181,7 +181,7 @@ class fpdb: def dia_load_profile(self, widget, data=None): """Dialogue to select a file to load a profile from""" - if self.obtain_global_lock(): + if self.obtain_global_lock() == 0: # returns 0 if successful try: chooser = gtk.FileChooserDialog(title="Please select a profile file to load", action=gtk.FILE_CHOOSER_ACTION_OPEN, @@ -201,7 +201,7 @@ class fpdb: def dia_recreate_tables(self, widget, data=None): """Dialogue that asks user to confirm that he wants to delete and recreate the tables""" - if self.obtain_global_lock(): + if self.obtain_global_lock() in (0,2): # returns 0 if successful, 2 if Hands table does not exist lock_released = False try: @@ -406,7 +406,7 @@ class fpdb: self.settings['db-databaseName'], self.settings['db-user'], self.settings['db-password']) - return fpdb_simple.get_global_lock(self.fdb_lock) + return self.fdb_lock.get_global_lock() #end def obtain_global_lock def quit(self, widget): @@ -455,7 +455,6 @@ class fpdb: ps_tab=new_ps_thread.get_vbox() self.add_and_display_tab(ps_tab, "Positional Stats") - def tab_main_help(self, widget, data=None): """Displays a tab with the main fpdb help screen""" #print "start of tab_main_help" diff --git a/pyfpdb/fpdb_db.py b/pyfpdb/fpdb_db.py index 44ea2352..583ecfc8 100644 --- a/pyfpdb/fpdb_db.py +++ b/pyfpdb/fpdb_db.py @@ -17,6 +17,9 @@ import os import re +import sys +from time import time, strftime + import fpdb_simple import FpdbSQLQueries @@ -29,6 +32,110 @@ class fpdb_db: self.MYSQL_INNODB = 2 self.PGSQL = 3 self.SQLITE = 4 + + # Data Structures for index and foreign key creation + # drop_code is an int with possible values: 0 - don't drop for bulk import + # 1 - drop during bulk import + # db differences: + # - note that mysql automatically creates indexes on constrained columns when + # foreign keys are created, while postgres does not. Hence the much longer list + # of indexes is required for postgres. 
+ # all primary keys are left on all the time + # + # table column drop_code + + self.indexes = [ + [ ] # no db with index 0 + , [ ] # no db with index 1 + , [ # indexes for mysql (list index 2) + {'tab':'Players', 'col':'name', 'drop':0} + , {'tab':'Hands', 'col':'siteHandNo', 'drop':0} + , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} + ] + , [ # indexes for postgres (list index 3) + {'tab':'Boardcards', 'col':'handId', 'drop':0} + , {'tab':'Gametypes', 'col':'siteId', 'drop':0} + , {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09 + , {'tab':'Hands', 'col':'siteHandNo', 'drop':0} + , {'tab':'HandsActions', 'col':'handsPlayerId', 'drop':0} + , {'tab':'HandsPlayers', 'col':'handId', 'drop':1} + , {'tab':'HandsPlayers', 'col':'playerId', 'drop':1} + , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0} + , {'tab':'HudCache', 'col':'gametypeId', 'drop':1} + , {'tab':'HudCache', 'col':'playerId', 'drop':0} + , {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0} + , {'tab':'Players', 'col':'siteId', 'drop':1} + , {'tab':'Players', 'col':'name', 'drop':0} + , {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1} + , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} + , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0} + , {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0} + , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0} + ] + ] + + self.foreignKeys = [ + [ ] # no db with index 0 + , [ ] # no db with index 1 + , [ # foreign keys for mysql + {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} + , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1} + , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1} + , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1} + , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} + , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0} + , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1} + ] + , [ # foreign keys for postgres + {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} + , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1} + , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1} + , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1} + , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} + , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0} + , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1} + ] + ] + + + # MySQL Notes: + # "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id + # - creates index handId on .handId + # alter table t drop foreign key fk + # alter table t add foreign key (fkcol) references tab(rcol) + # alter table t add constraint c foreign key (fkcol) references tab(rcol) + # (fkcol is used for foreigh key name) + + # mysql to list indexes: + # SELECT table_name, index_name, non_unique, column_name + # FROM INFORMATION_SCHEMA.STATISTICS + # WHERE table_name = 'tbl_name' + # AND table_schema = 'db_name' + # ORDER BY table_name, index_name, seq_in_index + # + # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo) + # ALTER TABLE tab DROP INDEX idx + + # mysql to list fks: + # SELECT 
constraint_name, table_name, column_name, referenced_table_name, referenced_column_name + # FROM information_schema.KEY_COLUMN_USAGE + # WHERE REFERENCED_TABLE_SCHEMA = (your schema name here) + # AND REFERENCED_TABLE_NAME is not null + # ORDER BY TABLE_NAME, COLUMN_NAME; + + # this may indicate missing object + # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)") + + + # PG notes: + + # To add a foreign key constraint to a table: + # ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL; + # ALTER TABLE tab DROP CONSTRAINT zipchk + # + # Note: index names must be unique across a schema + # CREATE INDEX idx ON tab(col) + # DROP INDEX idx #end def __init__ def do_connect(self, config=None): @@ -69,7 +176,7 @@ class fpdb_db: psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) # If DB connection is made over TCP, then the variables # host, user and password are required - print "host=%s user=%s pass=%s." % (host, user, password) + # print "host=%s user=%s pass=%s." % (host, user, password) if self.host and self.user and self.password: try: self.db = psycopg2.connect(host = host, @@ -96,7 +203,7 @@ class fpdb_db: try: self.cursor.execute("SELECT * FROM Settings") settings=self.cursor.fetchone() - if settings[0]!=119: + if settings[0]!=118: print "outdated or too new database version - please recreate tables" self.wrongDbVersion=True except:# _mysql_exceptions.ProgrammingError: @@ -201,14 +308,10 @@ class fpdb_db: #end def get_db_info def fillDefaultData(self): - self.cursor.execute("INSERT INTO Settings VALUES (119);") + self.cursor.execute("INSERT INTO Settings VALUES (118);") self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Full Tilt Poker', 'USD');") self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'PokerStars', 'USD');") self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Everleaf', 'USD');") - self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Carbon', 'USD');") - self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'OnGame', 'USD');") - self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'UltimateBet', 'USD');") - self.cursor.execute("INSERT INTO Sites VALUES (DEFAULT, 'Betfair', 'USD');") self.cursor.execute("INSERT INTO TourneyTypes VALUES (DEFAULT, 1, 0, 0, 0, False);") #end def fillDefaultData @@ -217,30 +320,275 @@ class fpdb_db: self.drop_tables() self.create_tables() - fpdb_simple.createAllIndexes(self) + self.createAllIndexes() self.db.commit() print "Finished recreating tables" #end def recreate_tables - def getSqlPlayerIDs(names, site_id): - result = [] - notfound = [] - self.cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names)) - tmp = dict(self.cursor.fetchall()) - for n in names: - if n not in tmp: - notfound.append(n) - else: - result.append(tmp[n]) - if notfound: - cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound)) - cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound)) - tmp = cursor.fetchall() - for n in tmp: - result.append(n[0]) + def prepareBulkImport(self): + """Drop some indexes/foreign keys to prepare for bulk import. 
+ Currently keeping the standalone indexes as needed to import quickly""" + stime = time() + if self.backend == self.PGSQL: + self.db.set_isolation_level(0) # allow table/index operations to work + for fk in self.foreignKeys[self.backend]: + if fk['drop'] == 1: + if self.backend == self.MYSQL_INNODB: + self.cursor.execute("SELECT constraint_name " + + "FROM information_schema.KEY_COLUMN_USAGE " + + #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb' + "WHERE 1=1 " + + "AND table_name = %s AND column_name = %s " + + "AND referenced_table_name = %s " + + "AND referenced_column_name = %s ", + (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) ) + cons = self.cursor.fetchone() + #print "preparebulk: cons=", cons + if cons: + print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol'] + try: + self.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0]) + except: + pass + elif self.backend == self.PGSQL: + # DON'T FORGET TO RECREATE THEM!! + print "dropping pg fk", fk['fktab'], fk['fkcol'] + try: + # try to lock table to see if index drop will work: + # hmmm, tested by commenting out rollback in grapher. lock seems to work but + # then drop still hangs :-( does work in some tests though?? + # will leave code here for now pending further tests/enhancement ... + self.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) ) + #print "after lock, status:", self.cursor.statusmessage + #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']) + try: + self.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])) + print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol']) + except: + if "does not exist" not in str(sys.exc_value): + print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \ + % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') ) + except: + print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \ + % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n')) + else: + print "Only MySQL and Postgres supported so far" + return -1 + + for idx in self.indexes[self.backend]: + if idx['drop'] == 1: + if self.backend == self.MYSQL_INNODB: + print "dropping mysql index ", idx['tab'], idx['col'] + try: + # apparently nowait is not implemented in mysql so this just hands if there are locks + # preventing the index drop :-( + self.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) ) + except: + pass + elif self.backend == self.PGSQL: + # DON'T FORGET TO RECREATE THEM!! + print "dropping pg index ", idx['tab'], idx['col'] + try: + # try to lock table to see if index drop will work: + self.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) ) + #print "after lock, status:", self.cursor.statusmessage + try: + # table locked ok so index drop should work: + #print "drop index %s_%s_idx" % (idx['tab'],idx['col']) + self.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) ) + #print "dropped pg index ", idx['tab'], idx['col'] + except: + if "does not exist" not in str(sys.exc_value): + print "warning: drop index %s_%s_idx failed: %s, continuing ..." \ + % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n')) + except: + print "warning: index %s_%s_idx not dropped %s, continuing ..." 
\ + % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n')) + else: + print "Error: Only MySQL and Postgres supported so far" + return -1 - #We proabably want to cache this - return result + if self.backend == self.PGSQL: + self.db.set_isolation_level(1) # go back to normal isolation level + self.db.commit() # seems to clear up errors if there were any in postgres + ptime = time() - stime + print "prepare import took", ptime, "seconds" + #end def prepareBulkImport + + def afterBulkImport(self): + """Re-create any dropped indexes/foreign keys after bulk import""" + stime = time() + if self.backend == self.PGSQL: + self.db.set_isolation_level(0) # allow table/index operations to work + for fk in self.foreignKeys[self.backend]: + if fk['drop'] == 1: + if self.backend == self.MYSQL_INNODB: + self.cursor.execute("SELECT constraint_name " + + "FROM information_schema.KEY_COLUMN_USAGE " + + #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb' + "WHERE 1=1 " + + "AND table_name = %s AND column_name = %s " + + "AND referenced_table_name = %s " + + "AND referenced_column_name = %s ", + (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) ) + cons = self.cursor.fetchone() + print "afterbulk: cons=", cons + if cons: + pass + else: + print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol'] + try: + self.cursor.execute("alter table " + fk['fktab'] + " add foreign key (" + + fk['fkcol'] + ") references " + fk['rtab'] + "(" + + fk['rcol'] + ")") + except: + pass + elif self.backend == self.PGSQL: + print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol'] + try: + self.cursor.execute("alter table " + fk['fktab'] + " add constraint " + + fk['fktab'] + '_' + fk['fkcol'] + '_fkey' + + " foreign key (" + fk['fkcol'] + + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")") + except: + pass + else: + print "Only MySQL and Postgres supported so far" + return -1 + + for idx in self.indexes[self.backend]: + if idx['drop'] == 1: + if self.backend == self.MYSQL_INNODB: + print "creating mysql index ", idx['tab'], idx['col'] + try: + self.cursor.execute( "alter table %s add index %s(%s)" + , (idx['tab'],idx['col'],idx['col']) ) + except: + pass + elif self.backend == self.PGSQL: + # pass + # mod to use tab_col for index name? + print "creating pg index ", idx['tab'], idx['col'] + try: + print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col']) + self.cursor.execute( "create index %s_%s_idx on %s(%s)" + % (idx['tab'], idx['col'], idx['tab'], idx['col']) ) + except: + print " ERROR! :-(" + pass + else: + print "Only MySQL and Postgres supported so far" + return -1 + + if self.backend == self.PGSQL: + self.db.set_isolation_level(1) # go back to normal isolation level + self.db.commit() # seems to clear up errors if there were any in postgres + atime = time() - stime + print "after import took", atime, "seconds" + #end def afterBulkImport + + def createAllIndexes(self): + """Create new indexes""" + if self.backend == self.PGSQL: + self.db.set_isolation_level(0) # allow table/index operations to work + for idx in self.indexes[self.backend]: + if self.backend == self.MYSQL_INNODB: + print "creating mysql index ", idx['tab'], idx['col'] + try: + self.cursor.execute( "alter table %s add index %s(%s)" + , (idx['tab'],idx['col'],idx['col']) ) + except: + pass + elif self.backend == self.PGSQL: + # mod to use tab_col for index name? 
+ print "creating pg index ", idx['tab'], idx['col'] + try: + print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col']) + self.cursor.execute( "create index %s_%s_idx on %s(%s)" + % (idx['tab'], idx['col'], idx['tab'], idx['col']) ) + except: + print " ERROR! :-(" + pass + else: + print "Only MySQL and Postgres supported so far" + return -1 + if self.backend == self.PGSQL: + self.db.set_isolation_level(1) # go back to normal isolation level + #end def createAllIndexes + + def dropAllIndexes(self): + """Drop all standalone indexes (i.e. not including primary keys or foreign keys) + using list of indexes in indexes data structure""" + # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK) + if self.backend == self.PGSQL: + self.db.set_isolation_level(0) # allow table/index operations to work + for idx in self.indexes[self.backend]: + if self.backend == self.MYSQL_INNODB: + print "dropping mysql index ", idx['tab'], idx['col'] + try: + self.cursor.execute( "alter table %s drop index %s" + , (idx['tab'],idx['col']) ) + except: + pass + elif self.backend == self.PGSQL: + print "dropping pg index ", idx['tab'], idx['col'] + # mod to use tab_col for index name? + try: + self.cursor.execute( "drop index %s_%s_idx" + % (idx['tab'],idx['col']) ) + except: + pass + else: + print "Only MySQL and Postgres supported so far" + return -1 + if self.backend == self.PGSQL: + self.db.set_isolation_level(1) # go back to normal isolation level + #end def dropAllIndexes + + def analyzeDB(self): + """Do whatever the DB can offer to update index/table statistics""" + stime = time() + if self.backend == self.PGSQL: + self.db.set_isolation_level(0) # allow vacuum to work + try: + self.cursor.execute("vacuum analyze") + except: + print "Error during vacuum" + self.db.set_isolation_level(1) # go back to normal isolation level + self.db.commit() + atime = time() - stime + print "analyze took", atime, "seconds" + #end def analyzeDB + + # Currently uses an exclusive lock on the Hands table as a global lock + # Return values are Unix style, 0 for success, positive integers for errors + # 1 = generic error + # 2 = hands table does not exist (error message is suppressed) + def get_global_lock(self): + if self.backend == self.MYSQL_INNODB: + try: + self.cursor.execute( "lock tables Hands write" ) + except: + # Table 'fpdb.hands' doesn't exist + if str(sys.exc_value).find(".hands' doesn't exist") >= 0: + return(2) + print "Error! failed to obtain global lock. Close all programs accessing " \ + + "database (including fpdb) and try again (%s)." \ + % ( str(sys.exc_value).rstrip('\n'), ) + return(1) + elif self.backend == self.PGSQL: + try: + self.cursor.execute( "lock table Hands in exclusive mode nowait" ) + #print "... after lock table, status =", self.cursor.statusmessage + except: + # relation "hands" does not exist + if str(sys.exc_value).find('relation "hands" does not exist') >= 0: + return(2) + print "Error! failed to obtain global lock. Close all programs accessing " \ + + "database (including fpdb) and try again (%s)." 
\ + % ( str(sys.exc_value).rstrip('\n'), ) + return(1) + return(0) def storeHand(self, p): #stores into table hands: @@ -265,3 +613,4 @@ class fpdb_db: ) ) #return getLastInsertId(backend, conn, cursor) +#end class fpdb_db diff --git a/pyfpdb/fpdb_import.py b/pyfpdb/fpdb_import.py index 2dbb4807..cffe8598 100644 --- a/pyfpdb/fpdb_import.py +++ b/pyfpdb/fpdb_import.py @@ -150,7 +150,9 @@ class Importer: self.monitor = True self.dirlist[site] = [dir] + [filter] + #print "addImportDirectory: checking files in", dir for file in os.listdir(dir): + #print " adding file ", file self.addImportFile(os.path.join(dir, file), site, filter) else: print "Warning: Attempted to add non-directory: '" + str(dir) + "' as an import directory" @@ -162,7 +164,7 @@ class Importer: if self.settings['dropIndexes'] == 'auto': self.settings['dropIndexes'] = self.calculate_auto() if self.settings['dropIndexes'] == 'drop': - fpdb_simple.prepareBulkImport(self.fdb) + self.fdb.prepareBulkImport() totstored = 0 totdups = 0 totpartial = 0 @@ -177,8 +179,8 @@ class Importer: toterrors += errors tottime += ttime if self.settings['dropIndexes'] == 'drop': - fpdb_simple.afterBulkImport(self.fdb) - fpdb_simple.analyzeDB(self.fdb) + self.fdb.afterBulkImport() + self.fdb.analyzeDB() return (totstored, totdups, totpartial, toterrors, tottime) # else: import threaded @@ -203,14 +205,18 @@ class Importer: #todo: make efficient - always checks for new file, should be able to use mtime of directory # ^^ May not work on windows + #rulog = open('runUpdated.txt', 'a') + #rulog.writelines("runUpdated ... ") for site in self.dirlist: self.addImportDirectory(self.dirlist[site][0], False, site, self.dirlist[site][1]) for file in self.filelist: if os.path.exists(file): stat_info = os.stat(file) + #rulog.writelines("path exists ") try: lastupdate = self.updated[file] + #rulog.writelines("lastupdate = %d, mtime = %d" % (lastupdate,stat_info.st_mtime)) if stat_info.st_mtime > lastupdate: self.import_file_dict(file, self.filelist[file][0], self.filelist[file][1]) self.updated[file] = time() @@ -236,7 +242,8 @@ class Importer: self.addToDirList = {} self.removeFromFileList = {} self.fdb.db.rollback() - + #rulog.writelines(" finished\n") + #rulog.close() # This is now an internal function that should not be called directly. def import_file_dict(self, file, site, filter): @@ -249,7 +256,7 @@ class Importer: conv = None # Load filter, process file, pass returned filename to import_fpdb_file - print "converting %s" % file + print "\nConverting %s" % file hhbase = self.config.get_import_parameters().get("hhArchiveBase") hhbase = os.path.expanduser(hhbase) hhdir = os.path.join(hhbase,site) @@ -282,6 +289,7 @@ class Importer: starttime = time() last_read_hand = 0 loc = 0 + #print "file =", file if file == "stdin": inputFile = sys.stdin else: @@ -292,10 +300,17 @@ class Importer: return (0, 0, 0, 1, 0) try: loc = self.pos_in_file[file] + #size = os.path.getsize(file) + #print "loc =", loc, 'size =', size except: pass # Read input file into class and close file inputFile.seek(loc) + #tmplines = inputFile.readlines() + #if tmplines == None or tmplines == []: + # print "tmplines = ", tmplines + #else: + # print "tmplines[0] =", tmplines[0] self.lines = fpdb_simple.removeTrailingEOL(inputFile.readlines()) self.pos_in_file[file] = inputFile.tell() inputFile.close() @@ -303,7 +318,8 @@ class Importer: try: # sometimes we seem to be getting an empty self.lines, in which case, we just want to return. 
firstline = self.lines[0] except: - print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc) + # just skip the debug message and return silently: + #print "DEBUG: import_fpdb_file: failed on self.lines[0]: '%s' '%s' '%s' '%s' " %( file, site, self.lines, loc) return (0,0,0,1,0) if firstline.find("Tournament Summary")!=-1: @@ -348,6 +364,7 @@ class Importer: if self.callHud: #print "call to HUD here. handsId:",handsId #pipe the Hands.id out to the HUD + print "sending hand to hud", handsId, "pipe =", self.caller.pipe_to_hud self.caller.pipe_to_hud.stdin.write("%s" % (handsId) + os.linesep) except fpdb_simple.DuplicateError: duplicates += 1 @@ -364,7 +381,6 @@ class Importer: except (fpdb_simple.FpdbError), fe: errors += 1 self.printEmailErrorMessage(errors, file, hand) - self.fdb.db.rollback() if self.settings['failOnError']: diff --git a/pyfpdb/fpdb_simple.py b/pyfpdb/fpdb_simple.py index 2241feab..72b7b656 100644 --- a/pyfpdb/fpdb_simple.py +++ b/pyfpdb/fpdb_simple.py @@ -16,6 +16,10 @@ #agpl-3.0.txt in the docs folder of the package. #This file contains simple functions for fpdb + +#Aiming to eventually remove this module, functions will move to, eg: +#fpdb_db db create/re-create/management/etc +#Hands or related files for saving hands to db, etc import datetime import time @@ -28,6 +32,7 @@ PS = 1 FTP = 2 # TODO: these constants are also used in fpdb_save_to_db and others, is there a way to do like C #define, and #include ? +# answer - yes. These are defined in fpdb_db so are accessible through that class. MYSQL_INNODB = 2 PGSQL = 3 SQLITE = 4 @@ -35,359 +40,6 @@ SQLITE = 4 # config while trying out new hudcache mechanism use_date_in_hudcache = True -# Data Structures for index and foreign key creation -# drop_code is an int with possible values: 0 - don't drop for bulk import -# 1 - drop during bulk import -# db differences: -# - note that mysql automatically creates indexes on constrained columns when -# foreign keys are created, while postgres does not. Hence the much longer list -# of indexes is required for postgres. 
-# all primary keys are left on all the time -# -# table column drop_code - -indexes = [ - [ ] # no db with index 0 - , [ ] # no db with index 1 - , [ # indexes for mysql (list index 2) - {'tab':'Players', 'col':'name', 'drop':0} - , {'tab':'Hands', 'col':'siteHandNo', 'drop':0} - , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} - ] - , [ # indexes for postgres (list index 3) - {'tab':'Boardcards', 'col':'handId', 'drop':0} - , {'tab':'Gametypes', 'col':'siteId', 'drop':0} - , {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09 - , {'tab':'Hands', 'col':'siteHandNo', 'drop':0} - , {'tab':'HandsActions', 'col':'handsPlayerId', 'drop':0} - , {'tab':'HandsPlayers', 'col':'handId', 'drop':1} - , {'tab':'HandsPlayers', 'col':'playerId', 'drop':1} - , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0} - , {'tab':'HudCache', 'col':'gametypeId', 'drop':1} - , {'tab':'HudCache', 'col':'playerId', 'drop':0} - , {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0} - , {'tab':'Players', 'col':'siteId', 'drop':1} - , {'tab':'Players', 'col':'name', 'drop':0} - , {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1} - , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} - , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0} - , {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0} - , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0} - ] - ] - -foreignKeys = [ - [ ] # no db with index 0 - , [ ] # no db with index 1 - , [ # foreign keys for mysql - {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} - , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1} - , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1} - , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1} - , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} - , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0} - , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1} - ] - , [ # foreign keys for postgres - {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} - , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1} - , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1} - , {'fktab':'HandsActions', 'fkcol':'handsPlayerId', 'rtab':'HandsPlayers', 'rcol':'id', 'drop':1} - , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1} - , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0} - , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1} - ] - ] - - -# MySQL Notes: -# "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id -# - creates index handId on .handId -# alter table t drop foreign key fk -# alter table t add foreign key (fkcol) references tab(rcol) -# alter table t add constraint c foreign key (fkcol) references tab(rcol) -# (fkcol is used for foreigh key name) - -# mysql to list indexes: -# SELECT table_name, index_name, non_unique, column_name -# FROM INFORMATION_SCHEMA.STATISTICS -# WHERE table_name = 'tbl_name' -# AND table_schema = 'db_name' -# ORDER BY table_name, index_name, seq_in_index -# -# ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo) -# ALTER TABLE tab DROP INDEX idx - -# mysql to list fks: -# SELECT constraint_name, table_name, column_name, 
-#          referenced_table_name, referenced_column_name
-#   FROM information_schema.KEY_COLUMN_USAGE
-#   WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
-#     AND REFERENCED_TABLE_NAME is not null
-#   ORDER BY TABLE_NAME, COLUMN_NAME;
-
-# this may indicate missing object
-# _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
-
-
-# PG notes:
-
-# To add a foreign key constraint to a table:
-#   ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
-#   ALTER TABLE tab DROP CONSTRAINT zipchk
-#
-# Note: index names must be unique across a schema
-#   CREATE INDEX idx ON tab(col)
-#   DROP INDEX idx
-
-def prepareBulkImport(fdb):
-    """Drop some indexes/foreign keys to prepare for bulk import.
-       Currently keeping the standalone indexes as needed to import quickly"""
-    # fdb is a fpdb_db object including backend, db, cursor, sql variables
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for fk in foreignKeys[fdb.backend]:
-        if fk['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                fdb.cursor.execute("SELECT constraint_name " +
-                                   "FROM information_schema.KEY_COLUMN_USAGE " +
-                                   #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
-                                   "WHERE 1=1 " +
-                                   "AND table_name = %s AND column_name = %s " +
-                                   "AND referenced_table_name = %s " +
-                                   "AND referenced_column_name = %s ",
-                                   (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
-                cons = fdb.cursor.fetchone()
-                #print "preparebulk: cons=", cons
-                if cons:
-                    print "dropping mysql fk", cons[0], fk['fktab'], fk['fkcol']
-                    try:
-                        fdb.cursor.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
-                    except:
-                        pass
-            elif fdb.backend == PGSQL:
-#    DON'T FORGET TO RECREATE THEM!!
-                print "dropping pg fk", fk['fktab'], fk['fkcol']
-                try:
-                    # try to lock table to see if index drop will work:
-                    # hmmm, tested by commenting out rollback in grapher. lock seems to work but
-                    # then drop still hangs :-(  does work in some tests though??
-                    # will leave code here for now pending further tests/enhancement ...
-                    fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
-                    #print "after lock, status:", fdb.cursor.statusmessage
-                    #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
-                    try:
-                        fdb.cursor.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
-                        print "dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol'])
-                    except:
-                        if "does not exist" not in str(sys.exc_value):
-                            print "warning: drop pg fk %s_%s_fkey failed: %s, continuing ..." \
-                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
-                except:
-                    print "warning: constraint %s_%s_fkey not dropped: %s, continuing ..." \
-                          % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
-            else:
-                print "Only MySQL and Postgres supported so far"
-                return -1
-
-    for idx in indexes[fdb.backend]:
-        if idx['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                print "dropping mysql index ", idx['tab'], idx['col']
-                try:
-                    # apparently nowait is not implemented in mysql so this just hands if there are locks
-                    # preventing the index drop :-(
-                    fdb.cursor.execute( "alter table %s drop index %s", (idx['tab'],idx['col']) )
-                except:
-                    pass
-            elif fdb.backend == PGSQL:
-#    DON'T FORGET TO RECREATE THEM!!
-                print "dropping pg index ", idx['tab'], idx['col']
-                try:
-                    # try to lock table to see if index drop will work:
-                    fdb.cursor.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
-                    #print "after lock, status:", fdb.cursor.statusmessage
-                    try:
-                        # table locked ok so index drop should work:
-                        #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
-                        fdb.cursor.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
-                        #print "dropped pg index ", idx['tab'], idx['col']
-                    except:
-                        if "does not exist" not in str(sys.exc_value):
-                            print "warning: drop index %s_%s_idx failed: %s, continuing ..." \
-                                  % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
-                except:
-                    print "warning: index %s_%s_idx not dropped %s, continuing ..." \
-                          % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
-            else:
-                print "Error: Only MySQL and Postgres supported so far"
-                return -1
-
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-        fdb.db.commit()   # seems to clear up errors if there were any in postgres
-#end def prepareBulkImport
-
-def afterBulkImport(fdb):
-    """Re-create any dropped indexes/foreign keys after bulk import"""
-    # fdb is a fpdb_db object including backend, db, cursor, sql variables
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for fk in foreignKeys[fdb.backend]:
-        if fk['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                fdb.cursor.execute("SELECT constraint_name " +
-                                   "FROM information_schema.KEY_COLUMN_USAGE " +
-                                   #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
-                                   "WHERE 1=1 " +
-                                   "AND table_name = %s AND column_name = %s " +
-                                   "AND referenced_table_name = %s " +
-                                   "AND referenced_column_name = %s ",
-                                   (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
-                cons = fdb.cursor.fetchone()
-                print "afterbulk: cons=", cons
-                if cons:
-                    pass
-                else:
-                    print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
-                    try:
-                        fdb.cursor.execute("alter table " + fk['fktab'] + " add foreign key ("
-                                           + fk['fkcol'] + ") references " + fk['rtab'] + "("
-                                           + fk['rcol'] + ")")
-                    except:
-                        pass
-            elif fdb.backend == PGSQL:
-                print "creating fk ", fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
-                try:
-                    fdb.cursor.execute("alter table " + fk['fktab'] + " add constraint "
-                                       + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
-                                       + " foreign key (" + fk['fkcol']
-                                       + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
-                except:
-                    pass
-            else:
-                print "Only MySQL and Postgres supported so far"
-                return -1
-
-    for idx in indexes[fdb.backend]:
-        if idx['drop'] == 1:
-            if fdb.backend == MYSQL_INNODB:
-                print "creating mysql index ", idx['tab'], idx['col']
-                try:
-                    fdb.cursor.execute( "alter table %s add index %s(%s)"
-                                      , (idx['tab'],idx['col'],idx['col']) )
-                except:
-                    pass
-            elif fdb.backend == PGSQL:
-#                pass
-                # mod to use tab_col for index name?
-                print "creating pg index ", idx['tab'], idx['col']
-                try:
-                    print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
-                    fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
-                                      % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
-                except:
-                    print "  ERROR! :-("
-                    pass
-            else:
-                print "Only MySQL and Postgres supported so far"
-                return -1
-
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-        fdb.db.commit()   # seems to clear up errors if there were any in postgres
-#end def afterBulkImport
-
-def createAllIndexes(fdb):
-    """Create new indexes"""
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for idx in indexes[fdb.backend]:
-        if fdb.backend == MYSQL_INNODB:
-            print "creating mysql index ", idx['tab'], idx['col']
-            try:
-                fdb.cursor.execute( "alter table %s add index %s(%s)"
-                                  , (idx['tab'],idx['col'],idx['col']) )
-            except:
-                pass
-        elif fdb.backend == PGSQL:
-            # mod to use tab_col for index name?
-            print "creating pg index ", idx['tab'], idx['col']
-            try:
-                print "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
-                fdb.cursor.execute( "create index %s_%s_idx on %s(%s)"
-                                  % (idx['tab'], idx['col'], idx['tab'], idx['col']) )
-            except:
-                print "  ERROR! :-("
-                pass
-        else:
-            print "Only MySQL and Postgres supported so far"
-            return -1
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-#end def createAllIndexes
-
-def dropAllIndexes(fdb):
-    """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
-       using list of indexes in indexes data structure"""
-    # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow table/index operations to work
-    for idx in indexes[fdb.backend]:
-        if fdb.backend == MYSQL_INNODB:
-            print "dropping mysql index ", idx['tab'], idx['col']
-            try:
-                fdb.cursor.execute( "alter table %s drop index %s"
-                                  , (idx['tab'],idx['col']) )
-            except:
-                pass
-        elif fdb.backend == PGSQL:
-            print "dropping pg index ", idx['tab'], idx['col']
-            # mod to use tab_col for index name?
-            try:
-                fdb.cursor.execute( "drop index %s_%s_idx"
-                                  % (idx['tab'],idx['col']) )
-            except:
-                pass
-        else:
-            print "Only MySQL and Postgres supported so far"
-            return -1
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-#end def dropAllIndexes
-
-def analyzeDB(fdb):
-    """Do whatever the DB can offer to update index/table statistics"""
-    if fdb.backend == PGSQL:
-        fdb.db.set_isolation_level(0)   # allow vacuum to work
-        try:
-            fdb.cursor.execute("vacuum analyze")
-        except:
-            print "Error during vacuum"
-        fdb.db.set_isolation_level(1)   # go back to normal isolation level
-    fdb.db.commit()
-#end def analyzeDB
-
-def get_global_lock(fdb):
-    if fdb.backend == MYSQL_INNODB:
-        try:
-            fdb.cursor.execute( "lock tables Hands write" )
-        except:
-            print "Error! failed to obtain global lock. Close all programs accessing " \
-                  + "database (including fpdb) and try again (%s)." \
-                  % ( str(sys.exc_value).rstrip('\n'), )
-            return(False)
-    elif fdb.backend == PGSQL:
-        try:
-            fdb.cursor.execute( "lock table Hands in exclusive mode nowait" )
-            #print "... after lock table, status =", fdb.cursor.statusmessage
-        except:
-            print "Error! failed to obtain global lock. Close all programs accessing " \
-                  + "database (including fpdb) and try again (%s)." \
-                  % ( str(sys.exc_value).rstrip('\n'), )
-            return(False)
-    return(True)
-
-
 class DuplicateError(Exception):
     def __init__(self, value):
         self.value = value
@@ -1390,6 +1042,27 @@ def recognisePlayerIDs(cursor, names, site_id):
 #end def recognisePlayerIDs
+
+# Here's a version that would work if it wasn't for the fact that it needs to have the output in the same order as input
+# this version could also be improved upon using list comprehensions, etc
+
+#def recognisePlayerIDs(cursor, names, site_id):
+#    result = []
+#    notfound = []
+#    cursor.execute("SELECT name,id FROM Players WHERE name='%s'" % "' OR name='".join(names))
+#    tmp = dict(cursor.fetchall())
+#    for n in names:
+#        if n not in tmp:
+#            notfound.append(n)
+#        else:
+#            result.append(tmp[n])
+#    if notfound:
+#        cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, "+str(site_id)+")", (notfound))
+#        cursor.execute("SELECT id FROM Players WHERE name='%s'" % "' OR name='".join(notfound))
+#        tmp = cursor.fetchall()
+#        for n in tmp:
+#            result.append(n[0])
+#
+#    return result
 
 
 #recognises the name in the given line and returns its array position in the given array
 def recognisePlayerNo(line, names, atype):
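# A possible way round the ordering problem noted in the commented-out recognisePlayerIDs
# above (rough sketch only, untested; it assumes the same cursor, %s paramstyle and Players
# schema as the code above, and the name recognisePlayerIDsOrdered is just for illustration):
# build a name->id map first, inserting any missing names, then return the ids in the same
# order as the input list.
#
#def recognisePlayerIDsOrdered(cursor, names, site_id):
#    cursor.execute("SELECT name,id FROM Players WHERE name IN ("
#                   + ",".join(["%s"] * len(names)) + ")", names)
#    ids = dict(cursor.fetchall())
#    notfound = [n for n in names if n not in ids]
#    if notfound:
#        cursor.executemany("INSERT INTO Players (name, siteId) VALUES (%s, " + str(site_id) + ")",
#                           [(n,) for n in notfound])
#        cursor.execute("SELECT name,id FROM Players WHERE name IN ("
#                       + ",".join(["%s"] * len(notfound)) + ")", notfound)
#        ids.update(dict(cursor.fetchall()))
#    return [ids[n] for n in names]   # ids come back in input order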