annotate mercurial/revlog.py @ 116:e484cd5ec282

Only use lazy indexing for big indices and avoid the overhead of the lazy index in the small index case.
author mpm@selenic.com
date Fri, 20 May 2005 17:35:20 -0800
parents 39b438eeb25a
children 2ac722ad1a9d
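
A reader-added sketch, not part of the changeset: revlog is constructed with an
"opener" callable that is called as opener(path) for reading and
opener(path, "a") for appending, plus the names of the index and data files.
The directory layout and helper name below are assumptions for illustration
only. The source of revlog.py at this revision follows.

import os

def makeopener(base):
    # return an opener rooted at "base"; files are opened in binary mode
    def o(path, mode="r"):
        return open(os.path.join(base, path), mode + "b")
    return o

# rl = revlog(makeopener(".hg/data"), "somefile.i", "somefile.d")
#
# With this changeset, an index file larger than 10000 bytes is parsed on
# demand through lazyparser/lazyindex/lazymap; a smaller index is decoded up
# front into a plain list and dict, avoiding the lazy machinery's overhead.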

# revlog.py - storage back-end for mercurial
#
# This provides efficient delta storage with O(1) retrieve and append
# and O(changes) merge between branches
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import zlib, struct, sha, os, tempfile, binascii
from mercurial import mdiff

def hex(node): return binascii.hexlify(node)
def bin(node): return binascii.unhexlify(node)
def short(node): return hex(node[:4])

def compress(text):
    if not text: return text
    if len(text) < 44:
        if text[0] == '\0': return text
        return 'u' + text
    bin = zlib.compress(text)
    if len(bin) > len(text):
        if text[0] == '\0': return text
        return 'u' + text
    return bin

def decompress(bin):
    if not bin: return bin
    t = bin[0]
    if t == '\0': return bin
    if t == 'x': return zlib.decompress(bin)
    if t == 'u': return bin[1:]
    raise "unknown compression type %s" % t
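
# Note: the first byte of a stored chunk acts as a tag: 'u' means the text is
# stored uncompressed (the prefix is stripped again on decompress), 'x' is the
# start of a zlib stream, and '\0' means the text itself begins with a NUL
# byte and was stored verbatim.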

def hash(text, p1, p2):
    l = [p1, p2]
    l.sort()
    return sha.sha(l[0] + l[1] + text).digest()

nullid = "\0" * 20
indexformat = ">4l20s20s20s"
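
# Each index entry packed with indexformat is four big-endian 32-bit integers
# followed by three 20-byte strings: (offset, size, base, linkrev, p1, p2,
# nodeid), as the unpacking loops below spell out.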

class lazyparser:
    def __init__(self, data):
        self.data = data
        self.s = struct.calcsize(indexformat)
        self.l = len(data)/self.s
        self.index = [None] * self.l
        self.map = {nullid: -1}

        if 0:
            n = 0
            i = self.data
            s = struct.calcsize(indexformat)
            for f in xrange(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                self.map[e[6]] = n
                self.index.append(e)
                n += 1

    def load(self, pos):
        block = pos / 1000
        i = block * 1000
        end = min(self.l, i + 1000)
        while i < end:
            d = self.data[i * self.s: (i + 1) * self.s]
            e = struct.unpack(indexformat, d)
            self.index[i] = e
            self.map[e[6]] = i
            i += 1

class lazyindex:
    def __init__(self, parser):
        self.p = parser
    def __len__(self):
        return len(self.p.index)
    def load(self, pos):
        self.p.load(pos)
        return self.p.index[pos]
    def __getitem__(self, pos):
        return self.p.index[pos] or self.load(pos)
    def append(self, e):
        self.p.index.append(e)

class lazymap:
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        n = self.p.data.find(key)
        if n < 0: raise KeyError("node " + hex(key))
        pos = n / self.p.s
        self.p.load(pos)
    def __contains__(self, key):
        try:
            self[key]
            return True
        except KeyError:
            return False
    def __iter__(self):
        for i in xrange(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                self.p.load(i)
                yield self.p.index[i][6]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val
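
# The three classes above make up the lazy path: lazyparser decodes the raw
# index a 1000-entry block at a time, while lazyindex and lazymap wrap it with
# the list-like (rev -> entry) and dict-like (node -> rev) interfaces the
# revlog class expects.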

class revlog:
    def __init__(self, opener, indexfile, datafile):
        self.indexfile = indexfile
        self.datafile = datafile
        self.opener = opener
        self.cache = None

        try:
            i = self.opener(self.indexfile).read()
        except IOError:
            i = ""

        if len(i) > 10000:
            # big index, let's parse it on demand
            parser = lazyparser(i)
            self.index = lazyindex(parser)
            self.nodemap = lazymap(parser)
        else:
            s = struct.calcsize(indexformat)
            l = len(i) / s
            self.index = [None] * l
            m = [None] * l

            n = 0
            for f in xrange(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                m[n] = (e[6], n)
                self.index[n] = e
                n += 1

            self.nodemap = dict(m)
            self.nodemap[nullid] = -1
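
        # Whichever branch runs, self.index behaves like a list of entries and
        # self.nodemap like a node -> rev mapping (with nullid mapped to -1),
        # so the rest of the class never needs to know whether the index was
        # parsed lazily or eagerly.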

    def tip(self): return self.node(len(self.index) - 1)
    def count(self): return len(self.index)
    def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    def rev(self, node): return self.nodemap[node]
    def linkrev(self, node): return self.index[self.nodemap[node]][3]
    def parents(self, node):
        if node == nullid: return (nullid, nullid)
        return self.index[self.nodemap[node]][4:6]

    def start(self, rev): return self.index[rev][0]
    def length(self, rev): return self.index[rev][1]
    def end(self, rev): return self.start(rev) + self.length(rev)
    def base(self, rev): return self.index[rev][2]

    def lookup(self, id):
        try:
            rev = int(id)
            return self.node(rev)
        except ValueError:
            c = []
            for n in self.nodemap:
                if id in hex(n):
                    c.append(n)
            if len(c) > 1: raise KeyError("Ambiguous identifier")
            if len(c) < 1: raise KeyError("No match found")
            return c[0]

        return None

    def diff(self, a, b):
        return mdiff.textdiff(a, b)

    def patches(self, t, pl):
        return mdiff.patches(t, pl)

    def revision(self, node):
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        text = None
        rev = self.rev(node)
        base = self.base(rev)
        start = self.start(base)
        end = self.end(rev)

        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            start = self.start(base + 1)
            text = self.cache[2]
        last = 0

        f = self.opener(self.datafile)
        f.seek(start)
        data = f.read(end - start)

        if not text:
            last = self.length(base)
            text = decompress(data[:last])

        bins = []
        for r in xrange(base + 1, rev + 1):
            s = self.length(r)
            bins.append(decompress(data[last:last + s]))
            last = last + s

        text = mdiff.patches(text, bins)

        (p1, p2) = self.parents(node)
        if node != hash(text, p1, p2):
            raise IOError("integrity check failed on %s:%d"
                          % (self.datafile, rev))

        self.cache = (node, rev, text)
        return text
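
    # revision() reconstructs a full text in one pass: read the span from the
    # start of the chain base through the end of the requested rev, decompress
    # the full text stored at the base (or reuse the cached text if it lies on
    # the same chain), apply each intermediate delta in order, then verify the
    # result against hash(text, p1, p2) before caching and returning it.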

    def addrevision(self, text, transaction, link, p1=None, p2=None):
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            prev = self.revision(self.tip())
            d = self.diff(prev, text)
            if self.patches(prev, [d]) != text:
                raise AssertionError("diff failed")
            data = compress(d)
            dist = end - start + len(data)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        e = (offset, len(data), base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        transaction.add(self.datafile, e[0])
        self.opener(self.datafile, "a").write(data)
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node
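
    # Worked example of the snapshot rule above: for a 1,000-byte text, the
    # new revision is stored as a delta only while the chain from the current
    # base (end - start plus the new compressed delta) stays at or below
    # 2,000 bytes; past that, a fresh full version is written and becomes the
    # new base.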

    def ancestor(self, a, b):
        def expand(list, map):
            a = []
            while list:
                n = list.pop(0)
                map[n] = 1
                yield n
                for p in self.parents(n):
                    if p != nullid and p not in map:
                        list.append(p)
            yield nullid

        amap = {}
        bmap = {}
        ag = expand([a], amap)
        bg = expand([b], bmap)
        adone = bdone = 0

        while not adone or not bdone:
            if not adone:
                an = ag.next()
                if an == nullid:
                    adone = 1
                elif an in bmap:
                    return an
            if not bdone:
                bn = bg.next()
                if bn == nullid:
                    bdone = 1
                elif bn in amap:
                    return bn

        return nullid
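
    # ancestor() runs two interleaved breadth-first walks, one from each node,
    # marking everything visited in amap/bmap; the first node one walk yields
    # that the other walk has already marked is reported as the common
    # ancestor, and nullid is returned if the walks exhaust without meeting.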

    def group(self, linkmap):
        # given a list of changeset revs, return a set of deltas and
        # metadata corresponding to nodes. the first delta is
        # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        # have this parent as it has all history before these
        # changesets. parent is parent[0]

        revs = []
        needed = {}

        # find file nodes/revs that match changeset revs
        for i in xrange(0, self.count()):
            if self.index[i][3] in linkmap:
                revs.append(i)
                needed[i] = 1

        # if we don't have any revisions touched by these changesets, bail
        if not revs: return struct.pack(">l", 0)

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        needed = needed.keys()
        needed.sort()
        spans = []
        for n in needed:
            if n < 0: continue
            o = self.start(n)
            l = self.length(n)
            spans.append((o, l, [(n, l)]))

        # merge spans
        merge = [spans.pop(0)]
        while spans:
            e = spans.pop(0)
            f = merge[-1]
            if e[0] == f[0] + f[1]:
                merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
            else:
                merge.append(e)

        # read spans in, divide up chunks
        chunks = {}
        for span in merge:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = data[pos: pos + l]
                pos += l

        # helper to reconstruct intermediate versions
        def construct(text, base, rev):
            bins = [decompress(chunks[r]) for r in xrange(base + 1, rev + 1)]
            return mdiff.patches(text, bins)

        # build deltas
        deltas = []
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            if a + 1 != b or self.base(b) == b:
                if a >= 0:
                    base = self.base(a)
                    ta = decompress(chunks[self.base(a)])
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                if a > base:
                    base = a
                    tb = ta
                else:
                    tb = decompress(chunks[self.base(b)])
                    tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                d = decompress(chunks[b])

            p = self.parents(n)
            meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
            l = struct.pack(">l", len(meta) + len(d) + 4)
            deltas.append(l + meta + d)

        l = struct.pack(">l", sum(map(len, deltas)) + 4)
        deltas.insert(0, l)
        return "".join(deltas)
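
    # group() sends only deltas: the receiver is assumed to already have
    # parent(revs[0]), and each emitted chunk carries the node, both parents
    # and the changeset node ahead of its length-prefixed delta (see the
    # walkgroup() illustration at the end of this listing).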

    def addgroup(self, data, linkmapper, transaction):
        # given a set of deltas, add them to the revision log. the
        # first delta is against its parent, which should be in our
        # log, the rest are against the previous delta.

        if not data: return self.tip()

        # retrieve the parent revision of the delta chain
        chain = data[24:44]
        if not chain in self.nodemap:
            raise "unknown base %s" % short(chain[:4])

        # track the base of the current delta log
        r = self.count()
        t = r - 1

        base = prev = -1
        start = end = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        pos = 0
        while pos < len(data):
            l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
                                                data[pos:pos+84])
            link = linkmapper(cs)
            if node in self.nodemap:
                raise "already have %s" % hex(node[:4])
            delta = data[pos + 84:pos + l]
            pos += l

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                cdelta = compress(delta)

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise "consistency error adding group"
                measure = len(text)
            else:
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            t, r, chain, prev = r, r + 1, node, node
            start = self.start(self.base(t))
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node
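
# Illustration only, not part of revlog.py: one way to walk a delta group as
# addgroup() reads it above (i.e. without the leading total-length word that
# group() prepends for the wire). Every integer is big-endian; each chunk is a
# 4-byte length l that counts itself, then node, p1, p2 and the changeset node
# (20 bytes each), then l - 84 bytes of delta.
def walkgroup(data):
    pos = 0
    while pos < len(data):
        l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s", data[pos:pos + 84])
        yield node, p1, p2, cs, data[pos + 84:pos + l]
        pos += l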