annotate mercurial/revlog.py @ 147:b6d8ed7aeba0

A new ancestor algorithm. The old ancestor algorithm could get fooled into returning ancestors closer to root than it ought to. Hopefully this one, which strictly orders its search by distance from root, will be foolproof.
author mpm@selenic.com
date Tue, 24 May 2005 23:11:44 -0800
parents f6d1f8a84372
children 083c38bdfa64
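
The idea in the changeset description above can be made concrete with a small standalone sketch. This is not the revlog code itself (that lives in the ancestor() method further down the listing); it is a minimal illustration, written for modern Python with a made-up toy DAG, of walking each node's ancestors strictly in order of decreasing distance from root and returning the first entry both walks agree on.

import heapq

# Toy DAG with made-up node names; "null" stands in for the virtual root.
# Each node maps to its tuple of parents.
parents = {
    "null": (),
    "a": ("null",),
    "b": ("a",),
    "c": ("a",),
    "d": ("b", "c"),
}

# Distance from root, computed the same way the revlog does:
# a node is one step farther from root than its farthest parent.
dist = {"null": 0}
for node in ("a", "b", "c", "d"):
    dist[node] = max(dist[p] for p in parents[node]) + 1

def ancestors(node):
    # Yield (distance, node) pairs in order of decreasing distance from root.
    # Distances are stored negated because heapq always pops the smallest item.
    h = [(-dist[node], node)]
    seen = set()
    while h:
        d, n = heapq.heappop(h)
        if n not in seen:
            seen.add(n)
            yield -d, n
            for p in parents[n]:
                heapq.heappush(h, (-dist[p], p))

def common_ancestor(a, b):
    x, y = ancestors(a), ancestors(b)
    lx, ly = next(x), next(y)
    while lx != ly:
        # Advance whichever walk is currently farther from root.
        if lx > ly:
            lx = next(x)
        else:
            ly = next(y)
    return lx[1]

print(common_ancestor("b", "c"))  # "a"
print(common_ancestor("d", "b"))  # "b"

Ordering the walk by distance from root is what keeps the search from settling on an ancestor near the root when a deeper common ancestor exists.
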
  rev  line  source
    0     1  # revlog.py - storage back-end for mercurial
          2  #
          3  # This provides efficient delta storage with O(1) retrieve and append
          4  # and O(changes) merge between branches
          5  #
          6  # Copyright 2005 Matt Mackall <mpm@selenic.com>
          7  #
          8  # This software may be used and distributed according to the terms
          9  # of the GNU General Public License, incorporated herein by reference.
         10
  147    11  import zlib, struct, sha, os, tempfile, binascii, heapq
    0    12  from mercurial import mdiff
         13
   36    14  def hex(node): return binascii.hexlify(node)
         15  def bin(node): return binascii.unhexlify(node)
   83    16  def short(node): return hex(node[:4])
   36    17
    0    18  def compress(text):
  112    19      if not text: return text
         20      if len(text) < 44:
         21          if text[0] == '\0': return text
         22          return 'u' + text
         23      bin = zlib.compress(text)
         24      if len(bin) > len(text):
         25          if text[0] == '\0': return text
         26          return 'u' + text
         27      return bin
    0    28
         29  def decompress(bin):
  112    30      if not bin: return bin
         31      t = bin[0]
         32      if t == '\0': return bin
         33      if t == 'x': return zlib.decompress(bin)
         34      if t == 'u': return bin[1:]
         35      raise "unknown compression type %s" % t
    0    36
         37  def hash(text, p1, p2):
         38      l = [p1, p2]
         39      l.sort()
         40      return sha.sha(l[0] + l[1] + text).digest()
         41
         42  nullid = "\0" * 20
         43  indexformat = ">4l20s20s20s"
         44
   76    45  class lazyparser:
         46      def __init__(self, data):
         47          self.data = data
         48          self.s = struct.calcsize(indexformat)
         49          self.l = len(data)/self.s
         50          self.index = [None] * self.l
         51          self.map = {nullid: -1}
         52
         53      def load(self, pos):
         54          block = pos / 1000
         55          i = block * 1000
         56          end = min(self.l, i + 1000)
         57          while i < end:
         58              d = self.data[i * self.s: (i + 1) * self.s]
         59              e = struct.unpack(indexformat, d)
         60              self.index[i] = e
         61              self.map[e[6]] = i
         62              i += 1
         63
         64  class lazyindex:
         65      def __init__(self, parser):
         66          self.p = parser
         67      def __len__(self):
         68          return len(self.p.index)
  115    69      def load(self, pos):
         70          self.p.load(pos)
         71          return self.p.index[pos]
   76    72      def __getitem__(self, pos):
  115    73          return self.p.index[pos] or self.load(pos)
   76    74      def append(self, e):
         75          self.p.index.append(e)
         76
         77  class lazymap:
         78      def __init__(self, parser):
         79          self.p = parser
         80      def load(self, key):
         81          n = self.p.data.find(key)
   86    82          if n < 0: raise KeyError("node " + hex(key))
   76    83          pos = n / self.p.s
         84          self.p.load(pos)
         85      def __contains__(self, key):
         86          try:
   77    87              self[key]
   76    88              return True
         89          except KeyError:
         90              return False
   97    91      def __iter__(self):
         92          for i in xrange(self.p.l):
         93              try:
         94                  yield self.p.index[i][6]
         95              except:
         96                  self.p.load(i)
         97                  yield self.p.index[i][6]
   76    98      def __getitem__(self, key):
         99          try:
        100              return self.p.map[key]
        101          except KeyError:
   86   102              try:
        103                  self.load(key)
        104                  return self.p.map[key]
        105              except KeyError:
        106                  raise KeyError("node " + hex(key))
   76   107      def __setitem__(self, key, val):
        108          self.p.map[key] = val
        109
    0   110  class revlog:
        111      def __init__(self, opener, indexfile, datafile):
        112          self.indexfile = indexfile
        113          self.datafile = datafile
        114          self.opener = opener
        115          self.cache = None
  116   116
    0   117          try:
        118              i = self.opener(self.indexfile).read()
   73   119          except IOError:
   76   120              i = ""
  116   121
        122          if len(i) > 10000:
        123              # big index, let's parse it on demand
        124              parser = lazyparser(i)
        125              self.index = lazyindex(parser)
        126              self.nodemap = lazymap(parser)
        127          else:
        128              s = struct.calcsize(indexformat)
        129              l = len(i) / s
        130              self.index = [None] * l
        131              m = [None] * l
        132
        133              n = 0
        134              for f in xrange(0, len(i), s):
        135                  # offset, size, base, linkrev, p1, p2, nodeid
        136                  e = struct.unpack(indexformat, i[f:f + s])
        137                  m[n] = (e[6], n)
        138                  self.index[n] = e
        139                  n += 1
        140
        141              self.nodemap = dict(m)
        142              self.nodemap[nullid] = -1
        143
    0   144
        145      def tip(self): return self.node(len(self.index) - 1)
        146      def count(self): return len(self.index)
   26   147      def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    0   148      def rev(self, node): return self.nodemap[node]
        149      def linkrev(self, node): return self.index[self.nodemap[node]][3]
    2   150      def parents(self, node):
        151          if node == nullid: return (nullid, nullid)
        152          return self.index[self.nodemap[node]][4:6]
    0   153
        154      def start(self, rev): return self.index[rev][0]
        155      def length(self, rev): return self.index[rev][1]
        156      def end(self, rev): return self.start(rev) + self.length(rev)
        157      def base(self, rev): return self.index[rev][2]
        158
   36   159      def lookup(self, id):
        160          try:
        161              rev = int(id)
        162              return self.node(rev)
        163          except ValueError:
        164              c = []
        165              for n in self.nodemap:
        166                  if id in hex(n):
        167                      c.append(n)
        168              if len(c) > 1: raise KeyError("Ambiguous identifier")
   67   169              if len(c) < 1: raise KeyError("No match found")
   36   170              return c[0]
        171
        172          return None
        173
    0   174      def diff(self, a, b):
        175          return mdiff.textdiff(a, b)
        176
   73   177      def patches(self, t, pl):
        178          return mdiff.patches(t, pl)
        179
  119   180      def delta(self, node):
        181          r = self.rev(node)
        182          b = self.base(r)
        183          if r == b:
        184              return self.diff(self.revision(self.node(r - 1)),
        185                               self.revision(node))
        186          else:
        187              f = self.opener(self.datafile)
        188              f.seek(self.start(r))
        189              data = f.read(self.length(r))
        190              return decompress(data)
        191
    0   192      def revision(self, node):
   36   193          if node == nullid: return ""
    0   194          if self.cache and self.cache[0] == node: return self.cache[2]
        195
        196          text = None
        197          rev = self.rev(node)
  117   198          start, length, base, link, p1, p2, node = self.index[rev]
        199          end = start + length
        200          if base != rev: start = self.start(base)
    0   201
        202          if self.cache and self.cache[1] >= base and self.cache[1] < rev:
        203              base = self.cache[1]
        204              start = self.start(base + 1)
        205              text = self.cache[2]
        206          last = 0
        207
        208          f = self.opener(self.datafile)
        209          f.seek(start)
        210          data = f.read(end - start)
        211
        212          if not text:
        213              last = self.length(base)
        214              text = decompress(data[:last])
        215
   71   216          bins = []
   64   217          for r in xrange(base + 1, rev + 1):
    0   218              s = self.length(r)
   71   219              bins.append(decompress(data[last:last + s]))
    0   220              last = last + s
        221
   71   222          text = mdiff.patches(text, bins)
        223
   26   224          if node != hash(text, p1, p2):
   98   225              raise IOError("integrity check failed on %s:%d"
        226                            % (self.datafile, rev))
    0   227
        228          self.cache = (node, rev, text)
        229          return text
        230
        231      def addrevision(self, text, transaction, link, p1=None, p2=None):
        232          if text is None: text = ""
        233          if p1 is None: p1 = self.tip()
        234          if p2 is None: p2 = nullid
        235
        236          node = hash(text, p1, p2)
        237
        238          n = self.count()
        239          t = n - 1
        240
        241          if n:
   64   242              base = self.base(t)
        243              start = self.start(base)
    0   244              end = self.end(t)
        245              prev = self.revision(self.tip())
   98   246              d = self.diff(prev, text)
        247              if self.patches(prev, [d]) != text:
        248                  raise AssertionError("diff failed")
        249              data = compress(d)
   64   250              dist = end - start + len(data)
    0   251
        252          # full versions are inserted when the needed deltas
        253          # become comparable to the uncompressed text
   64   254          if not n or dist > len(text) * 2:
    0   255              data = compress(text)
        256              base = n
        257          else:
        258              base = self.base(t)
        259
        260          offset = 0
        261          if t >= 0:
        262              offset = self.end(t)
        263
        264          e = (offset, len(data), base, link, p1, p2, node)
        265
        266          self.index.append(e)
        267          self.nodemap[node] = n
        268          entry = struct.pack(indexformat, *e)
        269
   26   270          transaction.add(self.datafile, e[0])
    0   271          self.opener(self.datafile, "a").write(data)
   41   272          transaction.add(self.indexfile, n * len(entry))
    0   273          self.opener(self.indexfile, "a").write(entry)
        274
        275          self.cache = (node, n, text)
        276          return node
        277
        278      def ancestor(self, a, b):
  147   279          # calculate the distance of every node from root
        280          dist = {nullid: 0}
        281          for i in xrange(self.count()):
        282              n = self.node(i)
        283              p1, p2 = self.parents(n)
        284              dist[n] = max(dist[p1], dist[p2]) + 1
        285
        286          # traverse ancestors in order of decreasing distance from root
        287          def ancestors(node):
        288              # we store negative distances because heap returns smallest member
        289              h = [(-dist[node], node)]
        290              seen = {}
        291              earliest = self.count()
        292              while h:
        293                  d, n = heapq.heappop(h)
        294                  r = self.rev(n)
        295                  if n not in seen:
        296                      seen[n] = 1
        297                      yield (-d, n)
        298                      for p in self.parents(n):
        299                          heapq.heappush(h, (-dist[p], p))
   45   300
  147   301          x = ancestors(a)
        302          y = ancestors(b)
        303          lx = x.next()
        304          ly = y.next()
   45   305
  147   306          # increment each ancestor list until it is closer to root than
        307          # the other, or they match
        308          while 1:
        309              if lx == ly:
        310                  return lx[1]
        311              elif lx < ly:
        312                  ly = y.next()
        313              elif lx > ly:
        314                  lx = x.next()
    0   315
   46   316      def group(self, linkmap):
        317          # given a list of changeset revs, return a set of deltas and
   94   318          # metadata corresponding to nodes. the first delta is
   46   319          # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        320          # have this parent as it has all history before these
        321          # changesets. parent is parent[0]
        322
        323          revs = []
        324          needed = {}
        325
        326          # find file nodes/revs that match changeset revs
        327          for i in xrange(0, self.count()):
        328              if self.index[i][3] in linkmap:
        329                  revs.append(i)
        330                  needed[i] = 1
        331
        332          # if we don't have any revisions touched by these changesets, bail
        333          if not revs: return struct.pack(">l", 0)
        334
        335          # add the parent of the first rev
        336          p = self.parents(self.node(revs[0]))[0]
        337          revs.insert(0, self.rev(p))
        338
        339          # for each delta that isn't contiguous in the log, we need to
        340          # reconstruct the base, reconstruct the result, and then
        341          # calculate the delta. We also need to do this where we've
        342          # stored a full version and not a delta
        343          for i in xrange(0, len(revs) - 1):
        344              a, b = revs[i], revs[i + 1]
        345              if a + 1 != b or self.base(b) == b:
        346                  for j in xrange(self.base(a), a + 1):
        347                      needed[j] = 1
        348                  for j in xrange(self.base(b), b + 1):
        349                      needed[j] = 1
        350
        351          # calculate spans to retrieve from datafile
        352          needed = needed.keys()
        353          needed.sort()
        354          spans = []
        355          for n in needed:
        356              if n < 0: continue
        357              o = self.start(n)
        358              l = self.length(n)
        359              spans.append((o, l, [(n, l)]))
        360
        361          # merge spans
        362          merge = [spans.pop(0)]
        363          while spans:
        364              e = spans.pop(0)
        365              f = merge[-1]
        366              if e[0] == f[0] + f[1]:
        367                  merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
        368              else:
        369                  merge.append(e)
        370
        371          # read spans in, divide up chunks
        372          chunks = {}
        373          for span in merge:
        374              # we reopen the file for each span to make http happy for now
        375              f = self.opener(self.datafile)
        376              f.seek(span[0])
        377              data = f.read(span[1])
        378
        379              # divide up the span
        380              pos = 0
        381              for r, l in span[2]:
        382                  chunks[r] = data[pos: pos + l]
        383                  pos += l
        384
        385          # helper to reconstruct intermediate versions
        386          def construct(text, base, rev):
   71   387              bins = [decompress(chunks[r]) for r in xrange(base + 1, rev + 1)]
        388              return mdiff.patches(text, bins)
   46   389
        390          # build deltas
        391          deltas = []
   71   392          for d in xrange(0, len(revs) - 1):
   46   393              a, b = revs[d], revs[d + 1]
        394              n = self.node(b)
        395
        396              if a + 1 != b or self.base(b) == b:
        397                  if a >= 0:
        398                      base = self.base(a)
        399                      ta = decompress(chunks[self.base(a)])
        400                      ta = construct(ta, base, a)
        401                  else:
        402                      ta = ""
        403
        404                  base = self.base(b)
        405                  if a > base:
        406                      base = a
        407                      tb = ta
        408                  else:
        409                      tb = decompress(chunks[self.base(b)])
        410                  tb = construct(tb, base, b)
        411                  d = self.diff(ta, tb)
        412              else:
        413                  d = decompress(chunks[b])
        414
        415              p = self.parents(n)
        416              meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
        417              l = struct.pack(">l", len(meta) + len(d) + 4)
        418              deltas.append(l + meta + d)
        419
        420          l = struct.pack(">l", sum(map(len, deltas)) + 4)
        421          deltas.insert(0, l)
        422          return "".join(deltas)
        423
        424      def addgroup(self, data, linkmapper, transaction):
        425          # given a set of deltas, add them to the revision log. the
        426          # first delta is against its parent, which should be in our
        427          # log, the rest are against the previous delta.
        428
   65   429          if not data: return self.tip()
   46   430
        431          # retrieve the parent revision of the delta chain
   65   432          chain = data[24:44]
   84   433          if not chain in self.nodemap:
        434              raise "unknown base %s" % short(chain[:4])
   46   435
        436          # track the base of the current delta log
        437          r = self.count()
        438          t = r - 1
        439
        440          base = prev = -1
        441          start = end = 0
        442          if r:
        443              start = self.start(self.base(t))
        444              end = self.end(t)
        445              measure = self.length(self.base(t))
        446              base = self.base(t)
        447              prev = self.tip()
        448
        449          transaction.add(self.datafile, end)
        450          transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        451          dfh = self.opener(self.datafile, "a")
        452          ifh = self.opener(self.indexfile, "a")
        453
        454          # loop through our set of deltas
   65   455          pos = 0
   46   456          while pos < len(data):
        457              l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
        458                                                  data[pos:pos+84])
   94   459              link = linkmapper(cs)
   77   460              if node in self.nodemap:
        461                  raise "already have %s" % hex(node[:4])
   46   462              delta = data[pos + 84:pos + l]
        463              pos += l
        464
        465              # full versions are inserted when the needed deltas become
        466              # comparable to the uncompressed text or when the previous
        467              # version is not the one we have a delta against. We use
        468              # the size of the previous full rev as a proxy for the
        469              # current size.
        470
        471              if chain == prev:
        472                  cdelta = compress(delta)
        473
        474              if chain != prev or (end - start + len(cdelta)) > measure * 2:
        475                  # flush our writes here so we can read it in revision
        476                  dfh.flush()
        477                  ifh.flush()
   65   478                  text = self.revision(chain)
   73   479                  text = self.patches(text, [delta])
   46   480                  chk = self.addrevision(text, transaction, link, p1, p2)
        481                  if chk != node:
        482                      raise "consistency error adding group"
        483                  measure = len(text)
        484              else:
        485                  e = (end, len(cdelta), self.base(t), link, p1, p2, node)
        486                  self.index.append(e)
        487                  self.nodemap[node] = r
        488                  dfh.write(cdelta)
        489                  ifh.write(struct.pack(indexformat, *e))
        490
   65   491              t, r, chain, prev = r, r + 1, node, node
   46   492              start = self.start(self.base(t))
        493              end = self.end(t)
        494
        495          dfh.close()
        496          ifh.close()
        497          return node