view mercurial/ @ 536:c15b4bc0a11c

Refactor diffrevs/diffdir into changes -----BEGIN PGP SIGNED MESSAGE----- Hash: SHA1 Refactor diffrevs/diffdir into changes Add dirstate.changes to replace most of diffdir Add localrepository.changes to replace diffrevs/diffdir This code can now efficiently check for changes in single files, and often without consulting the manifest. This should eventually make 'hg diff Makefile' in a large project much faster. This also fixes a bug where 'hg diff -r tip' failed to account for files that had been added but not committed yet. manifest hash: 20fde5d4b4cee49a76bcfe50f2dacf58b1f2258b -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.0 (GNU/Linux) iD8DBQFCxMxpywK+sNU5EO8RAhzOAJ9VLQJoC+hiRYQtTSPbDhXBEJfQZwCgpDx9 GAwQ9jZHNsgXckBfXNCkJV8= =hMuc -----END PGP SIGNATURE-----
date Thu, 30 Jun 2005 20:54:01 -0800
parents aace5b681fe9
children 411e05b04ffa
line wrap: on
line source

# hg.py - repository classes for mercurial
#
# Copyright 2005 Matt Mackall <>
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import sys, struct, os
import util
from revlog import *
from demandload import *
demandload(globals(), "re lock urllib urllib2 transaction time socket")
demandload(globals(), "tempfile httprangereader bdiff")

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        os.path.join("data", path + ".i"),
                        os.path.join("data", path + ".d"))

    def read(self, node):
        t = self.revision(node)
        if t[:2] != '\1\n':
            return t
        s = t.find('\1\n', 2)
        return t[s+2:]

    def readmeta(self, node):
        t = self.revision(node)
        if t[:2] != '\1\n':
            return t
        s = t.find('\1\n', 2)
        mt = t[2:s]
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text[:2] == '\1\n':
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n" + "".join(mt) + "\1\n" + text
        return self.addrevision(text, transaction, link, p1, p2)

    def annotate(self, node):

        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)

        def pair(parent, child):
            for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
                child[0][b1:b2] = parent[0][a1:a2]
            return child

        # find all ancestors
        needed = {node:1}
        visit = [node]
        while visit:
            n = visit.pop(0)
            for p in self.parents(n):
                if p not in needed:
                    needed[p] = 1
                    # count how many times we'll use this
                    needed[p] += 1

        # sort by revision which is a topological order
        visit = [ (self.rev(n), n) for n in needed.keys() ]
        hist = {}

        for r,n in visit:
            curr = decorate(, self.linkrev(n))
            for p in self.parents(n):
                if p != nullid:
                    curr = pair(hist[p], curr)
                    # trim the history of unneeded revs
                    needed[p] -= 1
                    if not needed[p]:
                        del hist[p]
            hist[n] = curr

        return zip(hist[n][0], hist[n][1].splitlines(1))

class manifest(revlog):
    def __init__(self, opener):
        self.mapcache = None
        self.listcache = None
        self.addlist = None
        revlog.__init__(self, opener, "00manifest.i", "00manifest.d")

    def read(self, node):
        if node == nullid: return {} # don't upset local cache
        if self.mapcache and self.mapcache[0] == node:
            return self.mapcache[1].copy()
        text = self.revision(node)
        map = {}
        flag = {}
        self.listcache = (text, text.splitlines(1))
        for l in self.listcache[1]:
            (f, n) = l.split('\0')
            map[f] = bin(n[:40])
            flag[f] = (n[40:-1] == "x")
        self.mapcache = (node, map, flag)
        return map

    def readflags(self, node):
        if node == nullid: return {} # don't upset local cache
        if not self.mapcache or self.mapcache[0] != node:
        return self.mapcache[2]

    def diff(self, a, b):
        # this is sneaky, as we're not actually using a and b
        if self.listcache and self.addlist and self.listcache[0] == a:
            d = mdiff.diff(self.listcache[1], self.addlist, 1)
            if mdiff.patch(a, d) != b:
                sys.stderr.write("*** sortdiff failed, falling back ***\n")
                return mdiff.textdiff(a, b)
            return d
            return mdiff.textdiff(a, b)

    def add(self, map, flags, transaction, link, p1=None, p2=None):
        files = map.keys()

        self.addlist = ["%s\000%s%s\n" %
                        (f, hex(map[f]), flags[f] and "x" or '')
                        for f in files]
        text = "".join(self.addlist)

        n = self.addrevision(text, transaction, link, p1, p2)
        self.mapcache = (n, map, flags)
        self.listcache = (text, self.addlist)
        self.addlist = None

        return n

class changelog(revlog):
    """The changelog revlog: one revision per changeset, each storing
    the manifest node, user, date, changed files and description."""

    def __init__(self, opener):
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d")

    def extract(self, text):
        """Parse changeset text into (manifest, user, date, files, desc)."""
        if not text:
            return (nullid, "", "0", [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2]
        files = l[3:]
        return (manifest, user, date, files, desc)

    def read(self, node):
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
                  user=None, date=None):
        """Add a changeset; user/date default from the environment."""
        user = (user or
                os.environ.get("HGUSER") or
                os.environ.get("EMAIL") or
                (os.environ.get("LOGNAME",
                                os.environ.get("USERNAME", "unknown"))
                 + '@' + socket.getfqdn()))
        date = date or "%d %d" % (time.time(), time.timezone)
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)

class dirstate:
    def __init__(self, opener, ui, root):
        self.opener = opener
        self.root = root
        self.dirty = 0
        self.ui = ui = None = None
        self.copies = {}

    def __del__(self):
        if self.dirty:

    def __getitem__(self, key):
        except TypeError:
            return self[key]

    def __contains__(self, key):
        if not
        return key in

    def parents(self):
        if not

    def setparents(self, p1, p2 = nullid):
        self.dirty = 1 = p1, p2

    def state(self, key):
            return self[key][0]
        except KeyError:
            return "?"

    def read(self):
        if is not None: return = {} = [nullid, nullid]
            st = self.opener("dirstate").read()
            if not st: return
        except: return = [st[:20], st[20: 40]]

        pos = 40
        while pos < len(st):
            e = struct.unpack(">cllll", st[pos:pos+17])
            l = e[4]
            pos += 17
            f = st[pos:pos + l]
            if '\0' in f:
                f, c = f.split('\0')
                self.copies[f] = c
  [f] = e[:4]
            pos += l

    def copy(self, source, dest):
        self.dirty = 1
        self.copies[dest] = source

    def copied(self, file):
        return self.copies.get(file, None)

    def update(self, files, state):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition'''

        if not files: return
        self.dirty = 1
        for f in files:
            if state == "r":
      [f] = ('r', 0, 0, 0)
                s = os.stat(os.path.join(self.root, f))
      [f] = (state, s.st_mode, s.st_size, s.st_mtime)

    def forget(self, files):
        if not files: return
        self.dirty = 1
        for f in files:
            except KeyError:
                self.ui.warn("not in dirstate: %s!\n" % f)

    def clear(self): = {}
        self.dirty = 1

    def write(self):
        st = self.opener("dirstate", "w")
        for f, e in
            c = self.copied(f)
            if c:
                f = f + "\0" + c
            e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
            st.write(e + f)
        self.dirty = 0

    def changes(self, files, ignore):
        dc =
        lookup, changed, added, unknown = [], [], [], []

        # compare all files by default
        if not files: files = [self.root]

        def uniq(g):
            seen = {}
            for f in g:
                if f not in seen:
                    seen[f] = 1
                    yield f

        # recursive generator of all files listed
        def walk(files):
            for f in uniq(files):
                if os.path.isdir(f):
                    for dir, subdirs, fl in os.walk(f):
                        d = dir[len(self.root) + 1:]
                        if ".hg" in subdirs: subdirs.remove(".hg")
                        for fn in fl:
                            fn = util.pconvert(os.path.join(d, fn))
                            yield fn
                    yield f[len(self.root) + 1:]

        for fn in uniq(walk(files)):
            try: s = os.stat(os.path.join(self.root, fn))
            except: continue

            if fn in dc:
                c = dc[fn]
                del dc[fn]

                if c[0] == 'm':
                elif c[0] == 'a':
                elif c[0] == 'r':
                elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
                elif c[1] != s.st_mode or c[3] != s.st_mtime:
                if not ignore(fn): unknown.append(fn)

        return (lookup, changed, added, dc.keys(), unknown)

# used to avoid circular references so destructors work
def opener(base):
    """Return a function that opens files relative to base.

    http:// bases yield range-capable readers; local bases create
    missing directories on write and break hardlinks before writing.
    """
    p = base
    def o(path, mode="r"):
        if p[:7] == "http://":
            f = os.path.join(p, urllib.quote(path))
            return httprangereader.httprangereader(f)

        f = os.path.join(p, path)

        mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                s = os.stat(f)
            except OSError:
                # target missing: make sure its directory exists
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if s.st_nlink > 1:
                    # break up hardlinks so we don't modify other copies
                    file(f + ".tmp", "wb").write(file(f, "rb").read())
                    util.rename(f+".tmp", f)

        return file(f, mode)

    return o

class RepoError(Exception):
    """Raised for repository-level failures (e.g. repo not found)."""

class localrepository:
    def __init__(self, ui, path=None, create=0):
        self.remote = 0
        if path and path[:7] == "http://":
            self.remote = 1
            self.path = path
            if not path:
                p = os.getcwd()
                while not os.path.isdir(os.path.join(p, ".hg")):
                    oldp = p
                    p = os.path.dirname(p)
                    if p == oldp: raise RepoError("no repo found")
                path = p
            self.path = os.path.join(path, ".hg")

            if not create and not os.path.isdir(self.path):
                raise RepoError("repository %s not found" % self.path)

        self.root = path
        self.ui = ui

        if create:

        self.opener = opener(self.path)
        self.wopener = opener(self.root)
        self.manifest = manifest(self.opener)
        self.changelog = changelog(self.opener)
        self.ignorelist = None
        self.tagscache = None
        self.nodetagscache = None

        if not self.remote:
            self.dirstate = dirstate(self.opener, ui, self.root)
            except IOError: pass

    def ignore(self, f):
        if self.ignorelist is None:
            self.ignorelist = []
                l = file(self.wjoin(".hgignore"))
                for pat in l:
                    if pat != "\n":
            except IOError: pass
        for pat in self.ignorelist:
            if return True
        return False

    def hook(self, name, **args):
        """Run the configured shell hook `name` with args exported as
        environment variables; return False if the hook fails."""
        s = self.ui.config("hooks", name)
        if s:
            self.ui.note("running hook %s: %s\n" % (name, s))
            old = {}
            for k, v in args.items():
                k = k.upper()
                old[k] = os.environ.get(k, None)
                os.environ[k] = v

            r = os.system(s)

            # restore the previous environment
            for k, v in old.items():
                if v != None:
                    os.environ[k] = v
                else:
                    del os.environ[k]

            if r:
                self.ui.warn("abort: %s hook failed with status %d!\n" %
                             (name, r))
                return False
        return True

    def tags(self):
        '''return a mapping of tag to node'''
        if not self.tagscache:
            self.tagscache = {}
            try:
                # read each head of the tags file, ending with the tip
                # and add each tag found to the map, with "newer" ones
                # taking precedence
                fl = self.file(".hgtags")
                h = fl.heads()
                for r in h:
                    for l in fl.revision(r).splitlines():
                        if l:
                            n, k = l.split(" ", 1)
                            try:
                                bin_n = bin(n)
                            except TypeError:
                                # unparsable hash: keep the tag, void node
                                bin_n = ''
                            self.tagscache[k.strip()] = bin_n
            except KeyError:
                # no .hgtags file in the repository
                pass
            # tags from the config override file tags
            for k, n in self.ui.configitems("tags"):
                try:
                    bin_n = bin(n)
                except TypeError:
                    bin_n = ''
                self.tagscache[k] = bin_n

            self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r, t, n))
        l.sort()
        return [(t, n) for r, t, n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            # invert the tag -> node map once and cache it
            self.nodetagscache = {}
            for t, n in self.tags().items():
                self.nodetagscache.setdefault(n, []).append(t)
        return self.nodetagscache.get(node, [])

    def lookup(self, key):
        """Resolve key to a changelog node: try tags first, then the
        changelog's own lookup (rev number or hash prefix)."""
        try:
            return self.tags()[key]
        except KeyError:
            return self.changelog.lookup(key)

    def join(self, f):
        """Return the path of f inside the repository's .hg directory."""
        return os.path.join(self.path, f)

    def wjoin(self, f):
        """Return the path of f inside the working directory."""
        return os.path.join(self.root, f)

    def file(self, f):
        """Return a filelog for the repository-relative path f."""
        name = f
        # normalize away a leading slash
        if name[0] == '/':
            name = name[1:]
        return filelog(self.opener, name)

    def wfile(self, f, mode='r'):
        """Open file f from the working directory with the given mode."""
        return self.wopener(f, mode)

    def transaction(self):
        """Start a journalled transaction, saving dirstate for undo."""
        # save dirstate for undo
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("undo.dirstate", "w").write(ds)

        # NOTE(review): trailing argument reconstructed — confirm
        # transaction.transaction's signature (journal, undo file)
        return transaction.transaction(self.opener, self.join("journal"),
                                       self.join("undo"))

    def recover(self):
        """Roll back an interrupted transaction, if one exists."""
        lock = self.lock()
        if os.path.exists(self.join("recover")):
            self.ui.status("rolling back interrupted transaction\n")
            return transaction.rollback(self.opener, self.join("recover"))
        else:
            self.ui.warn("no interrupted transaction available\n")

    def undo(self):
        """Roll back the last transaction and restore the saved dirstate."""
        lock = self.lock()
        if os.path.exists(self.join("undo")):
            self.ui.status("rolling back last transaction\n")
            transaction.rollback(self.opener, self.join("undo"))
            # drop the in-memory dirstate before replacing the file
            self.dirstate = None
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.dirstate = dirstate(self.opener, self.ui, self.root)
        else:
            self.ui.warn("no undo information available\n")

    def lock(self, wait = 1):
            return lock.lock(self.join("lock"), 0)
        except lock.LockHeld, inst:
            if wait:
                self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
                return lock.lock(self.join("lock"), wait)
            raise inst

    def rawcommit(self, files, text, user, date, p1=None, p2=None):
        orig_parent = self.dirstate.parents()[0] or nullid
        p1 = p1 or self.dirstate.parents()[0] or nullid
        p2 = p2 or self.dirstate.parents()[1] or nullid
        c1 =
        c2 =
        m1 =[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 =[0])

        if orig_parent == p1:
            update_dirstate = 1
            update_dirstate = 0

        tr = self.transaction()
        mm = m1.copy()
        mfm = mf1.copy()
        linkrev = self.changelog.count()
        for f in files:
                t = self.wfile(f).read()
                tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
                r = self.file(f)
                mfm[f] = tm
                mm[f] = r.add(t, {}, tr, linkrev,
                              m1.get(f, nullid), m2.get(f, nullid))
                if update_dirstate:
                    self.dirstate.update([f], "n")
            except IOError:
                    del mm[f]
                    del mfm[f]
                    if update_dirstate:
                    # deleted from p2?

        mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
        n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
        if update_dirstate:
            self.dirstate.setparents(n, nullid)

    def commit(self, files = None, text = "", user = None, date = None):
        commit = []
        remove = []
        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                elif s == 'r':
                    self.ui.warn("%s not tracked!\n" % f)
            (c, a, d, u) = self.changes(None, None)
            commit = c + a
            remove = d

        if not commit and not remove:
            self.ui.status("nothing changed\n")

        if not self.hook("precommit"):
            return 1

        p1, p2 = self.dirstate.parents()
        c1 =
        c2 =
        m1 =[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 =[0])
        lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        for f in commit:
            self.ui.note(f + "\n")
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wfile(f).read()
            except IOError:
                self.warn("trouble committing %s!\n" % f)

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"])) 

            r = self.file(f)
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)
            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)

        # update manifest
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0])

        # add changeset
        new = new.keys()

        if not text:
            edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
            edittext += "".join(["HG: changed %s\n" % f for f in new])
            edittext += "".join(["HG: removed %s\n" % f for f in remove])
            edittext = self.ui.edit(edittext)
            if not edittext.rstrip():
                return 1
            text = edittext

        n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)

        if not self.hook("commit", node=hex(n)):
            return 1


        self.dirstate.update(new, "n")

    def changes(self, node1, node2, *files):
        # changed, added, deleted, unknown
        c, a, d, u, mf1 = [], [], [], [], None

        def fcmp(fn, mf):
            t1 = self.wfile(fn).read()
            t2 = self.file(fn).revision(mf[fn])
            return cmp(t1, t2)

        # are we comparing the working directory?
        if not node1:
            l, c, a, d, u = self.dirstate.changes(files, self.ignore)

            # are we comparing working dir against its parent?
            if not node2:
                if l:
                    # do a full compare of any files that might have changed
                    change =[0])
                    mf1 =[0])
                    for f in lookup:
                        if fcmp(f, mf):
                return (c, a, d, u)

        # are we comparing working dir against non-tip?
        # generate a pseudo-manifest for the working dir
        if not node1:
            if not mf1:
                change =[0])
                mf1 =[0])
            for f in a + c + l:
                mf1[f] = ""
            for f in d:
                if f in mf1: del mf1[f]
            change =
            mf1 =[0])

        change =
        mf2 =[0])

        for fn in mf2:
            if mf1.has_key(fn):
                if mf1[fn] != mf2[fn]:
                    if mf1[fn] != "" or fcmp(fn, mf2):
                del mf1[fn]

        d = mf1.keys()

        return (c, a, d, u)

    def add(self, list):
        """Schedule the given files for addition at the next commit."""
        for f in list:
            p = self.wjoin(f)
            if not os.path.isfile(p):
                self.ui.warn("%s does not exist!\n" % f)
            elif self.dirstate.state(f) == 'n':
                self.ui.warn("%s already tracked!\n" % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list):
        """Undo a pending add for the given files."""
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn("%s not added!\n" % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list):
        """Schedule the given (already deleted) files for removal."""
        for f in list:
            p = self.wjoin(f)
            if os.path.isfile(p):
                self.ui.warn("%s still exists!\n" % f)
            elif self.dirstate.state(f) == 'a':
                self.ui.warn("%s never committed!\n" % f)
            elif f not in self.dirstate:
                self.ui.warn("%s not tracked!\n" % f)
            else:
                self.dirstate.update([f], "r")

    def copy(self, source, dest):
        """Record dest as a copy of source in the dirstate."""
        p = self.wjoin(dest)
        if not os.path.isfile(p):
            self.ui.warn("%s does not exist!\n" % dest)
        else:
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self):
        """Return the changelog's head nodes."""
        return self.changelog.heads()

    def branches(self, nodes):
        """For each node, walk first-parents back to a branch point and
        return (head, root, parent1, parent2) tuples."""
        if not nodes: nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                # stop at a merge, or at the root of history
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        """For each (top, bottom) pair, return the nodes found at
        exponentially-spaced steps (1, 2, 4, ...) walking first-parents
        from top towards bottom (used for binary search over a branch)."""
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def newer(self, nodes):
        """Return changelog nodes that are descendants of the given
        nodes (excluding the given nodes themselves)."""
        m = {}
        nl = []
        pm = {}
        cl = self.changelog
        t = l = cl.count()

        # find the lowest numbered node
        for n in nodes:
            l = min(l, cl.rev(n))
            m[n] = 1

        for i in xrange(l, t):
            n = cl.node(i)
            if n in m: # explicitly listed
                pm[n] = 1
                continue
            for p in cl.parents(n):
                if p in pm: # parent listed
                    pm[n] = 1
                    nl.append(n)
                    break

        return nl

    def findincoming(self, remote):
        """Find the roots of the changesets missing locally that must be
        fetched from remote.  Returns [nullid] for a full pull, None if
        nothing is missing, otherwise the list of fetch roots."""
        m = self.changelog.nodemap
        search = []
        fetch = []
        seen = {}
        seenbranch = {}

        # if we have an empty repo, fetch everything
        if self.changelog.tip() == nullid:
            self.ui.status("requesting all changes\n")
            return [nullid]

        # otherwise, assume we're closer to the tip than the root
        self.ui.status("searching for changes\n")
        heads = remote.heads()
        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)

        if not unknown:
            # every remote head is already known locally
            return None

        rep = {}
        reqcnt = 0

        # examine remote branches: (head, root, parent1, parent2) tuples
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch.append(n[1]) # earliest unknown
                    # queue unknown parents for the next request round
                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug("request %d: %s\n" %
                            (reqcnt, " ".join(map(short, r))))
                # ask about parents in batches of ten
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        if b[0] not in m and b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found incomplete
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.append(p)
                    else:
                        self.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity and relatedness checks
        for f in fetch:
            if f in m:
                raise RepoError("already have changeset " + short(f[:4]))

        if fetch == [nullid]:
            self.ui.warn("warning: pulling from an unrelated repository!\n")

        self.ui.note("adding new changesets starting at " +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug("%d total queries\n" % reqcnt)

        return fetch

    def changegroup(self, basenodes):
        nodes = self.newer(basenodes)

        # construct the link map
        linkmap = {}
        for n in nodes:
            linkmap[self.changelog.rev(n)] = n

        # construct a list of all changed files
        changed = {}
        for n in nodes:
            c =
            for f in c[3]:
                changed[f] = 1
        changed = changed.keys()

        # the changegroup is changesets + manifests + all file revs
        revs = [ self.changelog.rev(n) for n in nodes ]

        for y in yield y
        for y in yield y
        for f in changed:
            yield struct.pack(">l", len(f) + 4) + f
            g = self.file(f).group(linkmap)
            for y in g:
                yield y

    def addchangegroup(self, generator):

        class genread:
            def __init__(self, generator):
                self.g = generator
                self.buf = ""
            def read(self, l):
                while l > len(self.buf):
                        self.buf +=
                    except StopIteration:
                d, self.buf = self.buf[:l], self.buf[l:]
                return d

        def getchunk():
            d =
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4: return ""
            return - 4)

        def getgroup():
            while 1:
                c = getchunk()
                if not c: break
                yield c

        def csmap(x):
            self.ui.debug("add changeset %s\n" % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not generator: return
        changesets = files = revisions = 0

        source = genread(generator)
        lock = self.lock()
        tr = self.transaction()

        # pull off the changeset group
        self.ui.status("adding changesets\n")
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        changesets = self.changelog.rev(cn) - self.changelog.rev(co)

        # pull off the manifest group
        self.ui.status("adding manifests\n")
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status("adding file revisions\n")
        while 1:
            f = getchunk()
            if not f: break
            self.ui.debug("adding %s revisions\n" % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        self.ui.status(("modified %d files, added %d changesets" +
                        " and %d new revisions\n")
                       % (files, changesets, revisions))


    def update(self, node, allow=False, force=False):
        """Update the working directory to changeset `node`.

        If `node` is a linear descendant/ancestor of the current
        working dir parent, files are simply jumped to the new rev.
        Otherwise a branch merge is needed, which is only performed
        when allow is true.  force overrides the safety checks and
        clobbers local changes.  Returns 1 on abort, None on success.
        """
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn("aborting: outstanding uncommitted merges\n")
            return 1

        # merge working dir parent p1 with target rev p2
        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n)
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        # (changed, added, deleted, unknown) in the working directory
        (c, a, d, u) = self.changes(None, None)

        # is this a jump, or a merge?  i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note("resolving manifests\n")
        self.ui.debug(" ancestor %s local %s remote %s\n" %
                      (short(man), short(m1n), short(m2n)))

        merge = {}   # files needing 3-way merge: f -> (my, other, flags)
        get = {}     # files to fetch from the target rev: f -> filenode
        remove = []  # files to delete from the working dir
        mark = {}    # files whose dirstate entry must be marked merged

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        for f in a + c + u:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
        for f in d:
            if f in mw: del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if linear_path and f not in m2:
                self.dirstate.forget((f,))

        for f, n in mw.iteritems():
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wfile(f).read()
                    t2 = self.file(f).revision(m2[f])
                    if cmp(t1, t2) == 0:
                        mark[f] = 1
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(" %s versions differ, resolve\n" % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(" remote %s is newer, get\n" % f)
                        get[f] = m2[f]
                        s = 1
                    else:
                        # local version is newer; just mark it merged
                        mark[f] = 1

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(" updating permissions for %s\n" % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        # merge executable bits as for contents above
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(" updating permissions for %s\n" % f)
                            util.set_exec(self.wjoin(f), mode)
                            mark[f] = 1
                del m2[f]
            elif f in ma:
                if not force and n != ma[f]:
                    r = ""
                    if linear_path or allow:
                        r = self.ui.prompt(
                            (" local changed %s which remote deleted\n" % f) +
                            "(k)eep or (d)elete?", "[kd]", "k")
                    if r == "d":
                        remove.append(f)
                else:
                    self.ui.debug("other deleted %s\n" % f)
                    remove.append(f) # other deleted it
            else:
                if n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug("remote deleted %s\n" % f)
                        remove.append(f)
                    else:
                        self.ui.debug("local created %s, keeping\n" % f)
                else:
                    self.ui.debug("working dir created %s, keeping\n" % f)

        for f, n in m2.iteritems():
            if f[0] == "/": continue
            if not force and f in ma and n != ma[f]:
                r = ""
                if linear_path or allow:
                    r = self.ui.prompt(
                        ("remote changed %s which local deleted\n" % f) +
                        "(k)eep or (d)elete?", "[kd]", "k")
                if r == "d": remove.append(f)
            else:
                self.ui.debug("remote created %s\n" % f)
                get[f] = n

        del mw, m1, m2, ma

        # with force, skip the merge machinery and take remote versions
        if force:
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path:
            # we don't need to do any magic, just jump to the new rev
            mode = 'n'
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status("this update spans a branch" +
                               " affecting the following files:\n")
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge: cf = " (resolve)"
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn("aborting update spanning branches!\n")
                self.ui.status("(use update -m to perform a branch merge)\n")
                return 1
            # we have to remember what files we needed to get/change
            # because any file that's different from either one of its
            # parents must be in the changeset
            mode = 'm'
            self.dirstate.update(mark.keys(), "m")

        self.dirstate.setparents(p1, p2)

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/": continue
            self.ui.note("getting %s\n" % f)
            t = self.file(f).read(get[f])
            try:
                self.wfile(f, "w").write(t)
            except IOError:
                # the file's directory probably doesn't exist yet
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wfile(f, "w").write(t)
            util.set_exec(self.wjoin(f), mf2[f])
            self.dirstate.update([f], mode)

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status("merging %s\n" % f)
            m, o, flag = merge[f]
            self.merge3(f, m, o)
            util.set_exec(self.wjoin(f), flag)
            self.dirstate.update([f], 'm')

        for f in remove:
            self.ui.note("removing %s\n" % f)
            os.unlink(f)
        if mode == 'n':
            # plain jump: the removed files simply leave the dirstate
            self.dirstate.forget(remove)
        else:
            self.dirstate.update(remove, 'r')

    def merge3(self, fn, my, other):
        """perform a 3-way merge in the working directory

        fn is the working-dir file name; my and other are the two
        filelog nodes to merge against their common ancestor.
        """

        def temp(prefix, node):
            # write revision `node` of fn to a uniquely-named temp file
            # and return its path
            pre = "%s~%s." % (os.path.basename(fn), prefix)
            (fd, name) = tempfile.mkstemp("", pre)
            f = os.fdopen(fd, "wb")
            f.write(fl.revision(node))
            f.close()
            return name

        fl = self.file(fn)
        base = fl.ancestor(my, other)
        a = self.wjoin(fn)
        b = temp("base", base)
        c = temp("other", other)

        self.ui.note("resolving %s\n" % fn)
        self.ui.debug("file %s: other %s ancestor %s\n" %
                              (fn, short(other), short(base)))

        # delegate the actual merging to the external HGMERGE program,
        # which edits `a` (the working copy) in place
        cmd = os.environ.get("HGMERGE", "hgmerge")
        r = os.system("%s %s %s %s" % (cmd, a, b, c))
        if r:
            self.ui.warn("merging %s failed!\n" % fn)

        # clean up the temporary base/other copies
        os.unlink(b)
        os.unlink(c)


    def verify(self):
        filelinkrevs = {}
        filenodes = {}
        changesets = revisions = files = 0
        errors = 0

        seen = {}
        self.ui.status("checking changesets\n")
        for i in range(self.changelog.count()):
            changesets += 1
            n = self.changelog.node(i)
            if n in seen:
                self.ui.warn("duplicate changeset at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.changelog.parents(n):
                if p not in self.changelog.nodemap:
                    self.ui.warn("changeset %s has unknown parent %s\n" %
                                 (short(n), short(p)))
                    errors += 1
                changes =
            except Exception, inst:
                self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
                errors += 1

            for f in changes[3]:
                filelinkrevs.setdefault(f, []).append(i)

        seen = {}
        self.ui.status("checking manifests\n")
        for i in range(self.manifest.count()):
            n = self.manifest.node(i)
            if n in seen:
                self.ui.warn("duplicate manifest at revision %d\n" % i)
                errors += 1
            seen[n] = 1

            for p in self.manifest.parents(n):
                if p not in self.manifest.nodemap:
                    self.ui.warn("manifest %s has unknown parent %s\n" %
                            (short(n), short(p)))
                    errors += 1

                delta = mdiff.patchtext(
            except KeyboardInterrupt:
                print "aborted"
            except Exception, inst:
                self.ui.warn("unpacking manifest %s: %s\n"
                             % (short(n), inst))
                errors += 1

            ff = [ l.split('\0') for l in delta.splitlines() ]
            for f, fn in ff:
                filenodes.setdefault(f, {})[bin(fn[:40])] = 1

        self.ui.status("crosschecking files in changesets and manifests\n")
        for f in filenodes:
            if f not in filelinkrevs:
                self.ui.warn("file %s in manifest but not in changesets\n" % f)
                errors += 1

        for f in filelinkrevs:
            if f not in filenodes:
                self.ui.warn("file %s in changeset but not in manifest\n" % f)
                errors += 1

        self.ui.status("checking files\n")
        ff = filenodes.keys()
        for f in ff:
            if f == "/dev/null": continue
            files += 1
            fl = self.file(f)
            nodes = { nullid: 1 }
            seen = {}
            for i in range(fl.count()):
                revisions += 1
                n = fl.node(i)

                if n in seen:
                    self.ui.warn("%s: duplicate revision %d\n" % (f, i))
                    errors += 1

                if n not in filenodes[f]:
                    self.ui.warn("%s: %d:%s not in manifests\n"
                                 % (f, i, short(n)))
                    print len(filenodes[f].keys()), fl.count(), f
                    errors += 1
                    del filenodes[f][n]

                flr = fl.linkrev(n)
                if flr not in filelinkrevs[f]:
                    self.ui.warn("%s:%s points to unexpected changeset %d\n"
                            % (f, short(n), fl.linkrev(n)))
                    errors += 1

                # verify contents
                    t =
                except Exception, inst:
                    self.ui.warn("unpacking file %s %s: %s\n"
                                 % (f, short(n), inst))
                    errors += 1

                # verify parents
                (p1, p2) = fl.parents(n)
                if p1 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 1 %s" %
                            (f, short(n), short(p1)))
                    errors += 1
                if p2 not in nodes:
                    self.ui.warn("file %s:%s unknown parent 2 %s" %
                            (f, short(n), short(p1)))
                    errors += 1
                nodes[n] = 1

            # cross-check
            for node in filenodes[f]:
                self.ui.warn("node %s in manifests not in %s\n"
                             % (hex(n), f))
                errors += 1

        self.ui.status("%d files, %d changesets, %d total revisions\n" %
                       (files, changesets, revisions))

        if errors:
            self.ui.warn("%d integrity errors encountered!\n" % errors)
            return 1

class remoterepository:
    """Repository accessed over HTTP via the hgweb command protocol.

    Supports optional HTTP proxying (with basic auth) configured via
    the [http_proxy] ui section or the conventional environment
    variables.
    """
    def __init__(self, ui, path):
        self.url = path
        self.ui = ui
        no_list = [ "localhost", "" ]
        host = ui.config("http_proxy", "host")
        if host is None:
            host = os.environ.get("http_proxy")
        if host and host.startswith('http://'):
            host = host[7:]
        user = ui.config("http_proxy", "user")
        passwd = ui.config("http_proxy", "passwd")
        no = ui.config("http_proxy", "no")
        if no is None:
            no = os.environ.get("no_proxy")
        if no:
            no_list = no_list + no.split(",")

        # bypass the proxy for hosts on the no-proxy list
        no_proxy = 0
        for h in no_list:
            if (path.startswith("http://" + h + "/") or
                path.startswith("http://" + h + ":") or
                path == "http://" + h):
                no_proxy = 1

        # Note: urllib2 takes proxy values from the environment and those will
        # take precedence
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            if os.environ.has_key(env):
                del os.environ[env]

        proxy_handler = urllib2.BaseHandler()
        if host and not no_proxy:
            proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})

        authinfo = None
        if user and passwd:
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, host, user, passwd)
            authinfo = urllib2.ProxyBasicAuthHandler(passmgr)

        # install globally so the urllib2.urlopen call in do_cmd
        # picks up our proxy/auth configuration
        opener = urllib2.build_opener(proxy_handler, authinfo)
        urllib2.install_opener(opener)

    def do_cmd(self, cmd, **args):
        """issue `cmd` with keyword args as query parameters; return
        the open response object"""
        self.ui.debug("sending %s command\n" % cmd)
        q = {"cmd": cmd}
        q.update(args)
        qs = urllib.urlencode(q)
        cu = "%s?%s" % (self.url, qs)
        return urllib2.urlopen(cu)

    def heads(self):
        """return the list of head nodes of the remote repository"""
        d = self.do_cmd("heads").read()
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def branches(self, nodes):
        """return the branch tuples for the given remote nodes"""
        n = " ".join(map(hex, nodes))
        d = self.do_cmd("branches", nodes=n).read()
        try:
            br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
            return br
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def between(self, pairs):
        """return the nodes between each (top, bottom) pair"""
        n = "\n".join(["-".join(map(hex, p)) for p in pairs])
        d = self.do_cmd("between", pairs=n).read()
        try:
            p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
            return p
        except:
            self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
            raise

    def changegroup(self, nodes):
        """generate the decompressed changegroup data rooted at nodes"""
        n = " ".join(map(hex, nodes))
        zd = zlib.decompressobj()
        f = self.do_cmd("changegroup", roots=n)
        bytes = 0
        while 1:
            d = f.read(4096)
            bytes += len(d)
            if not d:
                # EOF: flush any data buffered in the decompressor
                yield zd.flush()
                break
            yield zd.decompress(d)
        self.ui.note("%d bytes of data transfered\n" % bytes)

def repository(ui, path=None, create=0):
    """return a repository object for the given path

    http:// and hg:// paths yield a remoterepository; old-http://
    maps to a read-only local repository accessed over plain HTTP;
    anything else (including None) is a localrepository, optionally
    created when create is true.
    """
    if path and path[:7] == "http://":
        return remoterepository(ui, path)
    if path and path[:5] == "hg://":
        return remoterepository(ui, path.replace("hg://", "http://"))
    if path and path[:11] == "old-http://":
        return localrepository(ui, path.replace("old-http://", "http://"))

    # fall through: plain local repository
    return localrepository(ui, path, create)