changeset 2803:987c31e2a08c

Merge with crew
author Matt Mackall <mpm@selenic.com>
date Mon, 07 Aug 2006 16:47:06 -0500
parents fdc232d8a193 (diff) df220d0974dd (current diff)
children 4b20daa25f15
files hgext/mq.py mercurial/commands.py mercurial/localrepo.py mercurial/merge.py
diffstat 7 files changed, 591 insertions(+), 552 deletions(-)
--- a/hgext/mq.py	Mon Aug 07 19:08:55 2006 -0700
+++ b/hgext/mq.py	Mon Aug 07 16:47:06 2006 -0500
@@ -190,11 +190,11 @@
         self.ui.warn("patch didn't work out, merging %s\n" % patch)
 
         # apply failed, strip away that rev and merge.
-        repo.update(head, allow=False, force=True, wlock=wlock)
+        hg.update(repo, head, allow=False, force=True, wlock=wlock)
         self.strip(repo, n, update=False, backup='strip', wlock=wlock)
 
         c = repo.changelog.read(rev)
-        ret = repo.update(rev, allow=True, wlock=wlock)
+        ret = hg.update(repo, rev, allow=True, wlock=wlock)
         if ret:
             raise util.Abort(_("update returned %d") % ret)
         n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
@@ -530,7 +530,7 @@
             if c or a or d or r:
                 raise util.Abort(_("local changes found"))
             urev = self.qparents(repo, rev)
-            repo.update(urev, allow=False, force=True, wlock=wlock)
+            hg.update(repo, urev, allow=False, force=True, wlock=wlock)
             repo.dirstate.write()
 
         # save is a list of all the branches we are truncating away
@@ -1019,7 +1019,7 @@
                 if not r:
                     self.ui.warn("Unable to load queue repository\n")
                     return 1
-                r.update(qpp[0], allow=False, force=True)
+                hg.update(r, qpp[0], allow=False, force=True)
 
     def save(self, repo, msg=None):
         if len(self.applied) == 0:
@@ -1242,7 +1242,7 @@
             dr.mq.strip(dr, qbase, update=False, backup=None)
         if not opts['noupdate']:
             ui.note(_('updating destination repo\n'))
-            dr.update(dr.changelog.tip())
+            hg.update(dr, dr.changelog.tip())
 
 def commit(ui, repo, *pats, **opts):
     """commit changes in the queue repository"""
--- a/mercurial/commands.py	Mon Aug 07 19:08:55 2006 -0700
+++ b/mercurial/commands.py	Mon Aug 07 16:47:06 2006 -0500
@@ -976,7 +976,7 @@
         if opts['parent']:
             raise util.Abort(_('cannot use --parent on non-merge changeset'))
         parent = p1
-    repo.update(node, force=True, show_stats=False)
+    hg.update(repo, node, force=True, show_stats=False)
     revert_opts = opts.copy()
     revert_opts['rev'] = hex(parent)
     revert(ui, repo, **revert_opts)
@@ -993,7 +993,7 @@
     if op1 != node:
         if opts['merge']:
             ui.status(_('merging with changeset %s\n') % nice(op1))
-            doupdate(ui, repo, hex(op1), **opts)
+            doupdate(ui, repo, hex(op1), merge=True)
         else:
             ui.status(_('the backout changeset is a new head - '
                         'do not forget to merge\n'))
@@ -2386,7 +2386,7 @@
     operation. It should only be necessary when Mercurial suggests it.
     """
     if repo.recover():
-        return repo.verify()
+        return hg.verify(repo)
     return 1
 
 def remove(ui, repo, *pats, **opts):
@@ -2603,8 +2603,8 @@
 
     if not opts.get('dry_run'):
         repo.dirstate.forget(forget[0])
-        r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
-                        show_stats=False)
+        r = hg.update(repo, node, False, True, update.has_key, False,
+                      wlock=wlock, show_stats=False)
         repo.dirstate.update(add[0], 'a')
         repo.dirstate.update(undelete[0], 'n')
         repo.dirstate.update(remove[0], 'r')
@@ -2887,7 +2887,7 @@
     repo.rollback()
 
 def update(ui, repo, node=None, merge=False, clean=False, force=None,
-           branch=None, **opts):
+           branch=None):
     """update or merge working directory
 
     Update the working directory to the specified revision.
@@ -2905,10 +2905,10 @@
     if merge:
         ui.warn(_('(the -m/--merge option is deprecated; '
                   'use the merge command instead)\n'))
-    return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
+    return doupdate(ui, repo, node, merge, clean, force, branch)
 
 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
-             branch=None, **opts):
+             branch=None):
     if branch:
         br = repo.branchlookup(branch=branch)
         found = []
@@ -2918,7 +2918,7 @@
         if len(found) > 1:
             ui.warn(_("Found multiple heads for %s\n") % branch)
             for x in found:
-                show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
+                show_changeset(ui, repo, {}).show(changenode=x, brinfo=br)
             return 1
         if len(found) == 1:
             node = found[0]
@@ -2928,7 +2928,7 @@
             return 1
     else:
         node = node and repo.lookup(node) or repo.changelog.tip()
-    return repo.update(node, allow=merge, force=clean, forcemerge=force)
+    return hg.update(repo, node, allow=merge, force=clean, forcemerge=force)
 
 def verify(ui, repo):
     """verify the integrity of the repository
@@ -2940,7 +2940,7 @@
     the changelog, manifest, and tracked files, as well as the
     integrity of their crosslinks and indices.
     """
-    return repo.verify()
+    return hg.verify(repo)
 
 # Command options and aliases are listed here, alphabetically
 
--- a/mercurial/hg.py	Mon Aug 07 19:08:55 2006 -0700
+++ b/mercurial/hg.py	Mon Aug 07 16:47:06 2006 -0500
@@ -10,7 +10,7 @@
 from demandload import *
 from i18n import gettext as _
 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
-demandload(globals(), "errno lock os shutil util")
+demandload(globals(), "errno lock os shutil util merge")
 
 def _local(path):
     return (os.path.isfile(path and util.drop_scheme('file', path)) and
@@ -38,7 +38,7 @@
         return thing(path)
     except TypeError:
         return thing
-    
+
 def islocal(repo):
     '''return true if repo or path is local'''
     if isinstance(repo, str):
@@ -49,12 +49,13 @@
     return repo.local()
 
 def repository(ui, path=None, create=False):
+    """return a repository object for the specified path"""
     return _lookup(path).instance(ui, path, create)
 
 def defaultdest(source):
     '''return default destination of clone if none is given'''
     return os.path.basename(os.path.normpath(source))
-    
+
 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
           stream=False):
     """Make a copy of an existing repository.
@@ -199,8 +200,22 @@
             dest_lock.release()
 
         if update:
-            dest_repo.update(dest_repo.changelog.tip())
+            merge.update(dest_repo, dest_repo.changelog.tip())
     if dir_cleanup:
         dir_cleanup.close()
 
     return src_repo, dest_repo
+
+
+# This should instead be several functions with short arglists, like
+# update/merge/revert
+
+def update(repo, node, allow=False, force=False, choose=None,
+           moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
+    return merge.update(repo, node, allow, force, choose, moddirstate,
+                        forcemerge, wlock, show_stats)
+
+def verify(repo):
+    """verify the consistency of a repository"""
+    import verify as _verify
+    return _verify.verify(repo)
--- a/mercurial/localrepo.py	Mon Aug 07 19:08:55 2006 -0700
+++ b/mercurial/localrepo.py	Mon Aug 07 16:47:06 2006 -0500
@@ -1697,532 +1697,6 @@
 
         return newheads - oldheads + 1
 
-    def update(self, node, allow=False, force=False, choose=None,
-               moddirstate=True, forcemerge=False, wlock=None,
-               show_stats=True, remind=True):
-        pl = self.dirstate.parents()
-        if not force and pl[1] != nullid:
-            raise util.Abort(_("outstanding uncommitted merges"))
-
-        err = False
-
-        p1, p2 = pl[0], node
-        pa = self.changelog.ancestor(p1, p2)
-        m1n = self.changelog.read(p1)[0]
-        m2n = self.changelog.read(p2)[0]
-        man = self.manifest.ancestor(m1n, m2n)
-        m1 = self.manifest.read(m1n)
-        mf1 = self.manifest.readflags(m1n)
-        m2 = self.manifest.read(m2n).copy()
-        mf2 = self.manifest.readflags(m2n)
-        ma = self.manifest.read(man)
-        mfa = self.manifest.readflags(man)
-
-        modified, added, removed, deleted, unknown = self.changes()
-
-        # is this a jump, or a merge?  i.e. is there a linear path
-        # from p1 to p2?
-        linear_path = (pa == p1 or pa == p2)
-
-        if allow and linear_path:
-            raise util.Abort(_("there is nothing to merge, just use "
-                               "'hg update' or look at 'hg heads'"))
-        if allow and not forcemerge:
-            if modified or added or removed:
-                raise util.Abort(_("outstanding uncommitted changes"))
-
-        if not forcemerge and not force:
-            for f in unknown:
-                if f in m2:
-                    t1 = self.wread(f)
-                    t2 = self.file(f).read(m2[f])
-                    if cmp(t1, t2) != 0:
-                        raise util.Abort(_("'%s' already exists in the working"
-                                           " dir and differs from remote") % f)
-
-        # resolve the manifest to determine which files
-        # we care about merging
-        self.ui.note(_("resolving manifests\n"))
-        self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
-                      (force, allow, moddirstate, linear_path))
-        self.ui.debug(_(" ancestor %s local %s remote %s\n") %
-                      (short(man), short(m1n), short(m2n)))
-
-        merge = {}
-        get = {}
-        remove = []
-
-        # construct a working dir manifest
-        mw = m1.copy()
-        mfw = mf1.copy()
-        umap = dict.fromkeys(unknown)
-
-        for f in added + modified + unknown:
-            mw[f] = ""
-            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
-
-        if moddirstate and not wlock:
-            wlock = self.wlock()
-
-        for f in deleted + removed:
-            if f in mw:
-                del mw[f]
-
-            # If we're jumping between revisions (as opposed to merging),
-            # and if neither the working directory nor the target rev has
-            # the file, then we need to remove it from the dirstate, to
-            # prevent the dirstate from listing the file when it is no
-            # longer in the manifest.
-            if moddirstate and linear_path and f not in m2:
-                self.dirstate.forget((f,))
-
-        # Compare manifests
-        for f, n in mw.iteritems():
-            if choose and not choose(f):
-                continue
-            if f in m2:
-                s = 0
-
-                # is the wfile new since m1, and match m2?
-                if f not in m1:
-                    t1 = self.wread(f)
-                    t2 = self.file(f).read(m2[f])
-                    if cmp(t1, t2) == 0:
-                        n = m2[f]
-                    del t1, t2
-
-                # are files different?
-                if n != m2[f]:
-                    a = ma.get(f, nullid)
-                    # are both different from the ancestor?
-                    if n != a and m2[f] != a:
-                        self.ui.debug(_(" %s versions differ, resolve\n") % f)
-                        # merge executable bits
-                        # "if we changed or they changed, change in merge"
-                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
-                        mode = ((a^b) | (a^c)) ^ a
-                        merge[f] = (m1.get(f, nullid), m2[f], mode)
-                        s = 1
-                    # are we clobbering?
-                    # is remote's version newer?
-                    # or are we going back in time?
-                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
-                        self.ui.debug(_(" remote %s is newer, get\n") % f)
-                        get[f] = m2[f]
-                        s = 1
-                elif f in umap or f in added:
-                    # this unknown file is the same as the checkout
-                    # we need to reset the dirstate if the file was added
-                    get[f] = m2[f]
-
-                if not s and mfw[f] != mf2[f]:
-                    if force:
-                        self.ui.debug(_(" updating permissions for %s\n") % f)
-                        util.set_exec(self.wjoin(f), mf2[f])
-                    else:
-                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
-                        mode = ((a^b) | (a^c)) ^ a
-                        if mode != b:
-                            self.ui.debug(_(" updating permissions for %s\n")
-                                          % f)
-                            util.set_exec(self.wjoin(f), mode)
-                del m2[f]
-            elif f in ma:
-                if n != ma[f]:
-                    r = _("d")
-                    if not force and (linear_path or allow):
-                        r = self.ui.prompt(
-                            (_(" local changed %s which remote deleted\n") % f) +
-                             _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
-                    if r == _("d"):
-                        remove.append(f)
-                else:
-                    self.ui.debug(_("other deleted %s\n") % f)
-                    remove.append(f) # other deleted it
-            else:
-                # file is created on branch or in working directory
-                if force and f not in umap:
-                    self.ui.debug(_("remote deleted %s, clobbering\n") % f)
-                    remove.append(f)
-                elif n == m1.get(f, nullid): # same as parent
-                    if p2 == pa: # going backwards?
-                        self.ui.debug(_("remote deleted %s\n") % f)
-                        remove.append(f)
-                    else:
-                        self.ui.debug(_("local modified %s, keeping\n") % f)
-                else:
-                    self.ui.debug(_("working dir created %s, keeping\n") % f)
-
-        for f, n in m2.iteritems():
-            if choose and not choose(f):
-                continue
-            if f[0] == "/":
-                continue
-            if f in ma and n != ma[f]:
-                r = _("k")
-                if not force and (linear_path or allow):
-                    r = self.ui.prompt(
-                        (_("remote changed %s which local deleted\n") % f) +
-                         _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
-                if r == _("k"):
-                    get[f] = n
-            elif f not in ma:
-                self.ui.debug(_("remote created %s\n") % f)
-                get[f] = n
-            else:
-                if force or p2 == pa: # going backwards?
-                    self.ui.debug(_("local deleted %s, recreating\n") % f)
-                    get[f] = n
-                else:
-                    self.ui.debug(_("local deleted %s\n") % f)
-
-        del mw, m1, m2, ma
-
-        if force:
-            for f in merge:
-                get[f] = merge[f][1]
-            merge = {}
-
-        if linear_path or force:
-            # we don't need to do any magic, just jump to the new rev
-            branch_merge = False
-            p1, p2 = p2, nullid
-        else:
-            if not allow:
-                self.ui.status(_("this update spans a branch"
-                                 " affecting the following files:\n"))
-                fl = merge.keys() + get.keys()
-                fl.sort()
-                for f in fl:
-                    cf = ""
-                    if f in merge:
-                        cf = _(" (resolve)")
-                    self.ui.status(" %s%s\n" % (f, cf))
-                self.ui.warn(_("aborting update spanning branches!\n"))
-                self.ui.status(_("(use 'hg merge' to merge across branches"
-                                 " or 'hg update -C' to lose changes)\n"))
-                return 1
-            branch_merge = True
-
-        xp1 = hex(p1)
-        xp2 = hex(p2)
-        if p2 == nullid: xxp2 = ''
-        else: xxp2 = xp2
-
-        self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
-
-        # get the files we don't need to change
-        files = get.keys()
-        files.sort()
-        for f in files:
-            if f[0] == "/":
-                continue
-            self.ui.note(_("getting %s\n") % f)
-            t = self.file(f).read(get[f])
-            self.wwrite(f, t)
-            util.set_exec(self.wjoin(f), mf2[f])
-            if moddirstate:
-                if branch_merge:
-                    self.dirstate.update([f], 'n', st_mtime=-1)
-                else:
-                    self.dirstate.update([f], 'n')
-
-        # merge the tricky bits
-        failedmerge = []
-        files = merge.keys()
-        files.sort()
-        for f in files:
-            self.ui.status(_("merging %s\n") % f)
-            my, other, flag = merge[f]
-            ret = self.merge3(f, my, other, xp1, xp2)
-            if ret:
-                err = True
-                failedmerge.append(f)
-            util.set_exec(self.wjoin(f), flag)
-            if moddirstate:
-                if branch_merge:
-                    # We've done a branch merge, mark this file as merged
-                    # so that we properly record the merger later
-                    self.dirstate.update([f], 'm')
-                else:
-                    # We've update-merged a locally modified file, so
-                    # we set the dirstate to emulate a normal checkout
-                    # of that file some time in the past. Thus our
-                    # merge will appear as a normal local file
-                    # modification.
-                    f_len = len(self.file(f).read(other))
-                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
-
-        remove.sort()
-        for f in remove:
-            self.ui.note(_("removing %s\n") % f)
-            util.audit_path(f)
-            try:
-                util.unlink(self.wjoin(f))
-            except OSError, inst:
-                if inst.errno != errno.ENOENT:
-                    self.ui.warn(_("update failed to remove %s: %s!\n") %
-                                 (f, inst.strerror))
-        if moddirstate:
-            if branch_merge:
-                self.dirstate.update(remove, 'r')
-            else:
-                self.dirstate.forget(remove)
-
-        if moddirstate:
-            self.dirstate.setparents(p1, p2)
-
-        if show_stats:
-            stats = ((len(get), _("updated")),
-                     (len(merge) - len(failedmerge), _("merged")),
-                     (len(remove), _("removed")),
-                     (len(failedmerge), _("unresolved")))
-            note = ", ".join([_("%d files %s") % s for s in stats])
-            self.ui.status("%s\n" % note)
-        if moddirstate:
-            if branch_merge:
-                if failedmerge:
-                    self.ui.status(_("There are unresolved merges,"
-                                    " you can redo the full merge using:\n"
-                                    "  hg update -C %s\n"
-                                    "  hg merge %s\n"
-                                    % (self.changelog.rev(p1),
-                                        self.changelog.rev(p2))))
-                elif remind:
-                    self.ui.status(_("(branch merge, don't forget "
-                                     "to commit)\n"))
-            elif failedmerge:
-                self.ui.status(_("There are unresolved merges with"
-                                 " locally modified files.\n"))
-
-        self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
-        return err
-
-    def merge3(self, fn, my, other, p1, p2):
-        """perform a 3-way merge in the working directory"""
-
-        def temp(prefix, node):
-            pre = "%s~%s." % (os.path.basename(fn), prefix)
-            (fd, name) = tempfile.mkstemp(prefix=pre)
-            f = os.fdopen(fd, "wb")
-            self.wwrite(fn, fl.read(node), f)
-            f.close()
-            return name
-
-        fl = self.file(fn)
-        base = fl.ancestor(my, other)
-        a = self.wjoin(fn)
-        b = temp("base", base)
-        c = temp("other", other)
-
-        self.ui.note(_("resolving %s\n") % fn)
-        self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
-                              (fn, short(my), short(other), short(base)))
-
-        cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
-               or "hgmerge")
-        r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
-                        environ={'HG_FILE': fn,
-                                 'HG_MY_NODE': p1,
-                                 'HG_OTHER_NODE': p2,
-                                 'HG_FILE_MY_NODE': hex(my),
-                                 'HG_FILE_OTHER_NODE': hex(other),
-                                 'HG_FILE_BASE_NODE': hex(base)})
-        if r:
-            self.ui.warn(_("merging %s failed!\n") % fn)
-
-        os.unlink(b)
-        os.unlink(c)
-        return r
-
-    def verify(self):
-        filelinkrevs = {}
-        filenodes = {}
-        changesets = revisions = files = 0
-        errors = [0]
-        warnings = [0]
-        neededmanifests = {}
-
-        def err(msg):
-            self.ui.warn(msg + "\n")
-            errors[0] += 1
-
-        def warn(msg):
-            self.ui.warn(msg + "\n")
-            warnings[0] += 1
-
-        def checksize(obj, name):
-            d = obj.checksize()
-            if d[0]:
-                err(_("%s data length off by %d bytes") % (name, d[0]))
-            if d[1]:
-                err(_("%s index contains %d extra bytes") % (name, d[1]))
-
-        def checkversion(obj, name):
-            if obj.version != revlog.REVLOGV0:
-                if not revlogv1:
-                    warn(_("warning: `%s' uses revlog format 1") % name)
-            elif revlogv1:
-                warn(_("warning: `%s' uses revlog format 0") % name)
-
-        revlogv1 = self.revlogversion != revlog.REVLOGV0
-        if self.ui.verbose or revlogv1 != self.revlogv1:
-            self.ui.status(_("repository uses revlog format %d\n") %
-                           (revlogv1 and 1 or 0))
-
-        seen = {}
-        self.ui.status(_("checking changesets\n"))
-        checksize(self.changelog, "changelog")
-
-        for i in range(self.changelog.count()):
-            changesets += 1
-            n = self.changelog.node(i)
-            l = self.changelog.linkrev(n)
-            if l != i:
-                err(_("incorrect link (%d) for changeset revision %d") %(l, i))
-            if n in seen:
-                err(_("duplicate changeset at revision %d") % i)
-            seen[n] = 1
-
-            for p in self.changelog.parents(n):
-                if p not in self.changelog.nodemap:
-                    err(_("changeset %s has unknown parent %s") %
-                                 (short(n), short(p)))
-            try:
-                changes = self.changelog.read(n)
-            except KeyboardInterrupt:
-                self.ui.warn(_("interrupted"))
-                raise
-            except Exception, inst:
-                err(_("unpacking changeset %s: %s") % (short(n), inst))
-                continue
-
-            neededmanifests[changes[0]] = n
-
-            for f in changes[3]:
-                filelinkrevs.setdefault(f, []).append(i)
-
-        seen = {}
-        self.ui.status(_("checking manifests\n"))
-        checkversion(self.manifest, "manifest")
-        checksize(self.manifest, "manifest")
-
-        for i in range(self.manifest.count()):
-            n = self.manifest.node(i)
-            l = self.manifest.linkrev(n)
-
-            if l < 0 or l >= self.changelog.count():
-                err(_("bad manifest link (%d) at revision %d") % (l, i))
-
-            if n in neededmanifests:
-                del neededmanifests[n]
-
-            if n in seen:
-                err(_("duplicate manifest at revision %d") % i)
-
-            seen[n] = 1
-
-            for p in self.manifest.parents(n):
-                if p not in self.manifest.nodemap:
-                    err(_("manifest %s has unknown parent %s") %
-                        (short(n), short(p)))
-
-            try:
-                delta = mdiff.patchtext(self.manifest.delta(n))
-            except KeyboardInterrupt:
-                self.ui.warn(_("interrupted"))
-                raise
-            except Exception, inst:
-                err(_("unpacking manifest %s: %s") % (short(n), inst))
-                continue
-
-            try:
-                ff = [ l.split('\0') for l in delta.splitlines() ]
-                for f, fn in ff:
-                    filenodes.setdefault(f, {})[bin(fn[:40])] = 1
-            except (ValueError, TypeError), inst:
-                err(_("broken delta in manifest %s: %s") % (short(n), inst))
-
-        self.ui.status(_("crosschecking files in changesets and manifests\n"))
-
-        for m, c in neededmanifests.items():
-            err(_("Changeset %s refers to unknown manifest %s") %
-                (short(m), short(c)))
-        del neededmanifests
-
-        for f in filenodes:
-            if f not in filelinkrevs:
-                err(_("file %s in manifest but not in changesets") % f)
-
-        for f in filelinkrevs:
-            if f not in filenodes:
-                err(_("file %s in changeset but not in manifest") % f)
-
-        self.ui.status(_("checking files\n"))
-        ff = filenodes.keys()
-        ff.sort()
-        for f in ff:
-            if f == "/dev/null":
-                continue
-            files += 1
-            if not f:
-                err(_("file without name in manifest %s") % short(n))
-                continue
-            fl = self.file(f)
-            checkversion(fl, f)
-            checksize(fl, f)
-
-            nodes = {nullid: 1}
-            seen = {}
-            for i in range(fl.count()):
-                revisions += 1
-                n = fl.node(i)
-
-                if n in seen:
-                    err(_("%s: duplicate revision %d") % (f, i))
-                if n not in filenodes[f]:
-                    err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
-                else:
-                    del filenodes[f][n]
-
-                flr = fl.linkrev(n)
-                if flr not in filelinkrevs.get(f, []):
-                    err(_("%s:%s points to unexpected changeset %d")
-                            % (f, short(n), flr))
-                else:
-                    filelinkrevs[f].remove(flr)
-
-                # verify contents
-                try:
-                    t = fl.read(n)
-                except KeyboardInterrupt:
-                    self.ui.warn(_("interrupted"))
-                    raise
-                except Exception, inst:
-                    err(_("unpacking file %s %s: %s") % (f, short(n), inst))
-
-                # verify parents
-                (p1, p2) = fl.parents(n)
-                if p1 not in nodes:
-                    err(_("file %s:%s unknown parent 1 %s") %
-                        (f, short(n), short(p1)))
-                if p2 not in nodes:
-                    err(_("file %s:%s unknown parent 2 %s") %
-                            (f, short(n), short(p1)))
-                nodes[n] = 1
-
-            # cross-check
-            for node in filenodes[f]:
-                err(_("node %s in manifests not in %s") % (hex(node), f))
-
-        self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
-                       (files, changesets, revisions))
-
-        if warnings[0]:
-            self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
-        if errors[0]:
-            self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
-            return 1
 
     def stream_in(self, remote):
         fp = remote.stream_out()
@@ -2248,7 +1722,7 @@
                         util.bytecount(total_bytes / elapsed)))
         self.reload()
         return len(self.heads()) + 1
-        
+
     def clone(self, remote, heads=[], stream=False):
         '''clone remote repository.
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/merge.py	Mon Aug 07 16:47:06 2006 -0500
@@ -0,0 +1,349 @@
+# merge.py - directory-level update/merge handling for Mercurial
+#
+# Copyright 2006 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from i18n import gettext as _
+from demandload import *
+demandload(globals(), "util os tempfile")
+
+def merge3(repo, fn, my, other, p1, p2):
+    """perform a 3-way merge in the working directory"""
+
+    def temp(prefix, node):
+        pre = "%s~%s." % (os.path.basename(fn), prefix)
+        (fd, name) = tempfile.mkstemp(prefix=pre)
+        f = os.fdopen(fd, "wb")
+        repo.wwrite(fn, fl.read(node), f)
+        f.close()
+        return name
+
+    fl = repo.file(fn)
+    base = fl.ancestor(my, other)
+    a = repo.wjoin(fn)
+    b = temp("base", base)
+    c = temp("other", other)
+
+    repo.ui.note(_("resolving %s\n") % fn)
+    repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
+                          (fn, short(my), short(other), short(base)))
+
+    cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
+           or "hgmerge")
+    r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
+                    environ={'HG_FILE': fn,
+                             'HG_MY_NODE': p1,
+                             'HG_OTHER_NODE': p2,
+                             'HG_FILE_MY_NODE': hex(my),
+                             'HG_FILE_OTHER_NODE': hex(other),
+                             'HG_FILE_BASE_NODE': hex(base)})
+    if r:
+        repo.ui.warn(_("merging %s failed!\n") % fn)
+
+    os.unlink(b)
+    os.unlink(c)
+    return r
+
+def update(repo, node, allow=False, force=False, choose=None,
+           moddirstate=True, forcemerge=False, wlock=None, show_stats=True,
+           remind=True):
+    pl = repo.dirstate.parents()
+    if not force and pl[1] != nullid:
+        raise util.Abort(_("outstanding uncommitted merges"))
+
+    err = False
+
+    p1, p2 = pl[0], node
+    pa = repo.changelog.ancestor(p1, p2)
+    m1n = repo.changelog.read(p1)[0]
+    m2n = repo.changelog.read(p2)[0]
+    man = repo.manifest.ancestor(m1n, m2n)
+    m1 = repo.manifest.read(m1n)
+    mf1 = repo.manifest.readflags(m1n)
+    m2 = repo.manifest.read(m2n).copy()
+    mf2 = repo.manifest.readflags(m2n)
+    ma = repo.manifest.read(man)
+    mfa = repo.manifest.readflags(man)
+
+    modified, added, removed, deleted, unknown = repo.changes()
+
+    # is this a jump, or a merge?  i.e. is there a linear path
+    # from p1 to p2?
+    linear_path = (pa == p1 or pa == p2)
+
+    if allow and linear_path:
+        raise util.Abort(_("there is nothing to merge, just use "
+                           "'hg update' or look at 'hg heads'"))
+    if allow and not forcemerge:
+        if modified or added or removed:
+            raise util.Abort(_("outstanding uncommitted changes"))
+
+    if not forcemerge and not force:
+        for f in unknown:
+            if f in m2:
+                t1 = repo.wread(f)
+                t2 = repo.file(f).read(m2[f])
+                if cmp(t1, t2) != 0:
+                    raise util.Abort(_("'%s' already exists in the working"
+                                       " dir and differs from remote") % f)
+
+    # resolve the manifest to determine which files
+    # we care about merging
+    repo.ui.note(_("resolving manifests\n"))
+    repo.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
+                  (force, allow, moddirstate, linear_path))
+    repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
+                  (short(man), short(m1n), short(m2n)))
+
+    merge = {}
+    get = {}
+    remove = []
+
+    # construct a working dir manifest
+    mw = m1.copy()
+    mfw = mf1.copy()
+    umap = dict.fromkeys(unknown)
+
+    for f in added + modified + unknown:
+        mw[f] = ""
+        mfw[f] = util.is_exec(repo.wjoin(f), mfw.get(f, False))
+
+    if moddirstate and not wlock:
+        wlock = repo.wlock()
+
+    for f in deleted + removed:
+        if f in mw:
+            del mw[f]
+
+        # If we're jumping between revisions (as opposed to merging),
+        # and if neither the working directory nor the target rev has
+        # the file, then we need to remove it from the dirstate, to
+        # prevent the dirstate from listing the file when it is no
+        # longer in the manifest.
+        if moddirstate and linear_path and f not in m2:
+            repo.dirstate.forget((f,))
+
+    # Compare manifests
+    for f, n in mw.iteritems():
+        if choose and not choose(f):
+            continue
+        if f in m2:
+            s = 0
+
+            # is the wfile new since m1, and match m2?
+            if f not in m1:
+                t1 = repo.wread(f)
+                t2 = repo.file(f).read(m2[f])
+                if cmp(t1, t2) == 0:
+                    n = m2[f]
+                del t1, t2
+
+            # are files different?
+            if n != m2[f]:
+                a = ma.get(f, nullid)
+                # are both different from the ancestor?
+                if n != a and m2[f] != a:
+                    repo.ui.debug(_(" %s versions differ, resolve\n") % f)
+                    # merge executable bits
+                    # "if we changed or they changed, change in merge"
+                    a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
+                    mode = ((a^b) | (a^c)) ^ a
+                    merge[f] = (m1.get(f, nullid), m2[f], mode)
+                    s = 1
+                # are we clobbering?
+                # is remote's version newer?
+                # or are we going back in time?
+                elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
+                    repo.ui.debug(_(" remote %s is newer, get\n") % f)
+                    get[f] = m2[f]
+                    s = 1
+            elif f in umap or f in added:
+                # this unknown file is the same as the checkout
+                # we need to reset the dirstate if the file was added
+                get[f] = m2[f]
+
+            if not s and mfw[f] != mf2[f]:
+                if force:
+                    repo.ui.debug(_(" updating permissions for %s\n") % f)
+                    util.set_exec(repo.wjoin(f), mf2[f])
+                else:
+                    a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
+                    mode = ((a^b) | (a^c)) ^ a
+                    if mode != b:
+                        repo.ui.debug(_(" updating permissions for %s\n")
+                                      % f)
+                        util.set_exec(repo.wjoin(f), mode)
+            del m2[f]
+        elif f in ma:
+            if n != ma[f]:
+                r = _("d")
+                if not force and (linear_path or allow):
+                    r = repo.ui.prompt(
+                        (_(" local changed %s which remote deleted\n") % f) +
+                         _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
+                if r == _("d"):
+                    remove.append(f)
+            else:
+                repo.ui.debug(_("other deleted %s\n") % f)
+                remove.append(f) # other deleted it
+        else:
+            # file is created on branch or in working directory
+            if force and f not in umap:
+                repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
+                remove.append(f)
+            elif n == m1.get(f, nullid): # same as parent
+                if p2 == pa: # going backwards?
+                    repo.ui.debug(_("remote deleted %s\n") % f)
+                    remove.append(f)
+                else:
+                    repo.ui.debug(_("local modified %s, keeping\n") % f)
+            else:
+                repo.ui.debug(_("working dir created %s, keeping\n") % f)
+
+    for f, n in m2.iteritems():
+        if choose and not choose(f):
+            continue
+        if f[0] == "/":
+            continue
+        if f in ma and n != ma[f]:
+            r = _("k")
+            if not force and (linear_path or allow):
+                r = repo.ui.prompt(
+                    (_("remote changed %s which local deleted\n") % f) +
+                     _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
+            if r == _("k"):
+                get[f] = n
+        elif f not in ma:
+            repo.ui.debug(_("remote created %s\n") % f)
+            get[f] = n
+        else:
+            if force or p2 == pa: # going backwards?
+                repo.ui.debug(_("local deleted %s, recreating\n") % f)
+                get[f] = n
+            else:
+                repo.ui.debug(_("local deleted %s\n") % f)
+
+    del mw, m1, m2, ma
+
+    if force:
+        for f in merge:
+            get[f] = merge[f][1]
+        merge = {}
+
+    if linear_path or force:
+        # we don't need to do any magic, just jump to the new rev
+        branch_merge = False
+        p1, p2 = p2, nullid
+    else:
+        if not allow:
+            repo.ui.status(_("this update spans a branch"
+                             " affecting the following files:\n"))
+            fl = merge.keys() + get.keys()
+            fl.sort()
+            for f in fl:
+                cf = ""
+                if f in merge:
+                    cf = _(" (resolve)")
+                repo.ui.status(" %s%s\n" % (f, cf))
+            repo.ui.warn(_("aborting update spanning branches!\n"))
+            repo.ui.status(_("(use 'hg merge' to merge across branches"
+                             " or 'hg update -C' to lose changes)\n"))
+            return 1
+        branch_merge = True
+
+    xp1 = hex(p1)
+    xp2 = hex(p2)
+    if p2 == nullid: xxp2 = ''
+    else: xxp2 = xp2
+
+    repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
+
+    # get the files we don't need to change
+    files = get.keys()
+    files.sort()
+    for f in files:
+        if f[0] == "/":
+            continue
+        repo.ui.note(_("getting %s\n") % f)
+        t = repo.file(f).read(get[f])
+        repo.wwrite(f, t)
+        util.set_exec(repo.wjoin(f), mf2[f])
+        if moddirstate:
+            if branch_merge:
+                repo.dirstate.update([f], 'n', st_mtime=-1)
+            else:
+                repo.dirstate.update([f], 'n')
+
+    # merge the tricky bits
+    failedmerge = []
+    files = merge.keys()
+    files.sort()
+    for f in files:
+        repo.ui.status(_("merging %s\n") % f)
+        my, other, flag = merge[f]
+        ret = merge3(repo, f, my, other, xp1, xp2)
+        if ret:
+            err = True
+            failedmerge.append(f)
+        util.set_exec(repo.wjoin(f), flag)
+        if moddirstate:
+            if branch_merge:
+                # We've done a branch merge, mark this file as merged
+                # so that we properly record the merger later
+                repo.dirstate.update([f], 'm')
+            else:
+                # We've update-merged a locally modified file, so
+                # we set the dirstate to emulate a normal checkout
+                # of that file some time in the past. Thus our
+                # merge will appear as a normal local file
+                # modification.
+                f_len = len(repo.file(f).read(other))
+                repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
+
+    remove.sort()
+    for f in remove:
+        repo.ui.note(_("removing %s\n") % f)
+        util.audit_path(f)
+        try:
+            util.unlink(repo.wjoin(f))
+        except OSError, inst:
+            if inst.errno != errno.ENOENT:
+                repo.ui.warn(_("update failed to remove %s: %s!\n") %
+                             (f, inst.strerror))
+    if moddirstate:
+        if branch_merge:
+            repo.dirstate.update(remove, 'r')
+        else:
+            repo.dirstate.forget(remove)
+
+    if moddirstate:
+        repo.dirstate.setparents(p1, p2)
+
+    if show_stats:
+        stats = ((len(get), _("updated")),
+                 (len(merge) - len(failedmerge), _("merged")),
+                 (len(remove), _("removed")),
+                 (len(failedmerge), _("unresolved")))
+        note = ", ".join([_("%d files %s") % s for s in stats])
+        repo.ui.status("%s\n" % note)
+    if moddirstate:
+        if branch_merge:
+            if failedmerge:
+                repo.ui.status(_("There are unresolved merges,"
+                                " you can redo the full merge using:\n"
+                                "  hg update -C %s\n"
+                                "  hg merge %s\n"
+                                % (repo.changelog.rev(p1),
+                                    repo.changelog.rev(p2))))
+            elif remind:
+                repo.ui.status(_("(branch merge, don't forget to commit)\n"))
+        elif failedmerge:
+            repo.ui.status(_("There are unresolved merges with"
+                             " locally modified files.\n"))
+
+    repo.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
+    return err
+
--- a/mercurial/revlog.py	Mon Aug 07 19:08:55 2006 -0700
+++ b/mercurial/revlog.py	Mon Aug 07 16:47:06 2006 -0500
@@ -744,8 +744,6 @@
 
     def lookup(self, id):
         """locate a node based on revision number or subset of hex nodeid"""
-        if id in self.nodemap:
-            return id
         if type(id) == type(0):
             return self.node(id)
         try:
@@ -760,10 +758,13 @@
                 if hex(n).startswith(id):
                     c.append(n)
             if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
-            if len(c) < 1: raise RevlogError(_("No match found"))
-            return c[0]
+            if len(c) == 1: return c[0]
 
-        return None
+        # might need fixing if we change hash lengths
+        if len(id) == 20 and id in self.nodemap:
+            return id
+
+        raise RevlogError(_("No match found"))
 
     def diff(self, a, b):
         """return a delta between two revisions"""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/verify.py	Mon Aug 07 16:47:06 2006 -0500
@@ -0,0 +1,200 @@
+# verify.py - repository integrity checking for Mercurial
+#
+# Copyright 2006 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms
+# of the GNU General Public License, incorporated herein by reference.
+
+from node import *
+from i18n import gettext as _
+import revlog, mdiff
+
+def verify(repo):
+    filelinkrevs = {}
+    filenodes = {}
+    changesets = revisions = files = 0
+    errors = [0]
+    warnings = [0]
+    neededmanifests = {}
+
+    def err(msg):
+        repo.ui.warn(msg + "\n")
+        errors[0] += 1
+
+    def warn(msg):
+        repo.ui.warn(msg + "\n")
+        warnings[0] += 1
+
+    def checksize(obj, name):
+        d = obj.checksize()
+        if d[0]:
+            err(_("%s data length off by %d bytes") % (name, d[0]))
+        if d[1]:
+            err(_("%s index contains %d extra bytes") % (name, d[1]))
+
+    def checkversion(obj, name):
+        if obj.version != revlog.REVLOGV0:
+            if not revlogv1:
+                warn(_("warning: `%s' uses revlog format 1") % name)
+        elif revlogv1:
+            warn(_("warning: `%s' uses revlog format 0") % name)
+
+    revlogv1 = repo.revlogversion != revlog.REVLOGV0
+    if repo.ui.verbose or revlogv1 != repo.revlogv1:
+        repo.ui.status(_("repository uses revlog format %d\n") %
+                       (revlogv1 and 1 or 0))
+
+    seen = {}
+    repo.ui.status(_("checking changesets\n"))
+    checksize(repo.changelog, "changelog")
+
+    for i in range(repo.changelog.count()):
+        changesets += 1
+        n = repo.changelog.node(i)
+        l = repo.changelog.linkrev(n)
+        if l != i:
+            err(_("incorrect link (%d) for changeset revision %d") %(l, i))
+        if n in seen:
+            err(_("duplicate changeset at revision %d") % i)
+        seen[n] = 1
+
+        for p in repo.changelog.parents(n):
+            if p not in repo.changelog.nodemap:
+                err(_("changeset %s has unknown parent %s") %
+                             (short(n), short(p)))
+        try:
+            changes = repo.changelog.read(n)
+        except KeyboardInterrupt:
+            repo.ui.warn(_("interrupted"))
+            raise
+        except Exception, inst:
+            err(_("unpacking changeset %s: %s") % (short(n), inst))
+            continue
+
+        neededmanifests[changes[0]] = n
+
+        for f in changes[3]:
+            filelinkrevs.setdefault(f, []).append(i)
+
+    seen = {}
+    repo.ui.status(_("checking manifests\n"))
+    checkversion(repo.manifest, "manifest")
+    checksize(repo.manifest, "manifest")
+
+    for i in range(repo.manifest.count()):
+        n = repo.manifest.node(i)
+        l = repo.manifest.linkrev(n)
+
+        if l < 0 or l >= repo.changelog.count():
+            err(_("bad manifest link (%d) at revision %d") % (l, i))
+
+        if n in neededmanifests:
+            del neededmanifests[n]
+
+        if n in seen:
+            err(_("duplicate manifest at revision %d") % i)
+
+        seen[n] = 1
+
+        for p in repo.manifest.parents(n):
+            if p not in repo.manifest.nodemap:
+                err(_("manifest %s has unknown parent %s") %
+                    (short(n), short(p)))
+
+        try:
+            delta = mdiff.patchtext(repo.manifest.delta(n))
+        except KeyboardInterrupt:
+            repo.ui.warn(_("interrupted"))
+            raise
+        except Exception, inst:
+            err(_("unpacking manifest %s: %s") % (short(n), inst))
+            continue
+
+        try:
+            ff = [ l.split('\0') for l in delta.splitlines() ]
+            for f, fn in ff:
+                filenodes.setdefault(f, {})[bin(fn[:40])] = 1
+        except (ValueError, TypeError), inst:
+            err(_("broken delta in manifest %s: %s") % (short(n), inst))
+
+    repo.ui.status(_("crosschecking files in changesets and manifests\n"))
+
+    for m, c in neededmanifests.items():
+        err(_("Changeset %s refers to unknown manifest %s") %
+            (short(m), short(c)))
+    del neededmanifests
+
+    for f in filenodes:
+        if f not in filelinkrevs:
+            err(_("file %s in manifest but not in changesets") % f)
+
+    for f in filelinkrevs:
+        if f not in filenodes:
+            err(_("file %s in changeset but not in manifest") % f)
+
+    repo.ui.status(_("checking files\n"))
+    ff = filenodes.keys()
+    ff.sort()
+    for f in ff:
+        if f == "/dev/null":
+            continue
+        files += 1
+        if not f:
+            err(_("file without name in manifest %s") % short(n))
+            continue
+        fl = repo.file(f)
+        checkversion(fl, f)
+        checksize(fl, f)
+
+        nodes = {nullid: 1}
+        seen = {}
+        for i in range(fl.count()):
+            revisions += 1
+            n = fl.node(i)
+
+            if n in seen:
+                err(_("%s: duplicate revision %d") % (f, i))
+            if n not in filenodes[f]:
+                err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
+            else:
+                del filenodes[f][n]
+
+            flr = fl.linkrev(n)
+            if flr not in filelinkrevs.get(f, []):
+                err(_("%s:%s points to unexpected changeset %d")
+                        % (f, short(n), flr))
+            else:
+                filelinkrevs[f].remove(flr)
+
+            # verify contents
+            try:
+                t = fl.read(n)
+            except KeyboardInterrupt:
+                repo.ui.warn(_("interrupted"))
+                raise
+            except Exception, inst:
+                err(_("unpacking file %s %s: %s") % (f, short(n), inst))
+
+            # verify parents
+            (p1, p2) = fl.parents(n)
+            if p1 not in nodes:
+                err(_("file %s:%s unknown parent 1 %s") %
+                    (f, short(n), short(p1)))
+            if p2 not in nodes:
+                err(_("file %s:%s unknown parent 2 %s") %
+                        (f, short(n), short(p1)))
+            nodes[n] = 1
+
+        # cross-check
+        for node in filenodes[f]:
+            err(_("node %s in manifests not in %s") % (hex(node), f))
+
+    repo.ui.status(_("%d files, %d changesets, %d total revisions\n") %
+                   (files, changesets, revisions))
+
+    if warnings[0]:
+        repo.ui.warn(_("%d warnings encountered!\n") % warnings[0])
+    if errors[0]:
+        repo.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
+        return 1
+
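The net effect for code that previously called the removed localrepo methods is to go through the new module-level helpers in mercurial/hg.py, as the call-site edits in hgext/mq.py and mercurial/commands.py above do. A minimal sketch of that pattern, assuming a Mercurial of this vintage, a ui object constructed from mercurial.ui, and a placeholder repository path:

    from mercurial import ui as uimod, hg

    u = uimod.ui()                              # assumed default ui constructor
    repo = hg.repository(u, "/path/to/repo")    # placeholder path

    # formerly repo.update(repo.changelog.tip()); now delegates to merge.update()
    hg.update(repo, repo.changelog.tip())

    # formerly repo.verify(); now delegates to verify.verify()
    hg.verify(repo)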