changeset 17839:d118a4f4fd16 stable 2.4-rc

merge default into stable for 2.4 code freeze
author Matt Mackall <mpm@selenic.com>
date Fri, 19 Oct 2012 01:34:50 -0500
parents 605fe310691f d51364b318ea
children af7ccbf2addc
files mercurial/templates/template-vars.txt tests/filtercr.py tests/test-convert-mtn-rename-directory.out
diffstat 254 files changed, 7434 insertions(+), 2250 deletions(-)
--- a/.hgignore	Mon Oct 08 00:19:30 2012 +0200
+++ b/.hgignore	Fri Oct 19 01:34:50 2012 -0500
@@ -32,6 +32,7 @@
 MANIFEST.in
 patches
 mercurial/__version__.py
+mercurial/hgpythonlib.h
 mercurial.egg-info
 .DS_Store
 tags
--- a/contrib/bash_completion	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/bash_completion	Fri Oct 19 01:34:50 2012 -0500
@@ -386,6 +386,13 @@
     fi
 }
 
+_hg_cmd_rebase() {
+   if [[ "$prev" = @(-s|--source|-d|--dest|-b|--base|-r|--rev) ]]; then
+       _hg_labels
+       return
+   fi
+}
+
 _hg_cmd_strip()
 {
     _hg_labels
--- a/contrib/check-code.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/check-code.py	Fri Oct 19 01:34:50 2012 -0500
@@ -91,7 +91,7 @@
 uprefix = r"^  \$ "
 utestpats = [
   [
-    (r'^(\S|  $ ).*(\S[ \t]+|^[ \t]+)\n', "trailing whitespace on non-output"),
+    (r'^(\S.*||  [$>] .*)[ \t]\n', "trailing whitespace on non-output"),
     (uprefix + r'.*\|\s*sed[^|>\n]*\n',
      "use regex test output patterns instead of sed"),
     (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
@@ -116,6 +116,7 @@
         utestpats[i].append((p, m))
 
 utestfilters = [
+    (r"<<(\S+)((.|\n)*?\n  > \1)", rephere),
     (r"( *)(#([^\n]*\S)?)", repcomment),
 ]
 
@@ -136,7 +137,10 @@
     (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
     (r'^\s+\w+=\w+[^,)\n]$', "missing whitespace in assignment"),
     (r'(\s+)try:\n((?:\n|\1\s.*\n)+?)\1except.*?:\n'
-     r'((?:\n|\1\s.*\n)+?)\1finally:', 'no try/except/finally in Py2.4'),
+     r'((?:\n|\1\s.*\n)+?)\1finally:', 'no try/except/finally in Python 2.4'),
+    (r'(\s+)try:\n((?:\n|\1\s.*\n)*?)\1\s*yield\b.*?'
+     r'((?:\n|\1\s.*\n)+?)\1finally:',
+     'no yield inside try/finally in Python 2.4'),
     (r'.{81}', "line too long"),
     (r' x+[xo][\'"]\n\s+[\'"]x', 'string join across lines with no space'),
     (r'[^\n]\Z', "no trailing newline"),
@@ -190,8 +194,8 @@
      'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'),
     (r'opener\([^)]*\).read\(',
      "use opener.read() instead"),
-    (r'BaseException', 'not in Py2.4, use Exception'),
-    (r'os\.path\.relpath', 'os.path.relpath is not in Py2.5'),
+    (r'BaseException', 'not in Python 2.4, use Exception'),
+    (r'os\.path\.relpath', 'os.path.relpath is not in Python 2.5'),
     (r'opener\([^)]*\).write\(',
      "use opener.write() instead"),
     (r'[\s\(](open|file)\([^)]*\)\.read\(',
@@ -321,7 +325,7 @@
     :f: filepath
     :logfunc: function used to report error
               logfunc(filename, linenumber, linecontent, errormessage)
-    :maxerr: number of error to display before arborting.
+    :maxerr: number of error to display before aborting.
              Set to false (default) to report all errors
 
     return True if no error is found, False otherwise.
@@ -365,7 +369,7 @@
                 p, msg = pat
                 ignore = None
 
-            # fix-up regexes for multiline searches
+            # fix-up regexes for multi-line searches
             po = p
             # \s doesn't match \n
             p = re.sub(r'(?<!\\)\\s', r'[ \\t]', p)
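
The fix-up step above rewrites patterns written for single-line matching so they still behave when run across several lines: a bare \s is narrowed to [ \t] so it cannot swallow newlines. A minimal sketch of that substitution in isolation (the sample pattern is illustrative, not taken from check-code.py):

    import re

    # Narrow any unescaped '\s' to '[ \t]' so it no longer matches newlines
    # when a single-line pattern is applied to a multi-line buffer.
    pattern = r'try:\s+pass'
    fixed = re.sub(r'(?<!\\)\\s', r'[ \\t]', pattern)
    print(fixed)   # try:[ \t]+pass
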
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/lock-checker.py	Fri Oct 19 01:34:50 2012 -0500
@@ -0,0 +1,48 @@
+"""Extension to verify locks are obtained in the required places.
+
+This works by wrapping functions that should be surrounded by a lock
+and asserting the lock is held. Missing locks are called out with a
+traceback printed to stderr.
+
+This currently only checks store locks, not working copy locks.
+"""
+import os
+import traceback
+
+def _warnstack(ui, msg, skip=1):
+    '''issue warning with the message and the current stack, skipping the
+    skip last entries'''
+    ui.warn('%s at:\n' % msg)
+    entries = traceback.extract_stack()[:-skip]
+    fnmax = max(len(entry[0]) for entry in entries)
+    for fn, ln, func, _text in entries:
+        ui.warn(' %*s:%-4s in %s\n' % (fnmax, fn, ln, func))
+
+def _checklock(repo):
+    l = repo._lockref and repo._lockref()
+    if l is None or not l.held:
+        _warnstack(repo.ui, 'missing lock', skip=2)
+
+def reposetup(ui, repo):
+    orig = repo.__class__
+    class lockcheckrepo(repo.__class__):
+        def _writejournal(self, *args, **kwargs):
+            _checklock(self)
+            return orig._writejournal(self, *args, **kwargs)
+
+        def transaction(self, *args, **kwargs):
+            _checklock(self)
+            return orig.transaction(self, *args, **kwargs)
+
+        # TODO(durin42): kiilerix had a commented-out lock check in
+        # _writebranchcache and _writerequirements
+
+        def _tag(self, *args, **kwargs):
+            _checklock(self)
+            return orig._tag(self, *args, **kwargs)
+
+        def write(self, *args, **kwargs):
+            assert os.path.lexists(self._join('.hg/wlock'))
+            return orig.write(self, *args, **kwargs)
+
+    repo.__class__ = lockcheckrepo
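
lock-checker.py relies on swapping the repository's class for a subclass at reposetup time, so each wrapped method can check the store lock before delegating to the original implementation. A stripped-down sketch of that wrap-and-assert pattern on a hypothetical object (the FakeRepo/FakeLock names are illustrative, not Mercurial APIs):

    class FakeLock(object):
        held = 0

    class FakeRepo(object):
        def __init__(self):
            self._lock = FakeLock()
        def transaction(self):
            return 'tx'

    def addlockcheck(repo):
        orig = repo.__class__
        class lockcheckrepo(orig):
            def transaction(self):
                # warn if the lock is not held before delegating
                if not self._lock.held:
                    print('missing lock before transaction()')
                return orig.transaction(self)
        repo.__class__ = lockcheckrepo

    repo = FakeRepo()
    addlockcheck(repo)
    repo.transaction()       # warns: lock not held
    repo._lock.held = 1
    repo.transaction()       # silent once the lock is held
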
--- a/contrib/mercurial.el	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/mercurial.el	Fri Oct 19 01:34:50 2012 -0500
@@ -461,7 +461,7 @@
 (defun hg-complete-repo (string predicate all)
   "Attempt to complete a repository name.
 We complete on either symbolic names from Mercurial's config or real
-directory names from the file system.  We do not penalise URLs."
+directory names from the file system.  We do not penalize URLs."
   (or (if all
 	  (all-completions string hg-repo-completion-table predicate)
 	(try-completion string hg-repo-completion-table predicate))
--- a/contrib/perf.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/perf.py	Fri Oct 19 01:34:50 2012 -0500
@@ -180,21 +180,27 @@
     timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
 
 def perffncacheload(ui, repo):
-    from mercurial import scmutil, store
-    s = store.store(set(['store','fncache']), repo.path, scmutil.opener)
+    s = repo.store
     def d():
         s.fncache._load()
     timer(d)
 
 def perffncachewrite(ui, repo):
-    from mercurial import scmutil, store
-    s = store.store(set(['store','fncache']), repo.path, scmutil.opener)
+    s = repo.store
     s.fncache._load()
     def d():
         s.fncache._dirty = True
         s.fncache.write()
     timer(d)
 
+def perffncacheencode(ui, repo):
+    s = repo.store
+    s.fncache._load()
+    def d():
+        for p in s.fncache.entries:
+            s.encode(p)
+    timer(d)
+
 def perfdiffwd(ui, repo):
     """Profile diff of working directory changes"""
     options = {
@@ -226,6 +232,7 @@
     'perfcca': (perfcca, []),
     'perffncacheload': (perffncacheload, []),
     'perffncachewrite': (perffncachewrite, []),
+    'perffncacheencode': (perffncacheencode, []),
     'perflookup': (perflookup, []),
     'perfrevrange': (perfrevrange, []),
     'perfnodelookup': (perfnodelookup, []),
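
The new perffncacheencode command follows the same shape as the other perf commands: build a no-argument closure that exercises the hot path (here, running every fncache entry through the store's encoder) and hand it to timer(). A rough sketch of that timing pattern, using a simplified timer rather than perf.py's real helper:

    import time

    def timer(func, runs=3):
        # simplified stand-in for contrib/perf.py's timer(): run the closure
        # a few times and report the best wall-clock time
        best = None
        for _ in range(runs):
            start = time.time()
            func()
            elapsed = time.time() - start
            if best is None or elapsed < best:
                best = elapsed
        print('best of %d runs: %f s' % (runs, best))

    entries = ['data/foo%d.i' % i for i in range(10000)]
    timer(lambda: [e.upper() for e in entries])   # stand-in for s.encode(p)
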
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/synthrepo.py	Fri Oct 19 01:34:50 2012 -0500
@@ -0,0 +1,377 @@
+# synthrepo.py - repo synthesis
+#
+# Copyright 2012 Facebook
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''synthesize structurally interesting change history
+
+This extension is useful for creating a repository with properties
+that are statistically similar to an existing repository. During
+analysis, a simple probability table is constructed from the history
+of an existing repository.  During synthesis, these properties are
+reconstructed.
+
+Properties that are analyzed and synthesized include the following:
+
+- Lines added or removed when an existing file is modified
+- Number and sizes of files added
+- Number of files removed
+- Line lengths
+- Topological distance to parent changeset(s)
+- Probability of a commit being a merge
+- Probability of a newly added file being added to a new directory
+- Interarrival time, and time zone, of commits
+
+A few obvious properties that are not currently handled realistically:
+
+- Merges are treated as regular commits with two parents, which is not
+  realistic
+- Modifications are not treated as operations on hunks of lines, but
+  as insertions and deletions of randomly chosen single lines
+- Committer ID (always random)
+- Executability of files
+- Symlinks and binary files are ignored
+'''
+
+import bisect, collections, json, os, random, sys, time
+from mercurial import cmdutil, context, patch, scmutil, url, util
+from mercurial.i18n import _
+from mercurial.node import nullrev, nullid
+
+testedwith = 'internal'
+
+cmdtable = {}
+command = cmdutil.command(cmdtable)
+
+newfile = set(('new fi', 'rename', 'copy f', 'copy t'))
+
+def zerodict():
+    return collections.defaultdict(lambda: 0)
+
+def roundto(x, k):
+    if x > k * 2:
+        return int(round(x / float(k)) * k)
+    return int(round(x))
+
+def parsegitdiff(lines):
+    filename, mar, lineadd, lineremove = None, None, zerodict(), 0
+    binary = False
+    for line in lines:
+        start = line[:6]
+        if start == 'diff -':
+            if filename:
+                yield filename, mar, lineadd, lineremove, binary
+            mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
+            filename = patch.gitre.match(line).group(1)
+        elif start in newfile:
+            mar = 'a'
+        elif start == 'GIT bi':
+            binary = True
+        elif start == 'delete':
+            mar = 'r'
+        elif start:
+            s = start[0]
+            if s == '-' and not line.startswith('--- '):
+                lineremove += 1
+            elif s == '+' and not line.startswith('+++ '):
+                lineadd[roundto(len(line) - 1, 5)] += 1
+    if filename:
+        yield filename, mar, lineadd, lineremove, binary
+
+@command('analyze',
+         [('o', 'output', [], _('write output to given file'), _('FILE')),
+          ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
+         _('hg analyze'))
+def analyze(ui, repo, *revs, **opts):
+    '''create a simple model of a repository to use for later synthesis
+
+    This command examines every changeset in the given range (or all
+    of history if none are specified) and creates a simple statistical
+    model of the history of the repository.
+
+    The model is written out to a JSON file, and can be used by
+    :hg:`synthesize` to create or augment a repository with synthetic
+    commits that have a structure that is statistically similar to the
+    analyzed repository.
+    '''
+
+    revs = list(revs)
+    revs.extend(opts['rev'])
+    if not revs:
+        revs = [':']
+
+    output = opts['output']
+    if not output:
+        output = os.path.basename(repo.root) + '.json'
+
+    if output == '-':
+        fp = sys.stdout
+    else:
+        fp = open(output, 'w')
+
+    revs = scmutil.revrange(repo, revs)
+    revs.sort()
+
+    lineschanged = zerodict()
+    children = zerodict()
+    p1distance = zerodict()
+    p2distance = zerodict()
+    linesinfilesadded = zerodict()
+    fileschanged = zerodict()
+    filesadded = zerodict()
+    filesremoved = zerodict()
+    linelengths = zerodict()
+    interarrival = zerodict()
+    parents = zerodict()
+    dirsadded = zerodict()
+    tzoffset = zerodict()
+
+    progress = ui.progress
+    _analyzing = _('analyzing')
+    _changesets = _('changesets')
+    _total = len(revs)
+
+    for i, rev in enumerate(revs):
+        progress(_analyzing, i, unit=_changesets, total=_total)
+        ctx = repo[rev]
+        pl = ctx.parents()
+        pctx = pl[0]
+        prev = pctx.rev()
+        children[prev] += 1
+        p1distance[rev - prev] += 1
+        parents[len(pl)] += 1
+        tzoffset[ctx.date()[1]] += 1
+        if len(pl) > 1:
+            p2distance[rev - pl[1].rev()] += 1
+        if prev == rev - 1:
+            lastctx = pctx
+        else:
+            lastctx = repo[rev - 1]
+        if lastctx.rev() != nullrev:
+            interarrival[roundto(ctx.date()[0] - lastctx.date()[0], 300)] += 1
+        diff = sum((d.splitlines()
+                    for d in ctx.diff(pctx, opts=dict(git=True))), [])
+        fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
+        for filename, mar, lineadd, lineremove, binary in parsegitdiff(diff):
+            if binary:
+                continue
+            added = sum(lineadd.itervalues(), 0)
+            if mar == 'm':
+                if added and lineremove:
+                    lineschanged[roundto(added, 5), roundto(lineremove, 5)] += 1
+                    filechanges += 1
+            elif mar == 'a':
+                fileadds += 1
+                if '/' in filename:
+                    filedir = filename.rsplit('/', 1)[0]
+                    if filedir not in pctx.dirs():
+                        diradds += 1
+                linesinfilesadded[roundto(added, 5)] += 1
+            elif mar == 'r':
+                fileremoves += 1
+            for length, count in lineadd.iteritems():
+                linelengths[length] += count
+        fileschanged[filechanges] += 1
+        filesadded[fileadds] += 1
+        dirsadded[diradds] += 1
+        filesremoved[fileremoves] += 1
+
+    invchildren = zerodict()
+
+    for rev, count in children.iteritems():
+        invchildren[count] += 1
+
+    if output != '-':
+        ui.status(_('writing output to %s\n') % output)
+
+    def pronk(d):
+        return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
+
+    json.dump(dict(revs=len(revs),
+                   lineschanged=pronk(lineschanged),
+                   children=pronk(invchildren),
+                   fileschanged=pronk(fileschanged),
+                   filesadded=pronk(filesadded),
+                   linesinfilesadded=pronk(linesinfilesadded),
+                   dirsadded=pronk(dirsadded),
+                   filesremoved=pronk(filesremoved),
+                   linelengths=pronk(linelengths),
+                   parents=pronk(parents),
+                   p1distance=pronk(p1distance),
+                   p2distance=pronk(p2distance),
+                   interarrival=pronk(interarrival),
+                   tzoffset=pronk(tzoffset),
+                   ),
+              fp)
+    fp.close()
+
+@command('synthesize',
+         [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
+          ('', 'dict', '', _('path to a dictionary of words'), _('FILE'))],
+         _('hg synthesize [OPTION].. DESCFILE'))
+def synthesize(ui, repo, descpath, **opts):
+    '''synthesize commits based on a model of an existing repository
+
+    The model must have been generated by :hg:`analyze`. Commits will
+    be generated randomly according to the probabilities described in
+    the model.
+
+    When synthesizing new content, commit descriptions, and user
+    names, words will be chosen randomly from a dictionary that is
+    presumed to contain one word per line. Use --dict to specify the
+    path to an alternate dictionary to use.
+    '''
+    try:
+        fp = url.open(ui, descpath)
+    except Exception, err:
+        raise util.Abort('%s: %s' % (descpath, err[0].strerror))
+    desc = json.load(fp)
+    fp.close()
+
+    def cdf(l):
+        vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
+        t = float(sum(probs, 0))
+        s, cdfs = 0, []
+        for v in probs:
+            s += v
+            cdfs.append(s / t)
+        return vals, cdfs
+
+    lineschanged = cdf(desc['lineschanged'])
+    fileschanged = cdf(desc['fileschanged'])
+    filesadded = cdf(desc['filesadded'])
+    dirsadded = cdf(desc['dirsadded'])
+    filesremoved = cdf(desc['filesremoved'])
+    linelengths = cdf(desc['linelengths'])
+    parents = cdf(desc['parents'])
+    p1distance = cdf(desc['p1distance'])
+    p2distance = cdf(desc['p2distance'])
+    interarrival = cdf(desc['interarrival'])
+    linesinfilesadded = cdf(desc['linesinfilesadded'])
+    tzoffset = cdf(desc['tzoffset'])
+
+    dictfile = opts.get('dict') or '/usr/share/dict/words'
+    try:
+        fp = open(dictfile, 'rU')
+    except IOError, err:
+        raise util.Abort('%s: %s' % (dictfile, err.strerror))
+    words = fp.read().splitlines()
+    fp.close()
+
+    def pick(cdf):
+        return cdf[0][bisect.bisect_left(cdf[1], random.random())]
+
+    def makeline(minimum=0):
+        total = max(minimum, pick(linelengths))
+        c, l = 0, []
+        while c < total:
+            w = random.choice(words)
+            c += len(w) + 1
+            l.append(w)
+        return ' '.join(l)
+
+    wlock = repo.wlock()
+    lock = repo.lock()
+
+    nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))
+
+    progress = ui.progress
+    _synthesizing = _('synthesizing')
+    _changesets = _('changesets')
+
+    count = int(opts['count'])
+    heads = set(map(repo.changelog.rev, repo.heads()))
+    for i in xrange(count):
+        progress(_synthesizing, i, unit=_changesets, total=count)
+
+        node = repo.changelog.node
+        revs = len(repo)
+
+        def pickhead(heads, distance):
+            if heads:
+                lheads = sorted(heads)
+                rev = revs - min(pick(distance), revs)
+                if rev < lheads[-1]:
+                    rev = lheads[bisect.bisect_left(lheads, rev)]
+                else:
+                    rev = lheads[-1]
+                return rev, node(rev)
+            return nullrev, nullid
+
+        r1 = revs - min(pick(p1distance), revs)
+        p1 = node(r1)
+
+        # the number of heads will grow without bound if we use a pure
+        # model, so artificially constrain their proliferation
+        if pick(parents) == 2 or len(heads) > random.randint(1, 20):
+            r2, p2 = pickhead(heads.difference([r1]), p2distance)
+        else:
+            r2, p2 = nullrev, nullid
+
+        pl = [p1, p2]
+        pctx = repo[r1]
+        mf = pctx.manifest()
+        mfk = mf.keys()
+        changes = {}
+        if mfk:
+            for __ in xrange(pick(fileschanged)):
+                for __ in xrange(10):
+                    fctx = pctx.filectx(random.choice(mfk))
+                    path = fctx.path()
+                    if not (path in nevertouch or fctx.isbinary() or
+                            'l' in fctx.flags()):
+                        break
+                lines = fctx.data().splitlines()
+                add, remove = pick(lineschanged)
+                for __ in xrange(remove):
+                    if not lines:
+                        break
+                    del lines[random.randrange(0, len(lines))]
+                for __ in xrange(add):
+                    lines.insert(random.randint(0, len(lines)), makeline())
+                path = fctx.path()
+                changes[path] = context.memfilectx(path,
+                                                   '\n'.join(lines) + '\n')
+            for __ in xrange(pick(filesremoved)):
+                path = random.choice(mfk)
+                for __ in xrange(10):
+                    path = random.choice(mfk)
+                    if path not in changes:
+                        changes[path] = None
+                        break
+        if filesadded:
+            dirs = list(pctx.dirs())
+            dirs.append('')
+        for __ in xrange(pick(filesadded)):
+            path = [random.choice(dirs)]
+            if pick(dirsadded):
+                path.append(random.choice(words))
+            path.append(random.choice(words))
+            path = '/'.join(filter(None, path))
+            data = '\n'.join(makeline()
+                             for __ in xrange(pick(linesinfilesadded))) + '\n'
+            changes[path] = context.memfilectx(path, data)
+        def filectxfn(repo, memctx, path):
+            data = changes[path]
+            if data is None:
+                raise IOError
+            return data
+        if not changes:
+            continue
+        if revs:
+            date = repo['tip'].date()[0] + pick(interarrival)
+        else:
+            date = time.time() - (86400 * count)
+        user = random.choice(words) + '@' + random.choice(words)
+        mc = context.memctx(repo, pl, makeline(minimum=2),
+                            sorted(changes.iterkeys()),
+                            filectxfn, user, '%d %d' % (date, pick(tzoffset)))
+        newnode = mc.commit()
+        heads.add(repo.changelog.rev(newnode))
+        heads.discard(r1)
+        heads.discard(r2)
+
+    lock.release()
+    wlock.release()
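
The cdf()/pick() pair inside synthesize() is the sampling core: each list of (value, count) pairs produced by the analysis phase is turned into a cumulative distribution, and a uniform random draw is mapped back to a value with bisect, so values come out in proportion to how often they were observed. A self-contained sketch of the same technique (the example counts are made up):

    import bisect, random

    def cdf(pairs):
        # sort by observed count, descending, then accumulate probabilities
        vals, counts = zip(*sorted(pairs, key=lambda x: x[1], reverse=True))
        total = float(sum(counts))
        running, cdfs = 0, []
        for c in counts:
            running += c
            cdfs.append(running / total)
        return vals, cdfs

    def pick(dist):
        vals, cdfs = dist
        # a uniform draw lands in a bucket whose width equals its probability
        return vals[bisect.bisect_left(cdfs, random.random())]

    # e.g. line lengths seen 70, 20 and 10 times during analysis
    linelengths = cdf([(40, 70), (60, 20), (5, 10)])
    print([pick(linelengths) for _ in range(5)])   # mostly 40s
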
--- a/contrib/vim/hgcommand.vim	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/vim/hgcommand.vim	Fri Oct 19 01:34:50 2012 -0500
@@ -27,7 +27,7 @@
 "
 " You still can read the documentation at the end of this file. Locate it by
 " searching the "hgcommand-contents" string (and set ft=help to have
-" appropriate syntaxic coloration).
+" appropriate syntactic coloration).
 
 " Section: Plugin header {{{1
 
@@ -752,7 +752,7 @@
     " Protect against case and backslash issues in Windows.
     let autoPattern = '\c' . messageFileName
 
-    " Ensure existance of group
+    " Ensure existence of group
     augroup HGCommit
     augroup END
 
@@ -1442,7 +1442,7 @@
 
                                                  *hgcommand-mappings-override*
 
-   The default mappings can be overriden by user-provided instead by mapping
+   The default mappings can be overridden by user-provided instead by mapping
    to <Plug>CommandName.  This is especially useful when these mappings
    collide with other existing mappings (vim will warn of this during plugin
    initialization, but will not clobber the existing mappings).
--- a/contrib/vim/patchreview.vim	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/vim/patchreview.vim	Fri Oct 19 01:34:50 2012 -0500
@@ -17,7 +17,7 @@
 "
 "   0.2.1 - Minor temp directory autodetection logic and cleanup
 "
-"   0.2 - Removed the need for filterdiff by implemeting it in pure vim script
+"   0.2 - Removed the need for filterdiff by implementing it in pure vim script
 "       - Added DiffReview command for reverse (changed repository to
 "         pristine state) reviews.
 "         (PatchReview does pristine repository to patch review)
@@ -55,7 +55,7 @@
 "   3) Optional (but recommended for speed)
 "
 "      Install patchutils ( http://cyberelk.net/tim/patchutils/ ) for your
-"      OS. For windows it is availble from Cygwin
+"      OS. For windows it is available from Cygwin
 "
 "         http://www.cygwin.com
 "
--- a/contrib/win32/hgwebdir_wsgi.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/win32/hgwebdir_wsgi.py	Fri Oct 19 01:34:50 2012 -0500
@@ -27,7 +27,7 @@
 #   On 64-bit systems, make sure it's assigned a 32-bit app pool.
 #
 # - In the application, setup a wildcard script handler mapping of type
-#   IpsapiModule with the shim dll as its executable. This file MUST reside
+#   IsapiModule with the shim dll as its executable. This file MUST reside
 #   in the same directory as the shim. Remove all other handlers, if you wish.
 #
 # - Make sure the ISAPI and CGI restrictions (configured globally on the
--- a/contrib/wix/templates.wxs	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/wix/templates.wxs	Fri Oct 19 01:34:50 2012 -0500
@@ -33,7 +33,6 @@
           <File Name="map-cmdline.default" />
           <File Name="map-cmdline.bisect" />
           <File Name="map-cmdline.xml" />
-          <File Name="template-vars.txt" />
         </Component>
 
         <Directory Id="templates.atomdir" Name="atom">
--- a/contrib/zsh_completion	Mon Oct 08 00:19:30 2012 +0200
+++ b/contrib/zsh_completion	Fri Oct 19 01:34:50 2012 -0500
@@ -361,6 +361,19 @@
     'urls:URL:_hg_urls'
 }
 
+_hg_add_help_topics=(
+    config dates diffs environment extensions filesets glossary hgignore hgweb
+    merge-tools multirevs obsolescence patterns phases revisions revsets
+    subrepos templating urls
+)
+
+_hg_help_topics() {
+    local topics
+    (( $#_hg_cmd_list )) || _hg_get_commands
+    topics=($_hg_cmd_list $_hg_add_help_topics)
+    _describe -t help_topics 'help topics' topics
+}
+
 # Common options
 _hg_global_opts=(
     '(--repository -R)'{-R+,--repository}'[repository root directory]:repository:_files -/'
@@ -385,18 +398,49 @@
   '*'{-I+,--include}'[include names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/'
   '*'{-X+,--exclude}'[exclude names matching the given patterns]:dir:_files -W $(_hg_cmd root) -/')
 
+_hg_clone_opts=(
+  $_hg_remote_opts
+  '(--noupdate -U)'{-U,--noupdate}'[do not update the new working directory]'
+  '--pull[use pull protocol to copy metadata]'
+  '--uncompressed[use uncompressed transfer (fast over LAN)]')
+
+_hg_date_user_opts=(
+  '(--currentdate -D)'{-D,--currentdate}'[record the current date as commit date]'
+  '(--currentuser -U)'{-U,--currentuser}'[record the current user as committer]'
+  '(--date -d)'{-d+,--date}'[record the specified date as commit date]:date:'
+  '(--user -u)'{-u+,--user}'[record the specified user as committer]:user:')
+
+_hg_gitlike_opts=(
+  '(--git -g)'{-g,--git}'[use git extended diff format]')
+
 _hg_diff_opts=(
+  $_hg_gitlike_opts
   '(--text -a)'{-a,--text}'[treat all files as text]'
-  '(--git -g)'{-g,--git}'[use git extended diff format]'
-  "--nodates[omit dates from diff headers]")
+  '--nodates[omit dates from diff headers]')
+
+_hg_mergetool_opts=(
+  '(--tool -t)'{-t+,--tool}'[specify merge tool]:tool:')
 
 _hg_dryrun_opts=(
   '(--dry-run -n)'{-n,--dry-run}'[do not perform actions, just print output]')
 
+_hg_ignore_space_opts=(
+  '(--ignore-all-space -w)'{-w,--ignore-all-space}'[ignore white space when comparing lines]'
+  '(--ignore-space-change -b)'{-b,--ignore-space-change}'[ignore changes in the amount of white space]'
+  '(--ignore-blank-lines -B)'{-B,--ignore-blank-lines}'[ignore changes whose lines are all blank]')
+
 _hg_style_opts=(
   '--style[display using template map file]:'
   '--template[display with template]:')
 
+_hg_log_opts=(
+  $_hg_global_opts $_hg_style_opts $_hg_gitlike_opts
+  '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:'
+  '(--no-merges -M)'{-M,--no-merges}'[do not show merges]'
+  '(--patch -p)'{-p,--patch}'[show patch]'
+  '--stat[output diffstat-style summary of changes]'
+)
+
 _hg_commit_opts=(
   '(-m --message -l --logfile --edit -e)'{-e,--edit}'[edit commit message]'
   '(-e --edit -l --logfile --message -m)'{-m+,--message}'[use <text> as commit message]:message:'
@@ -406,12 +450,20 @@
   '(--ssh -e)'{-e+,--ssh}'[specify ssh command to use]:'
   '--remotecmd[specify hg command to run on the remote side]:')
 
+_hg_branch_bmark_opts=(
+  '(--bookmark -B)'{-B+,--bookmark}'[specify bookmark(s)]:bookmark:_hg_bookmarks'
+  '(--branch -b)'{-b+,--branch}'[specify branch(es)]:branch:_hg_branches'
+)
+
+_hg_subrepos_opts=(
+  '(--subrepos -S)'{-S,--subrepos}'[recurse into subrepositories]')
+
 _hg_cmd() {
   _call_program hg HGPLAIN=1 hg "$_hg_cmd_globals[@]" "$@" 2> /dev/null
 }
 
 _hg_cmd_add() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_dryrun_opts $_hg_subrepos_opts \
   '*:unknown files:_hg_unknown'
 }
 
@@ -434,7 +486,7 @@
 }
 
 _hg_cmd_archive() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \
   '--no-decode[do not pass files through decoders]' \
   '(--prefix -p)'{-p+,--prefix}'[directory prefix for files in archive]:' \
   '(--rev -r)'{-r+,--rev}'[revision to distribute]:revision:_hg_labels' \
@@ -443,7 +495,7 @@
 }
 
 _hg_cmd_backout() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_mergetool_opts $_hg_pat_opts \
     '--merge[merge with old dirstate parent after backout]' \
     '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
     '--parent[parent to choose when backing out merge]' \
@@ -456,6 +508,7 @@
 _hg_cmd_bisect() {
   _arguments -s -w : $_hg_global_opts \
   '(-)'{-r,--reset}'[reset bisect state]' \
+  '(--extend -e)'{-e,--extend}'[extend the bisect range]' \
   '(--good -g --bad -b --skip -s --reset -r)'{-g,--good}'[mark changeset good]'::revision:_hg_labels \
   '(--good -g --bad -b --skip -s --reset -r)'{-b,--bad}'[mark changeset bad]'::revision:_hg_labels \
   '(--good -g --bad -b --skip -s --reset -r)'{-s,--skip}'[skip testing changeset]' \
@@ -466,6 +519,7 @@
 _hg_cmd_bookmarks() {
   _arguments -s -w : $_hg_global_opts \
   '(--force -f)'{-f,--force}'[force]' \
+  '(--inactive -i)'{-i,--inactive}'[mark a bookmark inactive]' \
   '(--rev -r --delete -d --rename -m)'{-r+,--rev}'[revision]:revision:_hg_labels' \
   '(--rev -r --delete -d --rename -m)'{-d,--delete}'[delete a given bookmark]' \
   '(--rev -r --delete -d --rename -m)'{-m+,--rename}'[rename a given bookmark]:bookmark:_hg_bookmarks' \
@@ -480,13 +534,17 @@
 
 _hg_cmd_branches() {
   _arguments -s -w : $_hg_global_opts \
-  '(--active -a)'{-a,--active}'[show only branches that have unmerge heads]'
+  '(--active -a)'{-a,--active}'[show only branches that have unmerge heads]' \
+  '(--closed -c)'{-c,--closed}'[show normal and closed branches]'
 }
 
 _hg_cmd_bundle() {
   _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[run even when remote repository is unrelated]' \
   '(2)*--base[a base changeset to specify instead of a destination]:revision:_hg_labels' \
+  '(--branch -b)'{-b+,--branch}'[a specific branch to bundle]' \
+  '(--rev -r)'{-r+,--rev}'[changeset(s) to bundle]:' \
+  '--all[bundle all changesets in the repository]' \
   ':output file:_files' \
   ':destination repository:_files -/'
 }
@@ -495,26 +553,28 @@
   _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
   '(--output -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
   '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
+  '--decode[apply any matching decode filter]' \
   '*:file:_hg_files'
 }
 
 _hg_cmd_clone() {
-  _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
-  '(--noupdate -U)'{-U,--noupdate}'[do not update the new working directory]' \
+  _arguments -s -w : $_hg_global_opts $_hg_clone_opts \
   '(--rev -r)'{-r+,--rev}'[a changeset you would like to have after cloning]:' \
-  '--uncompressed[use uncompressed transfer (fast over LAN)]' \
+  '(--updaterev -u)'{-u+,--updaterev}'[revision, tag or branch to check out]' \
+  '(--branch -b)'{-b+,--branch}'[clone only the specified branch]' \
   ':source repository:_hg_remote' \
   ':destination:_hg_clone_dest'
 }
 
 _hg_cmd_commit() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \
   '(--addremove -A)'{-A,--addremove}'[mark new/missing files as added/removed before committing]' \
   '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
   '(--logfile -l)'{-l+,--logfile}'[read commit message from <file>]:log file:_files -g \*.txt' \
   '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
   '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
   '--amend[amend the parent of the working dir]' \
+  '--close-branch[mark a branch as closed]' \
   '*:file:_hg_files'
 }
 
@@ -527,12 +587,15 @@
 
 _hg_cmd_diff() {
   typeset -A opt_args
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_diff_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_diff_opts $_hg_ignore_space_opts \
+                     $_hg_pat_opts $_hg_subrepos_opts \
   '*'{-r,--rev}'+[revision]:revision:_hg_revrange' \
   '(--show-function -p)'{-p,--show-function}'[show which function each change is in]' \
-  '(--ignore-all-space -w)'{-w,--ignore-all-space}'[ignore white space when comparing lines]' \
-  '(--ignore-space-change -b)'{-b,--ignore-space-change}'[ignore changes in the amount of white space]' \
-  '(--ignore-blank-lines -B)'{-B,--ignore-blank-lines}'[ignore changes whose lines are all blank]' \
+  '(--change -c)'{-c,--change}'[change made by revision]' \
+  '(--text -a)'{-a,--text}'[treat all files as text]' \
+  '--reverse[produce a diff that undoes the changes]' \
+  '(--unified -U)'{-U,--unified}'[number of lines of context to show]' \
+  '--stat[output diffstat-style summary of changes]' \
   '*:file:->diff_files'
 
   if [[ $state == 'diff_files' ]]
@@ -550,20 +613,21 @@
   _arguments -s -w : $_hg_global_opts $_hg_diff_opts \
   '(--outout -o)'{-o+,--output}'[print output to file with formatted name]:filespec:' \
   '--switch-parent[diff against the second parent]' \
+  '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
   '*:revision:_hg_labels'
 }
 
+_hg_cmd_forget() {
+  _arguments -s -w : $_hg_global_opts \
+  '*:file:_hg_files'
+}
+
 _hg_cmd_graft() {
-  _arguments -s -w : $_hg_global_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_dryrun_opts \
+                     $_hg_date_user_opts $_hg_mergetool_opts \
   '(--continue -c)'{-c,--continue}'[resume interrupted graft]' \
   '(--edit -e)'{-e,--edit}'[invoke editor on commit messages]' \
   '--log[append graft info to log message]' \
-  '(--currentdate -D)'{-D,--currentdate}'[record the current date as commit date]' \
-  '(--currentuser -U)'{-U,--currentuser}'[record the current user as committer]' \
-  '(--date -d)'{-d,--date}'[record the specified date as commit date]' \
-  '(--user -u)'{-u,--user}'[record the specified user as committer]' \
-  '(--tool -t)'{-t,--tool}'[specify merge tool]' \
-  '(--dry-run -n)'{-n,--dry-run}'[do not perform actions, just print output]' \
   '*:revision:_hg_labels'
 }
 
@@ -577,44 +641,55 @@
   '(--line-number -n)'{-n,--line-number}'[print matching line numbers]' \
   '*'{-r+,--rev}'[search in given revision range]:revision:_hg_revrange' \
   '(--user -u)'{-u,--user}'[print user who committed change]' \
+  '(--date -d)'{-d,--date}'[print date of a changeset]' \
   '1:search pattern:' \
   '*:files:_hg_files'
 }
 
 _hg_cmd_heads() {
   _arguments -s -w : $_hg_global_opts $_hg_style_opts \
+  '(--topo -t)'{-t,--topo}'[show topological heads only]' \
+  '(--closed -c)'{-c,--closed}'[show normal and closed branch heads]' \
   '(--rev -r)'{-r+,--rev}'[show only heads which are descendants of rev]:revision:_hg_labels'
 }
 
 _hg_cmd_help() {
   _arguments -s -w : $_hg_global_opts \
-  '*:mercurial command:_hg_commands'
+  '(--extension -e)'{-e,--extension}'[show only help for extensions]' \
+  '(--command -c)'{-c,--command}'[show only help for commands]' \
+  '(--keyword -k)'{-k+,--keyword}'[show topics matching keyword]' \
+  '*:mercurial help topic:_hg_help_topics'
 }
 
 _hg_cmd_identify() {
-  _arguments -s -w : $_hg_global_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
   '(--rev -r)'{-r+,--rev}'[identify the specified rev]:revision:_hg_labels' \
   '(--num -n)'{-n+,--num}'[show local revision number]' \
   '(--id -i)'{-i+,--id}'[show global revision id]' \
   '(--branch -b)'{-b+,--branch}'[show branch]' \
+  '(--bookmark -B)'{-B+,--bookmark}'[show bookmarks]' \
   '(--tags -t)'{-t+,--tags}'[show tags]'
 }
 
 _hg_cmd_import() {
-  _arguments -s -w : $_hg_global_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_commit_opts \
   '(--strip -p)'{-p+,--strip}'[directory strip option for patch (default: 1)]:count:' \
-  '(--message -m)'{-m+,--message}'[use <text> as commit message]:text:' \
   '(--force -f)'{-f,--force}'[skip check for outstanding uncommitted changes]' \
   '--bypass[apply patch without touching the working directory]' \
+  '--no-commit[do not commit, just update the working directory]' \
+  '--exact[apply patch to the nodes from which it was generated]' \
+  '--import-branch[use any branch information in patch (implied by --exact)]' \
+  '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
+  '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
+  '(--similarity -s)'{-s+,--similarity}'[guess renamed files by similarity (0<=s<=100)]:' \
   '*:patch:_files'
 }
 
 _hg_cmd_incoming() {
-  _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
-  '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
+  _arguments -s -w : $_hg_log_opts $_hg_branch_bmark_opts $_hg_remote_opts \
+                     $_hg_subrepos_opts \
   '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
-  '(--patch -p)'{-p,--patch}'[show patch]' \
-  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_tags' \
+  '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:_hg_labels' \
   '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
   '--bundle[file to store the bundles into]:bundle file:_files' \
   ':source:_hg_remote'
@@ -634,42 +709,41 @@
 }
 
 _hg_cmd_log() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_style_opts \
+  _arguments -s -w : $_hg_log_opts $_hg_pat_opts \
   '(--follow --follow-first -f)'{-f,--follow}'[follow changeset or history]' \
   '(-f --follow)--follow-first[only follow the first parent of merge changesets]' \
   '(--copies -C)'{-C,--copies}'[show copied files]' \
   '(--keyword -k)'{-k+,--keyword}'[search for a keyword]:' \
-  '(--limit -l)'{-l+,--limit}'[limit number of changes displayed]:' \
   '*'{-r,--rev}'[show the specified revision or range]:revision:_hg_revrange' \
-  '(--no-merges -M)'{-M,--no-merges}'[do not show merges]' \
   '(--only-merges -m)'{-m,--only-merges}'[show only merges]' \
-  '(--patch -p)'{-p,--patch}'[show patch]' \
   '(--prune -P)'{-P+,--prune}'[do not display revision or any of its ancestors]:revision:_hg_labels' \
+  '(--graph -G)'{-G+,--graph}'[show the revision DAG]' \
   '(--branch -b)'{-b+,--branch}'[show changesets within the given named branch]:branch:_hg_branches' \
+  '(--user -u)'{-u+,--user}'[revisions committed by user]:user:' \
+  '(--date -d)'{-d+,--date}'[show revisions matching date spec]:date:' \
   '*:files:_hg_files'
 }
 
 _hg_cmd_manifest() {
   _arguments -s -w : $_hg_global_opts \
   '--all[list files from all revisions]' \
+  '(--rev -r)'{-r+,--rev}'[revision to display]:revision:_hg_labels' \
   ':revision:_hg_labels'
 }
 
 _hg_cmd_merge() {
-  _arguments -s -w : $_hg_global_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_mergetool_opts \
   '(--force -f)'{-f,--force}'[force a merge with outstanding changes]' \
   '(--rev -r 1)'{-r,--rev}'[revision to merge]:revision:_hg_mergerevs' \
   '(--preview -P)'{-P,--preview}'[review revisions to merge (no merge is performed)]' \
-  '(--tool -t)'{-t,--tool}'[specify merge tool]' \
   ':revision:_hg_mergerevs'
 }
 
 _hg_cmd_outgoing() {
-  _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_style_opts \
-  '(--no-merges -M)'{-M,--no-merges}'[do not show merge revisions]' \
+  _arguments -s -w : $_hg_log_opts $_hg_branch_bmark_opts $_hg_remote_opts \
+                     $_hg_subrepos_opts \
   '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
-  '(--patch -p)'{-p,--patch}'[show patch]' \
-  '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]' \
+  '*'{-r,--rev}'[a specific revision you would like to push]:revision:_hg_revrange' \
   '(--newest-first -n)'{-n,--newest-first}'[show newest record first]' \
   ':destination:_hg_remote'
 }
@@ -696,7 +770,7 @@
 }
 
 _hg_cmd_pull() {
-  _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_branch_bmark_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[run even when the remote repository is unrelated]' \
   '(--update -u)'{-u,--update}'[update to new tip if changesets were pulled]' \
   '(--rev -r)'{-r+,--rev}'[a specific revision up to which you would like to pull]:revision:' \
@@ -704,9 +778,10 @@
 }
 
 _hg_cmd_push() {
-  _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_branch_bmark_opts $_hg_remote_opts \
   '(--force -f)'{-f,--force}'[force push]' \
   '(--rev -r)'{-r+,--rev}'[a specific revision you would like to push]:revision:_hg_labels' \
+  '--new-branch[allow pushing a new branch]' \
   ':destination:_hg_remote'
 }
 
@@ -728,7 +803,9 @@
   local context state line
   typeset -A opt_args
 
-  _arguments -s -w : $_hg_global_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_mergetool_opts $_hg_pat_opts \
+  '(--all -a)'{-a,--all}'[select all unresolved files]' \
+  '(--no-status -n)'{-n,--no-status}'[hide status prefix]' \
   '(--list -l --mark -m --unmark -u)'{-l,--list}'[list state of files needing merge]:*:merged files:->resolve_files' \
   '(--mark -m --list -l --unmark -u)'{-m,--mark}'[mark files as resolved]:*:unresolved files:_hg_unresolved' \
   '(--unmark -u --list -l --mark -m)'{-u,--unmark}'[unmark files as resolved]:*:resolved files:_hg_resolved' \
@@ -749,6 +826,7 @@
   '(--all -a :)'{-a,--all}'[revert all changes when no arguments given]' \
   '(--rev -r)'{-r+,--rev}'[revision to revert to]:revision:_hg_labels' \
   '(--no-backup -C)'{-C,--no-backup}'[do not save backup copies of files]' \
+  '(--date -d)'{-d+,--date}'[tipmost revision matching date]:date code:' \
   '*:file:->diff_files'
 
   if [[ $state == 'diff_files' ]]
@@ -764,6 +842,11 @@
   fi
 }
 
+_hg_cmd_rollback() {
+  _arguments -s -w : $_hg_global_opts $_hg_dryrun_opts \
+  '(--force -f)'{-f,--force}'[ignore safety measures]' \
+}
+
 _hg_cmd_serve() {
   _arguments -s -w : $_hg_global_opts \
   '(--accesslog -A)'{-A+,--accesslog}'[name of access log file]:log file:_files' \
@@ -771,10 +854,15 @@
   '(--daemon -d)'{-d,--daemon}'[run server in background]' \
   '(--port -p)'{-p+,--port}'[listen port]:listen port:' \
   '(--address -a)'{-a+,--address}'[interface address]:interface address:' \
+  '--prefix[prefix path to serve from]:directory:_files' \
   '(--name -n)'{-n+,--name}'[name to show in web pages]:repository name:' \
+  '--web-conf[name of the hgweb config file]:webconf_file:_files' \
+  '--pid-file[name of file to write process ID to]:pid_file:_files' \
+  '--cmdserver[cmdserver mode]:mode:' \
   '(--templates -t)'{-t,--templates}'[web template directory]:template dir:_files -/' \
   '--style[web template style]:style' \
   '--stdio[for remote clients]' \
+  '--certificate[certificate file]:cert_file:_files' \
   '(--ipv6 -6)'{-6,--ipv6}'[use IPv6 in addition to IPv4]'
 }
 
@@ -785,7 +873,7 @@
 }
 
 _hg_cmd_status() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \
   '(--all -A)'{-A,--all}'[show status of all files]' \
   '(--modified -m)'{-m,--modified}'[show only modified files]' \
   '(--added -a)'{-a,--added}'[show only added files]' \
@@ -798,6 +886,7 @@
   '(--copies -C)'{-C,--copies}'[show source of copied files]' \
   '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs]' \
   '--rev[show difference from revision]:revision:_hg_labels' \
+  '--change[list the changed files of a revision]:revision:_hg_labels' \
   '*:files:_files'
 }
 
@@ -813,11 +902,14 @@
   '(--date -d)'{-d+,--date}'[record datecode as commit date]:date code:' \
   '(--user -u)'{-u+,--user}'[record user as commiter]:user:' \
   '(--rev -r)'{-r+,--rev}'[revision to tag]:revision:_hg_labels' \
+  '(--force -f)'{-f,--force}'[force tag]' \
+  '--remove[remove a tag]' \
+  '(--edit -e)'{-e,--edit}'[edit commit message]' \
   ':tag name:'
 }
 
 _hg_cmd_tip() {
-  _arguments -s -w : $_hg_global_opts $_hg_style_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_gitlike_opts $_hg_style_opts \
   '(--patch -p)'{-p,--patch}'[show patch]'
 }
 
@@ -831,6 +923,8 @@
   _arguments -s -w : $_hg_global_opts \
   '(--clean -C)'{-C,--clean}'[overwrite locally modified files]' \
   '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
+  '(--check -c)'{-c,--check}'[update across branches if no uncommitted changes]' \
+  '(--date -d)'{-d+,--date}'[tipmost revision matching date]' \
   ':revision:_hg_labels'
 }
 
@@ -893,7 +987,16 @@
   '(--summary -s)'{-s,--summary}'[print first line of patch header]')
 
 _hg_cmd_qapplied() {
-  _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
+  _arguments -s -w : $_hg_global_opts $_hg_qseries_opts \
+  '(--last -1)'{-1,--last}'[show only the preceding applied patch]' \
+  '*:patch:_hg_qapplied'
+}
+
+_hg_cmd_qclone() {
+  _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_clone_opts \
+  '(--patches -p)'{-p+,--patches}'[location of source patch repository]' \
+  ':source repository:_hg_remote' \
+  ':destination:_hg_clone_dest'
 }
 
 _hg_cmd_qdelete() {
@@ -904,7 +1007,8 @@
 }
 
 _hg_cmd_qdiff() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_diff_opts \
+                     $_hg_ignore_space_opts \
   '*:pattern:_hg_files'
 }
 
@@ -917,12 +1021,15 @@
 _hg_cmd_qfold() {
   _arguments -s -w : $_hg_global_opts $_h_commit_opts \
   '(--keep,-k)'{-k,--keep}'[keep folded patch files]' \
+  '(--force -f)'{-f,--force}'[overwrite any local changes]' \
+  '--no-backup[do not save backup copies of files]' \
   '*:unapplied patch:_hg_qunapplied'
 }
 
 _hg_cmd_qgoto() {
   _arguments -s -w : $_hg_global_opts \
   '(--force -f)'{-f,--force}'[overwrite any local changes]' \
+  '--keep-changes[tolerate non-conflicting local changes]' \
   ':patch:_hg_qseries'
 }
 
@@ -940,17 +1047,17 @@
 }
 
 _hg_cmd_qimport() {
-  _arguments -s -w : $_hg_global_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_gitlike_opts \
   '(--existing -e)'{-e,--existing}'[import file in patch dir]' \
   '(--name -n 2)'{-n+,--name}'[patch file name]:name:' \
   '(--force -f)'{-f,--force}'[overwrite existing files]' \
   '*'{-r+,--rev}'[place existing revisions under mq control]:revision:_hg_revrange' \
+  '(--push -P)'{-P,--push}'[qpush after importing]' \
   '*:patch:_files'
 }
 
 _hg_cmd_qnew() {
-  _arguments -s -w : $_hg_global_opts $_hg_commit_opts \
-  '(--force -f)'{-f,--force}'[import uncommitted changes into patch]' \
+  _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_date_user_opts $_hg_gitlike_opts \
   ':patch:'
 }
 
@@ -961,8 +1068,9 @@
 _hg_cmd_qpop() {
   _arguments -s -w : $_hg_global_opts \
   '(--all -a :)'{-a,--all}'[pop all patches]' \
-  '(--name -n)'{-n+,--name}'[queue name to pop]:' \
   '(--force -f)'{-f,--force}'[forget any local changes]' \
+  '--keep-changes[tolerate non-conflicting local changes]' \
+  '--no-backup[do not save backup copies of files]' \
   ':patch:_hg_qapplied'
 }
 
@@ -974,24 +1082,23 @@
   _arguments -s -w : $_hg_global_opts \
   '(--all -a :)'{-a,--all}'[apply all patches]' \
   '(--list -l)'{-l,--list}'[list patch name in commit text]' \
-  '(--merge -m)'{-m+,--merge}'[merge from another queue]:' \
-  '(--name -n)'{-n+,--name}'[merge queue name]:' \
   '(--force -f)'{-f,--force}'[apply if the patch has rejects]' \
   '(--exact -e)'{-e,--exact}'[apply the target patch to its recorded parent]' \
   '--move[reorder patch series and apply only the patch]' \
+  '--keep-changes[tolerate non-conflicting local changes]' \
+  '--no-backup[do not save backup copies of files]' \
   ':patch:_hg_qunapplied'
 }
 
 _hg_cmd_qrefresh() {
-  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_commit_opts \
-  '(--git -g)'{-g,--git}'[use git extended diff format]' \
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_commit_opts $_hg_gitlike_opts \
   '(--short -s)'{-s,--short}'[short refresh]' \
   '*:files:_hg_files'
 }
 
 _hg_cmd_qrename() {
   _arguments -s -w : $_hg_global_opts \
-  ':patch:_hg_qseries' \
+  ':patch:_hg_qunapplied' \
   ':destination:'
 }
 
@@ -1010,7 +1117,8 @@
 }
 
 _hg_cmd_qunapplied() {
-  _arguments -s -w : $_hg_global_opts $_hg_qseries_opts
+  _arguments -s -w : $_hg_global_opts $_hg_qseries_opts \
+  '(--first -1)'{-1,--first}'[show only the first patch]'
 }
 
 _hg_cmd_qtop() {
@@ -1019,16 +1127,17 @@
 
 _hg_cmd_strip() {
   _arguments -s -w : $_hg_global_opts \
-  '(--force -f)'{-f,--force}'[force multi-head removal]' \
-  '(--backup -b)'{-b,--backup}'[bundle unrelated changesets]' \
-  '(--nobackup -n)'{-n,--nobackup}'[no backups]' \
+  '(--force -f)'{-f,--force}'[force removal, discard uncommitted changes, no backup]' \
+  '(--no-backup -n)'{-n,--no-backup}'[no backups]' \
+  '(--keep -k)'{-k,--keep}'[do not modify working copy during strip]' \
+  '(--bookmark -B)'{-B+,--bookmark}'[remove revs only reachable from given bookmark]:bookmark:_hg_bookmarks' \
+  '(--rev -r)'{-r+,--rev}'[revision]:revision:_hg_labels' \
   ':revision:_hg_labels'
 }
 
 # Patchbomb
 _hg_cmd_email() {
-  _arguments -s -w : $_hg_global_opts $_hg_remote_opts \
-  '(--git -g)'{-g,--git}'[use git extended diff format]' \
+  _arguments -s -w : $_hg_global_opts $_hg_remote_opts $_hg_gitlike_opts \
   '--plain[omit hg patch header]' \
   '--body[send patches as inline message text (default)]' \
   '(--outgoing -o)'{-o,--outgoing}'[send changes not found in the target repository]' \
@@ -1058,20 +1167,61 @@
 
 # Rebase
 _hg_cmd_rebase() {
-  _arguments -s -w : $_hg_global_opts \
+  _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_mergetool_opts \
   '*'{-r,--rev}'[rebase these revisions]:revision:_hg_revrange' \
-  '(--source -s)'{-s,--source}'[rebase from the specified changeset]:revision:_hg_labels' \
-  '(--base -b)'{-b,--base}'[rebase from the base of the specified changeset]:revision:_hg_labels' \
-  '(--dest -d)'{-d,--dest}'[rebase onto the specified changeset]' \
+  '(--source -s)'{-s+,--source}'[rebase from the specified changeset]:revision:_hg_labels' \
+  '(--base -b)'{-b+,--base}'[rebase from the base of the specified changeset]:revision:_hg_labels' \
+  '(--dest -d)'{-d+,--dest}'[rebase onto the specified changeset]:revision:_hg_labels' \
   '--collapse[collapse the rebased changeset]' \
-  '(--message -m)'{-m+,--message}'[use <text> as collapse commit message]:text:' \
-  '(--edit -e)'{-e,--edit}'[invoke editor on commit messages]' \
-  '(--logfile -l)'{-l+,--logfile}'[read collapse commit message from <file>]:log file:_files -g \*.txt' \
   '--keep[keep original changeset]' \
   '--keepbranches[keep original branch name]' \
-  '(--tool -t)'{-t,--tool}'[specify merge tool]' \
   '(--continue -c)'{-c,--continue}'[continue an interrupted rebase]' \
   '(--abort -a)'{-a,--abort}'[abort an interrupted rebase]' \
 }
 
+# Record
+_hg_cmd_record() {
+  _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_pat_opts \
+                     $_hg_ignore_space_opts $_hg_subrepos_opts \
+  '(--addremove -A)'{-A,--addremove}'[mark new/missing files as added/removed before committing]' \
+  '--close-branch[mark a branch as closed, hiding it from the branch list]' \
+  '--amend[amend the parent of the working dir]' \
+  '(--date -d)'{-d+,--date}'[record the specified date as commit date]:date:' \
+  '(--user -u)'{-u+,--user}'[record the specified user as committer]:user:'
+}
+
+_hg_cmd_qrecord() {
+  _arguments -s -w : $_hg_global_opts $_hg_commit_opts $_hg_date_user_opts $_hg_gitlike_opts \
+                     $_hg_pat_opts $_hg_ignore_space_opts $_hg_subrepos_opts
+}
+
+# Convert
+_hg_cmd_convert() {
+_arguments -s -w : $_hg_global_opts \
+  '(--source-type -s)'{-s,--source-type}'[source repository type]' \
+  '(--dest-type -d)'{-d,--dest-type}'[destination repository type]' \
+  '(--rev -r)'{-r+,--rev}'[import up to target revision]:revision:' \
+  '(--authormap -A)'{-A+,--authormap}'[remap usernames using this file]:file:_files' \
+  '--filemap[remap file names using contents of file]:file:_files' \
+  '--splicemap[splice synthesized history into place]:file:_files' \
+  '--branchmap[change branch names while converting]:file:_files' \
+  '--branchsort[try to sort changesets by branches]' \
+  '--datesort[try to sort changesets by date]' \
+  '--sourcesort[preserve source changesets order]'
+}
+
+# Graphlog
+_hg_cmd_glog() {
+  _hg_cmd_log $@
+}
+
+# Purge
+_hg_cmd_purge() {
+  _arguments -s -w : $_hg_global_opts $_hg_pat_opts $_hg_subrepos_opts \
+  '(--abort-on-err -a)'{-a,--abort-on-err}'[abort if an error occurs]' \
+  '--all[purge ignored files too]' \
+  '(--print -p)'{-p,--print}'[print filenames instead of deleting them]' \
+  '(--print0 -0)'{-0,--print0}'[end filenames with NUL, for use with xargs (implies -p/--print)]'
+}
+
 _hg "$@"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/check-seclevel.py	Fri Oct 19 01:34:50 2012 -0500
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+#
+# checkseclevel - checking section title levels in each online help documents
+
+import sys, os
+import optparse
+
+# import from the live mercurial repo
+sys.path.insert(0, "..")
+# fall back to pure modules if required C extensions are not available
+sys.path.append(os.path.join('..', 'mercurial', 'pure'))
+from mercurial import demandimport; demandimport.enable()
+from mercurial.commands import table
+from mercurial.help import helptable
+from mercurial import extensions
+from mercurial import minirst
+from mercurial import util
+
+_verbose = False
+
+def verbose(msg):
+    if _verbose:
+        print msg
+
+def error(msg):
+    sys.stderr.write('%s\n' % msg)
+
+level2mark = ['"', '=', '-', '.', '#']
+reservedmarks = ['"']
+
+mark2level = {}
+for m, l in zip(level2mark, xrange(len(level2mark))):
+    if m not in reservedmarks:
+        mark2level[m] = l
+
+initlevel_topic = 0
+initlevel_cmd = 1
+initlevel_ext = 1
+initlevel_ext_cmd = 3
+
+def showavailables(initlevel):
+    error('    available marks and order of them in this help: %s' %
+          (', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]])))
+
+def checkseclevel(doc, name, initlevel):
+    verbose('checking "%s"' % name)
+    blocks, pruned = minirst.parse(doc, 0, ['verbose'])
+    errorcnt = 0
+    curlevel = initlevel
+    for block in blocks:
+        if block['type'] != 'section':
+            continue
+        mark = block['underline']
+        title = block['lines'][0]
+        if (mark not in mark2level) or (mark2level[mark] <= initlevel):
+            error('invalid section mark %r for "%s" of %s' %
+                  (mark * 4, title, name))
+            showavailables(initlevel)
+            errorcnt += 1
+            continue
+        nextlevel = mark2level[mark]
+        if curlevel < nextlevel and curlevel + 1 != nextlevel:
+            error('gap of section level at "%s" of %s' %
+                  (title, name))
+            showavailables(initlevel)
+            errorcnt += 1
+            continue
+        verbose('appropriate section level for "%s %s"' %
+                (mark * (nextlevel * 2), title))
+        curlevel = nextlevel
+
+    return errorcnt
+
+def checkcmdtable(cmdtable, namefmt, initlevel):
+    errorcnt = 0
+    for k, entry in cmdtable.items():
+        name = k.split("|")[0].lstrip("^")
+        if not entry[0].__doc__:
+            verbose('skip checking %s: no help document' %
+                    (namefmt % name))
+            continue
+        errorcnt += checkseclevel(entry[0].__doc__,
+                                  namefmt % name,
+                                  initlevel)
+    return errorcnt
+
+def checkhghelps():
+    errorcnt = 0
+    for names, sec, doc in helptable:
+        if util.safehasattr(doc, '__call__'):
+            doc = doc()
+        errorcnt += checkseclevel(doc,
+                                  '%s help topic' % names[0],
+                                  initlevel_topic)
+
+    errorcnt += checkcmdtable(table, '%s command', initlevel_cmd)
+
+    for name in sorted(extensions.enabled().keys() +
+                       extensions.disabled().keys()):
+        mod = extensions.load(None, name, None)
+        if not mod.__doc__:
+            verbose('skip checking %s extension: no help document' % name)
+            continue
+        errorcnt += checkseclevel(mod.__doc__,
+                                  '%s extension' % name,
+                                  initlevel_ext)
+
+        cmdtable = getattr(mod, 'cmdtable', None)
+        if cmdtable:
+            errorcnt += checkcmdtable(cmdtable,
+                                      '%s command of ' + name + ' extension',
+                                      initlevel_ext_cmd)
+    return errorcnt
+
+def checkfile(filename, initlevel):
+    if filename == '-':
+        filename = 'stdin'
+        doc = sys.stdin.read()
+    else:
+        fp = open(filename)
+        try:
+            doc = fp.read()
+        finally:
+            fp.close()
+
+    verbose('checking input from %s with initlevel %d' %
+            (filename, initlevel))
+    return checkseclevel(doc, 'input from %s' % filename, initlevel)
+
+if __name__ == "__main__":
+    optparser = optparse.OptionParser("""%prog [options]
+
+This checks all help documents of Mercurial (topics, commands,
+extensions and their commands) when no file is specified with the
+--file option.
+""")
+    optparser.add_option("-v", "--verbose",
+                         help="enable additional output",
+                         action="store_true")
+    optparser.add_option("-f", "--file",
+                         help="filename to read in (or '-' for stdin)",
+                         action="store", default="")
+
+    optparser.add_option("-t", "--topic",
+                         help="parse file as help topic",
+                         action="store_const", dest="initlevel", const=0)
+    optparser.add_option("-c", "--command",
+                         help="parse file as help of core command",
+                         action="store_const", dest="initlevel", const=1)
+    optparser.add_option("-e", "--extension",
+                         help="parse file as help of extension",
+                         action="store_const", dest="initlevel", const=1)
+    optparser.add_option("-C", "--extension-command",
+                         help="parse file as help of extension command",
+                         action="store_const", dest="initlevel", const=3)
+
+    optparser.add_option("-l", "--initlevel",
+                         help="set initial section level manually",
+                         action="store", type="int", default=0)
+
+    (options, args) = optparser.parse_args()
+
+    _verbose = options.verbose
+
+    if options.file:
+        if checkfile(options.file, options.initlevel):
+            sys.exit(1)
+    else:
+        if checkhghelps():
+            sys.exit(1)
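
A minimal sketch (assumption, not part of the changeset) of how the
mark-to-level mapping in the new checker classifies section underlines;
the values follow directly from the level2mark list defined above:

  level2mark = ['"', '=', '-', '.', '#']
  reservedmarks = ['"']
  mark2level = {}
  for l, m in enumerate(level2mark):
      if m not in reservedmarks:
          mark2level[m] = l

  # a topic starts at initlevel 0, so '=' (level 1) is its first valid
  # underline; '"' stays reserved for the topic title itself
  assert mark2level['='] == 1
  assert '"' not in mark2level
  # an extension command starts at initlevel 3, leaving only '#' available
  assert level2mark[3 + 1:] == ['#']
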
--- a/doc/hgmanpage.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/doc/hgmanpage.py	Fri Oct 19 01:34:50 2012 -0500
@@ -158,7 +158,7 @@
     """"""
 
     words_and_spaces = re.compile(r'\S+| +|\n')
-    document_start = """Man page generated from reStructeredText."""
+    document_start = """Man page generated from reStructuredText."""
 
     def __init__(self, document):
         nodes.NodeVisitor.__init__(self, document)
@@ -900,7 +900,7 @@
         # ``.PP`` : Start standard indented paragraph.
         # ``.LP`` : Start block paragraph, all except the first.
         # ``.P [type]``  : Start paragraph type.
-        # NOTE dont use paragraph starts because they reset indentation.
+        # NOTE don't use paragraph starts because they reset indentation.
         # ``.sp`` is only vertical space
         self.ensure_eol()
         self.body.append('.sp\n')
--- a/hgext/bugzilla.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/bugzilla.py	Fri Oct 19 01:34:50 2012 -0500
@@ -45,7 +45,7 @@
 email to the Bugzilla email interface to submit comments to bugs.
 The From: address in the email is set to the email address of the Mercurial
 user, so the comment appears to come from the Mercurial user. In the event
-that the Mercurial user email is not recognised by Bugzilla as a Bugzilla
+that the Mercurial user email is not recognized by Bugzilla as a Bugzilla
 user, the email associated with the Bugzilla username used to log into
 Bugzilla is used instead as the source of the comment. Marking bugs fixed
 works on all supported Bugzilla versions.
@@ -53,7 +53,7 @@
 Configuration items common to all access modes:
 
 bugzilla.version
-  This access type to use. Values recognised are:
+  The access type to use. Values recognized are:
 
   :``xmlrpc``:       Bugzilla XMLRPC interface.
   :``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
@@ -303,7 +303,7 @@
     # Methods to be implemented by access classes.
     #
     # 'bugs' is a dict keyed on bug id, where values are a dict holding
-    # updates to bug state. Recognised dict keys are:
+    # updates to bug state. Recognized dict keys are:
     #
     # 'hours': Value, float containing work hours to be updated.
     # 'fix':   If key present, bug is to be marked fixed. Value ignored.
@@ -516,7 +516,7 @@
             raise util.Abort(_('unknown database schema'))
         return ids[0][0]
 
-# Buzgilla via XMLRPC interface.
+# Bugzilla via XMLRPC interface.
 
 class cookietransportrequest(object):
     """A Transport request method that retains cookies over its lifetime.
--- a/hgext/children.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/children.py	Fri Oct 19 01:34:50 2012 -0500
@@ -14,7 +14,7 @@
 "children(REV)"` instead.
 '''
 
-from mercurial import cmdutil
+from mercurial import cmdutil, commands
 from mercurial.commands import templateopts
 from mercurial.i18n import _
 
@@ -48,3 +48,5 @@
          ] + templateopts,
          _('hg children [-r REV] [FILE]')),
 }
+
+commands.inferrepo += " children"
--- a/hgext/churn.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/churn.py	Fri Oct 19 01:34:50 2012 -0500
@@ -197,3 +197,5 @@
           ] + commands.walkopts,
          _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]")),
 }
+
+commands.inferrepo += " churn"
--- a/hgext/color.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/color.py	Fri Oct 19 01:34:50 2012 -0500
@@ -224,6 +224,13 @@
     _terminfo_params = False
 
 _styles = {'grep.match': 'red bold',
+           'grep.linenumber': 'green',
+           'grep.rev': 'green',
+           'grep.change': 'green',
+           'grep.sep': 'cyan',
+           'grep.filename': 'magenta',
+           'grep.user': 'magenta',
+           'grep.date': 'magenta',
            'bookmarks.current': 'green',
            'branches.active': 'none',
            'branches.closed': 'black bold',
@@ -499,5 +506,5 @@
                 orig(m.group(2), **opts)
                 m = re.match(ansire, m.group(3))
         finally:
-            # Explicity reset original attributes
+            # Explicitly reset original attributes
             _kernel32.SetConsoleTextAttribute(stdout, origattr)
--- a/hgext/convert/__init__.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/__init__.py	Fri Oct 19 01:34:50 2012 -0500
@@ -74,7 +74,7 @@
 
     The authormap is a simple text file that maps each source commit
     author to a destination commit author. It is handy for source SCMs
-    that use unix logins to identify authors (eg: CVS). One line per
+    that use unix logins to identify authors (e.g.: CVS). One line per
     author mapping and the line format is::
 
       source author = destination author
--- a/hgext/convert/bzr.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/bzr.py	Fri Oct 19 01:34:50 2012 -0500
@@ -246,7 +246,7 @@
                         # register the change as move
                         renames[topath] = frompath
 
-                # no futher changes, go to the next change
+                # no further changes, go to the next change
                 continue
 
             # we got unicode paths, need to convert them
--- a/hgext/convert/convcmd.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/convcmd.py	Fri Oct 19 01:34:50 2012 -0500
@@ -167,7 +167,7 @@
 
     def toposort(self, parents, sortmode):
         '''Return an ordering such that every uncommitted changeset is
-        preceeded by all its uncommitted ancestors.'''
+        preceded by all its uncommitted ancestors.'''
 
         def mapchildren(parents):
             """Return a (children, roots) tuple where 'children' maps parent
--- a/hgext/convert/cvs.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/cvs.py	Fri Oct 19 01:34:50 2012 -0500
@@ -202,7 +202,7 @@
     def getfile(self, name, rev):
 
         def chunkedread(fp, count):
-            # file-objects returned by socked.makefile() do not handle
+            # file-objects returned by socket.makefile() do not handle
             # large read() requests very well.
             chunksize = 65536
             output = StringIO()
--- a/hgext/convert/cvsps.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/cvsps.py	Fri Oct 19 01:34:50 2012 -0500
@@ -156,8 +156,8 @@
         # The cvsps cache pickle needs a uniquified name, based on the
         # repository location. The address may have all sort of nasties
         # in it, slashes, colons and such. So here we take just the
-        # alphanumerics, concatenated in a way that does not mix up the
-        # various components, so that
+        # alphanumeric characters, concatenated in a way that does not
+        # mix up the various components, so that
         #    :pserver:user@server:/path
         # and
         #    /pserver/user/server/path
@@ -503,7 +503,7 @@
 
         # Check if log entry belongs to the current changeset or not.
 
-        # Since CVS is file centric, two different file revisions with
+        # Since CVS is file-centric, two different file revisions with
         # different branchpoints should be treated as belonging to two
         # different changesets (and the ordering is important and not
         # honoured by cvsps at this point).
--- a/hgext/convert/filemap.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/filemap.py	Fri Oct 19 01:34:50 2012 -0500
@@ -4,6 +4,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
+import posixpath
 import shlex
 from mercurial.i18n import _
 from mercurial import util
@@ -16,6 +17,13 @@
         e = name.rfind('/', 0, e)
     yield '.', name
 
+def normalize(path):
+    '''We use posixpath.normpath to support cross-platform path formats.
+    However, it doesn't handle None input, so we wrap it here.'''
+    if path is None:
+        return None
+    return posixpath.normpath(path)
+
 class filemapper(object):
     '''Map and filter filenames when importing.
     A name can be mapped to itself, a new name, or None (omit from new
@@ -53,21 +61,21 @@
         cmd = lex.get_token()
         while cmd:
             if cmd == 'include':
-                name = lex.get_token()
+                name = normalize(lex.get_token())
                 errs += check(name, self.exclude, 'exclude')
                 self.include[name] = name
             elif cmd == 'exclude':
-                name = lex.get_token()
+                name = normalize(lex.get_token())
                 errs += check(name, self.include, 'include')
                 errs += check(name, self.rename, 'rename')
                 self.exclude[name] = name
             elif cmd == 'rename':
-                src = lex.get_token()
-                dest = lex.get_token()
+                src = normalize(lex.get_token())
+                dest = normalize(lex.get_token())
                 errs += check(src, self.exclude, 'exclude')
                 self.rename[src] = dest
             elif cmd == 'source':
-                errs += self.parse(lex.get_token())
+                errs += self.parse(normalize(lex.get_token()))
             else:
                 self.ui.warn(_('%s:%d: unknown directive %r\n') %
                              (lex.infile, lex.lineno, cmd))
@@ -76,6 +84,7 @@
         return errs
 
     def lookup(self, name, mapping):
+        name = normalize(name)
         for pre, suf in rpairs(name):
             try:
                 return mapping[pre], pre, suf
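
A standard-library sketch (not part of the changeset) of what the new
normalize() helper buys: posixpath.normpath collapses redundant separators
and '.' segments so that equivalent filemap entries compare equal, while
None is passed through for absent tokens.

  import posixpath

  def normalize(path):
      if path is None:
          return None
      return posixpath.normpath(path)

  assert normalize('foo//bar/./baz') == 'foo/bar/baz'
  assert normalize('foo/') == 'foo'
  assert normalize(None) is None
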
--- a/hgext/convert/gnuarch.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/gnuarch.py	Fri Oct 19 01:34:50 2012 -0500
@@ -89,7 +89,7 @@
 
             # Get the complete list of revisions for that tree version
             output, status = self.runlines('revisions', '-r', '-f', treeversion)
-            self.checkexit(status, 'failed retrieveing revisions for %s'
+            self.checkexit(status, 'failed retrieving revisions for %s'
                            % treeversion)
 
             # No new iteration unless a revision has a continuation-of header
--- a/hgext/convert/monotone.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/monotone.py	Fri Oct 19 01:34:50 2012 -0500
@@ -225,7 +225,6 @@
             return [self.rev]
 
     def getchanges(self, rev):
-        #revision = self.mtncmd("get_revision %s" % rev).split("\n\n")
         revision = self.mtnrun("get_revision", rev).split("\n\n")
         files = {}
         ignoremove = {}
--- a/hgext/convert/subversion.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/subversion.py	Fri Oct 19 01:34:50 2012 -0500
@@ -887,8 +887,8 @@
             io = StringIO()
             info = svn.ra.get_file(self.ra, file, revnum, io)
             data = io.getvalue()
-            # ra.get_files() seems to keep a reference on the input buffer
-            # preventing collection. Release it explicitely.
+            # ra.get_file() seems to keep a reference on the input buffer
+            # preventing collection. Release it explicitly.
             io.close()
             if isinstance(info, list):
                 info = info[-1]
@@ -923,7 +923,7 @@
         # Given the repository url of this wc, say
         #   "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
         # extract the "entry" portion (a relative path) from what
-        # svn log --xml says, ie
+        # svn log --xml says, i.e.
         #   "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
         # that is to say "tests/PloneTestCase.py"
         if path.startswith(module):
--- a/hgext/convert/transport.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/convert/transport.py	Fri Oct 19 01:34:50 2012 -0500
@@ -43,7 +43,7 @@
         svn.client.get_ssl_client_cert_pw_file_provider(pool),
         svn.client.get_ssl_server_trust_file_provider(pool),
         ]
-    # Platform-dependant authentication methods
+    # Platform-dependent authentication methods
     getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider',
                           None)
     if getprovider:
--- a/hgext/eol.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/eol.py	Fri Oct 19 01:34:50 2012 -0500
@@ -99,7 +99,7 @@
 # Matches a lone LF, i.e., one that is not part of CRLF.
 singlelf = re.compile('(^|[^\r])\n')
 # Matches a single EOL which can either be a CRLF where repeated CR
-# are removed or a LF. We do not care about old Machintosh files, so a
+# are removed or a LF. We do not care about old Macintosh files, so a
 # stray CR is an error.
 eolre = re.compile('\r*\n')
 
--- a/hgext/extdiff.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/extdiff.py	Fri Oct 19 01:34:50 2012 -0500
@@ -109,7 +109,7 @@
     return dirname, fns_and_mtime
 
 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
-    '''Do the actuall diff:
+    '''Do the actual diff:
 
     - copy to a temp structure if diffing 2 internal revisions
     - copy to a temp structure if diffing working revision with
@@ -329,3 +329,5 @@
         cmdtable[cmd] = (save(cmd, path, diffopts),
                          cmdtable['extdiff'][1][1:],
                          _('hg %s [OPTION]... [FILE]...') % cmd)
+
+commands.inferrepo += " extdiff"
--- a/hgext/graphlog.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/graphlog.py	Fri Oct 19 01:34:50 2012 -0500
@@ -52,3 +52,5 @@
     directory.
     """
     return cmdutil.graphlog(ui, repo, *pats, **opts)
+
+commands.inferrepo += " glog"
--- a/hgext/hgcia.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/hgcia.py	Fri Oct 19 01:34:50 2012 -0500
@@ -22,7 +22,7 @@
   # Style to use (optional)
   #style = foo
   # The URL of the CIA notification service (optional)
-  # You can use mailto: URLs to send by email, eg
+  # You can use mailto: URLs to send by email, e.g.
   # mailto:cia@cia.vc
   # Make sure to set email.from if you do this.
   #url = http://cia.vc/
--- a/hgext/hgk.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/hgk.py	Fri Oct 19 01:34:50 2012 -0500
@@ -350,3 +350,5 @@
           ('n', 'max-count', 0, _('max-count'))],
          _('hg debug-rev-list [OPTION]... REV...')),
 }
+
+commands.inferrepo += " debug-diff-tree debug-cat-file"
--- a/hgext/histedit.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/histedit.py	Fri Oct 19 01:34:50 2012 -0500
@@ -142,20 +142,22 @@
     import cPickle as pickle
 except ImportError:
     import pickle
-import tempfile
 import os
 
 from mercurial import bookmarks
 from mercurial import cmdutil
 from mercurial import discovery
 from mercurial import error
+from mercurial import copies
+from mercurial import context
 from mercurial import hg
 from mercurial import lock as lockmod
 from mercurial import node
-from mercurial import patch
 from mercurial import repair
 from mercurial import scmutil
 from mercurial import util
+from mercurial import obsolete
+from mercurial import merge as mergemod
 from mercurial.i18n import _
 
 cmdtable = {}
@@ -175,82 +177,123 @@
 #
 """)
 
-def between(repo, old, new, keep):
-    revs = [old]
-    current = old
-    while current != new:
-        ctx = repo[current]
-        if not keep and len(ctx.children()) > 1:
-            raise util.Abort(_('cannot edit history that would orphan nodes'))
-        if len(ctx.parents()) != 1 and ctx.parents()[1] != node.nullid:
-            raise util.Abort(_("can't edit history with merges"))
-        if not ctx.children():
-            current = new
+def applychanges(ui, repo, ctx, opts):
+    """Merge changeset from ctx (only) in the current working directory"""
+    wcpar = repo.dirstate.parents()[0]
+    if ctx.p1().node() == wcpar:
+        # edition ar "in place" we do not need to make any merge,
+        # just applies changes on parent for edition
+        cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
+        stats = None
+    else:
+        try:
+            # ui.forcemerge is an internal variable, do not document
+            repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
+            stats = mergemod.update(repo, ctx.node(), True, True, False,
+                                    ctx.p1().node())
+        finally:
+            repo.ui.setconfig('ui', 'forcemerge', '')
+        repo.setparents(wcpar, node.nullid)
+        repo.dirstate.write()
+        # fix up dirstate for copies and renames
+    cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
+    return stats
+
+def collapse(repo, first, last, commitopts):
+    """collapse the set of revisions from first to last as new one.
+
+    Expected commit options are:
+        - message
+        - date
+        - username
+    Commit message is edited in all cases.
+
+    This function works in memory."""
+    ctxs = list(repo.set('%d::%d', first, last))
+    if not ctxs:
+        return None
+    base = first.parents()[0]
+
+    # commit a new version of the old changeset, including the update
+    # collect all files which might be affected
+    files = set()
+    for ctx in ctxs:
+        files.update(ctx.files())
+
+    # Recompute copies (avoid recording a -> b -> a)
+    copied = copies.pathcopies(first, last)
+
+    # prune files which were reverted by the updates
+    def samefile(f):
+        if f in last.manifest():
+            a = last.filectx(f)
+            if f in base.manifest():
+                b = base.filectx(f)
+                return (a.data() == b.data()
+                        and a.flags() == b.flags())
+            else:
+                return False
         else:
-            current = ctx.children()[0].node()
-            revs.append(current)
-    if len(repo[current].children()) and not keep:
-        raise util.Abort(_('cannot edit history that would orphan nodes'))
-    return revs
+            return f not in base.manifest()
+    files = [f for f in files if not samefile(f)]
+    # commit version of these files as defined by head
+    headmf = last.manifest()
+    def filectxfn(repo, ctx, path):
+        if path in headmf:
+            fctx = last[path]
+            flags = fctx.flags()
+            mctx = context.memfilectx(fctx.path(), fctx.data(),
+                                      islink='l' in flags,
+                                      isexec='x' in flags,
+                                      copied=copied.get(path))
+            return mctx
+        raise IOError()
 
+    if commitopts.get('message'):
+        message = commitopts['message']
+    else:
+        message = first.description()
+    user = commitopts.get('user')
+    date = commitopts.get('date')
+    extra = first.extra()
+
+    parents = (first.p1().node(), first.p2().node())
+    new = context.memctx(repo,
+                         parents=parents,
+                         text=message,
+                         files=files,
+                         filectxfn=filectxfn,
+                         user=user,
+                         date=date,
+                         extra=extra)
+    new._text = cmdutil.commitforceeditor(repo, new, [])
+    return repo.commitctx(new)
 
 def pick(ui, repo, ctx, ha, opts):
     oldctx = repo[ha]
     if oldctx.parents()[0] == ctx:
         ui.debug('node %s unchanged\n' % ha)
-        return oldctx, [], [], []
+        return oldctx, []
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
-    try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-            if not files:
-                ui.warn(_('%s: empty changeset')
-                             % node.hex(ha))
-                return ctx, [], [], []
-        finally:
-            os.unlink(patchfile)
-    except Exception:
+    stats = applychanges(ui, repo, oldctx, opts)
+    if stats and stats[3] > 0:
         raise util.Abort(_('Fix up the change and run '
                            'hg histedit --continue'))
+    # drop the second merge parent
     n = repo.commit(text=oldctx.description(), user=oldctx.user(),
                     date=oldctx.date(), extra=oldctx.extra())
-    return repo[n], [n], [oldctx.node()], []
+    if n is None:
+        ui.warn(_('%s: empty changeset\n')
+                     % node.hex(ha))
+        return ctx, []
+    new = repo[n]
+    return new, [(oldctx.node(), (n,))]
 
 
 def edit(ui, repo, ctx, ha, opts):
     oldctx = repo[ha]
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
-    try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-        finally:
-            os.unlink(patchfile)
-    except Exception:
-        pass
+    applychanges(ui, repo, oldctx, opts)
     raise util.Abort(_('Make changes as needed, you may commit or record as '
                        'needed now.\nWhen you are finished, run hg'
                        ' histedit --continue to resume.'))
@@ -258,92 +301,58 @@
 def fold(ui, repo, ctx, ha, opts):
     oldctx = repo[ha]
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
-    try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-            if not files:
-                ui.warn(_('%s: empty changeset')
-                             % node.hex(ha))
-                return ctx, [], [], []
-        finally:
-            os.unlink(patchfile)
-    except Exception:
+    stats = applychanges(ui, repo, oldctx, opts)
+    if stats and stats[3] > 0:
         raise util.Abort(_('Fix up the change and run '
                            'hg histedit --continue'))
     n = repo.commit(text='fold-temp-revision %s' % ha, user=oldctx.user(),
                     date=oldctx.date(), extra=oldctx.extra())
+    if n is None:
+        ui.warn(_('%s: empty changeset')
+                     % node.hex(ha))
+        return ctx, []
     return finishfold(ui, repo, ctx, oldctx, n, opts, [])
 
 def finishfold(ui, repo, ctx, oldctx, newnode, opts, internalchanges):
     parent = ctx.parents()[0].node()
     hg.update(repo, parent)
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, parent, newnode, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
-    files = set()
-    try:
-        patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-    finally:
-        os.unlink(patchfile)
+    ### prepare new commit data
+    commitopts = opts.copy()
+    # username
+    if ctx.user() == oldctx.user():
+        username = ctx.user()
+    else:
+        username = ui.username()
+    commitopts['user'] = username
+    # commit message
     newmessage = '\n***\n'.join(
         [ctx.description()] +
         [repo[r].description() for r in internalchanges] +
         [oldctx.description()]) + '\n'
-    # If the changesets are from the same author, keep it.
-    if ctx.user() == oldctx.user():
-        username = ctx.user()
-    else:
-        username = ui.username()
-    newmessage = ui.edit(newmessage, username)
-    n = repo.commit(text=newmessage, user=username,
-                    date=max(ctx.date(), oldctx.date()), extra=oldctx.extra())
-    return repo[n], [n], [oldctx.node(), ctx.node()], [newnode]
+    commitopts['message'] = newmessage
+    # date
+    commitopts['date'] = max(ctx.date(), oldctx.date())
+    n = collapse(repo, ctx, repo[newnode], commitopts)
+    if n is None:
+        return ctx, []
+    hg.update(repo, n)
+    replacements = [(oldctx.node(), (newnode,)),
+                     (ctx.node(), (n,)),
+                     (newnode, (n,)),
+                    ]
+    for ich in internalchanges:
+        replacements.append((ich, (n,)))
+    return repo[n], replacements
 
 def drop(ui, repo, ctx, ha, opts):
-    return ctx, [], [repo[ha].node()], []
+    return ctx, [(repo[ha].node(), ())]
 
 
 def message(ui, repo, ctx, ha, opts):
     oldctx = repo[ha]
     hg.update(repo, ctx.node())
-    fd, patchfile = tempfile.mkstemp(prefix='hg-histedit-')
-    fp = os.fdopen(fd, 'w')
-    diffopts = patch.diffopts(ui, opts)
-    diffopts.git = True
-    diffopts.ignorews = False
-    diffopts.ignorewsamount = False
-    diffopts.ignoreblanklines = False
-    gen = patch.diff(repo, oldctx.parents()[0].node(), ha, opts=diffopts)
-    for chunk in gen:
-        fp.write(chunk)
-    fp.close()
-    try:
-        files = set()
-        try:
-            patch.patch(ui, repo, patchfile, files=files, eolmode=None)
-        finally:
-            os.unlink(patchfile)
-    except Exception:
+    stats = applychanges(ui, repo, oldctx, opts)
+    if stats and stats[3] > 0:
         raise util.Abort(_('Fix up the change and run '
                            'hg histedit --continue'))
     message = oldctx.description() + '\n'
@@ -352,17 +361,9 @@
                       extra=oldctx.extra())
     newctx = repo[new]
     if oldctx.node() != newctx.node():
-        return newctx, [new], [oldctx.node()], []
+        return newctx, [(oldctx.node(), (new,))]
     # We didn't make an edit, so just indicate no replaced nodes
-    return newctx, [new], [], []
-
-
-def makedesc(c):
-    summary = ''
-    if c.description():
-        summary = c.description().splitlines()[0]
-    line = 'pick %s %d %s' % (c.hex()[:12], c.rev(), summary)
-    return line[:80]  # trim to 80 chars so it's not stupidly wide in my editor
+    return newctx, []
 
 actiontable = {'p': pick,
                'pick': pick,
@@ -424,86 +425,20 @@
     if opts.get('continue', False):
         if len(parent) != 0:
             raise util.Abort(_('no arguments allowed with --continue'))
-        (parentctxnode, created, replaced,
-         tmpnodes, existing, rules, keep, tip, replacemap) = readstate(repo)
+        (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
         currentparent, wantnull = repo.dirstate.parents()
         parentctx = repo[parentctxnode]
-        # existing is the list of revisions initially considered by
-        # histedit. Here we use it to list new changesets, descendants
-        # of parentctx without an 'existing' changeset in-between. We
-        # also have to exclude 'existing' changesets which were
-        # previously dropped.
-        descendants = set(c.node() for c in
-                repo.set('(%n::) - %n', parentctxnode, parentctxnode))
-        existing = set(existing)
-        notdropped = set(n for n in existing if n in descendants and
-                (n not in replacemap or replacemap[n] in descendants))
-        # Discover any nodes the user has added in the interim. We can
-        # miss changesets which were dropped and recreated the same.
-        newchildren = list(c.node() for c in repo.set(
-            'sort(%ln - (%ln or %ln::))', descendants, existing, notdropped))
-        action, currentnode = rules.pop(0)
-        if action in ('f', 'fold'):
-            tmpnodes.extend(newchildren)
-        else:
-            created.extend(newchildren)
-
-        m, a, r, d = repo.status()[:4]
-        oldctx = repo[currentnode]
-        message = oldctx.description() + '\n'
-        if action in ('e', 'edit', 'm', 'mess'):
-            message = ui.edit(message, ui.username())
-        elif action in ('f', 'fold'):
-            message = 'fold-temp-revision %s' % currentnode
-        new = None
-        if m or a or r or d:
-            new = repo.commit(text=message, user=oldctx.user(),
-                              date=oldctx.date(), extra=oldctx.extra())
-
-        # If we're resuming a fold and we have new changes, mark the
-        # replacements and finish the fold. If not, it's more like a
-        # drop of the changesets that disappeared, and we can skip
-        # this step.
-        if action in ('f', 'fold') and (new or newchildren):
-            if new:
-                tmpnodes.append(new)
-            else:
-                new = newchildren[-1]
-            (parentctx, created_, replaced_, tmpnodes_) = finishfold(
-                ui, repo, parentctx, oldctx, new, opts, newchildren)
-            replaced.extend(replaced_)
-            created.extend(created_)
-            tmpnodes.extend(tmpnodes_)
-        elif action not in ('d', 'drop'):
-            if new != oldctx.node():
-                replaced.append(oldctx.node())
-            if new:
-                if new != oldctx.node():
-                    created.append(new)
-                parentctx = repo[new]
-
+        parentctx, repl = bootstrapcontinue(ui, repo, parentctx, rules, opts)
+        replacements.extend(repl)
     elif opts.get('abort', False):
         if len(parent) != 0:
             raise util.Abort(_('no arguments allowed with --abort'))
-        (parentctxnode, created, replaced, tmpnodes,
-         existing, rules, keep, tip, replacemap) = readstate(repo)
-        ui.debug('restore wc to old tip %s\n' % node.hex(tip))
-        hg.clean(repo, tip)
-        ui.debug('should strip created nodes %s\n' %
-                 ', '.join([node.hex(n)[:12] for n in created]))
-        ui.debug('should strip temp nodes %s\n' %
-                 ', '.join([node.hex(n)[:12] for n in tmpnodes]))
-        for nodes in (created, tmpnodes):
-            lock = None
-            try:
-                lock = repo.lock()
-                for n in reversed(nodes):
-                    try:
-                        repair.strip(ui, repo, n)
-                    except error.LookupError:
-                        pass
-            finally:
-                lockmod.release(lock)
+        (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
+        mapping, tmpnodes, leafs, _ntm = processreplacement(repo, replacements)
+        ui.debug('restore wc to old parent %s\n' % node.short(topmost))
+        hg.clean(repo, topmost)
+        cleanupnode(ui, repo, 'created', tmpnodes)
+        cleanupnode(ui, repo, 'temp', leafs)
         os.unlink(os.path.join(repo.path, 'histedit-state'))
         return
     else:
@@ -512,23 +447,24 @@
             raise util.Abort(_('history edit already in progress, try '
                                '--continue or --abort'))
 
-        tip, empty = repo.dirstate.parents()
-
+        topmost, empty = repo.dirstate.parents()
 
         if len(parent) != 1:
             raise util.Abort(_('histedit requires exactly one parent revision'))
         parent = scmutil.revsingle(repo, parent[0]).node()
 
         keep = opts.get('keep', False)
-        revs = between(repo, parent, tip, keep)
+        revs = between(repo, parent, topmost, keep)
+        if not revs:
+            ui.warn(_('nothing to edit\n'))
+            return 1
 
         ctxs = [repo[r] for r in revs]
-        existing = [r.node() for r in ctxs]
         rules = opts.get('commands', '')
         if not rules:
             rules = '\n'.join([makedesc(c) for c in ctxs])
             rules += '\n\n'
-            rules += editcomment % (node.hex(parent)[:12], node.hex(tip)[:12])
+            rules += editcomment % (node.short(parent), node.short(topmost))
             rules = ui.edit(rules, ui.username())
             # Save edit rules in .hg/histedit-last-edit.txt in case
             # the user needs to ask for help after something
@@ -546,147 +482,151 @@
 
         parentctx = repo[parent].parents()[0]
         keep = opts.get('keep', False)
-        replaced = []
-        replacemap = {}
-        tmpnodes = []
-        created = []
+        replacements = []
 
 
     while rules:
-        writestate(repo, parentctx.node(), created, replaced,
-                   tmpnodes, existing, rules, keep, tip, replacemap)
+        writestate(repo, parentctx.node(), rules, keep, topmost, replacements)
         action, ha = rules.pop(0)
-        (parentctx, created_, replaced_, tmpnodes_) = actiontable[action](
-            ui, repo, parentctx, ha, opts)
-
-        if replaced_:
-            clen, rlen = len(created_), len(replaced_)
-            if clen == rlen == 1:
-                ui.debug('histedit: exact replacement of %s with %s\n' % (
-                    node.short(replaced_[0]), node.short(created_[0])))
-
-                replacemap[replaced_[0]] = created_[0]
-            elif clen > rlen:
-                assert rlen == 1, ('unexpected replacement of '
-                                   '%d changes with %d changes' % (rlen, clen))
-                # made more changesets than we're replacing
-                # TODO synthesize patch names for created patches
-                replacemap[replaced_[0]] = created_[-1]
-                ui.debug('histedit: created many, assuming %s replaced by %s' %
-                         (node.short(replaced_[0]), node.short(created_[-1])))
-            elif rlen > clen:
-                if not created_:
-                    # This must be a drop. Try and put our metadata on
-                    # the parent change.
-                    assert rlen == 1
-                    r = replaced_[0]
-                    ui.debug('histedit: %s seems replaced with nothing, '
-                            'finding a parent\n' % (node.short(r)))
-                    pctx = repo[r].parents()[0]
-                    if pctx.node() in replacemap:
-                        ui.debug('histedit: parent is already replaced\n')
-                        replacemap[r] = replacemap[pctx.node()]
-                    else:
-                        replacemap[r] = pctx.node()
-                    ui.debug('histedit: %s best replaced by %s\n' % (
-                        node.short(r), node.short(replacemap[r])))
-                else:
-                    assert len(created_) == 1
-                    for r in replaced_:
-                        ui.debug('histedit: %s replaced by %s\n' % (
-                            node.short(r), node.short(created_[0])))
-                        replacemap[r] = created_[0]
-            else:
-                assert False, (
-                    'Unhandled case in replacement mapping! '
-                    'replacing %d changes with %d changes' % (rlen, clen))
-        created.extend(created_)
-        replaced.extend(replaced_)
-        tmpnodes.extend(tmpnodes_)
+        ui.debug('histedit: processing %s %s\n' % (action, ha))
+        actfunc = actiontable[action]
+        parentctx, replacement_ = actfunc(ui, repo, parentctx, ha, opts)
+        replacements.extend(replacement_)
 
     hg.update(repo, parentctx.node())
 
-    if not keep:
-        if replacemap:
-            ui.note(_('histedit: Should update metadata for the following '
-                      'changes:\n'))
-
-            def copybms(old, new):
-                if old in tmpnodes or old in created:
-                    # can't have any metadata we'd want to update
-                    return
-                while new in replacemap:
-                    new = replacemap[new]
-                ui.note(_('histedit:  %s to %s\n') % (node.short(old),
-                                                      node.short(new)))
-                octx = repo[old]
-                marks = octx.bookmarks()
-                if marks:
-                    ui.note(_('histedit:     moving bookmarks %s\n') %
-                              ', '.join(marks))
-                    for mark in marks:
-                        repo._bookmarks[mark] = new
-                    bookmarks.write(repo)
+    mapping, tmpnodes, created, ntm = processreplacement(repo, replacements)
+    if mapping:
+        for prec, succs in mapping.iteritems():
+            if not succs:
+                ui.debug('histedit: %s is dropped\n' % node.short(prec))
+            else:
+                ui.debug('histedit: %s is replaced by %s\n' % (
+                    node.short(prec), node.short(succs[0])))
+                if len(succs) > 1:
+                    m = 'histedit:                            %s'
+                    for n in succs[1:]:
+                        ui.debug(m % node.short(n))
 
-            # We assume that bookmarks on the tip should remain
-            # tipmost, but bookmarks on non-tip changesets should go
-            # to their most reasonable successor. As a result, find
-            # the old tip and new tip and copy those bookmarks first,
-            # then do the rest of the bookmark copies.
-            oldtip = sorted(replacemap.keys(), key=repo.changelog.rev)[-1]
-            newtip = sorted(replacemap.values(), key=repo.changelog.rev)[-1]
-            copybms(oldtip, newtip)
-
-            for old, new in sorted(replacemap.iteritems()):
-                copybms(old, new)
-                # TODO update mq state
+    if not keep:
+        if mapping:
+            movebookmarks(ui, repo, mapping, topmost, ntm)
+            # TODO update mq state
+        if obsolete._enabled:
+            markers = []
+            # sort by revision number because it sounds "right"
+            for prec in sorted(mapping, key=repo.changelog.rev):
+                succs = mapping[prec]
+                markers.append((repo[prec],
+                                tuple(repo[s] for s in succs)))
+            if markers:
+                obsolete.createmarkers(repo, markers)
+        else:
+            cleanupnode(ui, repo, 'replaced', mapping)
 
-        ui.debug('should strip replaced nodes %s\n' %
-                 ', '.join([node.hex(n)[:12] for n in replaced]))
-        lock = None
-        try:
-            lock = repo.lock()
-            for n in sorted(replaced, key=lambda x: repo[x].rev()):
-                try:
-                    repair.strip(ui, repo, n)
-                except error.LookupError:
-                    pass
-        finally:
-            lockmod.release(lock)
-
-    ui.debug('should strip temp nodes %s\n' %
-             ', '.join([node.hex(n)[:12] for n in tmpnodes]))
-    lock = None
-    try:
-        lock = repo.lock()
-        for n in reversed(tmpnodes):
-            try:
-                repair.strip(ui, repo, n)
-            except error.LookupError:
-                pass
-    finally:
-        lockmod.release(lock)
+    cleanupnode(ui, repo, 'temp', tmpnodes)
     os.unlink(os.path.join(repo.path, 'histedit-state'))
     if os.path.exists(repo.sjoin('undo')):
         os.unlink(repo.sjoin('undo'))
 
 
-def writestate(repo, parentctxnode, created, replaced,
-               tmpnodes, existing, rules, keep, oldtip, replacemap):
+def bootstrapcontinue(ui, repo, parentctx, rules, opts):
+    action, currentnode = rules.pop(0)
+    ctx = repo[currentnode]
+    # are there any new commits between the expected parent and "."?
+    #
+    # note: does not take non-linear new changes into account (but the
+    #       previous implementation didn't use them anyway (issue3655))
+    newchildren = [c.node() for c in repo.set('(%d::.)', parentctx)]
+    if not newchildren:
+        # `parentctxnode` should match but gave no result. This means that
+        # currentnode is not a descendant of parentctxnode.
+        msg = _('working directory parent is not a descendant of %s')
+        hint = _('update to %s or descendant and run "hg histedit '
+                 '--continue" again') % parentctx
+        raise util.Abort(msg % parentctx, hint=hint)
+    newchildren.pop(0)  # remove parentctxnode
+    # Commit dirty working directory if necessary
+    new = None
+    m, a, r, d = repo.status()[:4]
+    if m or a or r or d:
+        # prepare the message for the commit to come
+        if action in ('f', 'fold'):
+            message = 'fold-temp-revision %s' % currentnode
+        else:
+            message = ctx.description() + '\n'
+        if action in ('e', 'edit', 'm', 'mess'):
+            editor = cmdutil.commitforceeditor
+        else:
+            editor = False
+        new = repo.commit(text=message, user=ctx.user(),
+                          date=ctx.date(), extra=ctx.extra(),
+                          editor=editor)
+        if new is not None:
+            newchildren.append(new)
+
+    replacements = []
+    # track replacements
+    if ctx.node() not in newchildren:
+        # note: new children may be empty when the changeset is dropped.
+        # this happens e.g. during a conflicting pick where we revert the
+        # content to the parent.
+        replacements.append((ctx.node(), tuple(newchildren)))
+
+    if action in ('f', 'fold'):
+        # finalize fold operation if applicable
+        if new is None:
+            new = newchildren[-1]
+        else:
+            newchildren.pop()  # remove new from internal changes
+        parentctx, repl = finishfold(ui, repo, parentctx, ctx, new, opts,
+                                     newchildren)
+        replacements.extend(repl)
+    elif newchildren:
+        # otherwise update "parentctx" before proceeding to further operations
+        parentctx = repo[newchildren[-1]]
+    return parentctx, replacements
+
+
+def between(repo, old, new, keep):
+    """select and validate the set of revision to edit
+
+    When keep is false, the specified set can't have children."""
+    ctxs = list(repo.set('%n::%n', old, new))
+    if ctxs and not keep:
+        if repo.revs('(%ld::) - (%ld + hidden())', ctxs, ctxs):
+            raise util.Abort(_('cannot edit history that would orphan nodes'))
+        root = ctxs[0] # list is already sorted by repo.set
+        if not root.phase():
+            raise util.Abort(_('cannot edit immutable changeset: %s') % root)
+    return [c.node() for c in ctxs]
+
+
+def writestate(repo, parentnode, rules, keep, topmost, replacements):
     fp = open(os.path.join(repo.path, 'histedit-state'), 'w')
-    pickle.dump((parentctxnode, created, replaced,
-                 tmpnodes, existing, rules, keep, oldtip, replacemap),
-                fp)
+    pickle.dump((parentnode, rules, keep, topmost, replacements), fp)
     fp.close()
 
 def readstate(repo):
-    """Returns a tuple of (parentnode, created, replaced, tmp, existing, rules,
-                           keep, oldtip, replacemap ).
+    """Returns a tuple of (parentnode, rules, keep, topmost, replacements).
     """
     fp = open(os.path.join(repo.path, 'histedit-state'))
     return pickle.load(fp)
 
 
+def makedesc(c):
+    """build a initial action line for a ctx `c`
+
+    line are in the form:
+
+      pick <hash> <rev> <summary>
+    """
+    summary = ''
+    if c.description():
+        summary = c.description().splitlines()[0]
+    line = 'pick %s %d %s' % (c, c.rev(), summary)
+    return line[:80]  # trim to 80 chars so it's not stupidly wide in my editor
+
 def verifyrules(rules, repo, ctxs):
     """Verify that there exists exactly one edit rule per given changeset.
 
@@ -714,3 +654,117 @@
             raise util.Abort(_('unknown action "%s"') % action)
         parsed.append([action, ha])
     return parsed
+
+def processreplacement(repo, replacements):
+    """process the list of replacements to return
+
+    1) the final mapping between original and created nodes
+    2) the list of temporary nodes created by histedit
+    3) the list of new commits created by histedit"""
+    allsuccs = set()
+    replaced = set()
+    fullmapping = {}
+    # initialise basic sets
+    # fullmapping records all operations recorded in replacements
+    for rep in replacements:
+        allsuccs.update(rep[1])
+        replaced.add(rep[0])
+        fullmapping.setdefault(rep[0], set()).update(rep[1])
+    new = allsuccs - replaced
+    tmpnodes = allsuccs & replaced
+    # Reduce fullmapping into a direct relation between original nodes
+    # and final nodes created during history editing
+    # Dropped changesets are replaced by an empty list
+    toproceed = set(fullmapping)
+    final = {}
+    while toproceed:
+        for x in list(toproceed):
+            succs = fullmapping[x]
+            for s in list(succs):
+                if s in toproceed:
+                    # non final node with unknown closure
+                    # We can't process this now
+                    break
+                elif s in final:
+                    # non final node, replace with closure
+                    succs.remove(s)
+                    succs.update(final[s])
+            else:
+                final[x] = succs
+                toproceed.remove(x)
+    # remove tmpnodes from final mapping
+    for n in tmpnodes:
+        del final[n]
+    # we expect all changes involved in final to exist in the repo
+    # turn `final` into a list (topologically sorted)
+    nm = repo.changelog.nodemap
+    for prec, succs in final.items():
+        final[prec] = sorted(succs, key=nm.get)
+
+    # compute the topmost element (necessary for bookmarks)
+    if new:
+        newtopmost = sorted(new, key=repo.changelog.rev)[-1]
+    elif not final:
+        # Nothing rewritten at all. We won't need `newtopmost`;
+        # it is the same as `oldtopmost` and `processreplacement` knows it
+        newtopmost = None
+    else:
+        # everybody died. The new topmost is the parent of the root.
+        newtopmost = repo[sorted(final, key=repo.changelog.rev)[0]].p1().node()
+
+    return final, tmpnodes, new, newtopmost
+
+def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
+    """Move bookmark from old to newly created node"""
+    if not mapping:
+        # if nothing got rewritten there is no purpose for this function
+        return
+    moves = []
+    for bk, old in repo._bookmarks.iteritems():
+        if old == oldtopmost:
+            # special case: ensure bookmarks stay on the tip.
+            #
+            # This is arguably a feature and we may only want that for the
+            # active bookmark. But the behavior is kept compatible with the old
+            # version for now.
+            moves.append((bk, newtopmost))
+            continue
+        base = old
+        new = mapping.get(base, None)
+        if new is None:
+            continue
+        while not new:
+            # base is killed, try its parent
+            base = repo[base].p1().node()
+            new = mapping.get(base, (base,))
+            # nothing to move
+        moves.append((bk, new[-1]))
+    if moves:
+        for mark, new in moves:
+            old = repo._bookmarks[mark]
+            ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
+                    % (mark, node.short(old), node.short(new)))
+            repo._bookmarks[mark] = new
+        bookmarks.write(repo)
+
+def cleanupnode(ui, repo, name, nodes):
+    """strip a group of nodes from the repository
+
+    The set of nodes to strip may contain unknown nodes."""
+    ui.debug('should strip %s nodes %s\n' %
+             (name, ', '.join([node.short(n) for n in nodes])))
+    lock = None
+    try:
+        lock = repo.lock()
+        # Find all nodes that need to be stripped
+        # (we use %lr instead of %ln to silently ignore unknown items)
+        nm = repo.changelog.nodemap
+        nodes = [n for n in nodes if n in nm]
+        roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
+        for c in roots:
+            # We should process nodes in reverse order to strip tip-most first,
+            # but this triggers a bug in the changegroup hook.
+            # That would reduce bundle overhead.
+            repair.strip(ui, repo, c)
+    finally:
+        lockmod.release(lock)
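
A minimal sketch (assumption, not part of the changeset) of the replacement
bookkeeping the rewritten histedit relies on: each action now returns
(newparentctx, replacements), where a replacement pairs an old node with the
tuple of nodes that superseded it and an empty tuple marks a drop.

  # hypothetical 20-byte node ids, for illustration only
  olda, oldb, newx = 'a' * 20, 'b' * 20, 'x' * 20

  replacements = [
      (olda, (newx,)),   # pick/mess: olda rewritten as newx
      (oldb, ()),        # drop: oldb simply disappears
  ]

  dropped = [old for old, succs in replacements if not succs]
  rewritten = dict((old, succs) for old, succs in replacements if succs)
  assert dropped == [oldb]
  assert rewritten[olda] == (newx,)
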
--- a/hgext/inotify/linux/_inotify.c	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/inotify/linux/_inotify.c	Fri Oct 19 01:34:50 2012 -0500
@@ -61,7 +61,7 @@
 	init_doc,
 	"init() -> fd\n"
 	"\n"
-	"Initialise an inotify instance.\n"
+	"Initialize an inotify instance.\n"
 	"Return a file descriptor associated with a new inotify event queue.");
 
 static PyObject *add_watch(PyObject *self, PyObject *args)
--- a/hgext/inotify/linux/watcher.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/inotify/linux/watcher.py	Fri Oct 19 01:34:50 2012 -0500
@@ -282,7 +282,7 @@
         callable that takes one parameter.  It will be called each time
         a directory is about to be automatically watched.  If it returns
         True, the directory will be watched if it still exists,
-        otherwise, it will beb skipped.'''
+        otherwise, it will be skipped.'''
 
         super(autowatcher, self).__init__()
         self.addfilter = addfilter
--- a/hgext/keyword.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/keyword.py	Fri Oct 19 01:34:50 2012 -0500
@@ -7,7 +7,7 @@
 #
 # $Id$
 #
-# Keyword expansion hack against the grain of a DSCM
+# Keyword expansion hack against the grain of a Distributed SCM
 #
 # There are many good reasons why this is not needed in a distributed
 # SCM, still it may be useful in very small projects based on single
@@ -89,6 +89,7 @@
 import os, re, shutil, tempfile
 
 commands.optionalrepo += ' kwdemo'
+commands.inferrepo += ' kwexpand kwfiles kwshrink'
 
 cmdtable = {}
 command = cmdutil.command(cmdtable)
@@ -117,7 +118,7 @@
 def utcdate(text):
     ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
     '''
-    return util.datestr((text[0], 0), '%Y/%m/%d %H:%M:%S')
+    return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
 # date like in svn's $Date
 def svnisodate(text):
     ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
@@ -129,7 +130,7 @@
     ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
     11:00:13Z".
     '''
-    return util.datestr((text[0], 0), '%Y-%m-%d %H:%M:%SZ')
+    return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
 
 templatefilters.filters.update({'utcdate': utcdate,
                                 'svnisodate': svnisodate,
@@ -168,7 +169,7 @@
     return subfunc(r'$\1$', text)
 
 def _preselect(wstatus, changed):
-    '''Retrieves modfied and added files from a working directory state
+    '''Retrieves modified and added files from a working directory state
     and returns the subset of each contained in given changed files
     retrieved from a change context.'''
     modified, added = wstatus[:2]
--- a/hgext/largefiles/basestore.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/largefiles/basestore.py	Fri Oct 19 01:34:50 2012 -0500
@@ -55,7 +55,7 @@
     def get(self, files):
         '''Get the specified largefiles from the store and write to local
         files under repo.root.  files is a list of (filename, hash)
-        tuples.  Return (success, missing), lists of files successfuly
+        tuples.  Return (success, missing), lists of files successfully
         downloaded and those not found in the store.  success is a list
         of (filename, hash) tuples; missing is a list of filenames that
         we could not get.  (The detailed error message will already have
--- a/hgext/largefiles/lfcommands.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/largefiles/lfcommands.py	Fri Oct 19 01:34:50 2012 -0500
@@ -12,7 +12,7 @@
 import shutil
 
 from mercurial import util, match as match_, hg, node, context, error, \
-    cmdutil, scmutil
+    cmdutil, scmutil, commands
 from mercurial.i18n import _
 from mercurial.lock import release
 
@@ -65,7 +65,7 @@
         dstlock = rdst.lock()
 
         # Get a list of all changesets in the source.  The easy way to do this
-        # is to simply walk the changelog, using changelog.nodesbewteen().
+        # is to simply walk the changelog, using changelog.nodesbetween().
         # Take a look at mercurial/revlog.py:639 for more details.
         # Use a generator instead of a list to decrease memory usage
         ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
@@ -141,7 +141,17 @@
 
             hash = fctx.data().strip()
             path = lfutil.findfile(rsrc, hash)
-            ### TODO: What if the file is not cached?
+
+            # If one file is missing, likely all files from this rev are
+            if path is None:
+                cachelfiles(ui, rsrc, ctx.node())
+                path = lfutil.findfile(rsrc, hash)
+
+                if path is None:
+                    raise util.Abort(
+                        _("missing largefile \'%s\' from revision %s")
+                         % (f, node.hex(ctx.node())))
+
             data = ''
             fd = None
             try:
@@ -177,7 +187,7 @@
         if f not in lfiles and f not in normalfiles:
             islfile = _islfile(f, ctx, matcher, size)
             # If this file was renamed or copied then copy
-            # the lfileness of its predecessor
+            # the largefile-ness of its predecessor
             if f in ctx.manifest():
                 fctx = ctx.filectx(f)
                 renamed = fctx.renamed()
@@ -389,7 +399,7 @@
         # If we are mid-merge, then we have to trust the standin that is in the
         # working copy to have the correct hashvalue.  This is because the
         # original hg.merge() already updated the standin as part of the normal
-        # merge process -- we just have to udpate the largefile to match.
+        # merge process -- we just have to update the largefile to match.
         if (getattr(repo, "_ismerging", False) and
              os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
             expectedhash = lfutil.readstandin(repo, lfile)
@@ -444,11 +454,13 @@
             cachelfiles(ui, repo, '.', lfiles)
 
         updated, removed = 0, 0
-        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
-            # increment the appropriate counter according to _updatelfile's
-            # return value
-            updated += i > 0 and i or 0
-            removed -= i < 0 and i or 0
+        for f in lfiles:
+            i = _updatelfile(repo, lfdirstate, f)
+            if i:
+                if i > 0:
+                    updated += i
+                else:
+                    removed -= i
             if printmessage and (removed or updated) and not printed:
                 ui.status(_('getting changed largefiles\n'))
                 printed = True
@@ -547,3 +559,5 @@
                   ],
                   _('hg lfconvert SOURCE DEST [FILE ...]')),
     }
+
+commands.inferrepo += " lfconvert"
--- a/hgext/largefiles/lfutil.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/largefiles/lfutil.py	Fri Oct 19 01:34:50 2012 -0500
@@ -52,9 +52,8 @@
 
 def findoutgoing(repo, remote, force):
     from mercurial import discovery
-    common, _anyinc, _heads = discovery.findcommonincoming(repo,
-        remote.peer(), force=force)
-    return repo.changelog.findmissing(common)
+    outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=force)
+    return outgoing.missing
 
 # -- Private worker functions ------------------------------------------
 
@@ -296,8 +295,8 @@
     '''Return the repo-relative path to the standin for the specified big
     file.'''
     # Notes:
-    # 1) Most callers want an absolute path, but _createstandin() needs
-    #    it repo-relative so lfadd() can pass it to repoadd().  So leave
+    # 1) Some callers want an absolute path, but for instance addlargefiles
+    #    needs it repo-relative so it can be passed to repoadd().  So leave
     #    it up to the caller to use repo.wjoin() to get an absolute path.
     # 2) Join with '/' because that's what dirstate always uses, even on
     #    Windows. Change existing separator to '/' first in case we are
--- a/hgext/largefiles/localstore.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/largefiles/localstore.py	Fri Oct 19 01:34:50 2012 -0500
@@ -18,7 +18,7 @@
 
 class localstore(basestore.basestore):
     '''localstore first attempts to grab files out of the store in the remote
-    Mercurial repository.  Failling that, it attempts to grab the files from
+    Mercurial repository.  Failing that, it attempts to grab the files from
     the user cache.'''
 
     def __init__(self, ui, repo, remote):
--- a/hgext/largefiles/overrides.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/largefiles/overrides.py	Fri Oct 19 01:34:50 2012 -0500
@@ -158,7 +158,7 @@
             ui.status(_('removing %s\n') % m.rel(f))
 
     # Need to lock because standin files are deleted then removed from the
-    # repository and we could race inbetween.
+    # repository and we could race in-between.
     wlock = repo.wlock()
     try:
         lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -256,7 +256,7 @@
 
 # Override needs to refresh standins so that update's normal merge
 # will go through properly. Then the other update hook (overriding repo.update)
-# will get the new files. Filemerge is also overriden so that the merge
+# will get the new files. Filemerge is also overridden so that the merge
 # will merge standins correctly.
 def overrideupdate(orig, ui, repo, *pats, **opts):
     lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -701,7 +701,7 @@
         result = orig(ui, repo, source, **opts)
         # If we do not have the new largefiles for any new heads we pulled, we
         # will run into a problem later if we try to merge or rebase with one of
-        # these heads, so cache the largefiles now direclty into the system
+        # these heads, so cache the largefiles now directly into the system
         # cache.
         ui.status(_("caching new largefiles\n"))
         numcached = 0
@@ -733,24 +733,29 @@
 def hgclone(orig, ui, opts, *args, **kwargs):
     result = orig(ui, opts, *args, **kwargs)
 
-    if result is not None and opts.get('all_largefiles'):
+    if result is not None:
         sourcerepo, destrepo = result
         repo = destrepo.local()
 
         # The .hglf directory must exist for the standin matcher to match
         # anything (which listlfiles uses for each rev), and .hg/largefiles is
         # assumed to exist by the code that caches the downloaded file.  These
-        # directories exist if clone updated to any rev.
-        if opts.get('noupdate'):
-            util.makedirs(repo.pathto(lfutil.shortname))
-            util.makedirs(repo.join(lfutil.longname))
+        # directories exist if clone updated to any rev.  (If the repo does not
+        # have largefiles, download never gets to the point of needing
+        # .hg/largefiles, and the standin matcher won't match anything anyway.)
+        if 'largefiles' in repo.requirements:
+            if opts.get('noupdate'):
+                util.makedirs(repo.pathto(lfutil.shortname))
+                util.makedirs(repo.join(lfutil.longname))
 
         # Caching is implicitly limited to 'rev' option, since the dest repo was
-        # truncated at that point.
-        success, missing = lfcommands.downloadlfiles(ui, repo, None)
+        # truncated at that point.  The user may expect a download count with
+        # this option, so attempt it whether or not this is a largefile repo.
+        if opts.get('all_largefiles'):
+            success, missing = lfcommands.downloadlfiles(ui, repo, None)
 
-        if missing != 0:
-            return None
+            if missing != 0:
+                return None
 
     return result
 
@@ -933,7 +938,7 @@
             ui.status(_('removing %s\n') % m.rel(f))
 
     # Need to lock because standin files are deleted then removed from the
-    # repository and we could race inbetween.
+    # repository and we could race in-between.
     wlock = repo.wlock()
     try:
         lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -963,7 +968,7 @@
         return None
     o = lfutil.findoutgoing(repo, remote, False)
     if not o:
-        return None
+        return o
     o = repo.changelog.nodesbetween(o, revs)[0]
     if opts.get('newest_first'):
         o.reverse()
@@ -997,6 +1002,8 @@
         toupload = getoutgoinglfiles(ui, repo, dest, **opts)
         if toupload is None:
             ui.status(_('largefiles: No remote repo\n'))
+        elif not toupload:
+            ui.status(_('largefiles: no files to upload\n'))
         else:
             ui.status(_('largefiles to upload:\n'))
             for file in toupload:
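
With getoutgoinglfiles() now returning an empty list rather than None when there is simply nothing to push, this hook and the summary hook below can distinguish a missing remote from an empty upload set. A small sketch of that three-way convention::

  def describe_outgoing(toupload):
      # None means no remote repo to compare against; [] means nothing to push
      if toupload is None:
          return 'largefiles: No remote repo'
      if not toupload:
          return 'largefiles: no files to upload'
      return 'largefiles: %d to upload' % len(toupload)

  assert describe_outgoing(None) == 'largefiles: No remote repo'
  assert describe_outgoing([]) == 'largefiles: no files to upload'
  assert describe_outgoing(['big.bin']) == 'largefiles: 1 to upload'
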
@@ -1016,6 +1023,8 @@
         toupload = getoutgoinglfiles(ui, repo, None, **opts)
         if toupload is None:
             ui.status(_('largefiles: No remote repo\n'))
+        elif not toupload:
+            ui.status(_('largefiles: (no files to upload)\n'))
         else:
             ui.status(_('largefiles: %d to upload\n') % len(toupload))
 
--- a/hgext/largefiles/remotestore.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/largefiles/remotestore.py	Fri Oct 19 01:34:50 2012 -0500
@@ -4,7 +4,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-'''remote largefile store; the base class for servestore'''
+'''remote largefile store; the base class for wirestore'''
 
 import urllib2
 
--- a/hgext/largefiles/reposetup.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/largefiles/reposetup.py	Fri Oct 19 01:34:50 2012 -0500
@@ -100,11 +100,11 @@
                 if isinstance(node1, context.changectx):
                     ctx1 = node1
                 else:
-                    ctx1 = repo[node1]
+                    ctx1 = self[node1]
                 if isinstance(node2, context.changectx):
                     ctx2 = node2
                 else:
-                    ctx2 = repo[node2]
+                    ctx2 = self[node2]
                 working = ctx2.rev() is None
                 parentworking = working and ctx1 == self['.']
 
@@ -140,7 +140,7 @@
                     if not working:
                         return files
                     newfiles = []
-                    dirstate = repo.dirstate
+                    dirstate = self.dirstate
                     for f in files:
                         sf = lfutil.standin(f)
                         if sf in dirstate:
@@ -156,7 +156,7 @@
                 # Create a function that we can use to override what is
                 # normally the ignore matcher.  We've already checked
                 # for ignored files on the first dirstate walk, and
-                # unecessarily re-checking here causes a huge performance
+                # unnecessarily re-checking here causes a huge performance
                 # hit because lfdirstate only knows about largefiles
                 def _ignoreoverride(self):
                     return False
@@ -181,7 +181,7 @@
 
                         def sfindirstate(f):
                             sf = lfutil.standin(f)
-                            dirstate = repo.dirstate
+                            dirstate = self.dirstate
                             return sf in dirstate or sf in dirstate.dirs()
                         match._files = [f for f in match._files
                                         if sfindirstate(f)]
@@ -192,7 +192,7 @@
                         (unsure, modified, added, removed, missing, unknown,
                                 ignored, clean) = s
                         # Replace the list of ignored and unknown files with
-                        # the previously caclulated lists, and strip out the
+                        # the previously calculated lists, and strip out the
                         # largefiles
                         lfiles = set(lfdirstate._map)
                         ignored = set(result[5]).difference(lfiles)
@@ -244,13 +244,13 @@
                     # files are not really removed if it's still in
                     # lfdirstate. This happens in merges where files
                     # change type.
-                    removed = [f for f in removed if f not in repo.dirstate]
+                    removed = [f for f in removed if f not in self.dirstate]
                     result[2] = [f for f in result[2] if f not in lfdirstate]
 
                     # Unknown files
                     unknown = set(unknown).difference(ignored)
                     result[4] = [f for f in unknown
-                                 if (repo.dirstate[f] == '?' and
+                                 if (self.dirstate[f] == '?' and
                                      not lfutil.isstandin(f))]
                     # Ignored files were calculated earlier by the dirstate,
                     # and we already stripped out the largefiles from the list
@@ -292,7 +292,7 @@
                 force=False, editor=False, extra={}):
             orig = super(lfilesrepo, self).commit
 
-            wlock = repo.wlock()
+            wlock = self.wlock()
             try:
                 # Case 0: Rebase or Transplant
                 # We have to take the time to pull down the new largefiles now.
@@ -301,9 +301,9 @@
                 # or in the first commit after the rebase or transplant.
                 # updatelfiles will update the dirstate to mark any pulled
                 # largefiles as modified
-                if getattr(repo, "_isrebasing", False) or \
-                        getattr(repo, "_istransplanting", False):
-                    lfcommands.updatelfiles(repo.ui, repo, filelist=None,
+                if getattr(self, "_isrebasing", False) or \
+                        getattr(self, "_istransplanting", False):
+                    lfcommands.updatelfiles(self.ui, self, filelist=None,
                                             printmessage=False)
                     result = orig(text=text, user=user, date=date, match=match,
                                     force=force, editor=editor, extra=extra)
@@ -319,7 +319,7 @@
                     # otherwise to update all standins if the largefiles are
                     # large.
                     lfdirstate = lfutil.openlfdirstate(ui, self)
-                    dirtymatch = match_.always(repo.root, repo.getcwd())
+                    dirtymatch = match_.always(self.root, self.getcwd())
                     s = lfdirstate.status(dirtymatch, [], False, False, False)
                     modifiedfiles = []
                     for i in s:
@@ -345,9 +345,9 @@
                     if result is not None:
                         for lfile in lfdirstate:
                             if lfile in modifiedfiles:
-                                if (not os.path.exists(repo.wjoin(
+                                if (not os.path.exists(self.wjoin(
                                    lfutil.standin(lfile)))) or \
-                                   (not os.path.exists(repo.wjoin(lfile))):
+                                   (not os.path.exists(self.wjoin(lfile))):
                                     lfdirstate.drop(lfile)
 
                     # This needs to be after commit; otherwise precommit hooks
@@ -390,7 +390,7 @@
                 # standins corresponding to the big files requested by the
                 # user.  Have to modify _files to prevent commit() from
                 # complaining "not tracked" for big files.
-                lfiles = lfutil.listlfiles(repo)
+                lfiles = lfutil.listlfiles(self)
                 match = copy.copy(match)
                 origmatchfn = match.matchfn
 
@@ -431,14 +431,14 @@
                 wlock.release()
 
         def push(self, remote, force=False, revs=None, newbranch=False):
-            o = lfutil.findoutgoing(repo, remote, force)
+            o = lfutil.findoutgoing(self, remote, force)
             if o:
                 toupload = set()
-                o = repo.changelog.nodesbetween(o, revs)[0]
+                o = self.changelog.nodesbetween(o, revs)[0]
                 for n in o:
-                    parents = [p for p in repo.changelog.parents(n)
+                    parents = [p for p in self.changelog.parents(n)
                                if p != node_.nullid]
-                    ctx = repo[n]
+                    ctx = self[n]
                     files = set(ctx.files())
                     if len(parents) == 2:
                         mc = ctx.manifest()
--- a/hgext/mq.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/mq.py	Fri Oct 19 01:34:50 2012 -0500
@@ -63,7 +63,7 @@
 from mercurial.node import bin, hex, short, nullid, nullrev
 from mercurial.lock import release
 from mercurial import commands, cmdutil, hg, scmutil, util, revset
-from mercurial import repair, extensions, url, error, phases
+from mercurial import repair, extensions, url, error, phases, bookmarks
 from mercurial import patch as patchmod
 import os, re, errno, shutil
 
@@ -1522,7 +1522,7 @@
             #
             # this should really read:
             #   mm, dd, aa = repo.status(top, patchparent)[:3]
-            # but we do it backwards to take advantage of manifest/chlog
+            # but we do it backwards to take advantage of manifest/changelog
             # caching against the next repo.status call
             mm, aa, dd = repo.status(patchparent, top)[:3]
             changes = repo.changelog.read(top)
@@ -1535,7 +1535,7 @@
                 # if amending a patch, we start with existing
                 # files plus specified files - unfiltered
                 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
-                # filter with inc/exl options
+                # filter with include/exclude options
                 matchfn = scmutil.match(repo[None], opts=opts)
             else:
                 match = scmutil.matchall(repo)
@@ -1577,6 +1577,7 @@
             a = list(aa)
             c = [filter(matchfn, l) for l in (m, a, r)]
             match = scmutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs))
+            bmlist = repo[top].bookmarks()
 
             try:
                 if diffopts.git or diffopts.upgrade:
@@ -1662,6 +1663,11 @@
                 for chunk in chunks:
                     patchf.write(chunk)
                 patchf.close()
+
+                for bm in bmlist:
+                    repo._bookmarks[bm] = n
+                bookmarks.write(repo)
+
                 self.applied.append(statusentry(n, patchfn))
             except: # re-raises
                 ctx = repo[cparents[0]]
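
The hunk above snapshots the bookmarks sitting on the old patch head before qrefresh rewrites it, then points them at the freshly committed node. A toy sketch of the re-pointing step, with plain strings standing in for nodes (move_bookmarks is an illustrative helper, not mq code)::

  def move_bookmarks(bookmarks, names_on_old_head, new_node):
      # bookmarks maps name -> node; only the recorded names are moved
      for name in names_on_old_head:
          bookmarks[name] = new_node
      return bookmarks

  marks = {'feature': 'abc123', 'stable': 'def456'}
  move_bookmarks(marks, ['feature'], 'fff000')
  assert marks == {'feature': 'fff000', 'stable': 'def456'}
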
@@ -3185,9 +3191,9 @@
     revs = scmutil.revrange(repo, revrange)
     if repo['.'].rev() in revs and repo[None].files():
         ui.warn(_('warning: uncommitted changes in the working directory\n'))
-    # queue.finish may changes phases but leave the responsability to lock the
+    # queue.finish may change phases but leaves the responsibility to lock the
     # repo to the caller to avoid deadlock with wlock. This command code is
-    # responsability for this locking.
+    # responsible for this locking.
     lock = repo.lock()
     try:
         q.finish(repo, revs)
@@ -3262,7 +3268,8 @@
 
     def _setactive(name):
         if q.applied:
-            raise util.Abort(_('patches applied - cannot set new queue active'))
+            raise util.Abort(_('new queue created, but cannot make active '
+                               'as patches are applied'))
         _setactivenocheck(name)
 
     def _setactivenocheck(name):
@@ -3595,3 +3602,5 @@
               'qseries.guarded': 'black bold',
               'qseries.missing': 'red bold',
               'qseries.unapplied': 'black bold'}
+
+commands.inferrepo += " qnew qrefresh qdiff qcommit"
--- a/hgext/notify.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/notify.py	Fri Oct 19 01:34:50 2012 -0500
@@ -30,17 +30,22 @@
 multiple recipients to a single repository::
 
   [usersubs]
-  # key is subscriber email, value is a comma-separated list of repo glob
-  # patterns
+  # key is subscriber email, value is a comma-separated list of repo patterns
   user@host = pattern
 
   [reposubs]
-  # key is glob pattern, value is a comma-separated list of subscriber
-  # emails
+  # key is repo pattern, value is a comma-separated list of subscriber emails
   pattern = user@host
 
-Glob patterns are matched against absolute path to repository
-root.
+A ``pattern`` is a ``glob`` matching the absolute path to a repository,
+optionally combined with a revset expression. A revset expression, if
+present, is separated from the glob by a hash. Example::
+
+  [reposubs]
+  */widgets#branch(release) = qa-team@example.com
+
+This sends to ``qa-team@example.com`` whenever a changeset on the ``release``
+branch triggers a notification in any repository ending in ``widgets``.
 
 In order to place them under direct user management, ``[usersubs]`` and
 ``[reposubs]`` sections may be placed in a separate ``hgrc`` file and
@@ -217,14 +222,22 @@
         subs = set()
         for user, pats in self.ui.configitems('usersubs'):
             for pat in pats.split(','):
+                if '#' in pat:
+                    pat, revs = pat.split('#', 1)
+                else:
+                    revs = None
                 if fnmatch.fnmatch(self.repo.root, pat.strip()):
-                    subs.add(self.fixmail(user))
+                    subs.add((self.fixmail(user), revs))
         for pat, users in self.ui.configitems('reposubs'):
+            if '#' in pat:
+                pat, revs = pat.split('#', 1)
+            else:
+                revs = None
             if fnmatch.fnmatch(self.repo.root, pat):
                 for user in users.split(','):
-                    subs.add(self.fixmail(user))
-        return [mail.addressencode(self.ui, s, self.charsets, self.test)
-                for s in sorted(subs)]
+                    subs.add((self.fixmail(user), revs))
+        return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
+                for s, r in sorted(subs)]
 
     def node(self, ctx, **props):
         '''format one changeset, unless it is a suppressed merge.'''
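
With this change a subscription pattern may carry an optional revset after a '#', and the subscriber list becomes (address, revset) pairs. An illustrative sketch of the parsing step (parse_subscription is a made-up helper, not part of notify.py)::

  import fnmatch

  def parse_subscription(repo_root, pattern, email):
      # split off an optional revset, then glob-match the repository path
      if '#' in pattern:
          pattern, revs = pattern.split('#', 1)
      else:
          revs = None
      if fnmatch.fnmatch(repo_root, pattern.strip()):
          return (email, revs)
      return None

  sub = parse_subscription('/srv/hg/widgets', '*/widgets#branch(release)',
                           'qa-team@example.com')
  assert sub == ('qa-team@example.com', 'branch(release)')
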
@@ -243,6 +256,21 @@
     def send(self, ctx, count, data):
         '''send message.'''
 
+        # Select subscribers by revset
+        subs = set()
+        for sub, spec in self.subs:
+            if spec is None:
+                subs.add(sub)
+                continue
+            revs = self.repo.revs('%r and %d:', spec, ctx.rev())
+            if len(revs):
+                subs.add(sub)
+                continue
+        if len(subs) == 0:
+            self.ui.debug('notify: no subscribers to selected repo '
+                          'and revset\n')
+            return
+
         p = email.Parser.Parser()
         try:
             msg = p.parsestr(data)
@@ -292,7 +320,7 @@
             msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
                                  (ctx, int(time.time()),
                                   hash(self.repo.root), socket.getfqdn()))
-        msg['To'] = ', '.join(self.subs)
+        msg['To'] = ', '.join(sorted(subs))
 
         msgtext = msg.as_string()
         if self.test:
@@ -301,9 +329,9 @@
                 self.ui.write('\n')
         else:
             self.ui.status(_('notify: sending %d subscribers %d changes\n') %
-                           (len(self.subs), count))
+                           (len(subs), count))
             mail.sendmail(self.ui, util.email(msg['From']),
-                          self.subs, msgtext, mbox=self.mbox)
+                          subs, msgtext, mbox=self.mbox)
 
     def diff(self, ctx, ref=None):
 
--- a/hgext/rebase.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/rebase.py	Fri Oct 19 01:34:50 2012 -0500
@@ -15,7 +15,7 @@
 '''
 
 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
-from mercurial import extensions, patch, scmutil, phases
+from mercurial import extensions, patch, scmutil, phases, obsolete
 from mercurial.commands import templateopts
 from mercurial.node import nullrev
 from mercurial.lock import release
@@ -184,7 +184,8 @@
                 rebaseset = repo.revs(
                     '(children(ancestor(%ld, %d)) and ::(%ld))::',
                     base, dest, base)
-
+            # temporary top level filtering of extinct revisions
+            rebaseset = repo.revs('%ld - hidden()', rebaseset)
             if rebaseset:
                 root = min(rebaseset)
             else:
@@ -193,8 +194,8 @@
             if not rebaseset:
                 repo.ui.debug('base is ancestor of destination\n')
                 result = None
-            elif not keepf and list(repo.revs('first(children(%ld) - %ld)',
-                                              rebaseset, rebaseset)):
+            elif not keepf and repo.revs('first(children(%ld) - %ld)-hidden()',
+                                         rebaseset, rebaseset):
                 raise util.Abort(
                     _("can't remove original changesets with"
                       " unrebased descendants"),
@@ -310,15 +311,10 @@
                     nstate[repo[k].node()] = repo[v].node()
 
         if not keepf:
-            # Remove no more useful revisions
-            rebased = [rev for rev in state if state[rev] != nullmerge]
-            if rebased:
-                if set(repo.changelog.descendants([min(rebased)])) - set(state):
-                    ui.warn(_("warning: new changesets detected "
-                              "on source branch, not stripping\n"))
-                else:
-                    # backup the old csets by default
-                    repair.strip(ui, repo, repo[min(rebased)].node(), "all")
+            collapsedas = None
+            if collapsef:
+                collapsedas = newrev
+            clearrebased(ui, repo, state, collapsedas)
 
         if currentbookmarks:
             updatebookmarks(repo, nstate, currentbookmarks, **opts)
@@ -664,6 +660,31 @@
         state.update(dict.fromkeys(detachset, nullmerge))
     return repo['.'].rev(), dest.rev(), state
 
+def clearrebased(ui, repo, state, collapsedas=None):
+    """dispose of rebased revision at the end of the rebase
+
+    If `collapsedas` is not None, the rebase was a collapse whose result is the
+    `collapsedas` node."""
+    if obsolete._enabled:
+        markers = []
+        for rev, newrev in sorted(state.items()):
+            if newrev >= 0:
+                if collapsedas is not None:
+                    newrev = collapsedas
+                markers.append((repo[rev], (repo[newrev],)))
+        if markers:
+            obsolete.createmarkers(repo, markers)
+    else:
+        rebased = [rev for rev in state if state[rev] != nullmerge]
+        if rebased:
+            if set(repo.changelog.descendants([min(rebased)])) - set(state):
+                ui.warn(_("warning: new changesets detected "
+                          "on source branch, not stripping\n"))
+            else:
+                # backup the old csets by default
+                repair.strip(ui, repo, repo[min(rebased)].node(), "all")
+
+
 def pullrebase(orig, ui, repo, *args, **opts):
     'Call rebase after pull if the latter has been invoked with --rebase'
     if opts.get('rebase'):
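
clearrebased() either records obsolescence markers pairing each rebased changeset with its replacement (or with the single collapse result) or, when obsolescence support is disabled, falls back to the old strip-based cleanup. A toy model of the marker-building loop, with integers standing in for changesets and nullmerge assumed to be -2 as in rebase.py::

  NULLMERGE = -2      # rebase.py uses -2 for revisions that were skipped

  def build_markers(state, collapsedas=None):
      # state maps old revision -> new revision (or NULLMERGE when skipped)
      markers = []
      for rev, newrev in sorted(state.items()):
          if newrev >= 0:
              target = collapsedas if collapsedas is not None else newrev
              markers.append((rev, target))
      return markers

  state = {10: 15, 11: 16, 12: NULLMERGE}
  assert build_markers(state) == [(10, 15), (11, 16)]
  assert build_markers(state, collapsedas=20) == [(10, 20), (11, 20)]
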
--- a/hgext/record.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/record.py	Fri Oct 19 01:34:50 2012 -0500
@@ -33,7 +33,7 @@
     - ('file',    [header_lines + fromfile + tofile])
     - ('context', [context_lines])
     - ('hunk',    [hunk_lines])
-    - ('range',   (-start,len, +start,len, diffp))
+    - ('range',   (-start,len, +start,len, proc))
     """
     lr = patch.linereader(fp)
 
@@ -81,7 +81,7 @@
 class header(object):
     """patch header
 
-    XXX shoudn't we move this to mercurial/patch.py ?
+    XXX shouldn't we move this to mercurial/patch.py ?
     """
     diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
     diff_re = re.compile('diff -r .* (.*)$')
@@ -393,11 +393,11 @@
             if skipfile is None and skipall is None:
                 chunk.pretty(ui)
             if total == 1:
-                msg = _('record this change to %r?') % chunk.filename()
+                msg = _("record this change to '%s'?") % chunk.filename()
             else:
                 idx = pos - len(h.hunks) + i
-                msg = _('record change %d/%d to %r?') % (idx, total,
-                                                         chunk.filename())
+                msg = _("record change %d/%d to '%s'?") % (idx, total,
+                                                           chunk.filename())
             r, skipfile, skipall, newpatches = prompt(skipfile,
                     skipall, msg, chunk)
             if r:
@@ -496,6 +496,9 @@
         raise util.Abort(_('running non-interactively, use %s instead') %
                          cmdsuggest)
 
+    # make sure username is set before going interactive
+    ui.username()
+
     def recordfunc(ui, repo, message, match, opts):
         """This is generic record driver.
 
@@ -664,3 +667,5 @@
 def _wrapcmd(cmd, table, wrapfn, msg):
     entry = extensions.wrapcommand(table, cmd, wrapfn)
     entry[1].append(('i', 'interactive', None, msg))
+
+commands.inferrepo += " record qrecord"
--- a/hgext/schemes.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/schemes.py	Fri Oct 19 01:34:50 2012 -0500
@@ -61,7 +61,7 @@
         return '<ShortRepository: %s>' % self.scheme
 
     def instance(self, ui, url, create):
-        # Should this use urlmod.url(), or is manual parsing better?
+        # Should this use the util.url class, or is manual parsing better?
         url = url.split('://', 1)[1]
         parts = url.split('/', self.parts)
         if len(parts) > self.parts:
--- a/hgext/win32mbcs.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/win32mbcs.py	Fri Oct 19 01:34:50 2012 -0500
@@ -89,19 +89,28 @@
         s += os.sep
     return s
 
-def wrapper(func, args, kwds):
-    # check argument is unicode, then call original
+
+def basewrapper(func, argtype, enc, dec, args, kwds):
+    # check if arguments are already converted, then call original
     for arg in args:
-        if isinstance(arg, unicode):
+        if isinstance(arg, argtype):
             return func(*args, **kwds)
 
     try:
-        # convert arguments to unicode, call func, then convert back
-        return encode(func(*decode(args), **decode(kwds)))
+        # convert string arguments, call func, then convert back the
+        # return value.
+        return enc(func(*dec(args), **dec(kwds)))
     except UnicodeError:
         raise util.Abort(_("[win32mbcs] filename conversion failed with"
                          " %s encoding\n") % (_encoding))
 
+def wrapper(func, args, kwds):
+    return basewrapper(func, unicode, encode, decode, args, kwds)
+
+
+def reversewrapper(func, args, kwds):
+    return basewrapper(func, str, decode, encode, args, kwds)
+
 def wrapperforlistdir(func, args, kwds):
     # Ensure 'path' argument ends with os.sep to avoids
     # misinterpreting last 0x5c of MBCS 2nd byte as path separator.
@@ -119,7 +128,7 @@
     def f(*args, **kwds):
         return wrapper(func, args, kwds)
     try:
-        f.__name__ = func.__name__                # fail with python23
+        f.__name__ = func.__name__ # fails with Python 2.3
     except Exception:
         pass
     setattr(module, name, f)
@@ -133,6 +142,11 @@
  mercurial.util.fspath mercurial.util.pconvert mercurial.util.normpath
  mercurial.util.checkwinfilename mercurial.util.checkosfilename'''
 
+# These functions are required to be called with local encoded strings
+# because they expect local encoded string arguments and misbehave when
+# given unicode strings.
+rfuncs = '''mercurial.encoding.upper mercurial.encoding.lower'''
+
 # List of Windows specific functions to be wrapped.
 winfuncs = '''os.path.splitunc'''
 
@@ -159,6 +173,9 @@
             for f in winfuncs.split():
                 wrapname(f, wrapper)
         wrapname("mercurial.osutil.listdir", wrapperforlistdir)
+        # wrap functions to be called with local byte string arguments
+        for f in rfuncs.split():
+            wrapname(f, reversewrapper)
         # Check sys.args manually instead of using ui.debug() because
         # command line options is not yet applied when
         # extensions.loadall() is called.
--- a/hgext/zeroconf/Zeroconf.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/hgext/zeroconf/Zeroconf.py	Fri Oct 19 01:34:50 2012 -0500
@@ -66,7 +66,7 @@
 				 using select() for socket reads
 				 tested on Debian unstable with Python 2.2.2"""
 
-"""0.05 update - ensure case insensitivty on domain names
+"""0.05 update - ensure case insensitivity on domain names
                  support for unicast DNS queries"""
 
 """0.04 update - added some unit tests
@@ -114,7 +114,7 @@
 _FLAGS_QR_QUERY = 0x0000 # query
 _FLAGS_QR_RESPONSE = 0x8000 # response
 
-_FLAGS_AA = 0x0400 # Authorative answer
+_FLAGS_AA = 0x0400 # Authoritative answer
 _FLAGS_TC = 0x0200 # Truncated
 _FLAGS_RD = 0x0100 # Recursion desired
 _FLAGS_RA = 0x8000 # Recursion available
@@ -335,7 +335,7 @@
 		raise AbstractMethodException
 
 	def toString(self, other):
-		"""String representation with addtional information"""
+		"""String representation with additional information"""
 		arg = "%s/%s,%s" % (self.ttl, self.getRemainingTTL(currentTimeMillis()), other)
 		return DNSEntry.toString(self, "record", arg)
 
@@ -650,7 +650,7 @@
 			if now == 0 or not record.isExpired(now):
 				self.answers.append((record, now))
 
-	def addAuthorativeAnswer(self, record):
+	def addAuthoritativeAnswer(self, record):
 		"""Adds an authoritative answer"""
 		self.authorities.append(record)
 
@@ -904,7 +904,7 @@
 	to cache information as it arrives.
 
 	It requires registration with an Engine object in order to have
-	the read() method called when a socket is availble for reading."""
+	the read() method called when a socket is available for reading."""
 
 	def __init__(self, zeroconf):
 		self.zeroconf = zeroconf
@@ -1140,7 +1140,7 @@
 		return self.port
 
 	def getPriority(self):
-		"""Pirority accessor"""
+		"""Priority accessor"""
 		return self.priority
 
 	def getWeight(self):
@@ -1259,7 +1259,7 @@
 			# SO_REUSEADDR should be equivalent to SO_REUSEPORT for
 			# multicast UDP sockets (p 731, "TCP/IP Illustrated,
 			# Volume 2"), but some BSD-derived systems require
-			# SO_REUSEPORT to be specified explicity.  Also, not all
+			# SO_REUSEPORT to be specified explicitly.  Also, not all
 			# versions of Python have SO_REUSEPORT available.  So
 			# if you're on a BSD-based system, and haven't upgraded
 			# to Python 2.3 yet, you may find this library doesn't
@@ -1272,10 +1272,8 @@
 			self.socket.bind(self.group)
 		except Exception:
 			# Some versions of linux raise an exception even though
-			# the SO_REUSE* options have been set, so ignore it
-			#
+			# SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it
 			pass
-		#self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.intf) + socket.inet_aton('0.0.0.0'))
 		self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
 
 		self.listeners = []
@@ -1433,7 +1431,7 @@
 			out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
 			self.debug = out
 			out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
-			out.addAuthorativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name))
+			out.addAuthoritativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name))
 			self.send(out)
 			i += 1
 			nextTime += _CHECK_TIME
--- a/i18n/ru.po	Mon Oct 08 00:19:30 2012 +0200
+++ b/i18n/ru.po	Fri Oct 19 01:34:50 2012 -0500
@@ -14555,7 +14555,7 @@
 "    Update sets the working directory's parent revision to the specified\n"
 "    changeset (see :hg:`help parents`)."
 msgstr ""
-"    Update устанавливает ревизию родителя рабочего каталога в заданный\n"
+"    Update делает заданный набор изменений родительской ревизией рабочего\n"
 "    набор изменений (см. :hg:`help parents`)."
 
 msgid ""
--- a/mercurial/archival.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/archival.py	Fri Oct 19 01:34:50 2012 -0500
@@ -14,6 +14,10 @@
 import zlib, gzip
 import struct
 
+# from unzip source code:
+_UNX_IFREG = 0x8000
+_UNX_IFLNK = 0xa000
+
 def tidyprefix(dest, kind, prefix):
     '''choose prefix to use for names in archive.  make sure prefix is
     safe for consumers.'''
@@ -175,10 +179,10 @@
         # unzip will not honor unix file modes unless file creator is
         # set to unix (id 3).
         i.create_system = 3
-        ftype = 0x8000 # UNX_IFREG in unzip source code
+        ftype = _UNX_IFREG
         if islink:
             mode = 0777
-            ftype = 0xa000 # UNX_IFLNK in unzip source code
+            ftype = _UNX_IFLNK
         i.external_attr = (mode | ftype) << 16L
         # add "extended-timestamp" extra block, because zip archives
         # without this will be extracted with unexpected timestamp,
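
The constants pulled out above carry the unix file type that, together with the permission mode, is packed into the high 16 bits of the zip entry's external attributes. A short sketch of that packing::

  _UNX_IFREG = 0x8000   # regular file, from unzip source
  _UNX_IFLNK = 0xa000   # symbolic link, from unzip source

  def external_attr(mode, islink):
      # symlinks are stored mode 0777; the type bits sit above the mode bits
      ftype = _UNX_IFLNK if islink else _UNX_IFREG
      if islink:
          mode = 0o777
      return (mode | ftype) << 16

  assert external_attr(0o644, islink=False) == (0o644 | _UNX_IFREG) << 16
  assert external_attr(0o644, islink=True) == (0o777 | _UNX_IFLNK) << 16
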
--- a/mercurial/bookmarks.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/bookmarks.py	Fri Oct 19 01:34:50 2012 -0500
@@ -7,15 +7,9 @@
 
 from mercurial.i18n import _
 from mercurial.node import hex
-from mercurial import encoding, error, util
+from mercurial import encoding, error, util, obsolete, phases
 import errno, os
 
-def valid(mark):
-    for c in (':', '\0', '\n', '\r'):
-        if c in mark:
-            return False
-    return True
-
 def read(repo):
     '''Parse .hg/bookmarks file and return a dictionary
 
@@ -58,7 +52,7 @@
             raise
         return None
     try:
-        # No readline() in posixfile_nt, reading everything is cheap
+        # No readline() in osutil.posixfile, reading everything is cheap
         mark = encoding.tolocal((file.readlines() or [''])[0])
         if mark == '' or mark not in repo._bookmarks:
             mark = None
@@ -79,10 +73,6 @@
 
     if repo._bookmarkcurrent not in refs:
         setcurrent(repo, None)
-    for mark in refs.keys():
-        if not valid(mark):
-            raise util.Abort(_("bookmark '%s' contains illegal "
-                "character" % mark))
 
     wlock = repo.wlock()
     try:
@@ -113,9 +103,6 @@
 
     if mark not in repo._bookmarks:
         mark = ''
-    if not valid(mark):
-        raise util.Abort(_("bookmark '%s' contains illegal "
-            "character" % mark))
 
     wlock = repo.wlock()
     try:
@@ -159,7 +146,7 @@
         if mark and marks[mark] in parents:
             old = repo[marks[mark]]
             new = repo[node]
-            if new in old.descendants() and mark == cur:
+            if old.descendant(new) and mark == cur:
                 marks[cur] = new.node()
                 update = True
             if mark != cur:
@@ -209,21 +196,25 @@
                 cl = repo[nl]
                 if cl.rev() >= cr.rev():
                     continue
-                if cr in cl.descendants():
+                if validdest(repo, cl, cr):
                     repo._bookmarks[k] = cr.node()
                     changed = True
                     ui.status(_("updating bookmark %s\n") % k)
                 else:
+                    if k == '@':
+                        kd = ''
+                    else:
+                        kd = k
                     # find a unique @ suffix
                     for x in range(1, 100):
-                        n = '%s@%d' % (k, x)
+                        n = '%s@%d' % (kd, x)
                         if n not in repo._bookmarks:
                             break
                     # try to use an @pathalias suffix
                     # if an @pathalias already exists, we overwrite (update) it
                     for p, u in ui.configitems("paths"):
                         if path == u:
-                            n = '%s@%s' % (k, p)
+                            n = '%s@%s' % (kd, p)
 
                     repo._bookmarks[n] = cr.node()
                     changed = True
@@ -237,18 +228,48 @@
     if changed:
         write(repo)
 
-def diff(ui, repo, remote):
+def diff(ui, dst, src):
     ui.status(_("searching for changed bookmarks\n"))
 
-    lmarks = repo.listkeys('bookmarks')
-    rmarks = remote.listkeys('bookmarks')
+    smarks = src.listkeys('bookmarks')
+    dmarks = dst.listkeys('bookmarks')
 
-    diff = sorted(set(rmarks) - set(lmarks))
+    diff = sorted(set(smarks) - set(dmarks))
     for k in diff:
-        mark = ui.debugflag and rmarks[k] or rmarks[k][:12]
+        mark = ui.debugflag and smarks[k] or smarks[k][:12]
         ui.write("   %-25s %s\n" % (k, mark))
 
     if len(diff) <= 0:
         ui.status(_("no changed bookmarks found\n"))
         return 1
     return 0
+
+def validdest(repo, old, new):
+    """Is the new bookmark destination a valid update from the old one"""
+    if old == new:
+        # Old == new -> nothing to update.
+        return False
+    elif not old:
+        # old is nullrev, anything is valid.
+        # (new != nullrev has been excluded by the previous check)
+        return True
+    elif repo.obsstore:
+        # We only need this complicated logic if there is obsolescence
+        # XXX will probably deserve an optimised revset.
+
+        validdests = set([old])
+        plen = -1
+        # compute the whole set of successors or descendants
+        while len(validdests) != plen:
+            plen = len(validdests)
+            succs = set(c.node() for c in validdests)
+            for c in validdests:
+                if c.phase() > phases.public:
+                    # obsolescence marker does not apply to public changeset
+                    succs.update(obsolete.allsuccessors(repo.obsstore,
+                                                        [c.node()]))
+            validdests = set(repo.set('%ln::', succs))
+        validdests.remove(old)
+        return new in validdests
+    else:
+        return old.descendant(new)
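
validdest() walks the obsolescence successors of the old bookmark target to a fixed point and accepts the new target only if it falls inside that closure (with a plain descendant check as the fallback when no obsolescence markers exist). A toy model of the fixed-point loop, using a plain dict in place of the obsolescence store::

  def validdest(successors, old, new):
      # successors maps a revision to the revisions that obsolete it
      if old == new:
          return False                    # nothing to update
      valid = set([old])
      size = -1
      while len(valid) != size:           # grow until a fixed point
          size = len(valid)
          for rev in list(valid):
              valid.update(successors.get(rev, ()))
      valid.discard(old)
      return new in valid

  succ = {1: [2], 2: [3]}
  assert validdest(succ, 1, 3)            # 3 transitively supersedes 1
  assert not validdest(succ, 3, 1)        # bookmarks never move backwards
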
--- a/mercurial/byterange.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/byterange.py	Fri Oct 19 01:34:50 2012 -0500
@@ -32,7 +32,7 @@
 
     This was extremely simple. The Range header is a HTTP feature to
     begin with so all this class does is tell urllib2 that the
-    "206 Partial Content" reponse from the HTTP server is what we
+    "206 Partial Content" response from the HTTP server is what we
     expected.
 
     Example:
@@ -64,7 +64,7 @@
 
 class RangeableFileObject(object):
     """File object wrapper to enable raw range handling.
-    This was implemented primarilary for handling range
+    This was implemented primarily for handling range
     specifications for file:// urls. This object effectively makes
     a file object look like it consists only of a range of bytes in
     the stream.
@@ -431,7 +431,7 @@
     Return a tuple whose first element is guaranteed to be an int
     and whose second element will be '' (meaning: the last byte) or
     an int. Finally, return None if the normalized tuple == (0,'')
-    as that is equivelant to retrieving the entire file.
+    as that is equivalent to retrieving the entire file.
     """
     if range_tup is None:
         return None
--- a/mercurial/changelog.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/changelog.py	Fri Oct 19 01:34:50 2012 -0500
@@ -49,6 +49,10 @@
     items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)]
     return "\0".join(items)
 
+def stripdesc(desc):
+    """strip trailing whitespace and leading and trailing empty lines"""
+    return '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
+
 class appender(object):
     '''the changelog index must be updated last on disk, so we use this class
     to delay writes to it'''
@@ -120,6 +124,82 @@
         self._realopener = opener
         self._delayed = False
         self._divert = False
+        self.filteredrevs = ()
+
+    def tip(self):
+        """filtered version of revlog.tip"""
+        for i in xrange(len(self) -1, -2, -1):
+            if i not in self.filteredrevs:
+                return self.node(i)
+
+    def __iter__(self):
+        """filtered version of revlog.__iter__"""
+        for i in xrange(len(self)):
+            if i not in self.filteredrevs:
+                yield i
+
+    def revs(self, start=0, stop=None):
+        """filtered version of revlog.revs"""
+        for i in super(changelog, self).revs(start, stop):
+            if i not in self.filteredrevs:
+                yield i
+
+    @util.propertycache
+    def nodemap(self):
+        # XXX need filtering too
+        self.rev(self.node(0))
+        return self._nodecache
+
+    def hasnode(self, node):
+        """filtered version of revlog.hasnode"""
+        try:
+            i = self.rev(node)
+            return i not in self.filteredrevs
+        except KeyError:
+            return False
+
+    def headrevs(self):
+        if self.filteredrevs:
+            # XXX we should fix and use the C version
+            return self._headrevs()
+        return super(changelog, self).headrevs()
+
+    def strip(self, *args, **kwargs):
+        # XXX make something better than assert
+        # We can't expect proper strip behavior if we are filtered.
+        assert not self.filteredrevs
+        super(changelog, self).strip(*args, **kwargs)
+
+    def rev(self, node):
+        """filtered version of revlog.rev"""
+        r = super(changelog, self).rev(node)
+        if r in self.filteredrevs:
+            raise error.LookupError(node, self.indexfile, _('no node'))
+        return r
+
+    def node(self, rev):
+        """filtered version of revlog.node"""
+        if rev in self.filteredrevs:
+            raise IndexError(rev)
+        return super(changelog, self).node(rev)
+
+    def linkrev(self, rev):
+        """filtered version of revlog.linkrev"""
+        if rev in self.filteredrevs:
+            raise IndexError(rev)
+        return super(changelog, self).linkrev(rev)
+
+    def parentrevs(self, rev):
+        """filtered version of revlog.parentrevs"""
+        if rev in self.filteredrevs:
+            raise IndexError(rev)
+        return super(changelog, self).parentrevs(rev)
+
+    def flags(self, rev):
+        """filtered version of revlog.flags"""
+        if rev in self.filteredrevs:
+            raise IndexError(rev)
+        return super(changelog, self).flags(rev)
 
     def delayupdate(self):
         "delay visibility of index updates to other readers"
@@ -183,7 +263,7 @@
         nodeid\n        : manifest node in ascii
         user\n          : user, no \n or \r allowed
         time tz extra\n : date (time is int or float, timezone is int)
-                        : extra is metadatas, encoded and separated by '\0'
+                        : extra is metadata, encoded and separated by '\0'
                         : older versions ignore it
         files\n\n       : files modified by the cset, no \n or \r allowed
         (.*)            : comment (free text, ideally utf-8)
@@ -232,8 +312,7 @@
             raise error.RevlogError(_("username %s contains a newline")
                                     % repr(user))
 
-        # strip trailing whitespace and leading and trailing empty lines
-        desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
+        desc = stripdesc(desc)
 
         if date:
             parseddate = "%d %d" % util.parsedate(date)
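
Earlier in this file's diff, changelog grows a filteredrevs set and every revision accessor is taught to behave as if those revisions do not exist: iteration skips them and lookups raise. A minimal model of that filtering pattern::

  class FilteredLog(object):
      # toy revlog: entries holds node values, filteredrevs hides indices
      def __init__(self, entries):
          self.entries = entries
          self.filteredrevs = ()

      def __iter__(self):
          for i in range(len(self.entries)):
              if i not in self.filteredrevs:
                  yield i

      def node(self, rev):
          if rev in self.filteredrevs:
              raise IndexError(rev)
          return self.entries[rev]

  log = FilteredLog(['a', 'b', 'c'])
  log.filteredrevs = frozenset([1])
  assert list(log) == [0, 2]
  assert log.node(2) == 'c'
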
--- a/mercurial/cmdutil.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/cmdutil.py	Fri Oct 19 01:34:50 2012 -0500
@@ -10,7 +10,9 @@
 import os, sys, errno, re, tempfile
 import util, scmutil, templater, patch, error, templatekw, revlog, copies
 import match as matchmod
-import subrepo, context, repair, bookmarks, graphmod, revset, phases
+import subrepo, context, repair, bookmarks, graphmod, revset, phases, obsolete
+import changelog
+import lock as lockmod
 
 def parsealiases(cmd):
     return cmd.lstrip("^").split("|")
@@ -547,30 +549,37 @@
         prev = (parents and parents[0]) or nullid
 
         shouldclose = False
-        if not fp:
+        if not fp and len(template) > 0:
             desc_lines = ctx.description().rstrip().split('\n')
             desc = desc_lines[0]    #Commit always has a first line.
             fp = makefileobj(repo, template, node, desc=desc, total=total,
                              seqno=seqno, revwidth=revwidth, mode='ab')
             if fp != template:
                 shouldclose = True
-        if fp != sys.stdout and util.safehasattr(fp, 'name'):
+        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
             repo.ui.note("%s\n" % fp.name)
 
-        fp.write("# HG changeset patch\n")
-        fp.write("# User %s\n" % ctx.user())
-        fp.write("# Date %d %d\n" % ctx.date())
+        if not fp:
+            write = repo.ui.write
+        else:
+            def write(s, **kw):
+                fp.write(s)
+
+
+        write("# HG changeset patch\n")
+        write("# User %s\n" % ctx.user())
+        write("# Date %d %d\n" % ctx.date())
         if branch and branch != 'default':
-            fp.write("# Branch %s\n" % branch)
-        fp.write("# Node ID %s\n" % hex(node))
-        fp.write("# Parent  %s\n" % hex(prev))
+            write("# Branch %s\n" % branch)
+        write("# Node ID %s\n" % hex(node))
+        write("# Parent  %s\n" % hex(prev))
         if len(parents) > 1:
-            fp.write("# Parent  %s\n" % hex(parents[1]))
-        fp.write(ctx.description().rstrip())
-        fp.write("\n\n")
+            write("# Parent  %s\n" % hex(parents[1]))
+        write(ctx.description().rstrip())
+        write("\n\n")
 
-        for chunk in patch.diff(repo, prev, node, opts=opts):
-            fp.write(chunk)
+        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
+            write(chunk, label=label)
 
         if shouldclose:
             fp.close()
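
The export rewrite above funnels all output through a single write callable, so the same code path can target either a real file object or ui.write, which is what allows labelled (colourised) diff chunks when no output file is given. A small sketch of that indirection::

  import sys

  def make_writer(fp, ui_write):
      # with no file object, write straight to the ui (labels are honoured)
      if not fp:
          return ui_write
      def write(s, **kw):
          fp.write(s)                     # file output drops the label
      return write

  write = make_writer(None, lambda s, **kw: sys.stdout.write(s))
  write('# HG changeset patch\n', label='diff.file')
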
@@ -679,7 +688,7 @@
                    for p in self._meaningful_parentrevs(log, rev)]
 
         self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)),
-                      label='log.changeset')
+                      label='log.changeset changeset.%s' % ctx.phasestr())
 
         branch = ctx.branch()
         # don't show the default branch name
@@ -697,7 +706,7 @@
                           label='log.phase')
         for parent in parents:
             self.ui.write(_("parent:      %d:%s\n") % parent,
-                          label='log.parent')
+                          label='log.parent changeset.%s' % ctx.phasestr())
 
         if self.ui.debugflag:
             mnode = ctx.manifestnode()
@@ -991,12 +1000,13 @@
 
     if not len(repo):
         return []
-
-    if follow:
-        defrange = '%s:0' % repo['.'].rev()
+    if opts.get('rev'):
+        revs = scmutil.revrange(repo, opts.get('rev'))
+    elif follow:
+        revs = repo.revs('reverse(:.)')
     else:
-        defrange = '-1:0'
-    revs = scmutil.revrange(repo, opts.get('rev') or [defrange])
+        revs = list(repo)
+        revs.reverse()
     if not revs:
         return []
     wanted = set()
@@ -1102,6 +1112,17 @@
                 wanted.add(rev)
                 if copied:
                     copies.append(copied)
+
+        # We decided to fall back to the slowpath because at least one
+        # of the paths was not a file. Check to see if at least one of them
+        # existed in history, otherwise simply return
+        if slowpath:
+            for path in match.files():
+                if path == '.' or path in repo.store:
+                    break
+                else:
+                    return []
+
     if slowpath:
         # We have to read the changelog to match filenames against
         # changed files
@@ -1258,7 +1279,7 @@
     opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
     opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
     # pats/include/exclude are passed to match.match() directly in
-    # _matchfile() revset but walkchangerevs() builds its matcher with
+    # _matchfiles() revset but walkchangerevs() builds its matcher with
     # scmutil.match(). The difference is input pats are globbed on
     # platforms without shell expansion (windows).
     pctx = repo[None]
@@ -1277,6 +1298,18 @@
                     raise util.Abort(
                         _('cannot follow nonexistent file: "%s"') % f)
                 slowpath = True
+
+        # We decided to fall back to the slowpath because at least one
+        # of the paths was not a file. Check to see if at least one of them
+        # existed in history - in that case, we'll continue down the
+        # slowpath; otherwise, we can turn off the slowpath
+        if slowpath:
+            for path in match.files():
+                if path == '.' or path in repo.store:
+                    break
+            else:
+                slowpath = False
+
     if slowpath:
         # See walkchangerevs() slow path.
         #
@@ -1304,7 +1337,7 @@
             fnopats = (('_ancestors', '_fancestors'),
                        ('_descendants', '_fdescendants'))
             if pats:
-                # follow() revset inteprets its file argument as a
+                # follow() revset interprets its file argument as a
                 # manifest entry, so use match.files(), not pats.
                 opts[fpats[followfirst]] = list(match.files())
             else:
@@ -1384,9 +1417,10 @@
         revs = scmutil.revrange(repo, opts['rev'])
     else:
         if follow and len(repo) > 0:
-            revs = scmutil.revrange(repo, ['.:0'])
+            revs = repo.revs('reverse(:.)')
         else:
-            revs = range(len(repo) - 1, -1, -1)
+            revs = list(repo.changelog)
+            revs.reverse()
     if not revs:
         return iter([]), None, None
     expr, filematcher = _makegraphlogrevset(repo, pats, opts, revs)
@@ -1568,135 +1602,163 @@
     ui.note(_('amending changeset %s\n') % old)
     base = old.p1()
 
-    wlock = repo.wlock()
+    wlock = lock = None
     try:
-        # First, do a regular commit to record all changes in the working
-        # directory (if there are any)
-        ui.callhooks = False
+        wlock = repo.wlock()
+        lock = repo.lock()
+        tr = repo.transaction('amend')
         try:
-            node = commit(ui, repo, commitfunc, pats, opts)
-        finally:
-            ui.callhooks = True
-        ctx = repo[node]
-
-        # Participating changesets:
-        #
-        # node/ctx o - new (intermediate) commit that contains changes from
-        #          |   working dir to go into amending commit (or a workingctx
-        #          |   if there were no changes)
-        #          |
-        # old      o - changeset to amend
-        #          |
-        # base     o - parent of amending changeset
-
-        # Update extra dict from amended commit (e.g. to preserve graft source)
-        extra.update(old.extra())
-
-        # Also update it from the intermediate commit or from the wctx
-        extra.update(ctx.extra())
-
-        files = set(old.files())
-
-        # Second, we use either the commit we just did, or if there were no
-        # changes the parent of the working directory as the version of the
-        # files in the final amend commit
-        if node:
-            ui.note(_('copying changeset %s to %s\n') % (ctx, base))
-
-            user = ctx.user()
-            date = ctx.date()
-            message = ctx.description()
-            # Recompute copies (avoid recording a -> b -> a)
-            copied = copies.pathcopies(base, ctx)
-
-            # Prune files which were reverted by the updates: if old introduced
-            # file X and our intermediate commit, node, renamed that file, then
-            # those two files are the same and we can discard X from our list
-            # of files. Likewise if X was deleted, it's no longer relevant
-            files.update(ctx.files())
-
-            def samefile(f):
-                if f in ctx.manifest():
-                    a = ctx.filectx(f)
-                    if f in base.manifest():
-                        b = base.filectx(f)
-                        return (not a.cmp(b)
-                                and a.flags() == b.flags())
-                    else:
-                        return False
-                else:
-                    return f not in base.manifest()
-            files = [f for f in files if not samefile(f)]
-
-            def filectxfn(repo, ctx_, path):
-                try:
-                    fctx = ctx[path]
-                    flags = fctx.flags()
-                    mctx = context.memfilectx(fctx.path(), fctx.data(),
-                                              islink='l' in flags,
-                                              isexec='x' in flags,
-                                              copied=copied.get(path))
-                    return mctx
-                except KeyError:
-                    raise IOError
-        else:
-            ui.note(_('copying changeset %s to %s\n') % (old, base))
-
-            # Use version of files as in the old cset
-            def filectxfn(repo, ctx_, path):
-                try:
-                    return old.filectx(path)
-                except KeyError:
-                    raise IOError
-
             # See if we got a message from -m or -l, if not, open the editor
             # with the message of the changeset to amend
-            user = opts.get('user') or old.user()
-            date = opts.get('date') or old.date()
             message = logmessage(ui, opts)
-            if not message:
-                cctx = context.workingctx(repo, old.description(), user, date,
-                                          extra,
-                                          repo.status(base.node(), old.node()))
-                message = commitforceeditor(repo, cctx, [])
+            # First, do a regular commit to record all changes in the working
+            # directory (if there are any)
+            ui.callhooks = False
+            try:
+                opts['message'] = 'temporary amend commit for %s' % old
+                node = commit(ui, repo, commitfunc, pats, opts)
+            finally:
+                ui.callhooks = True
+            ctx = repo[node]
+
+            # Participating changesets:
+            #
+            # node/ctx o - new (intermediate) commit that contains changes
+            #          |   from working dir to go into amending commit
+            #          |   (or a workingctx if there were no changes)
+            #          |
+            # old      o - changeset to amend
+            #          |
+            # base     o - parent of amending changeset
+
+            # Update extra dict from amended commit (e.g. to preserve graft
+            # source)
+            extra.update(old.extra())
+
+            # Also update it from the intermediate commit or from the wctx
+            extra.update(ctx.extra())
+
+            files = set(old.files())
+
+            # Second, we use either the commit we just did, or if there were no
+            # changes the parent of the working directory as the version of the
+            # files in the final amend commit
+            if node:
+                ui.note(_('copying changeset %s to %s\n') % (ctx, base))
+
+                user = ctx.user()
+                date = ctx.date()
+                # Recompute copies (avoid recording a -> b -> a)
+                copied = copies.pathcopies(base, ctx)
+
+                # Prune files which were reverted by the updates: if old
+                # introduced file X and our intermediate commit, node,
+                # renamed that file, then those two files are the same and
+                # we can discard X from our list of files. Likewise if X
+                # was deleted, it's no longer relevant
+                files.update(ctx.files())
+
+                def samefile(f):
+                    if f in ctx.manifest():
+                        a = ctx.filectx(f)
+                        if f in base.manifest():
+                            b = base.filectx(f)
+                            return (not a.cmp(b)
+                                    and a.flags() == b.flags())
+                        else:
+                            return False
+                    else:
+                        return f not in base.manifest()
+                files = [f for f in files if not samefile(f)]
 
-        new = context.memctx(repo,
-                             parents=[base.node(), nullid],
-                             text=message,
-                             files=files,
-                             filectxfn=filectxfn,
-                             user=user,
-                             date=date,
-                             extra=extra)
-        ph = repo.ui.config('phases', 'new-commit', phases.draft)
-        try:
-            repo.ui.setconfig('phases', 'new-commit', old.phase())
-            newid = repo.commitctx(new)
+                def filectxfn(repo, ctx_, path):
+                    try:
+                        fctx = ctx[path]
+                        flags = fctx.flags()
+                        mctx = context.memfilectx(fctx.path(), fctx.data(),
+                                                  islink='l' in flags,
+                                                  isexec='x' in flags,
+                                                  copied=copied.get(path))
+                        return mctx
+                    except KeyError:
+                        raise IOError
+            else:
+                ui.note(_('copying changeset %s to %s\n') % (old, base))
+
+                # Use version of files as in the old cset
+                def filectxfn(repo, ctx_, path):
+                    try:
+                        return old.filectx(path)
+                    except KeyError:
+                        raise IOError
+
+                user = opts.get('user') or old.user()
+                date = opts.get('date') or old.date()
+            if not message:
+                message = old.description()
+
+            pureextra = extra.copy()
+            extra['amend_source'] = old.hex()
+
+            new = context.memctx(repo,
+                                 parents=[base.node(), nullid],
+                                 text=message,
+                                 files=files,
+                                 filectxfn=filectxfn,
+                                 user=user,
+                                 date=date,
+                                 extra=extra)
+            new._text = commitforceeditor(repo, new, [])
+
+            newdesc = changelog.stripdesc(new.description())
+            if ((not node)
+                and newdesc == old.description()
+                and user == old.user()
+                and date == old.date()
+                and pureextra == old.extra()):
+                # nothing changed. continuing here would create a new node
+                # anyway because of the amend_source noise.
+                #
+                # This is not what we expect from amend.
+                return old.node()
+
+            ph = repo.ui.config('phases', 'new-commit', phases.draft)
+            try:
+                repo.ui.setconfig('phases', 'new-commit', old.phase())
+                newid = repo.commitctx(new)
+            finally:
+                repo.ui.setconfig('phases', 'new-commit', ph)
+            if newid != old.node():
+                # Reroute the working copy parent to the new changeset
+                repo.setparents(newid, nullid)
+
+                # Move bookmarks from old parent to amend commit
+                bms = repo.nodebookmarks(old.node())
+                if bms:
+                    for bm in bms:
+                        repo._bookmarks[bm] = newid
+                    bookmarks.write(repo)
+            # Commit the whole amend process
+            if obsolete._enabled and newid != old.node():
+                # mark the new changeset as successor of the rewritten one
+                new = repo[newid]
+                obs = [(old, (new,))]
+                if node:
+                    obs.append((ctx, ()))
+
+                obsolete.createmarkers(repo, obs)
+            tr.close()
         finally:
-            repo.ui.setconfig('phases', 'new-commit', ph)
-        if newid != old.node():
-            # Reroute the working copy parent to the new changeset
-            repo.setparents(newid, nullid)
-
-            # Move bookmarks from old parent to amend commit
-            bms = repo.nodebookmarks(old.node())
-            if bms:
-                for bm in bms:
-                    repo._bookmarks[bm] = newid
-                bookmarks.write(repo)
-
+            tr.release()
+        if (not obsolete._enabled) and newid != old.node():
             # Strip the intermediate commit (if there was one) and the amended
             # commit
-            lock = repo.lock()
-            try:
-                if node:
-                    ui.note(_('stripping intermediate changeset %s\n') % ctx)
-                ui.note(_('stripping amended changeset %s\n') % old)
-                repair.strip(ui, repo, old.node(), topic='amend-backup')
-            finally:
-                lock.release()
+            if node:
+                ui.note(_('stripping intermediate changeset %s\n') % ctx)
+            ui.note(_('stripping amended changeset %s\n') % old)
+            repair.strip(ui, repo, old.node(), topic='amend-backup')
     finally:
-        wlock.release()
+        lockmod.release(wlock, lock)
     return newid
 
 def commiteditor(repo, ctx, subs):
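
The new amend path above builds a memctx on top of an intermediate commit and drops files that ended up identical to the amend parent. A minimal, self-contained sketch of that pruning rule, with plain dicts standing in for Mercurial's manifest and filectx objects (all names below are illustrative, not hg API)::

    def prune_unchanged(files, ctx_manifest, base_manifest):
        # manifests map path -> (data, flags); a file is "the same" when
        # content and flags match, or when it is absent from both sides
        def samefile(f):
            if f in ctx_manifest:
                return ctx_manifest.get(f) == base_manifest.get(f)
            return f not in base_manifest
        return [f for f in files if not samefile(f)]

    base = {'a': ('old', '')}
    ctx = {'a': ('old', ''), 'b': ('new', '')}
    # 'a' was reverted to its parent state, 'c' is gone from both sides,
    # so only 'b' still needs to go into the amended commit
    assert prune_unchanged(['a', 'b', 'c'], ctx, base) == ['b']
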
--- a/mercurial/commands.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/commands.py	Fri Oct 19 01:34:50 2012 -0500
@@ -543,7 +543,7 @@
           hg bisect --good
           hg bisect --bad
 
-      - mark the current revision, or a known revision, to be skipped (eg. if
+      - mark the current revision, or a known revision, to be skipped (e.g. if
         that revision is not usable because of another issue)::
 
           hg bisect --skip
@@ -789,42 +789,15 @@
     marks = repo._bookmarks
     cur   = repo.changectx('.').node()
 
-    if delete:
-        if mark is None:
-            raise util.Abort(_("bookmark name required"))
-        if mark not in marks:
-            raise util.Abort(_("bookmark '%s' does not exist") % mark)
-        if mark == repo._bookmarkcurrent:
-            bookmarks.setcurrent(repo, None)
-        del marks[mark]
-        bookmarks.write(repo)
-        return
-
-    if rename:
-        if rename not in marks:
-            raise util.Abort(_("bookmark '%s' does not exist") % rename)
-        if mark in marks and not force:
-            raise util.Abort(_("bookmark '%s' already exists "
-                               "(use -f to force)") % mark)
-        if mark is None:
-            raise util.Abort(_("new bookmark name required"))
-        marks[mark] = marks[rename]
-        if repo._bookmarkcurrent == rename and not inactive:
-            bookmarks.setcurrent(repo, mark)
-        del marks[rename]
-        bookmarks.write(repo)
-        return
-
-    if mark is not None:
-        if "\n" in mark:
-            raise util.Abort(_("bookmark name cannot contain newlines"))
+    def checkformat(mark):
         mark = mark.strip()
         if not mark:
             raise util.Abort(_("bookmark names cannot consist entirely of "
                                "whitespace"))
-        if inactive and mark == repo._bookmarkcurrent:
-            bookmarks.setcurrent(repo, None)
-            return
+        scmutil.checknewlabel(repo, mark, 'bookmark')
+        return mark
+
+    def checkconflict(repo, mark, force=False):
         if mark in marks and not force:
             raise util.Abort(_("bookmark '%s' already exists "
                                "(use -f to force)") % mark)
@@ -832,41 +805,76 @@
             and not force):
             raise util.Abort(
                 _("a bookmark cannot have the name of an existing branch"))
+
+    if delete and rename:
+        raise util.Abort(_("--delete and --rename are incompatible"))
+    if delete and rev:
+        raise util.Abort(_("--rev is incompatible with --delete"))
+    if rename and rev:
+        raise util.Abort(_("--rev is incompatible with --rename"))
+    if mark is None and (delete or rev):
+        raise util.Abort(_("bookmark name required"))
+
+    if delete:
+        if mark not in marks:
+            raise util.Abort(_("bookmark '%s' does not exist") % mark)
+        if mark == repo._bookmarkcurrent:
+            bookmarks.setcurrent(repo, None)
+        del marks[mark]
+        bookmarks.write(repo)
+
+    elif rename:
+        if mark is None:
+            raise util.Abort(_("new bookmark name required"))
+        mark = checkformat(mark)
+        if rename not in marks:
+            raise util.Abort(_("bookmark '%s' does not exist") % rename)
+        checkconflict(repo, mark, force)
+        marks[mark] = marks[rename]
+        if repo._bookmarkcurrent == rename and not inactive:
+            bookmarks.setcurrent(repo, mark)
+        del marks[rename]
+        bookmarks.write(repo)
+
+    elif mark is not None:
+        mark = checkformat(mark)
+        if inactive and mark == repo._bookmarkcurrent:
+            bookmarks.setcurrent(repo, None)
+            return
+        checkconflict(repo, mark, force)
         if rev:
-            marks[mark] = repo.lookup(rev)
+            marks[mark] = scmutil.revsingle(repo, rev).node()
         else:
             marks[mark] = cur
         if not inactive and cur == marks[mark]:
             bookmarks.setcurrent(repo, mark)
         bookmarks.write(repo)
-        return
-
-    if mark is None:
-        if rev:
-            raise util.Abort(_("bookmark name required"))
-        if len(marks) == 0:
-            ui.status(_("no bookmarks set\n"))
-        if inactive:
-            if not repo._bookmarkcurrent:
-                ui.status(_("no active bookmark\n"))
-            else:
-                bookmarks.setcurrent(repo, None)
-            return
+
+    # Same message whether trying to deactivate the current bookmark (-i
+    # with no NAME) or listing bookmarks
+    elif len(marks) == 0:
+        ui.status(_("no bookmarks set\n"))
+
+    elif inactive:
+        if not repo._bookmarkcurrent:
+            ui.status(_("no active bookmark\n"))
         else:
-            for bmark, n in sorted(marks.iteritems()):
-                current = repo._bookmarkcurrent
-                if bmark == current and n == cur:
-                    prefix, label = '*', 'bookmarks.current'
-                else:
-                    prefix, label = ' ', ''
-
-                if ui.quiet:
-                    ui.write("%s\n" % bmark, label=label)
-                else:
-                    ui.write(" %s %-25s %d:%s\n" % (
-                        prefix, bmark, repo.changelog.rev(n), hexfn(n)),
-                        label=label)
-        return
+            bookmarks.setcurrent(repo, None)
+
+    else: # show bookmarks
+        for bmark, n in sorted(marks.iteritems()):
+            current = repo._bookmarkcurrent
+            if bmark == current and n == cur:
+                prefix, label = '*', 'bookmarks.current'
+            else:
+                prefix, label = ' ', ''
+
+            if ui.quiet:
+                ui.write("%s\n" % bmark, label=label)
+            else:
+                ui.write(" %s %-25s %d:%s\n" % (
+                    prefix, bmark, repo.changelog.rev(n), hexfn(n)),
+                    label=label)
 
 @command('branch',
     [('f', 'force', None,
@@ -977,7 +985,7 @@
                 label = 'branches.current'
             rev = str(ctx.rev()).rjust(31 - encoding.colwidth(ctx.branch()))
             rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
-                           'log.changeset')
+                           'log.changeset changeset.%s' % ctx.phasestr())
             tag = ui.label(ctx.branch(), label)
             if ui.quiet:
                 ui.write("%s\n" % tag)
@@ -1258,7 +1266,7 @@
     Returns 0 on success, 1 if nothing changed.
     """
     if opts.get('subrepos'):
-        # Let --subrepos on the command line overide config setting.
+        # Let --subrepos on the command line override config setting.
         ui.setconfig('ui', 'commitsubrepos', True)
 
     extra = {}
@@ -1358,20 +1366,20 @@
         # printed anyway.
         #
         # Par Msg Comment
-        # NN   y  additional topo root
+        # N N  y  additional topo root
         #
-        # BN   y  additional branch root
-        # CN   y  additional topo head
-        # HN   n  usual case
+        # B N  y  additional branch root
+        # C N  y  additional topo head
+        # H N  n  usual case
         #
-        # BB   y  weird additional branch root
-        # CB   y  branch merge
-        # HB   n  merge with named branch
+        # B B  y  weird additional branch root
+        # C B  y  branch merge
+        # H B  n  merge with named branch
         #
-        # CC   y  additional head from merge
-        # CH   n  merge with a head
+        # C C  y  additional head from merge
+        # C H  n  merge with a head
         #
-        # HH   n  head merge: head count decreases
+        # H H  n  head merge: head count decreases
 
     if not opts.get('close_branch'):
         for r in parents:
@@ -1702,7 +1710,7 @@
     """format the changelog or an index DAG as a concise textual description
 
     If you pass a revlog index, the revlog's DAG is emitted. If you list
-    revision numbers, they get labelled in the output as rN.
+    revision numbers, they get labeled in the output as rN.
 
     Otherwise, the changelog DAG of the current repo is emitted.
     """
@@ -2076,7 +2084,9 @@
     flags = repo.known([bin(s) for s in ids])
     ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
 
-@command('debugobsolete', [] + commitopts2,
+@command('debugobsolete',
+        [('', 'flags', 0, _('markers flag')),
+        ] + commitopts2,
          _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
     """create arbitrary obsolete marker"""
@@ -2103,8 +2113,8 @@
         try:
             tr = repo.transaction('debugobsolete')
             try:
-                repo.obsstore.create(tr, parsenodeid(precursor), succs, 0,
-                                     metadata)
+                repo.obsstore.create(tr, parsenodeid(precursor), succs,
+                                     opts['flags'], metadata)
                 tr.close()
             finally:
                 tr.release()
@@ -2987,16 +2997,17 @@
         else:
             iter = [('', l) for l in states]
         for change, l in iter:
-            cols = [fn, str(rev)]
+            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
             before, match, after = None, None, None
+
             if opts.get('line_number'):
-                cols.append(str(l.linenum))
+                cols.append((str(l.linenum), 'grep.linenumber'))
             if opts.get('all'):
-                cols.append(change)
+                cols.append((change, 'grep.change'))
             if opts.get('user'):
-                cols.append(ui.shortuser(ctx.user()))
+                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
             if opts.get('date'):
-                cols.append(datefunc(ctx.date()))
+                cols.append((datefunc(ctx.date()), 'grep.date'))
             if opts.get('files_with_matches'):
                 c = (fn, rev)
                 if c in filerevmatches:
@@ -3006,12 +3017,16 @@
                 before = l.line[:l.colstart]
                 match = l.line[l.colstart:l.colend]
                 after = l.line[l.colend:]
-            ui.write(sep.join(cols))
+            for col, label in cols[:-1]:
+                ui.write(col, label=label)
+                ui.write(sep, label='grep.sep')
+            ui.write(cols[-1][0], label=cols[-1][1])
             if before is not None:
+                ui.write(sep, label='grep.sep')
                 if not opts.get('text') and binary():
-                    ui.write(sep + " Binary file matches")
+                    ui.write(" Binary file matches")
                 else:
-                    ui.write(sep + before)
+                    ui.write(before)
                     ui.write(match, label='grep.match')
                     ui.write(after)
             ui.write(eol)
@@ -3250,8 +3265,12 @@
                 rst.append(_('\nuse "hg help %s" to show the full help text\n')
                            % name)
             elif not ui.quiet:
-                rst.append(_('\nuse "hg -v help %s" to show more info\n')
-                           % name)
+                omitted = _('use "hg -v help %s" to show more complete'
+                            ' help and the global options') % name
+                notomitted = _('use "hg -v help %s" to show'
+                               ' the global options') % name
+                help.indicateomitted(rst, omitted, notomitted)
+
         return rst
 
 
@@ -3354,6 +3373,11 @@
         if util.safehasattr(doc, '__call__'):
             rst += ["    %s\n" % l for l in doc().splitlines()]
 
+        if not ui.verbose:
+            omitted = (_('use "hg help -v %s" to show more complete help') %
+                       name)
+            help.indicateomitted(rst, omitted)
+
         try:
             cmdutil.findcmd(name, table)
             rst.append(_('\nuse "hg help -c %s" to see help for '
@@ -3381,6 +3405,11 @@
             rst.extend(tail.splitlines(True))
             rst.append('\n')
 
+        if not ui.verbose:
+            omitted = (_('use "hg help -v %s" to show more complete help') %
+                       name)
+            help.indicateomitted(rst, omitted)
+
         if mod:
             try:
                 ct = mod.cmdtable
@@ -3444,7 +3473,13 @@
         rst.extend(helplist())
 
     keep = ui.verbose and ['verbose'] or []
-    formatted, pruned = minirst.format(''.join(rst), textwidth, keep=keep)
+    text = ''.join(rst)
+    formatted, pruned = minirst.format(text, textwidth, keep=keep)
+    if 'verbose' in pruned:
+        keep.append('omitted')
+    else:
+        keep.append('notomitted')
+    formatted, pruned = minirst.format(text, textwidth, keep=keep)
     ui.write(formatted)
 
 
@@ -4187,7 +4222,7 @@
                     res.append(fn[plen:-slen])
         finally:
             lock.release()
-        for f in sorted(res):
+        for f in res:
             ui.write("%s\n" % f)
         return
 
@@ -4969,6 +5004,7 @@
                         ret = 1
                 finally:
                     ui.setconfig('ui', 'forcemerge', '')
+                    ms.commit()
 
                 # replace filemerge's .orig file with our resolve file
                 util.rename(a + ".resolve", a + ".orig")
@@ -5422,7 +5458,7 @@
         # label with log.changeset (instead of log.parent) since this
         # shows a working directory parent *changeset*:
         ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
-                 label='log.changeset')
+                 label='log.changeset changeset.%s' % p.phasestr())
         ui.write(' '.join(p.tags()), label='log.tag')
         if p.bookmarks():
             marks.extend(p.bookmarks())
@@ -5630,8 +5666,7 @@
         if len(names) != len(set(names)):
             raise util.Abort(_('tag names must be unique'))
         for n in names:
-            if n in ['tip', '.', 'null']:
-                raise util.Abort(_("the name '%s' is reserved") % n)
+            scmutil.checknewlabel(repo, n, 'tag')
             if not n:
                 raise util.Abort(_('tag names cannot consist entirely of '
                                    'whitespace'))
@@ -5709,7 +5744,7 @@
 
         hn = hexfunc(n)
         r = "%5d:%s" % (repo.changelog.rev(n), hn)
-        rev = ui.label(r, 'log.changeset')
+        rev = ui.label(r, 'log.changeset changeset.%s' % repo[n].phasestr())
         spaces = " " * (30 - encoding.colwidth(t))
 
         tag = ui.label(t, 'tags.normal')
@@ -5835,7 +5870,7 @@
 
     # with no argument, we also move the current bookmark, if any
     movemarkfrom = None
-    if rev is None or node == '':
+    if rev is None:
         movemarkfrom = repo['.'].node()
 
     # if we defined a bookmark, we have to remember the original bookmark name
@@ -5884,6 +5919,10 @@
     the changelog, manifest, and tracked files, as well as the
     integrity of their crosslinks and indices.
 
+    Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
+    for more information about recovery from corruption of the
+    repository.
+
     Returns 0 on success, 1 if errors are encountered.
     """
     return hg.verify(repo)
@@ -5906,3 +5945,5 @@
           " debugknown debuggetbundle debugbundle")
 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
                 " debugdata debugindex debugindexdot debugrevlog")
+inferrepo = ("add addremove annotate cat commit diff grep forget log parents"
+             " remove resolve status debugwalk")
--- a/mercurial/commandserver.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/commandserver.py	Fri Oct 19 01:34:50 2012 -0500
@@ -137,7 +137,7 @@
         if logpath:
             global logfile
             if logpath == '-':
-                # write log on a special 'd'ebug channel
+                # write log on a special 'd' (debug) channel
                 logfile = channeledoutput(sys.stdout, sys.stdout, 'd')
             else:
                 logfile = open(logpath, 'a')
--- a/mercurial/config.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/config.py	Fri Oct 19 01:34:50 2012 -0500
@@ -67,9 +67,9 @@
         return self._data.get(section, {}).get(item, default)
 
     def backup(self, section, item):
-        """return a tuple allowing restore to reinstall a previous valuesi
+        """return a tuple allowing restore to reinstall a previous value
 
-        The main reason we need it is because it handle the "no data" case.
+        The main reason we need it is because it handles the "no data" case.
         """
         try:
             value = self._data[section][item]
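
The corrected backup() docstring above points at the interesting detail: the returned tuple has to distinguish "previous value was X" from "there was no previous value". A small sketch of that backup/restore idea over a plain nested dict (illustrative only, not the config.config API)::

    def backup(data, section, item):
        try:
            return (section, item, data[section][item])
        except KeyError:
            return (section, item)          # no previous value recorded

    def restore(data, bak):
        if len(bak) == 3:
            section, item, value = bak
            data.setdefault(section, {})[item] = value
        else:                               # the "no data" case
            section, item = bak
            data.get(section, {}).pop(item, None)

    cfg = {'ui': {'username': 'alice'}}
    bak = backup(cfg, 'ui', 'username')
    cfg['ui']['username'] = 'bob'
    restore(cfg, bak)
    assert cfg['ui']['username'] == 'alice'
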
--- a/mercurial/context.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/context.py	Fri Oct 19 01:34:50 2012 -0500
@@ -11,6 +11,7 @@
 import copies
 import match as matchmod
 import os, errno, stat
+import obsolete as obsmod
 
 propertycache = util.propertycache
 
@@ -232,38 +233,22 @@
 
     def obsolete(self):
         """True if the changeset is obsolete"""
-        return (self.node() in self._repo.obsstore.precursors
-                and self.phase() > phases.public)
+        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
 
     def extinct(self):
         """True if the changeset is extinct"""
-        # We should just compute a cache a check againts it.
-        # see revset implementation for details
-        #
-        # But this naive implementation does not require cache
-        if self.phase() <= phases.public:
-            return False
-        if not self.obsolete():
-            return False
-        for desc in self.descendants():
-            if not desc.obsolete():
-                return False
-        return True
+        return self.rev() in obsmod.getrevs(self._repo, 'extinct')
 
     def unstable(self):
         """True if the changeset is not obsolete but it's ancestor are"""
-        # We should just compute /(obsolete()::) - obsolete()/
-        # and keep it in a cache.
-        #
-        # But this naive implementation does not require cache
-        if self.phase() <= phases.public:
-            return False
-        if self.obsolete():
-            return False
-        for anc in self.ancestors():
-            if anc.obsolete():
-                return True
-        return False
+        return self.rev() in obsmod.getrevs(self._repo, 'unstable')
+
+    def bumped(self):
+        """True if the changeset try to be a successor of a public changeset
+
+        Only non-public and non-obsolete changesets may be bumped.
+        """
+        return self.rev() in obsmod.getrevs(self._repo, 'bumped')
 
     def _fileinfo(self, path):
         if '_manifest' in self.__dict__:
@@ -310,6 +295,10 @@
         n = self._repo.changelog.ancestor(self._node, n2)
         return changectx(self._repo, n)
 
+    def descendant(self, other):
+        """True if other is descendant of this changeset"""
+        return self._repo.changelog.descendant(self._rev, other._rev)
+
     def walk(self, match):
         fset = set(match.files())
         # for dirstate.walk, files=['.'] means "walk the whole tree".
@@ -489,6 +478,10 @@
         return self._changectx.branch()
     def extra(self):
         return self._changectx.extra()
+    def phase(self):
+        return self._changectx.phase()
+    def phasestr(self):
+        return self._changectx.phasestr()
     def manifest(self):
         return self._changectx.manifest()
     def changectx(self):
@@ -885,8 +878,7 @@
         p = self._repo.dirstate.parents()
         if p[1] == nullid:
             p = p[:-1]
-        self._parents = [changectx(self._repo, x) for x in p]
-        return self._parents
+        return [changectx(self._repo, x) for x in p]
 
     def status(self, ignored=False, clean=False, unknown=False):
         """Explicit status query
@@ -1168,7 +1160,7 @@
 
         returns True if different than fctx.
         """
-        # fctx should be a filectx (not a wfctx)
+        # fctx should be a filectx (not a workingfilectx)
         # invert comparison to reuse the same code path
         return fctx.cmp(self)
 
--- a/mercurial/dagparser.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/dagparser.py	Fri Oct 19 01:34:50 2012 -0500
@@ -56,13 +56,13 @@
         ... :forkhere  # a label for the last of the 3 nodes from above
         ... +5         # 5 more nodes on one branch
         ... :mergethis # label again
-        ... <forkhere  # set default parent to labelled fork node
+        ... <forkhere  # set default parent to labeled fork node
         ... +10        # 10 more nodes on a parallel branch
         ... @stable    # following nodes will be annotated as "stable"
         ... +5         # 5 nodes in stable
         ... !addfile   # custom command; could trigger new file in next node
         ... +2         # two more nodes
-        ... /mergethis # merge last node with labelled node
+        ... /mergethis # merge last node with labeled node
         ... +4         # 4 more nodes descending from merge node
         ...
         ... """)))
--- a/mercurial/dirstate.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/dirstate.py	Fri Oct 19 01:34:50 2012 -0500
@@ -15,6 +15,7 @@
 _format = ">cllll"
 propertycache = util.propertycache
 filecache = scmutil.filecache
+_rangemask = 0x7fffffff
 
 class repocache(filecache):
     """filecache for files in .hg/"""
@@ -259,8 +260,8 @@
         return copies
 
     def setbranch(self, branch):
-        if branch in ['tip', '.', 'null']:
-            raise util.Abort(_('the name \'%s\' is reserved') % branch)
+        # no repo object here, just check for reserved names
+        scmutil.checknewlabel(None, branch, 'branch')
         self._branch = encoding.fromlocal(branch)
         f = self._opener('branch', 'w', atomictemp=True)
         try:
@@ -334,7 +335,8 @@
         '''Mark a file normal and clean.'''
         s = os.lstat(self._join(f))
         mtime = int(s.st_mtime)
-        self._addpath(f, 'n', s.st_mode, s.st_size, mtime)
+        self._addpath(f, 'n', s.st_mode,
+                      s.st_size & _rangemask, mtime & _rangemask)
         if f in self._copymap:
             del self._copymap[f]
         if mtime > self._lastnormaltime:
@@ -401,7 +403,8 @@
         if self._pl[1] == nullid:
             return self.normallookup(f)
         s = os.lstat(self._join(f))
-        self._addpath(f, 'm', s.st_mode, s.st_size, int(s.st_mtime))
+        self._addpath(f, 'm', s.st_mode,
+                      s.st_size & _rangemask, int(s.st_mtime) & _rangemask)
         if f in self._copymap:
             del self._copymap[f]
 
@@ -769,13 +772,13 @@
                 # means "can we check links?".
                 mtime = int(st.st_mtime)
                 if (size >= 0 and
-                    (size != st.st_size
+                    ((size != st.st_size and size != st.st_size & _rangemask)
                      or ((mode ^ st.st_mode) & 0100 and self._checkexec))
                     and (mode & lnkkind != lnkkind or self._checklink)
                     or size == -2 # other parent
                     or fn in self._copymap):
                     madd(fn)
-                elif (mtime != time
+                elif ((time != mtime and time != mtime & _rangemask)
                       and (mode & lnkkind != lnkkind or self._checklink)):
                     ladd(fn)
                 elif mtime == self._lastnormaltime:
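
The _rangemask change above truncates sizes and mtimes to 31 bits before they are packed into the signed 32-bit fields of the dirstate format (">cllll"), and status then accepts a stat value when it matches either the stored value or its masked form. A small sketch of that comparison (the function name is made up for illustration)::

    _rangemask = 0x7fffffff

    def size_clean(recorded, st_size):
        # recorded may have been truncated to 31 bits when it was written
        return recorded == st_size or recorded == st_size & _rangemask

    big = 5 * 1024 ** 3                     # a 5 GB file
    assert size_clean(big & _rangemask, big)
    assert not size_clean(big & _rangemask, big + 1)
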
--- a/mercurial/discovery.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/discovery.py	Fri Oct 19 01:34:50 2012 -0500
@@ -7,7 +7,7 @@
 
 from node import nullid, short
 from i18n import _
-import util, setdiscovery, treediscovery, phases, obsolete
+import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
 
 def findcommonincoming(repo, remote, heads=None, force=False):
     """Return a tuple (common, anyincoming, heads) used to identify the common
@@ -21,7 +21,7 @@
       any longer.
     "heads" is either the supplied heads, or else the remote's heads.
 
-    If you pass heads and they are all known locally, the reponse lists justs
+    If you pass heads and they are all known locally, the response lists just
     these heads in "common" and in "heads".
 
     Please use findcommonoutgoing to compute the set of outgoing nodes to give
@@ -255,7 +255,7 @@
         rnode = remotebookmarks.get(bm)
         if rnode and rnode in repo:
             lctx, rctx = repo[bm], repo[rnode]
-            if rctx == lctx.ancestor(rctx):
+            if bookmarks.validdest(repo, rctx, lctx):
                 bookmarkedheads.add(lctx.node())
 
     # 3. Check for new heads.
@@ -264,24 +264,26 @@
     error = None
     unsynced = False
     allmissing = set(outgoing.missing)
+    allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
+    allfuturecommon.update(allmissing)
     for branch, heads in headssum.iteritems():
         if heads[0] is None:
             # Maybe we should abort if we push more than one head
             # for new branches?
             continue
-        if heads[2]:
-            unsynced = True
-        oldhs = set(heads[0])
         candidate_newhs = set(heads[1])
         # add unsynced data
+        oldhs = set(heads[0])
         oldhs.update(heads[2])
         candidate_newhs.update(heads[2])
         dhs = None
+        discardedheads = set()
         if repo.obsstore:
             # remove future heads which are actually obsolete by another
             # pushed element:
             #
-            # XXX There is several case this case does not handle properly
+            # XXX as above, there are several cases this code does not handle
+            # XXX properly
             #
             # (1) if <nh> is public, it won't be affected by obsolete marker
             #     and a new is created
@@ -293,13 +295,19 @@
             # more tricky for unsynced changes.
             newhs = set()
             for nh in candidate_newhs:
-                for suc in obsolete.anysuccessors(repo.obsstore, nh):
-                    if suc != nh and suc in allmissing:
-                        break
+                if nh in repo and repo[nh].phase() <= phases.public:
+                    newhs.add(nh)
                 else:
-                    newhs.add(nh)
+                    for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
+                        if suc != nh and suc in allfuturecommon:
+                            discardedheads.add(nh)
+                            break
+                    else:
+                        newhs.add(nh)
         else:
             newhs = candidate_newhs
+        if [h for h in heads[2] if h not in discardedheads]:
+            unsynced = True
         if len(newhs) > len(oldhs):
             # strip updates to existing remote heads from the new heads list
             dhs = list(newhs - bookmarkedheads - oldhs)
@@ -348,7 +356,7 @@
 def visiblebranchmap(repo):
     """return a branchmap for the visible set"""
     # XXX Recomputing this data on the fly is very slow.  We should build a
-    # XXX cached version while computin the standard branchmap version.
+    # XXX cached version while computing the standard branchmap version.
     sroots = repo._phasecache.phaseroots[phases.secret]
     if sroots or repo.obsstore:
         vbranchmap = {}
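
The push-head filtering above asks obsolete.allsuccessors() for the transitive successors of each candidate head and discards heads whose successor is already common or part of the push. A toy closure walk over a {precursor: [successors]} mapping (hypothetical shape, not the real obsstore structures)::

    def allsuccessors(markers, nodes):
        # breadth-first walk of the successor relation, including the inputs
        seen = set(nodes)
        queue = list(nodes)
        while queue:
            n = queue.pop()
            for succ in markers.get(n, []):
                if succ not in seen:
                    seen.add(succ)
                    queue.append(succ)
        return seen

    markers = {'a': ['b'], 'b': ['c']}
    assert allsuccessors(markers, ['a']) == set(['a', 'b', 'c'])
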
--- a/mercurial/dispatch.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/dispatch.py	Fri Oct 19 01:34:50 2012 -0500
@@ -715,7 +715,8 @@
                 raise
             except error.RepoError:
                 if cmd not in commands.optionalrepo.split():
-                    if args and not path: # try to infer -R from command args
+                    if (cmd in commands.inferrepo.split() and
+                        args and not path): # try to infer -R from command args
                         repos = map(cmdutil.findrepo, args)
                         guess = repos[0]
                         if guess and repos.count(guess) == len(repos):
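
The dispatch change above only tries to infer -R from command arguments for commands whitelisted in commands.inferrepo, and only when every argument resolves to the same repository root. A stand-alone approximation of that lookup (findrepo() here re-implements the idea of cmdutil.findrepo so the sketch runs on its own)::

    import os

    def findrepo(path):
        # walk upwards until a directory containing '.hg' is found
        p = os.path.abspath(path)
        while not os.path.isdir(os.path.join(p, '.hg')):
            parent = os.path.dirname(p)
            if parent == p:
                return None
            p = parent
        return p

    def inferrepo(args):
        if not args:
            return None
        repos = [findrepo(a) for a in args]
        guess = repos[0]
        if guess and repos.count(guess) == len(repos):
            return guess
        return None
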
--- a/mercurial/encoding.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/encoding.py	Fri Oct 19 01:34:50 2012 -0500
@@ -223,7 +223,7 @@
 
     Principles of operation:
 
-    - ASCII and UTF-8 data sucessfully round-trips and is understood
+    - ASCII and UTF-8 data successfully round-trips and is understood
       by Unicode-oriented clients
     - filenames and file contents in arbitrary other encodings can have
       be round-tripped or recovered by clueful clients
--- a/mercurial/exewrapper.c	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/exewrapper.c	Fri Oct 19 01:34:50 2012 -0500
@@ -7,23 +7,31 @@
  GNU General Public License version 2 or any later version.
 */
 
-#include <Python.h>
+#include <stdio.h>
 #include <windows.h>
 
+#include "hgpythonlib.h"
 
 #ifdef __GNUC__
 int strcat_s(char *d, size_t n, const char *s)
 {
 	return !strncat(d, s, n);
 }
+int strcpy_s(char *d, size_t n, const char *s)
+{
+	return !strncpy(d, s, n);
+}
 #endif
 
 
 static char pyscript[MAX_PATH + 10];
+static char pyhome[MAX_PATH + 10];
+static char envpyhome[MAX_PATH + 10];
+static char pydllfile[MAX_PATH + 10];
 
 int main(int argc, char *argv[])
 {
-	char *dot;
+	char *p;
 	int ret;
 	int i;
 	int n;
@@ -31,6 +39,9 @@
 	WIN32_FIND_DATA fdata;
 	HANDLE hfind;
 	const char *err;
+	HMODULE pydll;
+	void (__cdecl *Py_SetPythonHome)(char *home);
+	int (__cdecl *Py_Main)(int argc, char *argv[]);
 
 	if (GetModuleFileName(NULL, pyscript, sizeof(pyscript)) == 0)
 	{
@@ -38,12 +49,13 @@
 		goto bail;
 	}
 
-	dot = strrchr(pyscript, '.');
-	if (dot == NULL) {
+	p = strrchr(pyscript, '.');
+	if (p == NULL) {
 		err = "malformed module filename";
 		goto bail;
 	}
-	*dot = 0; /* cut trailing ".exe" */
+	*p = 0; /* cut trailing ".exe" */
+	strcpy_s(pyhome, sizeof(pyhome), pyscript);
 
 	hfind = FindFirstFile(pyscript, &fdata);
 	if (hfind != INVALID_HANDLE_VALUE) {
@@ -54,6 +66,57 @@
 		strcat_s(pyscript, sizeof(pyscript), "exe.py");
 	}
 
+	pydll = NULL;
+	if (GetEnvironmentVariable("PYTHONHOME", envpyhome,
+				   sizeof(envpyhome)) == 0)
+	{
+		/* environment var PYTHONHOME is not set */
+
+		p = strrchr(pyhome, '\\');
+		if (p == NULL) {
+			err = "can't find backslash in module filename";
+			goto bail;
+		}
+		*p = 0; /* cut at directory */
+
+		/* check for private Python of HackableMercurial */
+		strcat_s(pyhome, sizeof(pyhome), "\\hg-python");
+
+		hfind = FindFirstFile(pyhome, &fdata);
+		if (hfind != INVALID_HANDLE_VALUE) {
+			/* path pyhome exists, let's use it */
+			FindClose(hfind);
+			strcpy_s(pydllfile, sizeof(pydllfile), pyhome);
+			strcat_s(pydllfile, sizeof(pydllfile), "\\" HGPYTHONLIB);
+			pydll = LoadLibrary(pydllfile);
+			if (pydll == NULL) {
+				err = "failed to load private Python DLL";
+				goto bail;
+			}
+			Py_SetPythonHome = (void*)GetProcAddress(pydll,
+							"Py_SetPythonHome");
+			if (Py_SetPythonHome == NULL) {
+				err = "failed to get Py_SetPythonHome";
+				goto bail;
+			}
+			Py_SetPythonHome(pyhome);
+		}
+	}
+
+	if (pydll == NULL) {
+		pydll = LoadLibrary(HGPYTHONLIB);
+		if (pydll == NULL) {
+			err = "failed to load Python DLL";
+			goto bail;
+		}
+	}
+
+	Py_Main = (void*)GetProcAddress(pydll, "Py_Main");
+	if (Py_Main == NULL) {
+		err = "failed to get Py_Main";
+		goto bail;
+	}
+
 	/*
 	Only add the pyscript to the args, if it's not already there. It may
 	already be there, if the script spawned a child process of itself, in
--- a/mercurial/fancyopts.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/fancyopts.py	Fri Oct 19 01:34:50 2012 -0500
@@ -5,7 +5,8 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-import getopt
+import getopt, util
+from i18n import _
 
 def gnugetopt(args, options, longoptions):
     """Parse options mostly like getopt.gnu_getopt.
@@ -105,7 +106,11 @@
         if t is type(fancyopts):
             state[name] = defmap[name](val)
         elif t is type(1):
-            state[name] = int(val)
+            try:
+                state[name] = int(val)
+            except ValueError:
+                raise util.Abort(_('invalid value %r for option %s, '
+                                   'expected int') % (val, opt))
         elif t is type(''):
             state[name] = val
         elif t is type([]):
--- a/mercurial/formatter.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/formatter.py	Fri Oct 19 01:34:50 2012 -0500
@@ -26,6 +26,7 @@
         self._item = {}
     def data(self, **data):
         '''insert data into item that's not shown in default output'''
+        self._item.update(data)
     def write(self, fields, deftext, *fielddata, **opts):
         '''do default text output while assigning data to item'''
         for k, v in zip(fields.split(), fielddata):
--- a/mercurial/hbisect.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/hbisect.py	Fri Oct 19 01:34:50 2012 -0500
@@ -159,7 +159,7 @@
     Return a list of revision(s) that match the given status:
 
     - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
-    - ``goods``, ``bads``      : csets topologicaly good/bad
+    - ``goods``, ``bads``      : csets topologically good/bad
     - ``range``              : csets taking part in the bisection
     - ``pruned``             : csets that are goods, bads or skipped
     - ``untested``           : csets whose fate is yet unknown
@@ -170,8 +170,8 @@
     if status in ('good', 'bad', 'skip', 'current'):
         return map(repo.changelog.rev, state[status])
     else:
-        # In the floowing sets, we do *not* call 'bisect()' with more
-        # than one level of recusrsion, because that can be very, very
+        # In the following sets, we do *not* call 'bisect()' with more
+        # than one level of recursion, because that can be very, very
         # time consuming. Instead, we always develop the expression as
         # much as possible.
 
@@ -200,7 +200,7 @@
 
         # 'ignored' is all csets that were not used during the bisection
         # due to DAG topology, but may however have had an impact.
-        # Eg., a branch merged between bads and goods, but whose branch-
+        # E.g., a branch merged between bads and goods, but whose branch-
         # point is outside of the range.
         iba = '::bisect(bad) - ::bisect(good)'  # Ignored bads' ancestors
         iga = '::bisect(good) - ::bisect(bad)'  # Ignored goods' ancestors
--- a/mercurial/help.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/help.py	Fri Oct 19 01:34:50 2012 -0500
@@ -61,6 +61,11 @@
 
     return ''.join(rst)
 
+def indicateomitted(rst, omitted, notomitted=None):
+    rst.append('\n\n.. container:: omitted\n\n    %s\n\n' % omitted)
+    if notomitted:
+        rst.append('\n\n.. container:: notomitted\n\n    %s\n\n' % notomitted)
+
 def topicmatch(kw):
     """Return help topics matching kw.
 
--- a/mercurial/help/config.txt	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/help/config.txt	Fri Oct 19 01:34:50 2012 -0500
@@ -604,9 +604,9 @@
 ``post-<command>``
   Run after successful invocations of the associated command. The
   contents of the command line are passed as ``$HG_ARGS`` and the result
-  code in ``$HG_RESULT``. Parsed command line arguments are passed as 
+  code in ``$HG_RESULT``. Parsed command line arguments are passed as
   ``$HG_PATS`` and ``$HG_OPTS``. These contain string representations of
-  the python data internally passed to <command>. ``$HG_OPTS`` is a 
+  the python data internally passed to <command>. ``$HG_OPTS`` is a
   dictionary of options (with unspecified options set to their defaults).
   ``$HG_PATS`` is a list of arguments. Hook failure is ignored.
 
@@ -616,7 +616,7 @@
   are passed as ``$HG_PATS`` and ``$HG_OPTS``. These contain string
   representations of the data internally passed to <command>. ``$HG_OPTS``
   is a  dictionary of options (with unspecified options set to their
-  defaults). ``$HG_PATS`` is a list of arguments. If the hook returns 
+  defaults). ``$HG_PATS`` is a list of arguments. If the hook returns
   failure, the command doesn't execute and Mercurial returns the failure
   code.
 
@@ -923,6 +923,17 @@
     Optional. Directory or URL to use when pushing if no destination
     is specified.
 
+Custom paths can be defined by assigning the path to a name that can later be
+used from the command line. Example::
+
+    [paths]
+    my_path = http://example.com/path
+
+To push to the path defined in ``my_path`` run the command::
+
+    hg push my_path
+
+
 ``phases``
 ----------
 
@@ -1431,7 +1442,7 @@
     Example: ``http://hgserver/static/``.
 
 ``stripes``
-    How many lines a "zebra stripe" should span in multiline output.
+    How many lines a "zebra stripe" should span in multi-line output.
     Default is 1; set to 0 to disable.
 
 ``style``
--- a/mercurial/help/hgweb.txt	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/help/hgweb.txt	Fri Oct 19 01:34:50 2012 -0500
@@ -11,7 +11,7 @@
   - paths
   - collections
 
-The ``web`` options are thorougly described in :hg:`help config`.
+The ``web`` options are thoroughly described in :hg:`help config`.
 
 The ``paths`` section maps URL paths to paths of repositories in the
 filesystem. hgweb will not expose the filesystem directly - only
--- a/mercurial/hg.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/hg.py	Fri Oct 19 01:34:50 2012 -0500
@@ -10,7 +10,7 @@
 from lock import release
 from node import hex, nullid
 import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
-import lock, util, extensions, error, node, scmutil
+import lock, util, extensions, error, node, scmutil, phases
 import cmdutil, discovery
 import merge as mergemod
 import verify as verifymod
@@ -303,7 +303,7 @@
 
         copy = False
         if (srcrepo and srcrepo.cancopy() and islocal(dest)
-            and not srcrepo.revs("secret()")):
+            and not phases.hassecret(srcrepo)):
             copy = not pull and not rev
 
         if copy:
@@ -336,6 +336,16 @@
 
             destlock = copystore(ui, srcrepo, destpath)
 
+            # Recomputing branch cache might be slow on big repos,
+            # so just copy it
+            dstcachedir = os.path.join(destpath, 'cache')
+            srcbranchcache = srcrepo.sjoin('cache/branchheads')
+            dstbranchcache = os.path.join(dstcachedir, 'branchheads')
+            if os.path.exists(srcbranchcache):
+                if not os.path.exists(dstcachedir):
+                    os.mkdir(dstcachedir)
+                util.copyfile(srcbranchcache, dstbranchcache)
+
             # we need to re-init the repo after manually copying the data
             # into it
             destpeer = peer(ui, peeropts, dest)
@@ -399,7 +409,7 @@
             if update:
                 if update is not True:
                     checkout = srcpeer.lookup(update)
-                for test in (checkout, 'default', 'tip'):
+                for test in (checkout, '@', 'default', 'tip'):
                     if test is None:
                         continue
                     try:
--- a/mercurial/hgweb/common.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/hgweb/common.py	Fri Oct 19 01:34:50 2012 -0500
@@ -48,7 +48,7 @@
     # and replayed
     scheme = req.env.get('wsgi.url_scheme')
     if hgweb.configbool('web', 'push_ssl', True) and scheme != 'https':
-        raise ErrorResponse(HTTP_OK, 'ssl required')
+        raise ErrorResponse(HTTP_FORBIDDEN, 'ssl required')
 
     deny = hgweb.configlist('web', 'deny_push')
     if deny and (not user or deny == ['*'] or user in deny):
--- a/mercurial/hgweb/hgwebdir_mod.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/hgweb/hgwebdir_mod.py	Fri Oct 19 01:34:50 2012 -0500
@@ -293,9 +293,11 @@
                     except OSError:
                         continue
 
+                    # add '/' to the name to make it obvious that
+                    # the entry is a directory, not a regular repository
                     row = dict(contact="",
                                contact_sort="",
-                               name=name,
+                               name=name + '/',
                                name_sort=name,
                                url=url,
                                description="",
--- a/mercurial/hgweb/server.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/hgweb/server.py	Fri Oct 19 01:34:50 2012 -0500
@@ -12,7 +12,7 @@
 from mercurial.i18n import _
 
 def _splitURI(uri):
-    """ Return path and query splited from uri
+    """Return path and query that has been split from uri
 
     Just like CGI environment, the path is unquoted, the query is
     not.
--- a/mercurial/hgweb/wsgicgi.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/hgweb/wsgicgi.py	Fri Oct 19 01:34:50 2012 -0500
@@ -19,7 +19,7 @@
     environ = dict(os.environ.iteritems())
     environ.setdefault('PATH_INFO', '')
     if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'):
-        # IIS includes script_name in path_info
+        # IIS includes script_name in PATH_INFO
         scriptname = environ['SCRIPT_NAME']
         if environ['PATH_INFO'].startswith(scriptname):
             environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):]
--- a/mercurial/hook.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/hook.py	Fri Oct 19 01:34:50 2012 -0500
@@ -154,7 +154,7 @@
                 oldstdout = os.dup(stdoutno)
                 os.dup2(stderrno, stdoutno)
         except AttributeError:
-            # __stdout/err__ doesn't have fileno(), it's not a real file
+            # __stdout__/__stderr__ doesn't have fileno(), it's not a real file
             pass
 
     try:
--- a/mercurial/httpclient/__init__.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/httpclient/__init__.py	Fri Oct 19 01:34:50 2012 -0500
@@ -170,7 +170,7 @@
         except socket.sslerror, e:
             if e.args[0] != socket.SSL_ERROR_WANT_READ:
                 raise
-            logger.debug('SSL_WANT_READ in _select, should retry later')
+            logger.debug('SSL_ERROR_WANT_READ in _select, should retry later')
             return True
         logger.debug('response read %d data during _select', len(data))
         # If the socket was readable and no data was read, that means
@@ -293,7 +293,7 @@
           host: The host to which we'll connect.
           port: Optional. The port over which we'll connect. Default 80 for
                 non-ssl, 443 for ssl.
-          use_ssl: Optional. Wether to use ssl. Defaults to False if port is
+          use_ssl: Optional. Whether to use ssl. Defaults to False if port is
                    not 443, true if port is 443.
           ssl_validator: a function(socket) to validate the ssl cert
           timeout: Optional. Connection timeout, default is TIMEOUT_DEFAULT.
@@ -374,7 +374,7 @@
         if self.ssl:
             # This is the default, but in the case of proxied SSL
             # requests the proxy logic above will have cleared
-            # blocking mode, so reenable it just to be safe.
+            # blocking mode, so re-enable it just to be safe.
             sock.setblocking(1)
             logger.debug('wrapping socket for ssl with options %r',
                          self.ssl_opts)
@@ -414,7 +414,7 @@
         """Close the connection to the server.
 
         This is a no-op if the connection is already closed. The
-        connection may automatically close if requessted by the server
+        connection may automatically close if requested by the server
         or required by the nature of a response.
         """
         if self.sock is None:
@@ -532,7 +532,7 @@
                         if e.args[0] != socket.SSL_ERROR_WANT_READ:
                             raise
                         logger.debug(
-                            'SSL_WANT_READ while sending data, retrying...')
+                            'SSL_ERROR_WANT_READ while sending data, retrying...')
                         continue
                     if not data:
                         logger.info('socket appears closed in read')
--- a/mercurial/httpclient/_readers.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/httpclient/_readers.py	Fri Oct 19 01:34:50 2012 -0500
@@ -120,7 +120,7 @@
         if data:
             assert not self._finished, (
                 'tried to add data (%r) to a closed reader!' % data)
-        logger.debug('%s read an addtional %d data', self.name, len(data))
+        logger.debug('%s read an additional %d data', self.name, len(data))
         self._done_chunks.append(data)
 
 
@@ -162,7 +162,7 @@
 
     def _load(self, data):
         assert not self._finished, 'tried to add data to a closed reader!'
-        logger.debug('chunked read an addtional %d data', len(data))
+        logger.debug('chunked read an additional %d data', len(data))
         position = 0
         if self._leftover_data:
             logger.debug('chunked reader trying to finish block from leftover data')
@@ -188,7 +188,7 @@
                 return
             if amt == 0:
                 self._finished = True
-                logger.debug('closing chunked redaer due to chunk of length 0')
+                logger.debug('closing chunked reader due to chunk of length 0')
                 return
             self._done_chunks.append(data[block_start:block_start + amt])
             position = block_start + amt + len(self._eol)
--- a/mercurial/httpconnection.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/httpconnection.py	Fri Oct 19 01:34:50 2012 -0500
@@ -73,7 +73,7 @@
     if '://' in uri:
         scheme, hostpath = uri.split('://', 1)
     else:
-        # py2.4.1 doesn't provide the full URI
+        # Python 2.4.1 doesn't provide the full URI
         scheme, hostpath = 'http', uri
     bestuser = None
     bestlen = 0
@@ -233,7 +233,11 @@
     def http_open(self, req):
         if req.get_full_url().startswith('https'):
             return self.https_open(req)
-        return self.do_open(HTTPConnection, req, False)
+        def makehttpcon(*args, **kwargs):
+            k2 = dict(kwargs)
+            k2['use_ssl'] = False
+            return HTTPConnection(*args, **k2)
+        return self.do_open(makehttpcon, req, False)
 
     def https_open(self, req):
         # req.get_full_url() does not contain credentials and we may
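
http_open above wraps HTTPConnection in a small factory so that use_ssl=False is always passed, whatever keyword arguments do_open supplies. The same trick in isolation, with a dummy connection function standing in for the real class::

    def force_kwargs(factory, **forced):
        # return a factory that always overrides the given keyword arguments
        def make(*args, **kwargs):
            merged = dict(kwargs)
            merged.update(forced)
            return factory(*args, **merged)
        return make

    def fakeconnection(host, use_ssl=True):
        return (host, use_ssl)

    makecon = force_kwargs(fakeconnection, use_ssl=False)
    assert makecon('example.com', use_ssl=True) == ('example.com', False)
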
--- a/mercurial/keepalive.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/keepalive.py	Fri Oct 19 01:34:50 2012 -0500
@@ -67,8 +67,8 @@
 
     close_connection()  -  close the connection to the host
     readlines()         -  you know, readlines()
-    status              -  the return status (ie 404)
-    reason              -  english translation of status (ie 'File not found')
+    status              -  the return status (i.e. 404)
+    reason              -  English translation of status (i.e. 'File not found')
 
   If you want the best of both worlds, use this inside an
   AttributeError-catching try:
@@ -297,7 +297,7 @@
             # first.  We previously got into a nasty loop
             # where an exception was uncaught, and so the
             # connection stayed open.  On the next try, the
-            # same exception was raised, etc.  The tradeoff is
+            # same exception was raised, etc.  The trade-off is
             # that it's now possible this call will raise
             # a DIFFERENT exception
             if DEBUG:
@@ -370,17 +370,14 @@
     # so if you THEN do a normal read, you must first take stuff from
     # the buffer.
 
-    # the read method wraps the original to accomodate buffering,
+    # the read method wraps the original to accommodate buffering,
     # although read() never adds to the buffer.
     # Both readline and readlines have been stolen with almost no
     # modification from socket.py
 
 
     def __init__(self, sock, debuglevel=0, strict=0, method=None):
-        if method: # the httplib in python 2.3 uses the method arg
-            httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
-        else: # 2.2 doesn't
-            httplib.HTTPResponse.__init__(self, sock, debuglevel)
+        httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
         self.fileno = sock.fileno
         self.code = None
         self._rbuf = ''
@@ -442,7 +439,7 @@
                 try:
                     chunk_left = int(line, 16)
                 except ValueError:
-                    # close the connection as protocol synchronisation is
+                    # close the connection as protocol synchronization is
                     # probably lost
                     self.close()
                     raise httplib.IncompleteRead(value)
@@ -548,7 +545,7 @@
         read = getattr(str, 'read', None)
         if read is not None:
             if self.debuglevel > 0:
-                print "sendIng a read()able"
+                print "sending a read()able"
             data = read(blocksize)
             while data:
                 self.sock.sendall(data)
@@ -737,7 +734,7 @@
 
 
 def test(url, N=10):
-    print "checking error hander (do this on a non-200)"
+    print "checking error handler (do this on a non-200)"
     try: error_handler(url)
     except IOError:
         print "exiting - exception will prevent further tests"
--- a/mercurial/localrepo.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/localrepo.py	Fri Oct 19 01:34:50 2012 -0500
@@ -117,14 +117,14 @@
         return self.requirements[:]
 
     def __init__(self, baseui, path=None, create=False):
-        self.wopener = scmutil.opener(path, expand=True)
-        self.wvfs = self.wopener
+        self.wvfs = scmutil.vfs(path, expand=True)
+        self.wopener = self.wvfs
         self.root = self.wvfs.base
         self.path = self.wvfs.join(".hg")
         self.origroot = path
         self.auditor = scmutil.pathauditor(self.root, self._checknested)
-        self.opener = scmutil.opener(self.path)
-        self.vfs = self.opener
+        self.vfs = scmutil.vfs(self.path)
+        self.opener = self.vfs
         self.baseui = baseui
         self.ui = baseui.copy()
         # A list of callback to shape the phase if no data were found.
@@ -182,12 +182,12 @@
             if inst.errno != errno.ENOENT:
                 raise
 
-        self.store = store.store(requirements, self.sharedpath, scmutil.opener)
+        self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
         self.spath = self.store.path
-        self.sopener = self.store.opener
-        self.svfs = self.sopener
+        self.svfs = self.store.vfs
+        self.sopener = self.svfs
         self.sjoin = self.store.join
-        self.opener.createmode = self.store.createmode
+        self.vfs.createmode = self.store.createmode
         self._applyrequirements(requirements)
         if create:
             self._writerequirements()
@@ -299,18 +299,20 @@
     def hiddenrevs(self):
         """hiddenrevs: revs that should be hidden by command and tools
 
-        This set is carried on the repo to ease initialisation and lazy
-        loading it'll probably move back to changelog for efficienty and
-        consistency reason
+        This set is carried on the repo to ease initialization and lazy
+        loading; it'll probably move back to changelog for efficiency and
+        consistency reasons.
 
         Note that the hiddenrevs will needs invalidations when
         - a new changesets is added (possible unstable above extinct)
         - a new obsolete marker is added (possible new extinct changeset)
+
+        hidden changesets cannot have non-hidden descendants
         """
         hidden = set()
         if self.obsstore:
             ### hide extinct changeset that are not accessible by any mean
-            hiddenquery = 'extinct() - ::(. + bookmark() + tagged())'
+            hiddenquery = 'extinct() - ::(. + bookmark())'
             hidden.update(self.revs(hiddenquery))
         return hidden
 
@@ -361,14 +363,13 @@
         return len(self.changelog)
 
     def __iter__(self):
-        for i in xrange(len(self)):
-            yield i
+        return iter(self.changelog)
 
     def revs(self, expr, *args):
         '''Return a list of revisions matching the given revset'''
         expr = revset.formatspec(expr, *args)
         m = revset.match(None, expr)
-        return [r for r in m(self, range(len(self)))]
+        return [r for r in m(self, list(self))]
 
     def set(self, expr, *args):
         '''
@@ -384,17 +385,9 @@
     def hook(self, name, throw=False, **args):
         return hook.hook(self.ui, self, name, throw, **args)
 
-    tag_disallowed = ':\r\n'
-
     def _tag(self, names, node, message, local, user, date, extra={}):
         if isinstance(names, str):
-            allchars = names
             names = (names,)
-        else:
-            allchars = ''.join(names)
-        for c in self.tag_disallowed:
-            if c in allchars:
-                raise util.Abort(_('%r cannot be used in a tag name') % c)
 
         branches = self.branchmap()
         for name in names:
@@ -515,7 +508,11 @@
     def tags(self):
         '''return a mapping of tag to node'''
         t = {}
-        for k, v in self._tagscache.tags.iteritems():
+        if self.changelog.filteredrevs:
+            tags, tt = self._findtags()
+        else:
+            tags = self._tagscache.tags
+        for k, v in tags.iteritems():
             try:
                 # ignore tags to unknown nodes
                 self.changelog.rev(v)
@@ -601,7 +598,7 @@
         # TODO: rename this function?
         tiprev = len(self) - 1
         if lrev != tiprev:
-            ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
+            ctxgen = (self[r] for r in self.changelog.revs(lrev + 1, tiprev))
             self._updatebranchcache(partial, ctxgen)
             self._writebranchcache(partial, self.changelog.tip(), tiprev)
 
@@ -626,8 +623,15 @@
 
     def branchmap(self):
         '''returns a dictionary {branch: [branchheads]}'''
-        self.updatebranchcache()
-        return self._branchcache
+        if self.changelog.filteredrevs:
+            # some changesets are excluded so we can't use the cache
+            branchmap = {}
+            self._updatebranchcache(branchmap, (self[r] for r in self))
+            return branchmap
+        else:
+            self.updatebranchcache()
+            return self._branchcache
+
 
     def _branchtip(self, heads):
         '''return the tipmost branch head in heads'''
@@ -712,7 +716,7 @@
             # Remove candidate heads that no longer are in the repo (e.g., as
             # the result of a strip that just happened).  Avoid using 'node in
             # self' here because that dives down into branchcache code somewhat
-            # recrusively.
+            # recursively.
             bheadrevs = [self.changelog.rev(node) for node in bheads
                          if self.changelog.hasnode(node)]
             newheadrevs = [self.changelog.rev(node) for node in newnodes
@@ -732,7 +736,7 @@
                 iterrevs = list(bheadrevs)
 
             # This loop prunes out two kinds of heads - heads that are
-            # superceded by a head in newheadrevs, and newheadrevs that are not
+            # superseded by a head in newheadrevs, and newheadrevs that are not
             # heads because an existing head is their descendant.
             while iterrevs:
                 latest = iterrevs.pop()
@@ -1040,6 +1044,7 @@
 
         self._branchcache = None # in UTF-8
         self._branchcachetip = None
+        obsolete.clearobscaches(self)
 
     def invalidatedirstate(self):
         '''Invalidates the dirstate, causing the next call to dirstate
@@ -1479,7 +1484,7 @@
         and you also know the set of candidate new heads that may have resulted
         from the destruction, you can set newheadnodes.  This will enable the
         code to update the branchheads cache, rather than having future code
-        decide it's invalid and regenrating it from scratch.
+        decide it's invalid and regenerating it from scratch.
         '''
         # If we have info, newheadnodes, on how to update the branch cache, do
         # it, Otherwise, since nodes were destroyed, the cache is stale and this
@@ -1865,8 +1870,9 @@
                         # then, save the iteration
                         if self.obsstore:
                             # this message are here for 80 char limit reason
-                            mso = _("push includes an obsolete changeset: %s!")
-                            msu = _("push includes an unstable changeset: %s!")
+                            mso = _("push includes obsolete changeset: %s!")
+                            msu = _("push includes unstable changeset: %s!")
+                            msb = _("push includes bumped changeset: %s!")
                             # If we are to push if there is at least one
                             # obsolete or unstable changeset in missing, at
                             # least one of the missinghead will be obsolete or
@@ -1877,6 +1883,8 @@
                                     raise util.Abort(_(mso) % ctx)
                                 elif ctx.unstable():
                                     raise util.Abort(_(msu) % ctx)
+                                elif ctx.bumped():
+                                    raise util.Abort(_(msb) % ctx)
                         discovery.checkheads(self, remote, outgoing,
                                              remoteheads, newbranch,
                                              bool(inc))
@@ -1906,7 +1914,7 @@
                         ret = remote.addchangegroup(cg, 'push', self.url())
 
                 if ret:
-                    # push succeed, synchonize target of the push
+                    # push succeed, synchronize target of the push
                     cheads = outgoing.missingheads
                 elif revs is None:
                     # All out push fails. synchronize all common
@@ -1925,7 +1933,7 @@
                     #     missing = ((commonheads::missingheads) - commonheads)
                     #
                     # We can pick:
-                    # * missingheads part of comon (::commonheads)
+                    # * missingheads part of common (::commonheads)
                     common = set(outgoing.common)
                     cheads = [node for node in revs if node in common]
                     # and
@@ -1989,7 +1997,7 @@
                 if nr in self:
                     cr = self[nr]
                     cl = self[nl]
-                    if cl in cr.descendants():
+                    if bookmarks.validdest(self, cr, cl):
                         r = remote.pushkey('bookmarks', k, nr, nl)
                         if r:
                             self.ui.status(_("updating bookmark %s\n") % k)
@@ -2402,6 +2410,7 @@
             self.ui.status(_("added %d changesets"
                              " with %d changes to %d files%s\n")
                              % (changesets, revisions, files, htext))
+            obsolete.clearobscaches(self)
 
             if changesets > 0:
                 p = lambda: cl.writepending() and self.root or ""
@@ -2435,10 +2444,10 @@
             tr.close()
 
             if changesets > 0:
+                self.updatebranchcache()
                 def runhooks():
                     # forcefully update the on-disk branch cache
                     self.ui.debug("updating the branch cache\n")
-                    self.updatebranchcache()
                     self.hook("changegroup", node=hex(cl.node(clstart)),
                               source=srctype, url=url)
 
@@ -2458,6 +2467,12 @@
     def stream_in(self, remote, requirements):
         lock = self.lock()
         try:
+            # Save remote branchmap. We will use it later
+            # to speed up branchcache creation
+            rbranchmap = None
+            if remote.capable("branchmap"):
+                rbranchmap = remote.branchmap()
+
             fp = remote.stream_out()
             l = fp.readline()
             try:
@@ -2518,6 +2533,17 @@
             self._applyrequirements(requirements)
             self._writerequirements()
 
+            if rbranchmap:
+                rbheads = []
+                for bheads in rbranchmap.itervalues():
+                    rbheads.extend(bheads)
+
+                self.branchcache = rbranchmap
+                if rbheads:
+                    rtiprev = max((int(self.changelog.rev(node))
+                            for node in rbheads))
+                    self._writebranchcache(self.branchcache,
+                            self[rtiprev].node(), rtiprev)
             self.invalidate()
             return len(self.heads()) + 1
         finally:
@@ -2539,7 +2565,7 @@
         # uncompressed only if compatible.
 
         if not stream:
-            # if the server explicitely prefer to stream (for fast LANs)
+            # if the server explicitly prefers to stream (for fast LANs)
             stream = remote.capable('stream-preferred')
 
         if stream and not heads:
--- a/mercurial/lock.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/lock.py	Fri Oct 19 01:34:50 2012 -0500
@@ -127,8 +127,8 @@
     def release(self):
         """release the lock and execute callback function if any
 
-        If the lock have been aquired multiple time, the actual release is
-        delayed to the last relase call."""
+        If the lock has been acquired multiple times, the actual release is
+        delayed to the last release call."""
         if self.held > 1:
             self.held -= 1
         elif self.held == 1:
--- a/mercurial/mail.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/mail.py	Fri Oct 19 01:34:50 2012 -0500
@@ -13,7 +13,7 @@
 _oldheaderinit = email.Header.Header.__init__
 def _unifiedheaderinit(self, *args, **kw):
     """
-    Python2.7 introduces a backwards incompatible change
+    Python 2.7 introduces a backwards incompatible change
     (Python issue1974, r70772) in email.Generator.Generator code:
     pre-2.7 code passed "continuation_ws='\t'" to the Header
     constructor, and 2.7 removed this parameter.
@@ -151,7 +151,7 @@
 
 def mimetextqp(body, subtype, charset):
     '''Return MIME message.
-    Qouted-printable transfer encoding will be used if necessary.
+    Quoted-printable transfer encoding will be used if necessary.
     '''
     enc = None
     for line in body.splitlines():
--- a/mercurial/manifest.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/manifest.py	Fri Oct 19 01:34:50 2012 -0500
@@ -64,9 +64,7 @@
 
         If the string is found m[start:end] are the line containing
         that string.  If start == end the string was not found and
-        they indicate the proper sorted insertion point.  This was
-        taken from bisect_left, and modified to find line start/end as
-        it goes along.
+        they indicate the proper sorted insertion point.
 
         m should be a buffer or a string
         s is a string'''
@@ -156,7 +154,7 @@
             # combine the changed lists into one list for sorting
             work = [(x, False) for x in added]
             work.extend((x, True) for x in removed)
-            # this could use heapq.merge() (from python2.6+) or equivalent
+            # this could use heapq.merge() (from Python 2.6+) or equivalent
             # since the lists are already sorted
             work.sort()
 
--- a/mercurial/match.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/match.py	Fri Oct 19 01:34:50 2012 -0500
@@ -49,7 +49,7 @@
         a pattern is one of:
         'glob:<glob>' - a glob relative to cwd
         're:<regexp>' - a regular expression
-        'path:<path>' - a path relative to canonroot
+        'path:<path>' - a path relative to repository root
         'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
         'relpath:<path>' - a path relative to cwd
         'relre:<regexp>' - a regexp that needn't match the start of a name
--- a/mercurial/minirst.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/minirst.py	Fri Oct 19 01:34:50 2012 -0500
@@ -133,7 +133,7 @@
     def match(lines, i, itemre, singleline):
         """Does itemre match an item at line i?
 
-        A list item can be followed by an idented line or another list
+        A list item can be followed by an indented line or another list
         item (but only if singleline is True).
         """
         line1 = lines[i]
--- a/mercurial/obsolete.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/obsolete.py	Fri Oct 19 01:34:50 2012 -0500
@@ -20,6 +20,37 @@
 besides old and news changeset identifiers, such as creation date or
 author name.
 
+The old obsoleted changeset is called a "precursor" and possible replacements
+are called "successors". Markers that use changeset X as a precursor are called
+"successor markers of X" because they hold information about the successors of
+X. Markers that use changeset Y as a successor are called "precursor markers
+of Y" because they hold information about the precursors of Y.
+
+Examples:
+
+- When changeset A is replaced by a changeset A', one marker is stored:
+
+    (A, (A',))
+
+- When changesets A and B are folded into a new changeset C, two markers are
+  stored:
+
+    (A, (C,)) and (B, (C,))
+
+- When changeset A is simply "pruned" from the graph, one marker is created:
+
+    (A, ())
+
+- When changeset A is split into B and C, a single marker is used:
+
+    (A, (B, C))
+
+  We use a single marker to distinguish the "split" case from the "divergence"
+  case. If two independent operations rewrite the same changeset A into A' and
+  A'', we have an error case: divergent rewriting. We can detect it because
+  two markers will be created independently:
+
+  (A, (A',)) and (A, (A'',))
 
 Format
 ------
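
The marker examples in the hunk above can be restated as plain tuples. The
following is an illustrative sketch only (placeholder node names, not
Mercurial code) of the (precursor, successors) pairs each rewriting
operation records:

def markersfor(operation, old, new):
    """Return the obsolescence markers a given history rewrite records."""
    if operation == 'rewrite':                    # A rewritten as A'
        return [(old[0], tuple(new))]             # (A, (A',))
    if operation == 'fold':                       # A and B folded into C
        return [(o, tuple(new)) for o in old]     # (A, (C,)) and (B, (C,))
    if operation == 'prune':                      # A dropped from the graph
        return [(old[0], ())]                     # (A, ())
    if operation == 'split':                      # A split into B and C
        return [(old[0], tuple(new))]             # one marker: (A, (B, C))
    raise ValueError(operation)

print(markersfor('fold', ['A', 'B'], ['C']))      # [('A', ('C',)), ('B', ('C',))]
print(markersfor('split', ['A'], ['B', 'C']))     # [('A', ('B', 'C'))]
# Divergence is the error case: two independent rewrites of A each leave
# their own marker, e.g. ('A', ("A'",)) and ('A', ("A''",)).
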
@@ -52,17 +83,15 @@
   cannot contain '\0'.
 """
 import struct
-import util, base85
+import util, base85, node
 from i18n import _
 
-# the obsolete feature is not mature enought to be enabled by default.
-# you have to rely on third party extension extension to enable this.
-_enabled = False
-
 _pack = struct.pack
 _unpack = struct.unpack
 
-# the obsolete feature is not mature enought to be enabled by default.
+_SEEK_END = 2 # os.SEEK_END was introduced in Python 2.5
+
+# the obsolete feature is not mature enough to be enabled by default.
 # you have to rely on third party extension extension to enable this.
 _enabled = False
 
@@ -73,6 +102,37 @@
 _fmfsize = struct.calcsize(_fmfixed)
 _fnodesize = struct.calcsize(_fmnode)
 
+### obsolescence marker flag
+
+## bumpedfix flag
+#
+# When a changeset A' succeeds a changeset A which became public, we call A'
+# "bumped" because it's a successor of a public changeset.
+#
+# o    A' (bumped)
+# |`:
+# | o  A
+# |/
+# o    Z
+#
+# The way to solve this situation is to create a new changeset Ad as a child
+# of A. This changeset has the same content as A', so the diff from A to A'
+# is the same as the diff from A to Ad. Ad is marked as a successor of A'.
+#
+# o   Ad
+# |`:
+# | x A'
+# |'|
+# o | A
+# |/
+# o Z
+#
+# But by transitivity Ad is also a successor of A. To avoid having Ad marked
+# as bumped too, we add the `bumpedfix` flag to the marker <A', (Ad,)>.
+# This flag means that the successors are an interdiff that fixes the bumped
+# situation, breaking the transitivity of "bumped" here.
+bumpedfix = 1
+
 def _readmarkers(data):
     """Read and enumerate markers from raw data"""
     off = 0
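
A hedged sketch of how the bumpedfix flag above is meant to be used together
with the createmarkers() and allsuccessors() helpers added later in this
changeset; recordbumpedfix() is a hypothetical helper, not part of the patch:

from mercurial import obsolete

def recordbumpedfix(repo, aprime, ad):
    # aprime (A') and ad (Ad) are changectx objects; Ad is the child of A
    # carrying the same content as A'.  The flag keeps Ad out of the bumped
    # set even though it is, by transitivity, also a successor of A.
    obsolete.createmarkers(repo, [(aprime, (ad,))], flag=obsolete.bumpedfix)

# Consumers skip flagged markers, e.g. the bumped computation does:
#     allsuccessors(repo.obsstore, publicnodes, ignoreflags=bumpedfix)
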
@@ -158,11 +218,13 @@
     """Store obsolete markers
 
     Markers can be accessed with two mappings:
-    - precursors: old -> set(new)
-    - successors: new -> set(old)
+    - precursors[x] -> set(markers on precursor edges of x)
+    - successors[x] -> set(markers on successor edges of x)
     """
 
     def __init__(self, sopener):
+        # caches for various obsolescence related sets
+        self.caches = {}
         self._all = []
         # new markers to serialize
         self.precursors = {}
@@ -211,7 +273,7 @@
                 # defined. So we must seek to the end before calling tell(),
                 # or we may get a zero offset for non-zero sized files on
                 # some platforms (issue3543).
-                f.seek(0, 2) # os.SEEK_END
+                f.seek(0, _SEEK_END)
                 offset = f.tell()
                 transaction.add('obsstore', offset)
                 # offset == 0: new file - add the version header
@@ -222,19 +284,24 @@
                 # call 'filecacheentry.refresh()'  here
                 f.close()
             self._load(new)
+            # new markers *may* have changed several sets. invalidate the caches.
+            self.caches.clear()
         return len(new)
 
-    def mergemarkers(self, transation, data):
+    def mergemarkers(self, transaction, data):
         markers = _readmarkers(data)
-        self.add(transation, markers)
+        self.add(transaction, markers)
 
     def _load(self, markers):
         for mark in markers:
             self._all.append(mark)
             pre, sucs = mark[:2]
-            self.precursors.setdefault(pre, set()).add(mark)
+            self.successors.setdefault(pre, set()).add(mark)
             for suc in sucs:
-                self.successors.setdefault(suc, set()).add(mark)
+                self.precursors.setdefault(suc, set()).add(mark)
+        if node.nullid in self.precursors:
+            raise util.Abort(_('bad obsolescence marker detected: '
+                               'invalid successors nullid'))
 
 def _encodemarkers(markers, addheader=False):
     # Kept separate from flushmarkers(), it will be reused for
@@ -306,26 +373,154 @@
         yield marker(repo, markerdata)
 
 def precursormarkers(ctx):
-    """obsolete marker making this changeset obsolete"""
+    """obsolete marker marking this changeset as a successors"""
     for data in ctx._repo.obsstore.precursors.get(ctx.node(), ()):
         yield marker(ctx._repo, data)
 
 def successormarkers(ctx):
-    """obsolete marker marking this changeset as a successors"""
+    """obsolete marker making this changeset obsolete"""
     for data in ctx._repo.obsstore.successors.get(ctx.node(), ()):
         yield marker(ctx._repo, data)
 
-def anysuccessors(obsstore, node):
-    """Yield every successor of <node>
+def allsuccessors(obsstore, nodes, ignoreflags=0):
+    """Yield node for every successor of <nodes>.
 
-    This this a linear yield unsuitable to detect splitted changeset."""
-    remaining = set([node])
+    Some successors may be unknown locally.
+
+    This is a linear yield unsuited to detecting split changesets."""
+    remaining = set(nodes)
     seen = set(remaining)
     while remaining:
         current = remaining.pop()
         yield current
-        for mark in obsstore.precursors.get(current, ()):
+        for mark in obsstore.successors.get(current, ()):
+            # ignore markers flagged with the specified flag
+            if mark[2] & ignoreflags:
+                continue
             for suc in mark[1]:
                 if suc not in seen:
                     seen.add(suc)
                     remaining.add(suc)
+
+def _knownrevs(repo, nodes):
+    """yield revision numbers of known nodes passed in parameters
+
+    Unknown revisions are silently ignored."""
+    torev = repo.changelog.nodemap.get
+    for n in nodes:
+        rev = torev(n)
+        if rev is not None:
+            yield rev
+
+# mapping of 'set-name' -> <function to compute this set>
+cachefuncs = {}
+def cachefor(name):
+    """Decorator to register a function as computing the cache for a set"""
+    def decorator(func):
+        assert name not in cachefuncs
+        cachefuncs[name] = func
+        return func
+    return decorator
+
+def getrevs(repo, name):
+    """Return the set of revision that belong to the <name> set
+
+    Such access may compute the set and cache it for future use"""
+    if not repo.obsstore:
+        return ()
+    if name not in repo.obsstore.caches:
+        repo.obsstore.caches[name] = cachefuncs[name](repo)
+    return repo.obsstore.caches[name]
+
+# To keep things simple we need to invalidate the obsolescence caches when:
+#
+# - a new changeset is added
+# - the public phase is changed
+# - an obsolescence marker is added
+# - strip is used on a repo
+def clearobscaches(repo):
+    """Remove all obsolescence related cache from a repo
+
+    This remove all cache in obsstore is the obsstore already exist on the
+    repo.
+
+    (We could be smarter here given the exact event that trigger the cache
+    clearing)"""
+    # only clear cache is there is obsstore data in this repo
+    if 'obsstore' in repo._filecache:
+        repo.obsstore.caches.clear()
+
+@cachefor('obsolete')
+def _computeobsoleteset(repo):
+    """the set of obsolete revisions"""
+    obs = set()
+    nm = repo.changelog.nodemap
+    for node in repo.obsstore.successors:
+        rev = nm.get(node)
+        if rev is not None:
+            obs.add(rev)
+    return set(repo.revs('%ld - public()', obs))
+
+@cachefor('unstable')
+def _computeunstableset(repo):
+    """the set of non obsolete revisions with obsolete parents"""
+    return set(repo.revs('(obsolete()::) - obsolete()'))
+
+@cachefor('suspended')
+def _computesuspendedset(repo):
+    """the set of obsolete parents with non obsolete descendants"""
+    return set(repo.revs('obsolete() and obsolete()::unstable()'))
+
+@cachefor('extinct')
+def _computeextinctset(repo):
+    """the set of obsolete parents without non obsolete descendants"""
+    return set(repo.revs('obsolete() - obsolete()::unstable()'))
+
+
+@cachefor('bumped')
+def _computebumpedset(repo):
+    """the set of revs trying to obsolete public revisions"""
+    # get all possible bumped changesets
+    tonode = repo.changelog.node
+    publicnodes = (tonode(r) for r in repo.revs('public()'))
+    successors = allsuccessors(repo.obsstore, publicnodes,
+                               ignoreflags=bumpedfix)
+    # revisions that are public or already obsolete don't count as bumped
+    query = '%ld - obsolete() - public()'
+    return set(repo.revs(query, _knownrevs(repo, successors)))
+
+def createmarkers(repo, relations, flag=0, metadata=None):
+    """Add obsolete markers between changesets in a repo
+
+    <relations> must be an iterable of (<old>, (<new>, ...)) tuples.
+    `old` and `news` are changectx objects.
+
+    Trying to obsolete a public changeset will raise an exception.
+
+    Current user and date are used except if specified otherwise in the
+    metadata attribute.
+
+    This function operates within a transaction of its own, but does
+    not take any lock on the repo.
+    """
+    # prepare metadata
+    if metadata is None:
+        metadata = {}
+    if 'date' not in metadata:
+        metadata['date'] = '%i %i' % util.makedate()
+    if 'user' not in metadata:
+        metadata['user'] = repo.ui.username()
+    tr = repo.transaction('add-obsolescence-marker')
+    try:
+        for prec, sucs in relations:
+            if not prec.mutable():
+                raise util.Abort("cannot obsolete immutable changeset: %s"
+                                 % prec)
+            nprec = prec.node()
+            nsucs = tuple(s.node() for s in sucs)
+            if nprec in nsucs:
+                raise util.Abort("changeset %s cannot obsolete itself" % prec)
+            repo.obsstore.create(tr, nprec, nsucs, flag, metadata)
+        tr.close()
+    finally:
+        tr.release()
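
The cachefor()/getrevs()/clearobscaches() trio added in the hunk above forms
a small named-set cache registry on the obsstore. The following standalone
sketch (plain Python with a stand-in Store class, not Mercurial code)
restates the pattern so the lazy computation and invalidation are explicit:

cachefuncs = {}

def cachefor(name):
    """Register a function as the computer of the named set."""
    def decorator(func):
        assert name not in cachefuncs
        cachefuncs[name] = func
        return func
    return decorator

class Store(object):
    def __init__(self):
        self.caches = {}              # name -> computed set

def getrevs(store, name):
    """Return the named set, computing and memoizing it on first access."""
    if name not in store.caches:
        store.caches[name] = cachefuncs[name](store)
    return store.caches[name]

def clearobscaches(store):
    store.caches.clear()              # any new marker or changeset drops all sets

@cachefor('obsolete')
def _computeobsolete(store):
    print('computing...')
    return set([2, 5, 7])             # placeholder data

s = Store()
print(getrevs(s, 'obsolete'))         # computes once
print(getrevs(s, 'obsolete'))         # served from the cache
clearobscaches(s)
print(getrevs(s, 'obsolete'))         # recomputed
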
--- a/mercurial/parser.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/parser.py	Fri Oct 19 01:34:50 2012 -0500
@@ -13,7 +13,7 @@
 # tokenizer is an iterator that returns type, value pairs
 # elements is a mapping of types to binding strength, prefix and infix actions
 # an action is a tree node name, a tree label, and an optional match
-# __call__(program) parses program into a labelled tree
+# __call__(program) parses program into a labeled tree
 
 import error
 from i18n import _
--- a/mercurial/parsers.c	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/parsers.c	Fri Oct 19 01:34:50 2012 -0500
@@ -9,6 +9,7 @@
 
 #include <Python.h>
 #include <ctype.h>
+#include <stddef.h>
 #include <string.h>
 
 #include "util.h"
@@ -72,7 +73,7 @@
 	for (start = cur = str, zero = NULL; cur < str + len; cur++) {
 		PyObject *file = NULL, *node = NULL;
 		PyObject *flags = NULL;
-		int nlen;
+		ptrdiff_t nlen;
 
 		if (!*cur) {
 			zero = cur;
@@ -94,7 +95,7 @@
 
 		nlen = cur - zero - 1;
 
-		node = unhexlify(zero + 1, nlen > 40 ? 40 : nlen);
+		node = unhexlify(zero + 1, nlen > 40 ? 40 : (int)nlen);
 		if (!node)
 			goto bail;
 
@@ -1505,11 +1506,16 @@
 
 static char parsers_doc[] = "Efficient content parsing.";
 
+PyObject *encodedir(PyObject *self, PyObject *args);
+PyObject *pathencode(PyObject *self, PyObject *args);
+
 static PyMethodDef methods[] = {
 	{"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
 	{"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
 	{"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
 	{"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
+	{"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"},
+	{"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"},
 	{NULL, NULL}
 };
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/pathencode.c	Fri Oct 19 01:34:50 2012 -0500
@@ -0,0 +1,531 @@
+/*
+ pathencode.c - efficient path name encoding
+
+ Copyright 2012 Facebook
+
+ This software may be used and distributed according to the terms of
+ the GNU General Public License, incorporated herein by reference.
+*/
+
+/*
+ * An implementation of the name encoding scheme used by the fncache
+ * store.  The common case is of a path < 120 bytes long, which is
+ * handled either in a single pass with no allocations or two passes
+ * with a single allocation.  For longer paths, multiple passes are
+ * required.
+ */
+
+#include <Python.h>
+#include <assert.h>
+#include <ctype.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "util.h"
+
+/* state machine for the fast path */
+enum path_state {
+	START,   /* first byte of a path component */
+	A,       /* "AUX" */
+	AU,
+	THIRD,   /* third of a 3-byte sequence, e.g. "AUX", "NUL" */
+	C,       /* "CON" or "COMn" */
+	CO,
+	COMLPT,  /* "COM" or "LPT" */
+	COMLPTn,
+	L,
+	LP,
+	N,
+	NU,
+	P,       /* "PRN" */
+	PR,
+	LDOT,    /* leading '.' */
+	DOT,     /* '.' in a non-leading position */
+	H,       /* ".h" */
+	HGDI,    /* ".hg", ".d", or ".i" */
+	SPACE,
+	DEFAULT, /* byte of a path component after the first */
+};
+
+/* state machine for dir-encoding */
+enum dir_state {
+	DDOT,
+	DH,
+	DHGDI,
+	DDEFAULT,
+};
+
+static inline int inset(const uint32_t bitset[], char c)
+{
+	return bitset[((uint8_t)c) >> 5] & (1 << (((uint8_t)c) & 31));
+}
+
+static inline void charcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
+                            char c)
+{
+	if (dest) {
+		assert(*destlen < destsize);
+		dest[*destlen] = c;
+	}
+	(*destlen)++;
+}
+
+static inline void memcopy(char *dest, Py_ssize_t *destlen, size_t destsize,
+                           const void *src, Py_ssize_t len)
+{
+	if (dest) {
+		assert(*destlen + len < destsize);
+		memcpy((void *)&dest[*destlen], src, len);
+	}
+	*destlen += len;
+}
+
+static inline void hexencode(char *dest, Py_ssize_t *destlen, size_t destsize,
+			     uint8_t c)
+{
+	static const char hexdigit[] = "0123456789abcdef";
+
+	charcopy(dest, destlen, destsize, hexdigit[c >> 4]);
+	charcopy(dest, destlen, destsize, hexdigit[c & 15]);
+}
+
+/* 3-byte escape: tilde followed by two hex digits */
+static inline void escape3(char *dest, Py_ssize_t *destlen, size_t destsize,
+			   char c)
+{
+	charcopy(dest, destlen, destsize, '~');
+	hexencode(dest, destlen, destsize, c);
+}
+
+static Py_ssize_t _encodedir(char *dest, size_t destsize,
+                             const char *src, Py_ssize_t len)
+{
+	enum dir_state state = DDEFAULT;
+	Py_ssize_t i = 0, destlen = 0;
+
+	while (i < len) {
+		switch (state) {
+		case DDOT:
+			switch (src[i]) {
+			case 'd':
+			case 'i':
+				state = DHGDI;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'h':
+				state = DH;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			default:
+				state = DDEFAULT;
+				break;
+			}
+			break;
+		case DH:
+			if (src[i] == 'g') {
+				state = DHGDI;
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DDEFAULT;
+			break;
+		case DHGDI:
+			if (src[i] == '/') {
+				memcopy(dest, &destlen, destsize, ".hg", 3);
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			state = DDEFAULT;
+			break;
+		case DDEFAULT:
+			if (src[i] == '.')
+				state = DDOT;
+			charcopy(dest, &destlen, destsize, src[i++]);
+			break;
+		}
+	}
+
+	return destlen;
+}
+
+PyObject *encodedir(PyObject *self, PyObject *args)
+{
+	Py_ssize_t len, newlen;
+	PyObject *pathobj, *newobj;
+	char *path;
+
+	if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj))
+		return NULL;
+
+	if (PyString_AsStringAndSize(pathobj, &path, &len) == -1) {
+		PyErr_SetString(PyExc_TypeError, "expected a string");
+		return NULL;
+	}
+
+	newlen = len ? _encodedir(NULL, 0, path, len + 1) : 1;
+
+	if (newlen == len + 1) {
+		Py_INCREF(pathobj);
+		return pathobj;
+	}
+
+	newobj = PyString_FromStringAndSize(NULL, newlen);
+
+	if (newobj) {
+		PyString_GET_SIZE(newobj)--;
+		_encodedir(PyString_AS_STRING(newobj), newlen, path,
+			   len + 1);
+	}
+
+	return newobj;
+}
+
+static Py_ssize_t _encode(const uint32_t twobytes[8], const uint32_t onebyte[8],
+			  char *dest, Py_ssize_t destlen, size_t destsize,
+			  const char *src, Py_ssize_t len,
+			  int encodedir)
+{
+	enum path_state state = START;
+	Py_ssize_t i = 0;
+
+	/*
+	 * Python strings end with a zero byte, which we use as a
+	 * terminal token as they are not valid inside path names.
+	 */
+
+	while (i < len) {
+		switch (state) {
+		case START:
+			switch (src[i]) {
+			case '/':
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case '.':
+				state = LDOT;
+				escape3(dest, &destlen, destsize, src[i++]);
+				break;
+			case ' ':
+				state = DEFAULT;
+				escape3(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'a':
+				state = A;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'c':
+				state = C;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'l':
+				state = L;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'n':
+				state = N;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'p':
+				state = P;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			default:
+				state = DEFAULT;
+				break;
+			}
+			break;
+		case A:
+			if (src[i] == 'u') {
+				state = AU;
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DEFAULT;
+			break;
+		case AU:
+			if (src[i] == 'x') {
+				state = THIRD;
+				i++;
+			}
+			else state = DEFAULT;
+			break;
+		case THIRD:
+			state = DEFAULT;
+			switch (src[i]) {
+			case '.':
+			case '/':
+			case '\0':
+				escape3(dest, &destlen, destsize, src[i - 1]);
+				break;
+			default:
+				i--;
+				break;
+			}
+			break;
+		case C:
+			if (src[i] == 'o') {
+				state = CO;
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DEFAULT;
+			break;
+		case CO:
+			if (src[i] == 'm') {
+				state = COMLPT;
+				i++;
+			}
+			else if (src[i] == 'n') {
+				state = THIRD;
+				i++;
+			}
+			else state = DEFAULT;
+			break;
+		case COMLPT:
+			switch (src[i]) {
+			case '1': case '2': case '3': case '4': case '5':
+			case '6': case '7': case '8': case '9':
+				state = COMLPTn;
+				i++;
+				break;
+			default:
+				state = DEFAULT;
+				charcopy(dest, &destlen, destsize, src[i - 1]);
+				break;
+			}
+			break;
+		case COMLPTn:
+			state = DEFAULT;
+			switch (src[i]) {
+			case '.':
+			case '/':
+			case '\0':
+				escape3(dest, &destlen, destsize, src[i - 2]);
+				charcopy(dest, &destlen, destsize, src[i - 1]);
+				break;
+			default:
+				memcopy(dest, &destlen, destsize,
+					&src[i - 2], 2);
+				break;
+			}
+			break;
+		case L:
+			if (src[i] == 'p') {
+				state = LP;
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DEFAULT;
+			break;
+		case LP:
+			if (src[i] == 't') {
+				state = COMLPT;
+				i++;
+			}
+			else state = DEFAULT;
+			break;
+		case N:
+			if (src[i] == 'u') {
+				state = NU;
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DEFAULT;
+			break;
+		case NU:
+			if (src[i] == 'l') {
+				state = THIRD;
+				i++;
+			}
+			else state = DEFAULT;
+			break;
+		case P:
+			if (src[i] == 'r') {
+				state = PR;
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DEFAULT;
+			break;
+		case PR:
+			if (src[i] == 'n') {
+				state = THIRD;
+				i++;
+			}
+			else state = DEFAULT;
+			break;
+		case LDOT:
+			switch (src[i]) {
+			case 'd':
+			case 'i':
+				state = HGDI;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'h':
+				state = H;
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			default:
+				state = DEFAULT;
+				break;
+			}
+			break;
+		case DOT:
+			switch (src[i]) {
+			case '/':
+			case '\0':
+				state = START;
+				memcopy(dest, &destlen, destsize, "~2e", 3);
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'd':
+			case 'i':
+				state = HGDI;
+				charcopy(dest, &destlen, destsize, '.');
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			case 'h':
+				state = H;
+				memcopy(dest, &destlen, destsize, ".h", 2);
+				i++;
+				break;
+			default:
+				state = DEFAULT;
+				charcopy(dest, &destlen, destsize, '.');
+				break;
+			}
+			break;
+		case H:
+			if (src[i] == 'g') {
+				state = HGDI;
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DEFAULT;
+			break;
+		case HGDI:
+			if (src[i] == '/') {
+				state = START;
+				if (encodedir)
+					memcopy(dest, &destlen, destsize, ".hg",
+						3);
+				charcopy(dest, &destlen, destsize, src[i++]);
+			}
+			else state = DEFAULT;
+			break;
+		case SPACE:
+			switch (src[i]) {
+			case '/':
+			case '\0':
+				state = START;
+				memcopy(dest, &destlen, destsize, "~20", 3);
+				charcopy(dest, &destlen, destsize, src[i++]);
+				break;
+			default:
+				state = DEFAULT;
+				charcopy(dest, &destlen, destsize, ' ');
+				break;
+			}
+			break;
+		case DEFAULT:
+			while (inset(onebyte, src[i])) {
+				charcopy(dest, &destlen, destsize, src[i++]);
+				if (i == len)
+					goto done;
+			}
+			switch (src[i]) {
+			case '.':
+				state = DOT;
+				i++;
+				break;
+			case ' ':
+				state = SPACE;
+				i++;
+				break;
+			case '/':
+				state = START;
+				charcopy(dest, &destlen, destsize, '/');
+				i++;
+				break;
+			default:
+				if (inset(onebyte, src[i])) {
+					do {
+						charcopy(dest, &destlen,
+							 destsize, src[i++]);
+					} while (i < len &&
+						 inset(onebyte, src[i]));
+				}
+				else if (inset(twobytes, src[i])) {
+					char c = src[i++];
+					charcopy(dest, &destlen, destsize, '_');
+					charcopy(dest, &destlen, destsize,
+						 c == '_' ? '_' : c + 32);
+				}
+				else
+					escape3(dest, &destlen, destsize,
+						src[i++]);
+				break;
+			}
+			break;
+		}
+	}
+done:
+	return destlen;
+}
+
+static Py_ssize_t basicencode(char *dest, size_t destsize,
+			      const char *src, Py_ssize_t len)
+{
+	static const uint32_t twobytes[8] = { 0, 0, 0x87fffffe };
+
+	static const uint32_t onebyte[8] = {
+		1, 0x2bff3bfa, 0x68000001, 0x2fffffff,
+	};
+
+	Py_ssize_t destlen = 0;
+
+	return _encode(twobytes, onebyte, dest, destlen, destsize,
+		       src, len, 1);
+}
+
+static const Py_ssize_t maxstorepathlen = 120;
+
+/*
+ * We currently implement only basic encoding.
+ *
+ * If a name is too long to encode due to Windows path name limits,
+ * this function returns None.
+ */
+PyObject *pathencode(PyObject *self, PyObject *args)
+{
+	Py_ssize_t len, newlen;
+	PyObject *pathobj, *newobj;
+	char *path;
+
+	if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj))
+		return NULL;
+
+	if (PyString_AsStringAndSize(pathobj, &path, &len) == -1) {
+		PyErr_SetString(PyExc_TypeError, "expected a string");
+		return NULL;
+	}
+
+	if (len > maxstorepathlen) {
+		newobj = Py_None;
+		Py_INCREF(newobj);
+		return newobj;
+	}
+
+	newlen = len ? basicencode(NULL, 0, path, len + 1) : 1;
+
+	if (newlen <= maxstorepathlen + 1) {
+		if (newlen == len + 1) {
+			Py_INCREF(pathobj);
+			return pathobj;
+		}
+
+		newobj = PyString_FromStringAndSize(NULL, newlen);
+
+		if (newobj) {
+			PyString_GET_SIZE(newobj)--;
+			basicencode(PyString_AS_STRING(newobj), newlen, path,
+				    len + 1);
+		}
+	} else {
+		newobj = Py_None;
+		Py_INCREF(newobj);
+	}
+
+	return newobj;
+}
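
pathencode.c above is a C fast path for the fncache store's name encoding.
As a rough orientation, here is a hedged pure-Python sketch of the two passes
the state machines implement: encodedir() protects directory components
ending in .hg, .i or .d, and the character-level pass doubles '_', maps
uppercase letters to '_' plus the lowercase letter, and escapes control bytes
and Windows-unsafe bytes as '~' plus two hex digits. The sketch deliberately
omits the reserved-name handling (AUX, CON, COM1-9, LPT1-9, NUL, PRN) and the
special treatment of leading/trailing '.' and ' ', so it only approximates
the reference code in mercurial/store.py:

def encodedir(path):
    """Protect directory components that end in .hg, .i or .d."""
    return (path
            .replace('.hg/', '.hg.hg/')
            .replace('.i/', '.i.hg/')
            .replace('.d/', '.d.hg/'))

_winreserved = set('\\:*?"<>|~')

def basicencode(path):
    """Character-level pass: double '_', lowercase via a '_' prefix,
    escape control bytes and Windows-unsafe bytes as '~' + two hex digits."""
    out = []
    for c in path:
        if c == '_':
            out.append('__')
        elif 'A' <= c <= 'Z':
            out.append('_' + c.lower())
        elif ord(c) < 32 or ord(c) > 126 or c in _winreserved:
            out.append('~%02x' % ord(c))
        else:
            out.append(c)
    return ''.join(out)

print(encodedir('data/foo.i/bla.i'))      # data/foo.i.hg/bla.i
print(basicencode('data/Foo:Bar.txt.i'))  # data/_foo~3a_bar.txt.i
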
--- a/mercurial/phases.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/phases.py	Fri Oct 19 01:34:50 2012 -0500
@@ -104,6 +104,7 @@
 from node import nullid, nullrev, bin, hex, short
 from i18n import _
 import util
+import obsolete
 
 allphases = public, draft, secret = range(3)
 trackedphases = allphases[1:]
@@ -195,7 +196,7 @@
         return self._phaserevs
 
     def phase(self, repo, rev):
-        # We need a repo argument here to be able to build _phaserev
+        # We need a repo argument here to be able to build _phaserevs
         # if necessary. The repository instance is not stored in
         # phasecache to avoid reference cycles. The changelog instance
         # is not stored because it is a filecache() property and can
@@ -244,6 +245,7 @@
             # declare deleted root in the target phase
             if targetphase != 0:
                 self.retractboundary(repo, targetphase, delroots)
+        obsolete.clearobscaches(repo)
 
     def retractboundary(self, repo, targetphase, nodes):
         # Be careful to preserve shallow-copied values: do not update
@@ -260,6 +262,7 @@
             ctxs = repo.set('roots(%ln::)', currentroots)
             currentroots.intersection_update(ctx.node() for ctx in ctxs)
             self._updateroots(targetphase, currentroots)
+        obsolete.clearobscaches(repo)
 
 def advanceboundary(repo, targetphase, nodes):
     """Add nodes to a phase changing other nodes phases if necessary.
@@ -312,7 +315,7 @@
     return keys
 
 def pushphase(repo, nhex, oldphasestr, newphasestr):
-    """List phases root for serialisation over pushkey"""
+    """List phases root for serialization over pushkey"""
     lock = repo.lock()
     try:
         currentphase = repo[nhex].phase()
@@ -363,7 +366,7 @@
     """compute new head of a subset minus another
 
     * `heads`: define the first subset
-    * `rroots`: define the second we substract to the first"""
+    * `roots`: define the second we subtract from the first"""
     revset = repo.set('heads((%ln + parents(%ln)) - (%ln::%ln))',
                       heads, roots, roots, heads)
     return [c.node() for c in revset]
@@ -385,3 +388,6 @@
             msg = _("phases.new-commit: not a valid phase name ('%s')")
             raise error.ConfigError(msg % v)
 
+def hassecret(repo):
+    """utility function that check if a repo have any secret changeset."""
+    return bool(repo._phasecache.phaseroots[2])
--- a/mercurial/posix.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/posix.py	Fri Oct 19 01:34:50 2012 -0500
@@ -20,6 +20,16 @@
 umask = os.umask(0)
 os.umask(umask)
 
+def split(p):
+    '''Same as os.path.split, but faster'''
+    ht = p.rsplit('/', 1)
+    if len(ht) == 1:
+        return '', p
+    nh = ht[0].rstrip('/')
+    if nh:
+        return nh, ht[1]
+    return ht
+
 def openhardlinks():
     '''return true if it is safe to hold open file handles to hardlinks'''
     return True
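
Worked examples for the split() helper above, derived directly from the code
as written; the last case shows where this version diverges from
os.path.split for names directly under the root (it returns the two-element
list from rsplit rather than ('/', 'file')):

def split(p):
    # body repeated verbatim from the hunk above so the examples run on their own
    ht = p.rsplit('/', 1)
    if len(ht) == 1:
        return '', p
    nh = ht[0].rstrip('/')
    if nh:
        return nh, ht[1]
    return ht

print(split('a/b/c'))   # ('a/b', 'c')
print(split('a//b'))    # ('a', 'b')    trailing slashes on the head are stripped
print(split('file'))    # ('', 'file')
print(split('/file'))   # ['', 'file']  os.path.split() would give ('/', 'file')
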
--- a/mercurial/pure/osutil.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/pure/osutil.py	Fri Oct 19 01:34:50 2012 -0500
@@ -82,7 +82,7 @@
 
     _FILE_ATTRIBUTE_NORMAL = 0x80
 
-    # _open_osfhandle
+    # open_osfhandle flags
     _O_RDONLY = 0x0000
     _O_RDWR = 0x0002
     _O_APPEND = 0x0008
--- a/mercurial/pure/parsers.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/pure/parsers.py	Fri Oct 19 01:34:50 2012 -0500
@@ -70,7 +70,7 @@
 
 def parse_dirstate(dmap, copymap, st):
     parents = [st[:20], st[20: 40]]
-    # deref fields so they will be local in loop
+    # dereference fields so they will be local in loop
     format = ">cllll"
     e_size = struct.calcsize(format)
     pos1 = 40
--- a/mercurial/pvec.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/pvec.py	Fri Oct 19 01:34:50 2012 -0500
@@ -57,7 +57,7 @@
 _depthbytes = _depthbits / 8
 _vecbytes = _bytes - _depthbytes
 _vecbits = _vecbytes * 8
-_radius = (_vecbits - 30) / 2 # high probability vecs are related
+_radius = (_vecbits - 30) / 2 # high probability vectors are related
 
 def _bin(bs):
     '''convert a bytestring to a long'''
--- a/mercurial/py3kcompat.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/py3kcompat.py	Fri Oct 19 01:34:50 2012 -0500
@@ -12,7 +12,7 @@
 def bytesformatter(format, args):
     '''Custom implementation of a formatter for bytestrings.
 
-    This function currently relias on the string formatter to do the
+    This function currently relies on the string formatter to do the
     formatting and always returns bytes objects.
 
     >>> bytesformatter(20, 10)
--- a/mercurial/repair.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/repair.py	Fri Oct 19 01:34:50 2012 -0500
@@ -114,7 +114,7 @@
     newbmtarget = repo.revs('sort(heads((::%ld) - (%ld)), -rev)',
                             tostrip, tostrip)
     if newbmtarget:
-        newbmtarget = newbmtarget[0]
+        newbmtarget = repo[newbmtarget[0]].node()
     else:
         newbmtarget = '.'
 
--- a/mercurial/revlog.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/revlog.py	Fri Oct 19 01:34:50 2012 -0500
@@ -256,6 +256,13 @@
     def __iter__(self):
         for i in xrange(len(self)):
             yield i
+    def revs(self, start=0, stop=None):
+        """iterate over all rev in this revlog (from start to stop)"""
+        if stop is None:
+            stop = len(self)
+        else:
+            stop += 1
+        return xrange(start, stop)
 
     @util.propertycache
     def nodemap(self):
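
One detail of the revs() iterator added above is worth spelling out: the stop
bound is inclusive, which is why later hunks can replace
xrange(lrev + 1, tiprev + 1) with self.revs(lrev + 1, tiprev). A standalone
restatement (with len(self) passed in explicitly, using xrange as in the
Python 2 code above) to show the boundary handling:

def revs(length, start=0, stop=None):
    if stop is None:
        stop = length            # open-ended: iterate up to length - 1
    else:
        stop += 1                # stop is inclusive
    return xrange(start, stop)

print(list(revs(10, 2, 5)))      # [2, 3, 4, 5]
print(list(revs(10, 7)))         # [7, 8, 9]
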
@@ -374,7 +381,7 @@
             return
 
         seen = set(revs)
-        for i in xrange(first + 1, len(self)):
+        for i in self.revs(start=first + 1):
             for x in self.parentrevs(i):
                 if x != nullrev and x in seen:
                     seen.add(i)
@@ -547,9 +554,9 @@
         # Our topologically sorted list of output nodes.
         orderedout = []
         # Don't start at nullid since we don't want nullid in our output list,
-        # and if nullid shows up in descedents, empty parents will look like
+        # and if nullid shows up in descendants, empty parents will look like
         # they're descendants.
-        for r in xrange(max(lowestrev, 0), highestrev + 1):
+        for r in self.revs(start=max(lowestrev, 0), stop=highestrev + 1):
             n = self.node(r)
             isdescendant = False
             if lowestrev == nullrev:  # Everybody is a descendant of nullid
@@ -600,16 +607,20 @@
         try:
             return self.index.headrevs()
         except AttributeError:
-            pass
+            return self._headrevs()
+
+    def _headrevs(self):
         count = len(self)
         if not count:
             return [nullrev]
-        ishead = [1] * (count + 1)
+        # we won't iterate over filtered revs, so nobody is a head at start
+        ishead = [0] * (count + 1)
         index = self.index
-        for r in xrange(count):
+        for r in self:
+            ishead[r] = 1  # I may be a head
             e = index[r]
-            ishead[e[5]] = ishead[e[6]] = 0
-        return [r for r in xrange(count) if ishead[r]]
+            ishead[e[5]] = ishead[e[6]] = 0  # my parents are not
+        return [r for r, val in enumerate(ishead) if val]
 
     def heads(self, start=None, stop=None):
         """return the list of all nodes that have no children
@@ -634,7 +645,7 @@
         heads = set((startrev,))
 
         parentrevs = self.parentrevs
-        for r in xrange(startrev + 1, len(self)):
+        for r in self.revs(start=startrev + 1):
             for p in parentrevs(r):
                 if p in reachable:
                     if r not in stoprevs:
@@ -649,7 +660,7 @@
         """find the children of a given node"""
         c = []
         p = self.rev(node)
-        for r in range(p + 1, len(self)):
+        for r in self.revs(start=p + 1):
             prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
             if prevs:
                 for pr in prevs:
@@ -1015,7 +1026,7 @@
         see addrevision for argument descriptions.
         invariants:
         - text is optional (can be None); if not set, cachedelta must be set.
-          if both are set, they must correspond to eachother.
+          if both are set, they must correspond to each other.
         """
         btext = [text]
         def buildtext():
--- a/mercurial/revset.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/revset.py	Fri Oct 19 01:34:50 2012 -0500
@@ -12,6 +12,7 @@
 import match as matchmod
 from i18n import _
 import encoding
+import obsolete as obsmod
 
 def _revancestors(repo, revs, followfirst):
     """Like revlog.ancestors(), but supports followfirst."""
@@ -40,7 +41,7 @@
         return
 
     seen = set(revs)
-    for i in xrange(first + 1, len(cl)):
+    for i in cl.revs(first + 1):
         for x in cl.parentrevs(i)[:cut]:
             if x != nullrev and x in seen:
                 seen.add(i)
@@ -214,11 +215,11 @@
 def rangeset(repo, subset, x, y):
     m = getset(repo, subset, x)
     if not m:
-        m = getset(repo, range(len(repo)), x)
+        m = getset(repo, list(repo), x)
 
     n = getset(repo, subset, y)
     if not n:
-        n = getset(repo, range(len(repo)), y)
+        n = getset(repo, list(repo), y)
 
     if not m or not n:
         return []
@@ -233,7 +234,7 @@
 
 def dagrange(repo, subset, x, y):
     if subset:
-        r = range(len(repo))
+        r = list(repo)
         xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
         s = set(subset)
         return [r for r in xs if r in s]
@@ -276,7 +277,7 @@
     """
     # i18n: "ancestor" is a keyword
     l = getargs(x, 2, 2, _("ancestor requires two arguments"))
-    r = range(len(repo))
+    r = list(repo)
     a = getset(repo, r, l[0])
     b = getset(repo, r, l[1])
     if len(a) != 1 or len(b) != 1:
@@ -287,7 +288,7 @@
     return [r for r in an if r in subset]
 
 def _ancestors(repo, subset, x, followfirst=False):
-    args = getset(repo, range(len(repo)), x)
+    args = getset(repo, list(repo), x)
     if not args:
         return []
     s = set(_revancestors(repo, args, followfirst)) | set(args)
@@ -335,7 +336,7 @@
     Changesets marked in the specified bisect status:
 
     - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
-    - ``goods``, ``bads``      : csets topologicaly good/bad
+    - ``goods``, ``bads``      : csets topologically good/bad
     - ``range``              : csets taking part in the bisection
     - ``pruned``             : csets that are goods, bads or skipped
     - ``untested``           : csets whose fate is yet unknown
@@ -414,13 +415,24 @@
         else:
             return [r for r in subset if matcher(repo[r].branch())]
 
-    s = getset(repo, range(len(repo)), x)
+    s = getset(repo, list(repo), x)
     b = set()
     for r in s:
         b.add(repo[r].branch())
     s = set(s)
     return [r for r in subset if r in s or repo[r].branch() in b]
 
+def bumped(repo, subset, x):
+    """``bumped()``
+    Mutable changesets marked as successors of public changesets.
+
+    Only non-public and non-obsolete changesets can be `bumped`.
+    """
+    # i18n: "bumped" is a keyword
+    getargs(x, 0, 0, _("bumped takes no arguments"))
+    bumped = obsmod.getrevs(repo, 'bumped')
+    return [r for r in subset if r in bumped]
+
 def checkstatus(repo, subset, pat, field):
     m = None
     s = []
@@ -465,7 +477,7 @@
     """``children(set)``
     Child changesets of changesets in set.
     """
-    s = set(getset(repo, range(len(repo)), x))
+    s = set(getset(repo, list(repo), x))
     cs = _children(repo, subset, s)
     return [r for r in subset if r in cs]
 
@@ -546,7 +558,7 @@
     return l
 
 def _descendants(repo, subset, x, followfirst=False):
-    args = getset(repo, range(len(repo)), x)
+    args = getset(repo, list(repo), x)
     if not args:
         return []
     s = set(_revdescendants(repo, args, followfirst)) | set(args)
@@ -570,9 +582,9 @@
     is the same as passing all().
     """
     if x is not None:
-        args = set(getset(repo, range(len(repo)), x))
+        args = set(getset(repo, list(repo), x))
     else:
-        args = set(getall(repo, range(len(repo)), x))
+        args = set(getall(repo, list(repo), x))
 
     dests = set()
 
@@ -594,7 +606,7 @@
 
             # The visited lineage is a match if the current source is in the arg
             # set.  Since every candidate dest is visited by way of iterating
-            # subset, any dests futher back in the lineage will be tested by a
+            # subset, any dests further back in the lineage will be tested by a
             # different iteration over subset.  Likewise, if the src was already
             # selected, the current lineage can be selected without going back
             # further.
@@ -621,8 +633,8 @@
     """
     # i18n: "extinct" is a keyword
     getargs(x, 0, 0, _("extinct takes no arguments"))
-    extinctset = set(repo.revs('(obsolete()::) - (::(not obsolete()))'))
-    return [r for r in subset if r in extinctset]
+    extincts = obsmod.getrevs(repo, 'extinct')
+    return [r for r in subset if r in extincts]
 
 def extra(repo, subset, x):
     """``extra(label, [value])``
@@ -838,6 +850,14 @@
     ps = set(parents(repo, subset, x))
     return [r for r in s if r not in ps]
 
+def hidden(repo, subset, x):
+    """``hidden()``
+    Hidden changesets.
+    """
+    # i18n: "hidden" is a keyword
+    getargs(x, 0, 0, _("hidden takes no arguments"))
+    return [r for r in subset if r in repo.hiddenrevs]
+
 def keyword(repo, subset, x):
     """``keyword(string)``
     Search commit message, user name, and names of changed files for
@@ -868,7 +888,7 @@
         # i18n: "limit" is a keyword
         raise error.ParseError(_("limit expects a number"))
     ss = set(subset)
-    os = getset(repo, range(len(repo)), l[0])[:lim]
+    os = getset(repo, list(repo), l[0])[:lim]
     return [r for r in os if r in ss]
 
 def last(repo, subset, x):
@@ -886,14 +906,14 @@
         # i18n: "last" is a keyword
         raise error.ParseError(_("last expects a number"))
     ss = set(subset)
-    os = getset(repo, range(len(repo)), l[0])[-lim:]
+    os = getset(repo, list(repo), l[0])[-lim:]
     return [r for r in os if r in ss]
 
 def maxrev(repo, subset, x):
     """``max(set)``
     Changeset with highest revision number in set.
     """
-    os = getset(repo, range(len(repo)), x)
+    os = getset(repo, list(repo), x)
     if os:
         m = max(os)
         if m in subset:
@@ -909,11 +929,28 @@
     cl = repo.changelog
     return [r for r in subset if cl.parentrevs(r)[1] != -1]
 
+def branchpoint(repo, subset, x):
+    """``branchpoint()``
+    Changesets with more than one child.
+    """
+    # i18n: "branchpoint" is a keyword
+    getargs(x, 0, 0, _("branchpoint takes no arguments"))
+    cl = repo.changelog
+    if not subset:
+        return []
+    baserev = min(subset)
+    parentscount = [0]*(len(repo) - baserev)
+    for r in cl.revs(start=baserev + 1):
+        for p in cl.parentrevs(r):
+            if p >= baserev:
+                parentscount[p - baserev] += 1
+    return [r for r in subset if (parentscount[r - baserev] > 1)]
+
 def minrev(repo, subset, x):
     """``min(set)``
     Changeset with lowest revision number in set.
     """
-    os = getset(repo, range(len(repo)), x)
+    os = getset(repo, list(repo), x)
     if os:
         m = min(os)
         if m in subset:
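
The branchpoint() predicate added above finds revisions with more than one
child by counting parent edges from baserev upward. A standalone sketch of
the same counting idea on a toy DAG (not Mercurial code):

# rev -> (p1, p2); -1 means "no parent".  Rev 1 has two children (2 and 3),
# so it is the only branchpoint.
parents = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}

def branchpoints(subset):
    baserev = min(subset)
    counts = [0] * (max(parents) + 1 - baserev)
    for r in sorted(parents):
        if r <= baserev:
            continue
        for p in parents[r]:
            if p >= baserev:
                counts[p - baserev] += 1
    return [r for r in subset if counts[r - baserev] > 1]

print(branchpoints([0, 1, 2, 3]))  # [1]
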
@@ -951,7 +988,8 @@
     Mutable changeset with a newer version."""
     # i18n: "obsolete" is a keyword
     getargs(x, 0, 0, _("obsolete takes no arguments"))
-    return [r for r in subset if repo[r].obsolete()]
+    obsoletes = obsmod.getrevs(repo, 'obsolete')
+    return [r for r in subset if r in obsoletes]
 
 def origin(repo, subset, x):
     """``origin([set])``
@@ -962,9 +1000,9 @@
     for the first operation is selected.
     """
     if x is not None:
-        args = set(getset(repo, range(len(repo)), x))
+        args = set(getset(repo, list(repo), x))
     else:
-        args = set(getall(repo, range(len(repo)), x))
+        args = set(getall(repo, list(repo), x))
 
     def _firstsrc(rev):
         src = _getrevsource(repo, rev)
@@ -1014,7 +1052,7 @@
 
     ps = set()
     cl = repo.changelog
-    for r in getset(repo, range(len(repo)), x):
+    for r in getset(repo, list(repo), x):
         ps.add(cl.parentrevs(r)[0])
     return [r for r in subset if r in ps]
 
@@ -1032,7 +1070,7 @@
 
     ps = set()
     cl = repo.changelog
-    for r in getset(repo, range(len(repo)), x):
+    for r in getset(repo, list(repo), x):
         ps.add(cl.parentrevs(r)[1])
     return [r for r in subset if r in ps]
 
@@ -1046,7 +1084,7 @@
 
     ps = set()
     cl = repo.changelog
-    for r in getset(repo, range(len(repo)), x):
+    for r in getset(repo, list(repo), x):
         ps.update(cl.parentrevs(r))
     return [r for r in subset if r in ps]
 
@@ -1182,7 +1220,7 @@
     # i18n: "matching" is a keyword
     l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
 
-    revs = getset(repo, xrange(len(repo)), l[0])
+    revs = getset(repo, repo.changelog, l[0])
 
     fieldlist = ['metadata']
     if len(l) > 1:
@@ -1280,7 +1318,7 @@
     """``roots(set)``
     Changesets in set with no parent changeset in set.
     """
-    s = set(getset(repo, xrange(len(repo)), x))
+    s = set(getset(repo, repo.changelog, x))
     subset = [r for r in subset if r in s]
     cs = _children(repo, subset, s)
     return [r for r in subset if r not in cs]
@@ -1429,8 +1467,8 @@
     """
     # i18n: "unstable" is a keyword
     getargs(x, 0, 0, _("unstable takes no arguments"))
-    unstableset = set(repo.revs('(obsolete()::) - obsolete()'))
-    return [r for r in subset if r in unstableset]
+    unstables = obsmod.getrevs(repo, 'unstable')
+    return [r for r in subset if r in unstables]
 
 
 def user(repo, subset, x):
@@ -1464,6 +1502,8 @@
     "bisected": bisected,
     "bookmark": bookmark,
     "branch": branch,
+    "branchpoint": branchpoint,
+    "bumped": bumped,
     "children": children,
     "closed": closed,
     "contains": contains,
@@ -1484,6 +1524,7 @@
     "grep": grep,
     "head": head,
     "heads": heads,
+    "hidden": hidden,
     "id": node_,
     "keyword": keyword,
     "last": last,
--- a/mercurial/scmutil.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/scmutil.py	Fri Oct 19 01:34:50 2012 -0500
@@ -27,6 +27,14 @@
     else:
         ui.status(_("no changes found\n"))
 
+def checknewlabel(repo, lbl, kind):
+    if lbl in ['tip', '.', 'null']:
+        raise util.Abort(_("the name '%s' is reserved") % lbl)
+    for c in (':', '\0', '\n', '\r'):
+        if c in lbl:
+            raise util.Abort(_("%r cannot be used in a %s name") %
+                               (c, kind))
+
 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
     if '\r' in f or '\n' in f:
@@ -167,7 +175,7 @@
         # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
         self.auditeddir.update(prefixes)
 
-class abstractopener(object):
+class abstractvfs(object):
     """Abstract base class; cannot be instantiated"""
 
     def __init__(self, *args, **kwargs):
@@ -204,9 +212,6 @@
         finally:
             fp.close()
 
-    def mkdir(self, path=None):
-        return os.mkdir(self.join(path))
-
     def exists(self, path=None):
         return os.path.exists(self.join(path))
 
@@ -219,8 +224,17 @@
     def makedirs(self, path=None, mode=None):
         return util.makedirs(self.join(path), mode)
 
-class opener(abstractopener):
-    '''Open files relative to a base directory
+    def mkdir(self, path=None):
+        return os.mkdir(self.join(path))
+
+    def readdir(self, path=None, stat=None, skip=None):
+        return osutil.listdir(self.join(path), stat, skip)
+
+    def stat(self, path=None):
+        return os.stat(self.join(path))
+
+class vfs(abstractvfs):
+    '''Operate on files relative to a base directory
 
     This class is used to hide the details of COW semantics and
     remote file access from higher level code.
@@ -229,13 +243,21 @@
         if expand:
             base = os.path.realpath(util.expandpath(base))
         self.base = base
-        self._audit = audit
-        if audit:
-            self.auditor = pathauditor(base)
+        self._setmustaudit(audit)
+        self.createmode = None
+        self._trustnlink = None
+
+    def _getmustaudit(self):
+        return self._audit
+
+    def _setmustaudit(self, onoff):
+        self._audit = onoff
+        if onoff:
+            self.auditor = pathauditor(self.base)
         else:
             self.auditor = util.always
-        self.createmode = None
-        self._trustnlink = None
+
+    mustaudit = property(_getmustaudit, _setmustaudit)
 
     @util.propertycache
     def _cansymlink(self):
@@ -258,7 +280,7 @@
             mode += "b" # for that other OS
 
         nlink = -1
-        dirname, basename = os.path.split(f)
+        dirname, basename = util.split(f)
         # If basename is empty, then the path is malformed because it points
         # to a directory. Let the posixfile() call below raise IOError.
         if basename and mode not in ('r', 'rb'):
@@ -313,10 +335,7 @@
                 raise OSError(err.errno, _('could not symlink to %r: %s') %
                               (src, err.strerror), linkname)
         else:
-            f = self(dst, "w")
-            f.write(src)
-            f.close()
-            self._fixfilemode(dst)
+            self.write(dst, src)
 
     def audit(self, path):
         self.auditor(path)
@@ -327,8 +346,10 @@
         else:
             return self.base
 
-class filteropener(abstractopener):
-    '''Wrapper opener for filtering filenames with a function.'''
+opener = vfs
+
+class filtervfs(abstractvfs):
+    '''Wrapper vfs for filtering filenames with a function.'''
 
     def __init__(self, opener, filter):
         self._filter = filter
@@ -337,6 +358,14 @@
     def __call__(self, path, *args, **kwargs):
         return self._orig(self._filter(path), *args, **kwargs)
 
+    def join(self, path):
+        if path:
+            return self._orig.join(self._filter(path))
+        else:
+            return self._orig.join(path)
+
+filteropener = filtervfs
+
 def canonpath(root, cwd, myname, auditor=None):
     '''return the canonical path of myname, given cwd and root'''
     if util.endswithsep(root):
@@ -375,7 +404,7 @@
                 name = os.path.join(*rel)
                 auditor(name)
                 return util.pconvert(name)
-            dirname, basename = os.path.split(name)
+            dirname, basename = util.split(name)
             rel.append(basename)
             if dirname == name:
                 break
@@ -619,7 +648,7 @@
 
         # fall through to new-style queries if old-style fails
         m = revset.match(repo.ui, spec)
-        dl = [r for r in m(repo, xrange(len(repo))) if r not in seen]
+        dl = [r for r in m(repo, list(repo)) if r not in seen]
         l.extend(dl)
         seen.update(dl)
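
The mustaudit property introduced on vfs above makes the audit flag switchable after construction: assigning to it swaps the path auditor in or out, which is what lets the stream-out code in wireproto.py (further down in this changeset) temporarily disable auditing. A minimal sketch of the getter/setter property idiom, with an always-true stand-in for pathauditor:

    # Sketch of the mustaudit property pattern; alwaysok() stands in for
    # util.always / pathauditor(base), which the real vfs installs.
    def alwaysok(path):
        return True

    class examplevfs(object):
        def __init__(self, audit=True):
            self._setmustaudit(audit)

        def _getmustaudit(self):
            return self._audit

        def _setmustaudit(self, onoff):
            self._audit = onoff
            self.auditor = alwaysok      # real code picks the auditor here

        mustaudit = property(_getmustaudit, _setmustaudit)

    v = examplevfs()
    old = v.mustaudit
    v.mustaudit = False    # disable auditing for a bulk operation
    v.mustaudit = old      # restore it afterwards
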
 
--- a/mercurial/setdiscovery.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/setdiscovery.py	Fri Oct 19 01:34:50 2012 -0500
@@ -84,9 +84,6 @@
                     abortwhenunrelated=True):
     '''Return a tuple (common, anyincoming, remoteheads) used to identify
     missing nodes from or in remote.
-
-    shortcutlocal determines whether we try use direct access to localrepo if
-    remote is actually local.
     '''
     roundtrips = 0
     cl = local.changelog
@@ -109,7 +106,8 @@
         srvheadhashes = srvheadhashesref.value
         yesno = yesnoref.value
     else:
-        # compatibitity with pre-batch, but post-known remotes during 1.9 devel
+        # compatibility with pre-batch, but post-known remotes during 1.9
+        # development
         srvheadhashes = remote.heads()
         sample = []
 
--- a/mercurial/sshserver.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/sshserver.py	Fri Oct 19 01:34:50 2012 -0500
@@ -71,8 +71,9 @@
         self.fout.flush()
 
     def sendstream(self, source):
+        write = self.fout.write
         for chunk in source.gen:
-            self.fout.write(chunk)
+            write(chunk)
         self.fout.flush()
 
     def sendpushresponse(self, rsp):
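
Binding self.fout.write to a local name before the loop is a deliberate micro-optimization: the attribute lookup is done once per stream rather than once per chunk. A rough, illustrative way to observe the difference (timings vary by platform):

    # Hoisting an attribute lookup out of a hot loop, as sendstream() now does.
    import timeit

    setup = "import os; out = open(os.devnull, 'w'); chunks = ['x'] * 10000"
    attr  = timeit.Timer("for c in chunks: out.write(c)", setup)
    local = timeit.Timer("write = out.write\nfor c in chunks: write(c)", setup)

    print attr.timeit(number=100)
    print local.timeit(number=100)    # usually slightly faster
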
--- a/mercurial/statichttprepo.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/statichttprepo.py	Fri Oct 19 01:34:50 2012 -0500
@@ -64,7 +64,7 @@
     urlopener = url.opener(ui, authinfo)
     urlopener.add_handler(byterange.HTTPRangeHandler())
 
-    class statichttpopener(scmutil.abstractopener):
+    class statichttpvfs(scmutil.abstractvfs):
         def __init__(self, base):
             self.base = base
 
@@ -74,7 +74,13 @@
             f = "/".join((self.base, urllib.quote(path)))
             return httprangereader(f, urlopener)
 
-    return statichttpopener
+        def join(self, path):
+            if path:
+                return os.path.join(self.base, path)
+            else:
+                return self.base
+
+    return statichttpvfs
 
 class statichttppeer(localrepo.localpeer):
     def local(self):
--- a/mercurial/store.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/store.py	Fri Oct 19 01:34:50 2012 -0500
@@ -6,29 +6,31 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import osutil, scmutil, util
+import scmutil, util, parsers
 import os, stat, errno
 
 _sha = util.sha1
 
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
-def encodedir(path):
+def _encodedir(path):
     '''
-    >>> encodedir('data/foo.i')
+    >>> _encodedir('data/foo.i')
     'data/foo.i'
-    >>> encodedir('data/foo.i/bla.i')
+    >>> _encodedir('data/foo.i/bla.i')
     'data/foo.i.hg/bla.i'
-    >>> encodedir('data/foo.i.hg/bla.i')
+    >>> _encodedir('data/foo.i.hg/bla.i')
     'data/foo.i.hg.hg/bla.i'
+    >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
+    'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
     '''
-    if not path.startswith('data/'):
-        return path
     return (path
             .replace(".hg/", ".hg.hg/")
             .replace(".i/", ".i.hg/")
             .replace(".d/", ".d.hg/"))
 
+encodedir = getattr(parsers, 'encodedir', _encodedir)
+
 def decodedir(path):
     '''
     >>> decodedir('data/foo.i')
@@ -38,7 +40,7 @@
     >>> decodedir('data/foo.i.hg.hg/bla.i')
     'data/foo.i.hg/bla.i'
     '''
-    if not path.startswith('data/') or ".hg/" not in path:
+    if ".hg/" not in path:
         return path
     return (path
             .replace(".d.hg/", ".d/")
@@ -91,10 +93,24 @@
                     pass
             else:
                 raise KeyError
-    return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
-            lambda s: decodedir("".join(list(decode(s)))))
+    return (lambda s: ''.join([cmap[c] for c in s]),
+            lambda s: ''.join(list(decode(s))))
+
+_encodefname, _decodefname = _buildencodefun()
 
-encodefilename, decodefilename = _buildencodefun()
+def encodefilename(s):
+    '''
+    >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
+    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
+    '''
+    return _encodefname(encodedir(s))
+
+def decodefilename(s):
+    '''
+    >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
+    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
+    '''
+    return decodedir(_decodefname(s))
 
 def _buildlowerencodefun():
     '''
@@ -118,47 +134,89 @@
 
 lowerencode = _buildlowerencodefun()
 
-_winreservednames = '''con prn aux nul
-    com1 com2 com3 com4 com5 com6 com7 com8 com9
-    lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
+# Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
+_winres3 = ('aux', 'con', 'prn', 'nul') # length 3
+_winres4 = ('com', 'lpt')               # length 4 (with trailing 1..9)
 def _auxencode(path, dotencode):
     '''
     Encodes filenames containing names reserved by Windows or which end in
     period or space. Does not touch other single reserved characters c.
     Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
     Additionally encodes space or period at the beginning, if dotencode is
-    True.
-    path is assumed to be all lowercase.
+    True. Parameter path is assumed to be all lowercase.
+    A segment only needs encoding if a reserved name appears as a
+    basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
+    doesn't need encoding.
 
-    >>> _auxencode('.foo/aux.txt/txt.aux/con/prn/nul/foo.', True)
-    '~2efoo/au~78.txt/txt.aux/co~6e/pr~6e/nu~6c/foo~2e'
-    >>> _auxencode('.com1com2/lpt9.lpt4.lpt1/conprn/foo.', False)
-    '.com1com2/lp~749.lpt4.lpt1/conprn/foo~2e'
-    >>> _auxencode('foo. ', True)
-    'foo.~20'
-    >>> _auxencode(' .foo', True)
-    '~20.foo'
+    >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
+    >>> _auxencode(s.split('/'), True)
+    ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
+    >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
+    >>> _auxencode(s.split('/'), False)
+    ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
+    >>> _auxencode(['foo. '], True)
+    ['foo.~20']
+    >>> _auxencode([' .foo'], True)
+    ['~20.foo']
     '''
-    res = []
-    for n in path.split('/'):
-        if n:
-            base = n.split('.')[0]
-            if base and (base in _winreservednames):
+    for i, n in enumerate(path):
+        if not n:
+            continue
+        if dotencode and n[0] in '. ':
+            n = "~%02x" % ord(n[0]) + n[1:]
+            path[i] = n
+        else:
+            l = n.find('.')
+            if l == -1:
+                l = len(n)
+            if ((l == 3 and n[:3] in _winres3) or
+                (l == 4 and n[3] <= '9' and n[3] >= '1'
+                        and n[:3] in _winres4)):
                 # encode third letter ('aux' -> 'au~78')
                 ec = "~%02x" % ord(n[2])
                 n = n[0:2] + ec + n[3:]
-            if n[-1] in '. ':
-                # encode last period or space ('foo...' -> 'foo..~2e')
-                n = n[:-1] + "~%02x" % ord(n[-1])
-            if dotencode and n[0] in '. ':
-                n = "~%02x" % ord(n[0]) + n[1:]
-        res.append(n)
-    return '/'.join(res)
+                path[i] = n
+        if n[-1] in '. ':
+            # encode last period or space ('foo...' -> 'foo..~2e')
+            path[i] = n[:-1] + "~%02x" % ord(n[-1])
+    return path
 
 _maxstorepathlen = 120
 _dirprefixlen = 8
 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
-def _hybridencode(path, auxencode):
+
+def _hashencode(path, dotencode):
+    digest = _sha(path).hexdigest()
+    le = lowerencode(path).split('/')[1:]
+    parts = _auxencode(le, dotencode)
+    basename = parts[-1]
+    _root, ext = os.path.splitext(basename)
+    sdirs = []
+    sdirslen = 0
+    for p in parts[:-1]:
+        d = p[:_dirprefixlen]
+        if d[-1] in '. ':
+            # Windows can't access dirs ending in period or space
+            d = d[:-1] + '_'
+        if sdirslen == 0:
+            t = len(d)
+        else:
+            t = sdirslen + 1 + len(d)
+            if t > _maxshortdirslen:
+                break
+        sdirs.append(d)
+        sdirslen = t
+    dirs = '/'.join(sdirs)
+    if len(dirs) > 0:
+        dirs += '/'
+    res = 'dh/' + dirs + digest + ext
+    spaceleft = _maxstorepathlen - len(res)
+    if spaceleft > 0:
+        filler = basename[:spaceleft]
+        res = 'dh/' + dirs + filler + digest + ext
+    return res
+
+def _hybridencode(path, dotencode):
     '''encodes path with a length limit
 
     Encodes all paths that begin with 'data/', according to the following.
@@ -169,7 +227,7 @@
     characters are encoded as '~xx', where xx is the two digit hex code
     of the character (see encodefilename).
     Relevant path components consisting of Windows reserved filenames are
-    masked by encoding the third character ('aux' -> 'au~78', see auxencode).
+    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
 
     Hashed encoding (not reversible):
 
@@ -189,42 +247,37 @@
     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
     encoding was used.
     '''
-    if not path.startswith('data/'):
-        return path
-    # escape directories ending with .i and .d
     path = encodedir(path)
-    ndpath = path[len('data/'):]
-    res = 'data/' + auxencode(encodefilename(ndpath))
+    ef = _encodefname(path).split('/')
+    res = '/'.join(_auxencode(ef, dotencode))
     if len(res) > _maxstorepathlen:
-        digest = _sha(path).hexdigest()
-        aep = auxencode(lowerencode(ndpath))
-        _root, ext = os.path.splitext(aep)
-        parts = aep.split('/')
-        basename = parts[-1]
-        sdirs = []
-        for p in parts[:-1]:
-            d = p[:_dirprefixlen]
-            if d[-1] in '. ':
-                # Windows can't access dirs ending in period or space
-                d = d[:-1] + '_'
-            t = '/'.join(sdirs) + '/' + d
-            if len(t) > _maxshortdirslen:
-                break
-            sdirs.append(d)
-        dirs = '/'.join(sdirs)
-        if len(dirs) > 0:
-            dirs += '/'
-        res = 'dh/' + dirs + digest + ext
-        spaceleft = _maxstorepathlen - len(res)
-        if spaceleft > 0:
-            filler = basename[:spaceleft]
-            res = 'dh/' + dirs + filler + digest + ext
+        res = _hashencode(path, dotencode)
+    return res
+
+def _pathencode(path):
+    if len(path) > _maxstorepathlen:
+        return None
+    ef = _encodefname(encodedir(path)).split('/')
+    res = '/'.join(_auxencode(ef, True))
+    if len(res) > _maxstorepathlen:
+        return None
     return res
 
-def _calcmode(path):
+_pathencode = getattr(parsers, 'pathencode', _pathencode)
+
+def _dothybridencode(f):
+    ef = _pathencode(f)
+    if ef is None:
+        return _hashencode(encodedir(f), True)
+    return ef
+
+def _plainhybridencode(f):
+    return _hybridencode(f, False)
+
+def _calcmode(vfs):
     try:
         # files in .hg/ will be created using this mode
-        mode = os.stat(path).st_mode
+        mode = vfs.stat().st_mode
             # avoid some useless chmods
         if (0777 & ~util.umask) == (0777 & mode):
             mode = None
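
Both encodedir and _pathencode are now resolved through getattr(parsers, ..., fallback), so the pure-Python versions defined here run only when the C implementation (parsers.c plus the new pathencode.c added to setup.py below) is unavailable. A tiny standalone sketch of that fallback pattern, using a made-up fastmod module name:

    # Prefer a compiled implementation when present, otherwise keep the
    # pure-Python version defined in this module.
    def _upper(s):                  # pure-Python reference implementation
        return s.upper()

    try:
        import fastmod              # hypothetical C accelerator module
    except ImportError:
        fastmod = None

    upper = getattr(fastmod, 'upper', _upper)
    print upper('data/foo.i')       # 'DATA/FOO.I' with either implementation
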
@@ -237,12 +290,14 @@
 
 class basicstore(object):
     '''base class for local repository stores'''
-    def __init__(self, path, openertype):
-        self.path = path
-        self.createmode = _calcmode(path)
-        op = openertype(self.path)
-        op.createmode = self.createmode
-        self.opener = scmutil.filteropener(op, encodedir)
+    def __init__(self, path, vfstype):
+        vfs = vfstype(path)
+        self.path = vfs.base
+        self.createmode = _calcmode(vfs)
+        vfs.createmode = self.createmode
+        self.rawvfs = vfs
+        self.vfs = scmutil.filtervfs(vfs, encodedir)
+        self.opener = self.vfs
 
     def join(self, f):
         return self.path + '/' + encodedir(f)
@@ -254,11 +309,12 @@
             path += '/' + relpath
         striplen = len(self.path) + 1
         l = []
-        if os.path.isdir(path):
+        if self.rawvfs.isdir(path):
             visit = [path]
+            readdir = self.rawvfs.readdir
             while visit:
                 p = visit.pop()
-                for f, kind, st in osutil.listdir(p, stat=True):
+                for f, kind, st in readdir(p, stat=True):
                     fp = p + '/' + f
                     if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                         n = util.pconvert(fp[striplen:])
@@ -286,13 +342,26 @@
     def write(self):
         pass
 
+    def __contains__(self, path):
+        '''Checks if the store contains path'''
+        path = "/".join(("data", path))
+        # file?
+        if os.path.exists(self.join(path + ".i")):
+            return True
+        # dir?
+        if not path.endswith("/"):
+            path = path + "/"
+        return os.path.exists(self.join(path))
+
 class encodedstore(basicstore):
-    def __init__(self, path, openertype):
-        self.path = path + '/store'
-        self.createmode = _calcmode(self.path)
-        op = openertype(self.path)
-        op.createmode = self.createmode
-        self.opener = scmutil.filteropener(op, encodefilename)
+    def __init__(self, path, vfstype):
+        vfs = vfstype(path + '/store')
+        self.path = vfs.base
+        self.createmode = _calcmode(vfs)
+        vfs.createmode = self.createmode
+        self.rawvfs = vfs
+        self.vfs = scmutil.filtervfs(vfs, encodefilename)
+        self.opener = self.vfs
 
     def datafiles(self):
         for a, b, size in self._walk('data', True):
@@ -312,8 +381,8 @@
 class fncache(object):
     # the filename used to be partially encoded
     # hence the encodedir/decodedir dance
-    def __init__(self, opener):
-        self.opener = opener
+    def __init__(self, vfs):
+        self.vfs = vfs
         self.entries = None
         self._dirty = False
 
@@ -321,12 +390,12 @@
         '''fill the entries from the fncache file'''
         self._dirty = False
         try:
-            fp = self.opener('fncache', mode='rb')
+            fp = self.vfs('fncache', mode='rb')
         except IOError:
             # skip nonexistent file
             self.entries = set()
             return
-        self.entries = set(map(decodedir, fp.read().splitlines()))
+        self.entries = set(decodedir(fp.read()).splitlines())
         if '' in self.entries:
             fp.seek(0)
             for n, line in enumerate(fp):
@@ -336,9 +405,9 @@
         fp.close()
 
     def _write(self, files, atomictemp):
-        fp = self.opener('fncache', mode='wb', atomictemp=atomictemp)
+        fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
         if files:
-            fp.write('\n'.join(map(encodedir, files)) + '\n')
+            fp.write(encodedir('\n'.join(files) + '\n'))
         fp.close()
         self._dirty = False
 
@@ -367,33 +436,54 @@
             self._load()
         return iter(self.entries)
 
-class _fncacheopener(scmutil.abstractopener):
-    def __init__(self, op, fnc, encode):
-        self.opener = op
+class _fncachevfs(scmutil.abstractvfs):
+    def __init__(self, vfs, fnc, encode):
+        self.vfs = vfs
         self.fncache = fnc
         self.encode = encode
 
+    def _getmustaudit(self):
+        return self.vfs.mustaudit
+
+    def _setmustaudit(self, onoff):
+        self.vfs.mustaudit = onoff
+
+    mustaudit = property(_getmustaudit, _setmustaudit)
+
     def __call__(self, path, mode='r', *args, **kw):
         if mode not in ('r', 'rb') and path.startswith('data/'):
             self.fncache.add(path)
-        return self.opener(self.encode(path), mode, *args, **kw)
+        return self.vfs(self.encode(path), mode, *args, **kw)
+
+    def join(self, path):
+        if path:
+            return self.vfs.join(self.encode(path))
+        else:
+            return self.vfs.join(path)
 
 class fncachestore(basicstore):
-    def __init__(self, path, openertype, encode):
+    def __init__(self, path, vfstype, dotencode):
+        if dotencode:
+            encode = _dothybridencode
+        else:
+            encode = _plainhybridencode
         self.encode = encode
-        self.path = path + '/store'
-        self.createmode = _calcmode(self.path)
-        op = openertype(self.path)
-        op.createmode = self.createmode
-        fnc = fncache(op)
+        vfs = vfstype(path + '/store')
+        self.path = vfs.base
+        self.pathsep = self.path + '/'
+        self.createmode = _calcmode(vfs)
+        vfs.createmode = self.createmode
+        self.rawvfs = vfs
+        fnc = fncache(vfs)
         self.fncache = fnc
-        self.opener = _fncacheopener(op, fnc, encode)
+        self.vfs = _fncachevfs(vfs, fnc, encode)
+        self.opener = self.vfs
 
     def join(self, f):
-        return self.path + '/' + self.encode(f)
+        return self.pathsep + self.encode(f)
 
     def getsize(self, path):
-        return os.stat(self.path + '/' + path).st_size
+        return self.rawvfs.stat(path).st_size
 
     def datafiles(self):
         rewrite = False
@@ -422,11 +512,35 @@
     def write(self):
         self.fncache.write()
 
-def store(requirements, path, openertype):
+    def _exists(self, f):
+        ef = self.encode(f)
+        try:
+            self.getsize(ef)
+            return True
+        except OSError, err:
+            if err.errno != errno.ENOENT:
+                raise
+            # nonexistent entry
+            return False
+
+    def __contains__(self, path):
+        '''Checks if the store contains path'''
+        path = "/".join(("data", path))
+        # check for files (exact match)
+        e = path + '.i'
+        if e in self.fncache and self._exists(e):
+            return True
+        # now check for directories (prefix match)
+        if not path.endswith('/'):
+            path += '/'
+        for e in self.fncache:
+            if e.startswith(path) and self._exists(e):
+                return True
+        return False
+
+def store(requirements, path, vfstype):
     if 'store' in requirements:
         if 'fncache' in requirements:
-            auxencode = lambda f: _auxencode(f, 'dotencode' in requirements)
-            encode = lambda f: _hybridencode(f, auxencode)
-            return fncachestore(path, openertype, encode)
-        return encodedstore(path, openertype)
-    return basicstore(path, openertype)
+            return fncachestore(path, vfstype, 'dotencode' in requirements)
+        return encodedstore(path, vfstype)
+    return basicstore(path, vfstype)
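
The store() factory now just passes the 'dotencode' requirement through to fncachestore, which picks _dothybridencode or _plainhybridencode itself. A standalone sketch of how the requirements set maps onto a store flavour (the strings below are placeholders, not the real classes):

    # Sketch of the requirements -> store-flavour selection done by store().
    def pickstore(requirements):
        if 'store' in requirements:
            if 'fncache' in requirements:
                return 'fncachestore(dotencode=%s)' % ('dotencode' in requirements)
            return 'encodedstore'
        return 'basicstore'

    print pickstore(set())                                   # basicstore
    print pickstore(set(['store']))                          # encodedstore
    print pickstore(set(['store', 'fncache', 'dotencode']))  # fncachestore(dotencode=True)
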
--- a/mercurial/subrepo.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/subrepo.py	Fri Oct 19 01:34:50 2012 -0500
@@ -674,8 +674,8 @@
 
     @propertycache
     def _svnversion(self):
-        output, err = self._svncommand(['--version'], filename=None)
-        m = re.search(r'^svn,\s+version\s+(\d+)\.(\d+)', output)
+        output, err = self._svncommand(['--version', '--quiet'], filename=None)
+        m = re.search(r'^(\d+)\.(\d+)', output)
         if not m:
             raise util.Abort(_('cannot retrieve svn tool version'))
         return (int(m.group(1)), int(m.group(2)))
@@ -892,7 +892,7 @@
     def _gitnodir(self, commands, env=None, stream=False, cwd=None):
         """Calls the git command
 
-        The methods tries to call the git command. versions previor to 1.6.0
+        The method tries to call the git command. Versions prior to 1.6.0
         are not supported and very probably fail.
         """
         self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
--- a/mercurial/tags.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/tags.py	Fri Oct 19 01:34:50 2012 -0500
@@ -124,8 +124,8 @@
             continue
 
         # we prefer alltags[name] if:
-        #  it supercedes us OR
-        #  mutual supercedes and it has a higher rank
+        #  it supersedes us OR
+        #  mutual supersedes and it has a higher rank
         # otherwise we win because we're tip-most
         anode, ahist = nodehist
         bnode, bhist = alltags[name]
--- a/mercurial/templatefilters.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/templatefilters.py	Fri Oct 19 01:34:50 2012 -0500
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 import cgi, re, os, time, urllib
-import encoding, node, util
+import encoding, node, util, error
 import hbisect
 
 def addbreaks(text):
@@ -92,9 +92,9 @@
 
 def escape(text):
     """:escape: Any text. Replaces the special XML/XHTML characters "&", "<"
-    and ">" with XML entities.
+    and ">" with XML entities, and filters out NUL characters.
     """
-    return cgi.escape(text, True)
+    return cgi.escape(text.replace('\0', ''), True)
 
 para_re = None
 space_re = None
@@ -221,7 +221,7 @@
 
 def localdate(text):
     """:localdate: Date. Converts a date to local date."""
-    return (text[0], util.makedate()[1])
+    return (util.parsedate(text)[0], util.makedate()[1])
 
 def nonempty(str):
     """:nonempty: Any text. Returns '(none)' if the string is empty."""
@@ -391,5 +391,34 @@
     "xmlescape": xmlescape,
 }
 
+def fillfunc(context, mapping, args):
+    if not (1 <= len(args) <= 2):
+        raise error.ParseError(_("fill expects one or two arguments"))
+
+    text = stringify(args[0][0](context, mapping, args[0][1]))
+    width = 76
+    if len(args) == 2:
+        try:
+            width = int(stringify(args[1][0](context, mapping, args[1][1])))
+        except ValueError:
+            raise error.ParseError(_("fill expects an integer width"))
+
+    return fill(text, width)
+
+def datefunc(context, mapping, args):
+    if not (1 <= len(args) <= 2):
+        raise error.ParseError(_("date expects one or two arguments"))
+
+    date = args[0][0](context, mapping, args[0][1])
+    if len(args) == 2:
+        fmt = stringify(args[1][0](context, mapping, args[1][1]))
+        return util.datestr(date, fmt)
+    return util.datestr(date)
+
+funcs = {
+    "fill": fillfunc,
+    "date": datefunc,
+}
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = filters.values()
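
fillfunc and datefunc back the new fill() and date() template functions registered in funcs: fill() wraps text to an optional width (default 76), date() formats a date with an optional util.datestr format string. Illustrative invocations (output depends on the changeset being rendered):

    hg log -r . --template "{fill(desc, '60')}\n"
    hg log -r . --template "{date(date, '%Y-%m-%d')}\n"
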
--- a/mercurial/templatekw.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/templatekw.py	Fri Oct 19 01:34:50 2012 -0500
@@ -9,7 +9,27 @@
 import patch, util, error
 import hbisect
 
-def showlist(name, values, plural=None, **args):
+# This helper class allows us to handle both:
+#  "{files}" (legacy command-line-specific list hack) and
+#  "{files % '{file}\n'}" (hgweb-style with inlining and function support)
+
+class _hybrid(object):
+    def __init__(self, gen, values):
+        self.gen = gen
+        self.values = values
+    def __iter__(self):
+        return self.gen
+    def __call__(self):
+        for x in self.values:
+            yield x
+
+def showlist(name, values, plural=None, element=None, **args):
+    if not element:
+        element = name
+    f = _showlist(name, values, plural, **args)
+    return _hybrid(f, [{element: x} for x in values])
+
+def _showlist(name, values, plural=None, **args):
     '''expand set of values.
     name is name of key in template map.
     values is list of strings or dicts.
@@ -176,7 +196,7 @@
     """:children: List of strings. The children of the changeset."""
     ctx = args['ctx']
     childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
-    return showlist('children', childrevs, **args)
+    return showlist('children', childrevs, element='child', **args)
 
 def showdate(repo, ctx, templ, **args):
     """:date: Date information. The date when the changeset was committed."""
@@ -204,7 +224,8 @@
 def showfileadds(**args):
     """:file_adds: List of strings. Files added by this changeset."""
     repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
-    return showlist('file_add', getfiles(repo, ctx, revcache)[1], **args)
+    return showlist('file_add', getfiles(repo, ctx, revcache)[1],
+                    element='file', **args)
 
 def showfilecopies(**args):
     """:file_copies: List of strings. Files copied in this changeset with
@@ -223,7 +244,8 @@
                 copies.append((fn, rename[0]))
 
     c = [{'name': x[0], 'source': x[1]} for x in copies]
-    return showlist('file_copy', c, plural='file_copies', **args)
+    return showlist('file_copy', c, plural='file_copies',
+                    element='file', **args)
 
 # showfilecopiesswitch() displays file copies only if copy records are
 # provided before calling the templater, usually with a --copies
@@ -234,17 +256,20 @@
     """
     copies = args['revcache'].get('copies') or []
     c = [{'name': x[0], 'source': x[1]} for x in copies]
-    return showlist('file_copy', c, plural='file_copies', **args)
+    return showlist('file_copy', c, plural='file_copies',
+                    element='file', **args)
 
 def showfiledels(**args):
     """:file_dels: List of strings. Files removed by this changeset."""
     repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
-    return showlist('file_del', getfiles(repo, ctx, revcache)[2], **args)
+    return showlist('file_del', getfiles(repo, ctx, revcache)[2],
+                    element='file', **args)
 
 def showfilemods(**args):
     """:file_mods: List of strings. Files modified by this changeset."""
     repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
-    return showlist('file_mod', getfiles(repo, ctx, revcache)[0], **args)
+    return showlist('file_mod', getfiles(repo, ctx, revcache)[0],
+                    element='file', **args)
 
 def showfiles(**args):
     """:files: List of strings. All files modified, added, or removed by this
@@ -275,6 +300,28 @@
     """
     return ctx.hex()
 
+def showp1rev(repo, ctx, templ, **args):
+    """:p1rev: Integer. The repository-local revision number of the changeset's
+    first parent, or -1 if the changeset has no parents."""
+    return ctx.p1().rev()
+
+def showp2rev(repo, ctx, templ, **args):
+    """:p2rev: Integer. The repository-local revision number of the changeset's
+    second parent, or -1 if the changeset has no second parent."""
+    return ctx.p2().rev()
+
+def showp1node(repo, ctx, templ, **args):
+    """:p1node: String. The identification hash of the changeset's first parent,
+    as a 40 digit hexadecimal string. If the changeset has no parents, all
+    digits are 0."""
+    return ctx.p1().hex()
+
+def showp2node(repo, ctx, templ, **args):
+    """:p2node: String. The identification hash of the changeset's second
+    parent, as a 40 digit hexadecimal string. If the changeset has no second
+    parent, all digits are 0."""
+    return ctx.p2().hex()
+
 def showphase(repo, ctx, templ, **args):
     """:phase: String. The changeset phase name."""
     return ctx.phasestr()
@@ -320,6 +367,10 @@
     'latesttagdistance': showlatesttagdistance,
     'manifest': showmanifest,
     'node': shownode,
+    'p1rev': showp1rev,
+    'p1node': showp1node,
+    'p2rev': showp2rev,
+    'p2node': showp2node,
     'phase': showphase,
     'phaseidx': showphaseidx,
     'rev': showrev,
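
The _hybrid wrapper above lets list keywords keep their legacy one-string rendering while also supporting the % map operator, and the four new p1/p2 keywords expose parent revisions directly. Illustrative template invocations (the element names file and child come from the showlist() calls above):

    hg log -r . --template "{p1rev}:{p1node}\n"
    hg log -r . --template "{files % '{file}\n'}"
    hg log -r . --template "{children % 'child: {child}\n'}"
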
--- a/mercurial/templater.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/templater.py	Fri Oct 19 01:34:50 2012 -0500
@@ -6,7 +6,7 @@
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
-import sys, os
+import sys, os, re
 import util, config, templatefilters, parser, error
 
 # template parsing
@@ -36,18 +36,21 @@
             if c == 'r':
                 pos += 1
                 c = program[pos]
-                decode = lambda x: x
+                decode = False
             else:
-                decode = lambda x: x.decode('string-escape')
+                decode = True
             pos += 1
             s = pos
             while pos < end: # find closing quote
                 d = program[pos]
-                if d == '\\': # skip over escaped characters
+                if decode and d == '\\': # skip over escaped characters
                     pos += 2
                     continue
                 if d == c:
-                    yield ('string', decode(program[s:pos]), s)
+                    if not decode:
+                        yield ('string', program[s:pos].replace('\\', r'\\'), s)
+                        break
+                    yield ('string', program[s:pos].decode('string-escape'), s)
                     break
                 pos += 1
             else:
@@ -146,23 +149,37 @@
 
 def runfilter(context, mapping, data):
     func, data, filt = data
-    return filt(func(context, mapping, data))
+    try:
+        return filt(func(context, mapping, data))
+    except (ValueError, AttributeError, TypeError):
+        if isinstance(data, tuple):
+            dt = data[1]
+        else:
+            dt = data
+        raise util.Abort(_("template filter '%s' is not compatible with "
+                           "keyword '%s'") % (filt.func_name, dt))
 
 def buildmap(exp, context):
     func, data = compileexp(exp[1], context)
     ctmpl = gettemplate(exp[2], context)
     return (runmap, (func, data, ctmpl))
 
+def runtemplate(context, mapping, template):
+    for func, data in template:
+        yield func(context, mapping, data)
+
 def runmap(context, mapping, data):
     func, data, ctmpl = data
     d = func(context, mapping, data)
+    if util.safehasattr(d, '__call__'):
+        d = d()
+
     lm = mapping.copy()
 
     for i in d:
         if isinstance(i, dict):
             lm.update(i)
-            for f, d in ctmpl:
-                yield f(context, lm, d)
+            yield runtemplate(context, lm, ctmpl)
         else:
             # v is not an iterable of dicts, this happen when 'key'
             # has been fully expanded already and format is useless.
@@ -175,12 +192,69 @@
     if n in funcs:
         f = funcs[n]
         return (f, args)
+    if n in templatefilters.funcs:
+        f = templatefilters.funcs[n]
+        return (f, args)
     if n in context._filters:
         if len(args) != 1:
             raise error.ParseError(_("filter %s expects one argument") % n)
         f = context._filters[n]
         return (runfilter, (args[0][0], args[0][1], f))
 
+def join(context, mapping, args):
+    if not (1 <= len(args) <= 2):
+        raise error.ParseError(_("join expects one or two arguments"))
+
+    joinset = args[0][0](context, mapping, args[0][1])
+    if util.safehasattr(joinset, '__call__'):
+        joinset = [x.values()[0] for x in joinset()]
+
+    joiner = " "
+    if len(args) > 1:
+        joiner = args[1][0](context, mapping, args[1][1])
+
+    first = True
+    for x in joinset:
+        if first:
+            first = False
+        else:
+            yield joiner
+        yield x
+
+def sub(context, mapping, args):
+    if len(args) != 3:
+        raise error.ParseError(_("sub expects three arguments"))
+
+    pat = stringify(args[0][0](context, mapping, args[0][1]))
+    rpl = stringify(args[1][0](context, mapping, args[1][1]))
+    src = stringify(args[2][0](context, mapping, args[2][1]))
+    yield re.sub(pat, rpl, src)
+
+def if_(context, mapping, args):
+    if not (2 <= len(args) <= 3):
+        raise error.ParseError(_("if expects two or three arguments"))
+
+    test = stringify(args[0][0](context, mapping, args[0][1]))
+    if test:
+        t = stringify(args[1][0](context, mapping, args[1][1]))
+        yield runtemplate(context, mapping, compiletemplate(t, context))
+    elif len(args) == 3:
+        t = stringify(args[2][0](context, mapping, args[2][1]))
+        yield runtemplate(context, mapping, compiletemplate(t, context))
+
+def ifeq(context, mapping, args):
+    if not (3 <= len(args) <= 4):
+        raise error.ParseError(_("ifeq expects three or four arguments"))
+
+    test = stringify(args[0][0](context, mapping, args[0][1]))
+    match = stringify(args[1][0](context, mapping, args[1][1]))
+    if test == match:
+        t = stringify(args[2][0](context, mapping, args[2][1]))
+        yield runtemplate(context, mapping, compiletemplate(t, context))
+    elif len(args) == 4:
+        t = stringify(args[3][0](context, mapping, args[3][1]))
+        yield runtemplate(context, mapping, compiletemplate(t, context))
+
 methods = {
     "string": lambda e, c: (runstring, e[1]),
     "symbol": lambda e, c: (runsymbol, e[1]),
@@ -192,6 +266,10 @@
     }
 
 funcs = {
+    "if": if_,
+    "ifeq": ifeq,
+    "join": join,
+    "sub": sub,
 }
 
 # template engine
@@ -263,8 +341,7 @@
         '''Perform expansion. t is name of map element to expand.
         mapping contains added elements for use during expansion. Is a
         generator.'''
-        return _flatten(func(self, mapping, data) for func, data in
-                         self._load(t))
+        return _flatten(runtemplate(self, mapping, self._load(t)))
 
 engines = {'default': engine}
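
The four functions registered in funcs give templates conditionals, list joining and regex substitution. Illustrative uses with standard keywords (branch, tags, desc):

    hg log -r . --template "{ifeq(branch, 'default', '', 'branch: {branch}\n')}"
    hg log -r . --template "{join(tags, ', ')}\n"
    hg log -r . --template "{sub('\n', ' ', desc)}\n"
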
 
--- a/mercurial/templates/static/mercurial.js	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/templates/static/mercurial.js	Fri Oct 19 01:34:50 2012 -0500
@@ -25,7 +25,7 @@
 function Graph() {
 	
 	this.canvas = document.getElementById('graph');
-	if (navigator.userAgent.indexOf('MSIE') >= 0) this.canvas = window.G_vmlCanvasManager.initElement(this.canvas);
+	if (window.G_vmlCanvasManager) this.canvas = window.G_vmlCanvasManager.initElement(this.canvas);
 	this.ctx = this.canvas.getContext('2d');
 	this.ctx.strokeStyle = 'rgb(0, 0, 0)';
 	this.ctx.fillStyle = 'rgb(0, 0, 0)';
--- a/mercurial/templates/template-vars.txt	Mon Oct 08 00:19:30 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,42 +0,0 @@
-repo          the name of the repo
-rev           a changeset.manifest revision
-node          a changeset node
-changesets    total number of changesets
-file          a filename
-filerev       a file revision
-filerevs      total number of file revisions
-up            the directory of the relevant file
-path          a path in the manifest, starting with "/"
-basename      a short pathname
-date          a date string
-age           age in hours, days, etc
-line          a line of text (escaped)
-desc          a description (escaped, with breaks)
-shortdesc     a short description (escaped)
-author        a name or email addressv(obfuscated)
-parent        a list of the parent
-child         a list of the children
-tags          a list of tag
-
-header        the global page header
-footer        the global page footer
-
-files         a list of file links
-file_copies   a list of pairs of name, source filenames
-dirs          a set of directory links
-diff          a diff of one or more files
-annotate      an annotated file
-entries       the entries relevant to the page
-
-url           base url of hgweb interface
-logourl       base url of logo
-staticurl     base url for static resources
-
-
-Templates and commands:
-  changelog(rev) - a page for browsing changesets
-    naventry - a link for jumping to a changeset number
-    filenodelink - jump to file diff
-    fileellipses - printed after maxfiles
-    changelogentry - an entry in the log
-  manifest - browse a manifest as a directory tree
--- a/mercurial/transaction.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/transaction.py	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-# transaction.py - simple journalling scheme for mercurial
+# transaction.py - simple journaling scheme for mercurial
 #
 # This transaction scheme is intended to gracefully handle program
 # errors and interruptions. More serious failures like system crashes
--- a/mercurial/ui.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/ui.py	Fri Oct 19 01:34:50 2012 -0500
@@ -713,8 +713,8 @@
         With stock hg, this is simply a debug message that is hidden
         by default, but with extensions or GUI tools it may be
         visible. 'topic' is the current operation, 'item' is a
-        non-numeric marker of the current position (ie the currently
-        in-process file), 'pos' is the current numeric position (ie
+        non-numeric marker of the current position (i.e. the currently
+        in-process file), 'pos' is the current numeric position (i.e.
         revision, bytes, etc.), unit is a corresponding unit label,
         and total is the highest expected pos.
 
--- a/mercurial/url.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/url.py	Fri Oct 19 01:34:50 2012 -0500
@@ -175,7 +175,7 @@
             self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
             self.sock.connect((self.host, self.port))
             if _generic_proxytunnel(self):
-                # we do not support client x509 certificates
+                # we do not support client X.509 certificates
                 self.sock = sslutil.ssl_wrap_socket(self.sock, None, None)
         else:
             keepalive.HTTPConnection.connect(self)
@@ -278,7 +278,8 @@
     res.will_close = res._check_close()
 
     # do we have a Content-Length?
-    # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
+    # NOTE: RFC 2616, section 4.4, #3 says we ignore this if
+    # transfer-encoding is "chunked"
     length = res.msg.getheader('content-length')
     if length and not res.chunked:
         try:
--- a/mercurial/util.h	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/util.h	Fri Oct 19 01:34:50 2012 -0500
@@ -121,6 +121,7 @@
 #ifdef _MSC_VER
 /* msvc 6.0 has problems */
 #define inline __inline
+typedef unsigned char uint8_t;
 typedef unsigned long uint32_t;
 typedef unsigned __int64 uint64_t;
 #else
--- a/mercurial/util.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/util.py	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-# util.py - Mercurial utility functions and platform specfic implementations
+# util.py - Mercurial utility functions and platform specific implementations
 #
 #  Copyright 2005 K. Thananchayan <thananck@yahoo.com>
 #  Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
@@ -7,7 +7,7 @@
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
-"""Mercurial utility functions and platform specfic implementations.
+"""Mercurial utility functions and platform specific implementations.
 
 This contains helper routines that are independent of the SCM core and
 hide platform-specific details from the core.
@@ -62,6 +62,7 @@
 setsignalhandler = platform.setsignalhandler
 shellquote = platform.shellquote
 spawndetached = platform.spawndetached
+split = platform.split
 sshargs = platform.sshargs
 statfiles = platform.statfiles
 termwidth = platform.termwidth
@@ -799,7 +800,7 @@
     return temp
 
 class atomictempfile(object):
-    '''writeable file object that atomically updates a file
+    '''writable file object that atomically updates a file
 
     All writes will go to a temporary copy of the original file. Call
     close() when you are done writing, and atomictempfile will rename
@@ -1239,7 +1240,7 @@
         so overriding is needed to use width information of each characters.
 
         In addition, characters classified into 'ambiguous' width are
-        treated as wide in east asian area, but as narrow in other.
+        treated as wide in East Asian locales, but as narrow in others.
 
         This requires use decision to determine width of such characters.
         """
@@ -1300,7 +1301,7 @@
                 width = self.width - len(indent)
 
                 # First chunk on line is whitespace -- drop it, unless this
-                # is the very beginning of the text (ie. no lines started yet).
+                # is the very beginning of the text (i.e. no lines started yet).
                 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                     del chunks[-1]
 
@@ -1477,7 +1478,11 @@
                  for a in _hexdig for b in _hexdig)
 
 def _urlunquote(s):
-    """unquote('abc%20def') -> 'abc def'."""
+    """Decode HTTP/HTML % encoding.
+
+    >>> _urlunquote('abc%20def')
+    'abc def'
+    """
     res = s.split('%')
     # fastpath
     if len(res) == 1:
--- a/mercurial/verify.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/verify.py	Fri Oct 19 01:34:50 2012 -0500
@@ -96,16 +96,16 @@
             p1, p2 = obj.parents(node)
             if p1 not in seen and p1 != nullid:
                 err(lr, _("unknown parent 1 %s of %s") %
-                    (short(p1), short(n)), f)
+                    (short(p1), short(node)), f)
             if p2 not in seen and p2 != nullid:
                 err(lr, _("unknown parent 2 %s of %s") %
-                    (short(p2), short(p1)), f)
+                    (short(p2), short(node)), f)
         except Exception, inst:
             exc(lr, _("checking parents of %s") % short(node), inst, f)
 
         if node in seen:
-            err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f)
-        seen[n] = i
+            err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f)
+        seen[node] = i
         return lr
 
     if os.path.exists(repo.sjoin("journal")):
@@ -120,7 +120,7 @@
     havemf = len(mf) > 0
 
     ui.status(_("checking changesets\n"))
-    hasmanifest = False
+    refersmf = False
     seen = {}
     checklog(cl, "changelog", 0)
     total = len(repo)
@@ -133,17 +133,17 @@
             changes = cl.read(n)
             if changes[0] != nullid:
                 mflinkrevs.setdefault(changes[0], []).append(i)
-                hasmanifest = True
+                refersmf = True
             for f in changes[3]:
                 filelinkrevs.setdefault(f, []).append(i)
         except Exception, inst:
-            hasmanifest = True
+            refersmf = True
             exc(i, _("unpacking changeset %s") % short(n), inst)
     ui.progress(_('checking'), None)
 
     ui.status(_("checking manifests\n"))
     seen = {}
-    if hasmanifest:
+    if refersmf:
         # Do not check manifest if there are only changelog entries with
         # null manifests.
         checklog(mf, "manifest", 0)
--- a/mercurial/win32.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/win32.py	Fri Oct 19 01:34:50 2012 -0500
@@ -328,7 +328,7 @@
     env += '\0'
 
     args = subprocess.list2cmdline(args)
-    # Not running the command in shell mode makes python26 hang when
+    # Not running the command in shell mode makes Python 2.6 hang when
     # writing to hgweb output socket.
     comspec = os.environ.get("COMSPEC", "cmd.exe")
     args = comspec + " /c " + args
--- a/mercurial/windows.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/windows.py	Fri Oct 19 01:34:50 2012 -0500
@@ -20,6 +20,7 @@
 samefile = win32.samefile
 setsignalhandler = win32.setsignalhandler
 spawndetached = win32.spawndetached
+split = os.path.split
 termwidth = win32.termwidth
 testpid = win32.testpid
 unlink = win32.unlink
@@ -152,7 +153,7 @@
 #   backslash
 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
 # So, to quote a string, we must surround it in double quotes, double
-# the number of backslashes that preceed double quotes and add another
+# the number of backslashes that precede double quotes and add another
 # backslash before every double quote (being careful with the double
 # quote we've appended to the end)
 _quotere = None
--- a/mercurial/wireproto.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/mercurial/wireproto.py	Fri Oct 19 01:34:50 2012 -0500
@@ -503,6 +503,20 @@
     else:
         new = encoding.tolocal(new) # normal path
 
+    if util.safehasattr(proto, 'restore'):
+
+        proto.redirect()
+
+        try:
+            r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
+                             encoding.tolocal(old), new) or False
+        except util.Abort:
+            r = False
+
+        output = proto.restore()
+
+        return '%s\n%s' % (int(r), output)
+
     r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
                      encoding.tolocal(old), new)
     return '%s\n' % int(r)
@@ -516,7 +530,7 @@
     it is serving. Client checks to see if it understands the format.
 
     The format is simple: the server writes out a line with the amount
-    of files, then the total amount of bytes to be transfered (separated
+    of files, then the total amount of bytes to be transferred (separated
     by a space). Then, for each file, the server first writes the filename
     and filesize (separated by the null character), then the file contents.
     '''
@@ -545,12 +559,33 @@
         repo.ui.debug('%d files, %d bytes to transfer\n' %
                       (len(entries), total_bytes))
         yield '%d %d\n' % (len(entries), total_bytes)
-        for name, size in entries:
-            repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
-            # partially encode name over the wire for backwards compat
-            yield '%s\0%d\n' % (store.encodedir(name), size)
-            for chunk in util.filechunkiter(repo.sopener(name), limit=size):
-                yield chunk
+
+        sopener = repo.sopener
+        oldaudit = sopener.mustaudit
+        debugflag = repo.ui.debugflag
+        sopener.mustaudit = False
+
+        try:
+            for name, size in entries:
+                if debugflag:
+                    repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
+                # partially encode name over the wire for backwards compat
+                yield '%s\0%d\n' % (store.encodedir(name), size)
+                if size <= 65536:
+                    fp = sopener(name)
+                    try:
+                        data = fp.read(size)
+                    finally:
+                        fp.close()
+                    yield data
+                else:
+                    for chunk in util.filechunkiter(sopener(name), limit=size):
+                        yield chunk
+        # replace with "finally:" when support for Python 2.4 has been dropped
+        except Exception:
+            sopener.mustaudit = oldaudit
+            raise
+        sopener.mustaudit = oldaudit
 
     return streamres(streamer(repo, entries, total_bytes))
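
The duplicated sopener.mustaudit = oldaudit above is deliberate: Python 2.4 does not allow yield inside a try/finally block, so the generator restores the flag on the error path via a bare re-raising except and again on the success path. A standalone sketch of the workaround:

    # Python 2.4-compatible cleanup in a generator: no yield inside
    # try/finally, so restore state on both the normal and the error path.
    def stream(chunks, state):
        old = state['audit']
        state['audit'] = False       # e.g. disable path auditing while streaming
        try:
            for c in chunks:
                yield c
        except Exception:
            state['audit'] = old     # error path: restore, then re-raise
            raise
        state['audit'] = old         # success path: restore after the last chunk

    state = {'audit': True}
    print list(stream(['a', 'b'], state)), state   # ['a', 'b'] {'audit': True}
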
 
--- a/setup.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/setup.py	Fri Oct 19 01:34:50 2012 -0500
@@ -172,18 +172,17 @@
     env['SystemRoot'] = os.environ['SystemRoot']
 
 if os.path.isdir('.hg'):
-    cmd = [sys.executable, 'hg', 'id', '-i', '-t']
-    l = runhg(cmd, env).split()
-    while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
-        l.pop()
-    if len(l) > 1: # tag found
-        version = l[-1]
-        if l[0].endswith('+'): # propagate the dirty status to the tag
+    cmd = [sys.executable, 'hg', 'log', '-r', '.', '--template', '{tags}\n']
+    numerictags = [t for t in runhg(cmd, env).split() if t[0].isdigit()]
+    hgid = runhg([sys.executable, 'hg', 'id', '-i'], env).strip()
+    if numerictags: # tag(s) found
+        version = numerictags[-1]
+        if hgid.endswith('+'): # propagate the dirty status to the tag
             version += '+'
-    elif len(l) == 1: # no tag found
+    else: # no tag found
         cmd = [sys.executable, 'hg', 'parents', '--template',
                '{latesttag}+{latesttagdistance}-']
-        version = runhg(cmd, env) + l[0]
+        version = runhg(cmd, env) + hgid
     if version.endswith('+'):
         version += time.strftime('%Y%m%d')
 elif os.path.exists('.hg_archival.txt'):
@@ -345,14 +344,18 @@
         if isinstance(self.compiler, HackedMingw32CCompiler):
             self.compiler.compiler_so = self.compiler.compiler # no -mdll
             self.compiler.dll_libraries = [] # no -lmsrvc90
+        hv = sys.hexversion
+        pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
+        f = open('mercurial/hgpythonlib.h', 'wb')
+        f.write('/* this file is autogenerated by setup.py */\n')
+        f.write('#define HGPYTHONLIB "%s"\n' % pythonlib)
+        f.close()
         objects = self.compiler.compile(['mercurial/exewrapper.c'],
                                          output_dir=self.build_temp)
         dir = os.path.dirname(self.get_ext_fullpath('dummy'))
         target = os.path.join(dir, 'hg')
-        pythonlib = ("python%d%d" %
-               (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
         self.compiler.link_executable(objects, target,
-                                      libraries=[pythonlib],
+                                      libraries=[],
                                       output_dir=self.build_temp)
 
 class hginstallscripts(install_scripts):
@@ -421,7 +424,8 @@
     Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
     Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
     Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
-    Extension('mercurial.parsers', ['mercurial/parsers.c']),
+    Extension('mercurial.parsers', ['mercurial/parsers.c',
+                                    'mercurial/pathencode.c']),
     ]
 
 osutil_ldflags = []
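
build_hgexe now derives the Python library name from sys.hexversion and writes it into the generated mercurial/hgpythonlib.h instead of passing it to the linker. The arithmetic, worked through on its own:

    # sys.hexversion packs the interpreter version into one integer:
    # the major number sits in the top byte, the minor in the next byte.
    import sys

    hv = sys.hexversion
    pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
    print hex(hv), '->', pythonlib    # e.g. 0x20704f0 -> python27 on Python 2.7.4
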
--- a/tests/blacklists/inotify-failures	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/blacklists/inotify-failures	Fri Oct 19 01:34:50 2012 -0500
@@ -10,7 +10,7 @@
 test-strict
 
 # --inotify activates de facto the inotify extension. It does not play well
-# with inotify-specific tests, which activate/desactivate inotify at will:
+# with inotify-specific tests, which activate/deactivate inotify at will:
 test-inotify
 test-inotify-debuginotify
 test-inotify-dirty-dirstate
--- a/tests/filtercr.py	Mon Oct 08 00:19:30 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-# Filter output by the progress extension to make it readable in tests
-
-import sys, re
-
-for line in sys.stdin:
-    line = re.sub(r'\r+[^\n]', lambda m: '\n' + m.group()[-1:], line)
-    sys.stdout.write(line)
-print
--- a/tests/hghave.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/hghave.py	Fri Oct 19 01:34:50 2012 -0500
@@ -118,6 +118,9 @@
     except OSError:
         return False
 
+def has_killdaemons():
+    return True
+
 def has_cacheable_fs():
     from mercurial import util
 
@@ -149,7 +152,7 @@
         return False
 
 def getsvnversion():
-    m = matchoutput('svn --version 2>&1', r'^svn,\s+version\s+(\d+)\.(\d+)')
+    m = matchoutput('svn --version --quiet 2>&1', r'^(\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
@@ -286,6 +289,7 @@
     "hardlink": (has_hardlink, "hardlinks"),
     "icasefs": (has_icasefs, "case insensitive file system"),
     "inotify": (has_inotify, "inotify extension support"),
+    "killdaemons": (has_killdaemons, 'killdaemons.py support'),
     "lsprof": (has_lsprof, "python lsprof module"),
     "mtn": (has_mtn, "monotone client (>= 1.0)"),
     "outer-repo": (has_outer_repo, "outer repo"),
--- a/tests/killdaemons.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/killdaemons.py	Fri Oct 19 01:34:50 2012 -0500
@@ -1,25 +1,54 @@
 #!/usr/bin/env python
 
-import os, time, errno, signal
+import os, sys, time, errno, signal
 
-# Kill off any leftover daemon processes
-try:
-    fp = open(os.environ['DAEMON_PIDS'])
-    for line in fp:
-        try:
-            pid = int(line)
-        except ValueError:
-            continue
+if os.name == 'nt':
+    import ctypes
+    def kill(pid, logfn, tryhard=True):
+        logfn('# Killing daemon process %d' % pid)
+        PROCESS_TERMINATE = 1
+        handle = ctypes.windll.kernel32.OpenProcess(
+                PROCESS_TERMINATE, False, pid)
+        ctypes.windll.kernel32.TerminateProcess(handle, -1)
+        ctypes.windll.kernel32.CloseHandle(handle)
+else:
+    def kill(pid, logfn, tryhard=True):
         try:
             os.kill(pid, 0)
+            logfn('# Killing daemon process %d' % pid)
             os.kill(pid, signal.SIGTERM)
-            for i in range(10):
-                time.sleep(0.05)
+            if tryhard:
+                for i in range(10):
+                    time.sleep(0.05)
+                    os.kill(pid, 0)
+            else:
+                time.sleep(0.1)
                 os.kill(pid, 0)
+            logfn('# Daemon process %d is stuck - really killing it' % pid)
             os.kill(pid, signal.SIGKILL)
         except OSError, err:
             if err.errno != errno.ESRCH:
                 raise
-    fp.close()
-except IOError:
-    pass
+
+def killdaemons(pidfile, tryhard=True, remove=False, logfn=None):
+    if not logfn:
+        logfn = lambda s: s
+    # Kill off any leftover daemon processes
+    try:
+        fp = open(pidfile)
+        for line in fp:
+            try:
+                pid = int(line)
+            except ValueError:
+                continue
+            kill(pid, logfn, tryhard)
+        fp.close()
+        if remove:
+            os.unlink(pidfile)
+    except IOError:
+        pass
+
+if __name__ == '__main__':
+    path, = sys.argv[1:]
+    killdaemons(path)
+
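
With this rewrite killdaemons.py is both a script (invoked as
"killdaemons.py <pidfile>", as the tests below now do with $DAEMON_PIDS) and an
importable helper.  A minimal usage sketch, where the pid-file path and the
logging function are placeholders rather than part of the patch:

    # Sketch of driving the refactored helper from Python; run-tests.py below
    # does the equivalent with tryhard=False, remove=True and logfn=vlog.
    import os
    from killdaemons import killdaemons

    def log(msg):
        print msg

    pidfile = os.environ.get('DAEMON_PIDS', 'daemon.pids')
    # Send SIGTERM to every pid listed in the file, retry briefly before
    # falling back to SIGKILL, and keep the pid file around afterwards.
    killdaemons(pidfile, tryhard=True, remove=False, logfn=log)
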
--- a/tests/run-tests.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/run-tests.py	Fri Oct 19 01:34:50 2012 -0500
@@ -54,6 +54,7 @@
 import time
 import re
 import threading
+import killdaemons as killmod
 
 processlock = threading.Lock()
 
@@ -282,21 +283,6 @@
     shutil.copy(src, dst)
     os.remove(src)
 
-def splitnewlines(text):
-    '''like str.splitlines, but only split on newlines.
-    keep line endings.'''
-    i = 0
-    lines = []
-    while True:
-        n = text.find('\n', i)
-        if n == -1:
-            last = text[i:]
-            if last:
-                lines.append(last)
-            return lines
-        lines.append(text[i:n + 1])
-        i = n + 1
-
 def parsehghaveoutput(lines):
     '''Parse hghave log lines.
     Return tuple of lists (missing, failed):
@@ -348,29 +334,8 @@
         pass
 
 def killdaemons():
-    # Kill off any leftover daemon processes
-    try:
-        fp = open(DAEMON_PIDS)
-        for line in fp:
-            try:
-                pid = int(line)
-            except ValueError:
-                continue
-            try:
-                os.kill(pid, 0)
-                vlog('# Killing daemon process %d' % pid)
-                os.kill(pid, signal.SIGTERM)
-                time.sleep(0.1)
-                os.kill(pid, 0)
-                vlog('# Daemon process %d is stuck - really killing it' % pid)
-                os.kill(pid, signal.SIGKILL)
-            except OSError, err:
-                if err.errno != errno.ESRCH:
-                    raise
-        fp.close()
-        os.unlink(DAEMON_PIDS)
-    except IOError:
-        pass
+    return killmod.killdaemons(DAEMON_PIDS, tryhard=False, remove=True,
+                               logfn=vlog)
 
 def cleanup(options):
     if not options.keep_tmpdir:
@@ -511,11 +476,8 @@
     py3kswitch = options.py3k_warnings and ' -3' or ''
     cmd = '%s%s "%s"' % (PYTHON, py3kswitch, test)
     vlog("# Running", cmd)
-    return run(cmd, wd, options, replacements)
-
-def shtest(test, wd, options, replacements):
-    cmd = '%s "%s"' % (options.shell, test)
-    vlog("# Running", cmd)
+    if os.name == 'nt':
+        replacements.append((r'\r\n', '\n'))
     return run(cmd, wd, options, replacements)
 
 needescape = re.compile(r'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
@@ -529,8 +491,10 @@
 
 def rematch(el, l):
     try:
-        # ensure that the regex matches to the end of the string
-        return re.match(el + r'\Z', l)
+        # use \Z to ensure that the regex matches to the end of the string
+        if os.name == 'nt':
+            return re.match(el + r'\r?\n\Z', l)
+        return re.match(el + r'\n\Z', l)
     except re.error:
         # el is an invalid regex
         return False
@@ -559,14 +523,14 @@
 def linematch(el, l):
     if el == l: # perfect match (fast)
         return True
-    if (el and
-        (el.endswith(" (re)\n") and rematch(el[:-6] + '\n', l) or
-         el.endswith(" (glob)\n") and globmatch(el[:-8] + '\n', l) or
-         el.endswith(" (esc)\n") and
-             (el[:-7].decode('string-escape') + '\n' == l or
-              el[:-7].decode('string-escape').replace('\r', '') +
-                  '\n' == l and os.name == 'nt'))):
-        return True
+    if el:
+        if el.endswith(" (esc)\n"):
+            el = el[:-7].decode('string-escape') + '\n'
+        if el == l or os.name == 'nt' and el[:-1] + '\r\n' == l:
+            return True
+        if (el.endswith(" (re)\n") and rematch(el[:-6], l) or
+            el.endswith(" (glob)\n") and globmatch(el[:-8], l)):
+            return True
     return False
 
 def tsttest(test, wd, options, replacements):
@@ -704,14 +668,13 @@
     pos = -1
     postout = []
     ret = 0
-    for n, l in enumerate(output):
+    for l in output:
         lout, lcmd = l, None
         if salt in l:
             lout, lcmd = l.split(salt, 1)
 
         if lout:
-            if lcmd:
-                # output block had no trailing newline, clean up
+            if not lout.endswith('\n'):
                 lout += ' (no-eol)\n'
 
             # find the expected output at the current position
@@ -782,7 +745,7 @@
 
     for s, r in replacements:
         output = re.sub(s, r, output)
-    return ret, splitnewlines(output)
+    return ret, output.splitlines(True)
 
 def runone(options, test):
     '''tristate output:
@@ -906,10 +869,7 @@
         runner = tsttest
         ref = testpath
     else:
-        # do not try to run non-executable programs
-        if not os.access(testpath, os.X_OK):
-            return skip("not executable")
-        runner = shtest
+        return skip("unknown test type")
 
     # Make a tmp subdirectory to work in
     testtmp = os.environ["TESTTMP"] = os.environ["HOME"] = \
@@ -921,7 +881,6 @@
         (r':%s\b' % (options.port + 2), ':$HGPORT2'),
         ]
     if os.name == 'nt':
-        replacements.append((r'\r\n', '\n'))
         replacements.append(
             (''.join(c.isalpha() and '[%s%s]' % (c.lower(), c.upper()) or
                      c in '/\\' and r'[/\\]' or
@@ -945,7 +904,7 @@
         refout = None                   # to match "out is None"
     elif os.path.exists(ref):
         f = open(ref, "r")
-        refout = list(splitnewlines(f.read()))
+        refout = f.read().splitlines(True)
         f.close()
     else:
         refout = []
@@ -957,6 +916,11 @@
             f.write(line)
         f.close()
 
+    def describe(ret):
+        if ret < 0:
+            return 'killed by signal %d' % -ret
+        return 'returned error code %d' % ret
+
     if skipped:
         mark = 's'
         if out is None:                 # debug mode: nothing to parse
@@ -984,13 +948,13 @@
                 showdiff(refout, out, ref, err)
             iolock.release()
         if ret:
-            fail("output changed and returned error code %d" % ret, ret)
+            fail("output changed and " + describe(ret), ret)
         else:
             fail("output changed", ret)
         ret = 1
     elif ret:
         mark = '!'
-        fail("returned error code %d" % ret, ret)
+        fail(describe(ret), ret)
     else:
         success()
 
@@ -1231,6 +1195,10 @@
             # can't remove on solaris
             os.environ[k] = ''
             del os.environ[k]
+    if 'HG' in os.environ:
+        # can't remove on solaris
+        os.environ['HG'] = ''
+        del os.environ['HG']
 
     global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
     TESTDIR = os.environ["TESTDIR"] = os.getcwd()
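
The linematch()/rematch() rework above decodes " (esc)" annotations first and
only then tries the " (re)" and " (glob)" forms, keeping the trailing newline
on both the expected and the actual line.  A simplified standalone sketch of
that matching order (it leaves out the Windows \r\n handling and the glob
translation that the real run-tests.py performs):

    # Simplified sketch of the expected-output matching order; not the full
    # run-tests.py implementation.
    import re

    def rematch(el, l):
        try:
            # \Z anchors the pattern at the very end of the actual line
            return re.match(el + r'\Z', l)
        except re.error:
            return False            # el is an invalid regex

    def linematch(el, l):
        if el == l:                 # fast path: perfect match
            return True
        if el.endswith(' (esc)\n'):
            el = el[:-7].decode('string-escape') + '\n'
            return el == l
        if el.endswith(' (re)\n'):
            return bool(rematch(el[:-6] + '\n', l))
        # " (glob)" handling omitted from this sketch
        return False
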
--- a/tests/test-annotate.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-annotate.t	Fri Oct 19 01:34:50 2012 -0500
@@ -279,10 +279,10 @@
   > EOF
   $ hg ci -Am "adda"
   adding a
-  $ cat > a <<EOF
+  $ sed 's/EOL$//g' > a <<EOF
   > a  a
   > 
-  >  
+  >  EOL
   > b  b
   > EOF
   $ hg ci -m "changea"
--- a/tests/test-archive.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-archive.t	Fri Oct 19 01:34:50 2012 -0500
@@ -24,7 +24,7 @@
   >     echo % $3 and $4 disallowed should both give 403
   >     "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$3" | head -n 1
   >     "$TESTDIR/get-with-headers.py" localhost:$HGPORT "archive/tip.$4" | head -n 1
-  >     "$TESTDIR/killdaemons.py"
+  >     "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   >     cat errors.log
   >     cp .hg/hgrc-base .hg/hgrc
   > }
@@ -93,7 +93,7 @@
       testing: test-archive-2c0277f05ed4/foo   OK
   No errors detected in compressed data of archive.zip.
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
   $ hg archive -t tar test.tar
   $ tar tf test.tar
@@ -224,19 +224,19 @@
   > width = 60
   > EOF
 
-  $ hg archive ../with-progress 2>&1 | "$TESTDIR/filtercr.py"
-  
-  archiving [                                           ] 0/4
-  archiving [                                           ] 0/4
-  archiving [=========>                                 ] 1/4
-  archiving [=========>                                 ] 1/4
-  archiving [====================>                      ] 2/4
-  archiving [====================>                      ] 2/4
-  archiving [===============================>           ] 3/4
-  archiving [===============================>           ] 3/4
-  archiving [==========================================>] 4/4
-  archiving [==========================================>] 4/4
-                                                              \r (esc)
+  $ hg archive ../with-progress
+  \r (no-eol) (esc)
+  archiving [                                           ] 0/4\r (no-eol) (esc)
+  archiving [                                           ] 0/4\r (no-eol) (esc)
+  archiving [=========>                                 ] 1/4\r (no-eol) (esc)
+  archiving [=========>                                 ] 1/4\r (no-eol) (esc)
+  archiving [====================>                      ] 2/4\r (no-eol) (esc)
+  archiving [====================>                      ] 2/4\r (no-eol) (esc)
+  archiving [===============================>           ] 3/4\r (no-eol) (esc)
+  archiving [===============================>           ] 3/4\r (no-eol) (esc)
+  archiving [==========================================>] 4/4\r (no-eol) (esc)
+  archiving [==========================================>] 4/4\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 cleanup after progress extension test:
 
--- a/tests/test-basic.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-basic.t	Fri Oct 19 01:34:50 2012 -0500
@@ -33,7 +33,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg identify -n
   0
- 
+
 
 Poke around at hashes:
 
--- a/tests/test-bookmarks-pushpull.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-bookmarks-pushpull.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,5 +1,16 @@
   $ "$TESTDIR/hghave" serve || exit 80
 
+  $ cat << EOF >> $HGRCPATH
+  > [phases]
+  > publish=False
+  > [extensions]
+  > EOF
+  $ cat > obs.py << EOF
+  > import mercurial.obsolete
+  > mercurial.obsolete._enabled = True
+  > EOF
+  $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
+
 initialize
 
   $ hg init a
@@ -40,6 +51,7 @@
   bookmarks	
   phases	
   namespaces	
+  obsolete	
   $ hg debugpushkey ../a bookmarks
   Y	4e3505fd95835d721066b76e75dbb8cc554d7f77
   X	4e3505fd95835d721066b76e75dbb8cc554d7f77
@@ -99,8 +111,10 @@
   $ echo c1 > f1
   $ hg ci -Am1
   adding f1
+  $ hg book -f @
   $ hg book -f X
   $ hg book
+     @                         1:0d2164f0ce0d
    * X                         1:0d2164f0ce0d
      Y                         0:4e3505fd9583
      Z                         1:0d2164f0ce0d
@@ -112,8 +126,10 @@
   $ echo c2 > f2
   $ hg ci -Am2
   adding f2
+  $ hg book -f @
   $ hg book -f X
   $ hg book
+     @                         1:9b140be10808
    * X                         1:9b140be10808
      Y                         0:4e3505fd9583
      Z                         0:4e3505fd9583
@@ -129,8 +145,11 @@
   added 1 changesets with 1 changes to 1 files (+1 heads)
   divergent bookmark X stored as X@foo
   updating bookmark Z
+  divergent bookmark @ stored as @foo
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg book
+     @                         1:9b140be10808
+     @foo                      2:0d2164f0ce0d
    * X                         1:9b140be10808
      X@foo                     2:0d2164f0ce0d
      Y                         0:4e3505fd9583
@@ -145,6 +164,7 @@
   adding file changes
   added 1 changesets with 1 changes to 1 files (+1 heads)
   $ hg -R ../a book
+     @                         1:0d2164f0ce0d
    * X                         1:0d2164f0ce0d
      Y                         0:4e3505fd9583
      Z                         1:0d2164f0ce0d
@@ -165,6 +185,7 @@
   added 1 changesets with 1 changes to 1 files (+1 heads)
   updating bookmark Y
   $ hg -R ../a book
+     @                         1:0d2164f0ce0d
    * X                         1:0d2164f0ce0d
      Y                         3:f6fc62dde3c0
      Z                         1:0d2164f0ce0d
@@ -194,10 +215,48 @@
   (did you forget to merge? use push -f to force)
   [255]
   $ hg -R ../a book
+     @                         1:0d2164f0ce0d
+   * X                         1:0d2164f0ce0d
+     Y                         3:f6fc62dde3c0
+     Z                         1:0d2164f0ce0d
+
+
+Unrelated marker does not alter the decision
+
+  $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+  $ hg push http://localhost:$HGPORT2/
+  pushing to http://localhost:$HGPORT2/
+  searching for changes
+  abort: push creates new remote head 4efff6d98829!
+  (did you forget to merge? use push -f to force)
+  [255]
+  $ hg -R ../a book
+     @                         1:0d2164f0ce0d
    * X                         1:0d2164f0ce0d
      Y                         3:f6fc62dde3c0
      Z                         1:0d2164f0ce0d
 
+Update to a successor works
+
+  $ hg id --debug -r 3
+  f6fc62dde3c0771e29704af56ba4d8af77abcc2f
+  $ hg id --debug -r 4
+  4efff6d98829d9c824c621afd6e3f01865f5439f tip Y
+  $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f 4efff6d98829d9c824c621afd6e3f01865f5439f
+  $ hg push http://localhost:$HGPORT2/
+  pushing to http://localhost:$HGPORT2/
+  searching for changes
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files (+1 heads)
+  updating bookmark Y
+  $ hg -R ../a book
+     @                         1:0d2164f0ce0d
+   * X                         1:0d2164f0ce0d
+     Y                         4:4efff6d98829
+     Z                         1:0d2164f0ce0d
+
 hgweb
 
   $ cat <<EOF > .hg/hgrc
@@ -210,16 +269,18 @@
   $ cat ../hg.pid >> $DAEMON_PIDS
   $ cd ../a
 
-  $ hg debugpushkey http://localhost:$HGPORT/ namespaces 
+  $ hg debugpushkey http://localhost:$HGPORT/ namespaces
   bookmarks	
   phases	
   namespaces	
+  obsolete	
   $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
-  Y	4efff6d98829d9c824c621afd6e3f01865f5439f
+  @	9b140be1080824d768c5a4691a564088eede71f9
+  foo	0000000000000000000000000000000000000000
   foobar	9b140be1080824d768c5a4691a564088eede71f9
+  Y	4efff6d98829d9c824c621afd6e3f01865f5439f
+  X	9b140be1080824d768c5a4691a564088eede71f9
   Z	0d2164f0ce0d8f1d6f94351eba04b794909be66c
-  foo	0000000000000000000000000000000000000000
-  X	9b140be1080824d768c5a4691a564088eede71f9
   $ hg out -B http://localhost:$HGPORT/
   comparing with http://localhost:$HGPORT/
   searching for changed bookmarks
@@ -241,26 +302,28 @@
   $ hg pull -B Z http://localhost:$HGPORT/
   pulling from http://localhost:$HGPORT/
   no changes found
+  divergent bookmark @ stored as @1
+  adding remote bookmark foo
   adding remote bookmark foobar
+  divergent bookmark X stored as X@1
   adding remote bookmark Z
-  adding remote bookmark foo
-  divergent bookmark X stored as X@1
   importing bookmark Z
   $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
   requesting all changes
   adding changesets
   adding manifests
   adding file changes
-  added 5 changesets with 5 changes to 3 files (+3 heads)
+  added 4 changesets with 4 changes to 3 files (+2 heads)
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg -R cloned-bookmarks bookmarks
+     @                         1:9b140be10808
      X                         1:9b140be10808
-     Y                         4:4efff6d98829
+     Y                         3:4efff6d98829
      Z                         2:0d2164f0ce0d
      foo                       -1:000000000000
      foobar                    1:9b140be10808
- 
+
   $ cd ..
 
 Pushing a bookmark should only push the changes required by that
@@ -270,7 +333,7 @@
   adding changesets
   adding manifests
   adding file changes
-  added 5 changesets with 5 changes to 3 files (+3 heads)
+  added 4 changesets with 4 changes to 3 files (+2 heads)
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd addmarks
@@ -286,7 +349,7 @@
   $ hg book -r tip add-bar
 Note: this push *must* push only a single changeset, as that's the point
 of this test.
-  $ hg push -B add-foo
+  $ hg push -B add-foo --traceback
   pushing to http://localhost:$HGPORT/
   searching for changes
   remote: adding changesets
--- a/tests/test-bookmarks-strip.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-bookmarks-strip.t	Fri Oct 19 01:34:50 2012 -0500
@@ -34,7 +34,7 @@
 
   $ hg book test2
 
-update to -2 (inactives the active bookmark)
+update to -2 (deactivates the active bookmark)
 
   $ hg update -r -2
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-bookmarks.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-bookmarks.t	Fri Oct 19 01:34:50 2012 -0500
@@ -154,6 +154,19 @@
    * Y                         2:db815d6d32e6
      Z                         0:f7b1eb17ad24
 
+bookmarks from a revset
+  $ hg bookmark -r '.^1' REVSET
+  $ hg bookmark -r ':tip' TIP
+  $ hg bookmarks
+     REVSET                    0:f7b1eb17ad24
+   * TIP                       2:db815d6d32e6
+     X2                        1:925d80f479bb
+     Y                         2:db815d6d32e6
+     Z                         0:f7b1eb17ad24
+
+  $ hg bookmark -d REVSET
+  $ hg bookmark -d TIP
+
 rename without new name
 
   $ hg bookmark -m Y
@@ -201,15 +214,63 @@
 
   $ hg bookmark '
   > '
-  abort: bookmark name cannot contain newlines
+  abort: bookmark names cannot consist entirely of whitespace
+  [255]
+
+  $ hg bookmark -m Z '
+  > '
+  abort: bookmark names cannot consist entirely of whitespace
   [255]
 
+bookmark with reserved name
+
+  $ hg bookmark tip
+  abort: the name 'tip' is reserved
+  [255]
+
+  $ hg bookmark .
+  abort: the name '.' is reserved
+  [255]
+
+  $ hg bookmark null
+  abort: the name 'null' is reserved
+  [255]
+
+
 bookmark with existing name
 
   $ hg bookmark Z
   abort: bookmark 'Z' already exists (use -f to force)
   [255]
 
+  $ hg bookmark -m Y Z
+  abort: bookmark 'Z' already exists (use -f to force)
+  [255]
+
+bookmark with name of branch
+
+  $ hg bookmark default
+  abort: a bookmark cannot have the name of an existing branch
+  [255]
+
+  $ hg bookmark -m Y default
+  abort: a bookmark cannot have the name of an existing branch
+  [255]
+
+incompatible options
+
+  $ hg bookmark -m Y -d Z
+  abort: --delete and --rename are incompatible
+  [255]
+
+  $ hg bookmark -r 1 -d Z
+  abort: --rev is incompatible with --delete
+  [255]
+
+  $ hg bookmark -r 1 -m Z Y
+  abort: --rev is incompatible with --rename
+  [255]
+
 force bookmark with existing name
 
   $ hg bookmark -f Z
@@ -234,10 +295,19 @@
   abort: bookmark names cannot consist entirely of whitespace
   [255]
 
+  $ hg bookmark -m Y ' '
+  abort: bookmark names cannot consist entirely of whitespace
+  [255]
+
 invalid bookmark
 
   $ hg bookmark 'foo:bar'
-  abort: bookmark 'foo:bar' contains illegal character
+  abort: ':' cannot be used in a bookmark name
+  [255]
+
+  $ hg bookmark 'foo
+  > bar'
+  abort: '\n' cannot be used in a bookmark name
   [255]
 
 the bookmark extension should be ignored now that it is part of core
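
Taken together, the new cases above spell out the naming rules: 'tip', '.' and
'null' are reserved, ':' and newline characters are rejected, names consisting
only of whitespace are rejected, and a bookmark may not reuse the name of an
existing branch (test-branches.t below exercises the same character rules for
branch names).  A rough validator reconstructed from those abort messages; it
is illustrative only, not the actual Mercurial implementation:

    # Reconstructed from the expected abort messages above; illustrative only.
    def checklabel(label, kind='bookmark', branches=()):
        if label in ('tip', '.', 'null'):
            raise ValueError("the name '%s' is reserved" % label)
        if not label.strip():
            raise ValueError("%s names cannot consist entirely of whitespace"
                             % kind)
        for c in (':', '\n'):
            if c in label:
                raise ValueError("%r cannot be used in a %s name" % (c, kind))
        if kind == 'bookmark' and label in branches:
            raise ValueError(
                "a bookmark cannot have the name of an existing branch")
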
--- a/tests/test-branches.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-branches.t	Fri Oct 19 01:34:50 2012 -0500
@@ -45,6 +45,8 @@
   (branches are permanent and global, did you want a bookmark?)
   $ hg commit -d '5 0' -m "Adding c branch"
 
+reserved names
+
   $ hg branch tip
   abort: the name 'tip' is reserved
   [255]
@@ -55,6 +57,17 @@
   abort: the name '.' is reserved
   [255]
 
+invalid characters
+
+  $ hg branch 'foo:bar'
+  abort: ':' cannot be used in a branch name
+  [255]
+
+  $ hg branch 'foo
+  > bar'
+  abort: '\n' cannot be used in a branch name
+  [255]
+
   $ echo 'd' >d
   $ hg add d
   $ hg branch 'a branch name much longer than the default justification used by branches'
--- a/tests/test-check-code-hg.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-check-code-hg.t	Fri Oct 19 01:34:50 2012 -0500
@@ -162,19 +162,6 @@
   tests/autodiff.py:0:
    >         ui.write('data lost for: %s\n' % fn)
    warning: unwrapped ui message
-  tests/test-convert-mtn.t:0:
-   >   > function get_passphrase(keypair_id)
-   don't use 'function', use old style
-  tests/test-import-git.t:0:
-   >   > Mc\${NkU|\`?^000jF3jhEB
-   ^ must be quoted
-  tests/test-import.t:0:
-   >   > diff -Naur proj-orig/foo proj-new/foo
-   don't use 'diff -N'
-   don't use 'diff -N'
-  tests/test-schemes.t:0:
-   >   > z = file:\$PWD/
-   don't use $PWD, use `pwd`
   tests/test-ui-color.py:0:
    > testui.warn('warning\n')
    warning: unwrapped ui message
--- a/tests/test-check-code.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-check-code.t	Fri Oct 19 01:34:50 2012 -0500
@@ -43,6 +43,19 @@
   >             pass
   >     finally:
   >         pass
+  > 
+  > # yield inside a try/finally block is not allowed in Python 2.4
+  >     try:
+  >         pass
+  >         yield 1
+  >     finally:
+  >         pass
+  >     try:
+  >         yield
+  >         pass
+  >     finally:
+  >         pass
+  > 
   > EOF
   $ cat > classstyle.py <<EOF
   > class newstyle_class(object):
@@ -83,7 +96,13 @@
    any/all/format not available in Python 2.4
   ./non-py24.py:11:
    >     try:
-   no try/except/finally in Py2.4
+   no try/except/finally in Python 2.4
+  ./non-py24.py:28:
+   >     try:
+   no yield inside try/finally in Python 2.4
+  ./non-py24.py:33:
+   >     try:
+   no yield inside try/finally in Python 2.4
   ./classstyle.py:4:
    > class oldstyle_class:
    old-style class, use class foo(object)
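
The new warning exists because Python 2.4 rejects yield inside a try block
that has a finally clause (generator/finally support only arrived in 2.5), so
check-code flags it even though newer interpreters accept it.  The usual
2.4-compatible rewrite is to keep the generator free of try/finally and let
the caller own the cleanup; a sketch with purely illustrative names:

    # Rejected by Python 2.4 (and now by check-code):
    #
    #     def readlines(path):
    #         fp = open(path)
    #         try:
    #             for line in fp:
    #                 yield line
    #         finally:
    #             fp.close()
    #
    # 2.4-friendly alternative: the caller owns the file object and the
    # generator contains no try/finally at all.
    def readlines(fp):
        for line in fp:
            yield line

    fp = open('somefile')
    try:
        for line in readlines(fp):
            pass                    # process the line here
    finally:
        fp.close()
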
--- a/tests/test-clone.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-clone.t	Fri Oct 19 01:34:50 2012 -0500
@@ -468,6 +468,16 @@
   $ rm -r ua
 
 
+Test clone with special '@' bookmark:
+  $ cd a
+  $ hg bookmark -r a7949464abda @  # branch point of stable from default
+  $ hg clone . ../i
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg id -i ../i
+  a7949464abda
+
+
 Testing failures:
 
   $ mkdir fail
@@ -533,7 +543,7 @@
 
 destination directory not empty
 
-  $ mkdir a 
+  $ mkdir a
   $ echo stuff > a/a
   $ hg clone q a
   abort: destination 'a' is not empty
@@ -558,7 +568,7 @@
   $ test -d d/.hg
   [1]
 
-reenable perm to allow deletion
+re-enable perm to allow deletion
 
   $ chmod +rx c/.hg/store/data
 
--- a/tests/test-command-template.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-command-template.t	Fri Oct 19 01:34:50 2012 -0500
@@ -592,7 +592,8 @@
 
   $ for key in author branch branches date desc file_adds file_dels file_mods \
   >         file_copies file_copies_switch files \
-  >         manifest node parents rev tags diffstat extras; do
+  >         manifest node parents rev tags diffstat extras \
+  >         p1rev p2rev p1node p2node; do
   >     for mode in '' --verbose --debug; do
   >         hg log $mode --template "$key$mode: {$key}\n"
   >     done
@@ -1095,7 +1096,114 @@
   extras--debug: branch=default
   extras--debug: branch=default
   extras--debug: branch=default
-
+  p1rev: 7
+  p1rev: -1
+  p1rev: 5
+  p1rev: 3
+  p1rev: 3
+  p1rev: 2
+  p1rev: 1
+  p1rev: 0
+  p1rev: -1
+  p1rev--verbose: 7
+  p1rev--verbose: -1
+  p1rev--verbose: 5
+  p1rev--verbose: 3
+  p1rev--verbose: 3
+  p1rev--verbose: 2
+  p1rev--verbose: 1
+  p1rev--verbose: 0
+  p1rev--verbose: -1
+  p1rev--debug: 7
+  p1rev--debug: -1
+  p1rev--debug: 5
+  p1rev--debug: 3
+  p1rev--debug: 3
+  p1rev--debug: 2
+  p1rev--debug: 1
+  p1rev--debug: 0
+  p1rev--debug: -1
+  p2rev: -1
+  p2rev: -1
+  p2rev: 4
+  p2rev: -1
+  p2rev: -1
+  p2rev: -1
+  p2rev: -1
+  p2rev: -1
+  p2rev: -1
+  p2rev--verbose: -1
+  p2rev--verbose: -1
+  p2rev--verbose: 4
+  p2rev--verbose: -1
+  p2rev--verbose: -1
+  p2rev--verbose: -1
+  p2rev--verbose: -1
+  p2rev--verbose: -1
+  p2rev--verbose: -1
+  p2rev--debug: -1
+  p2rev--debug: -1
+  p2rev--debug: 4
+  p2rev--debug: -1
+  p2rev--debug: -1
+  p2rev--debug: -1
+  p2rev--debug: -1
+  p2rev--debug: -1
+  p2rev--debug: -1
+  p1node: 29114dbae42b9f078cf2714dbe3a86bba8ec7453
+  p1node: 0000000000000000000000000000000000000000
+  p1node: 13207e5a10d9fd28ec424934298e176197f2c67f
+  p1node: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+  p1node: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+  p1node: 97054abb4ab824450e9164180baf491ae0078465
+  p1node: b608e9d1a3f0273ccf70fb85fd6866b3482bf965
+  p1node: 1e4e1b8f71e05681d422154f5421e385fec3454f
+  p1node: 0000000000000000000000000000000000000000
+  p1node--verbose: 29114dbae42b9f078cf2714dbe3a86bba8ec7453
+  p1node--verbose: 0000000000000000000000000000000000000000
+  p1node--verbose: 13207e5a10d9fd28ec424934298e176197f2c67f
+  p1node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+  p1node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+  p1node--verbose: 97054abb4ab824450e9164180baf491ae0078465
+  p1node--verbose: b608e9d1a3f0273ccf70fb85fd6866b3482bf965
+  p1node--verbose: 1e4e1b8f71e05681d422154f5421e385fec3454f
+  p1node--verbose: 0000000000000000000000000000000000000000
+  p1node--debug: 29114dbae42b9f078cf2714dbe3a86bba8ec7453
+  p1node--debug: 0000000000000000000000000000000000000000
+  p1node--debug: 13207e5a10d9fd28ec424934298e176197f2c67f
+  p1node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+  p1node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47
+  p1node--debug: 97054abb4ab824450e9164180baf491ae0078465
+  p1node--debug: b608e9d1a3f0273ccf70fb85fd6866b3482bf965
+  p1node--debug: 1e4e1b8f71e05681d422154f5421e385fec3454f
+  p1node--debug: 0000000000000000000000000000000000000000
+  p2node: 0000000000000000000000000000000000000000
+  p2node: 0000000000000000000000000000000000000000
+  p2node: bbe44766e73d5f11ed2177f1838de10c53ef3e74
+  p2node: 0000000000000000000000000000000000000000
+  p2node: 0000000000000000000000000000000000000000
+  p2node: 0000000000000000000000000000000000000000
+  p2node: 0000000000000000000000000000000000000000
+  p2node: 0000000000000000000000000000000000000000
+  p2node: 0000000000000000000000000000000000000000
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--verbose: bbe44766e73d5f11ed2177f1838de10c53ef3e74
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--verbose: 0000000000000000000000000000000000000000
+  p2node--debug: 0000000000000000000000000000000000000000
+  p2node--debug: 0000000000000000000000000000000000000000
+  p2node--debug: bbe44766e73d5f11ed2177f1838de10c53ef3e74
+  p2node--debug: 0000000000000000000000000000000000000000
+  p2node--debug: 0000000000000000000000000000000000000000
+  p2node--debug: 0000000000000000000000000000000000000000
+  p2node--debug: 0000000000000000000000000000000000000000
+  p2node--debug: 0000000000000000000000000000000000000000
+  p2node--debug: 0000000000000000000000000000000000000000
 
 Filters work:
 
@@ -1245,7 +1353,7 @@
   $ hg add a
   $ hg commit -m future -d "`cat a`"
 
-  $ hg log -l1 --template '{date|age}\n' 
+  $ hg log -l1 --template '{date|age}\n'
   7 years from now
 
 Error on syntax:
@@ -1255,6 +1363,30 @@
   abort: t:3: unmatched quotes
   [255]
 
+Behind the scenes, this will throw TypeError
+
+  $ hg log -l 3 --template '{date|obfuscate}\n'
+  abort: template filter 'obfuscate' is not compatible with keyword 'date'
+  [255]
+
+Behind the scenes, this will throw a ValueError
+
+  $ hg log -l 3 --template 'line: {desc|shortdate}\n'
+  abort: template filter 'shortdate' is not compatible with keyword 'desc'
+  [255]
+
+Behind the scenes, this will throw AttributeError
+
+  $ hg log -l 3 --template 'line: {date|escape}\n'
+  abort: template filter 'escape' is not compatible with keyword 'date'
+  [255]
+
+Behind the scenes, this will throw ValueError
+
+  $ hg tip --template '{author|email|date}\n'
+  abort: template filter 'datefilter' is not compatible with keyword 'author'
+  [255]
+
   $ cd ..
 
 
@@ -1388,3 +1520,8 @@
   10,test
   branch: test
 
+Test new-style inline templating:
+
+  $ hg log -R latesttag -r tip --template 'modified files: {file_mods % " {file}\n"}\n'
+  modified files:  .hgtags
+  
--- a/tests/test-commit-amend.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-commit-amend.t	Fri Oct 19 01:34:50 2012 -0500
@@ -32,12 +32,12 @@
 
   $ echo a >> a
   $ hg ci --amend -m 'amend base1'
-  pretxncommit 9cd25b479c51be2f4ed2c38e7abdf7ce67d8e0dc
-  9cd25b479c51 tip
+  pretxncommit 43f1ba15f28a50abf0aae529cf8a16bfced7b149
+  43f1ba15f28a tip
   saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-amend-backup.hg (glob)
   $ echo 'pretxncommit.foo = ' >> $HGRCPATH
   $ hg diff -c .
-  diff -r ad120869acf0 -r 9cd25b479c51 a
+  diff -r ad120869acf0 -r 43f1ba15f28a a
   --- a/a	Thu Jan 01 00:00:00 1970 +0000
   +++ b/a	Thu Jan 01 00:00:00 1970 +0000
   @@ -1,1 +1,3 @@
@@ -45,7 +45,7 @@
   +a
   +a
   $ hg log
-  changeset:   1:9cd25b479c51
+  changeset:   1:43f1ba15f28a
   tag:         tip
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
@@ -62,36 +62,36 @@
   $ echo b > b
   $ hg ci --amend -Am 'amend base1 new file'
   adding b
-  saved backup bundle to $TESTTMP/.hg/strip-backup/9cd25b479c51-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-amend-backup.hg (glob)
 
 Remove file that was added in amended commit:
 
   $ hg rm b
   $ hg ci --amend -m 'amend base1 remove new file'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/e2bb3ecffd2f-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/b8e3cb2b3882-amend-backup.hg (glob)
 
   $ hg cat b
-  b: no such file in rev 664a9b2d60cd
+  b: no such file in rev 74609c7f506e
   [1]
 
 No changes, just a different message:
 
   $ hg ci -v --amend -m 'no changes, new message'
-  amending changeset 664a9b2d60cd
-  copying changeset 664a9b2d60cd to ad120869acf0
+  amending changeset 74609c7f506e
+  copying changeset 74609c7f506e to ad120869acf0
   a
-  stripping amended changeset 664a9b2d60cd
+  stripping amended changeset 74609c7f506e
   1 changesets found
-  saved backup bundle to $TESTTMP/.hg/strip-backup/664a9b2d60cd-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/74609c7f506e-amend-backup.hg (glob)
   1 changesets found
   adding branch
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  committed changeset 1:ea6e356ff2ad
+  committed changeset 1:1cd866679df8
   $ hg diff -c .
-  diff -r ad120869acf0 -r ea6e356ff2ad a
+  diff -r ad120869acf0 -r 1cd866679df8 a
   --- a/a	Thu Jan 01 00:00:00 1970 +0000
   +++ b/a	Thu Jan 01 00:00:00 1970 +0000
   @@ -1,1 +1,3 @@
@@ -99,7 +99,7 @@
   +a
   +a
   $ hg log
-  changeset:   1:ea6e356ff2ad
+  changeset:   1:1cd866679df8
   tag:         tip
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
@@ -119,12 +119,12 @@
 Test -u/-d:
 
   $ hg ci --amend -u foo -d '1 0'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/ea6e356ff2ad-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/1cd866679df8-amend-backup.hg (glob)
   $ echo a >> a
   $ hg ci --amend -u foo -d '1 0'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/377b91ce8b56-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/780e6f23e03d-amend-backup.hg (glob)
   $ hg log -r .
-  changeset:   1:2c94e4a5756f
+  changeset:   1:5f357c7560ab
   tag:         tip
   user:        foo
   date:        Thu Jan 01 00:00:01 1970 +0000
@@ -139,8 +139,8 @@
   > echo "another precious commit message" > "$1"
   > __EOF__
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v
-  amending changeset 2c94e4a5756f
-  copying changeset 2c94e4a5756f to ad120869acf0
+  amending changeset 5f357c7560ab
+  copying changeset 5f357c7560ab to ad120869acf0
   no changes, new message
   
   
@@ -151,22 +151,24 @@
   HG: branch 'default'
   HG: changed a
   a
-  stripping amended changeset 2c94e4a5756f
+  stripping amended changeset 5f357c7560ab
   1 changesets found
-  saved backup bundle to $TESTTMP/.hg/strip-backup/2c94e4a5756f-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/5f357c7560ab-amend-backup.hg (glob)
   1 changesets found
   adding branch
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  committed changeset 1:ffb49186f961
+  committed changeset 1:7ab3bf440b54
 
 Same, but with changes in working dir (different code path):
 
   $ echo a >> a
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v
-  amending changeset ffb49186f961
+  amending changeset 7ab3bf440b54
+  a
+  copying changeset a0ea9b1a4c8c to ad120869acf0
   another precious commit message
   
   
@@ -177,23 +179,21 @@
   HG: branch 'default'
   HG: changed a
   a
-  copying changeset 27f3aacd3011 to ad120869acf0
-  a
-  stripping intermediate changeset 27f3aacd3011
-  stripping amended changeset ffb49186f961
+  stripping intermediate changeset a0ea9b1a4c8c
+  stripping amended changeset 7ab3bf440b54
   2 changesets found
-  saved backup bundle to $TESTTMP/.hg/strip-backup/ffb49186f961-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/7ab3bf440b54-amend-backup.hg (glob)
   1 changesets found
   adding branch
   adding changesets
   adding manifests
   adding file changes
   added 1 changesets with 1 changes to 1 files
-  committed changeset 1:fb6cca43446f
+  committed changeset 1:ea22a388757c
 
   $ rm editor.sh
   $ hg log -r .
-  changeset:   1:fb6cca43446f
+  changeset:   1:ea22a388757c
   tag:         tip
   user:        foo
   date:        Thu Jan 01 00:00:01 1970 +0000
@@ -205,16 +205,16 @@
   $ hg book book1
   $ hg book book2
   $ hg ci --amend -m 'move bookmarks'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/fb6cca43446f-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/ea22a388757c-amend-backup.hg (glob)
   $ hg book
-     book1                     1:0cf1c7a51bcf
-   * book2                     1:0cf1c7a51bcf
+     book1                     1:6cec5aa930e2
+   * book2                     1:6cec5aa930e2
   $ echo a >> a
   $ hg ci --amend -m 'move bookmarks'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/0cf1c7a51bcf-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/6cec5aa930e2-amend-backup.hg (glob)
   $ hg book
-     book1                     1:7344472bd951
-   * book2                     1:7344472bd951
+     book1                     1:48bb6e53a15f
+   * book2                     1:48bb6e53a15f
 
   $ echo '[defaults]' >> $HGRCPATH
   $ echo "commit=-d '0 0'" >> $HGRCPATH
@@ -230,9 +230,9 @@
   marked working directory as branch default
   (branches are permanent and global, did you want a bookmark?)
   $ hg ci --amend -m 'back to default'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/1661ca36a2db-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/8ac881fbf49d-amend-backup.hg (glob)
   $ hg branches
-  default                        2:f24ee5961967
+  default                        2:ce12b0b57d46
 
 Close branch:
 
@@ -255,9 +255,9 @@
   reopening closed branch head 4
   $ echo b >> b
   $ hg ci --amend --close-branch
-  saved backup bundle to $TESTTMP/.hg/strip-backup/5e302dcc12b8-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-amend-backup.hg (glob)
   $ hg branches
-  default                        2:f24ee5961967
+  default                        2:ce12b0b57d46
 
 Refuse to amend merges:
 
@@ -279,7 +279,7 @@
   $ hg ci -m 'b -> c'
   $ hg mv c d
   $ hg ci --amend -m 'b -> d'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/9c207120aa98-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/b8c6eac7f12e-amend-backup.hg (glob)
   $ hg st --rev '.^' --copies d
   A d
     b
@@ -287,7 +287,7 @@
   $ hg ci -m 'e = d'
   $ hg cp e f
   $ hg ci --amend -m 'f = d'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/fda2b3b27b22-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/7f9761d65613-amend-backup.hg (glob)
   $ hg st --rev '.^' --copies f
   A f
     d
@@ -298,7 +298,7 @@
   $ hg cp a f
   $ mv f.orig f
   $ hg ci --amend -m replacef
-  saved backup bundle to $TESTTMP/.hg/strip-backup/20a7413547f9-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/9e8c5f7e3d95-amend-backup.hg (glob)
   $ hg st --change . --copies
   $ hg log -r . --template "{file_copies}\n"
   
@@ -310,7 +310,7 @@
   adding g
   $ hg mv g h
   $ hg ci --amend
-  saved backup bundle to $TESTTMP/.hg/strip-backup/5daa77a5d616-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/24aa8eacce2b-amend-backup.hg (glob)
   $ hg st --change . --copies h
   A h
   $ hg log -r . --template "{file_copies}\n"
@@ -330,11 +330,11 @@
   $ echo a >> a
   $ hg ci -ma
   $ hg ci --amend -m "a'"
-  saved backup bundle to $TESTTMP/.hg/strip-backup/167f8e3031df-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/3837aa2a2fdb-amend-backup.hg (glob)
   $ hg log -r . --template "{branch}\n"
   a
   $ hg ci --amend -m "a''"
-  saved backup bundle to $TESTTMP/.hg/strip-backup/ceac1a44c806-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/c05c06be7514-amend-backup.hg (glob)
   $ hg log -r . --template "{branch}\n"
   a
 
@@ -351,8 +351,9 @@
   $ hg graft 12
   grafting revision 12
   $ hg ci --amend -m 'graft amend'
-  saved backup bundle to $TESTTMP/.hg/strip-backup/18a5124daf7a-amend-backup.hg (glob)
+  saved backup bundle to $TESTTMP/.hg/strip-backup/bd010aea3f39-amend-backup.hg (glob)
   $ hg log -r . --debug | grep extra
+  extra:       amend_source=bd010aea3f39f3fb2a2f884b9ccb0471cd77398e
   extra:       branch=a
   extra:       source=2647734878ef0236dda712fae9c1651cf694ea8a
 
@@ -370,3 +371,86 @@
   11: draft
   13: secret
 
+Test amend with obsolete
+---------------------------
+
+Enable obsolete
+
+  $ cat > ${TESTTMP}/obs.py << EOF
+  > import mercurial.obsolete
+  > mercurial.obsolete._enabled = True
+  > EOF
+  $ echo '[extensions]' >> $HGRCPATH
+  $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
+
+
+Amend with no file changes
+
+  $ hg id -n
+  13
+  $ hg ci --amend -m 'babar'
+  $ hg id -n
+  14
+  $ hg log -Gl 3 --style=compact
+  @  14[tip]:11   b650e6ee8614   1970-01-01 00:00 +0000   test
+  |    babar
+  |
+  | o  12:0   2647734878ef   1970-01-01 00:00 +0000   test
+  | |    fork
+  | |
+  o |  11   3334b7925910   1970-01-01 00:00 +0000   test
+  | |    a''
+  | |
+  $ hg log -Gl 4 --hidden --style=compact
+  @  14[tip]:11   b650e6ee8614   1970-01-01 00:00 +0000   test
+  |    babar
+  |
+  | x  13:11   68ff8ff97044   1970-01-01 00:00 +0000   test
+  |/     amend for phase
+  |
+  | o  12:0   2647734878ef   1970-01-01 00:00 +0000   test
+  | |    fork
+  | |
+  o |  11   3334b7925910   1970-01-01 00:00 +0000   test
+  | |    a''
+  | |
+
+Amend with file changes
+
+(note: the extra commit over 15 is temporary junk that I would be happy to get
+rid of)
+
+  $ echo 'babar' >> a
+  $ hg commit --amend
+  $ hg log -Gl 6 --hidden --style=compact
+  @  16[tip]:11   9f9e9bccf56c   1970-01-01 00:00 +0000   test
+  |    babar
+  |
+  | x  15   90fef497c56f   1970-01-01 00:00 +0000   test
+  | |    temporary amend commit for b650e6ee8614
+  | |
+  | x  14:11   b650e6ee8614   1970-01-01 00:00 +0000   test
+  |/     babar
+  |
+  | x  13:11   68ff8ff97044   1970-01-01 00:00 +0000   test
+  |/     amend for phase
+  |
+  | o  12:0   2647734878ef   1970-01-01 00:00 +0000   test
+  | |    fork
+  | |
+  o |  11   3334b7925910   1970-01-01 00:00 +0000   test
+  | |    a''
+  | |
+
+
+Test that amend does not make it easy to create an obsolescence cycle
+---------------------------------------------------------------------
+
+
+  $ hg id -r 14
+  b650e6ee8614 (a)
+  $ hg revert -ar 14
+  reverting a
+  $ hg commit --amend
+  $ hg id
+  b99e5df575f7 (a) tip
--- a/tests/test-convert-cvsnt-mergepoints.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-cvsnt-mergepoints.t	Fri Oct 19 01:34:50 2012 -0500
@@ -43,7 +43,7 @@
   $ cvscall -Q add foo
   $ cd foo
   $ echo foo > foo.txt
-  $ cvscall -Q add foo.txt 
+  $ cvscall -Q add foo.txt
   $ cvsci -m "add foo.txt" foo.txt
   $ cd ../..
   $ rm -rf cvsworktmp
--- a/tests/test-convert-darcs.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-darcs.t	Fri Oct 19 01:34:50 2012 -0500
@@ -5,25 +5,6 @@
   $ echo 'graphlog =' >> $HGRCPATH
   $ DARCS_EMAIL='test@example.org'; export DARCS_EMAIL
 
-skip if we can't import elementtree
-
-  $ mkdir dummy
-  $ mkdir dummy/_darcs
-  $ if hg convert dummy 2>&1 | grep ElementTree > /dev/null; then
-  >     echo 'skipped: missing feature: elementtree module'
-  >     exit 80
-  > fi
-
-#if no-outer-repo
-
-try converting darcs1 repository
-
-  $ hg clone -q "$TESTDIR/bundles/darcs1.hg" darcs
-  $ hg convert -s darcs darcs/darcs1 2>&1 | grep darcs-1.0
-  darcs-1.0 repository format is unsupported, please upgrade
-
-#endif
-
 initialize darcs repo
 
   $ mkdir darcs-repo
@@ -44,6 +25,13 @@
   Finished recording patch 'p1.1'
   $ cd ..
 
+skip if we can't import elementtree
+
+  $ if hg convert darcs-repo darcs-dummy 2>&1 | grep ElementTree > /dev/null; then
+  >     echo 'skipped: missing feature: elementtree module'
+  >     exit 80
+  > fi
+
 update source
 
   $ cd darcs-repo
@@ -108,3 +96,13 @@
   1e88685f5ddec574a34c70af492f95b6debc8741 644   b
   37406831adc447ec2385014019599dfec953c806 644   dir2/d
   b783a337463792a5c7d548ad85a7d3253c16ba8c 644   ff
+
+#if no-outer-repo
+
+try converting darcs1 repository
+
+  $ hg clone -q "$TESTDIR/bundles/darcs1.hg" darcs
+  $ hg convert -s darcs darcs/darcs1 2>&1 | grep darcs-1.0
+  darcs-1.0 repository format is unsupported, please upgrade
+
+#endif
--- a/tests/test-convert-filemap.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-filemap.t	Fri Oct 19 01:34:50 2012 -0500
@@ -229,11 +229,11 @@
   $ cat > renames.fmap <<EOF
   > include dir
   > exclude dir/file2
-  > rename dir dir2
+  > rename dir dir2//../dir2/
   > include foo
   > include copied
-  > rename foo foo2
-  > rename copied copied2
+  > rename foo foo2/
+  > rename copied ./copied2
   > exclude dir/subdir
   > include dir/subdir/file3
   > EOF
@@ -284,10 +284,8 @@
   > include
   > EOF
   $ hg -q convert --filemap errors.fmap source errors.repo
-  errors.fmap:1: superfluous / in exclude 'dir/'
   errors.fmap:3: superfluous / in include '/dir'
   errors.fmap:3: superfluous / in rename '/dir'
-  errors.fmap:3: superfluous / in exclude 'dir//dir'
   errors.fmap:4: unknown directive 'out of sync'
   errors.fmap:5: path to exclude is missing
   abort: errors in filemap
--- a/tests/test-convert-mtn-rename-directory.out	Mon Oct 08 00:19:30 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,23 +0,0 @@
-% tedious monotone keys configuration
-% create monotone repository
-mtn: adding dir1 to workspace manifest
-mtn: adding dir1/subdir1 to workspace manifest
-mtn: adding dir1/subdir1/file1 to workspace manifest
-mtn: beginning commit on branch 'com.selenic.test'
-mtn: committed revision 5ed13ff5582d8d1e319f079b694a37d2b45edfc8
-% rename directory
-mtn: skipping dir1, already accounted for in workspace
-mtn: renaming dir1/subdir1 to dir1/subdir2 in workspace manifest
-mtn: beginning commit on branch 'com.selenic.test'
-mtn: committed revision 985204142a822b22ee86b509d61f3c5ab6857d2b
-% convert
-assuming destination repo.mtn-hg
-initializing destination repo.mtn-hg repository
-scanning source...
-sorting...
-converting...
-1 initialize
-0 rename
-1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-% manifest
-dir1/subdir2/file1
--- a/tests/test-convert-mtn.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-mtn.t	Fri Oct 19 01:34:50 2012 -0500
@@ -2,9 +2,18 @@
   $ "$TESTDIR/hghave" mtn || exit 80
 
 Monotone directory is called .monotone on *nix and monotone
-on Windows. Having a variable here ease test patching.
+on Windows.
+
+#if windows
+
+  $ mtndir=monotone
+
+#else
 
   $ mtndir=.monotone
+
+#endif
+
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "convert=" >> $HGRCPATH
   $ echo 'graphlog =' >> $HGRCPATH
@@ -210,7 +219,9 @@
 
 test large file support (> 32kB)
 
-  $ python -c 'for x in range(10000): print x' > large-file
+  >>> fp = file('large-file', 'wb')
+  >>> for x in xrange(10000): fp.write('%d\n' % x)
+  >>> fp.close()
   $ $TESTDIR/md5sum.py large-file
   5d6de8a95c3b6bf9e0ffb808ba5299c1  large-file
   $ mtn add large-file
--- a/tests/test-convert-svn-branches.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-svn-branches.t	Fri Oct 19 01:34:50 2012 -0500
@@ -3,7 +3,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > convert = 
+  > convert =
   > graphlog =
   > EOF
 
@@ -14,7 +14,7 @@
 
   $ cat > branchmap <<EOF
   > old3 newbranch
-  >     
+  > 
   > 
   > EOF
   $ hg convert --branchmap=branchmap --datesort -r 10 svn-repo A-hg
@@ -95,9 +95,5 @@
 
 Test hg failing to call itself
 
-  $ HG=foobar hg convert svn-repo B-hg
-  * (glob)
-  initializing destination B-hg repository
+  $ HG=foobar hg convert svn-repo B-hg 2>&1 | grep abort
   abort: Mercurial failed to run itself, check hg executable is in PATH
-  [255]
-
--- a/tests/test-convert-svn-encoding.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-svn-encoding.t	Fri Oct 19 01:34:50 2012 -0500
@@ -3,7 +3,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > convert = 
+  > convert =
   > graphlog =
   > EOF
 
--- a/tests/test-convert-svn-move.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-svn-move.t	Fri Oct 19 01:34:50 2012 -0500
@@ -3,7 +3,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > convert = 
+  > convert =
   > graphlog =
   > EOF
 
@@ -155,7 +155,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > progress = 
+  > progress =
   > [progress]
   > assume-tty = 1
   > delay = 0
@@ -165,68 +165,75 @@
   > width = 60
   > EOF
 
-  $ hg convert svn-repo hg-progress 2>&1 | "$TESTDIR/filtercr.py"
-  
-  scanning [ <=>                                          ] 1
-  scanning [  <=>                                         ] 2
-  scanning [   <=>                                        ] 3
-  scanning [    <=>                                       ] 4
-  scanning [     <=>                                      ] 5
-  scanning [      <=>                                     ] 6
-  scanning [       <=>                                    ] 7
-                                                              
-  converting [                                          ] 0/7
-  getting files [=====>                                 ] 1/6
-  getting files [============>                          ] 2/6
-  getting files [==================>                    ] 3/6
-  getting files [=========================>             ] 4/6
-  getting files [===============================>       ] 5/6
-  getting files [======================================>] 6/6
-                                                              
-  converting [=====>                                    ] 1/7
-  scanning paths [                                      ] 0/1
-  getting files [======================================>] 1/1
-                                                              
-  converting [===========>                              ] 2/7
-  scanning paths [                                      ] 0/2
-  scanning paths [==================>                   ] 1/2
-  getting files [========>                              ] 1/4
-  getting files [==================>                    ] 2/4
-  getting files [============================>          ] 3/4
-  getting files [======================================>] 4/4
-                                                              
-  converting [=================>                        ] 3/7
-  scanning paths [                                      ] 0/1
-  getting files [======================================>] 1/1
-                                                              
-  converting [=======================>                  ] 4/7
-  scanning paths [                                      ] 0/1
-  getting files [======================================>] 1/1
-                                                              
-  converting [=============================>            ] 5/7
-  scanning paths [                                      ] 0/3
-  scanning paths [===========>                          ] 1/3
-  scanning paths [========================>             ] 2/3
-  getting files [===>                                   ] 1/8
-  getting files [========>                              ] 2/8
-  getting files [=============>                         ] 3/8
-  getting files [==================>                    ] 4/8
-  getting files [=======================>               ] 5/8
-  getting files [============================>          ] 6/8
-  getting files [=================================>     ] 7/8
-  getting files [======================================>] 8/8
-                                                              
-  converting [===================================>      ] 6/7
-  scanning paths [                                      ] 0/1
-  getting files [===>                                   ] 1/8
-  getting files [========>                              ] 2/8
-  getting files [=============>                         ] 3/8
-  getting files [==================>                    ] 4/8
-  getting files [=======================>               ] 5/8
-  getting files [============================>          ] 6/8
-  getting files [=================================>     ] 7/8
-  getting files [======================================>] 8/8
-                                                              
+  $ hg convert svn-repo hg-progress
+  \r (no-eol) (esc)
+  scanning [ <=>                                          ] 1\r (no-eol) (esc)
+  scanning [  <=>                                         ] 2\r (no-eol) (esc)
+  scanning [   <=>                                        ] 3\r (no-eol) (esc)
+  scanning [    <=>                                       ] 4\r (no-eol) (esc)
+  scanning [     <=>                                      ] 5\r (no-eol) (esc)
+  scanning [      <=>                                     ] 6\r (no-eol) (esc)
+  scanning [       <=>                                    ] 7\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [                                          ] 0/7\r (no-eol) (esc)
+  getting files [=====>                                 ] 1/6\r (no-eol) (esc)
+  getting files [============>                          ] 2/6\r (no-eol) (esc)
+  getting files [==================>                    ] 3/6\r (no-eol) (esc)
+  getting files [=========================>             ] 4/6\r (no-eol) (esc)
+  getting files [===============================>       ] 5/6\r (no-eol) (esc)
+  getting files [======================================>] 6/6\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [=====>                                    ] 1/7\r (no-eol) (esc)
+  scanning paths [                                      ] 0/1\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [===========>                              ] 2/7\r (no-eol) (esc)
+  scanning paths [                                      ] 0/2\r (no-eol) (esc)
+  scanning paths [==================>                   ] 1/2\r (no-eol) (esc)
+  getting files [========>                              ] 1/4\r (no-eol) (esc)
+  getting files [==================>                    ] 2/4\r (no-eol) (esc)
+  getting files [============================>          ] 3/4\r (no-eol) (esc)
+  getting files [======================================>] 4/4\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [=================>                        ] 3/7\r (no-eol) (esc)
+  scanning paths [                                      ] 0/1\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [=======================>                  ] 4/7\r (no-eol) (esc)
+  scanning paths [                                      ] 0/1\r (no-eol) (esc)
+  getting files [======================================>] 1/1\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [=============================>            ] 5/7\r (no-eol) (esc)
+  scanning paths [                                      ] 0/3\r (no-eol) (esc)
+  scanning paths [===========>                          ] 1/3\r (no-eol) (esc)
+  scanning paths [========================>             ] 2/3\r (no-eol) (esc)
+  getting files [===>                                   ] 1/8\r (no-eol) (esc)
+  getting files [========>                              ] 2/8\r (no-eol) (esc)
+  getting files [=============>                         ] 3/8\r (no-eol) (esc)
+  getting files [==================>                    ] 4/8\r (no-eol) (esc)
+  getting files [=======================>               ] 5/8\r (no-eol) (esc)
+  getting files [============================>          ] 6/8\r (no-eol) (esc)
+  getting files [=================================>     ] 7/8\r (no-eol) (esc)
+  getting files [======================================>] 8/8\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  converting [===================================>      ] 6/7\r (no-eol) (esc)
+  scanning paths [                                      ] 0/1\r (no-eol) (esc)
+  getting files [===>                                   ] 1/8\r (no-eol) (esc)
+  getting files [========>                              ] 2/8\r (no-eol) (esc)
+  getting files [=============>                         ] 3/8\r (no-eol) (esc)
+  getting files [==================>                    ] 4/8\r (no-eol) (esc)
+  getting files [=======================>               ] 5/8\r (no-eol) (esc)
+  getting files [============================>          ] 6/8\r (no-eol) (esc)
+  getting files [=================================>     ] 7/8\r (no-eol) (esc)
+  getting files [======================================>] 8/8\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
   initializing destination hg-progress repository
   scanning source...
   sorting...
@@ -238,6 +245,5 @@
   2 adddb
   1 branch
   0 clobberdir
-  
 
   $ cd ..
--- a/tests/test-convert-svn-sink.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-svn-sink.t	Fri Oct 19 01:34:50 2012 -0500
@@ -16,7 +16,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > convert = 
+  > convert =
   > graphlog =
   > EOF
 
--- a/tests/test-convert-svn-source.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-svn-source.t	Fri Oct 19 01:34:50 2012 -0500
@@ -3,7 +3,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > convert = 
+  > convert =
   > graphlog =
   > [convert]
   > svn.trunk = mytrunk
--- a/tests/test-convert-svn-startrev.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-svn-startrev.t	Fri Oct 19 01:34:50 2012 -0500
@@ -3,7 +3,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > convert = 
+  > convert =
   > graphlog =
   > EOF
   $ convert()
--- a/tests/test-convert-svn-tags.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert-svn-tags.t	Fri Oct 19 01:34:50 2012 -0500
@@ -3,7 +3,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
-  > convert = 
+  > convert =
   > graphlog =
   > EOF
 
--- a/tests/test-convert.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-convert.t	Fri Oct 19 01:34:50 2012 -0500
@@ -60,8 +60,8 @@
   
       The authormap is a simple text file that maps each source commit author to
       a destination commit author. It is handy for source SCMs that use unix
-      logins to identify authors (eg: CVS). One line per author mapping and the
-      line format is:
+      logins to identify authors (e.g.: CVS). One line per author mapping and
+      the line format is:
   
         source author = destination author
   
@@ -261,7 +261,7 @@
       --datesort         try to sort changesets by date
       --sourcesort       preserve source changesets order
   
-  use "hg -v help convert" to show more info
+  use "hg -v help convert" to show the global options
   $ hg init a
   $ cd a
   $ echo a > a
@@ -399,7 +399,7 @@
 
 test revset converted() lookup
 
-  $ hg --config convert.hg.saverev=True convert a c  
+  $ hg --config convert.hg.saverev=True convert a c
   initializing destination c repository
   scanning source...
   sorting...
--- a/tests/test-copy.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-copy.t	Fri Oct 19 01:34:50 2012 -0500
@@ -139,7 +139,7 @@
   foo3 does not exist!
   $ hg up -qC .
 
-copy --after to a nonexistant target filename
+copy --after to a nonexistent target filename
   $ hg cp -A foo dummy
   foo: not recording copy - dummy does not exist
 
--- a/tests/test-debugbuilddag.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-debugbuilddag.t	Fri Oct 19 01:34:50 2012 -0500
@@ -8,44 +8,43 @@
   > --config extensions.progress= --config progress.assume-tty=1 \
   > --config progress.delay=0 --config progress.refresh=0 \
   > --config progress.format=topic,bar,number \
-  > --config progress.width=60 2>&1 | \
-  > python "$TESTDIR/filtercr.py"
-  
-  building [                                          ]  0/12
-  building [                                          ]  0/12
-  building [                                          ]  0/12
-  building [                                          ]  0/12
-  building [==>                                       ]  1/12
-  building [==>                                       ]  1/12
-  building [==>                                       ]  1/12
-  building [==>                                       ]  1/12
-  building [======>                                   ]  2/12
-  building [======>                                   ]  2/12
-  building [=========>                                ]  3/12
-  building [=========>                                ]  3/12
-  building [=============>                            ]  4/12
-  building [=============>                            ]  4/12
-  building [=============>                            ]  4/12
-  building [=============>                            ]  4/12
-  building [=============>                            ]  4/12
-  building [=============>                            ]  4/12
-  building [================>                         ]  5/12
-  building [================>                         ]  5/12
-  building [====================>                     ]  6/12
-  building [====================>                     ]  6/12
-  building [=======================>                  ]  7/12
-  building [=======================>                  ]  7/12
-  building [===========================>              ]  8/12
-  building [===========================>              ]  8/12
-  building [===========================>              ]  8/12
-  building [===========================>              ]  8/12
-  building [==============================>           ]  9/12
-  building [==============================>           ]  9/12
-  building [==================================>       ] 10/12
-  building [==================================>       ] 10/12
-  building [=====================================>    ] 11/12
-  building [=====================================>    ] 11/12
-                                                              \r (esc)
+  > --config progress.width=60
+  \r (no-eol) (esc)
+  building [                                          ]  0/12\r (no-eol) (esc)
+  building [                                          ]  0/12\r (no-eol) (esc)
+  building [                                          ]  0/12\r (no-eol) (esc)
+  building [                                          ]  0/12\r (no-eol) (esc)
+  building [==>                                       ]  1/12\r (no-eol) (esc)
+  building [==>                                       ]  1/12\r (no-eol) (esc)
+  building [==>                                       ]  1/12\r (no-eol) (esc)
+  building [==>                                       ]  1/12\r (no-eol) (esc)
+  building [======>                                   ]  2/12\r (no-eol) (esc)
+  building [======>                                   ]  2/12\r (no-eol) (esc)
+  building [=========>                                ]  3/12\r (no-eol) (esc)
+  building [=========>                                ]  3/12\r (no-eol) (esc)
+  building [=============>                            ]  4/12\r (no-eol) (esc)
+  building [=============>                            ]  4/12\r (no-eol) (esc)
+  building [=============>                            ]  4/12\r (no-eol) (esc)
+  building [=============>                            ]  4/12\r (no-eol) (esc)
+  building [=============>                            ]  4/12\r (no-eol) (esc)
+  building [=============>                            ]  4/12\r (no-eol) (esc)
+  building [================>                         ]  5/12\r (no-eol) (esc)
+  building [================>                         ]  5/12\r (no-eol) (esc)
+  building [====================>                     ]  6/12\r (no-eol) (esc)
+  building [====================>                     ]  6/12\r (no-eol) (esc)
+  building [=======================>                  ]  7/12\r (no-eol) (esc)
+  building [=======================>                  ]  7/12\r (no-eol) (esc)
+  building [===========================>              ]  8/12\r (no-eol) (esc)
+  building [===========================>              ]  8/12\r (no-eol) (esc)
+  building [===========================>              ]  8/12\r (no-eol) (esc)
+  building [===========================>              ]  8/12\r (no-eol) (esc)
+  building [==============================>           ]  9/12\r (no-eol) (esc)
+  building [==============================>           ]  9/12\r (no-eol) (esc)
+  building [==================================>       ] 10/12\r (no-eol) (esc)
+  building [==================================>       ] 10/12\r (no-eol) (esc)
+  building [=====================================>    ] 11/12\r (no-eol) (esc)
+  building [=====================================>    ] 11/12\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 tags
   $ cat .hg/localtags
--- a/tests/test-debugcomplete.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-debugcomplete.t	Fri Oct 19 01:34:50 2012 -0500
@@ -237,7 +237,7 @@
   debugindexdot: 
   debuginstall: 
   debugknown: 
-  debugobsolete: date, user
+  debugobsolete: flags, date, user
   debugpushkey: 
   debugpvec: 
   debugrebuildstate: rev
--- a/tests/test-debugindexdot.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-debugindexdot.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-Just exercize debugindexdot
+Just exercise debugindexdot
 Create a short file history including a merge.
   $ hg init t
   $ cd t
--- a/tests/test-diff-ignore-whitespace.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-diff-ignore-whitespace.t	Fri Oct 19 01:34:50 2012 -0500
@@ -404,7 +404,8 @@
   -goodbye world
   +hello world\r (esc)
   +\r (esc)
-  +goodbye\rworld (esc)
+  +goodbye\r (no-eol) (esc)
+  world
 
 No completely blank lines to ignore:
 
@@ -417,7 +418,8 @@
   -goodbye world
   +hello world\r (esc)
   +\r (esc)
-  +goodbye\rworld (esc)
+  +goodbye\r (no-eol) (esc)
+  world
 
 Only new line noticed:
 
--- a/tests/test-diff-issue2761.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-diff-issue2761.t	Fri Oct 19 01:34:50 2012 -0500
@@ -15,9 +15,9 @@
 
   $ hg up -C
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ touch doesnt-exist-in-1
+  $ touch does-not-exist-in-1
   $ hg add
-  adding doesnt-exist-in-1
+  adding does-not-exist-in-1
   $ hg ci -m third
-  $ rm doesnt-exist-in-1
+  $ rm does-not-exist-in-1
   $ hg diff -r 1
--- a/tests/test-diff-newlines.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-diff-newlines.t	Fri Oct 19 01:34:50 2012 -0500
@@ -13,6 +13,7 @@
   +++ b/a	Thu Jan 01 00:00:02 1970 +0000
   @@ -1,2 +1,3 @@
    confuse str.splitlines
-   embedded\rnewline (esc)
+   embedded\r (no-eol) (esc)
+  newline
   +clean diff
 
--- a/tests/test-diffstat.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-diffstat.t	Fri Oct 19 01:34:50 2012 -0500
@@ -68,5 +68,5 @@
   $ hg diff --stat --git
    file with spaces |  Bin 
    1 files changed, 0 insertions(+), 0 deletions(-)
-	
+
   $ cd ..
--- a/tests/test-dirstate.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-dirstate.t	Fri Oct 19 01:34:50 2012 -0500
@@ -52,5 +52,11 @@
   $ hg status
   $ hg debugstate
   n 644          2 2021-01-01 12:00:00 a
-  $ cd ..
+
+Test modulo storage/comparison of absurd dates:
 
+  $ touch -t 195001011200 a
+  $ hg st
+  $ hg debugstate
+  n 644          2 2018-01-19 15:14:08 a
+
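The new test-dirstate.t hunk above checks that an mtime from before the Unix epoch (1950) survives the dirstate round trip: the value printed by "hg debugstate" is consistent with keeping only the low 31 bits of the timestamp. A worked example of that arithmetic, assuming the harness pins the timezone so that "touch -t 195001011200" means 1950-01-01 12:00 UTC (which is what the expected 2018-01-19 15:14:08 implies):

    import datetime

    epoch = datetime.datetime(1970, 1, 1)
    mtime = int((datetime.datetime(1950, 1, 1, 12, 0) - epoch).total_seconds())
    print(mtime)                     # -631108800, i.e. before the epoch
    stored = mtime & 0x7fffffff      # keep the low 31 bits, as the test implies
    print(stored)                    # 1516374848
    print(epoch + datetime.timedelta(seconds=stored))
    # 2018-01-19 15:14:08 -- the date shown by "hg debugstate" above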
--- a/tests/test-encoding-align.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-encoding-align.t	Fri Oct 19 01:34:50 2012 -0500
@@ -59,7 +59,7 @@
                           \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
                           \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc)
   
-  use "hg -v help showoptlist" to show more info
+  use "hg -v help showoptlist" to show the global options
 
 
   $ rm -f s; touch s
--- a/tests/test-encoding-textwrap.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-encoding-textwrap.t	Fri Oct 19 01:34:50 2012 -0500
@@ -67,7 +67,7 @@
       \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf\x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf\x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc)
       \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc)
   
-  use "hg -v help show_full_ja" to show more info
+  use "hg -v help show_full_ja" to show the global options
 
 (1-2) display Japanese full-width characters in utf-8
 
@@ -82,7 +82,7 @@
       \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc)
       \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc)
   
-  use "hg -v help show_full_ja" to show more info
+  use "hg -v help show_full_ja" to show the global options
 
 
 (1-3) display Japanese half-width characters in cp932
@@ -98,7 +98,7 @@
       \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc)
       \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc)
   
-  use "hg -v help show_half_ja" to show more info
+  use "hg -v help show_half_ja" to show the global options
 
 (1-4) display Japanese half-width characters in utf-8
 
@@ -113,7 +113,7 @@
       \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc)
       \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc)
   
-  use "hg -v help show_half_ja" to show more info
+  use "hg -v help show_half_ja" to show the global options
 
 
 
@@ -134,7 +134,7 @@
       \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
       \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
   
-  use "hg -v help show_ambig_ja" to show more info
+  use "hg -v help show_ambig_ja" to show the global options
 
 (2-1-2) display Japanese ambiguous-width characters in utf-8
 
@@ -149,7 +149,7 @@
       \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
       \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
   
-  use "hg -v help show_ambig_ja" to show more info
+  use "hg -v help show_ambig_ja" to show the global options
 
 (2-1-3) display Russian ambiguous-width characters in cp1251
 
@@ -164,7 +164,7 @@
       \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
       \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
   
-  use "hg -v help show_ambig_ru" to show more info
+  use "hg -v help show_ambig_ru" to show the global options
 
 (2-1-4) display Russian ambiguous-width characters in utf-8
 
@@ -179,7 +179,7 @@
       \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
       \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
   
-  use "hg -v help show_ambig_ru" to show more info
+  use "hg -v help show_ambig_ru" to show the global options
 
 
 (2-2) treat width of ambiguous characters as wide
@@ -200,7 +200,7 @@
       \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
       \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc)
   
-  use "hg -v help show_ambig_ja" to show more info
+  use "hg -v help show_ambig_ja" to show the global options
 
 (2-2-2) display Japanese ambiguous-width characters in utf-8
 
@@ -218,7 +218,7 @@
       \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
       \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc)
   
-  use "hg -v help show_ambig_ja" to show more info
+  use "hg -v help show_ambig_ja" to show the global options
 
 (2-2-3) display Russian ambiguous-width characters in cp1251
 
@@ -236,7 +236,7 @@
       \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
       \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc)
   
-  use "hg -v help show_ambig_ru" to show more info
+  use "hg -v help show_ambig_ru" to show the global options
 
 (2-2-4) display Russian ambiguous-width characters in utf-8
 
@@ -254,6 +254,6 @@
       \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
       \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc)
   
-  use "hg -v help show_ambig_ru" to show more info
+  use "hg -v help show_ambig_ru" to show the global options
 
   $ cd ..
--- a/tests/test-encoding.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-encoding.t	Fri Oct 19 01:34:50 2012 -0500
@@ -252,5 +252,5 @@
 
   $ HGENCODING=latin-1 hg up `cat latin-1-tag`
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
- 
+
   $ cd ..
--- a/tests/test-eolfilename.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-eolfilename.t	Fri Oct 19 01:34:50 2012 -0500
@@ -9,11 +9,13 @@
   $ A=`printf 'he\rllo'`
   $ echo foo > "$A"
   $ hg add
-  adding he\rllo (esc)
+  adding he\r (no-eol) (esc)
+  llo
   abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
   [255]
   $ hg ci -A -m m
-  adding he\rllo (esc)
+  adding he\r (no-eol) (esc)
+  llo
   abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
   [255]
   $ rm "$A"
@@ -31,7 +33,9 @@
   [255]
   $ echo foo > "$A"
   $ hg debugwalk
-  f  he\rllo  he\rllo (esc)
+  f  he\r (no-eol) (esc)
+  llo  he\r (no-eol) (esc)
+  llo
   f  hell
   o  hell
   o
--- a/tests/test-export.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-export.t	Fri Oct 19 01:34:50 2012 -0500
@@ -124,7 +124,7 @@
 Checking if only alphanumeric characters are used in the file name (%m option):
 
   $ echo "line" >> foo
-  $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~" 
+  $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~"
   $ hg export -v -o %m.patch tip
   exporting patch:
   ____________0123456789_______ABCDEFGHIJKLMNOPQRSTUVWXYZ______abcdefghijklmnopqrstuvwxyz____.patch
@@ -144,4 +144,28 @@
   abort: export requires at least one changeset
   [255]
 
+Check for color output
+  $ echo "[color]" >> $HGRCPATH
+  $ echo "mode = ansi" >> $HGRCPATH
+  $ echo "[extensions]" >> $HGRCPATH
+  $ echo "color=" >> $HGRCPATH
+
+  $ hg export --color always --nodates tip
+  # HG changeset patch
+  # User test
+  # Date 0 0
+  # Node ID * (glob)
+  # Parent * (glob)
+   !"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~
+  
+  \x1b[0;1mdiff -r f3acbafac161 -r 197ecd81a57f foo\x1b[0m (esc)
+  \x1b[0;31;1m--- a/foo\x1b[0m (esc)
+  \x1b[0;32;1m+++ b/foo\x1b[0m (esc)
+  \x1b[0;35m@@ -10,3 +10,4 @@\x1b[0m (esc)
+   foo-9
+   foo-10
+   foo-11
+  \x1b[0;32m+line\x1b[0m (esc)
+
+
   $ cd ..
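The %m hunk earlier in this file checks that "hg export -o %m.patch" derives the patch file name from the first line of the commit message, keeping only alphanumerics and underscores. A minimal sketch of that mapping, which reproduces the expected file name shown above but is only an approximation of the logic around cmdutil.makefilename, not a copy of it:

    import re

    def format_m(description):
        # keep [A-Za-z0-9_] from the first line of the commit message,
        # replace everything else with '_'
        firstline = description.splitlines()[0] if description else ''
        return re.sub(r'\W', '_', firstline)

    msg = (' !"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'
           '[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~')
    print(format_m(msg) + '.patch')
    # matches the file name printed by "hg export -v -o %m.patch tip" above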
--- a/tests/test-extdiff.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-extdiff.t	Fri Oct 19 01:34:50 2012 -0500
@@ -47,7 +47,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help falabala" to show more info
+  use "hg -v help falabala" to show the global options
 
   $ hg ci -d '0 0' -mtest1
 
--- a/tests/test-extension.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-extension.t	Fri Oct 19 01:34:50 2012 -0500
@@ -269,7 +269,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help extdiff" to show more info
+  use "hg -v help extdiff" to show the global options
 
   $ hg help --extension extdiff
   extdiff extension - command to allow external programs to compare revisions
@@ -372,7 +372,7 @@
   
   multirevs command
   
-  use "hg -v help multirevs" to show more info
+  use "hg -v help multirevs" to show the global options
 
   $ hg multirevs
   hg multirevs: invalid arguments
--- a/tests/test-filelog.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-filelog.py	Fri Oct 19 01:34:50 2012 -0500
@@ -12,7 +12,7 @@
 
 def addrev(text, renamed=False):
     if renamed:
-        # data doesnt matter. Just make sure filelog.renamed() returns True
+        # data doesn't matter. Just make sure filelog.renamed() returns True
         meta = dict(copyrev=hex(nullid), copy='bar')
     else:
         meta = {}
--- a/tests/test-fncache.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-fncache.t	Fri Oct 19 01:34:50 2012 -0500
@@ -117,3 +117,64 @@
   .hg/undo.dirstate
   $ cd ..
 
+Encoding of reserved / long paths in the store
+
+  $ hg init r2
+  $ cd r2
+  $ cat <<EOF > .hg/hgrc
+  > [ui]
+  > portablefilenames = ignore
+  > EOF
+
+  $ hg import -q --bypass - <<EOF
+  > # HG changeset patch
+  > # User test
+  > # Date 0 0
+  > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
+  > # Parent  0000000000000000000000000000000000000000
+  > 1
+  > 
+  > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
+  > new file mode 100644
+  > --- /dev/null
+  > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
+  > @@ -0,0 +1,1 @@
+  > +foo
+  > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
+  > new file mode 100644
+  > --- /dev/null
+  > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
+  > @@ -0,0 +1,1 @@
+  > +foo
+  > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
+  > new file mode 100644
+  > --- /dev/null
+  > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt	
+  > @@ -0,0 +1,1 @@
+  > +foo
+  > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
+  > new file mode 100644
+  > --- /dev/null
+  > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
+  > @@ -0,0 +1,1 @@
+  > +foo
+  > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
+  > new file mode 100644
+  > --- /dev/null
+  > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
+  > @@ -0,0 +1,1 @@
+  > +foo
+  > EOF
+
+  $ find .hg/store -name *.i  | sort
+  .hg/store/00changelog.i
+  .hg/store/00manifest.i
+  .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
+  .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
+  .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
+  .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
+  .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
+
+  $ cd ..
+
+
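The new test-fncache.t hunk exercises the store's path encoding for names that are reserved or too long on Windows. Three patterns are visible in the listing above: components whose base name is a reserved device name (aux, prn, nul, com3, ...) get their third character hex-escaped (aux becomes au~78, com3 becomes co~6d3), uppercase letters are encoded as an underscore plus the lowercase letter (PRN becomes _p_r_n), and paths that remain over the length limit are moved under .hg/store/dh/ with a hash-based suffix. A small sketch of the reserved-name escaping only, written from the observed pattern rather than copied from mercurial/store.py:

    _winreserved = (('con', 'prn', 'aux', 'nul')
                    + tuple('com%d' % i for i in range(1, 10))
                    + tuple('lpt%d' % i for i in range(1, 10)))

    def escapereserved(component):
        # hex-escape the third character of a reserved base name, e.g. aux -> au~78
        base = component.split('.', 1)[0]
        if base in _winreserved:
            return component[:2] + '~%02x' % ord(component[2]) + component[3:]
        return component

    for comp in ('aux', 'prn', 'com3', 'nul', 'lpt', 'coma', 'normal.c'):
        print('%-8s -> %s' % (comp, escapereserved(comp)))
    # aux -> au~78, prn -> pr~6e, com3 -> co~6d3, nul -> nu~6c;
    # lpt, coma and normal.c are left alone, matching the .i paths above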
--- a/tests/test-glog.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-glog.t	Fri Oct 19 01:34:50 2012 -0500
@@ -83,7 +83,7 @@
 
   $ cat > printrevset.py <<EOF
   > from mercurial import extensions, revset, commands, cmdutil
-  >  
+  > 
   > def uisetup(ui):
   >     def printrevset(orig, ui, repo, *pats, **opts):
   >         if opts.get('print_revset'):
@@ -2044,20 +2044,40 @@
 Test --hidden
 
   $ cat > $HGTMP/testhidden.py << EOF
+  > from mercurial import util
   > def reposetup(ui, repo):
   >     for line in repo.opener('hidden'):
   >         ctx = repo[line.strip()]
   >         repo.hiddenrevs.add(ctx.rev())
+  >     if repo.revs('children(%ld) - %ld',  repo.hiddenrevs,  repo.hiddenrevs):
+  >       raise util.Abort('hidden revision with children!')
   > EOF
   $ echo '[extensions]' >> .hg/hgrc
   $ echo "hidden=$HGTMP/testhidden.py" >> .hg/hgrc
-  $ hg id --debug -i -r 0 > .hg/hidden
+  $ hg id --debug -i -r 8 > .hg/hidden
   $ testlog
   []
   []
   $ testlog --hidden
   []
   []
+  $ hg glog --template '{rev} {desc}\n'
+  o  7 Added tag foo-bar for changeset fc281d8ff18d
+  |
+  o    6 merge 5 and 4
+  |\
+  | o  5 add another e
+  | |
+  o |  4 mv dir/b e
+  |/
+  @  3 mv a b; add d
+  |
+  o  2 mv b dir/b
+  |
+  o  1 copy a b
+  |
+  o  0 add a
+  
 
 A template without trailing newline should do something sane
 
@@ -2066,6 +2086,8 @@
   |
   o  1 copy a b
   |
+  o  0 add a
+  
 
 Extra newlines must be preserved
 
@@ -2076,6 +2098,9 @@
   o
   |  1 copy a b
   |
+  o
+     0 add a
+  
 
 The almost-empty template should do something sane too ...
 
@@ -2084,5 +2109,7 @@
   |
   o
   |
+  o
+  
 
   $ cd ..
--- a/tests/test-grep.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-grep.t	Fri Oct 19 01:34:50 2012 -0500
@@ -32,9 +32,9 @@
 
   $ hg --config extensions.color= grep --config color.mode=ansi \
   >     --color=always port port
-  port:4:ex\x1b[0;31;1mport\x1b[0m (esc)
-  port:4:va\x1b[0;31;1mport\x1b[0might (esc)
-  port:4:im\x1b[0;31;1mport\x1b[0m/export (esc)
+  \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mex\x1b[0;31;1mport\x1b[0m (esc)
+  \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mva\x1b[0;31;1mport\x1b[0might (esc)
+  \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mim\x1b[0;31;1mport\x1b[0m/export (esc)
 
 all
 
@@ -51,6 +51,8 @@
 
 other
 
+  $ hg grep -l port port
+  port:4
   $ hg grep import port
   port:4:import/export
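The updated expected output for "hg grep --color=always" above now colors the file name, separator and line number as well as the match. The \x1b[...m sequences are ordinary ANSI SGR codes; reconstructing the first result line by hand, with the SGR numbers taken directly from the expected output (35 magenta, 36 cyan, 32 green, 31;1 bold red, 0 reset) and the role comments reflecting what appear to be the color extension's grep.* defaults:

    CSI = '\x1b['                                   # ANSI Control Sequence Introducer
    line = (CSI + '0;35m' + 'port' + CSI + '0m'     # file name
            + CSI + '0;36m' + ':' + CSI + '0m'      # separator
            + CSI + '0;32m' + '4' + CSI + '0m'      # line number
            + CSI + '0;36m' + ':' + CSI + '0m'
            + 'ex' + CSI + '0;31;1m' + 'port' + CSI + '0m')  # highlighted match
    print(repr(line))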
 
--- a/tests/test-help.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-help.t	Fri Oct 19 01:34:50 2012 -0500
@@ -279,7 +279,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help add" to show more info
+  use "hg -v help add" to show more complete help and the global options
 
 Verbose help for add
 
@@ -392,9 +392,12 @@
       manifest, and tracked files, as well as the integrity of their crosslinks
       and indices.
   
+      Please see http://mercurial.selenic.com/wiki/RepositoryCorruption for more
+      information about recovery from corruption of the repository.
+  
       Returns 0 on success, 1 if errors are encountered.
   
-  use "hg -v help verify" to show more info
+  use "hg -v help verify" to show the global options
 
   $ hg help diff
   hg diff [OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...
@@ -447,7 +450,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help diff" to show more info
+  use "hg -v help diff" to show more complete help and the global options
 
   $ hg help status
   hg status [OPTION]... [FILE]...
@@ -510,7 +513,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help status" to show more info
+  use "hg -v help status" to show more complete help and the global options
 
   $ hg -q help status
   hg status [OPTION]... [FILE]...
@@ -596,7 +599,7 @@
   
   (no help text available)
   
-  use "hg -v help nohelp" to show more info
+  use "hg -v help nohelp" to show the global options
 
   $ hg help -k nohelp
   Commands:
@@ -802,3 +805,76 @@
   
    qclone clone main and patch repository at same time
 
+Test omit indicating for help
+
+  $ cat > addverboseitems.py <<EOF
+  > '''extension to test omit indicating.
+  > 
+  > This paragraph is never omitted (for extension)
+  > 
+  > .. container:: verbose
+  > 
+  >   This paragraph is omitted,
+  >   if :hg:\`help\` is invoked without \`\`-v\`\` (for extension)
+  > 
+  > This paragraph is never omitted, too (for extension)
+  > '''
+  > 
+  > from mercurial import help, commands
+  > testtopic = """This paragraph is never omitted (for topic).
+  > 
+  > .. container:: verbose
+  > 
+  >   This paragraph is omitted,
+  >   if :hg:\`help\` is invoked without \`\`-v\`\` (for topic)
+  > 
+  > This paragraph is never omitted, too (for topic)
+  > """
+  > def extsetup(ui):
+  >     help.helptable.append((["topic-containing-verbose"],
+  >                            "This is the topic to test omit indicating.",
+  >                            lambda : testtopic))
+  > EOF
+  $ echo '[extensions]' >> $HGRCPATH
+  $ echo "addverboseitems = `pwd`/addverboseitems.py" >> $HGRCPATH
+  $ hg help addverboseitems
+  addverboseitems extension - extension to test omit indicating.
+  
+  This paragraph is never omitted (for extension)
+  
+  This paragraph is never omitted, too (for extension)
+  
+  use "hg help -v addverboseitems" to show more complete help
+  
+  no commands defined
+  $ hg help -v addverboseitems
+  addverboseitems extension - extension to test omit indicating.
+  
+  This paragraph is never omitted (for extension)
+  
+  This paragraph is omitted, if "hg help" is invoked without "-v" (for extension)
+  
+  This paragraph is never omitted, too (for extension)
+  
+  no commands defined
+  $ hg help topic-containing-verbose
+  This is the topic to test omit indicating.
+  
+      This paragraph is never omitted (for topic).
+  
+      This paragraph is never omitted, too (for topic)
+  
+  use "hg help -v topic-containing-verbose" to show more complete help
+  $ hg help -v topic-containing-verbose
+  This is the topic to test omit indicating.
+  
+      This paragraph is never omitted (for topic).
+  
+      This paragraph is omitted, if "hg help" is invoked without "-v" (for topic)
+  
+      This paragraph is never omitted, too (for topic)
+
+Test usage of section marks in help documents
+
+  $ cd "$TESTDIR"/../doc
+  $ python check-seclevel.py
--- a/tests/test-hgweb-commands.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-hgweb-commands.t	Fri Oct 19 01:34:50 2012 -0500
@@ -27,7 +27,10 @@
   $ hg branch unstable
   marked working directory as branch unstable
   (branches are permanent and global, did you want a bookmark?)
-  $ hg ci -Ambranch
+  >>> open('msg', 'wb').write('branch commit with null character: \0\n')
+  $ hg ci -l msg
+  $ rm msg
+
   $ echo [graph] >> .hg/hgrc
   $ echo default.width = 3 >> .hg/hgrc
   $ echo stable.width = 3 >> .hg/hgrc
@@ -35,7 +38,7 @@
   $ hg serve --config server.uncompressed=False -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log
   $ cat hg.pid >> $DAEMON_PIDS
   $ hg log -G --template '{rev}:{node|short} {desc}\n'
-  @  3:ba87b23d29ca branch
+  @  3:cad8025a2e87 branch commit with null character: \x00 (esc)
   |
   o  2:1d22e65f027e branch
   |
@@ -59,9 +62,9 @@
    <updated>1970-01-01T00:00:00+00:00</updated>
   
    <entry>
-    <title>branch</title>
-    <id>http://*:$HGPORT/#changeset-ba87b23d29ca67a305625d81a20ac279c1e3f444</id> (glob)
-    <link href="http://*:$HGPORT/rev/ba87b23d29ca"/> (glob)
+    <title>branch commit with null character: </title>
+    <id>http://*:$HGPORT/#changeset-cad8025a2e87f88c06259790adfa15acb4080123</id> (glob)
+    <link href="http://*:$HGPORT/rev/cad8025a2e87"/> (glob)
     <author>
      <name>test</name>
      <email>&#116;&#101;&#115;&#116;</email>
@@ -70,7 +73,7 @@
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
      <div xmlns="http://www.w3.org/1999/xhtml">
-      <pre xml:space="preserve">branch</pre>
+      <pre xml:space="preserve">branch commit with null character: </pre>
      </div>
     </content>
    </entry>
@@ -137,9 +140,9 @@
    <updated>1970-01-01T00:00:00+00:00</updated>
   
    <entry>
-    <title>branch</title>
-    <id>http://*:$HGPORT/#changeset-ba87b23d29ca67a305625d81a20ac279c1e3f444</id> (glob)
-    <link href="http://*:$HGPORT/rev/ba87b23d29ca"/> (glob)
+    <title>branch commit with null character: </title>
+    <id>http://*:$HGPORT/#changeset-cad8025a2e87f88c06259790adfa15acb4080123</id> (glob)
+    <link href="http://*:$HGPORT/rev/cad8025a2e87"/> (glob)
     <author>
      <name>test</name>
      <email>&#116;&#101;&#115;&#116;</email>
@@ -148,7 +151,7 @@
     <published>1970-01-01T00:00:00+00:00</published>
     <content type="xhtml">
      <div xmlns="http://www.w3.org/1999/xhtml">
-      <pre xml:space="preserve">branch</pre>
+      <pre xml:space="preserve">branch commit with null character: </pre>
      </div>
     </content>
    </entry>
@@ -257,14 +260,14 @@
   </div>
   <ul>
   <li class="active">log</li>
-  <li><a href="/graph/ba87b23d29ca">graph</a></li>
+  <li><a href="/graph/cad8025a2e87">graph</a></li>
   <li><a href="/tags">tags</a></li>
   <li><a href="/bookmarks">bookmarks</a></li>
   <li><a href="/branches">branches</a></li>
   </ul>
   <ul>
-  <li><a href="/rev/ba87b23d29ca">changeset</a></li>
-  <li><a href="/file/ba87b23d29ca">browse</a></li>
+  <li><a href="/rev/cad8025a2e87">changeset</a></li>
+  <li><a href="/file/cad8025a2e87">browse</a></li>
   </ul>
   <ul>
   
@@ -300,7 +303,7 @@
    <tr class="parity0">
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
     <td class="author">test</td>
-    <td class="description"><a href="/rev/ba87b23d29ca">branch</a><span class="branchhead">unstable</span> <span class="tag">tip</span> <span class="tag">something</span> </td>
+    <td class="description"><a href="/rev/cad8025a2e87">branch commit with null character: </a><span class="branchhead">unstable</span> <span class="tag">tip</span> <span class="tag">something</span> </td>
    </tr>
    <tr class="parity1">
     <td class="age">Thu, 01 Jan 1970 00:00:00 +0000</td>
@@ -701,19 +704,19 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'raw-tags'
   200 Script output follows
   
-  tip	ba87b23d29ca67a305625d81a20ac279c1e3f444
+  tip	cad8025a2e87f88c06259790adfa15acb4080123
   1.0	2ef0ac749a14e4f57a5a822464a0902c6f7f448f
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'raw-branches'
   200 Script output follows
   
-  unstable	ba87b23d29ca67a305625d81a20ac279c1e3f444	open
+  unstable	cad8025a2e87f88c06259790adfa15acb4080123	open
   stable	1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe	inactive
   default	a4f92ed23982be056b9852de5dfe873eaac7f0de	inactive
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'raw-bookmarks'
   200 Script output follows
   
   anotherthing	2ef0ac749a14e4f57a5a822464a0902c6f7f448f
-  something	ba87b23d29ca67a305625d81a20ac279c1e3f444
+  something	cad8025a2e87f88c06259790adfa15acb4080123
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'summary/?style=gitweb'
   200 Script output follows
   
@@ -753,7 +756,7 @@
   <a href="/tags?style=gitweb">tags</a> |
   <a href="/bookmarks?style=gitweb">bookmarks</a> |
   <a href="/branches?style=gitweb">branches</a> |
-  <a href="/file/ba87b23d29ca?style=gitweb">files</a> |
+  <a href="/file/cad8025a2e87?style=gitweb">files</a> |
   <a href="/help?style=gitweb">help</a>
   <br/>
   </div>
@@ -772,14 +775,14 @@
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
   <td><i>test</i></td>
   <td>
-  <a class="list" href="/rev/ba87b23d29ca?style=gitweb">
-  <b>branch</b>
+  <a class="list" href="/rev/cad8025a2e87?style=gitweb">
+  <b>branch commit with null character: </b>
   <span class="logtags"><span class="branchtag" title="unstable">unstable</span> <span class="tagtag" title="tip">tip</span> <span class="bookmarktag" title="something">something</span> </span>
   </a>
   </td>
   <td class="link" nowrap>
-  <a href="/rev/ba87b23d29ca?style=gitweb">changeset</a> |
-  <a href="/file/ba87b23d29ca?style=gitweb">files</a>
+  <a href="/rev/cad8025a2e87?style=gitweb">changeset</a> |
+  <a href="/file/cad8025a2e87?style=gitweb">files</a>
   </td>
   </tr>
   <tr class="parity1">
@@ -856,11 +859,11 @@
   </tr>
   <tr class="parity1">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/rev/ba87b23d29ca?style=gitweb"><b>something</b></a></td>
+  <td><a class="list" href="/rev/cad8025a2e87?style=gitweb"><b>something</b></a></td>
   <td class="link">
-  <a href="/rev/ba87b23d29ca?style=gitweb">changeset</a> |
-  <a href="/log/ba87b23d29ca?style=gitweb">changelog</a> |
-  <a href="/file/ba87b23d29ca?style=gitweb">files</a>
+  <a href="/rev/cad8025a2e87?style=gitweb">changeset</a> |
+  <a href="/log/cad8025a2e87?style=gitweb">changelog</a> |
+  <a href="/file/cad8025a2e87?style=gitweb">files</a>
   </td>
   </tr>
   <tr class="light"><td colspan="3"><a class="list" href="/bookmarks?style=gitweb">...</a></td></tr>
@@ -871,12 +874,12 @@
   
   <tr class="parity0">
   <td class="age"><i class="age">Thu, 01 Jan 1970 00:00:00 +0000</i></td>
-  <td><a class="list" href="/shortlog/ba87b23d29ca?style=gitweb"><b>ba87b23d29ca</b></a></td>
+  <td><a class="list" href="/shortlog/cad8025a2e87?style=gitweb"><b>cad8025a2e87</b></a></td>
   <td class="">unstable</td>
   <td class="link">
-  <a href="/changeset/ba87b23d29ca?style=gitweb">changeset</a> |
-  <a href="/log/ba87b23d29ca?style=gitweb">changelog</a> |
-  <a href="/file/ba87b23d29ca?style=gitweb">files</a>
+  <a href="/changeset/cad8025a2e87?style=gitweb">changeset</a> |
+  <a href="/log/cad8025a2e87?style=gitweb">changelog</a> |
+  <a href="/file/cad8025a2e87?style=gitweb">files</a>
   </td>
   </tr>
   <tr class="parity1">
@@ -955,7 +958,7 @@
   <a href="/tags?style=gitweb">tags</a> |
   <a href="/bookmarks?style=gitweb">bookmarks</a> |
   <a href="/branches?style=gitweb">branches</a> |
-  <a href="/file/ba87b23d29ca?style=gitweb">files</a> |
+  <a href="/file/cad8025a2e87?style=gitweb">files</a> |
   <a href="/help?style=gitweb">help</a>
   <br/>
   <a href="/graph/3?style=gitweb&revcount=30">less</a>
@@ -976,7 +979,7 @@
   <script>
   <!-- hide script content
   
-  var data = [["ba87b23d29ca", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
+  var data = [["cad8025a2e87", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch commit with null character: \x00", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]]; (esc)
   var graph = new Graph();
   graph.scale(39);
   
@@ -1056,13 +1059,13 @@
   
   
   # HG graph
-  # Node ID ba87b23d29ca67a305625d81a20ac279c1e3f444
+  # Node ID cad8025a2e87f88c06259790adfa15acb4080123
   # Rows shown 4
   
-  changeset:   ba87b23d29ca
+  changeset:   cad8025a2e87
   user:        test
   date:        1970-01-01
-  summary:     branch
+  summary:     branch commit with null character: \x00 (esc)
   branch:      unstable
   tag:         tip
   bookmark:    something
@@ -1111,7 +1114,7 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=heads'
   200 Script output follows
   
-  ba87b23d29ca67a305625d81a20ac279c1e3f444
+  cad8025a2e87f88c06259790adfa15acb4080123
 
 branches
 
@@ -1125,11 +1128,11 @@
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT '?cmd=changegroup&roots=0000000000000000000000000000000000000000'
   200 Script output follows
   
-  x\x9c\xbdTMHTQ\x14\x1e\xfc\xef\xd9&\x10\x11*x\x88\x81\x9aN\xf7\xddw\xdf{\xf7Y\x0efR\xb4\x11\xb1U\x82\xc5\xfd\x9d!c\x06\x9c'd\xa0\x99X\x82\x92i\xablUZ-*\x08\x84\x82\x02KkQ\xf8\x13\xe4\xaa\x8dn\x94\x906)\xd5B\x02\xeb\xbe\x9c\x01\x85\xc9\x996\x1d\xf8x\x97{\xefy\xe7;\xe7|\xe7\x06\x02\x81\xb1\xe0\xda\x13\xefN\xd1\xca\x8f\xcb-\xbde\xfc\xeepU\xecJ\xc3\xcd@\x86\x96\xc6\xb7^`\xe9"[H\xe4\x18T\x1a\x16p]\xc3\x96\x14\x13\xcbt\xa1tM\x0c\x1c\x0b2,M\xcd\x13qO\x03:\xd089"c1\xcd\x87FI\\\xa8\xbf|\xbc\xbf\x11\\p{_\xe5\xb6\xddn^j\xdd\xec\x0f=z\xb7\xb6\x94)\xebT\xbe\x89\xa3 (esc)
-  \x1f6!6p\x00\xc4H`L\x18\x83\xdc\xa6\x8c\x0b\x84\x01\x06\x06s\xb84\x1cn2F4u\x19*\xd4*\x14\x04#a\x8f\x84\xe3\xfe^\xc8OS\xa1\xfc8\xe7\x82\xebj[7\x82@\x97\xb1v\x9dEH4,\xe2\xc2\xd3\xa1\x90\x800\x07\xb9\xc4@\xea\xee\xe4\xc1\xd2\xcf\xe7\xb3\xba[\xf2\xf6X\xdd]C\x1d\x05\xf3\x87\x1f,l\xeeBt\x87\xa5\xf2\xdd\x9e\x90*\xa9kC\xac"!\x17\x12)!c\x000\xd7\x05&\xb5\xa9\xc5\xa8-Ln (esc)
-  \x0c|\xf2A\x85\x1a\x85bUy\x9d\xb6\x93(\x8b\xd4\xc4=B/\x8a?\rP'G\x15\x98B\xde\xd6\xa9Zy/\xfb'j+f\xc2\xe3\xb9\xb4\xf5\xea\x98\xf6\xa6sz\xf9{\xc3.\xa4vX*\xdf\x04\x0f\xff[\xb4\x8dGG4\xc1$\xe1:\xb9\xbaq\xf2\xeb\xa9\xfd\xebM\xa3\xc5?\x07\xce\xdc\xda\xc0\xf9\xcd\xef\xbf\xa5\xd3g\xd2\xd2\xa8\xa5uKu\x01(8$\xa6k@\x02(D\x16\x80\x00\x99\x82\x08\xa5\r\x81(t\\f`\xea\x02\xce\xb5\x7f\xba\xac\x02\x8c\\x\x98\x9f\xd5\xb7:0W\xdd6\xbf\xd2\xd3s\xa0k\xbd\xeb\xd8L\xa6	\xa5Q\x86\x91Pc\x80\x98\x8cB,L\x07#\x80\x04\x82\xb6\x8d)\xa3\x08X\x02\x00\xear\x0c-`b\x9b\x18>\xa1\x1b\xf9g\xe9@\xd1\xe9\xca_US{G\xb3\x9f?\x9b\x8d\xd6\x86zR\x91LE\xe8/\xdd& (esc)
-  C
-  \xd5~u\xb0e#\x08\r\x8c\xd5\xf83\x93\x01B\x95\xe8\x1c\x03\xdb\x92s*\x99`\xcc0\x88\xb4d\xb2\xbd\x85\xc9,\x14\xb7\xf1\xd9\xf2\xe5Ku\x8d\xf5rp\xb6\xee\\\xe0\xc5\xa7C\xd9\xd7\xefe\xda\xe94\xc5\xaa\xde>\x8a\x02I\xcb!\x16\xc1\x10"\x1b\x11\xe0\x02\xc8l\xe9H\x84\xb0\xf4\xa78\xc9-\xf1(\xa9\x15\x0f.\x8c\x8fT\x16\x965\xe9'\xbe\xac6\xaeLtN\x0f\x0e/fJ-\x8d\x08s\x12#\xe7[\xfe\xff\x0b\x17\xb9\xc6KK\xfa\xa2o\xa7\x1e\x87\xfaKb\x8b\xaf?\xcc\xed{z>\xd3\xb8\xbb\xcc}\x8eB\x01\x89\xc6\xbc\x88hO\xa6\x15\xf8\rr4\xb3\xe5 (no-eol) (esc)
+  x\x9c\xbd\x94MHTQ\x14\xc7'+\x9d\xc66\x81\x89P\xc1\xa3\x14\xcct\xba\xef\xbe\xfb\xde\xbb\xcfr0\xb3"\x02\x11[%\x98\xdcO\xa7\xd2\x19\x98y\xd2\x07h"\x96\xa0e\xda\xa6lUY-\xca\x08\xa2\x82\x16\x96\xd1\xa2\xf0#\xc8\x95\x1b\xdd$!m*"\xc8\x82\xea\xbe\x9c\x01\x85\xc9\x996\x1d\xf8\xc1\xe3~\x9d\xff9\xef\x7f\xaf\xcf\xe7\xbb\x19\xfc4\xec^\xcb\x9b\xfbz\xa6\xbe\xb3\x90_\xef/\x8d\x9e\xad\xbe\xe4\xcb0\xd2\xec\xad\x12X:\xc8\x12\x12\xd9:\x95\xba	\x1cG\xb7$\xc5\xc44\x1c(\x1d\x03\x03\xdb\x84\x0cK#\xe0\x8a\xb8\x1b\x00\x1a\x08p\xb2SF\xa3\x01\x8f\x00%q\xa1Ny{k!8\xe5t>[{\xe2j\xddl\xc3\xcf\xee\xd0\xddW\x9ff3U\x9djobj\xbb\x87E\x88\x05l\x001\x12\x18\x13\xc6 \xb7(\xe3\x02a\x80\x81\xcel.u\x9b\x1b\x8c\x91\x80Z\x0c\x15\x15 (esc)
+  \x7f0\xdc\xe4\x92\xa6\xb87\x16\xf2\xcaT\x14\xef\xe1\\pM\r (no-eol) (esc)
+  kz\x10h2\x1a\xd3X\x98D\x9aD\\\xb8\x1a\x14\x12\x10f#\x87\xe8H\xad\x1d\xd9\xb2\xf5}cV{}\xf6:\xb3\xbd\xad\xaf\xd5?\xb9\xe3\xf6\xd4\xcf\x15\x84.\x8bT{\x97\x16\xa4Z\xeaX\x10\xabL\xc8\x81DJ\xc8\x18\x00\xccq\x80A-j2j	\x83\x1b\x02\x03O|PQ\xae\xc8W\x9d\xd7h\x8cDX\xb8<\xee\x12\xda,\xfe\xfc\x005\xb3K\xc1\x14\xd9\x8b\xb3^C\xc7\xa6\xb3\xea\x83\xdd\xdf.d\x17]\xe9\xbf\xff}\xe3\xf0#\xff\xaam+\x88Z\x16\xa9\xf6&tT+\xf2\x96\xe8h\x8d$\x94\xa8\xf1}\x8aC\x8a\xc2\xc59\x8dE[Z\x8e\xb9\xda\xc9cnX\x8b\xb467{\xad\x8e\x11\xe6\x8aX\xb9\x96L52\xbf\xb0\xff\xe3\x81M\x9fk\x07\xf3\x7f\xf4\x1c\xbe\xbc\x80s\xea^\x7fY\xc1\xca\xcb"\x8d\xbb\x1a\x16]\xea\x83\x82Cb8:$\x80Bd\x02\x08\x90!\x88P^\x12\x88B\xdba:\xa6\x0e\xe0<\xf0O\x8bU\x82\x81\xe3wr\xb2\xba\xe6{&\xcaNL\xceutln\xfb\xdc\xb6{,\xd3\x82\xd28IO\xb8\xd7G\x0cF!\x16\x86\x8d\x11@\x02A\xcb\xc2\x94Q\x04L\x01\x00u8\x86&0\xb0EtO\xd0\xc5\x9c#\xb4'\xef`\xc9\xaf\xd2\xd1\xf5\x83\xab\x9f<\x1e\x8fT\x84:R\x89L%\xe8/\xee \x8a>E\x99\xd7\x1dlZ\x08B\x1dc\xf5\\0\x83\x01B\x95Im\x1d[\x92s*\x99`L\xd7\x894e qfn\xb2 (esc)
+  \xa5mh\xbc\xf8\xdd\xa9\xca\x9a*\xd9;^y\xd4\xf7t\xbah\xf5\xf9\x1b\x99\xfe\xe94\xcd*[zu\x05\x92\xa6ML\x82!D\x16"\xc0\x01\x90Y\xd2\x96\x08a\xe9\xdd\xfa\xa4\xb6\xc4#\xa6\xbexpjh\xa0$\xb7\xb0V\xdb\xfba\xbef\xee\xe1\xe9\x17\xbd\xfd3\x99JKc\xc25\x89+\xeaE\xce\xffK\x17>\xc7\xb7\x16tE^\x8e\xde\x0bu\x17Dg\x9e\xbf\x99\xd8\xf0\xa01\xd3\xbc+\xbc\x13k\x14~\x12\x89\xbaa\x11K\x96\xe5\xfb\r (no-eol) (esc)
+  \x95)\xbe\xf6 (no-eol) (esc)
 
 stream_out
 
@@ -1260,7 +1263,7 @@
 
 Stop and restart with HGENCODING=cp932 and preferuncompressed
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ HGENCODING=cp932 hg serve --config server.preferuncompressed=True -n test \
   >     -p $HGPORT -d --pid-file=hg.pid -E errors.log
   $ cat hg.pid >> $DAEMON_PIDS
@@ -1273,8 +1276,8 @@
 Graph json escape of multibyte character
 
   $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph/' \
-  >     | grep '^var data ='
-  var data = [["548001d11f45", [0, 1], [[0, 0, 1, -1, ""]], "\u80fd", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["ba87b23d29ca", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch", "test", "1970-01-01", ["unstable", false], [], []], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]];
+  >     | grep -a '^var data ='
+  var data = [["061dd13ba3c3", [0, 1], [[0, 0, 1, -1, ""]], "\\u80fd", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["cad8025a2e87", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch commit with null character: \x00", "test", "1970-01-01", ["unstable", false], [], []], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]]; (esc)
 
 capabilities
 
@@ -1288,7 +1291,7 @@
 ERRORS ENCOUNTERED
 
   $ cat errors.log
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
   $ cd ..
 
--- a/tests/test-hgweb-diffs.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-hgweb-diffs.t	Fri Oct 19 01:34:50 2012 -0500
@@ -291,7 +291,7 @@
 
 set up hgweb with git diffs
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ hg serve --config 'diff.git=1' -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
   $ cat hg.pid >> $DAEMON_PIDS
 
@@ -936,7 +936,7 @@
 
 raw revision with diff block numbers
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ cat <<EOF > .hg/hgrc
   > [web]
   > templates = rawdiff
@@ -973,7 +973,7 @@
   @@ -0,0 +1,1 @@
   +b
   
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ rm .hg/hgrc rawdiff/map
   $ rmdir rawdiff
   $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
--- a/tests/test-hgweb.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-hgweb.t	Fri Oct 19 01:34:50 2012 -0500
@@ -299,7 +299,7 @@
 
 stop and restart
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log
   $ cat hg.pid >> $DAEMON_PIDS
 
--- a/tests/test-hgwebdir.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-hgwebdir.t	Fri Oct 19 01:34:50 2012 -0500
@@ -657,7 +657,7 @@
 
 Test collapse = True
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ cat >> paths.conf <<EOF
   > [web]
   > collapse=true
@@ -723,7 +723,7 @@
 
 Test descend = False
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ cat >> paths.conf <<EOF
   > descend=false
   > EOF
@@ -784,7 +784,7 @@
   $ hg id http://localhost:$HGPORT1/astar
   8580ff50825a
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ cat > paths.conf <<EOF
   > [paths]
   > t/a = $root/a
@@ -812,7 +812,7 @@
 
 Test collapse = True
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ cat >> paths.conf <<EOF
   > [web]
   > collapse=true
@@ -837,7 +837,7 @@
 
 test descend = False
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ cat >> paths.conf <<EOF
   > descend=false
   > EOF
@@ -857,7 +857,7 @@
   /t/a/
   /t/b/
   
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ cat > paths.conf <<EOF
   > [paths]
   > nostore = $root/nostore
@@ -956,7 +956,7 @@
 
   $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT2 'a/rss-log' | grep '<guid'
       <guid isPermaLink="true">http://hg.example.com:8080/a/rev/8580ff50825a</guid>
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   $ hg serve --config web.baseurl=http://hg.example.com:8080/foo/ -p $HGPORT2 -d \
   >     --pid-file=hg.pid --webdir-conf collections.conf \
   >     -A access-collections-2.log -E error-collections-2.log
--- a/tests/test-highlight.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-highlight.t	Fri Oct 19 01:34:50 2012 -0500
@@ -545,7 +545,7 @@
 errors encountered
 
   $ cat errors.log
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
 Change the pygments style
 
@@ -579,7 +579,7 @@
   $ hg ci -Ama
   adding eucjp.txt
   $ hgserveget () {
-  >     "$TESTDIR/killdaemons.py"
+  >     "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   >     echo % HGENCODING="$1" hg serve
   >     HGENCODING="$1" hg serve -p $HGPORT -d -n test --pid-file=hg.pid -E errors.log
   >     cat hg.pid >> $DAEMON_PIDS
--- a/tests/test-histedit-bookmark-motion.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-histedit-bookmark-motion.t	Fri Oct 19 01:34:50 2012 -0500
@@ -84,17 +84,12 @@
   > pick 652413bf663e 5 f
   > EOF
   $ hg histedit 1 --commands commands.txt --verbose | grep histedit
-  histedit: Should update metadata for the following changes:
-  histedit:  055a42cdd887 to ae467701c500
-  histedit:     moving bookmarks three
-  histedit:  177f92b77385 to d36c0562f908
-  histedit:     moving bookmarks also-two, two
-  histedit:  652413bf663e to 0efacef7cb48
-  histedit:     moving bookmarks five
-  histedit:  d2ae7f538514 to cb9a9f314b8b
-  histedit:     moving bookmarks will-move-backwards
-  histedit:  e860deea161a to ae467701c500
-  histedit:     moving bookmarks four
+  histedit: moving bookmarks two from 177f92b77385 to d36c0562f908
+  histedit: moving bookmarks three from 055a42cdd887 to ae467701c500
+  histedit: moving bookmarks four from e860deea161a to ae467701c500
+  histedit: moving bookmarks also-two from 177f92b77385 to d36c0562f908
+  histedit: moving bookmarks will-move-backwards from d2ae7f538514 to cb9a9f314b8b
+  histedit: moving bookmarks five from 652413bf663e to 0efacef7cb48
   saved backup bundle to $TESTTMP/r/.hg/strip-backup/d2ae7f538514-backup.hg (glob)
   saved backup bundle to $TESTTMP/r/.hg/strip-backup/34a9919932c1-backup.hg (glob)
   $ hg log --graph
@@ -146,12 +141,9 @@
   > pick ae467701c500 2 d
   > EOF
   $ hg histedit 1 --commands commands.txt --verbose | grep histedit
-  histedit: Should update metadata for the following changes:
-  histedit:  0efacef7cb48 to 1be9c35b4cb2
-  histedit:     moving bookmarks five
-  histedit:  0efacef7cb48 to 7c044e3e33a9
-  histedit:  ae467701c500 to 1be9c35b4cb2
-  histedit:     moving bookmarks four, three
+  histedit: moving bookmarks three from ae467701c500 to 1be9c35b4cb2
+  histedit: moving bookmarks four from ae467701c500 to 1be9c35b4cb2
+  histedit: moving bookmarks five from 0efacef7cb48 to 1be9c35b4cb2
   saved backup bundle to $TESTTMP/r/.hg/strip-backup/ae467701c500-backup.hg (glob)
 
 We expect 'five' to stay at tip, since the tipmost bookmark is most
--- a/tests/test-histedit-edit.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-histedit-edit.t	Fri Oct 19 01:34:50 2012 -0500
@@ -66,6 +66,19 @@
   abort: Make changes as needed, you may commit or record as needed now.
   When you are finished, run hg histedit --continue to resume.
 
+Go to a random point and try to continue
+
+  $ hg id -n
+  3+
+  $ hg up 0
+  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+  $ HGEDITOR='echo foobaz > ' hg histedit --continue
+  abort: working directory parent is not a descendant of 055a42cdd887
+  (update to 055a42cdd887 or descendant and run "hg histedit --continue" again)
+  [255]
+  $ hg up 3
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
 commit, then edit the revision
   $ hg ci -m 'wat'
   created new head
--- a/tests/test-histedit-fold-non-commute.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-histedit-fold-non-commute.t	Fri Oct 19 01:34:50 2012 -0500
@@ -6,79 +6,96 @@
   > histedit=
   > EOF
 
-  $ EDITED="$TESTTMP/editedhistory"
-  $ cat > $EDITED <<EOF
-  > pick 177f92b77385 c
-  > pick 055a42cdd887 d
-  > fold bfa474341cc9 does not commute with e
-  > pick e860deea161a e
-  > pick 652413bf663e f
-  > EOF
   $ initrepo ()
   > {
   >     hg init $1
   >     cd $1
   >     for x in a b c d e f ; do
+  >         echo $x$x$x$x$x > $x
+  >         hg add $x
+  >     done
+  >     hg ci -m 'Initial commit'
+  >     for x in a b c d e f ; do
   >         echo $x > $x
-  >         hg add $x
   >         hg ci -m $x
   >     done
-  >     echo a >> e
+  >     echo 'I can haz no commute' > e
   >     hg ci -m 'does not commute with e'
   >     cd ..
   > }
 
   $ initrepo r
   $ cd r
+Initial generation of the command files
+
+  $ EDITED="$TESTTMP/editedhistory"
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 3 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 4 >> $EDITED
+  $ hg log --template 'fold {node|short} {rev} {desc}\n' -r 7 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 5 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 6 >> $EDITED
+  $ cat $EDITED
+  pick 65a9a84f33fd 3 c
+  pick 00f1c5383965 4 d
+  fold 39522b764e3d 7 does not commute with e
+  pick 7b4e2f4b7bcd 5 e
+  pick 500cac37a696 6 f
 
 log before edit
   $ hg log --graph
-  @  changeset:   6:bfa474341cc9
+  @  changeset:   7:39522b764e3d
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     does not commute with e
   |
-  o  changeset:   5:652413bf663e
+  o  changeset:   6:500cac37a696
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     f
   |
-  o  changeset:   4:e860deea161a
+  o  changeset:   5:7b4e2f4b7bcd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     e
   |
-  o  changeset:   3:055a42cdd887
+  o  changeset:   4:00f1c5383965
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     d
   |
-  o  changeset:   2:177f92b77385
+  o  changeset:   3:65a9a84f33fd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     c
   |
-  o  changeset:   1:d2ae7f538514
+  o  changeset:   2:da6535b52e45
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     b
   |
-  o  changeset:   0:cb9a9f314b8b
+  o  changeset:   1:c1f09da44841
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     a
+  |
+  o  changeset:   0:1715188a53c7
      user:        test
      date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     a
+     summary:     Initial commit
   
 
 edit the history
-  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle
-  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 3 2>&1 | fixbundle
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 
 fix up
-  $ echo a > e
-  $ hg add e
+  $ echo 'I can haz no commute' > e
+  $ hg resolve --mark e
   $ cat > cat.py <<EOF
   > import sys
   > print open(sys.argv[1]).read()
@@ -86,54 +103,74 @@
   > print
   > EOF
   $ HGEDITOR="python cat.py" hg histedit --continue 2>&1 | fixbundle | grep -v '2 files removed'
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   d
   ***
   does not commute with e
   
   
   
+  HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  HG: Leave message empty to abort commit.
+  HG: --
+  HG: user: test
+  HG: branch 'default'
+  HG: changed d
+  HG: changed e
+  
+  
+  
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  file e already exists
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 
 just continue this time
+  $ hg revert -r 'p1()' e
+  $ hg resolve --mark e
   $ hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
 log after edit
   $ hg log --graph
-  @  changeset:   4:f768fd60ca34
+  @  changeset:   5:2696a654c663
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     f
   |
-  o  changeset:   3:671efe372e33
+  o  changeset:   4:ec2c1cf833a8
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     d
   |
-  o  changeset:   2:177f92b77385
+  o  changeset:   3:65a9a84f33fd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     c
   |
-  o  changeset:   1:d2ae7f538514
+  o  changeset:   2:da6535b52e45
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     b
   |
-  o  changeset:   0:cb9a9f314b8b
+  o  changeset:   1:c1f09da44841
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     a
+  |
+  o  changeset:   0:1715188a53c7
      user:        test
      date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     a
+     summary:     Initial commit
   
 
 contents of e
   $ hg cat e
-  a
+  I can haz no commute
 
 manifest
   $ hg manifest
--- a/tests/test-histedit-fold.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-histedit-fold.t	Fri Oct 19 01:34:50 2012 -0500
@@ -66,6 +66,7 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -156,16 +157,21 @@
 
   $ HGEDITOR='python editor.py' hg histedit 1
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  patching file file
-  Hunk #1 FAILED at 2
-  1 out of 1 hunks FAILED -- saving rejects to file file.rej
+  merging file
+  warning: conflicts during merge.
+  merging file incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
   [255]
-There were conflicts, but we'll continue without resolving. This
+There were conflicts, so we keep the P1 content. This
 should effectively drop the changes from +6.
   $ hg status
+  M file
   ? editor.py
-  ? file.rej
+  ? file.orig
+  $ hg resolve -l
+  U file
+  $ hg revert -r 'p1()' file
+  $ hg resolve --mark file
   $ hg histedit --continue
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/*-backup.hg (glob)
@@ -216,12 +222,19 @@
   > EOF
   $ HGEDITOR="cat $EDITED >" hg histedit 1
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  patching file file
-  Hunk #1 FAILED at 2
-  1 out of 1 hunks FAILED -- saving rejects to file file.rej
+  merging file
+  warning: conflicts during merge.
+  merging file incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
   [255]
-  $ echo 5 >> file
+  $ cat > file << EOF
+  > 1
+  > 2
+  > 3
+  > 4
+  > 5
+  > EOF
+  $ hg resolve --mark file
   $ hg commit -m '+5.2'
   created new head
   $ echo 6 >> file
@@ -232,7 +245,51 @@
   +5.2
   ***
   +6
+  
+  
+  
+  HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  HG: Leave message empty to abort commit.
+  HG: --
+  HG: user: test
+  HG: branch 'default'
+  HG: changed file
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-backup.hg (glob)
+  $ hg log -G
+  @  changeset:   1:e29e02896e6c
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     +4
+  |
+  o  changeset:   0:0189ba417d34
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     1+2+3
+  
+  $ hg export tip
+  # HG changeset patch
+  # User test
+  # Date 0 0
+  # Node ID e29e02896e6c2b149d2228a0a64b4f3a9a4237f3
+  # Parent  0189ba417d34df9dda55f88b637dcae9917b5964
+  +4
+  ***
+  +5.2
+  ***
+  +6
+  
+  diff -r 0189ba417d34 -r e29e02896e6c file
+  --- a/file	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/file	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,3 +1,6 @@
+   1
+   2
+   3
+  +4
+  +5
+  +6
   $ cd ..
 
--- a/tests/test-histedit-non-commute-abort.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-histedit-non-commute-abort.t	Fri Oct 19 01:34:50 2012 -0500
@@ -73,23 +73,21 @@
 edit the history
   $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  remote changed e which local deleted
+  use (c)hanged version or leave (d)eleted? c
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 
-fix up (pre abort)
-  $ echo a > e
-  $ hg add e
-  $ hg histedit --continue 2>&1 | fixbundle
-  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  file e already exists
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
-  abort: Fix up the change and run hg histedit --continue
 
 abort the edit
   $ hg histedit --abort 2>&1 | fixbundle
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
 log after abort
+  $ hg resolve -l
   $ hg log --graph
   @  changeset:   6:bfa474341cc9
   |  tag:         tip
--- a/tests/test-histedit-non-commute.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-histedit-non-commute.t	Fri Oct 19 01:34:50 2012 -0500
@@ -6,24 +6,20 @@
   > histedit=
   > EOF
 
-  $ EDITED="$TESTTMP/editedhistory"
-  $ cat > $EDITED <<EOF
-  > pick 177f92b77385 c
-  > pick 055a42cdd887 d
-  > pick bfa474341cc9 does not commute with e
-  > pick e860deea161a e
-  > pick 652413bf663e f
-  > EOF
   $ initrepo ()
   > {
   >     hg init $1
   >     cd $1
   >     for x in a b c d e f ; do
+  >         echo $x$x$x$x$x > $x
+  >         hg add $x
+  >     done
+  >     hg ci -m 'Initial commit'
+  >     for x in a b c d e f ; do
   >         echo $x > $x
-  >         hg add $x
   >         hg ci -m $x
   >     done
-  >     echo a >> e
+  >     echo 'I can haz no commute' > e
   >     hg ci -m 'does not commute with e'
   >     cd ..
   > }
@@ -31,49 +27,71 @@
   $ initrepo r1
   $ cd r1
 
+Initial generation of the command files
+
+  $ EDITED="$TESTTMP/editedhistory"
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 3 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 4 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 7 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 5 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 6 >> $EDITED
+  $ cat $EDITED
+  pick 65a9a84f33fd 3 c
+  pick 00f1c5383965 4 d
+  pick 39522b764e3d 7 does not commute with e
+  pick 7b4e2f4b7bcd 5 e
+  pick 500cac37a696 6 f
+
 log before edit
   $ hg log --graph
-  @  changeset:   6:bfa474341cc9
+  @  changeset:   7:39522b764e3d
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     does not commute with e
   |
-  o  changeset:   5:652413bf663e
+  o  changeset:   6:500cac37a696
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     f
   |
-  o  changeset:   4:e860deea161a
+  o  changeset:   5:7b4e2f4b7bcd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     e
   |
-  o  changeset:   3:055a42cdd887
+  o  changeset:   4:00f1c5383965
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     d
   |
-  o  changeset:   2:177f92b77385
+  o  changeset:   3:65a9a84f33fd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     c
   |
-  o  changeset:   1:d2ae7f538514
+  o  changeset:   2:da6535b52e45
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     b
   |
-  o  changeset:   0:cb9a9f314b8b
+  o  changeset:   1:c1f09da44841
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     a
+  |
+  o  changeset:   0:1715188a53c7
      user:        test
      date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     a
+     summary:     Initial commit
   
 
 edit the history
-  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle
-  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 3 2>&1 | fixbundle
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 
 abort the edit
@@ -84,95 +102,113 @@
 second edit set
 
   $ hg log --graph
-  @  changeset:   6:bfa474341cc9
+  @  changeset:   7:39522b764e3d
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     does not commute with e
   |
-  o  changeset:   5:652413bf663e
+  o  changeset:   6:500cac37a696
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     f
   |
-  o  changeset:   4:e860deea161a
+  o  changeset:   5:7b4e2f4b7bcd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     e
   |
-  o  changeset:   3:055a42cdd887
+  o  changeset:   4:00f1c5383965
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     d
   |
-  o  changeset:   2:177f92b77385
+  o  changeset:   3:65a9a84f33fd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     c
   |
-  o  changeset:   1:d2ae7f538514
+  o  changeset:   2:da6535b52e45
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     b
   |
-  o  changeset:   0:cb9a9f314b8b
+  o  changeset:   1:c1f09da44841
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     a
+  |
+  o  changeset:   0:1715188a53c7
      user:        test
      date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     a
+     summary:     Initial commit
   
 
 edit the history
-  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle
-  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 3 2>&1 | fixbundle
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 
 fix up
-  $ echo a > e
-  $ hg add e
+  $ echo 'I can haz no commute' > e
+  $ hg resolve --mark e
   $ hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  file e already exists
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 
+This failure is caused by 7b4e2f4b7bcd "e" not rebasing its non-commutative
+former children.
+
 just continue this time
+  $ hg revert -r 'p1()' e
+  $ hg resolve --mark e
   $ hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
 log after edit
   $ hg log --graph
-  @  changeset:   5:9ab84894b459
+  @  changeset:   6:8e082d1a72ea
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     f
   |
-  o  changeset:   4:1fff3ae8199d
+  o  changeset:   5:13b04d775b81
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     does not commute with e
   |
-  o  changeset:   3:055a42cdd887
+  o  changeset:   4:00f1c5383965
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     d
   |
-  o  changeset:   2:177f92b77385
+  o  changeset:   3:65a9a84f33fd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     c
   |
-  o  changeset:   1:d2ae7f538514
+  o  changeset:   2:da6535b52e45
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     b
   |
-  o  changeset:   0:cb9a9f314b8b
+  o  changeset:   1:c1f09da44841
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     a
+  |
+  o  changeset:   0:1715188a53c7
      user:        test
      date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     a
+     summary:     Initial commit
   
 
 start over
@@ -181,64 +217,79 @@
 
   $ initrepo r2
   $ cd r2
-  $ cat > $EDITED <<EOF
-  > pick 177f92b77385 c
-  > pick 055a42cdd887 d
-  > mess bfa474341cc9 does not commute with e
-  > pick e860deea161a e
-  > pick 652413bf663e f
-  > EOF
+  $ rm $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 3 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 4 >> $EDITED
+  $ hg log --template 'mess {node|short} {rev} {desc}\n' -r 7 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 5 >> $EDITED
+  $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 6 >> $EDITED
+  $ cat $EDITED
+  pick 65a9a84f33fd 3 c
+  pick 00f1c5383965 4 d
+  mess 39522b764e3d 7 does not commute with e
+  pick 7b4e2f4b7bcd 5 e
+  pick 500cac37a696 6 f
 
 edit the history, this time with a mess action
-  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle
-  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  $ HGEDITOR="cat \"$EDITED\" > " hg histedit 3 2>&1 | fixbundle
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 
-  $ echo a > e
-  $ hg add e
+  $ echo 'I can haz no commute' > e
+  $ hg resolve --mark e
   $ HGEDITOR="cat \"$EDITED\" > " hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  file e already exists
-  1 out of 1 hunks FAILED -- saving rejects to file e.rej
+  merging e
+  warning: conflicts during merge.
+  merging e incomplete! (edit conflicts, then use 'hg resolve --mark')
   abort: Fix up the change and run hg histedit --continue
 second edit also fails, but just continue
+  $ hg revert -r 'p1()' e
+  $ hg resolve --mark e
   $ hg histedit --continue 2>&1 | fixbundle
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
 post message fix
   $ hg log --graph
-  @  changeset:   5:6459970fb49b
+  @  changeset:   6:f14da722aa4b
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     f
   |
-  o  changeset:   4:556f27c874b0
+  o  changeset:   5:382ff1adf0ed
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     pick 177f92b77385 c
+  |  summary:     pick 65a9a84f33fd 3 c
   |
-  o  changeset:   3:055a42cdd887
+  o  changeset:   4:00f1c5383965
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     d
   |
-  o  changeset:   2:177f92b77385
+  o  changeset:   3:65a9a84f33fd
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     c
   |
-  o  changeset:   1:d2ae7f538514
+  o  changeset:   2:da6535b52e45
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     b
   |
-  o  changeset:   0:cb9a9f314b8b
+  o  changeset:   1:c1f09da44841
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     a
+  |
+  o  changeset:   0:1715188a53c7
      user:        test
      date:        Thu Jan 01 00:00:00 1970 +0000
-     summary:     a
+     summary:     Initial commit
   
 
   $ cd ..
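
The repositories above all drive histedit the same way: a command file in the
'<action> <short hash> <rev> <summary>' format is generated from 'hg log
--template' output and handed to the editor. As a reading aid only, here is a
small stand-alone Python sketch that writes such a file; the helper name, the
output path and the entry list are illustrative (the entries simply mirror the
'cat $EDITED' listing above) and are not part of the test itself.

  # Illustrative only: write a histedit command file in the
  # "<action> <shorthash> <rev> <summary>" format shown above.
  def write_commands(path, entries):
      # entries: iterable of (action, shorthash, rev, summary) tuples
      with open(path, 'w') as f:
          for action, shorthash, rev, summary in entries:
              f.write('%s %s %d %s\n' % (action, shorthash, rev, summary))

  if __name__ == '__main__':
      # placeholder file name; the hashes just echo the listing above
      write_commands('editedhistory', [
          ('pick', '65a9a84f33fd', 3, 'c'),
          ('pick', '00f1c5383965', 4, 'd'),
          ('mess', '39522b764e3d', 7, 'does not commute with e'),
          ('pick', '7b4e2f4b7bcd', 5, 'e'),
          ('pick', '500cac37a696', 6, 'f'),
      ])
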
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-histedit-obsolete.t	Fri Oct 19 01:34:50 2012 -0500
@@ -0,0 +1,143 @@
+  $ . "$TESTDIR/histedit-helpers.sh"
+
+Enable obsolete
+
+  $ cat > ${TESTTMP}/obs.py << EOF
+  > import mercurial.obsolete
+  > mercurial.obsolete._enabled = True
+  > EOF
+
+  $ cat >> $HGRCPATH << EOF
+  > [ui]
+  > logtemplate= {rev}:{node|short} {desc|firstline}
+  > [phases]
+  > publish=False
+  > [extensions]
+  > histedit=
+  > 
+  > obs=${TESTTMP}/obs.py
+  > EOF
+
+  $ hg init base
+  $ cd base
+
+  $ for x in a b c d e f ; do
+  >     echo $x > $x
+  >     hg add $x
+  >     hg ci -m $x
+  > done
+
+  $ hg log --graph
+  @  5:652413bf663e f
+  |
+  o  4:e860deea161a e
+  |
+  o  3:055a42cdd887 d
+  |
+  o  2:177f92b77385 c
+  |
+  o  1:d2ae7f538514 b
+  |
+  o  0:cb9a9f314b8b a
+  
+
+  $ HGEDITOR=cat hg histedit 1
+  pick d2ae7f538514 1 b
+  pick 177f92b77385 2 c
+  pick 055a42cdd887 3 d
+  pick e860deea161a 4 e
+  pick 652413bf663e 5 f
+  
+  # Edit history between d2ae7f538514 and 652413bf663e
+  #
+  # Commands:
+  #  p, pick = use commit
+  #  e, edit = use commit, but stop for amending
+  #  f, fold = use commit, but fold into previous commit (combines N and N-1)
+  #  d, drop = remove commit from history
+  #  m, mess = edit message without changing commit content
+  #
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat > commands.txt <<EOF
+  > pick 177f92b77385 2 c
+  > drop d2ae7f538514 1 b
+  > pick 055a42cdd887 3 d
+  > fold e860deea161a 4 e
+  > pick 652413bf663e 5 f
+  > EOF
+  $ hg histedit 1 --commands commands.txt --verbose | grep histedit
+  saved backup bundle to $TESTTMP/base/.hg/strip-backup/34a9919932c1-backup.hg (glob)
+  $ hg log --graph --hidden
+  @  8:0efacef7cb48 f
+  |
+  o  7:ae467701c500 d
+  |
+  o  6:d36c0562f908 c
+  |
+  | x  5:652413bf663e f
+  | |
+  | x  4:e860deea161a e
+  | |
+  | x  3:055a42cdd887 d
+  | |
+  | x  2:177f92b77385 c
+  | |
+  | x  1:d2ae7f538514 b
+  |/
+  o  0:cb9a9f314b8b a
+  
+  $ hg debugobsolete
+  d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {'date': '* *', 'user': 'test'} (glob)
+  177f92b773850b59254aa5e923436f921b55483b d36c0562f908c692f5204d606d4ff3537d41f1bf 0 {'date': '* *', 'user': 'test'} (glob)
+  055a42cdd88768532f9cf79daa407fc8d138de9b ae467701c5006bf21ffcfdb555b3d6b63280b6b7 0 {'date': '* *', 'user': 'test'} (glob)
+  e860deea161a2f77de56603b340ebbb4536308ae ae467701c5006bf21ffcfdb555b3d6b63280b6b7 0 {'date': '* *', 'user': 'test'} (glob)
+  652413bf663ef2a641cab26574e46d5f5a64a55a 0efacef7cb481bf574f69075b82d044fdbe5c20f 0 {'date': '* *', 'user': 'test'} (glob)
+
+
+Ensure a hidden revision does not prevent histedit
+-------------------------------------------------
+
+create a hidden revision
+
+  $ cat > commands.txt <<EOF
+  > pick d36c0562f908 6 c
+  > drop ae467701c500 7 d
+  > pick 0efacef7cb48 8 f
+  > EOF
+  $ hg histedit 6 --commands commands.txt
+  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg log --graph
+  @  9:7c044e3e33a9 f
+  |
+  o  6:d36c0562f908 c
+  |
+  o  0:cb9a9f314b8b a
+  
+check that hidden revisions are ignored (6 has hidden children 7 and 8)
+
+  $ cat > commands.txt <<EOF
+  > pick d36c0562f908 6 c
+  > pick 7c044e3e33a9 8 f
+  > EOF
+  $ hg histedit 6 --commands commands.txt
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+
+Check that histedit respects phases
+=========================================
+
+(not directly related to this test file, but it does not deserve its own test case)
+
+  $ hg log -G
+  @  9:7c044e3e33a9 f
+  |
+  o  6:d36c0562f908 c
+  |
+  o  0:cb9a9f314b8b a
+  
+  $ hg ph -pv '.^'
+  phase changed for 2 changesets
+  $ hg histedit -r '.~2'
+  abort: cannot edit immutable changeset: cb9a9f314b8b
+  [255]
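
The 'hg debugobsolete' listing above shows the obsolescence markers created by
the drop and fold operations: each line names the rewritten (precursor) node,
zero or more successor nodes, a flags field and a metadata dict. As a reading
aid, a minimal Python sketch that splits such a line into those fields follows;
the field names are inferred from the printed layout and the sample line uses a
made-up concrete date in place of the globbed one, so this is not Mercurial's
own marker parser.

  import re

  # Illustrative only: parse one "hg debugobsolete" output line as printed
  # in the test above (precursor node, successor nodes, flags, metadata).
  LINE = re.compile(r'^(?P<nodes>(?:[0-9a-f]{40}\s+)+)(?P<flags>\d+)\s+(?P<meta>\{.*\})$')

  def parse_marker(line):
      m = LINE.match(line.strip())
      if not m:
          raise ValueError('unrecognized marker line: %r' % line)
      nodes = m.group('nodes').split()
      return {
          'precursor': nodes[0],        # the rewritten (old) changeset
          'successors': nodes[1:],      # empty list means the changeset was dropped
          'flags': int(m.group('flags')),
          'metadata': m.group('meta'),  # left as the printed dict string
      }

  # sample line built from the listing above, with a concrete date ('0 0')
  # substituted for the globbed '* *'
  example = ('177f92b773850b59254aa5e923436f921b55483b '
             'd36c0562f908c692f5204d606d4ff3537d41f1bf '
             "0 {'date': '0 0', 'user': 'test'}")
  print(parse_marker(example))
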
--- a/tests/test-histedit-revspec.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-histedit-revspec.t	Fri Oct 19 01:34:50 2012 -0500
@@ -60,3 +60,10 @@
   #
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
+Run on a revision that is not an ancestor of the current working directory.
+
+  $ hg up 2
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg histedit -r 4
+  nothing to edit
+  [1]
--- a/tests/test-hook.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-hook.t	Fri Oct 19 01:34:50 2012 -0500
@@ -529,12 +529,12 @@
   [1]
 
   $ echo '[hooks]' > .hg/hgrc
-  $ echo "update.ne = python:`pwd`/nonexisting.py:testhook" >> .hg/hgrc
+  $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc
   $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc
 
   $ hg up null
   loading update.ne hook failed:
-  abort: No such file or directory: $TESTTMP/d/repo/nonexisting.py
+  abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py
   [255]
 
   $ hg id
@@ -581,7 +581,7 @@
   cb9a9f314b8b
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
-make sure --verbose (and --quiet/--debug etc.) are propogated to the local ui
+make sure --verbose (and --quiet/--debug etc.) are propagated to the local ui
 that is passed to pre/post hooks
 
   $ echo '[hooks]' > .hg/hgrc
--- a/tests/test-http-branchmap.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-http-branchmap.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,7 +1,10 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
   $ hgserve() {
-  >     hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid -E errors.log -v $@
+  >     hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid \
+  >       -E errors.log -v $@ > startup.log
+  >     # Grepping hg serve stdout would hang on Windows
+  >     grep -v 'listening at' startup.log
   >     cat hg.pid >> "$DAEMON_PIDS"
   > }
   $ hg init a
@@ -12,7 +15,6 @@
   $ hg -R a ci -Am foo
   adding foo
   $ hgserve -R a --config web.push_ssl=False --config web.allow_push=* --encoding latin1
-  listening at http://*:$HGPORT1/ (bound to 127.0.0.1:$HGPORT1) (glob)
   $ hg --encoding utf-8 clone http://localhost:$HGPORT1 b
   requesting all changes
   adding changesets
@@ -52,7 +54,7 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     foo
   
-  $ kill `cat hg.pid`
+  $ "$TESTDIR/killdaemons.py" hg.pid
 
 verify 7e7d56fe4833 (encoding fallback in branchmap to maintain compatibility with 1.3.x)
 
--- a/tests/test-http-proxy.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-http-proxy.t	Fri Oct 19 01:34:50 2012 -0500
@@ -99,6 +99,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cat proxy.log
   * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
+  * - - [*] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - (glob)
   * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob)
   * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=bookmarks (glob)
   * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
--- a/tests/test-hybridencode.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-hybridencode.py	Fri Oct 19 01:34:50 2012 -0500
@@ -1,14 +1,75 @@
 from mercurial import store
 
-auxencode = lambda f: store._auxencode(f, True)
-hybridencode = lambda f: store._hybridencode(f, auxencode)
+def show(s):
+    # show test input
+    print "A = '%s'" % s.encode("string_escape")
+
+    # show the result of the C implementation, if available
+    h = store._dothybridencode(s)
+    print "B = '%s'" % h.encode("string_escape")
+
+    # compare it with reference implementation in Python
+    r = store._hybridencode(s, True)
+    if h != r:
+        print "R = '%s'" % r.encode("string_escape")
+    print
 
-enc = hybridencode # used for 'dotencode' repo format
+show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=[]^`{}")
+
+print "uppercase char X is encoded as _x"
+show("data/ABCDEFGHIJKLMNOPQRSTUVWXYZ")
+
+print "underbar is doubled"
+show("data/_")
+
+print "tilde is character-encoded"
+show("data/~")
+
+print "characters in ASCII code range 1..31"
+show('data/\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'
+          '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f')
 
-def show(s):
-    print "A = '%s'" % s
-    print "B = '%s'" % enc(s)
-    print
+print "characters in ASCII code range 126..255"
+show('data/\x7e\x7f'
+          '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f'
+          '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f')
+show('data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf'
+          '\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf')
+show('data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf'
+          '\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf')
+show('data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef'
+          '\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff')
+
+print "Windows reserved characters"
+show('data/less <, greater >, colon :, double-quote ", backslash \\'
+           ', pipe |, question-mark ?, asterisk *')
+
+print "encoding directories ending in .hg, .i or .d with '.hg' suffix"
+show('data/x.h.i/x.hg/x.i/x.d/foo')
+show('data/a.hg/a.i/a.d/foo')
+show('data/au.hg/au.i/au.d/foo')
+show('data/aux.hg/aux.i/aux.d/foo')
+show('data/auxy.hg/auxy.i/auxy.d/foo')
+
+print "but these are not encoded on *filenames*"
+show('data/foo/x.hg')
+show('data/foo/x.i')
+show('data/foo/x.d')
+show('data/foo/a.hg')
+show('data/foo/a.i')
+show('data/foo/a.d')
+show('data/foo/au.hg')
+show('data/foo/au.i')
+show('data/foo/au.d')
+show('data/foo/aux.hg')
+show('data/foo/aux.i')
+show('data/foo/aux.d')
+show('data/foo/auxy.hg')
+show('data/foo/auxy.i')
+show('data/foo/auxy.d')
+
+print "plain .hg, .i and .d directories have the leading dot encoded"
+show('data/.hg/.i/.d/foo')
 
 show('data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i')
 
@@ -25,3 +86,377 @@
      'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt')
 show('data/foo.../foo   / /a./_. /__/.x../    bla/.FOO/something.i')
 
+show('data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9')
+show('data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9')
+show('data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x'
+                                        '/com6.x/com7.x/com8.x/com9.x')
+show('data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5'
+                                        '/x.com6/x.com7/x.com8/x.com9')
+show('data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x'
+                                            '/com6x/com7x/com8x/com9x')
+show('data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5'
+                                            '/xcom6/xcom7/xcom8/xcom9')
+
+show('data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9')
+show('data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9')
+show('data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x'
+                                        '/lpt6.x/lpt7.x/lpt8.x/lpt9.x')
+show('data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5'
+                                        '/x.lpt6/x.lpt7/x.lpt8/x.lpt9')
+show('data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x'
+                                            '/lpt6x/lpt7x/lpt8x/lpt9x')
+show('data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5'
+                                            '/xlpt6/xlpt7/xlpt8/xlpt9')
+
+show('data/con/p/pr/prn/a/au/aux/n/nu/nul')
+show('data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL')
+show('data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x')
+show('data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul')
+show('data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx')
+show('data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul')
+
+show('data/a./au./aux./auxy./aux.')
+show('data/c./co./con./cony./con.')
+show('data/p./pr./prn./prny./prn.')
+show('data/n./nu./nul./nuly./nul.')
+show('data/l./lp./lpt./lpt1./lpt1y./lpt1.')
+show('data/lpt9./lpt9y./lpt9.')
+show('data/com./com1./com1y./com1.')
+show('data/com9./com9y./com9.')
+
+show('data/a /au /aux /auxy /aux ')
+
+print "largest unhashed path"
+show('data/123456789-123456789-123456789-123456789-123456789-'
+          'unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "shortest hashed path"
+show('data/123456789-123456789-123456789-123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "changing one char in part that's hashed away produces a different hash"
+show('data/123456789-123456789-123456789-123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-'
+          '123456789-123456')
+
+print "uppercase hitting length limit due to encoding"
+show('data/A23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/Z23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "compare with lowercase not hitting limit"
+show('data/a23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/z23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "not hitting limit with any of these"
+show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;="
+          "[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-"
+          "123456789-12345")
+
+print "underbar hitting length limit due to encoding"
+show('data/_23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "tilde hitting length limit due to encoding"
+show('data/~23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "Windows reserved characters hitting length limit"
+show('data/<23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/>23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/:23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/"23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/\\23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/|23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/?23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/*23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "initial space hitting length limit"
+show('data/ 23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "initial dot hitting length limit"
+show('data/.23456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "trailing space in filename hitting length limit"
+show('data/123456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-1234 ')
+
+print "trailing dot in filename hitting length limit"
+show('data/123456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-1234.')
+
+print "initial space in directory hitting length limit"
+show('data/ x/456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "initial dot in directory hitting length limit"
+show('data/.x/456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "trailing space in directory hitting length limit"
+show('data/x /456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "trailing dot in directory hitting length limit"
+show('data/x./456789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "with directories that need direncoding, hitting length limit"
+show('data/x.i/56789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/x.d/56789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/x.hg/5789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "Windows reserved filenames, hitting length limit"
+show('data/con/56789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/prn/56789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/aux/56789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/nul/56789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/com1/6789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/com9/6789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/lpt1/6789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+show('data/lpt9/6789-123456789-123456789-123456789-123456789-'
+          'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "non-reserved names, just not hitting limit"
+show('data/123456789-123456789-123456789-123456789-123456789-'
+          '/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12345')
+
+print "hashed path with largest untruncated 1st dir"
+show('data/12345678/-123456789-123456789-123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated 1st dir"
+show('data/123456789/123456789-123456789-123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest untruncated two dirs"
+show('data/12345678/12345678/9-123456789-123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated two dirs"
+show('data/123456789/123456789/123456789-123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest untruncated three dirs"
+show('data/12345678/12345678/12345678/89-123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated three dirs"
+show('data/123456789/123456789/123456789/123456789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest untruncated four dirs"
+show('data/12345678/12345678/12345678/12345678/789-123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated four dirs"
+show('data/123456789/123456789/123456789/123456789/123456789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest untruncated five dirs"
+show('data/12345678/12345678/12345678/12345678/12345678/6789-'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated five dirs"
+show('data/123456789/123456789/123456789/123456789/123456789/'
+          'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest untruncated six dirs"
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated six dirs"
+show('data/123456789/123456789/123456789/123456789/123456789/'
+          '123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest untruncated seven dirs"
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated seven dirs"
+show('data/123456789/123456789/123456789/123456789/123456789/'
+          '123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest untruncated eight dirs"
+print "(directory 8 is dropped because it hits _maxshortdirslen)"
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with smallest truncated eight dirs"
+print "(directory 8 is dropped because it hits _maxshortdirslen)"
+show('data/123456789/123456789/123456789/123456789/123456789/'
+          '123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with largest non-dropped directory 8"
+print "(just not hitting the _maxshortdirslen boundary)"
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "...adding one truncated char to dir 1..7 won't drop dir 8"
+show('data/12345678x/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678x/12345678/12345678/12345678/12345'
+          '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678/12345678x/12345678/12345678/12345'
+          '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678/12345678/12345678x/12345678/12345'
+          '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678/12345678/12345678/12345678x/12345'
+          '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path with shortest dropped directory 8"
+print "(just hitting the _maxshortdirslen boundary)"
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "hashed path that drops dir 8 due to dot or space at end is"
+print "encoded, and thus causing to hit _maxshortdirslen"
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print "... with dir 8 short enough for encoding"
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-123456')
+
+print '''Extensions are replicated on hashed paths. Note that
+we only get to encode files that end in .i or .d inside the
+store. Encoded filenames are thus bound in length.'''
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.345.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.345.d')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.34567.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.345678.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789-.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789-1.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789-12.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789-123.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789-1234.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789-12345.i')
+show('data/12345678/12345678/12345678/12345678/12345678/12345'
+          '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+          '123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWX'
+          'YZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTU'
+          'VWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxx'
+          'xxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwww'
+          'wwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i')
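
Most of the A/B pairs in the expected output that follows can be read off from
a handful of character-level substitutions (uppercase letters, the underbar,
the tilde and out-of-range bytes). A deliberately simplified sketch of just
those rules is shown here for orientation; it ignores Windows reserved
characters and names, trailing dots and spaces, the .hg/.i/.d directory
suffixing and the hashed 'dh/' fallback for long paths, all of which the real
encoder in mercurial.store handles.

  # Simplified illustration of a few character-level rules exercised above:
  # uppercase letters become "_" + lowercase, "_" is doubled, and control
  # bytes as well as bytes >= 0x7e become "~" + two hex digits.  This toy
  # version is NOT store._hybridencode.
  def toy_encode(path):
      out = []
      for ch in path:
          if 'A' <= ch <= 'Z':
              out.append('_' + ch.lower())
          elif ch == '_':
              out.append('__')
          elif ord(ch) < 0x20 or ord(ch) >= 0x7e:
              out.append('~%02x' % ord(ch))
          else:
              out.append(ch)
      return ''.join(out)

  print(toy_encode('data/ABCDEFGHIJKLMNOPQRSTUVWXYZ'))  # data/_a_b_c..._y_z
  print(toy_encode('data/_'))                           # data/__
  print(toy_encode('data/~'))                           # data/~7e
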
--- a/tests/test-hybridencode.py.out	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-hybridencode.py.out	Fri Oct 19 01:34:50 2012 -0500
@@ -1,3 +1,105 @@
+A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
+B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
+
+uppercase char X is encoded as _x
+A = 'data/ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+B = 'data/_a_b_c_d_e_f_g_h_i_j_k_l_m_n_o_p_q_r_s_t_u_v_w_x_y_z'
+
+underbar is doubled
+A = 'data/_'
+B = 'data/__'
+
+tilde is character-encoded
+A = 'data/~'
+B = 'data/~7e'
+
+characters in ASCII code range 1..31
+A = 'data/\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
+B = 'data/~01~02~03~04~05~06~07~08~09~0a~0b~0c~0d~0e~0f~10~11~12~13~14~15~16~17~18~19~1a~1b~1c~1d~1e~1f'
+
+characters in ASCII code range 126..255
+A = 'data/~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f'
+B = 'data/~7e~7f~80~81~82~83~84~85~86~87~88~89~8a~8b~8c~8d~8e~8f~90~91~92~93~94~95~96~97~98~99~9a~9b~9c~9d~9e~9f'
+
+A = 'data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf'
+B = 'data/~a0~a1~a2~a3~a4~a5~a6~a7~a8~a9~aa~ab~ac~ad~ae~af~b0~b1~b2~b3~b4~b5~b6~b7~b8~b9~ba~bb~bc~bd~be~bf'
+
+A = 'data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf'
+B = 'data/~c0~c1~c2~c3~c4~c5~c6~c7~c8~c9~ca~cb~cc~cd~ce~cf~d0~d1~d2~d3~d4~d5~d6~d7~d8~d9~da~db~dc~dd~de~df'
+
+A = 'data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff'
+B = 'data/~e0~e1~e2~e3~e4~e5~e6~e7~e8~e9~ea~eb~ec~ed~ee~ef~f0~f1~f2~f3~f4~f5~f6~f7~f8~f9~fa~fb~fc~fd~fe~ff'
+
+Windows reserved characters
+A = 'data/less <, greater >, colon :, double-quote ", backslash \\, pipe |, question-mark ?, asterisk *'
+B = 'data/less ~3c, greater ~3e, colon ~3a, double-quote ~22, backslash ~5c, pipe ~7c, question-mark ~3f, asterisk ~2a'
+
+encoding directories ending in .hg, .i or .d with '.hg' suffix
+A = 'data/x.h.i/x.hg/x.i/x.d/foo'
+B = 'data/x.h.i.hg/x.hg.hg/x.i.hg/x.d.hg/foo'
+
+A = 'data/a.hg/a.i/a.d/foo'
+B = 'data/a.hg.hg/a.i.hg/a.d.hg/foo'
+
+A = 'data/au.hg/au.i/au.d/foo'
+B = 'data/au.hg.hg/au.i.hg/au.d.hg/foo'
+
+A = 'data/aux.hg/aux.i/aux.d/foo'
+B = 'data/au~78.hg.hg/au~78.i.hg/au~78.d.hg/foo'
+
+A = 'data/auxy.hg/auxy.i/auxy.d/foo'
+B = 'data/auxy.hg.hg/auxy.i.hg/auxy.d.hg/foo'
+
+but these are not encoded on *filenames*
+A = 'data/foo/x.hg'
+B = 'data/foo/x.hg'
+
+A = 'data/foo/x.i'
+B = 'data/foo/x.i'
+
+A = 'data/foo/x.d'
+B = 'data/foo/x.d'
+
+A = 'data/foo/a.hg'
+B = 'data/foo/a.hg'
+
+A = 'data/foo/a.i'
+B = 'data/foo/a.i'
+
+A = 'data/foo/a.d'
+B = 'data/foo/a.d'
+
+A = 'data/foo/au.hg'
+B = 'data/foo/au.hg'
+
+A = 'data/foo/au.i'
+B = 'data/foo/au.i'
+
+A = 'data/foo/au.d'
+B = 'data/foo/au.d'
+
+A = 'data/foo/aux.hg'
+B = 'data/foo/au~78.hg'
+
+A = 'data/foo/aux.i'
+B = 'data/foo/au~78.i'
+
+A = 'data/foo/aux.d'
+B = 'data/foo/au~78.d'
+
+A = 'data/foo/auxy.hg'
+B = 'data/foo/auxy.hg'
+
+A = 'data/foo/auxy.i'
+B = 'data/foo/auxy.i'
+
+A = 'data/foo/auxy.d'
+B = 'data/foo/auxy.d'
+
+plain .hg, .i and .d directories have the leading dot encoded
+A = 'data/.hg/.i/.d/foo'
+B = 'data/~2ehg.hg/~2ei.hg/~2ed.hg/foo'
+
 A = 'data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i'
 B = 'data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i'
 
@@ -19,3 +121,373 @@
 A = 'data/foo.../foo   / /a./_. /__/.x../    bla/.FOO/something.i'
 B = 'data/foo..~2e/foo  ~20/~20/a~2e/__.~20/____/~2ex.~2e/~20   bla/~2e_f_o_o/something.i'
 
+A = 'data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9'
+B = 'data/c/co/com/com0/co~6d1/co~6d2/co~6d3/co~6d4/co~6d5/co~6d6/co~6d7/co~6d8/co~6d9'
+
+A = 'data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9'
+B = 'data/_c/_c_o/_c_o_m/_c_o_m0/_c_o_m1/_c_o_m2/_c_o_m3/_c_o_m4/_c_o_m5/_c_o_m6/_c_o_m7/_c_o_m8/_c_o_m9'
+
+A = 'data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x/com6.x/com7.x/com8.x/com9.x'
+B = 'data/c.x/co.x/com.x/com0.x/co~6d1.x/co~6d2.x/co~6d3.x/co~6d4.x/co~6d5.x/co~6d6.x/co~6d7.x/co~6d8.x/co~6d9.x'
+
+A = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9'
+B = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9'
+
+A = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x'
+B = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x'
+
+A = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9'
+B = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9'
+
+A = 'data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9'
+B = 'data/l/lp/lpt/lpt0/lp~741/lp~742/lp~743/lp~744/lp~745/lp~746/lp~747/lp~748/lp~749'
+
+A = 'data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9'
+B = 'data/_l/_l_p/_l_p_t/_l_p_t0/_l_p_t1/_l_p_t2/_l_p_t3/_l_p_t4/_l_p_t5/_l_p_t6/_l_p_t7/_l_p_t8/_l_p_t9'
+
+A = 'data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x/lpt6.x/lpt7.x/lpt8.x/lpt9.x'
+B = 'data/l.x/lp.x/lpt.x/lpt0.x/lp~741.x/lp~742.x/lp~743.x/lp~744.x/lp~745.x/lp~746.x/lp~747.x/lp~748.x/lp~749.x'
+
+A = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9'
+B = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9'
+
+A = 'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x'
+B = 'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x'
+
+A = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9'
+B = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9'
+
+A = 'data/con/p/pr/prn/a/au/aux/n/nu/nul'
+B = 'data/co~6e/p/pr/pr~6e/a/au/au~78/n/nu/nu~6c'
+
+A = 'data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL'
+B = 'data/_c_o_n/_p/_p_r/_p_r_n/_a/_a_u/_a_u_x/_n/_n_u/_n_u_l'
+
+A = 'data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x'
+B = 'data/co~6e.x/p.x/pr.x/pr~6e.x/a.x/au.x/au~78.x/n.x/nu.x/nu~6c.x'
+
+A = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul'
+B = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul'
+
+A = 'data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx'
+B = 'data/conx/px/prx/prnx/ax/au~78/auxx/nx/nux/nulx'
+
+A = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul'
+B = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul'
+
+A = 'data/a./au./aux./auxy./aux.'
+B = 'data/a~2e/au~2e/au~78~2e/auxy~2e/au~78~2e'
+
+A = 'data/c./co./con./cony./con.'
+B = 'data/c~2e/co~2e/co~6e~2e/cony~2e/co~6e~2e'
+
+A = 'data/p./pr./prn./prny./prn.'
+B = 'data/p~2e/pr~2e/pr~6e~2e/prny~2e/pr~6e~2e'
+
+A = 'data/n./nu./nul./nuly./nul.'
+B = 'data/n~2e/nu~2e/nu~6c~2e/nuly~2e/nu~6c~2e'
+
+A = 'data/l./lp./lpt./lpt1./lpt1y./lpt1.'
+B = 'data/l~2e/lp~2e/lpt~2e/lp~741~2e/lpt1y~2e/lp~741~2e'
+
+A = 'data/lpt9./lpt9y./lpt9.'
+B = 'data/lp~749~2e/lpt9y~2e/lp~749~2e'
+
+A = 'data/com./com1./com1y./com1.'
+B = 'data/com~2e/co~6d1~2e/com1y~2e/co~6d1~2e'
+
+A = 'data/com9./com9y./com9.'
+B = 'data/co~6d9~2e/com9y~2e/co~6d9~2e'
+
+A = 'data/a /au /aux /auxy /aux '
+B = 'data/a~20/au~20/aux~20/auxy~20/aux~20'
+
+largest unhashed path
+A = 'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+
+shortest hashed path
+A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxe9c55002b50bf5181e7a6fc1f60b126e2a6fcf71'
+
+changing one char in part that's hashed away produces a different hash
+A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-123456789-123456'
+B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxd24fa4455faf8a94350c18e5eace7c2bb17af706'
+
+uppercase hitting length limit due to encoding
+A = 'data/A23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxcbbc657029b41b94ed510d05feb6716a5c03bc6b'
+
+A = 'data/Z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx938f32a725c89512833fb96b6602dd9ebff51ddd'
+
+compare with lowercase not hitting limit
+A = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+
+A = 'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+
+not hitting limit with any of these
+A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+
+underbar hitting length limit due to encoding
+A = 'data/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx9921a01af50feeabc060ce00eee4cba6efc31d2b'
+
+tilde hitting length limit due to encoding
+A = 'data/~23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~7e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx9cec6f97d569c10995f785720044ea2e4227481b'
+
+Windows reserved characters hitting length limit
+A = 'data/<23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~3c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxee67d8f275876ca1ef2500fc542e63c885c4e62d'
+
+A = 'data/>23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~3e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx387a85a5b1547cc9136310c974df716818458ddb'
+
+A = 'data/:23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~3a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2e4154fb571d13d22399c58cc4ef4858e4b75999'
+
+A = 'data/"23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~2223456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxfc7e3ec7b0687ee06ed8c32fef0eb0c1980259f5'
+
+A = 'data/\\23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~5c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx944e1f2b7110687e116e0d151328ac648b06ab4a'
+
+A = 'data/|23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~7c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx28b23dd3fd0242946334126ab62bcd772aac32f4'
+
+A = 'data/?23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~3f23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa263022d3994d2143d98f94f431eef8b5e7e0f8a'
+
+A = 'data/*23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~2a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx0e7e6020e3c00ba7bb7893d84ca2966fbf53e140'
+
+initial space hitting length limit
+A = 'data/ 23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~2023456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx92acbc78ef8c0b796111629a02601f07d8aec4ea'
+
+initial dot hitting length limit
+A = 'data/.23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~2e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxdbe19cc6505b3515ab9228cebf877ad07075168f'
+
+trailing space in filename hitting length limit
+A = 'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234 '
+B = 'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx0025dc73e04f97426db4893e3bf67d581dc6d066'
+
+trailing dot in filename hitting length limit
+A = 'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234.'
+B = 'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx85a16cf03ee7feba8a5abc626f1ba9886d01e89d'
+
+initial space in directory hitting length limit
+A = 'data/ x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~20x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx1b3a3b712b2ac00d6af14ae8b4c14fdbf904f516'
+
+initial dot in directory hitting length limit
+A = 'data/.x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/~2ex/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx39dbc4c193a5643a8936fc69c3363cd7ac91ab14'
+
+trailing space in directory hitting length limit
+A = 'data/x /456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/x~20/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2253c341df0b5290790ad312cd8499850f2273e5'
+
+trailing dot in directory hitting length limit
+A = 'data/x./456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/x~2e/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxcc0324d696d34562b44b5138db08ee1594ccc583'
+
+with directories that need direncoding, hitting length limit
+A = 'data/x.i/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/x.i.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxa4c4399bdf81c67dbbbb7060aa0124d8dea94f74'
+
+A = 'data/x.d/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/x.d.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx1303fa90473b230615f5b3ea7b660e881ae5270a'
+
+A = 'data/x.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/x.hg.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx26d724a8af68e7a4e4455e6602ea9adbd0eb801f'
+
+Windows reserved filenames, hitting length limit
+A = 'data/con/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/co~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc0794d4f4c605a2617900eb2563d7113cf6ea7d3'
+
+A = 'data/prn/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/pr~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx64db876e1a9730e27236cb9b167aff942240e932'
+
+A = 'data/aux/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/au~78/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx8a178558405ca6fb4bbd75446dfa186f06751a0d'
+
+A = 'data/nul/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/nu~6c/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc5e51b6fec1bd07bd243b053a0c3f7209855b886'
+
+A = 'data/com1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/co~6d1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx32f5f44ece3bb62b9327369ca84cc19c86259fcd'
+
+A = 'data/com9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/co~6d9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx734360b28c66a3230f55849fe8926206d229f990'
+
+A = 'data/lpt1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/lp~741/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxe6f16ab4b6b0637676b2842b3345c9836df46ef7'
+
+A = 'data/lpt9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'dh/lp~749/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa475814c51acead3e44f2ff801f0c4903f986157'
+
+non-reserved names, just not hitting limit
+A = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+B = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+
+hashed path with largest untruncated 1st dir
+A = 'data/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx4e9e9e384d00929a93b6835fbf976eb32321ff3c'
+
+hashed path with smallest truncated 1st dir
+A = 'data/123456789/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxx1f4e4ec5f2be76e109bfaa8e31c062fe426d5490'
+
+hashed path with largest untruncated two dirs
+A = 'data/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx3332d8329d969cf835542a9f2cbcfb385b6cf39d'
+
+hashed path with smallest truncated two dirs
+A = 'data/123456789/123456789/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx9699559798247dffa18717138859be5f8874840e'
+
+hashed path with largest untruncated three dirs
+A = 'data/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxf0a2b053bb1369cce02f78c217d6a7aaea18c439'
+
+hashed path with smallest truncated three dirs
+A = 'data/123456789/123456789/123456789/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-1c6f8284967384ec13985a046d3553179d9d03cd'
+
+hashed path with largest untruncated four dirs
+A = 'data/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxx0d30c99049d8f0ff97b94d4ef302027e8d54c6fd'
+
+hashed path with smallest truncated four dirs
+A = 'data/123456789/123456789/123456789/123456789/123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/123456789-hashed----xxxxxxxxx-xxxxxxxxx-x46162779e1a771810b37a737f82ae7ed33771402'
+
+hashed path with largest untruncated five dirs
+A = 'data/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxbfe752ddc8b003c2790c66a9f2eb1ea75c114390'
+
+hashed path with smallest truncated five dirs
+A = 'data/123456789/123456789/123456789/123456789/123456789/hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/hashed----xxxxxxxxx-xxxxxxxxx-xxb94c27b3532fa880cdd572b1c514785cab7b6ff2'
+
+hashed path with largest untruncated six dirs
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxcd8cc5483a0f3be409e0e5d4bf9e36e113c59235'
+
+hashed path with smallest truncated six dirs
+A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxxxxxxx-xxx47dd6f616f833a142da00701b334cebbf640da06'
+
+hashed path with largest untruncated seven dirs
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxx1c8ed635229fc22efe51035feeadeb4c8a0ecb82'
+
+hashed path with smallest truncated seven dirs
+A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx298ff7d33f8ce6db57930837ffea2fb2f48bb926'
+
+hashed path with largest untruncated eight dirs
+(directory 8 is dropped because it hits _maxshortdirslen)
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxc8996ccd41b471f768057181a4d59d2febe7277d'
+
+hashed path with smallest truncated eight dirs
+(directory 8 is dropped because it hits _maxshortdirslen)
+A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx4fa04a839a6bda93e1c21c713f2edcbd16e8890d'
+
+hashed path with largest non-dropped directory 8
+(just not hitting the _maxshortdirslen boundary)
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxx4d43d1ccaa20efbfe99ec779dc063611536ff2c5'
+
+...adding one truncated char to dir 1..7 won't drop dir 8
+A = 'data/12345678x/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx0f9efce65189cc60fd90fe4ffd49d7b58bbe0f2e'
+
+A = 'data/12345678/12345678x/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx945ca395708cafdd54a94501859beabd3e243921'
+
+A = 'data/12345678/12345678/12345678x/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxac62bf6898c4fd0502146074547c11caa751a327'
+
+A = 'data/12345678/12345678/12345678/12345678x/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx2ae5a2baed7983fae8974d0ca06c6bf08b9aee92'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678x/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx214aba07b6687532a43d1e9eaf6e88cfca96b68c'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxe7a022ae82f0f55cf4e0498e55ba59ea4ebb55bf'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxb51ce61164996a80f36ce3cfe64b62d519aedae3'
+
+hashed path with shortest dropped directory 8
+(just hitting the _maxshortdirslen boundary)
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx11fa9873cc6c3215eae864528b5530a04efc6cfe'
+
+hashed path that drops dir 8 because the trailing dot or space is
+encoded, which causes the path to hit _maxshortdirslen
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxx602df9b45bec564e2e1f0645d5140dddcc76ed58'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxxd99ff212bc84b4d1f70cd6b0071e3ef69d4e12ce'
+
+... with dir 8 short enough for encoding
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~2e/xx-xxxxx7baeb5ed7f14a586ee1cacecdbcbff70032d1b3c'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~20/xx-xxxxxcf79ca9795f77d7f75745da36807e5d772bd5182'
+
+Extensions are replicated on hashed paths. Note that
+we only get to encode files that end in .i or .d inside the
+store. Encoded filenames are thus bound in length.
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxc10ad03b5755ed524f5286aab1815dfe07729438.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345.d'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx9eec83381f2b39ef5ac8b4ecdf2c94f7983f57c8.d'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb7796dc7d175cfb0bb8a7728f58f6ebec9042568.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.34567.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb515857a6bfeef017c4894d8df42458ac65d55b8.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345678.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb05a0f247bc0a776211cd6a32ab714fd9cc09f2b.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxf192b48bff08d9e0e12035fb52bc58c70de72c94.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx435551e0ed4c7b083b9ba83cee916670e02e80ad.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxa7f74eb98d8d58b716356dfd26e2f9aaa65d6a9a.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxed68d9bd43b931f0b100267fee488d65a0c66f62.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-123.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx5cea44de2b642d2ba2b4a30693ffb1049644d698.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1234.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx68462f62a7f230b39c1b5400d73ec35920990b7e.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx4cb852a314c6da240a83eec94761cdd71c6ec22e.i'
+
+A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTUVWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i'
+B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx93352aa50377751d9e5ebdf52da1e6e69a6887a6.i'
+
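
The A = / B = pairs above exercise the store's hybrid encoding: components that
are Windows reserved names, or that end in a dot or space, are escaped as "~XX"
hex, and paths over the length limit fall back to a hashed "dh/" form. The
sketch below illustrates only those two ideas; it is not the real fncache/store
implementation, it omits rules such as uppercase, underscore and metacharacter
escaping, and the 120-character limit is an assumption made for illustration.

    import hashlib

    _winreserved = set(['con', 'prn', 'aux', 'nul']
                       + ['com%d' % i for i in range(1, 10)]
                       + ['lpt%d' % i for i in range(1, 10)])

    def _auxencode(component):
        # escape the character that makes a component a reserved name,
        # e.g. 'aux' -> 'au~78', 'com1' -> 'co~6d1'
        base = component.split('.', 1)[0].lower()
        if base in _winreserved:
            component = (component[:2] + '~%02x' % ord(component[2])
                         + component[3:])
        # escape a trailing dot or space, e.g. 'a.' -> 'a~2e'
        if component and component[-1] in '. ':
            component = component[:-1] + '~%02x' % ord(component[-1])
        return component

    def hybridencode(path, maxlen=120):
        encoded = '/'.join(_auxencode(c) for c in path.split('/'))
        if len(encoded) <= maxlen:
            return encoded
        # overlong paths are replaced by a hashed 'dh/' form; a trailing
        # '.i'/'.d' extension is kept so index and data files stay distinct
        ext = ''
        for e in ('.i', '.d'):
            if path.endswith(e):
                ext = e
        digest = hashlib.sha1(path.encode('ascii')).hexdigest()
        prefix = encoded[len('data/'):maxlen - len(digest) - len(ext)]
        return 'dh/' + prefix + digest + ext

    print(hybridencode('data/aux.i'))                  # data/au~78.i
    print(hybridencode('data/x/' + 'y' * 200 + '.i'))  # dh/... hashed form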
--- a/tests/test-import-git.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-import-git.t	Fri Oct 19 01:34:50 2012 -0500
@@ -322,12 +322,12 @@
 
 Filenames with spaces:
 
-  $ hg import -d "1000000 0" -m spaces - <<EOF
+  $ sed 's,EOL$,,g' <<EOF | hg import -d "1000000 0" -m spaces -
   > diff --git a/foo bar b/foo bar
   > new file mode 100644
   > index 0000000..257cc56
   > --- /dev/null
-  > +++ b/foo bar	
+  > +++ b/foo bar	EOL
   > @@ -0,0 +1 @@
   > +foo
   > EOF
@@ -384,7 +384,7 @@
   b
   \x00 (no-eol) (esc)
 
-  $ hg st --copies --change . 
+  $ hg st --copies --change .
   A binary2
     text2
   R text2
--- a/tests/test-import.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-import.t	Fri Oct 19 01:34:50 2012 -0500
@@ -602,6 +602,9 @@
   $ echo a > a
   $ hg ci -Am t
   adding a
+  $ hg import -p foo
+  abort: invalid value 'foo' for option -p, expected int
+  [255]
   $ hg import -p0 - << EOF
   > foobar
   > --- a	Sat Apr 12 22:43:58 2008 -0400
--- a/tests/test-inotify-issue1371.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-inotify-issue1371.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,6 +1,6 @@
 
   $ "$TESTDIR/hghave" inotify || exit 80
-  $ hg init 
+  $ hg init
   $ touch a b c d e f
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "inotify=" >> $HGRCPATH
@@ -22,7 +22,7 @@
 
   $ sleep 1
 
-eed to test all file opperations
+eed to test all file operations
 
   $ hg rm a
   $ rm b
@@ -41,4 +41,4 @@
 
 Are we able to kill the service? if not, the service died on some error
 
-  $ kill `cat hg.pid` 
+  $ kill `cat hg.pid`
--- a/tests/test-issue2137.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-issue2137.t	Fri Oct 19 01:34:50 2012 -0500
@@ -12,15 +12,15 @@
   > from mercurial import extensions, node, revlog
   > 
   > def reposetup(ui, repo):
-  >     def wrapcommit(orig, *args, **kwargs):
-  >         result = orig(*args, **kwargs)
-  >         tip1 = node.short(repo.changelog.tip())
-  >         tip2 = node.short(repo.lookup(tip1))
-  >         assert tip1 == tip2
-  >         ui.write('new tip: %s\n' % tip1)
-  >         return result
-  > 
-  >     extensions.wrapfunction(repo, 'commit', wrapcommit)
+  >     class wraprepo(repo.__class__):
+  >         def commit(self, *args, **kwargs):
+  >             result = super(wraprepo, self).commit(*args, **kwargs)
+  >             tip1 = node.short(repo.changelog.tip())
+  >             tip2 = node.short(repo.lookup(tip1))
+  >             assert tip1 == tip2
+  >             ui.write('new tip: %s\n' % tip1)
+  >             return result
+  >     repo.__class__ = wraprepo
   > 
   > def extsetup(ui):
   >     revlog._maxinline = 8             # split out 00changelog.d early
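
The hunk above rewrites the test extension to subclass the repository class
inside reposetup() instead of calling extensions.wrapfunction() on a bound
method. A minimal sketch of that idiom follows; the class name and message are
illustrative and not taken from the patch.

    def reposetup(ui, repo):
        class verboserepo(repo.__class__):
            def commit(self, *args, **kwargs):
                # defer to the original commit(), then emit a note
                result = super(verboserepo, self).commit(*args, **kwargs)
                ui.write('commit finished\n')
                return result
        repo.__class__ = verboserepo

Subclassing keeps the override on the repository instance itself, so later
callers of repo.commit() go through the subclass rather than a patched bound
method.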
--- a/tests/test-keyword.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-keyword.t	Fri Oct 19 01:34:50 2012 -0500
@@ -509,9 +509,9 @@
   $ hg --debug commit --amend -d '15 1' -m 'amend without changes' | grep keywords
   overwriting a expanding keywords
   $ hg -q id
-  577e60613a88
+  67d8c481a6be
   $ head -1 a
-  expand $Id: a,v 577e60613a88 1970/01/01 00:00:15 test $
+  expand $Id: a,v 67d8c481a6be 1970/01/01 00:00:15 test $
 
   $ hg -q strip -n tip
 
@@ -727,7 +727,7 @@
   ignore $Id$
   a
 
-Write custom keyword and prepare multiline commit message
+Write custom keyword and prepare multi-line commit message
 
   $ echo '$Xinfo$' >> a
   $ cat <<EOF >> log
@@ -745,7 +745,7 @@
   ? c
   ? log
 
-Commit with multiline message and custom expansion
+Commit with multi-line message and custom expansion
 
   $ hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>'
   a
@@ -814,7 +814,7 @@
 
   $ cd ..
 
-Expansion in destinaton with global configuration
+Expansion in destination with global configuration
 
   $ hg --quiet clone Test globalconf
   $ cat globalconf/a
@@ -998,7 +998,7 @@
 
   $ echo '$Id$' > m
   $ hg add m
-  $ hg commit -m 4kw 
+  $ hg commit -m 4kw
   $ echo foo >> m
   $ hg commit -m 5foo
 
--- a/tests/test-known.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-known.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 = Test the known() protocol function =
 
@@ -35,4 +35,5 @@
   $ hg debugknown http://localhost:$HGPORT/
   
   $ cat error.log
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
--- a/tests/test-largefiles.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-largefiles.t	Fri Oct 19 01:34:50 2012 -0500
@@ -218,7 +218,7 @@
   -rw-r--r-- 9 normal4
 
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 #endif
 
 Test archiving the various revisions.  These hit corner cases known with
@@ -1291,7 +1291,7 @@
   [255]
 
 used all HGPORTs, kill all daemons
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 #endif
 
 vanilla clients locked out from largefiles ssh repos
@@ -1377,7 +1377,7 @@
   $ rm -rf empty
 
 used all HGPORTs, kill all daemons
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
 #endif
 
@@ -1587,9 +1587,8 @@
 
   $ hg clone -U . ../empty
   $ cd ../empty
-  $ hg archive --subrepos -r tip ../archive.tar.gz 2>&1 | "$TESTDIR/filtercr.py"
+  $ hg archive --subrepos -r tip ../archive.tar.gz
   cloning subrepo subrepo from $TESTTMP/statusmatch/subrepo
-  
   $ cd ..
 
 Test that addremove picks up largefiles prior to the initial commit (issue3541)
@@ -1628,3 +1627,87 @@
   .hglf/large2.dat
 
   $ cd ..
+
+issue3651: summary/outgoing with largefiles shows "no remote repo"
+unexpectedly
+
+  $ mkdir issue3651
+  $ cd issue3651
+
+  $ hg init src
+  $ echo a > src/a
+  $ hg -R src add --large src/a
+  $ hg -R src commit -m '#0'
+  Invoking status precommit hook
+  A a
+
+check messages when no remote repository is specified:
+"no remote repo" route for "hg outgoing --large" is not tested here,
+because it can't be reproduced easily.
+
+  $ hg init clone1
+  $ hg -R clone1 -q pull src
+  $ hg -R clone1 -q update
+  $ hg -R clone1 paths | grep default
+  [1]
+
+  $ hg -R clone1 summary --large
+  parent: 0:fc0bd45326d3 tip
+   #0
+  branch: default
+  commit: (clean)
+  update: (current)
+  largefiles: No remote repo
+
+check messages when there are no files to upload:
+
+  $ hg -q clone src clone2
+  $ hg -R clone2 paths | grep default
+  default = $TESTTMP/issue3651/src
+
+  $ hg -R clone2 summary --large
+  parent: 0:fc0bd45326d3 tip
+   #0
+  branch: default
+  commit: (clean)
+  update: (current)
+  searching for changes
+  largefiles: (no files to upload)
+  $ hg -R clone2 outgoing --large
+  comparing with $TESTTMP/issue3651/src
+  searching for changes
+  no changes found
+  searching for changes
+  largefiles: no files to upload
+  [1]
+
+check messages when there are files to upload:
+
+  $ echo b > clone2/b
+  $ hg -R clone2 add --large clone2/b
+  $ hg -R clone2 commit -m '#1'
+  Invoking status precommit hook
+  A b
+  $ hg -R clone2 summary --large
+  parent: 1:1acbe71ce432 tip
+   #1
+  branch: default
+  commit: (clean)
+  update: (current)
+  searching for changes
+  largefiles: 1 to upload
+  $ hg -R clone2 outgoing --large
+  comparing with $TESTTMP/issue3651/src
+  searching for changes
+  changeset:   1:1acbe71ce432
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     #1
+  
+  searching for changes
+  largefiles to upload:
+  b
+  
+
+  $ cd ..
--- a/tests/test-lfconvert.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-lfconvert.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,3 +1,5 @@
+  $ USERCACHE="$TESTTMP/cache"; export USERCACHE
+  $ mkdir "${USERCACHE}"
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
   > largefiles =
@@ -8,6 +10,7 @@
   > minsize = 0.5
   > patterns = **.other
   >     **.dat
+  > usercache=${USERCACHE}
   > EOF
 
 "lfconvert" works
@@ -270,3 +273,25 @@
   pass
 
   $ cd ..
+
+Avoid a traceback if a largefile isn't available (issue3519)
+
+Ensure the largefile can be cached in the source if necessary
+  $ hg clone -U largefiles-repo issue3519
+  $ rm "${USERCACHE}"/*
+  $ hg lfconvert --to-normal issue3519 normalized3519
+  initializing destination normalized3519
+
+Ensure the abort message is useful if a largefile is entirely unavailable
+  $ rm -rf normalized3519
+  $ rm "${USERCACHE}"/*
+  $ rm issue3519/.hg/largefiles/*
+  $ rm largefiles-repo/.hg/largefiles/*
+  $ hg lfconvert --to-normal issue3519 normalized3519
+  initializing destination normalized3519
+  large: can't get file locally
+  (no default or default-push path set in hgrc)
+  abort: missing largefile 'large' from revision d4892ec57ce212905215fad1d9018f56b99202ad
+  [255]
+
+
--- a/tests/test-log.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-log.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1213,3 +1213,46 @@
   1
 
   $ cd ..
+
+test hg log on non-existent files and on directories
+  $ hg init issue1340
+  $ cd issue1340
+  $ mkdir d1; mkdir D2; mkdir D3.i; mkdir d4.hg; mkdir d5.d; mkdir .d6
+  $ echo 1 > d1/f1
+  $ echo 1 > D2/f1
+  $ echo 1 > D3.i/f1
+  $ echo 1 > d4.hg/f1
+  $ echo 1 > d5.d/f1
+  $ echo 1 > .d6/f1
+  $ hg -q add .
+  $ hg commit -m "a bunch of weird directories"
+  $ hg log -l1 d1/f1 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 f1
+  $ hg log -l1 . | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 ./ | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 d1 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 D2 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 D2/f1 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 D3.i | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 D3.i/f1 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 d4.hg | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 d4.hg/f1 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 d5.d | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 d5.d/f1 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 .d6 | grep changeset
+  changeset:   0:65624cd9070a
+  $ hg log -l1 .d6/f1 | grep changeset
+  changeset:   0:65624cd9070a
+  $ cd ..
--- a/tests/test-merge-tools.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-merge-tools.t	Fri Oct 19 01:34:50 2012 -0500
@@ -191,7 +191,7 @@
   false.whatever=
   true.priority=1
   # hg update -C 1
-  $ hg merge -r 2 --config merge-tools.true.executable=nonexistingmergetool
+  $ hg merge -r 2 --config merge-tools.true.executable=nonexistentmergetool
   merging f
   merging f failed!
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
@@ -212,7 +212,7 @@
   false.whatever=
   true.priority=1
   # hg update -C 1
-  $ hg merge -r 2 --config merge-tools.true.executable=/nonexisting/mergetool
+  $ hg merge -r 2 --config merge-tools.true.executable=/nonexistent/mergetool
   merging f
   merging f failed!
   0 files updated, 0 files merged, 0 files removed, 1 files unresolved
@@ -336,7 +336,7 @@
   true.priority=1
   true.executable=cat
   # hg update -C 1
-  $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistingmergetool
+  $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool
   couldn't find merge tool true specified for f
   merging f
   merging f failed!
@@ -359,7 +359,7 @@
   true.priority=1
   true.executable=cat
   # hg update -C 1
-  $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexisting/mergetool
+  $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexistent/mergetool
   couldn't find merge tool true specified for f
   merging f
   merging f failed!
--- a/tests/test-mq-header-date.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-header-date.t	Fri Oct 19 01:34:50 2012 -0500
@@ -128,7 +128,7 @@
   >     catlogd 6
   > 
   >     drop 6
-  >     
+  > 
   > 
   >     echo ==== qnew -u
   >     hg qnew -u jane 6.patch
--- a/tests/test-mq-qclone-http.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-qclone-http.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 hide outer repo
   $ hg init
@@ -151,3 +151,6 @@
   [1]
   $ hg --cwd d log --mq --template '{rev} {desc|firstline}\n'
   0 b.patch
+
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
+
--- a/tests/test-mq-qimport.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-qimport.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
   $ cat > writelines.py <<EOF
   > import sys
@@ -169,7 +169,7 @@
 
   $ cat > appendfoo.diff <<EOF
   > append foo
-  >  
+  > 
   > diff -r 07f494440405 -r 261500830e46 baz
   > --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
   > +++ b/baz	Thu Jan 01 00:00:00 1970 +0000
@@ -179,7 +179,7 @@
 
   $ cat > appendbar.diff <<EOF
   > append bar
-  >  
+  > 
   > diff -r 07f494440405 -r 261500830e46 baz
   > --- a/baz	Thu Jan 01 00:00:00 1970 +0000
   > +++ b/baz	Thu Jan 01 00:00:00 1970 +0000
@@ -240,7 +240,7 @@
 
 qimport with bad name, should abort before reading file
 
-  $ hg qimport non-existant-file --name .hg
+  $ hg qimport non-existent-file --name .hg
   abort: patch name cannot begin with ".hg"
   [255]
 
@@ -278,3 +278,5 @@
   1: secret
 
   $ cd ..
+
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
--- a/tests/test-mq-qpush-fail.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-qpush-fail.t	Fri Oct 19 01:34:50 2012 -0500
@@ -34,7 +34,7 @@
   $ python -c 'print "\xe9"' > message
   $ cat .hg/patches/bad-patch >> message
   $ mv message .hg/patches/bad-patch
-  $ hg qpush -a && echo 'qpush succeded?!'
+  $ hg qpush -a && echo 'qpush succeeded?!'
   applying patch1
   applying patch2
   applying bad-patch
--- a/tests/test-mq-qqueue.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-qqueue.t	Fri Oct 19 01:34:50 2012 -0500
@@ -22,7 +22,7 @@
 Try to change patch (create succeeds, switch fails):
 
   $ hg qqueue foo --create
-  abort: patches applied - cannot set new queue active
+  abort: new queue created, but cannot make active as patches are applied
   [255]
 
   $ hg qqueue
@@ -137,7 +137,7 @@
 Fail switching back:
 
   $ hg qqueue patches
-  abort: patches applied - cannot set new queue active
+  abort: new queue created, but cannot make active as patches are applied
   [255]
 
 Fail deleting current:
--- a/tests/test-mq-qrefresh-interactive.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-qrefresh-interactive.t	Fri Oct 19 01:34:50 2012 -0500
@@ -46,7 +46,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help qrefresh" to show more info
+  use "hg -v help qrefresh" to show the global options
 
 help qrefresh (record)
 
@@ -91,7 +91,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help qrefresh" to show more info
+  use "hg -v help qrefresh" to show the global options
 
   $ hg init a
   $ cd a
--- a/tests/test-mq-qrefresh-replace-log-message.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-qrefresh-replace-log-message.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-Environement setup for MQ
+Environment setup for MQ
 
   $ echo "[extensions]" >> $HGRCPATH
   $ echo "mq=" >> $HGRCPATH
--- a/tests/test-mq-qrefresh.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-qrefresh.t	Fri Oct 19 01:34:50 2012 -0500
@@ -207,7 +207,7 @@
 
   $ echo 'orphan' > orphanchild
   $ hg add orphanchild
-  $ hg qrefresh nonexistingfilename # clear patch
+  $ hg qrefresh nonexistentfilename # clear patch
   $ hg qrefresh --short 1/base
   $ hg qrefresh --short 2/base
 
--- a/tests/test-mq-subrepo.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mq-subrepo.t	Fri Oct 19 01:34:50 2012 -0500
@@ -478,7 +478,7 @@
   adding .hgsub
   $ hg -R sub update -C 1
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ hg commit -Am '#2 in parent (but be rollbacked soon)'
+  $ hg commit -Am '#2 in parent (but will be rolled back soon)'
   $ hg rollback
   repository tip rolled back to revision 1 (undo commit)
   working directory now based on revision 1
--- a/tests/test-mv-cp-st-diff.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-mv-cp-st-diff.t	Fri Oct 19 01:34:50 2012 -0500
@@ -187,7 +187,7 @@
   +y1
   
   
-  $ tb "add a a1" "add a a2" "hg cp a b" "copy in working dir" 
+  $ tb "add a a1" "add a a2" "hg cp a b" "copy in working dir"
   updating to branch default
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
   created new head
--- a/tests/test-notify.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-notify.t	Fri Oct 19 01:34:50 2012 -0500
@@ -42,16 +42,22 @@
   repository:
   
     [usersubs]
-    # key is subscriber email, value is a comma-separated list of repo glob
-    # patterns
+    # key is subscriber email, value is a comma-separated list of repo patterns
     user@host = pattern
   
     [reposubs]
-    # key is glob pattern, value is a comma-separated list of subscriber
-    # emails
+    # key is repo pattern, value is a comma-separated list of subscriber emails
     pattern = user@host
   
-  Glob patterns are matched against absolute path to repository root.
+  A "pattern" is a "glob" matching the absolute path to a repository, optionally
+  combined with a revset expression. A revset expression, if present, is
+  separated from the glob by a hash. Example:
+  
+    [reposubs]
+    */widgets#branch(release) = qa-team@example.com
+  
+  This sends to "qa-team@example.com" whenever a changeset on the "release"
+  branch triggers a notification in any repository ending in "widgets".
   
   In order to place them under direct user management, "[usersubs]" and
   "[reposubs]" sections may be placed in a separate "hgrc" file and incorporated
@@ -473,3 +479,77 @@
   ononononononononononononononononononononononononononononononononononononono=
   nonononononononononononono
   
+revset selection: send to the address whose pattern matches branch and repo
+
+  $ cat << EOF >> $HGRCPATH
+  > [hooks]
+  > incoming.notify = python:hgext.notify.hook
+  > 
+  > [notify]
+  > sources = pull
+  > test = True
+  > diffstat = False
+  > maxdiff = 0
+  > 
+  > [reposubs]
+  > */a#branch(test) = will_no_be_send@example.com
+  > */b#branch(test) = notify@example.com
+  > EOF
+  $ hg --cwd a branch test
+  marked working directory as branch test
+  (branches are permanent and global, did you want a bookmark?)
+  $ echo a >> a/a
+  $ hg --cwd a ci -m test -d '1 0'
+  $ hg --traceback --cwd b pull ../a | \
+  >   python -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+  pulling from ../a
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  Content-Type: text/plain; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  X-Test: foo
+  Date: * (glob)
+  Subject: test
+  From: test@test.com
+  X-Hg-Notification: changeset fbbcbc516f2f
+  Message-Id: <hg.fbbcbc516f2f.*.*@*> (glob)
+  To: baz@test.com, foo@bar, notify@example.com
+  
+  changeset fbbcbc516f2f in b
+  description: test
+  (run 'hg update' to get a working copy)
+
+revset selection: don't send to the address that expects mail
+from a different branch
+
+  $ hg --cwd a update default
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ echo a >> a/a
+  $ hg --cwd a ci -m test -d '1 0'
+  $ hg --traceback --cwd b pull ../a | \
+  >   python -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),'
+  pulling from ../a
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files (+1 heads)
+  Content-Type: text/plain; charset="us-ascii"
+  MIME-Version: 1.0
+  Content-Transfer-Encoding: 7bit
+  X-Test: foo
+  Date: * (glob)
+  Subject: test
+  From: test@test.com
+  X-Hg-Notification: changeset 38b42fa092de
+  Message-Id: <hg.38b42fa092de.*.*@*> (glob)
+  To: baz@test.com, foo@bar
+  
+  changeset 38b42fa092de in b
+  description: test
+  (run 'hg heads' to see heads)
+
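
The notify help text and the reposubs entries above use "glob#revset"
subscription keys. The small sketch below only illustrates how such a key
separates into its glob and revset halves; it is not the notify extension's
internal API.

    def splitsubscription(key):
        # 'glob#revset' -> (glob, revset); a plain glob has no revset part
        if '#' in key:
            glob, revset = key.split('#', 1)
        else:
            glob, revset = key, None
        return glob, revset

    print(splitsubscription('*/widgets#branch(release)'))
    # ('*/widgets', 'branch(release)')
    print(splitsubscription('*/widgets'))
    # ('*/widgets', None)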
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-obsolete-checkheads.t	Fri Oct 19 01:34:50 2012 -0500
@@ -0,0 +1,273 @@
+Check that obsolescence properly strips heads
+  $ cat > obs.py << EOF
+  > import mercurial.obsolete
+  > mercurial.obsolete._enabled = True
+  > EOF
+  $ cat >> $HGRCPATH << EOF
+  > [phases]
+  > # public changesets are not obsolete
+  > publish=false
+  > [ui]
+  > logtemplate='{node|short} ({phase}) {desc|firstline}\n'
+  > [extensions]
+  > graphlog=
+  > EOF
+  $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
+  $ mkcommit() {
+  >    echo "$1" > "$1"
+  >    hg add "$1"
+  >    hg ci -m "add $1"
+  > }
+  $ getid() {
+  >    hg id --debug -ir "desc('$1')"
+  > }
+
+
+  $ hg init remote
+  $ cd remote
+  $ mkcommit base
+  $ hg phase --public .
+  $ cd ..
+  $ cp -r remote base
+  $ hg clone remote local
+  updating to branch default
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd local
+
+New head replaces old head
+==========================
+
+setup
+(we add flag 1 to prevent a bumped error during the test)
+
+  $ mkcommit old
+  $ hg push
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  $ hg up -q '.^'
+  $ mkcommit new
+  created new head
+  $ hg debugobsolete --flags 1 `getid old` `getid new`
+  $ hg glog --hidden
+  @  71e3228bffe1 (draft) add new
+  |
+  | x  c70b08862e08 (draft) add old
+  |/
+  o  b4952fcf48cf (public) add base
+  
+  $ cp -r ../remote ../backup1
+
+old exists remotely as a draft. It is obsoleted by new, which we now push.
+Push should not warn about creating a new head
+
+  $ hg push
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+
+old head is now public (public local version)
+=============================================
+
+setup
+
+  $ rm -fr ../remote
+  $ cp -r ../backup1 ../remote
+  $ hg -R ../remote phase --public c70b08862e08
+  $ hg pull -v
+  pulling from $TESTTMP/remote (glob)
+  searching for changes
+  no changes found
+  $ hg glog --hidden
+  @  71e3228bffe1 (draft) add new
+  |
+  | o  c70b08862e08 (public) add old
+  |/
+  o  b4952fcf48cf (public) add base
+  
+
+Abort: old will still be a head because it's public.
+
+  $ hg push
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  abort: push creates new remote head 71e3228bffe1!
+  (did you forget to merge? use push -f to force)
+  [255]
+
+old head is now public (public remote version)
+==============================================
+
+TODO: Not implemented yet.
+
+# setup
+#
+#   $ rm -fr ../remote
+#   $ cp -r ../backup1 ../remote
+#   $ hg -R ../remote phase --public c70b08862e08
+#   $ hg phase --draft --force c70b08862e08
+#   $ hg glog --hidden
+#   @  71e3228bffe1 (draft) add new
+#   |
+#   | x  c70b08862e08 (draft) add old
+#   |/
+#   o  b4952fcf48cf (public) add base
+#
+#
+#
+# Abort: old will still be a head because it's public.
+#
+#   $ hg push
+#   pushing to $TESTTMP/remote
+#   searching for changes
+#   abort: push creates new remote head 71e3228bffe1!
+#   (did you forget to merge? use push -f to force)
+#   [255]
+
+old head is obsolete but replacement is not pushed
+==================================================
+
+setup
+
+  $ rm -fr ../remote
+  $ cp -r ../backup1 ../remote
+  $ hg phase --draft --force '(0::) - 0'
+  $ hg up -q '.^'
+  $ mkcommit other
+  created new head
+  $ hg glog --hidden
+  @  d7d41ccbd4de (draft) add other
+  |
+  | o  71e3228bffe1 (draft) add new
+  |/
+  | x  c70b08862e08 (draft) add old
+  |/
+  o  b4952fcf48cf (public) add base
+  
+
+old exists remotely as a draft. It is obsoleted by new, but we don't push new.
+Push should abort on the new head
+
+  $ hg push -r 'desc("other")'
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  abort: push creates new remote head d7d41ccbd4de!
+  (did you forget to merge? use push -f to force)
+  [255]
+
+
+
+Both precursors and successors are already known remotely. Descendants add heads
+===============================================================================
+
+setup (the obsolete marker is known locally only)
+
+  $ cd ..
+  $ rm -rf local
+  $ hg clone remote local
+  updating to branch default
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd local
+  $ mkcommit old
+  old already tracked!
+  nothing changed
+  [1]
+  $ hg up -q '.^'
+  $ mkcommit new
+  created new head
+  $ hg push -f
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
+  $ mkcommit desc1
+  $ hg up -q '.^'
+  $ mkcommit desc2
+  created new head
+  $ hg debugobsolete `getid old` `getid new`
+  $ hg glog --hidden
+  @  5fe37041cc2b (draft) add desc2
+  |
+  | o  a3ef1d111c5f (draft) add desc1
+  |/
+  o  71e3228bffe1 (draft) add new
+  |
+  | x  c70b08862e08 (draft) add old
+  |/
+  o  b4952fcf48cf (public) add base
+  
+  $ hg glog --hidden -R ../remote
+  o  71e3228bffe1 (draft) add new
+  |
+  | o  c70b08862e08 (draft) add old
+  |/
+  @  b4952fcf48cf (public) add base
+  
+  $ cp -r ../remote ../backup2
+
+Push should not warn about adding new heads. We create one, but we'll delete
+one anyway.
+
+  $ hg push
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 2 changes to 2 files (+1 heads)
+
+
+Remote head is unknown but obsoleted by a local changeset
+=========================================================
+
+setup
+
+  $ rm -fr ../remote
+  $ cp -r ../backup1 ../remote
+  $ cd ..
+  $ rm -rf local
+  $ hg clone remote local -r 0
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files
+  updating to branch default
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd local
+  $ mkcommit new
+  $ hg -R ../remote id --debug -r tip
+  c70b08862e0838ea6d7c59c85da2f1ed6c8d67da tip
+  $ hg  id --debug -r tip
+  71e3228bffe1886550777233d6c97bb5a6b2a650 tip
+  $ hg debugobsolete c70b08862e0838ea6d7c59c85da2f1ed6c8d67da 71e3228bffe1886550777233d6c97bb5a6b2a650
+  $ hg glog --hidden
+  @  71e3228bffe1 (draft) add new
+  |
+  o  b4952fcf48cf (public) add base
+  
+  $ hg glog --hidden -R ../remote
+  o  c70b08862e08 (draft) add old
+  |
+  @  b4952fcf48cf (public) add base
+  
+
+Push should not complain about new heads.
+
+It should not complain about "unsynced remote changes!" either but that's not
+handled yet.
+
+  $ hg push --traceback
+  pushing to $TESTTMP/remote (glob)
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 1 changes to 1 files (+1 heads)
--- a/tests/test-obsolete.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-obsolete.t	Fri Oct 19 01:34:50 2012 -0500
@@ -68,9 +68,12 @@
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ mkcommit new_c
   created new head
-  $ hg debugobsolete `getid original_c`  `getid new_c` -d '56 12'
+  $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
+  $ hg debugobsolete --flag 12 `getid original_c`  `getid new_c` -d '56 12'
+  $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
+  2:245bde4270cd add original_c
   $ hg debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
 
 do it again (it read the obsstore before adding new changeset)
 
@@ -80,7 +83,7 @@
   created new head
   $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
   $ hg debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
 
 Register two markers with a missing node
@@ -92,11 +95,18 @@
   $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
   $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
   $ hg debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
 
+Refuse pathological nullid successors
+  $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
+  transaction abort!
+  rollback completed
+  abort: bad obsolescence marker detected: invalid successors nullid
+  [255]
+
 Check that graphlog detect that a changeset is obsolete:
 
   $ hg glog
@@ -145,6 +155,67 @@
      summary:     add a
   
 
+And that bumped changesets are detected
+--------------------------------------
+
+If we didn't filter obsolete changesets out, 3 and 4 would show up too. Also
+note that the bumped changeset (5:5601fb93a350) is not a direct successor of
+the public changeset
+
+  $ hg log --hidden -r 'bumped()'
+  changeset:   5:5601fb93a350
+  tag:         tip
+  parent:      1:7c3bad9141dc
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     add new_3_c
+  
+
+And that we can't push a bumped changeset
+
+  $ hg push ../tmpa
+  pushing to ../tmpa
+  searching for changes
+  abort: push includes bumped changeset: 5601fb93a350!
+  [255]
+
+Fixing the "bumped" situation
+We need to create a clone of 5 and add a special marker with a flag
+
+  $ hg up '5^'
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ hg revert -ar 5
+  adding new_3_c
+  $ hg ci -m 'add n3w_3_c'
+  created new head
+  $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
+  $ hg log -r 'bumped()'
+  $ hg log -G
+  @  changeset:   6:6f9641995072
+  |  tag:         tip
+  |  parent:      1:7c3bad9141dc
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     add n3w_3_c
+  |
+  | o  changeset:   2:245bde4270cd
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     add original_c
+  |
+  o  changeset:   1:7c3bad9141dc
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     add b
+  |
+  o  changeset:   0:1f0dee641bb7
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     add a
+  
+
+
+
   $ cd ..
 
 Exchange Test
@@ -167,29 +238,32 @@
   added 4 changesets with 4 changes to 4 files (+1 heads)
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
+  5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
 
 Rollback//Transaction support
 
   $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
   $ hg debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
+  5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
   aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 {'date': '1340 0', 'user': 'test'}
   $ hg rollback -n
   repository tip rolled back to revision 3 (undo debugobsolete)
   $ hg rollback
   repository tip rolled back to revision 3 (undo debugobsolete)
   $ hg debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
+  5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
 
   $ cd ..
 
@@ -204,10 +278,11 @@
   adding file changes
   added 4 changesets with 4 changes to 4 files (+1 heads)
   $ hg -R tmpd debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
+  5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
 
 Check obsolete keys are exchanged only if source has an obsolete store
 
@@ -226,12 +301,18 @@
   updating to branch default
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg -R clone-dest log -G --hidden
-  @  changeset:   5:5601fb93a350
+  @  changeset:   6:6f9641995072
   |  tag:         tip
   |  parent:      1:7c3bad9141dc
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     add new_3_c
+  |  summary:     add n3w_3_c
+  |
+  | x  changeset:   5:5601fb93a350
+  |/   parent:      1:7c3bad9141dc
+  |    user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     add new_3_c
   |
   | x  changeset:   4:ca819180edb9
   |/   parent:      1:7c3bad9141dc
@@ -261,10 +342,11 @@
      summary:     add a
   
   $ hg -R clone-dest debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
+  5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
 
 
 Destination repo has existing data
@@ -285,10 +367,11 @@
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ hg debugobsolete
   2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
+  5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
 
 
 On push
@@ -299,10 +382,11 @@
   no changes found
   [1]
   $ hg -R ../tmpc debugobsolete
-  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f 0 {'date': '56 12', 'user': 'test'}
+  245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
   cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
   ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
   1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
+  5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
   2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
 
 detect outgoing obsolete and unstable
@@ -310,12 +394,12 @@
 
 
   $ hg glog
-  o  changeset:   3:5601fb93a350
+  o  changeset:   3:6f9641995072
   |  tag:         tip
   |  parent:      1:7c3bad9141dc
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     add new_3_c
+  |  summary:     add n3w_3_c
   |
   | o  changeset:   2:245bde4270cd
   |/   user:        test
@@ -332,34 +416,34 @@
      date:        Thu Jan 01 00:00:00 1970 +0000
      summary:     add a
   
-  $ hg up 'desc("new_3_c")'
+  $ hg up 'desc("n3w_3_c")'
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ mkcommit original_d
   $ mkcommit original_e
   $ hg debugobsolete `getid original_d` -d '0 0'
   $ hg log -r 'obsolete()'
-  changeset:   4:7c694bff0650
+  changeset:   4:94b33453f93b
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     add original_d
   
   $ hg glog -r '::unstable()'
-  @  changeset:   5:6e572121998e
+  @  changeset:   5:cda648ca50f5
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     add original_e
   |
-  x  changeset:   4:7c694bff0650
+  x  changeset:   4:94b33453f93b
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     add original_d
   |
-  o  changeset:   3:5601fb93a350
+  o  changeset:   3:6f9641995072
   |  parent:      1:7c3bad9141dc
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     add new_3_c
+  |  summary:     add n3w_3_c
   |
   o  changeset:   1:7c3bad9141dc
   |  user:        test
@@ -377,7 +461,7 @@
   $ hg push ../tmpc/ -r 'desc("original_d")'
   pushing to ../tmpc/
   searching for changes
-  abort: push includes an obsolete changeset: 7c694bff0650!
+  abort: push includes obsolete changeset: 94b33453f93b!
   [255]
 
 refuse to push unstable changeset
@@ -385,7 +469,7 @@
   $ hg push ../tmpc/
   pushing to ../tmpc/
   searching for changes
-  abort: push includes an unstable changeset: 6e572121998e!
+  abort: push includes unstable changeset: cda648ca50f5!
   [255]
 
 Test that extinct changesets are properly detected
@@ -413,18 +497,18 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     add original_c
   
-  changeset:   3:5601fb93a350
+  changeset:   3:6f9641995072
   parent:      1:7c3bad9141dc
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     add new_3_c
+  summary:     add n3w_3_c
   
-  changeset:   4:7c694bff0650
+  changeset:   4:94b33453f93b
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     add original_d
   
-  changeset:   5:6e572121998e
+  changeset:   5:cda648ca50f5
   tag:         tip
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
@@ -449,22 +533,22 @@
 Do not warn about new head when the new head is a successor of a remote one
 
   $ hg glog
-  @  changeset:   5:6e572121998e
+  @  changeset:   5:cda648ca50f5
   |  tag:         tip
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     add original_e
   |
-  x  changeset:   4:7c694bff0650
+  x  changeset:   4:94b33453f93b
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     add original_d
   |
-  o  changeset:   3:5601fb93a350
+  o  changeset:   3:6f9641995072
   |  parent:      1:7c3bad9141dc
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
-  |  summary:     add new_3_c
+  |  summary:     add n3w_3_c
   |
   | o  changeset:   2:245bde4270cd
   |/   user:        test
@@ -481,7 +565,7 @@
      date:        Thu Jan 01 00:00:00 1970 +0000
      summary:     add a
   
-  $ hg up -q 'desc(new_3_c)'
+  $ hg up -q 'desc(n3w_3_c)'
   $ mkcommit obsolete_e
   created new head
   $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
@@ -498,10 +582,10 @@
   $ echo '[extensions]' >> $HGRCPATH
   $ echo "obs=!" >> $HGRCPATH
   $ hg log -r tip
-  obsolete feature not enabled but 7 markers found!
-  changeset:   6:d6a026544050
+  obsolete feature not enabled but 8 markers found!
+  changeset:   6:3de5eca88c00
   tag:         tip
-  parent:      3:5601fb93a350
+  parent:      3:6f9641995072
   user:        test
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     add obsolete_e
--- a/tests/test-patchbomb.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-patchbomb.t	Fri Oct 19 01:34:50 2012 -0500
@@ -149,29 +149,31 @@
   $ hg email -m test.mbox -f quux -t foo -c bar -s test 0:tip \
   > --config extensions.progress= --config progress.assume-tty=1 \
   > --config progress.delay=0 --config progress.refresh=0 \
-  > --config progress.width=60 2>&1 | \
-  > python "$TESTDIR/filtercr.py"
+  > --config progress.width=60
   this patch series consists of 2 patches.
   
   
   Write the introductory message for the patch series.
   
-  
-  sending [                                             ] 0/3
-  sending [                                             ] 0/3
-                                                              
-                                                              
-  sending [==============>                              ] 1/3
-  sending [==============>                              ] 1/3
-                                                              
-                                                              
-  sending [=============================>               ] 2/3
-  sending [=============================>               ] 2/3
+  \r (no-eol) (esc)
+  sending [                                             ] 0/3\r (no-eol) (esc)
+  sending [                                             ] 0/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  sending [==============>                              ] 1/3\r (no-eol) (esc)
+  sending [==============>                              ] 1/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  sending [=============================>               ] 2/3\r (no-eol) (esc)
+  sending [=============================>               ] 2/3\r (no-eol) (esc)
                                                               \r (esc)
   sending [PATCH 0 of 2] test ...
   sending [PATCH 1 of 2] a ...
   sending [PATCH 2 of 2] b ...
-  
 
   $ cd ..
 
@@ -1877,7 +1879,7 @@
   +b
   
 
-test mutiple flags for single patch:
+test multiple flags for single patch:
   $ hg email --date '1970-1-1 0:1' -n --flag fooFlag --flag barFlag -f quux -t foo \
   >  -c bar -s test -r 2
   this patch series consists of 1 patches.
--- a/tests/test-phases-exchange.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-phases-exchange.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
@@ -84,7 +84,7 @@
   
 
 pull did not update ../alpha state.
-push from alpha to beta should update phase even if nothing is transfered
+push from alpha to beta should update phase even if nothing is transferred
 
   $ cd ../alpha
   $ hgph # not updated by remote pull
@@ -1062,4 +1062,5 @@
   |
   o  0 public a-A - 054250a37db4
   
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
--- a/tests/test-progress.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-progress.t	Fri Oct 19 01:34:50 2012 -0500
@@ -46,104 +46,96 @@
 
 test default params, display nothing because of delay
 
-  $ hg -y loop 3 2>&1 | "$TESTDIR/filtercr.py"
-  
+  $ hg -y loop 3
   $ echo "delay=0" >> $HGRCPATH
   $ echo "refresh=0" >> $HGRCPATH
 
 test with delay=0, refresh=0
 
-  $ hg -y loop 3 2>&1 | "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/3
-  loop [===============>                                ] 1/3
-  loop [===============================>                ] 2/3
-                                                              \r (esc)
+  $ hg -y loop 3
+  \r (no-eol) (esc)
+  loop [                                                ] 0/3\r (no-eol) (esc)
+  loop [===============>                                ] 1/3\r (no-eol) (esc)
+  loop [===============================>                ] 2/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 
 test nested short-lived topics (which shouldn't display with nestdelay):
 
-  $ hg -y loop 3 --nested 2>&1 | \
-  > python "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/3
-  loop [===============>                                ] 1/3
-  loop [===============================>                ] 2/3
-                                                              \r (esc)
+  $ hg -y loop 3 --nested
+  \r (no-eol) (esc)
+  loop [                                                ] 0/3\r (no-eol) (esc)
+  loop [===============>                                ] 1/3\r (no-eol) (esc)
+  loop [===============================>                ] 2/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 
-  $ hg --config progress.changedelay=0 -y loop 3 --nested 2>&1 | \
-  > python "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/3
-  nested [                                              ] 0/2
-  nested [======================>                       ] 1/2
-  loop [===============>                                ] 1/3
-  nested [                                              ] 0/2
-  nested [======================>                       ] 1/2
-  loop [===============================>                ] 2/3
-  nested [                                              ] 0/2
-  nested [======================>                       ] 1/2
-                                                              \r (esc)
+  $ hg --config progress.changedelay=0 -y loop 3 --nested
+  \r (no-eol) (esc)
+  loop [                                                ] 0/3\r (no-eol) (esc)
+  nested [                                              ] 0/2\r (no-eol) (esc)
+  nested [======================>                       ] 1/2\r (no-eol) (esc)
+  loop [===============>                                ] 1/3\r (no-eol) (esc)
+  nested [                                              ] 0/2\r (no-eol) (esc)
+  nested [======================>                       ] 1/2\r (no-eol) (esc)
+  loop [===============================>                ] 2/3\r (no-eol) (esc)
+  nested [                                              ] 0/2\r (no-eol) (esc)
+  nested [======================>                       ] 1/2\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 
 test two topics being printed in parallel (as when we're doing a local
 --pull clone, where you get the unbundle and bundle progress at the
 same time):
-  $ hg loop 3 --parallel 2>&1 | python "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/3
-  loop [===============>                                ] 1/3
-  loop [===============================>                ] 2/3
-                                                              \r (esc)
+  $ hg loop 3 --parallel
+  \r (no-eol) (esc)
+  loop [                                                ] 0/3\r (no-eol) (esc)
+  loop [===============>                                ] 1/3\r (no-eol) (esc)
+  loop [===============================>                ] 2/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 test refresh is taken into account
 
-  $ hg -y --config progress.refresh=100 loop 3 2>&1 | "$TESTDIR/filtercr.py"
-  
+  $ hg -y --config progress.refresh=100 loop 3
 
 test format options 1
 
-  $ hg -y --config 'progress.format=number topic item+2' loop 2 2>&1 \
-  > | "$TESTDIR/filtercr.py"
-  
-  0/2 loop lo
-  1/2 loop lo
-                                                              \r (esc)
+  $ hg -y --config 'progress.format=number topic item+2' loop 2
+  \r (no-eol) (esc)
+  0/2 loop lo\r (no-eol) (esc)
+  1/2 loop lo\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 test format options 2
 
-  $ hg -y --config 'progress.format=number item-3 bar' loop 2 2>&1 \
-  > | "$TESTDIR/filtercr.py"
-  
-  0/2 p.0 [                                                 ]
-  1/2 p.1 [=======================>                         ]
-                                                              \r (esc)
+  $ hg -y --config 'progress.format=number item-3 bar' loop 2
+  \r (no-eol) (esc)
+  0/2 p.0 [                                                 ]\r (no-eol) (esc)
+  1/2 p.1 [=======================>                         ]\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 test format options and indeterminate progress
 
-  $ hg -y --config 'progress.format=number item bar' loop -- -2 2>&1 \
-  > | "$TESTDIR/filtercr.py"
-  
-  0 loop.0               [ <=>                              ]
-  1 loop.1               [  <=>                             ]
-                                                              \r (esc)
+  $ hg -y --config 'progress.format=number item bar' loop -- -2
+  \r (no-eol) (esc)
+  0 loop.0               [ <=>                              ]\r (no-eol) (esc)
+  1 loop.1               [  <=>                             ]\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 make sure things don't fall over if count > total
 
-  $ hg -y loop --total 4 6 2>&1 | "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/4
-  loop [===========>                                    ] 1/4
-  loop [=======================>                        ] 2/4
-  loop [===================================>            ] 3/4
-  loop [===============================================>] 4/4
-  loop [ <=>                                            ] 5/4
-                                                              \r (esc)
+  $ hg -y loop --total 4 6
+  \r (no-eol) (esc)
+  loop [                                                ] 0/4\r (no-eol) (esc)
+  loop [===========>                                    ] 1/4\r (no-eol) (esc)
+  loop [=======================>                        ] 2/4\r (no-eol) (esc)
+  loop [===================================>            ] 3/4\r (no-eol) (esc)
+  loop [===============================================>] 4/4\r (no-eol) (esc)
+  loop [ <=>                                            ] 5/4\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 test immediate progress completion
 
-  $ hg -y loop 0 2>&1 | "$TESTDIR/filtercr.py"
-  
+  $ hg -y loop 0
 
 test delay time estimates
 
@@ -173,44 +165,44 @@
   $ echo "delay=25" >> $HGRCPATH
   $ echo "width=60" >> $HGRCPATH
 
-  $ hg -y loop 8 2>&1 | python "$TESTDIR/filtercr.py"
-  
-  loop [=========>                                ] 2/8 1m07s
-  loop [===============>                            ] 3/8 56s
-  loop [=====================>                      ] 4/8 45s
-  loop [==========================>                 ] 5/8 34s
-  loop [================================>           ] 6/8 23s
-  loop [=====================================>      ] 7/8 12s
-                                                              \r (esc)
+  $ hg -y loop 8
+  \r (no-eol) (esc)
+  loop [=========>                                ] 2/8 1m07s\r (no-eol) (esc)
+  loop [===============>                            ] 3/8 56s\r (no-eol) (esc)
+  loop [=====================>                      ] 4/8 45s\r (no-eol) (esc)
+  loop [==========================>                 ] 5/8 34s\r (no-eol) (esc)
+  loop [================================>           ] 6/8 23s\r (no-eol) (esc)
+  loop [=====================================>      ] 7/8 12s\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
-  $ MOCKTIME=10000 hg -y loop 4 2>&1 | python "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/4
-  loop [=========>                                ] 1/4 8h21m
-  loop [====================>                     ] 2/4 5h34m
-  loop [==============================>           ] 3/4 2h47m
-                                                              \r (esc)
+  $ MOCKTIME=10000 hg -y loop 4
+  \r (no-eol) (esc)
+  loop [                                                ] 0/4\r (no-eol) (esc)
+  loop [=========>                                ] 1/4 8h21m\r (no-eol) (esc)
+  loop [====================>                     ] 2/4 5h34m\r (no-eol) (esc)
+  loop [==============================>           ] 3/4 2h47m\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
-  $ MOCKTIME=1000000 hg -y loop 4 2>&1 | python "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/4
-  loop [=========>                                ] 1/4 5w00d
-  loop [====================>                     ] 2/4 3w03d
-  loop [=============================>           ] 3/4 11d14h
-                                                              \r (esc)
+  $ MOCKTIME=1000000 hg -y loop 4
+  \r (no-eol) (esc)
+  loop [                                                ] 0/4\r (no-eol) (esc)
+  loop [=========>                                ] 1/4 5w00d\r (no-eol) (esc)
+  loop [====================>                     ] 2/4 3w03d\r (no-eol) (esc)
+  loop [=============================>           ] 3/4 11d14h\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 
-  $ MOCKTIME=14000000 hg -y loop 4 2>&1 | python "$TESTDIR/filtercr.py"
-  
-  loop [                                                ] 0/4
-  loop [=========>                                ] 1/4 1y18w
-  loop [===================>                     ] 2/4 46w03d
-  loop [=============================>           ] 3/4 23w02d
-                                                              \r (esc)
+  $ MOCKTIME=14000000 hg -y loop 4
+  \r (no-eol) (esc)
+  loop [                                                ] 0/4\r (no-eol) (esc)
+  loop [=========>                                ] 1/4 1y18w\r (no-eol) (esc)
+  loop [===================>                     ] 2/4 46w03d\r (no-eol) (esc)
+  loop [=============================>           ] 3/4 23w02d\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
 
 Time estimates should not fail when there's no end point:
-  $ hg -y loop -- -4 2>&1 | python "$TESTDIR/filtercr.py"
-  
-  loop [ <=>                                              ] 2
-  loop [  <=>                                             ] 3
-                                                              \r (esc)
+  $ hg -y loop -- -4
+  \r (no-eol) (esc)
+  loop [ <=>                                              ] 2\r (no-eol) (esc)
+  loop [  <=>                                             ] 3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
--- a/tests/test-pull-branch.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-pull-branch.t	Fri Oct 19 01:34:50 2012 -0500
@@ -142,7 +142,7 @@
   $ hg branch branchC
   marked working directory as branch branchC
   (branches are permanent and global, did you want a bookmark?)
-  $ echo b1 > bar 
+  $ echo b1 > bar
   $ hg ci -Am "commit on branchC on tt"
   adding bar
 
@@ -151,7 +151,7 @@
   $ cd ../t
   $ hg up -C default
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  $ echo a1 > bar 
+  $ echo a1 > bar
   $ hg ci -Am "commit on default on t"
   adding bar
 
--- a/tests/test-pull-http.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-pull-http.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
   $ hg init test
   $ cd test
@@ -28,7 +28,7 @@
   $ cat test3/.hg/hgrc
   [paths]
   default = http://foo@localhost:$HGPORT/
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
 expect error, cloning not allowed
 
@@ -40,7 +40,7 @@
   requesting all changes
   abort: authorization failed
   [255]
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
 serve errors
 
@@ -49,7 +49,7 @@
   >     hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
   >     cat hg.pid >> $DAEMON_PIDS
   >     hg --cwd ../test pull http://localhost:$HGPORT/
-  >     kill `cat hg.pid`
+  >     "$TESTDIR/killdaemons.py" hg.pid
   >     echo % serve errors
   >     cat errors.log
   > }
--- a/tests/test-pull.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-pull.t	Fri Oct 19 01:34:50 2012 -0500
@@ -68,7 +68,7 @@
 
 Test 'file:' uri handling:
 
-  $ hg pull -q file://../test-doesnt-exist
+  $ hg pull -q file://../test-does-not-exist
   abort: file:// URLs can only refer to localhost
   [255]
 
--- a/tests/test-push-http.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-push-http.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
   $ hg init test
   $ cd test
@@ -16,9 +16,11 @@
   >     hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log
   >     cat hg.pid >> $DAEMON_PIDS
   >     hg --cwd ../test2 push http://localhost:$HGPORT/
-  >     "$TESTDIR/killdaemons.py"
+  >     exitstatus=$?
+  >     "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   >     echo % serve errors
   >     cat errors.log
+  >     return $exitstatus
   > }
   $ cd ../test
 
@@ -27,10 +29,9 @@
   $ req
   pushing to http://localhost:$HGPORT/
   searching for changes
-  remote: ssl required
-  remote: ssl required
-  updating cb9a9f314b8b to public failed!
+  abort: HTTP Error 403: ssl required
   % serve errors
+  [255]
 
 expect authorization error
 
@@ -41,6 +42,7 @@
   searching for changes
   abort: authorization failed
   % serve errors
+  [255]
 
 expect authorization error: must have authorized user
 
@@ -50,12 +52,14 @@
   searching for changes
   abort: authorization failed
   % serve errors
+  [255]
 
 expect success
 
   $ echo 'allow_push = *' >> .hg/hgrc
   $ echo '[hooks]' >> .hg/hgrc
   $ echo "changegroup = python \"$TESTDIR/printenv.py\" changegroup 0" >> .hg/hgrc
+  $ echo "pushkey = python \"$TESTDIR/printenv.py\" pushkey 0" >> .hg/hgrc
   $ req
   pushing to http://localhost:$HGPORT/
   searching for changes
@@ -64,6 +68,7 @@
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files
   remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http:*: (glob)
+  remote: pushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 HG_RET=1
   % serve errors
   $ hg rollback
   repository tip rolled back to revision 0 (undo serve)
@@ -80,6 +85,7 @@
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files
   remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http:*: (glob)
+  remote: pushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 HG_RET=1
   % serve errors
   $ hg rollback
   repository tip rolled back to revision 0 (undo serve)
@@ -96,10 +102,42 @@
   remote: adding file changes
   remote: added 1 changesets with 1 changes to 1 files
   remote: changegroup hook: HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_URL=remote:http:*: (glob)
+  remote: pushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 HG_RET=1
   % serve errors
   $ hg rollback
   repository tip rolled back to revision 0 (undo serve)
 
+expect push success, phase change failure
+
+  $ echo '[web]' > .hg/hgrc
+  $ echo 'push_ssl = false' >> .hg/hgrc
+  $ echo 'allow_push = *' >> .hg/hgrc
+  $ echo '[hooks]' >> .hg/hgrc
+  $ echo 'prepushkey = python "$TESTDIR/printenv.py" prepushkey 1' >> .hg/hgrc
+  $ req
+  pushing to http://localhost:$HGPORT/
+  searching for changes
+  remote: adding changesets
+  remote: adding manifests
+  remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files
+  remote: prepushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1
+  updating ba677d0156c1 to public failed!
+  % serve errors
+
+expect phase change success
+
+  $ echo 'prepushkey = python "$TESTDIR/printenv.py" prepushkey 0' >> .hg/hgrc
+  $ req
+  pushing to http://localhost:$HGPORT/
+  searching for changes
+  no changes found
+  remote: prepushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1
+  % serve errors
+  [1]
+  $ hg rollback
+  repository tip rolled back to revision 0 (undo serve)
+
 expect authorization error: all users denied
 
   $ echo '[web]' > .hg/hgrc
@@ -110,6 +148,7 @@
   searching for changes
   abort: authorization failed
   % serve errors
+  [255]
 
 expect authorization error: some users denied, users must be authenticated
 
@@ -119,5 +158,6 @@
   searching for changes
   abort: authorization failed
   % serve errors
+  [255]
 
   $ cd ..
--- a/tests/test-qrecord.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-qrecord.t	Fri Oct 19 01:34:50 2012 -0500
@@ -74,7 +74,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help record" to show more info
+  use "hg -v help record" to show the global options
 
 help (no mq, so no qrecord)
 
@@ -85,7 +85,7 @@
   
       See "hg help qnew" & "hg help record" for more information and usage.
   
-  use "hg -v help qrecord" to show more info
+  use "hg -v help qrecord" to show the global options
 
   $ hg init a
 
@@ -108,20 +108,20 @@
 
 help (bad mq)
 
-  $ echo "mq=nonexistant" >> $HGRCPATH
+  $ echo "mq=nonexistent" >> $HGRCPATH
   $ hg help qrecord
-  *** failed to import extension mq from nonexistant: [Errno 2] * (glob)
+  *** failed to import extension mq from nonexistent: [Errno 2] * (glob)
   hg qrecord [OPTION]... PATCH [FILE]...
   
   interactively record a new patch
   
       See "hg help qnew" & "hg help record" for more information and usage.
   
-  use "hg -v help qrecord" to show more info
+  use "hg -v help qrecord" to show the global options
 
 help (mq present)
 
-  $ sed 's/mq=nonexistant/mq=/' $HGRCPATH > hgrc.tmp
+  $ sed 's/mq=nonexistent/mq=/' $HGRCPATH > hgrc.tmp
   $ mv hgrc.tmp $HGRCPATH
 
   $ hg help qrecord
@@ -150,7 +150,7 @@
   
   [+] marked option can be specified multiple times
   
-  use "hg -v help qrecord" to show more info
+  use "hg -v help qrecord" to show the global options
 
   $ cd a
 
--- a/tests/test-rebase-abort.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-rebase-abort.t	Fri Oct 19 01:34:50 2012 -0500
@@ -78,7 +78,7 @@
   $ cd ..
 
 
-Constrcut new repo:
+Construct new repo:
 
   $ hg init b
   $ cd b
--- a/tests/test-rebase-bookmarks.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-rebase-bookmarks.t	Fri Oct 19 01:34:50 2012 -0500
@@ -23,7 +23,7 @@
   adding b
   $ hg book 'X'
   $ hg book 'Y'
- 
+
   $ echo c > c
   $ hg ci -Am C
   adding c
@@ -38,7 +38,7 @@
 
   $ hg book W
 
-  $ hg tglog 
+  $ hg tglog
   @  3: 'D' bookmarks: W
   |
   | o  2: 'C' bookmarks: Y Z
@@ -47,7 +47,7 @@
   |/
   o  0: 'A' bookmarks:
   
- 
+
 Move only rebased bookmarks
 
   $ cd ..
@@ -59,7 +59,7 @@
   $ hg rebase -s Y -d 3
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
 
-  $ hg tglog 
+  $ hg tglog
   @  3: 'C' bookmarks: Y Z
   |
   o  2: 'D' bookmarks: W
@@ -79,7 +79,7 @@
   $ hg rebase -s 1 -d 3
   saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
 
-  $ hg tglog 
+  $ hg tglog
   @  3: 'C' bookmarks: Y Z
   |
   o  2: 'B' bookmarks: X
--- a/tests/test-rebase-cache.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-rebase-cache.t	Fri Oct 19 01:34:50 2012 -0500
@@ -73,7 +73,7 @@
   $ hg clone -q -u . a a1
   $ cd a1
 
-  $ hg tglog  
+  $ hg tglog
   @  8: 'F' branch3
   |
   o  7: 'branch3' branch3
@@ -120,7 +120,7 @@
   2: 'B' branch1
   0: 'A' 
 
-  $ hg tglog  
+  $ hg tglog
   @  8: 'E' branch3
   |
   o  7: 'D' branch3
@@ -244,7 +244,7 @@
   2: 'B' branch1
   0: 'A' 
 
-  $ hg tglog   
+  $ hg tglog
   @  7: 'F' branch2
   |
   o  6: 'E' branch2
--- a/tests/test-rebase-conflicts.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-rebase-conflicts.t	Fri Oct 19 01:34:50 2012 -0500
@@ -69,7 +69,7 @@
 
 Try to continue without solving the conflict:
 
-  $ hg rebase --continue 
+  $ hg rebase --continue
   abort: unresolved merge conflicts (see hg help resolve)
   [255]
 
--- a/tests/test-rebase-mq-skip.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-rebase-mq-skip.t	Fri Oct 19 01:34:50 2012 -0500
@@ -39,7 +39,7 @@
   $ hg add p1
   $ hg qref -m P1
 
-  $ hg export qtip > p1.patch 
+  $ hg export qtip > p1.patch
 
   $ hg up -q -C 1
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-rebase-obsolete.t	Fri Oct 19 01:34:50 2012 -0500
@@ -0,0 +1,282 @@
+==========================
+Test rebase with obsolete
+==========================
+
+Enable obsolete
+
+  $ cat > ${TESTTMP}/obs.py << EOF
+  > import mercurial.obsolete
+  > mercurial.obsolete._enabled = True
+  > EOF
+  $ cat >> $HGRCPATH << EOF
+  > [ui]
+  > logtemplate= {rev}:{node|short} {desc|firstline}
+  > [phases]
+  > publish=False
+  > [extensions]
+  > rebase=
+  > 
+  > obs=${TESTTMP}/obs.py
+  > EOF
+
+Setup rebase canonical repo
+
+  $ hg init base
+  $ cd base
+  $ hg unbundle "$TESTDIR/bundles/rebase.hg"
+  adding changesets
+  adding manifests
+  adding file changes
+  added 8 changesets with 7 changes to 7 files (+2 heads)
+  (run 'hg heads' to see heads, 'hg merge' to merge)
+  $ hg up tip
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg log -G
+  @  7:02de42196ebe H
+  |
+  | o  6:eea13746799a G
+  |/|
+  o |  5:24b6387c8c8c F
+  | |
+  | o  4:9520eea781bc E
+  |/
+  | o  3:32af7686d403 D
+  | |
+  | o  2:5fddd98957c8 C
+  | |
+  | o  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ cd ..
+
+simple rebase
+---------------------------------
+
+  $ hg clone base simple
+  updating to branch default
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd simple
+  $ hg up 32af7686d403
+  3 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  $ hg rebase -d eea13746799a
+  $ hg log -G
+  @  10:8eeb3c33ad33 D
+  |
+  o  9:2327fea05063 C
+  |
+  o  8:e4e5be0395b2 B
+  |
+  | o  7:02de42196ebe H
+  | |
+  o |  6:eea13746799a G
+  |\|
+  | o  5:24b6387c8c8c F
+  | |
+  o |  4:9520eea781bc E
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg log --hidden -G
+  @  10:8eeb3c33ad33 D
+  |
+  o  9:2327fea05063 C
+  |
+  o  8:e4e5be0395b2 B
+  |
+  | o  7:02de42196ebe H
+  | |
+  o |  6:eea13746799a G
+  |\|
+  | o  5:24b6387c8c8c F
+  | |
+  o |  4:9520eea781bc E
+  |/
+  | x  3:32af7686d403 D
+  | |
+  | x  2:5fddd98957c8 C
+  | |
+  | x  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg debugobsolete
+  42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 {'date': '*', 'user': 'test'} (glob)
+  5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 {'date': '*', 'user': 'test'} (glob)
+  32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 {'date': '*', 'user': 'test'} (glob)
+
+
+  $ cd ..
+
+empty changeset
+---------------------------------
+
+  $ hg clone base empty
+  updating to branch default
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd empty
+  $ hg up eea13746799a
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+We make a copy of both the first changeset in the rebased set and of another
+changeset in that set.
+
+  $ hg graft 42ccdea3bb16 32af7686d403
+  grafting revision 1
+  grafting revision 3
+  $ hg rebase  -s 42ccdea3bb16 -d .
+  $ hg log -G
+  @  10:5ae4c968c6ac C
+  |
+  o  9:08483444fef9 D
+  |
+  o  8:8877864f1edb B
+  |
+  | o  7:02de42196ebe H
+  | |
+  o |  6:eea13746799a G
+  |\|
+  | o  5:24b6387c8c8c F
+  | |
+  o |  4:9520eea781bc E
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg log --hidden -G
+  @  10:5ae4c968c6ac C
+  |
+  o  9:08483444fef9 D
+  |
+  o  8:8877864f1edb B
+  |
+  | o  7:02de42196ebe H
+  | |
+  o |  6:eea13746799a G
+  |\|
+  | o  5:24b6387c8c8c F
+  | |
+  o |  4:9520eea781bc E
+  |/
+  | x  3:32af7686d403 D
+  | |
+  | x  2:5fddd98957c8 C
+  | |
+  | x  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg debugobsolete
+  42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 08483444fef91d6224f6655ee586a65d263ad34c 0 {'date': '*', 'user': 'test'} (glob)
+  5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 {'date': '*', 'user': 'test'} (glob)
+  32af7686d403cf45b5d95f2d70cebea587ac806a 5ae4c968c6aca831df823664e706c9d4aa34473d 0 {'date': '*', 'user': 'test'} (glob)
+
+
+  $ cd ..
+
+collapse rebase
+---------------------------------
+
+  $ hg clone base collapse
+  updating to branch default
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd collapse
+  $ hg rebase  -s 42ccdea3bb16 -d eea13746799a --collapse
+  $ hg log -G
+  @  8:4dc2197e807b Collapsed revision
+  |
+  | o  7:02de42196ebe H
+  | |
+  o |  6:eea13746799a G
+  |\|
+  | o  5:24b6387c8c8c F
+  | |
+  o |  4:9520eea781bc E
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg log --hidden -G
+  @  8:4dc2197e807b Collapsed revision
+  |
+  | o  7:02de42196ebe H
+  | |
+  o |  6:eea13746799a G
+  |\|
+  | o  5:24b6387c8c8c F
+  | |
+  o |  4:9520eea781bc E
+  |/
+  | x  3:32af7686d403 D
+  | |
+  | x  2:5fddd98957c8 C
+  | |
+  | x  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg id --debug
+  4dc2197e807bae9817f09905b50ab288be2dbbcf tip
+  $ hg debugobsolete
+  42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 {'date': '*', 'user': 'test'} (glob)
+  5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 {'date': '*', 'user': 'test'} (glob)
+  32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 {'date': '*', 'user': 'test'} (glob)
+
+  $ cd ..
+
+Rebase set has hidden descendants
+---------------------------------
+
+We rebase a changeset which has hidden descendants. The hidden changesets must
+not be rebased.
+
+  $ hg clone base hidden
+  updating to branch default
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd hidden
+  $ hg rebase -s 5fddd98957c8 -d eea13746799a
+  $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe
+  $ hg log -G
+  @  10:7c6027df6a99 B
+  |
+  | o  9:cf44d2f5a9f4 D
+  | |
+  | o  8:e273c5e7d2d2 C
+  | |
+  o |  7:02de42196ebe H
+  | |
+  | o  6:eea13746799a G
+  |/|
+  o |  5:24b6387c8c8c F
+  | |
+  | o  4:9520eea781bc E
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg log --hidden -G
+  @  10:7c6027df6a99 B
+  |
+  | o  9:cf44d2f5a9f4 D
+  | |
+  | o  8:e273c5e7d2d2 C
+  | |
+  o |  7:02de42196ebe H
+  | |
+  | o  6:eea13746799a G
+  |/|
+  o |  5:24b6387c8c8c F
+  | |
+  | o  4:9520eea781bc E
+  |/
+  | x  3:32af7686d403 D
+  | |
+  | x  2:5fddd98957c8 C
+  | |
+  | x  1:42ccdea3bb16 B
+  |/
+  o  0:cd010b8cd998 A
+  
+  $ hg debugobsolete
+  5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 {'date': '*', 'user': 'test'} (glob)
+  32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 {'date': '*', 'user': 'test'} (glob)
+  42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 {'date': '*', 'user': 'test'} (glob)
+
+  $ cd ..
--- a/tests/test-record.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-record.t	Fri Oct 19 01:34:50 2012 -0500
@@ -246,10 +246,19 @@
   +9
   +10
   
+Modify end of plain file with username unset
+
+  $ echo 11 >> plain
+  $ unset HGUSER
+  $ hg record --config ui.username= -d '8 0' -m end plain
+  abort: no username supplied (see "hg help config")
+  [255]
+
 
 Modify end of plain file
 
-  $ echo 11 >> plain
+  $ HGUSER="test"
+  $ export HGUSER
   $ hg record -d '8 0' -m end plain <<EOF
   > y
   > y
--- a/tests/test-rename.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-rename.t	Fri Oct 19 01:34:50 2012 -0500
@@ -66,7 +66,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ rm d2/c
 
-rename --after a single file to a nonexistant target filename
+rename --after a single file to a nonexistent target filename
 
   $ hg rename --after d1/a dummy
   d1/a: not recording move - dummy does not exist (glob)
--- a/tests/test-revset.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-revset.t	Fri Oct 19 01:34:50 2012 -0500
@@ -332,6 +332,9 @@
   0
   $ log 'merge()'
   6
+  $ log 'branchpoint()'
+  1
+  4
   $ log 'modifies(b)'
   4
   $ log 'modifies("path:b")'
@@ -362,6 +365,13 @@
   $ log 'parents(merge())'
   4
   5
+  $ log 'p1(branchpoint())'
+  0
+  2
+  $ log 'p2(branchpoint())'
+  $ log 'parents(branchpoint())'
+  0
+  2
   $ log 'removes(a)'
   2
   6
--- a/tests/test-run-tests.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-run-tests.t	Fri Oct 19 01:34:50 2012 -0500
@@ -52,6 +52,28 @@
   $ echo 'foo (re)'
   foo (re)
 
+Windows: \r\n is handled like \n and can be escaped:
+
+#if windows
+  $ printf 'crlf\r\ncr\r\tcrlf\r\ncrlf\r\n'
+  crlf
+  cr\r (no-eol) (esc)
+  \tcrlf (esc)
+  crlf\r (esc)
+#endif
+
+Combining esc with other markups - and handling lines ending with \r instead of \n:
+
+  $ printf 'foo/bar\r'
+  foo/bar\r (no-eol) (glob) (esc)
+#if windows
+  $ printf 'foo\\bar\r'
+  foo/bar\r (no-eol) (glob) (esc)
+#endif
+  $ printf 'foo/bar\rfoo/bar\r'
+  foo.bar\r \(no-eol\) (re) (esc)
+  foo.bar\r \(no-eol\) (re)
+
 testing hghave
 
   $ "$TESTDIR/hghave" true
@@ -95,5 +117,5 @@
 
 Exit code:
 
-  $ (exit 1) 
+  $ (exit 1)
   [1]
--- a/tests/test-share.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-share.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
   $ echo "[extensions]"      >> $HGRCPATH
   $ echo "share = "          >> $HGRCPATH
@@ -127,3 +127,8 @@
   c2e0ac586386 tip
 
   $ cd ..
+
+Explicitly kill daemons to let the test exit on Windows
+
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
+
--- a/tests/test-simplemerge.py	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-simplemerge.py	Fri Oct 19 01:34:50 2012 -0500
@@ -138,7 +138,7 @@
                     ['aaa', 'bbb', 'zz'],
                     ['zz'])
 
-        # todo: should use a sentinal at end as from get_matching_blocks
+        # todo: should use a sentinel at end as from get_matching_blocks
         # to match without zz
         self.assertEquals(list(m3.find_sync_regions()),
                           [(0, 1,  2, 3,  0, 1),
@@ -156,7 +156,7 @@
         m3 = Merge3([],
                     ['aaa', 'bbb'],
                     [])
-        # todo: should use a sentinal at end as from get_matching_blocks
+        # todo: should use a sentinel at end as from get_matching_blocks
         # to match without zz
         self.assertEquals(list(m3.find_sync_regions()),
                           [(0, 0,  2, 2,  0, 0)])
--- a/tests/test-static-http.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-static-http.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 #if windows
   $ hg clone http://localhost:$HGPORT/ copy
@@ -25,10 +25,27 @@
   >     httpd.serve_forever()
   > 
   > signal.signal(signal.SIGTERM, lambda x, y: sys.exit(0))
+  > fp = file('dumb.pid', 'wb')
+  > fp.write(str(os.getpid()) + '\n')
+  > fp.close()
   > run()
   > EOF
   $ python dumb.py 2>/dev/null &
-  $ echo $! >> $DAEMON_PIDS
+
+Cannot just read $!, it will not be set to the right value on Windows/MinGW
+
+  $ cat > wait.py <<EOF
+  > import time
+  > while True:
+  >     try:
+  >         if '\n' in file('dumb.pid', 'rb').read():
+  >             break
+  >     except IOError:
+  >         pass
+  >     time.sleep(0.2)
+  > EOF
+  $ python wait.py
+  $ cat dumb.pid >> $DAEMON_PIDS
   $ hg init remote
   $ cd remote
   $ echo foo > bar
@@ -97,8 +114,8 @@
 trying clone -r
 
   $ cd ..
-  $ hg clone -r donotexist static-http://localhost:$HGPORT/remote local0
-  abort: unknown revision 'donotexist'!
+  $ hg clone -r doesnotexist static-http://localhost:$HGPORT/remote local0
+  abort: unknown revision 'doesnotexist'!
   [255]
   $ hg clone -r 0 static-http://localhost:$HGPORT/remote local0
   adding changesets
@@ -171,4 +188,4 @@
   $ hg clone static-http://localhost:$HGPORT/notarepo local3
   abort: 'http://localhost:$HGPORT/notarepo' does not appear to be an hg repository!
   [255]
-  $ kill $!
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
--- a/tests/test-status.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-status.t	Fri Oct 19 01:34:50 2012 -0500
@@ -330,4 +330,9 @@
   $ hg status -A --rev 1 1
   R 1/2/3/4/5/b.txt
 
+#if windows
+  $ hg --config ui.slash=false status -A --rev 1 1
+  R 1\2\3\4\5\b.txt
+#endif
+
   $ cd ..
--- a/tests/test-subrepo-deep-nested-change.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-subrepo-deep-nested-change.t	Fri Oct 19 01:34:50 2012 -0500
@@ -100,7 +100,7 @@
    revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487
 
 Check that deep archiving works
- 
+
   $ cd cloned
   $ echo 'test' > sub1/sub2/test.txt
   $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
--- a/tests/test-subrepo-git.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-subrepo-git.t	Fri Oct 19 01:34:50 2012 -0500
@@ -446,7 +446,7 @@
   $ git rev-parse HEAD
   da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
   $ cd ..
-  $ hg update --clean tip > /dev/null 2>&1 
+  $ hg update --clean tip > /dev/null 2>&1
 
 Sticky subrepository, revision updates
   $ hg id -n
--- a/tests/test-subrepo-missing.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-subrepo-missing.t	Fri Oct 19 01:34:50 2012 -0500
@@ -60,7 +60,7 @@
   warning: subrepo spec file .hgsub not found
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ rm .hgsubstate
-  $ hg up 0  
+  $ hg up 0
   remote changed .hgsubstate which local deleted
   use (c)hanged version or leave (d)eleted? c
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-subrepo-recursion.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-subrepo-recursion.t	Fri Oct 19 01:34:50 2012 -0500
@@ -260,31 +260,33 @@
 Test archiving to a directory tree (the doubled lines in the output
 only show up in the test output, not in real usage):
 
-  $ hg archive --subrepos ../archive 2>&1 | "$TESTDIR/filtercr.py"
-  
-  archiving [                                           ] 0/3
-  archiving [                                           ] 0/3
-  archiving [=============>                             ] 1/3
-  archiving [=============>                             ] 1/3
-  archiving [===========================>               ] 2/3
-  archiving [===========================>               ] 2/3
-  archiving [==========================================>] 3/3
-  archiving [==========================================>] 3/3
-                                                              
-  archiving (foo) [                                     ] 0/3
-  archiving (foo) [                                     ] 0/3
-  archiving (foo) [===========>                         ] 1/3
-  archiving (foo) [===========>                         ] 1/3
-  archiving (foo) [=======================>             ] 2/3
-  archiving (foo) [=======================>             ] 2/3
-  archiving (foo) [====================================>] 3/3
-  archiving (foo) [====================================>] 3/3
-                                                              
-  archiving (foo/bar) [                                 ] 0/1 (glob)
-  archiving (foo/bar) [                                 ] 0/1 (glob)
-  archiving (foo/bar) [================================>] 1/1 (glob)
-  archiving (foo/bar) [================================>] 1/1 (glob)
-                                                              \r (esc)
+  $ hg archive --subrepos ../archive
+  \r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
+  archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
+  archiving (foo) [===========>                         ] 1/3\r (no-eol) (esc)
+  archiving (foo) [===========>                         ] 1/3\r (no-eol) (esc)
+  archiving (foo) [=======================>             ] 2/3\r (no-eol) (esc)
+  archiving (foo) [=======================>             ] 2/3\r (no-eol) (esc)
+  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
+  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+                                                              \r (no-eol) (esc)
   $ find ../archive | sort
   ../archive
   ../archive/.hg_archival.txt
@@ -300,65 +302,69 @@
 
 Test archiving to zip file (unzip output is unstable):
 
-  $ hg archive --subrepos ../archive.zip 2>&1 | "$TESTDIR/filtercr.py"
-  
-  archiving [                                           ] 0/3
-  archiving [                                           ] 0/3
-  archiving [=============>                             ] 1/3
-  archiving [=============>                             ] 1/3
-  archiving [===========================>               ] 2/3
-  archiving [===========================>               ] 2/3
-  archiving [==========================================>] 3/3
-  archiving [==========================================>] 3/3
-                                                              
-  archiving (foo) [                                     ] 0/3
-  archiving (foo) [                                     ] 0/3
-  archiving (foo) [===========>                         ] 1/3
-  archiving (foo) [===========>                         ] 1/3
-  archiving (foo) [=======================>             ] 2/3
-  archiving (foo) [=======================>             ] 2/3
-  archiving (foo) [====================================>] 3/3
-  archiving (foo) [====================================>] 3/3
-                                                              
-  archiving (foo/bar) [                                 ] 0/1 (glob)
-  archiving (foo/bar) [                                 ] 0/1 (glob)
-  archiving (foo/bar) [================================>] 1/1 (glob)
-  archiving (foo/bar) [================================>] 1/1 (glob)
-                                                              \r (esc)
+  $ hg archive --subrepos ../archive.zip
+  \r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
+  archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
+  archiving (foo) [===========>                         ] 1/3\r (no-eol) (esc)
+  archiving (foo) [===========>                         ] 1/3\r (no-eol) (esc)
+  archiving (foo) [=======================>             ] 2/3\r (no-eol) (esc)
+  archiving (foo) [=======================>             ] 2/3\r (no-eol) (esc)
+  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
+  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+                                                              \r (no-eol) (esc)
 
 Test archiving a revision that references a subrepo that is not yet
 cloned:
 
   $ hg clone -U . ../empty
   $ cd ../empty
-  $ hg archive --subrepos -r tip ../archive.tar.gz 2>&1 | "$TESTDIR/filtercr.py"
-  
-  archiving [                                           ] 0/3
-  archiving [                                           ] 0/3
-  archiving [=============>                             ] 1/3
-  archiving [=============>                             ] 1/3
-  archiving [===========================>               ] 2/3
-  archiving [===========================>               ] 2/3
-  archiving [==========================================>] 3/3
-  archiving [==========================================>] 3/3
-                                                              
-  archiving (foo) [                                     ] 0/3
-  archiving (foo) [                                     ] 0/3
-  archiving (foo) [===========>                         ] 1/3
-  archiving (foo) [===========>                         ] 1/3
-  archiving (foo) [=======================>             ] 2/3
-  archiving (foo) [=======================>             ] 2/3
-  archiving (foo) [====================================>] 3/3
-  archiving (foo) [====================================>] 3/3
-                                                              
-  archiving (foo/bar) [                                 ] 0/1 (glob)
-  archiving (foo/bar) [                                 ] 0/1 (glob)
-  archiving (foo/bar) [================================>] 1/1 (glob)
-  archiving (foo/bar) [================================>] 1/1 (glob)
-                                                              
+  $ hg archive --subrepos -r tip ../archive.tar.gz
+  \r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [                                           ] 0/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [=============>                             ] 1/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [===========================>               ] 2/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+  archiving [==========================================>] 3/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
+  archiving (foo) [                                     ] 0/3\r (no-eol) (esc)
+  archiving (foo) [===========>                         ] 1/3\r (no-eol) (esc)
+  archiving (foo) [===========>                         ] 1/3\r (no-eol) (esc)
+  archiving (foo) [=======================>             ] 2/3\r (no-eol) (esc)
+  archiving (foo) [=======================>             ] 2/3\r (no-eol) (esc)
+  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
+  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
+                                                              \r (no-eol) (esc)
+  \r (no-eol) (esc)
+  archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [                                 ] 0/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+  archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc)
+                                                              \r (no-eol) (esc)
   cloning subrepo foo from $TESTTMP/repo/foo
   cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob)
-  
+
 The newly cloned subrepos contain no working copy:
 
   $ hg -R foo summary
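
Note on the archive hunk above: the old pipe through tests/filtercr.py is dropped in favour of the test runner's own output annotations, with "(esc)" marking escaped bytes such as the carriage returns drawn by the progress bar, "(no-eol)" marking expected output that ends without a newline, and "(glob)" keeping path separators portable. For orientation only, a carriage-return filter of the kind being removed needs little more than the following sketch (hypothetical code, not the deleted script verbatim); it reproduces the line-per-redraw form seen in the old expected output:

    import re
    import sys

    # Collapse progress-bar redraws: each run of '\r' starts a redrawn
    # progress line, so turning it into '\n' puts every redraw on its
    # own line of filtered output.
    for line in sys.stdin:
        sys.stdout.write(re.sub(r'\r+', '\n', line))
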
--- a/tests/test-subrepo-relative-path.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-subrepo-relative-path.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 Preparing the subrepository 'sub'
 
@@ -70,7 +70,7 @@
    source   ../sub
    revision 863c1745b441bd97a8c4a096e87793073f4fb215
 
-  $ "$TESTDIR/killdaemons.py"
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
 
 subrepo paths with ssh urls
 
--- a/tests/test-subrepo.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-subrepo.t	Fri Oct 19 01:34:50 2012 -0500
@@ -730,7 +730,7 @@
   925c17564ef8 tip
   $ hg -R s id
   12a213df6fa9 tip
-  $ hg -R t id  
+  $ hg -R t id
   52c0adc0515a tip
   $ hg update 11
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -738,7 +738,7 @@
   365661e5936a
   $ hg -R s id
   fc627a69481f
-  $ hg -R t id  
+  $ hg -R t id
   e95bcfa18a35
 
 Sticky subrepositorys, file changes
@@ -750,7 +750,7 @@
   365661e5936a+
   $ hg -R s id
   fc627a69481f+
-  $ hg -R t id  
+  $ hg -R t id
   e95bcfa18a35+
   $ hg update tip
    subrepository sources for s differ
@@ -764,7 +764,7 @@
   925c17564ef8+ tip
   $ hg -R s id
   fc627a69481f+
-  $ hg -R t id  
+  $ hg -R t id
   e95bcfa18a35+
   $ hg update --clean tip
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -774,7 +774,7 @@
   925c17564ef8 tip
   $ hg -R s id
   12a213df6fa9 tip
-  $ hg -R t id  
+  $ hg -R t id
   52c0adc0515a tip
   $ cd s
   $ hg update -r -2
@@ -792,7 +792,7 @@
   e45c8b14af55+
   $ hg -R s id
   02dcf1d70411
-  $ hg -R t id  
+  $ hg -R t id
   7af322bc1198
 
 Sticky subrepository, file changes and revision updates
@@ -804,7 +804,7 @@
   e45c8b14af55+
   $ hg -R s id
   02dcf1d70411+
-  $ hg -R t id  
+  $ hg -R t id
   7af322bc1198+
   $ hg update tip
    subrepository sources for s differ
@@ -818,7 +818,7 @@
   925c17564ef8+ tip
   $ hg -R s id
   02dcf1d70411+
-  $ hg -R t id  
+  $ hg -R t id
   7af322bc1198+
 
 Sticky repository, update --clean
@@ -828,7 +828,7 @@
   925c17564ef8 tip
   $ hg -R s id
   12a213df6fa9 tip
-  $ hg -R t id  
+  $ hg -R t id
   52c0adc0515a tip
 
 Test subrepo already at intended revision:
@@ -843,7 +843,7 @@
   11+
   $ hg -R s id
   fc627a69481f
-  $ hg -R t id 
+  $ hg -R t id
   e95bcfa18a35
 
 Test that removing .hgsubstate doesn't break anything:
--- a/tests/test-symlinks.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-symlinks.t	Fri Oct 19 01:34:50 2012 -0500
@@ -46,7 +46,7 @@
   $ rm dir/a.o
   $ rm dir/b.o
   $ mkdir dir/a.o
-  $ ln -s nonexist dir/b.o
+  $ ln -s nonexistent dir/b.o
   $ mkfifo a.c
 
 it should show a.c, dir/a.o and dir/b.o deleted
--- a/tests/test-tag.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-tag.t	Fri Oct 19 01:34:50 2012 -0500
@@ -40,9 +40,15 @@
   $ hg tag -r 0 x y z y y z
   abort: tag names must be unique
   [255]
-  $ hg tag tap nada dot tip null .
+  $ hg tag tap nada dot tip
   abort: the name 'tip' is reserved
   [255]
+  $ hg tag .
+  abort: the name '.' is reserved
+  [255]
+  $ hg tag null
+  abort: the name 'null' is reserved
+  [255]
   $ hg tag "bleah"
   abort: tag 'bleah' already exists (use -f to force)
   [255]
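
The reserved-name test above is split into one invocation per name, presumably because only the first reserved name in a batch is ever reported (the combined command aborts on 'tip' alone), so '.', 'null' and 'tip' each need their own command to be exercised. A minimal sketch of that kind of check (hypothetical, not Mercurial's actual tag-validation code):

    RESERVED = ('tip', 'null', '.')

    def checknames(names):
        # Abort on the first reserved name, mirroring the per-name
        # aborts the test now expects.
        for name in names:
            if name in RESERVED:
                raise ValueError("the name '%s' is reserved" % name)
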
--- a/tests/test-tags.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-tags.t	Fri Oct 19 01:34:50 2012 -0500
@@ -137,7 +137,7 @@
   $ echo >> .hgtags
   $ echo "foo bar" >> .hgtags
   $ echo "a5a5 invalid" >> .hg/localtags
-  $ cat .hgtags 
+  $ cat .hgtags
   acb14030fe0a21b60322c440ad2d20cf7685a376 first
   spam
   
--- a/tests/test-template-engine.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-template-engine.t	Fri Oct 19 01:34:50 2012 -0500
@@ -36,4 +36,12 @@
   $ hg log --style=./mymap
   0 97e5f848f0936960273bbf75be6388cd0350a32b test
 
+  $ cat > changeset.txt << EOF
+  > {{p1rev}} {{p1node}} {{p2rev}} {{p2node}}
+  > EOF
+  $ hg ci -Ama
+  $ hg log --style=./mymap
+  0 97e5f848f0936960273bbf75be6388cd0350a32b -1 0000000000000000000000000000000000000000
+  -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000
+
   $ cd ..
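
The new hunk extends the custom-engine test to the parent keywords {{p1rev}}/{{p1node}} and {{p2rev}}/{{p2node}}, where an absent parent renders as -1 and the all-zero node. For orientation, the same values can be read off a changectx directly; a rough Python 2-era sketch (assumes a repository in the current directory, not part of the test itself):

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui(), '.')
    ctx = repo['tip']
    # p2() is the null changeset when there is no second parent, which
    # is where the -1 / 0000... values in the expected output come from.
    print ctx.p1().rev(), ctx.p1().hex(), ctx.p2().rev(), ctx.p2().hex()
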
--- a/tests/test-transplant.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-transplant.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
   $ cat <<EOF >> $HGRCPATH
   > [extensions]
@@ -632,3 +632,7 @@
   skipping emptied changeset 7a7d57e15850
   $ cd ..
 
+Explicitly kill daemons to let the test exit on Windows
+
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
+
--- a/tests/test-treediscovery-legacy.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-treediscovery-legacy.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 Tests discovery against servers without getbundle support:
 
@@ -35,7 +35,7 @@
   >   cat hg.pid >> $DAEMON_PIDS
   > }
   $ tstop() {
-  >   "$TESTDIR/killdaemons.py"
+  >   "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   >   cp $HGRCPATH-withcap $HGRCPATH
   > }
 
@@ -330,7 +330,7 @@
   $ hg ci -Am A
   adding A
   $ cd ..
-  $ hg clone rlocal rremote  
+  $ hg clone rlocal rremote
   updating to branch default
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd rlocal
@@ -341,7 +341,7 @@
   $ tstart rremote
 
   $ cd rlocal
-  $ hg incoming $remote 
+  $ hg incoming $remote
   comparing with http://localhost:$HGPORT/
   searching for changes
   no changes found
--- a/tests/test-treediscovery.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-treediscovery.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 Tests discovery against servers without getbundle support:
 
@@ -23,7 +23,7 @@
   >   cat hg.pid >> $DAEMON_PIDS
   > }
   $ tstop() {
-  >   "$TESTDIR/killdaemons.py"
+  >   "$TESTDIR/killdaemons.py" $DAEMON_PIDS
   > }
 
 Both are empty:
--- a/tests/test-unbundlehash.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-unbundlehash.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 Test wire protocol unbundle with hashed heads (capability: unbundlehash)
 
@@ -30,3 +30,8 @@
 
   $ cat access.log | grep unbundle
   * - - [*] "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+6768033e216468247bd031a0a2d9876d79818f8f (glob)
+
+Explicitly kill daemons to let the test exit on Windows
+
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
+
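
The grep over access.log above checks the hashed form of the heads argument: when the server advertises the unbundlehash capability, the client sends the literal word 'hashed' plus a sha1 over the sorted binary head nodes instead of the full head list, and both items are hex-encoded on the wire, which is why the log shows 686173686564 (hex for 'hashed') followed by a 40-character digest. A hedged sketch of that encoding (illustrative, with a placeholder head node, not the client code itself):

    import hashlib

    binheads = ['\x00' * 20]   # placeholder 20-byte head node, for illustration
    digest = hashlib.sha1(''.join(sorted(binheads))).digest()
    # Hex-encode each item; the space becomes '+' in the logged request.
    print ' '.join(s.encode('hex') for s in ['hashed', digest])
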
--- a/tests/test-update-branches.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-update-branches.t	Fri Oct 19 01:34:50 2012 -0500
@@ -12,6 +12,8 @@
 # |/
 # o  0:b608b9236435
 
+  $ mkdir b1
+  $ cd b1
   $ hg init
   $ echo foo > foo
   $ echo zero > a
@@ -47,6 +49,14 @@
   o  0:60829823a42a
   
 
+Make sure update doesn't assume b1 is a repository if invoked from outside:
+
+  $ cd ..
+  $ hg update b1
+  abort: no repository found in '$TESTTMP' (.hg not found)!
+  [255]
+  $ cd b1
+
 Test helper functions:
 
   $ revtest () {
@@ -61,7 +71,7 @@
   >     hg up $opt $targetrev
   >     hg parent --template 'parent={rev}\n'
   >     hg stat -S
-  > }    
+  > }
 
   $ norevtest () {
   >     msg=$1
@@ -74,7 +84,7 @@
   >     hg up $opt
   >     hg parent --template 'parent={rev}\n'
   >     hg stat -S
-  > }    
+  > }
 
 Test cases are documented in a table in the update function of merge.py.
 Cases are run as shown in that table, row by row.
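
The test-update-branches.t hunks move the whole scenario into a b1 subdirectory so that running "hg update b1" from the parent directory can be shown to abort rather than treating the b1 argument as a repository to operate on: since $TESTTMP itself holds no .hg, the command fails with "no repository found". The lookup being exercised is an upward walk for a .hg directory; a rough sketch of that idea (hypothetical, not Mercurial's actual repository discovery):

    import os

    def findrepo(path):
        # Walk towards the filesystem root looking for a .hg directory;
        # return None when the root is reached without finding one.
        while not os.path.isdir(os.path.join(path, '.hg')):
            parent = os.path.dirname(path)
            if parent == path:
                return None
            path = parent
        return path
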
--- a/tests/test-wireproto.t	Mon Oct 08 00:19:30 2012 +0200
+++ b/tests/test-wireproto.t	Fri Oct 19 01:34:50 2012 -0500
@@ -1,4 +1,4 @@
-  $ "$TESTDIR/hghave" serve || exit 80
+  $ "$TESTDIR/hghave" killdaemons || exit 80
 
 Test wire protocol argument passing
 
@@ -112,3 +112,7 @@
   $ hg debugwireargs --ssh "python ./dummyssh" ssh://user@dummy/repo eins zwei --five fuenf
   eins zwei None None None
 
+Explicitly kill daemons to let the test exit on Windows
+
+  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
+