-rw-r--r--  Documentation/RelNotes/1.7.11.3.txt | 53
-rw-r--r--  Documentation/RelNotes/1.7.11.4.txt | 31
-rw-r--r--  Documentation/RelNotes/1.7.12.txt | 106
-rw-r--r--  Documentation/config.txt | 3
-rw-r--r--  Documentation/git-commit-tree.txt | 2
-rw-r--r--  Documentation/git-commit.txt | 9
-rw-r--r--  Documentation/git-credential.txt | 22
-rw-r--r--  Documentation/git-daemon.txt | 2
-rw-r--r--  Documentation/git-merge.txt | 2
-rw-r--r--  Documentation/git-mergetool.txt | 6
-rw-r--r--  Documentation/git-rebase.txt | 2
-rw-r--r--  Documentation/git-rev-parse.txt | 6
-rw-r--r--  Documentation/git.txt | 4
-rw-r--r--  Documentation/glossary-content.txt | 2
-rw-r--r--  Documentation/rev-list-options.txt | 2
-rw-r--r--  Documentation/revisions.txt | 43
-rw-r--r--  Documentation/user-manual.txt | 12
-rwxr-xr-x  GIT-VERSION-GEN | 2
-rw-r--r--  Makefile | 31
-rw-r--r--  advice.c | 2
-rw-r--r--  attr.c | 12
-rw-r--r--  block-sha1/sha1.c | 8
-rw-r--r--  builtin/apply.c | 8
-rw-r--r--  builtin/blame.c | 3
-rw-r--r--  builtin/cat-file.c | 2
-rw-r--r--  builtin/checkout.c | 5
-rw-r--r--  builtin/commit-tree.c | 7
-rw-r--r--  builtin/commit.c | 2
-rw-r--r--  builtin/config.c | 16
-rw-r--r--  builtin/log.c | 3
-rw-r--r--  builtin/pack-objects.c | 2
-rw-r--r--  builtin/reset.c | 10
-rw-r--r--  builtin/rev-parse.c | 14
-rw-r--r--  builtin/update-index.c | 9
-rw-r--r--  cache.h | 47
-rw-r--r--  commit.c | 2
-rw-r--r--  config.c | 4
-rw-r--r--  configure.ac | 56
-rwxr-xr-x  contrib/mw-to-git/git-remote-mediawiki | 158
-rw-r--r--  contrib/mw-to-git/t/push-pull-tests.sh | 2
-rwxr-xr-x  contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh | 20
-rw-r--r--  credential.c | 2
-rw-r--r--  date.c | 2
-rw-r--r--  diff.c | 8
-rw-r--r--  dir.c | 2
-rwxr-xr-x  git-am.sh | 5
-rwxr-xr-x  git-difftool.perl | 7
-rwxr-xr-x  git-filter-branch.sh | 2
-rw-r--r--  git-mergetool--lib.sh | 6
-rwxr-xr-x  git-mergetool.sh | 42
-rw-r--r--  git-rebase--interactive.sh | 25
-rwxr-xr-x  git-submodule.sh | 13
-rwxr-xr-x  git-svn.perl | 3128
-rwxr-xr-x  gitweb/gitweb.perl | 33
-rw-r--r--  help.c | 14
-rw-r--r--  perl/.gitignore | 1
-rw-r--r--  perl/Git/IndexInfo.pm | 33
-rw-r--r--  perl/Git/SVN.pm | 2326
-rw-r--r--  perl/Git/SVN/Fetcher.pm | 1
-rw-r--r--  perl/Git/SVN/GlobSpec.pm | 59
-rw-r--r--  perl/Git/SVN/Log.pm | 395
-rw-r--r--  perl/Git/SVN/Migration.pm | 258
-rw-r--r--  perl/Git/SVN/Utils.pm | 59
-rw-r--r--  perl/Makefile | 9
-rw-r--r--  perl/Makefile.PL | 35
-rw-r--r--  read-cache.c | 67
-rw-r--r--  revision.c | 77
-rw-r--r--  revision.h | 5
-rw-r--r--  setup.c | 8
-rw-r--r--  sha1_file.c | 2
-rw-r--r--  sha1_name.c | 494
-rw-r--r--  t/Git-SVN/00compile.t | 14
-rw-r--r--  t/Git-SVN/Utils/can_compress.t | 11
-rw-r--r--  t/Git-SVN/Utils/fatal.t | 34
-rw-r--r--  t/lib-httpd.sh | 4
-rw-r--r--  t/lib-httpd/apache.conf | 5
-rwxr-xr-x  t/t1100-commit-tree-options.sh | 18
-rwxr-xr-x  t/t1306-xdg-files.sh | 39
-rwxr-xr-x  t/t1512-rev-parse-disambiguation.sh | 264
-rwxr-xr-x  t/t3404-rebase-interactive.sh | 8
-rwxr-xr-x  t/t4012-diff-binary.sh | 94
-rwxr-xr-x  t/t4020-diff-external.sh | 59
-rwxr-xr-x  t/t7003-filter-branch.sh | 3
-rwxr-xr-x  t/t7406-submodule-update.sh | 13
-rwxr-xr-x  t/t7409-submodule-detached-worktree.sh | 78
-rwxr-xr-x  t/t7502-commit.sh | 75
-rwxr-xr-x  t/t7810-grep.sh | 11
-rw-r--r--  t/test-lib-functions.sh | 13
-rw-r--r--  t/test-lib.sh | 46
-rw-r--r--  transport-helper.c | 15
-rw-r--r--  tree.c | 7
-rw-r--r--  unpack-trees.c | 3
-rw-r--r--  wt-status.c | 3
-rw-r--r--  wt-status.h | 1
94 files changed, 4962 insertions, 3706 deletions
diff --git a/Documentation/RelNotes/1.7.11.3.txt b/Documentation/RelNotes/1.7.11.3.txt
new file mode 100644
index 000000000..64494f89d
--- /dev/null
+++ b/Documentation/RelNotes/1.7.11.3.txt
@@ -0,0 +1,53 @@
+Git v1.7.11.3 Release Notes
+===========================
+
+Fixes since v1.7.11.2
+---------------------
+
+ * The error message from "git push $there :bogo" (and its equivalent
+ "git push $there --delete bogo") mentioned that we tried and failed
+ to guess what ref is being deleted based on the LHS of the refspec,
+ which we don't.
+
+ * A handful of files and directories we create had tighter than
+ necessary permission bits when the user wanted to have group
+ writability (e.g. by setting "umask 002").
+
+ * "commit --amend" used to refuse amending a commit with an empty log
+ message, with or without "--allow-empty-message".
+
+ * "git commit --amend --only --" was meant to allow "Clever" people to
+ rewrite the commit message without making any change even when they
+ already have changes for the next commit added to their index, but
+ it never worked as advertised since it was introduced in the 1.3.0 era.
+
+ * Even though the index can record pathnames longer than 1<<12 bytes,
+ in some places we were not comparing them in full, potentially
+ replacing index entries instead of adding.
+
+ * "git show"'s auto-walking behaviour was an unreliable and
+ unpredictable hack; it now behaves just like "git log" does when it
+ walks.
+
+ * "git diff", "git status" and anything that internally uses the
+ comparison machinery was utterly broken when the difference
+ involved a file with "-" as its name. This was due to the way "git
+ diff --no-index" was incorrectly bolted on to the system, making
+ any comparison that involves a file "-" at the root level
+ incorrectly read from the standard input.
+
+ * We did not have a test to make sure "git rebase" without extra options
+ filters out an empty commit in the original history.
+
+ * "git fast-export" produced an input stream for fast-import without
+ properly quoting pathnames when they contain SPs in them.
+
+ * "git checkout --detach", when you are still on an unborn branch,
+ should be forbidden, but it wasn't.
+
+ * Some implementations of Perl terminate "lines" with CRLF even when
+ the script is operating on just a sequence of bytes. Make sure to
+ use "$PERL_PATH", the version of Perl the user told Git to use, in
+ our tests to avoid unnecessary breakages in tests.
+
+Also contains minor typofixes and documentation updates.
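
For illustration, the two equivalent ref-deletion forms from the first bullet above look like this (remote and branch names are placeholders):
------------
# Delete the branch "topic" on the remote "origin"; both forms send
# the same "delete this ref" request:
$ git push origin :topic
$ git push origin --delete topic
------------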
diff --git a/Documentation/RelNotes/1.7.11.4.txt b/Documentation/RelNotes/1.7.11.4.txt
new file mode 100644
index 000000000..3a640c2d4
--- /dev/null
+++ b/Documentation/RelNotes/1.7.11.4.txt
@@ -0,0 +1,31 @@
+Git v1.7.11.4 Release Notes
+===========================
+
+Fixes since v1.7.11.3
+---------------------
+
+ * "$GIT_DIR/COMMIT_EDITMSG" file that is used to hold the commit log
+ message user edits was not documented.
+
+ * The advise() function did not use varargs correctly to format
+ its message.
+
+ * When "git am" failed, old timers knew to check .git/rebase-apply/patch
+ to see what went wrong, but we never told the users about it.
+
+ * "git commit-tree" learned a more natural "-p <parent> <tree>" order
+ of arguments a long time ago, but recently forgot it by mistake.
+
+ * "git diff --no-ext-diff" did not output anything for a typechange
+ filepair when GIT_EXTERNAL_DIFF is in effect.
+
+ * In the 1.7.9 era, we taught "git rebase" about the raw timestamp format,
+ but we did not teach the same trick to "filter-branch", which had rolled
+ its own similar logic.
+
+ * When "git submodule add" clones a submodule repository, it can get
+ confused about where to store the resulting submodule repository in the
+ superproject's .git/ directory when there is a symbolic link in the
+ path to the current directory.
+
+Also contains minor typofixes and documentation updates.
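
A sketch of the "git am" workflow the bullet above refers to; the mailbox name is hypothetical:
------------
# Apply a patch series from a mailbox file:
$ git am series.mbox
# If it stops, the patch that failed is kept for inspection:
$ less .git/rebase-apply/patch
# Fix things up, then either continue or abort:
$ git am --continue               # or: git am --abort
------------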
diff --git a/Documentation/RelNotes/1.7.12.txt b/Documentation/RelNotes/1.7.12.txt
index d5a522d91..786a70242 100644
--- a/Documentation/RelNotes/1.7.12.txt
+++ b/Documentation/RelNotes/1.7.12.txt
@@ -19,8 +19,14 @@ UI, Workflows & Features
$HOME/.config/git/config instead, which is in line with XDG.
* The value of core.attributesfile and core.excludesfile default to
- $HOME/.config/attributes and $HOME/.config/ignore respectively when
- these files exist.
+ $HOME/.config/git/attributes and $HOME/.config/git/ignore respectively
+ when these files exist.
+
+ * Logic to disambiguate abbreviated object names has been taught to
+ take advantage of object types that are expected in the context,
+ e.g. XXXXXX in the "git describe" output v1.2.3-gXXXXXX must be a
+ commit object, not a blob or a tree. This will help us prolong
+ the lifetime of abbreviated object names.
* "git apply" learned to wiggle the base version and perform three-way
merge when a patch does not exactly apply to the version you have.
@@ -71,6 +77,10 @@ UI, Workflows & Features
a separate "git prompting" script, to help lazy-autoloading of the
completion part while making prompting part always available.
+ * "gitweb" pays attention to various forms of credits that are
+ similar to "Signed-off-by:" lines in the commit objects and
+ highlights them accordingly.
+
Foreign Interface
@@ -103,6 +113,12 @@ Performance, Internal Implementation, etc. (please report possible regressions)
fnmatch() by comparing fixed leading substring literally when
possible.
+ * "git log -n 1 -- rarely-touched-path" was spending unnecessary
+ cycles after showing the first change to find the next one, only to
+ discard it.
+
+ * "git svn" got a large-looking code reorganization at the last
+ minute before the code freeze.
Also contains minor documentation updates and code clean-ups.
@@ -114,59 +130,33 @@ Unless otherwise noted, all the fixes since v1.7.11 in the maintenance
releases are contained in this release (see release notes to them for
details).
- * The error message from "git push $there :bogo" (and its equivalent
- "git push $there --delete bogo") mentioned that we tried and failed
- to guess what ref is being deleted based on the LHS of the refspec,
- which we don't.
- (merge 5742c82 jk/push-delete-ref-error-message later to maint).
-
- * A handful of files and directories we create had tighter than
- necessary permission bits when the user wanted to have group
- writability (e.g. by setting "umask 002").
- (merge 6ff2b72 ar/clone-honor-umask-at-top later to maint).
-
- * "commit --amend" used to refuse amending a commit with an empty log
- message, with or without "--allow-empty-message".
- (merge d9a9357 cw/amend-commit-without-message later to maint).
-
- * "git commit --amend --only --" was meant to allow "Clever" people to
- rewrite the commit message without making any change even when they
- have already changes for the next commit added to their index, but
- it never worked as advertised since it was introduced in 1.3.0 era.
- (merge ea2d4ed jk/maint-commit-amend-only-no-paths later to maint).
-
- * Even though the index can record pathnames longer than 1<<12 bytes,
- in some places we were not comparing them in full, potentially
- replacing index entries instead of adding.
- (merge d5f5333 tg/maint-cache-name-compare later to maint).
-
- * "git show"'s auto-walking behaviour was an unreliable and
- unpredictable hack; it now behaves just like "git log" does when it
- walks.
- (merge c5941f1 tr/maint-show-walk later to maint).
-
- * "git diff", "git status" and anything that internally uses the
- comparison machinery was utterly broken when the difference
- involved a file with "-" as its name. This was due to the way "git
- diff --no-index" was incorrectly bolted on to the system, making
- any comparison that involves a file "-" at the root level
- incorrectly read from the standard input.
- (merge 4682d85 jc/refactor-diff-stdin later to maint).
-
- * We did not have test to make sure "git rebase" without extra options
- filters out an empty commit in the original history.
- (merge 2b5ba7b mz/empty-rebase-test later to maint).
-
- * "git fast-export" produced an input stream for fast-import without
- properly quoting pathnames when they contain SPs in them.
- (merge ff59f6d js/fast-export-paths-with-spaces later to maint).
-
- * "git checkout --detach", when you are still on an unborn branch,
- should be forbidden, but it wasn't.
- (merge 8ced1aa cw/no-detaching-an-unborn later to maint).
-
- * Some implementations of Perl terminates "lines" with CRLF even when
- the script is operating on just a sequence of bytes. Make sure to
- use "$PERL_PATH", the version of Perl the user told Git to use, in
- our tests to avoid unnecessary breakages in tests.
- (merge ad78585 vr/use-our-perl-in-tests later to maint).
+ * "git grep" stopped spawning an external "grep" long time ago, but a
+ duplicated test to check internal and external "grep" was left
+ behind.
+ (merge 4ca9453 rj/maint-grep-remove-redundant-test later to maint).
+
+ * The code to avoid a mistaken attempt to add the object directory
+ itself as its own alternate could read beyond the end of a string
+ during comparison.
+ (merge cb2912c hv/link-alt-odb-entry later to maint).
+
+ * "git checkout <branchname>" to come back from a detached HEAD state
+ incorrectly computed reachability of the detached HEAD, resulting
+ in unnecessary warnings.
+ (merge add416a jk/maint-checkout-orphan-check-fix later to maint).
+
+ * The documentation for revision range specifiers (e.g. A..B, A^@)
+ has been updated.
+ (merge ca5ee2d mh/maint-revisions-doc later to maint).
+
+ * "git submodule add" was confused when the superproject did not have
+ its repository in its usual place in the working tree and GIT_DIR
+ and GIT_WORK_TREE were used to access it.
+
+ * "git mergetool" did not support --tool-help option to give the list
+ of supported backends, like "git difftool" does.
+ (merge 109859e jc/mergetool-tool-help later to maint).
+
+ * "git commit --amend" let the user edit the log message and then died
+ when the human-readable committer name was given insufficiently by
+ getpwent(3).
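
For the last bullet, a sketch of supplying the committer identity explicitly instead of relying on getpwent(3):
------------
# Record your identity once, so "git commit --amend" never has to
# guess a human-readable name from the password database:
$ git config --global user.name "A U Thor"
$ git config --global user.email author@example.com
# A one-off override through the environment also works:
$ GIT_COMMITTER_NAME="A U Thor" GIT_COMMITTER_EMAIL=author@example.com \
  git commit --amend
------------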
diff --git a/Documentation/config.txt b/Documentation/config.txt
index 7bc0e5384..a95e5a4ac 100644
--- a/Documentation/config.txt
+++ b/Documentation/config.txt
@@ -177,6 +177,9 @@ advice.*::
Advice shown when you used linkgit:git-checkout[1] to
move to the detach HEAD state, to instruct how to create
a local branch after the fact.
+ amWorkDir::
+ Advice that shows the location of the patch file when
+ linkgit:git-am[1] fails to apply it.
--
core.fileMode::
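
Like the other advice.* settings shown above, the new knob is a boolean; a hypothetical way to turn the hint off:
------------
# Suppress the hint telling you where "git am" saved the patch it
# could not apply:
$ git config --global advice.amWorkDir false
------------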
diff --git a/Documentation/git-commit-tree.txt b/Documentation/git-commit-tree.txt
index ff7328650..6d5a04c83 100644
--- a/Documentation/git-commit-tree.txt
+++ b/Documentation/git-commit-tree.txt
@@ -10,7 +10,7 @@ SYNOPSIS
--------
[verse]
'git commit-tree' <tree> [(-p <parent>)...] < changelog
-'git commit-tree' <tree> [(-p <parent>)...] [(-m <message>)...] [(-F <file>)...]
+'git commit-tree' [(-p <parent>)...] [(-m <message>)...] [(-F <file>)...] <tree>
DESCRIPTION
-----------
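
A sketch of the two documented argument orders (object names are placeholders):
------------
# Traditional form: tree first, commit message on standard input.
$ echo "message" | git commit-tree 'HEAD^{tree}' -p HEAD
# Order restored by this release: options first, tree last.
$ git commit-tree -p HEAD -m "message" 'HEAD^{tree}'
------------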
diff --git a/Documentation/git-commit.txt b/Documentation/git-commit.txt
index f40083592..4622297ec 100644
--- a/Documentation/git-commit.txt
+++ b/Documentation/git-commit.txt
@@ -407,6 +407,15 @@ This command can run `commit-msg`, `prepare-commit-msg`, `pre-commit`,
and `post-commit` hooks. See linkgit:githooks[5] for more
information.
+FILES
+-----
+
+`$GIT_DIR/COMMIT_EDITMSG`::
+ This file contains the commit message of a commit in progress.
+ If `git commit` exits due to an error before creating a commit,
+ any commit message that has been provided by the user (e.g., in
+ an editor session) will be available in this file, but will be
+ overwritten by the next invocation of `git commit`.
SEE ALSO
--------
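
A sketch of the situation the new FILES section describes, and one way to reuse the saved message:
------------
# Suppose the commit was aborted, e.g. by a failing pre-commit hook:
$ git commit                      # editor session ends, commit fails
# The message that was typed is still on disk until the next commit:
$ cat .git/COMMIT_EDITMSG
# One way to reuse it once the problem is fixed:
$ git commit -F .git/COMMIT_EDITMSG
------------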
diff --git a/Documentation/git-credential.txt b/Documentation/git-credential.txt
index a81684e15..53adee320 100644
--- a/Documentation/git-credential.txt
+++ b/Documentation/git-credential.txt
@@ -102,22 +102,20 @@ INPUT/OUTPUT FORMAT
-------------------
`git credential` reads and/or writes (depending on the action used)
-credential information in its standard input/output. These information
+credential information in its standard input/output. This information
can correspond either to keys for which `git credential` will obtain
the login/password information (e.g. host, protocol, path), or to the
actual credential data to be obtained (login/password).
-The credential is split into a set of named attributes.
-Attributes are provided to the helper, one per line. Each attribute is
+The credential is split into a set of named attributes, with one
+attribute per line. Each attribute is
specified by a key-value pair, separated by an `=` (equals) sign,
followed by a newline. The key may contain any bytes except `=`,
newline, or NUL. The value may contain any bytes except newline or NUL.
In both cases, all bytes are treated as-is (i.e., there is no quoting,
and one cannot transmit a value with newline or NUL in it). The list of
attributes is terminated by a blank line or end-of-file.
-Git will send the following attributes (but may not send all of
-them for a given credential; for example, a `host` attribute makes no
-sense when dealing with a non-network protocol):
+Git understands the following attributes:
`protocol`::
@@ -142,3 +140,15 @@ sense when dealing with a non-network protocol):
`password`::
The credential's password, if we are asking it to be stored.
+
+`url`::
+
+ When this special attribute is read by `git credential`, the
+ value is parsed as a URL and treated as if its constituent parts
+ were read (e.g., `url=https://example.com` would behave as if
+ `protocol=https` and `host=example.com` had been provided). This
+ can help callers avoid parsing URLs themselves. Note that any
+ components which are missing from the URL (e.g., there is no
+ username in the example above) will be set to empty; if you want
+ to provide a URL and override some attributes, provide the URL
+ attribute first, followed by any overrides.
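
A minimal sketch of the attribute stream described above, using the `url` shorthand (host and credentials are made up):
------------
# "url" expands into protocol= and host= internally; the explicit
# username then overrides the (empty) one parsed from the URL.
$ printf '%s\n' 'url=https://example.com' 'username=me' '' |
  git credential fill
# The answer comes back in the same key=value format, ending with
# a blank line: protocol=https, host=example.com, username=me,
# password=...
------------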
diff --git a/Documentation/git-daemon.txt b/Documentation/git-daemon.txt
index 31b28fc29..e8f757704 100644
--- a/Documentation/git-daemon.txt
+++ b/Documentation/git-daemon.txt
@@ -204,7 +204,7 @@ receive-pack::
can push anything into the repository, including removal
of refs). This is solely meant for a closed LAN setting
where everybody is friendly. This service can be
- enabled by `daemon.receivepack` configuration item to
+ enabled by setting the `daemon.receivepack` configuration item to
`true`.
EXAMPLES
diff --git a/Documentation/git-merge.txt b/Documentation/git-merge.txt
index 3ceefb8a1..20f922851 100644
--- a/Documentation/git-merge.txt
+++ b/Documentation/git-merge.txt
@@ -181,7 +181,7 @@ final result verbatim. When both sides made changes to the same area,
however, git cannot randomly pick one side over the other, and asks you to
resolve it by leaving what both sides did to that area.
-By default, git uses the same style as that is used by "merge" program
+By default, git uses the same style as the one used by the "merge" program
from the RCS suite to present such a conflicted hunk, like this:
------------
diff --git a/Documentation/git-mergetool.txt b/Documentation/git-mergetool.txt
index 2a49de7cf..d7207bd9b 100644
--- a/Documentation/git-mergetool.txt
+++ b/Documentation/git-mergetool.txt
@@ -27,9 +27,9 @@ OPTIONS
-t <tool>::
--tool=<tool>::
Use the merge resolution program specified by <tool>.
- Valid merge tools are:
- araxis, bc3, diffuse, ecmerge, emerge, gvimdiff, kdiff3,
- meld, opendiff, p4merge, tkdiff, tortoisemerge, vimdiff and xxdiff.
+ Valid values include emerge, gvimdiff, kdiff3,
+ meld, vimdiff, and tortoisemerge. Run `git mergetool --tool-help`
+ for the list of valid <tool> settings.
+
If a merge resolution program is not specified, 'git mergetool'
will use the configuration variable `merge.tool`. If the
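
A quick illustration of the new option and of the settings it helps you choose:
------------
# List every merge backend this installation actually supports:
$ git mergetool --tool-help
# Then pick one per invocation or via configuration:
$ git mergetool --tool=kdiff3
$ git config merge.tool kdiff3
------------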
diff --git a/Documentation/git-rebase.txt b/Documentation/git-rebase.txt
index b30ed352e..fd535b06a 100644
--- a/Documentation/git-rebase.txt
+++ b/Documentation/git-rebase.txt
@@ -273,7 +273,7 @@ which makes little sense.
Pass the <strategy-option> through to the merge strategy.
This implies `--merge` and, if no strategy has been
specified, `-s recursive`. Note the reversal of 'ours' and
- 'theirs' as noted in above for the `-m` option.
+ 'theirs' as noted above for the `-m` option.
-q::
--quiet::
diff --git a/Documentation/git-rev-parse.txt b/Documentation/git-rev-parse.txt
index 4cc3e9586..3c63561f0 100644
--- a/Documentation/git-rev-parse.txt
+++ b/Documentation/git-rev-parse.txt
@@ -101,6 +101,12 @@ OPTIONS
The option core.warnAmbiguousRefs is used to select the strict
abbreviation mode.
+--disambiguate=<prefix>::
+ Show every object whose name begins with the given prefix.
+ The <prefix> must be at least 4 hexadecimal digits long to
+ avoid listing each and every object in the repository by
+ mistake.
+
--all::
Show all refs found in `refs/`.
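
A sketch of the new option; the prefix is hypothetical and simply lists whatever objects share it in your repository:
------------
# Show every object whose name begins with the given prefix
# (at least 4 hexadecimal digits are required):
$ git rev-parse --disambiguate=beef1234
# If the prefix is ambiguous, stating the expected type can break
# the tie, e.g. ask for the commit with that prefix:
$ git rev-parse --verify 'beef1234^{commit}'
------------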
diff --git a/Documentation/git.txt b/Documentation/git.txt
index 43f9a1beb..7af8aaa04 100644
--- a/Documentation/git.txt
+++ b/Documentation/git.txt
@@ -44,9 +44,11 @@ unreleased) version of git, that is available from 'master'
branch of the `git.git` repository.
Documentation for older releases are available here:
-* link:v1.7.11.2/git.html[documentation for release 1.7.11.2]
+* link:v1.7.11.4/git.html[documentation for release 1.7.11.4]
* release notes for
+ link:RelNotes/1.7.11.4.txt[1.7.11.4],
+ link:RelNotes/1.7.11.3.txt[1.7.11.3],
link:RelNotes/1.7.11.2.txt[1.7.11.2],
link:RelNotes/1.7.11.1.txt[1.7.11.1],
link:RelNotes/1.7.11.txt[1.7.11].
diff --git a/Documentation/glossary-content.txt b/Documentation/glossary-content.txt
index 3595b586b..f928b57f9 100644
--- a/Documentation/glossary-content.txt
+++ b/Documentation/glossary-content.txt
@@ -117,7 +117,7 @@ to point at the new commit.
[[def_ent]]ent::
Favorite synonym to "<<def_tree-ish,tree-ish>>" by some total geeks. See
- `http://en.wikipedia.org/wiki/Ent_(Middle-earth)` for an in-depth
+ http://en.wikipedia.org/wiki/Ent_(Middle-earth) for an in-depth
explanation. Avoid this term, not to confuse people.
[[def_evil_merge]]evil merge::
diff --git a/Documentation/rev-list-options.txt b/Documentation/rev-list-options.txt
index 84e34b1ab..d9b2b5b2e 100644
--- a/Documentation/rev-list-options.txt
+++ b/Documentation/rev-list-options.txt
@@ -760,7 +760,7 @@ options may be given. See linkgit:git-diff-files[1] for more options.
--cc::
- This flag implies the '-c' options and further compresses the
+ This flag implies the '-c' option and further compresses the
patch output by omitting uninteresting hunks whose contents in
the parents have only two variants and the merge result picks
one of them without modification.
diff --git a/Documentation/revisions.txt b/Documentation/revisions.txt
index 172566183..dc0070bcb 100644
--- a/Documentation/revisions.txt
+++ b/Documentation/revisions.txt
@@ -24,22 +24,22 @@ blobs contained in a commit.
object referenced by 'refs/heads/master'. If you
happen to have both 'heads/master' and 'tags/master', you can
explicitly say 'heads/master' to tell git which one you mean.
- When ambiguous, a '<name>' is disambiguated by taking the
+ When ambiguous, a '<refname>' is disambiguated by taking the
first match in the following rules:
- . If '$GIT_DIR/<name>' exists, that is what you mean (this is usually
+ . If '$GIT_DIR/<refname>' exists, that is what you mean (this is usually
useful only for 'HEAD', 'FETCH_HEAD', 'ORIG_HEAD', 'MERGE_HEAD'
and 'CHERRY_PICK_HEAD');
- . otherwise, 'refs/<name>' if it exists;
+ . otherwise, 'refs/<refname>' if it exists;
. otherwise, 'refs/tags/<refname>' if it exists;
- . otherwise, 'refs/heads/<name>' if it exists;
+ . otherwise, 'refs/heads/<refname>' if it exists;
- . otherwise, 'refs/remotes/<name>' if it exists;
+ . otherwise, 'refs/remotes/<refname>' if it exists;
- . otherwise, 'refs/remotes/<name>/HEAD' if it exists.
+ . otherwise, 'refs/remotes/<refname>/HEAD' if it exists.
+
'HEAD' names the commit on which you based the changes in the working tree.
'FETCH_HEAD' records the branch which you fetched from a remote repository
@@ -218,13 +218,44 @@ and its parent commits exist. The 'r1{caret}@' notation means all
parents of 'r1'. 'r1{caret}!' includes commit 'r1' but excludes
all of its parents.
+To summarize:
+
+'<rev>'::
+ Include commits that are reachable from (i.e. ancestors of)
+ <rev>.
+
+'{caret}<rev>'::
+ Exclude commits that are reachable from (i.e. ancestors of)
+ <rev>.
+
+'<rev1>..<rev2>'::
+ Include commits that are reachable from <rev2> but exclude
+ those that are reachable from <rev1>.
+
+'<rev1>\...<rev2>'::
+ Include commits that are reachable from either <rev1> or
+ <rev2> but exclude those that are reachable from both.
+
+'<rev>{caret}@', e.g. 'HEAD{caret}@'::
+ A suffix '{caret}' followed by an at sign is the same as listing
+ all parents of '<rev>' (meaning, include anything reachable from
+ its parents, but not the commit itself).
+
+'<rev>{caret}!', e.g. 'HEAD{caret}!'::
+ A suffix '{caret}' followed by an exclamation mark is the same
+ as giving commit '<rev>' and then all its parents prefixed with
+ '{caret}' to exclude them (and their ancestors).
+
Here are a handful of examples:
D          G H D
D F        G H I J D F
^G D       H D
^D B E     I J F B
+ B..C     C
B...C      G H D E B C
^D B C     E I J F B C
+ C        I J F C
C^@        I J F
+ C^!      C
F^!        D G H D F
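
The summary above maps directly onto everyday commands; a few illustrative invocations (branch names are placeholders):
------------
# Commits on master that are not on maint (the ".." form):
$ git log maint..master
# Commits reachable from exactly one of the two (the "..." form):
$ git log maint...master
# Everything reachable from HEAD's parents, but not HEAD itself (^@):
$ git rev-list 'HEAD^@'
# HEAD and nothing else (^!):
$ git rev-list 'HEAD^!'
------------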
diff --git a/Documentation/user-manual.txt b/Documentation/user-manual.txt
index 02ed5668e..03d95dc29 100644
--- a/Documentation/user-manual.txt
+++ b/Documentation/user-manual.txt
@@ -2870,7 +2870,7 @@ $ git fetch example
You can also add a "+" to force the update each time:
-------------------------------------------------
-$ git config remote.example.fetch +master:ref/remotes/example/master
+$ git config remote.example.fetch +master:refs/remotes/example/master
-------------------------------------------------
Don't do this unless you're sure you won't mind "git fetch" possibly
@@ -2966,7 +2966,7 @@ As you can see, a commit is defined by:
- a tree: The SHA-1 name of a tree object (as defined below), representing
the contents of a directory at a certain point in time.
-- parent(s): The SHA-1 name of some number of commits which represent the
+- parent(s): The SHA-1 name(s) of some number of commits which represent the
immediately previous step(s) in the history of the project. The
example above has one parent; merge commits may have more than
one. A commit with no parents is called a "root" commit, and
@@ -3363,8 +3363,8 @@ Date:
:100644 100644 oldsha... 4b9458b... M somedirectory/myfile
------------------------------------------------
-This tells you that the immediately preceding version of the file was
-"newsha", and that the immediately following version was "oldsha".
+This tells you that the immediately following version of the file was
+"newsha", and that the immediately preceding version was "oldsha".
You also know the commit messages that went with the change from oldsha
to 4b9458b and with the change from 4b9458b to newsha.
@@ -4035,8 +4035,8 @@ $ git ls-files --unmerged
Each line of the `git ls-files --unmerged` output begins with
the blob mode bits, blob SHA-1, 'stage number', and the
filename. The 'stage number' is git's way to say which tree it
-came from: stage 1 corresponds to `$orig` tree, stage 2 `HEAD`
-tree, and stage3 `$target` tree.
+came from: stage 1 corresponds to the `$orig` tree, stage 2 to
+the `HEAD` tree, and stage 3 to the `$target` tree.
Earlier we said that trivial merges are done inside
`git read-tree -m`. For example, if the file did not change
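
A sketch tying the corrected stage numbers above to commands you can run during a conflicted merge (the branch and file names are made up):
------------
# During a conflicted merge, inspect the three stages of one path:
$ git merge topic                 # stops with a conflict in file.txt
$ git ls-files --unmerged -- file.txt
# stage 1 = common ancestor ($orig), stage 2 = HEAD, stage 3 = $target;
# each stage can be shown directly:
$ git show :1:file.txt            # ancestor version
$ git show :2:file.txt            # our (HEAD) version
$ git show :3:file.txt            # their ($target) version
------------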
diff --git a/GIT-VERSION-GEN b/GIT-VERSION-GEN
index fde74a68d..4c1a79e3b 100755
--- a/GIT-VERSION-GEN
+++ b/GIT-VERSION-GEN
@@ -1,7 +1,7 @@
#!/bin/sh
GVF=GIT-VERSION-FILE
-DEF_VER=v1.7.11.GIT
+DEF_VER=v1.7.12-rc1
LF='
'
diff --git a/Makefile b/Makefile
index e169290a9..4b58b9182 100644
--- a/Makefile
+++ b/Makefile
@@ -127,9 +127,8 @@ all::
# specify your own (or DarwinPort's) include directories and
# library directories by defining CFLAGS and LDFLAGS appropriately.
#
-# Define BLK_SHA1 environment variable if you want the C version
-# of the SHA1 that assumes you can do unaligned 32-bit loads and
-# have a fast htonl() function.
+# Define BLK_SHA1 environment variable to make use of the bundled
+# optimized C SHA1 routine.
#
# Define PPC_SHA1 environment variable when running make to make use of
# a bundled SHA1 routine optimized for PowerPC.
@@ -2091,6 +2090,13 @@ $(SCRIPT_LIB) : % : %.sh GIT-SCRIPT-DEFINES
ifndef NO_PERL
$(patsubst %.perl,%,$(SCRIPT_PERL)): perl/perl.mak
+perl/perl.mak: perl/PM.stamp
+
+perl/PM.stamp: FORCE
+ $(QUIET_GEN)find perl -type f -name '*.pm' | sort >$@+ && \
+ { cmp $@+ $@ >/dev/null 2>/dev/null || mv $@+ $@; } && \
+ $(RM) $@+
+
perl/perl.mak: GIT-CFLAGS GIT-PREFIX perl/Makefile perl/Makefile.PL
$(QUIET_SUBDIR0)perl $(QUIET_SUBDIR1) PERL_PATH='$(PERL_PATH_SQ)' prefix='$(prefix_SQ)' $(@F)
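
The perl/PM.stamp rule above is a stamp-file idiom: regenerate a candidate list and update the stamp only when the list really changed, so perl.mak is rebuilt only when *.pm files appear or disappear. The recipe body is plain shell; run standalone, with illustrative file names, it looks like this:
------------
# Refresh the stamp only when the generated list actually differs,
# so nothing that depends on it is rebuilt needlessly.
find perl -type f -name '*.pm' | sort >PM.list+ &&
{ cmp PM.list+ PM.list >/dev/null 2>/dev/null || mv PM.list+ PM.list; } &&
rm -f PM.list+
------------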
@@ -2158,6 +2164,18 @@ configure: configure.ac GIT-VERSION-FILE
autoconf -o $@ $<+ && \
$(RM) $<+
+ifdef AUTOCONFIGURED
+config.status: configure
+ $(QUIET_GEN)if test -f config.status; then \
+ ./config.status --recheck; \
+ else \
+ ./configure; \
+ fi
+reconfigure config.mak.autogen: config.status
+ $(QUIET_GEN)./config.status
+.PHONY: reconfigure # This is a convenience target.
+endif
+
XDIFF_OBJS += xdiff/xdiffi.o
XDIFF_OBJS += xdiff/xprepare.o
XDIFF_OBJS += xdiff/xutils.o
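
With the AUTOCONFIGURED block above, a configure-based build can refresh its generated settings; a sketch of the intended use:
------------
$ make configure        # generate ./configure from configure.ac
$ ./configure           # writes config.status and config.mak.autogen
$ make
# If configure.ac changed, refresh the generated config.mak.autogen:
$ make reconfigure
------------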
@@ -2263,7 +2281,7 @@ $(ASM_OBJ): %.o: %.S GIT-CFLAGS $(missing_dep_dirs)
endif
%.s: %.c GIT-CFLAGS FORCE
- $(QUIET_CC)$(CC) -S $(ALL_CFLAGS) $(EXTRA_CPPFLAGS) $<
+ $(QUIET_CC)$(CC) -o $@ -S $(ALL_CFLAGS) $(EXTRA_CPPFLAGS) $<
ifdef USE_COMPUTED_HEADER_DEPENDENCIES
# Take advantage of gcc's on-the-fly dependency generation
@@ -2743,6 +2761,9 @@ dist-doc:
distclean: clean
$(RM) configure
+ $(RM) config.log config.status config.cache
+ $(RM) config.mak.autogen config.mak.append
+ $(RM) -r autom4te.cache
profile-clean:
$(RM) $(addsuffix *.gcda,$(addprefix $(PROFILE_DIR)/, $(object_dirs)))
@@ -2757,8 +2778,6 @@ clean: profile-clean
$(RM) -r $(dep_dirs)
$(RM) -r po/build/
$(RM) *.spec *.pyc *.pyo */*.pyc */*.pyo common-cmds.h $(ETAGS_TARGET) tags cscope*
- $(RM) -r autom4te.cache
- $(RM) config.log config.mak.autogen config.mak.append config.status config.cache
$(RM) -r $(GIT_TARNAME) .doc-tmp-dir
$(RM) $(GIT_TARNAME).tar.gz git-core_$(GIT_VERSION)-*.tar.gz
$(RM) $(htmldocs).tar.gz $(manpages).tar.gz
diff --git a/advice.c b/advice.c
index a492eea24..edfbd4a6f 100644
--- a/advice.c
+++ b/advice.c
@@ -32,7 +32,7 @@ void advise(const char *advice, ...)
const char *cp, *np;
va_start(params, advice);
- strbuf_addf(&buf, advice, params);
+ strbuf_vaddf(&buf, advice, params);
va_end(params);
for (cp = buf.buf; *cp; cp = np) {
diff --git a/attr.c b/attr.c
index aef93d896..b52efb55a 100644
--- a/attr.c
+++ b/attr.c
@@ -520,11 +520,13 @@ static void bootstrap_attr_stack(void)
home_config_paths(NULL, &xdg_attributes_file, "attributes");
git_attributes_file = xdg_attributes_file;
}
- elem = read_attr_from_file(git_attributes_file, 1);
- if (elem) {
- elem->origin = NULL;
- elem->prev = attr_stack;
- attr_stack = elem;
+ if (git_attributes_file) {
+ elem = read_attr_from_file(git_attributes_file, 1);
+ if (elem) {
+ elem->origin = NULL;
+ elem->prev = attr_stack;
+ attr_stack = elem;
+ }
}
if (!is_bare_repository() || direction == GIT_ATTR_INDEX) {
diff --git a/block-sha1/sha1.c b/block-sha1/sha1.c
index c0054a0b0..a8d4bf930 100644
--- a/block-sha1/sha1.c
+++ b/block-sha1/sha1.c
@@ -101,8 +101,8 @@
* Where do we get the source from? The first 16 iterations get it from
* the input data, the next mix it from the 512-bit array.
*/
-#define SHA_SRC(t) get_be32(data + t)
-#define SHA_MIX(t) SHA_ROL(W(t+13) ^ W(t+8) ^ W(t+2) ^ W(t), 1)
+#define SHA_SRC(t) get_be32((unsigned char *) block + (t)*4)
+#define SHA_MIX(t) SHA_ROL(W((t)+13) ^ W((t)+8) ^ W((t)+2) ^ W(t), 1);
#define SHA_ROUND(t, input, fn, constant, A, B, C, D, E) do { \
unsigned int TEMP = input(t); setW(t, TEMP); \
@@ -115,7 +115,7 @@
#define T_40_59(t, A, B, C, D, E) SHA_ROUND(t, SHA_MIX, ((B&C)+(D&(B^C))) , 0x8f1bbcdc, A, B, C, D, E )
#define T_60_79(t, A, B, C, D, E) SHA_ROUND(t, SHA_MIX, (B^C^D) , 0xca62c1d6, A, B, C, D, E )
-static void blk_SHA1_Block(blk_SHA_CTX *ctx, const unsigned int *data)
+static void blk_SHA1_Block(blk_SHA_CTX *ctx, const void *block)
{
unsigned int A,B,C,D,E;
unsigned int array[16];
@@ -126,7 +126,7 @@ static void blk_SHA1_Block(blk_SHA_CTX *ctx, const unsigned int *data)
D = ctx->H[3];
E = ctx->H[4];
- /* Round 1 - iterations 0-16 take their input from 'data' */
+ /* Round 1 - iterations 0-16 take their input from 'block' */
T_0_15( 0, A, B, C, D, E);
T_0_15( 1, E, A, B, C, D);
T_0_15( 2, D, E, A, B, C);
diff --git a/builtin/apply.c b/builtin/apply.c
index ace04c453..d453c8337 100644
--- a/builtin/apply.c
+++ b/builtin/apply.c
@@ -3589,7 +3589,7 @@ static void build_fake_ancestor(struct patch *list, const char *filename)
name = patch->old_name ? patch->old_name : patch->new_name;
if (0 < patch->is_new)
continue;
- else if (get_sha1(patch->old_sha1_prefix, sha1))
+ else if (get_sha1_blob(patch->old_sha1_prefix, sha1))
/* git diff has no index line for mode/type changes */
if (!patch->lines_added && !patch->lines_deleted) {
if (get_current_sha1(patch->old_name, sha1))
@@ -3769,7 +3769,8 @@ static void add_index_file(const char *path, unsigned mode, void *buf, unsigned
ce = xcalloc(1, ce_size);
memcpy(ce->name, path, namelen);
ce->ce_mode = create_ce_mode(mode);
- ce->ce_flags = namelen;
+ ce->ce_flags = create_ce_flags(0);
+ ce->ce_namelen = namelen;
if (S_ISGITLINK(mode)) {
const char *s = buf;
@@ -3890,7 +3891,8 @@ static void add_conflicted_stages_file(struct patch *patch)
ce = xcalloc(1, ce_size);
memcpy(ce->name, patch->new_name, namelen);
ce->ce_mode = create_ce_mode(mode);
- ce->ce_flags = create_ce_flags(namelen, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = namelen;
hashcpy(ce->sha1, patch->threeway_stage[stage - 1]);
if (add_cache_entry(ce, ADD_CACHE_OK_TO_ADD) < 0)
die(_("unable to add cache entry for %s"), patch->new_name);
diff --git a/builtin/blame.c b/builtin/blame.c
index 960c58d85..0d50273ce 100644
--- a/builtin/blame.c
+++ b/builtin/blame.c
@@ -2171,7 +2171,8 @@ static struct commit *fake_working_tree_commit(struct diff_options *opt,
ce = xcalloc(1, size);
hashcpy(ce->sha1, origin->blob_sha1);
memcpy(ce->name, path, len);
- ce->ce_flags = create_ce_flags(len, 0);
+ ce->ce_flags = create_ce_flags(0);
+ ce->ce_namelen = len;
ce->ce_mode = create_ce_mode(mode);
add_cache_entry(ce, ADD_CACHE_OK_TO_ADD|ADD_CACHE_OK_TO_REPLACE);
diff --git a/builtin/cat-file.c b/builtin/cat-file.c
index 36a910443..af74e775a 100644
--- a/builtin/cat-file.c
+++ b/builtin/cat-file.c
@@ -91,7 +91,7 @@ static int cat_one_file(int opt, const char *exp_type, const char *obj_name)
unsigned long size;
struct object_context obj_context;
- if (get_sha1_with_context(obj_name, sha1, &obj_context))
+ if (get_sha1_with_context(obj_name, 0, sha1, &obj_context))
die("Not a valid object name %s", obj_name);
buf = NULL;
diff --git a/builtin/checkout.c b/builtin/checkout.c
index 3980d5d06..d812219b3 100644
--- a/builtin/checkout.c
+++ b/builtin/checkout.c
@@ -73,7 +73,8 @@ static int update_some(const unsigned char *sha1, const char *base, int baselen,
hashcpy(ce->sha1, sha1);
memcpy(ce->name, base, baselen);
memcpy(ce->name + baselen, pathname, len - baselen);
- ce->ce_flags = create_ce_flags(len, 0) | CE_UPDATE;
+ ce->ce_flags = create_ce_flags(0) | CE_UPDATE;
+ ce->ce_namelen = len;
ce->ce_mode = create_ce_mode(mode);
add_cache_entry(ce, ADD_CACHE_OK_TO_ADD | ADD_CACHE_OK_TO_REPLACE);
return 0;
@@ -605,7 +606,7 @@ static int add_pending_uninteresting_ref(const char *refname,
const unsigned char *sha1,
int flags, void *cb_data)
{
- add_pending_sha1(cb_data, refname, sha1, flags | UNINTERESTING);
+ add_pending_sha1(cb_data, refname, sha1, UNINTERESTING);
return 0;
}
diff --git a/builtin/commit-tree.c b/builtin/commit-tree.c
index 164b655df..eac901a0e 100644
--- a/builtin/commit-tree.c
+++ b/builtin/commit-tree.c
@@ -48,16 +48,13 @@ int cmd_commit_tree(int argc, const char **argv, const char *prefix)
if (argc < 2 || !strcmp(argv[1], "-h"))
usage(commit_tree_usage);
- if (get_sha1(argv[1], tree_sha1))
- die("Not a valid object name %s", argv[1]);
-
for (i = 1; i < argc; i++) {
const char *arg = argv[i];
if (!strcmp(arg, "-p")) {
unsigned char sha1[20];
if (argc <= ++i)
usage(commit_tree_usage);
- if (get_sha1(argv[i], sha1))
+ if (get_sha1_commit(argv[i], sha1))
die("Not a valid object name %s", argv[i]);
assert_sha1_type(sha1, OBJ_COMMIT);
new_parent(lookup_commit(sha1), &parents);
@@ -104,7 +101,7 @@ int cmd_commit_tree(int argc, const char **argv, const char *prefix)
continue;
}
- if (get_sha1(arg, tree_sha1))
+ if (get_sha1_tree(arg, tree_sha1))
die("Not a valid object name %s", arg);
if (got_tree)
die("Cannot give more than one trees");
diff --git a/builtin/commit.c b/builtin/commit.c
index 95eeab1d5..20cef95d6 100644
--- a/builtin/commit.c
+++ b/builtin/commit.c
@@ -725,7 +725,7 @@ static int prepare_to_commit(const char *index_file, const char *prefix,
strbuf_release(&sb);
/* This checks if committer ident is explicitly given */
- strbuf_addstr(&committer_ident, git_committer_info(0));
+ strbuf_addstr(&committer_ident, git_committer_info(IDENT_STRICT));
if (use_editor && include_status) {
char *ai_tmp, *ci_tmp;
if (whence != FROM_COMMIT)
diff --git a/builtin/config.c b/builtin/config.c
index e8e1c0a45..8cd08da99 100644
--- a/builtin/config.c
+++ b/builtin/config.c
@@ -387,12 +387,20 @@ int cmd_config(int argc, const char **argv, const char *prefix)
home_config_paths(&user_config, &xdg_config, "config");
- if (access(user_config, R_OK) && !access(xdg_config, R_OK))
+ if (!user_config)
+ /*
+ * It is unknown if HOME/.gitconfig exists, so
+ * we do not know if we should write to XDG
+ * location; error out even if XDG_CONFIG_HOME
+ * is set and points at a sane location.
+ */
+ die("$HOME not set");
+
+ if (access(user_config, R_OK) &&
+ xdg_config && !access(xdg_config, R_OK))
given_config_file = xdg_config;
- else if (user_config)
- given_config_file = user_config;
else
- die("$HOME not set");
+ given_config_file = user_config;
}
else if (use_system_config)
given_config_file = git_etc_gitconfig();
diff --git a/builtin/log.c b/builtin/log.c
index adcbcf1f2..ecc279369 100644
--- a/builtin/log.c
+++ b/builtin/log.c
@@ -367,6 +367,7 @@ int cmd_whatchanged(int argc, const char **argv, const char *prefix)
rev.simplify_history = 0;
memset(&opt, 0, sizeof(opt));
opt.def = "HEAD";
+ opt.revarg_opt = REVARG_COMMITTISH;
cmd_log_init(argc, argv, prefix, &rev, &opt);
if (!rev.diffopt.output_format)
rev.diffopt.output_format = DIFF_FORMAT_RAW;
@@ -557,6 +558,7 @@ int cmd_log(int argc, const char **argv, const char *prefix)
rev.always_show_header = 1;
memset(&opt, 0, sizeof(opt));
opt.def = "HEAD";
+ opt.revarg_opt = REVARG_COMMITTISH;
cmd_log_init(argc, argv, prefix, &rev, &opt);
return cmd_log_walk(&rev);
}
@@ -1132,6 +1134,7 @@ int cmd_format_patch(int argc, const char **argv, const char *prefix)
rev.subject_prefix = fmt_patch_subject_prefix;
memset(&s_r_opt, 0, sizeof(s_r_opt));
s_r_opt.def = "HEAD";
+ s_r_opt.revarg_opt = REVARG_COMMITTISH;
if (default_attach) {
rev.mime_boundary = default_attach;
diff --git a/builtin/pack-objects.c b/builtin/pack-objects.c
index f3348208d..782e7d0c3 100644
--- a/builtin/pack-objects.c
+++ b/builtin/pack-objects.c
@@ -2373,7 +2373,7 @@ static void get_object_list(int ac, const char **av)
}
die("not a rev '%s'", line);
}
- if (handle_revision_arg(line, &revs, flags, 1))
+ if (handle_revision_arg(line, &revs, flags, REVARG_CANNOT_BE_FILENAME))
die("bad revision '%s'", line);
}
diff --git a/builtin/reset.c b/builtin/reset.c
index 4cc34c908..74442bd76 100644
--- a/builtin/reset.c
+++ b/builtin/reset.c
@@ -276,7 +276,7 @@ int cmd_reset(int argc, const char **argv, const char *prefix)
* Otherwise, argv[i] could be either <rev> or <paths> and
* has to be unambiguous.
*/
- else if (!get_sha1(argv[i], sha1)) {
+ else if (!get_sha1_committish(argv[i], sha1)) {
/*
* Ok, argv[i] looks like a rev; it should not
* be a filename.
@@ -289,9 +289,15 @@ int cmd_reset(int argc, const char **argv, const char *prefix)
}
}
- if (get_sha1(rev, sha1))
+ if (get_sha1_committish(rev, sha1))
die(_("Failed to resolve '%s' as a valid ref."), rev);
+ /*
+ * NOTE: As "git reset $treeish -- $path" should be usable on
+ * any tree-ish, this is not strictly correct. We are not
+ * moving the HEAD to any commit; we are merely resetting the
+ * entries in the index to that of a treeish.
+ */
commit = lookup_commit_reference(sha1);
if (!commit)
die(_("Could not parse object '%s'."), rev);
diff --git a/builtin/rev-parse.c b/builtin/rev-parse.c
index 13495b88f..32788a9f8 100644
--- a/builtin/rev-parse.c
+++ b/builtin/rev-parse.c
@@ -195,6 +195,12 @@ static int anti_reference(const char *refname, const unsigned char *sha1, int fl
return 0;
}
+static int show_abbrev(const unsigned char *sha1, void *cb_data)
+{
+ show_rev(NORMAL, sha1, NULL);
+ return 0;
+}
+
static void show_datestring(const char *flag, const char *datestr)
{
static char buffer[100];
@@ -238,7 +244,7 @@ static int try_difference(const char *arg)
next = "HEAD";
if (dotdot == arg)
this = "HEAD";
- if (!get_sha1(this, sha1) && !get_sha1(next, end)) {
+ if (!get_sha1_committish(this, sha1) && !get_sha1_committish(next, end)) {
show_rev(NORMAL, end, next);
show_rev(symmetric ? NORMAL : REVERSED, sha1, this);
if (symmetric) {
@@ -278,7 +284,7 @@ static int try_parent_shorthands(const char *arg)
return 0;
*dotdot = 0;
- if (get_sha1(arg, sha1))
+ if (get_sha1_committish(arg, sha1))
return 0;
if (!parents_only)
@@ -589,6 +595,10 @@ int cmd_rev_parse(int argc, const char **argv, const char *prefix)
for_each_ref(show_reference, NULL);
continue;
}
+ if (!prefixcmp(arg, "--disambiguate=")) {
+ for_each_abbrev(arg + 15, show_abbrev, NULL);
+ continue;
+ }
if (!strcmp(arg, "--bisect")) {
for_each_ref_in("refs/bisect/bad", show_reference, NULL);
for_each_ref_in("refs/bisect/good", anti_reference, NULL);
diff --git a/builtin/update-index.c b/builtin/update-index.c
index 5a4e9ea55..4ce341cee 100644
--- a/builtin/update-index.c
+++ b/builtin/update-index.c
@@ -95,7 +95,8 @@ static int add_one_path(struct cache_entry *old, const char *path, int len, stru
size = cache_entry_size(len);
ce = xcalloc(1, size);
memcpy(ce->name, path, len);
- ce->ce_flags = len;
+ ce->ce_flags = create_ce_flags(0);
+ ce->ce_namelen = len;
fill_stat_cache_info(ce, st);
ce->ce_mode = ce_mode_from_stat(old, st->st_mode);
@@ -229,7 +230,8 @@ static int add_cacheinfo(unsigned int mode, const unsigned char *sha1,
hashcpy(ce->sha1, sha1);
memcpy(ce->name, path, len);
- ce->ce_flags = create_ce_flags(len, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = len;
ce->ce_mode = create_ce_mode(mode);
if (assume_unchanged)
ce->ce_flags |= CE_VALID;
@@ -427,7 +429,8 @@ static struct cache_entry *read_one_ent(const char *which,
hashcpy(ce->sha1, sha1);
memcpy(ce->name, path, namelen);
- ce->ce_flags = create_ce_flags(namelen, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = namelen;
ce->ce_mode = create_ce_mode(mode);
return ce;
}
diff --git a/cache.h b/cache.h
index 6a1aff5e2..67f28b4da 100644
--- a/cache.h
+++ b/cache.h
@@ -128,13 +128,13 @@ struct cache_entry {
unsigned int ce_gid;
unsigned int ce_size;
unsigned int ce_flags;
+ unsigned int ce_namelen;
unsigned char sha1[20];
struct cache_entry *next;
struct cache_entry *dir_next;
char name[FLEX_ARRAY]; /* more */
};
-#define CE_NAMEMASK (0x0fff)
#define CE_STAGEMASK (0x3000)
#define CE_EXTENDED (0x4000)
#define CE_VALID (0x8000)
@@ -198,21 +198,12 @@ static inline void copy_cache_entry(struct cache_entry *dst, struct cache_entry
dst->ce_flags = (dst->ce_flags & ~CE_STATE_MASK) | state;
}
-static inline unsigned create_ce_flags(size_t len, unsigned stage)
+static inline unsigned create_ce_flags(unsigned stage)
{
- if (len >= CE_NAMEMASK)
- len = CE_NAMEMASK;
- return (len | (stage << CE_STAGESHIFT));
-}
-
-static inline size_t ce_namelen(const struct cache_entry *ce)
-{
- size_t len = ce->ce_flags & CE_NAMEMASK;
- if (len < CE_NAMEMASK)
- return len;
- return strlen(ce->name + CE_NAMEMASK) + CE_NAMEMASK;
+ return (stage << CE_STAGESHIFT);
}
+#define ce_namelen(ce) ((ce)->ce_namelen)
#define ce_size(ce) cache_entry_size(ce_namelen(ce))
#define ce_stage(ce) ((CE_STAGEMASK & (ce)->ce_flags) >> CE_STAGESHIFT)
#define ce_uptodate(ce) ((ce)->ce_flags & CE_UPTODATE)
@@ -451,6 +442,7 @@ extern int discard_index(struct index_state *);
extern int unmerged_index(const struct index_state *);
extern int verify_path(const char *path);
extern struct cache_entry *index_name_exists(struct index_state *istate, const char *name, int namelen, int igncase);
+extern int index_name_stage_pos(const struct index_state *, const char *name, int namelen, int stage);
extern int index_name_pos(const struct index_state *, const char *name, int namelen);
#define ADD_CACHE_OK_TO_ADD 1 /* Ok to add */
#define ADD_CACHE_OK_TO_REPLACE 2 /* Ok to replace file/directory */
@@ -790,17 +782,25 @@ struct object_context {
unsigned mode;
};
+#define GET_SHA1_QUIETLY 01
+#define GET_SHA1_COMMIT 02
+#define GET_SHA1_COMMITTISH 04
+#define GET_SHA1_TREE 010
+#define GET_SHA1_TREEISH 020
+#define GET_SHA1_BLOB 040
+#define GET_SHA1_ONLY_TO_DIE 04000
+
extern int get_sha1(const char *str, unsigned char *sha1);
-extern int get_sha1_with_mode_1(const char *str, unsigned char *sha1, unsigned *mode, int only_to_die, const char *prefix);
-static inline int get_sha1_with_mode(const char *str, unsigned char *sha1, unsigned *mode)
-{
- return get_sha1_with_mode_1(str, sha1, mode, 0, NULL);
-}
-extern int get_sha1_with_context_1(const char *name, unsigned char *sha1, struct object_context *orc, int only_to_die, const char *prefix);
-static inline int get_sha1_with_context(const char *str, unsigned char *sha1, struct object_context *orc)
-{
- return get_sha1_with_context_1(str, sha1, orc, 0, NULL);
-}
+extern int get_sha1_commit(const char *str, unsigned char *sha1);
+extern int get_sha1_committish(const char *str, unsigned char *sha1);
+extern int get_sha1_tree(const char *str, unsigned char *sha1);
+extern int get_sha1_treeish(const char *str, unsigned char *sha1);
+extern int get_sha1_blob(const char *str, unsigned char *sha1);
+extern void maybe_die_on_misspelt_object_name(const char *name, const char *prefix);
+extern int get_sha1_with_context(const char *str, unsigned flags, unsigned char *sha1, struct object_context *orc);
+
+typedef int each_abbrev_fn(const unsigned char *sha1, void *);
+extern int for_each_abbrev(const char *prefix, each_abbrev_fn, void *);
/*
* Try to read a SHA1 in hexadecimal format from the 40 characters
@@ -864,6 +864,7 @@ extern int validate_headref(const char *ref);
extern int base_name_compare(const char *name1, int len1, int mode1, const char *name2, int len2, int mode2);
extern int df_name_compare(const char *name1, int len1, int mode1, const char *name2, int len2, int mode2);
extern int cache_name_compare(const char *name1, int len1, const char *name2, int len2);
+extern int cache_name_stage_compare(const char *name1, int len1, int stage1, const char *name2, int len2, int stage2);
extern void *read_object_with_reference(const unsigned char *sha1,
const char *required_type,
diff --git a/commit.c b/commit.c
index 8248a994a..42af4c1f2 100644
--- a/commit.c
+++ b/commit.c
@@ -68,7 +68,7 @@ struct commit *lookup_commit_reference_by_name(const char *name)
unsigned char sha1[20];
struct commit *commit;
- if (get_sha1(name, sha1))
+ if (get_sha1_committish(name, sha1))
return NULL;
commit = lookup_commit_reference(sha1);
if (!commit || parse_commit(commit))
diff --git a/config.c b/config.c
index 40818e872..2b706ea20 100644
--- a/config.c
+++ b/config.c
@@ -945,12 +945,12 @@ int git_config_early(config_fn_t fn, void *data, const char *repo_config)
found += 1;
}
- if (!access(xdg_config, R_OK)) {
+ if (xdg_config && !access(xdg_config, R_OK)) {
ret += git_config_from_file(fn, xdg_config, data);
found += 1;
}
- if (!access(user_config, R_OK)) {
+ if (user_config && !access(user_config, R_OK)) {
ret += git_config_from_file(fn, user_config, data);
found += 1;
}
diff --git a/configure.ac b/configure.ac
index 4e9012f49..df7e37659 100644
--- a/configure.ac
+++ b/configure.ac
@@ -3,11 +3,24 @@
## Definitions of private macros.
-# GIT_CONF_APPEND_LINE(LINE)
-# --------------------------
-# Append LINE to file ${config_append}
-AC_DEFUN([GIT_CONF_APPEND_LINE],
- [echo "$1" >> "${config_append}"])
+# GIT_CONF_SUBST(VAR, VAL)
+# ------------------------
+# Cause the line "VAR=VAL" to be eventually appended to ${config_file}.
+AC_DEFUN([GIT_CONF_SUBST],
+ [AC_REQUIRE([GIT_CONF_SUBST_INIT])
+ config_appended_defs="$config_appended_defs${newline}$1=$2"])
+
+# GIT_CONF_SUBST_INIT
+# -------------------
+# Prepare shell variables and autoconf machinery required by later calls
+# to GIT_CONF_SUBST.
+AC_DEFUN([GIT_CONF_SUBST_INIT],
+ [config_appended_defs=; newline='
+'
+ AC_CONFIG_COMMANDS([$config_file],
+ [echo "$config_appended_defs" >> "$config_file"],
+ [config_file=$config_file
+ config_appended_defs="$config_appended_defs"])])
# GIT_ARG_SET_PATH(PROGRAM)
# -------------------------
@@ -29,13 +42,12 @@ AC_DEFUN([GIT_ARG_SET_PATH],
# --without-PROGRAM is used.
AC_DEFUN([GIT_CONF_APPEND_PATH],
[m4_pushdef([GIT_UC_PROGRAM], m4_toupper([$1]))dnl
- PROGRAM=GIT_UC_PROGRAM
if test "$withval" = "no"; then
if test -n "$2"; then
GIT_UC_PROGRAM[]_PATH=$withval
- AC_MSG_NOTICE([Disabling use of ${PROGRAM}])
- GIT_CONF_APPEND_LINE(NO_${PROGRAM}=YesPlease)
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=)
+ AC_MSG_NOTICE([Disabling use of GIT_UC_PROGRAM])
+ GIT_CONF_SUBST([NO_]GIT_UC_PROGRAM, [YesPlease])
+ GIT_CONF_SUBST(GIT_UC_PROGRAM[]_PATH, [])
else
AC_MSG_ERROR([You cannot use git without $1])
fi
@@ -45,7 +57,7 @@ AC_DEFUN([GIT_CONF_APPEND_PATH],
else
GIT_UC_PROGRAM[]_PATH=$withval
AC_MSG_NOTICE([Setting GIT_UC_PROGRAM[]_PATH to $withval])
- GIT_CONF_APPEND_LINE(${PROGRAM}_PATH=$withval)
+ GIT_CONF_SUBST(GIT_UC_PROGRAM[]_PATH, [$withval])
fi
fi
m4_popdef([GIT_UC_PROGRAM])])
@@ -58,7 +70,6 @@ AC_DEFUN([GIT_CONF_APPEND_PATH],
# * Unset NO_PACKAGE for --with-PACKAGE without ARG
AC_DEFUN([GIT_PARSE_WITH],
[m4_pushdef([GIT_UC_PACKAGE], m4_toupper([$1]))dnl
- PACKAGE=GIT_UC_PACKAGE
if test "$withval" = "no"; then
NO_[]GIT_UC_PACKAGE=YesPlease
elif test "$withval" = "yes"; then
@@ -67,7 +78,7 @@ AC_DEFUN([GIT_PARSE_WITH],
NO_[]GIT_UC_PACKAGE=
GIT_UC_PACKAGE[]DIR=$withval
AC_MSG_NOTICE([Setting GIT_UC_PACKAGE[]DIR to $withval])
- GIT_CONF_APPEND_LINE(${PACKAGE}DIR=$withval)
+ GIT_CONF_SUBST(GIT_UC_PACKAGE[DIR], [$withval])
fi
m4_popdef([GIT_UC_PACKAGE])])
@@ -87,7 +98,7 @@ AC_DEFUN([GIT_PARSE_WITH_SET_MAKE_VAR],
[a value for $1 ($2). Maybe you do...?])
fi
AC_MSG_NOTICE([Setting $2 to $withval])
- GIT_CONF_APPEND_LINE($2=$withval)
+ GIT_CONF_SUBST([$2], [$withval])
fi)])# GIT_PARSE_WITH_SET_MAKE_VAR
#
@@ -135,10 +146,9 @@ AC_INIT([git], [@@GIT_VERSION@@], [git@vger.kernel.org])
AC_CONFIG_SRCDIR([git.c])
config_file=config.mak.autogen
-config_append=config.mak.append
config_in=config.mak.in
-echo "# ${config_append}. Generated by configure." > "${config_append}"
+GIT_CONF_SUBST([AUTOCONFIGURED], [YesPlease])
# Directories holding "saner" versions of common or POSIX binaries.
AC_ARG_WITH([sane-tool-path],
@@ -150,7 +160,7 @@ AC_ARG_WITH([sane-tool-path],
else
AC_MSG_NOTICE([Setting SANE_TOOL_PATH to '$withval'])
fi
- GIT_CONF_APPEND_LINE([SANE_TOOL_PATH=$withval])],
+ GIT_CONF_SUBST([SANE_TOOL_PATH], [$withval])],
[# If the "--with-sane-tool-path" option was not given, don't touch
# SANE_TOOL_PATH here, but let defaults in Makefile take care of it.
# This should minimize spurious differences in the behaviour of the
@@ -169,7 +179,7 @@ AC_ARG_WITH([lib],
else
lib=$withval
AC_MSG_NOTICE([Setting lib to '$lib'])
- GIT_CONF_APPEND_LINE(lib=$withval)
+ GIT_CONF_SUBST([lib], [$withval])
fi])
if test -z "$lib"; then
@@ -205,7 +215,7 @@ AC_ARG_ENABLE([jsmin],
[
JSMIN=$enableval;
AC_MSG_NOTICE([Setting JSMIN to '$JSMIN' to enable JavaScript minifying])
- GIT_CONF_APPEND_LINE(JSMIN=$enableval);
+ GIT_CONF_SUBST([JSMIN], [$enableval])
])
# Define option to enable CSS minification
@@ -215,7 +225,7 @@ AC_ARG_ENABLE([cssmin],
[
CSSMIN=$enableval;
AC_MSG_NOTICE([Setting CSSMIN to '$CSSMIN' to enable CSS minifying])
- GIT_CONF_APPEND_LINE(CSSMIN=$enableval);
+ GIT_CONF_SUBST([CSSMIN], [$enableval])
])
## Site configuration (override autodetection)
@@ -256,7 +266,7 @@ AS_HELP_STRING([], [ARG can be also prefix for libpcre library and hea
USE_LIBPCRE=YesPlease
LIBPCREDIR=$withval
AC_MSG_NOTICE([Setting LIBPCREDIR to $withval])
- GIT_CONF_APPEND_LINE(LIBPCREDIR=$withval)
+ GIT_CONF_SUBST([LIBPCREDIR], [$withval])
fi)
#
# Define NO_CURL if you do not have curl installed. git-http-pull and
@@ -1043,9 +1053,5 @@ AC_SUBST(PTHREAD_LIBS)
AC_SUBST(NO_PTHREADS)
## Output files
-AC_CONFIG_FILES(["${config_file}":"${config_in}":"${config_append}"])
+AC_CONFIG_FILES(["${config_file}":"${config_in}"])
AC_OUTPUT
-
-
-## Cleanup
-rm -f "${config_append}"
diff --git a/contrib/mw-to-git/git-remote-mediawiki b/contrib/mw-to-git/git-remote-mediawiki
index accd70a94..8647c92df 100755
--- a/contrib/mw-to-git/git-remote-mediawiki
+++ b/contrib/mw-to-git/git-remote-mediawiki
@@ -51,6 +51,9 @@ use constant EMPTY_CONTENT => "<!-- empty page -->\n";
# used to reflect file creation or deletion in diff.
use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+# Used on Git's side to reflect empty edit messages on the wiki
+use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+
my $remotename = $ARGV[0];
my $url = $ARGV[1];
@@ -63,11 +66,16 @@ chomp(@tracked_pages);
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.". $remotename .".categories"));
chomp(@tracked_categories);
-# Import media files too.
+# Import media files on pull
my $import_media = run_git("config --get --bool remote.". $remotename .".mediaimport");
chomp($import_media);
$import_media = ($import_media eq "true");
+# Export media files on push
+my $export_media = run_git("config --get --bool remote.". $remotename .".mediaexport");
+chomp($export_media);
+$export_media = !($export_media eq "false");
+
my $wiki_login = run_git("config --get remote.". $remotename .".mwLogin");
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.". $remotename .".mwPassword");
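
The two knobs read above control whether media files travel in each direction; a hedged example for a remote named "origin":
------------
# Pull attachments (File: pages and their media) from the wiki:
$ git config --bool remote.origin.mediaimport true
# Do not push local media files back to the wiki:
$ git config --bool remote.origin.mediaexport false
------------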
@@ -163,32 +171,6 @@ while (<STDIN>) {
## credential API management (generic functions)
-sub credential_from_url {
- my $url = shift;
- my $parsed = URI->new($url);
- my %credential;
-
- if ($parsed->scheme) {
- $credential{protocol} = $parsed->scheme;
- }
- if ($parsed->host) {
- $credential{host} = $parsed->host;
- }
- if ($parsed->path) {
- $credential{path} = $parsed->path;
- }
- if ($parsed->userinfo) {
- if ($parsed->userinfo =~ /([^:]*):(.*)/) {
- $credential{username} = $1;
- $credential{password} = $2;
- } else {
- $credential{username} = $parsed->userinfo;
- }
- }
-
- return %credential;
-}
-
sub credential_read {
my %credential;
my $reader = shift;
@@ -206,8 +188,10 @@ sub credential_read {
sub credential_write {
my $credential = shift;
my $writer = shift;
+ # url overwrites other fields, so it must come first
+ print $writer "url=$credential->{url}\n" if exists $credential->{url};
while (my ($key, $value) = each(%$credential) ) {
- if ($value) {
+ if (length $value && $key ne 'url') {
print $writer "$key=$value\n";
}
}
@@ -246,7 +230,7 @@ sub mw_connect_maybe {
$mediawiki = MediaWiki::API->new;
$mediawiki->{config}->{api_url} = "$url/api.php";
if ($wiki_login) {
- my %credential = credential_from_url($url);
+ my %credential = (url => $url);
$credential{username} = $wiki_login;
$credential{password} = $wiki_passwd;
credential_run("fill", \%credential);
@@ -363,6 +347,8 @@ sub get_mw_first_pages {
sub get_mw_pages {
mw_connect_maybe();
+ print STDERR "Listing pages on remote wiki...\n";
+
my %pages; # hash on page titles to avoid duplicates
my $user_defined;
if (@tracked_pages) {
@@ -386,6 +372,7 @@ sub get_mw_pages {
get_all_mediafiles(\%pages);
}
}
+ print STDERR (scalar keys %pages) . " pages found.\n";
return %pages;
}
@@ -568,6 +555,8 @@ sub get_last_remote_revision {
my $max_rev_num = 0;
+ print STDERR "Getting last revision id on tracked pages...\n";
+
foreach my $page (@pages) {
my $id = $page->{pageid};
@@ -897,6 +886,10 @@ sub mw_import_revids {
my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
foreach my $pagerevid (@$revision_ids) {
+ # Count page even if we skip it, since we display
+ # $n/$total and $total includes skipped pages.
+ $n++;
+
# fetch the content of the pages
my $query = {
action => 'query',
@@ -911,6 +904,11 @@ sub mw_import_revids {
die "Failed to retrieve modified page for revision $pagerevid";
}
+ if (defined($result->{query}->{badrevids}->{$pagerevid})) {
+ # The revision id does not exist on the remote wiki.
+ next;
+ }
+
if (!defined($result->{query}->{pages})) {
die "Invalid revision $pagerevid.";
}
@@ -919,10 +917,6 @@ sub mw_import_revids {
my $result_page = $result_pages[0];
my $rev = $result_pages[0]->{revisions}->[0];
- # Count page even if we skip it, since we display
- # $n/$total and $total includes skipped pages.
- $n++;
-
my $page_title = $result_page->{title};
if (!exists($pages->{$page_title})) {
@@ -935,7 +929,7 @@ sub mw_import_revids {
my %commit;
$commit{author} = $rev->{user} || 'Anonymous';
- $commit{comment} = $rev->{comment} || '*Empty MediaWiki Message*';
+ $commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
$commit{title} = mediawiki_smudge_filename($page_title);
$commit{mw_revision} = $rev->{revid};
$commit{content} = mediawiki_smudge($rev->{'*'});
@@ -950,8 +944,11 @@ sub mw_import_revids {
# Differentiates classic pages and media files.
my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
my %mediafile;
- if ($namespace && get_mw_namespace_id($namespace) == get_mw_namespace_id("File")) {
- %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+ if ($namespace) {
+ my $id = get_mw_namespace_id($namespace);
+ if ($id && $id == get_mw_namespace_id("File")) {
+ %mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+ }
}
# If this is a revision of the media page for new version
# of a file do one common commit for both file and media page.
@@ -1050,6 +1047,10 @@ sub mw_push_file {
my $oldrevid = shift;
my $newrevid;
+ if ($summary eq EMPTY_MESSAGE) {
+ $summary = '';
+ }
+
my $new_sha1 = $diff_info_split[3];
my $old_sha1 = $diff_info_split[2];
my $page_created = ($old_sha1 eq NULL_SHA1);
@@ -1061,6 +1062,11 @@ sub mw_push_file {
$extension = "";
}
if ($extension eq "mw") {
+ my $ns = get_mw_namespace_id_for_page($complete_file_name);
+ if ($ns && $ns == get_mw_namespace_id("File") && (!$export_media)) {
+ print STDERR "Ignoring media file related page: $complete_file_name\n";
+ return ($oldrevid, "ok");
+ }
my $file_content;
if ($page_deleted) {
# Deleting a page usually requires
@@ -1100,10 +1106,12 @@ sub mw_push_file {
}
$newrevid = $result->{edit}->{newrevid};
print STDERR "Pushed file: $new_sha1 - $title\n";
- } else {
+ } elsif ($export_media) {
$newrevid = mw_upload_file($complete_file_name, $new_sha1,
$extension, $page_deleted,
$summary);
+ } else {
+ print STDERR "Ignoring media file $title\n";
}
$newrevid = ($newrevid or $oldrevid);
return ($newrevid, "ok");
@@ -1177,16 +1185,26 @@ sub mw_push_revision {
if ($last_local_revid > 0) {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
+ print STDERR "Computing path from local to remote ...\n";
+ my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents $local ^$parsed_sha1"));
+ my %local_ancestry;
+ foreach my $line (@local_ancestry) {
+ if (my ($child, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+ foreach my $parent (split(' ', $parents)) {
+ $local_ancestry{$parent} = $child;
+ }
+		} elsif ($line !~ m/^-?([a-f0-9]+)/) {
+ die "Unexpected output from git rev-list: $line";
+ }
+ }
while ($parsed_sha1 ne $HEAD_sha1) {
- my @commit_info = grep(/^$parsed_sha1/, split(/\n/, run_git("rev-list --children $local")));
- if (!@commit_info) {
+ my $child = $local_ancestry{$parsed_sha1};
+ if (!$child) {
+ printf STDERR "Cannot find a path in history from remote commit to last commit\n";
return error_non_fast_forward($remote);
}
- my @commit_info_split = split(/ |\n/, $commit_info[0]);
- # $commit_info_split[1] is the sha1 of the commit to export
- # $commit_info_split[0] is the sha1 of its direct child
- push(@commit_pairs, \@commit_info_split);
- $parsed_sha1 = $commit_info_split[1];
+ push(@commit_pairs, [$parsed_sha1, $child]);
+ $parsed_sha1 = $child;
}
} else {
# No remote mediawiki revision. Export the whole
@@ -1234,7 +1252,7 @@ sub mw_push_revision {
}
}
unless ($dumb_push) {
- run_git("notes --ref=$remotename/mediawiki add -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
+ run_git("notes --ref=$remotename/mediawiki add -f -m \"mediawiki_revision: $mw_revision\" $sha1_commit");
run_git("update-ref -m \"Git-MediaWiki push\" refs/mediawiki/$remotename/master $sha1_commit $sha1_child");
}
}
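The old loop re-ran "git rev-list --children $local" and grepped its full output once per exported commit, which is quadratic in the number of commits; the replacement runs "rev-list --boundary --parents" once and builds a parent-to-child hash so each step along the path is a lookup. A standalone sketch of the same idea (the refs and variable names are illustrative):

    use strict;
    use warnings;

    my $local  = 'HEAD';
    my $remote = 'refs/remotes/origin/master';   # hypothetical remote-tracking ref

    # One rev-list call; each line is "<commit> <parent> [<parent>...]",
    # with boundary commits prefixed by "-".
    my %child_of;
    foreach my $line (split /\n/, `git rev-list --boundary --parents $local ^$remote`) {
        if (my ($commit, $parents) = $line =~ m/^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
            $child_of{$_} = $commit for split ' ', $parents;
        }
    }

    chomp(my $cursor = `git rev-parse $remote`);
    chomp(my $head   = `git rev-parse $local`);
    my @commit_pairs;
    while ($cursor ne $head) {
        my $child = $child_of{$cursor}
            or die "no path from $remote to $local (non-fast-forward push?)\n";
        push @commit_pairs, [$cursor, $child];   # export $child, whose parent is $cursor
        $cursor = $child;
    }
    print scalar(@commit_pairs), " commit(s) to export\n";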
@@ -1275,12 +1293,16 @@ sub get_mw_namespace_id {
# Look at configuration file, if the record for that namespace is
# already cached. Namespaces are stored in form:
# "Name_of_namespace:Id_namespace", ex.: "File:6".
- my @temp = split(/[ \n]/, run_git("config --get-all remote."
+ my @temp = split(/[\n]/, run_git("config --get-all remote."
. $remotename .".namespaceCache"));
chomp(@temp);
foreach my $ns (@temp) {
my ($n, $id) = split(/:/, $ns);
- $namespace_id{$n} = $id;
+ if ($id eq 'notANameSpace') {
+ $namespace_id{$n} = {is_namespace => 0};
+ } else {
+ $namespace_id{$n} = {is_namespace => 1, id => $id};
+ }
$cached_mw_namespace_id{$n} = 1;
}
}
@@ -1298,26 +1320,44 @@ sub get_mw_namespace_id {
while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
if (defined($ns->{id}) && defined($ns->{canonical})) {
- $namespace_id{$ns->{canonical}} = $ns->{id};
+ $namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
if ($ns->{'*'}) {
# alias (e.g. french Fichier: as alias for canonical File:)
- $namespace_id{$ns->{'*'}} = $ns->{id};
+ $namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
}
}
}
}
- my $id = $namespace_id{$name};
+ my $ns = $namespace_id{$name};
+ my $id;
- if (defined $id) {
- # Store explicitely requested namespaces on disk
- if (!exists $cached_mw_namespace_id{$name}) {
- run_git("config --add remote.". $remotename
- .".namespaceCache \"". $name .":". $id ."\"");
- $cached_mw_namespace_id{$name} = 1;
- }
- return $id;
+ unless (defined $ns) {
+ print STDERR "No such namespace $name on MediaWiki.\n";
+ $ns = {is_namespace => 0};
+ $namespace_id{$name} = $ns;
+ }
+
+ if ($ns->{is_namespace}) {
+ $id = $ns->{id};
+ }
+
+	# Store "notANameSpace" as special value for nonexistent namespaces
+ my $store_id = ($id || 'notANameSpace');
+
+	# Store explicitly requested namespaces on disk
+ if (!exists $cached_mw_namespace_id{$name}) {
+ run_git("config --add remote.". $remotename
+ .".namespaceCache \"". $name .":". $store_id ."\"");
+ $cached_mw_namespace_id{$name} = 1;
+ }
+ return $id;
+}
+
+sub get_mw_namespace_id_for_page {
+ if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+ return get_mw_namespace_id($namespace);
} else {
- die "No such namespace $name on MediaWiki.";
+ return;
}
}
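The namespace cache now stores negative results too: a title prefix that turns out not to be a namespace is recorded as "<Name>:notANameSpace" in remote.<name>.namespaceCache, so a later push of the same page does not query the wiki again (this is what the new tests below for titles containing ":" exercise). Splitting the cached list on newlines only, instead of spaces and newlines, also keeps namespace names that contain spaces intact. A small sketch of how such entries might round-trip through git config; the remote name and titles are made up:

    use strict;
    use warnings;

    my %namespace_id;

    # Remember a lookup result, using "notANameSpace" as the negative sentinel.
    sub cache_namespace {
        my ($name, $id) = @_;
        my $store = defined $id ? $id : 'notANameSpace';
        system(qw(git config --add remote.origin.namespaceCache), "$name:$store");
    }

    # Reload the cache; entries are "Name:Id" or "Name:notANameSpace", one per line.
    sub load_namespace_cache {
        foreach my $entry (split /\n/, `git config --get-all remote.origin.namespaceCache`) {
            my ($name, $id) = split /:/, $entry;
            $namespace_id{$name} = $id eq 'notANameSpace'
                ? { is_namespace => 0 }
                : { is_namespace => 1, id => $id };
        }
    }

    cache_namespace('File', 6);                 # "File" is namespace 6 on a stock wiki
    cache_namespace('NotANameSpace', undef);    # colon-containing title, not a namespace
    load_namespace_cache();
    printf "File => id %s\n", $namespace_id{File}{id};
    printf "NotANameSpace is a namespace? %d\n", $namespace_id{NotANameSpace}{is_namespace};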
diff --git a/contrib/mw-to-git/t/push-pull-tests.sh b/contrib/mw-to-git/t/push-pull-tests.sh
index 6692a0f40..9da2dc5ff 100644
--- a/contrib/mw-to-git/t/push-pull-tests.sh
+++ b/contrib/mw-to-git/t/push-pull-tests.sh
@@ -104,7 +104,7 @@ test_push_pull () {
git push
) &&
- test ! wiki_page_exist Foo
+ test_must_fail wiki_page_exist Foo
'
test_expect_success 'Merge conflict expected and solving it' '
diff --git a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
index 863587845..246d47d8f 100755
--- a/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
+++ b/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
@@ -169,6 +169,26 @@ test_expect_failure 'special character at the begining of file name from mw to g
test_path_is_file mw_dir_11/[char_2
'
+test_expect_success 'Pull page with title containing ":" other than namespace separator' '
+ wiki_editpage Foo:Bar content false &&
+ (
+ cd mw_dir_11 &&
+ git pull
+ ) &&
+ test_path_is_file mw_dir_11/Foo:Bar.mw
+'
+
+test_expect_success 'Push page with title containing ":" other than namespace separator' '
+ (
+ cd mw_dir_11 &&
+ echo content >NotANameSpace:Page.mw &&
+ git add NotANameSpace:Page.mw &&
+ git commit -m "add page with colon" &&
+ git push
+ ) &&
+ wiki_page_exist NotANameSpace:Page
+'
+
test_expect_success 'test of correct formating for file name from mw to git' '
wiki_reset &&
git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
diff --git a/credential.c b/credential.c
index 2c400073f..e54753c75 100644
--- a/credential.c
+++ b/credential.c
@@ -172,6 +172,8 @@ int credential_read(struct credential *c, FILE *fp)
} else if (!strcmp(key, "path")) {
free(c->path);
c->path = xstrdup(value);
+ } else if (!strcmp(key, "url")) {
+ credential_from_url(c, value);
}
/*
* Ignore other lines; we don't know what they mean, but
diff --git a/date.c b/date.c
index 1fdcf7c6e..57331ed40 100644
--- a/date.c
+++ b/date.c
@@ -624,7 +624,7 @@ static int match_object_header_date(const char *date, unsigned long *timestamp,
unsigned long stamp;
int ofs;
- if (*date < '0' || '9' <= *date)
+ if (*date < '0' || '9' < *date)
return -1;
stamp = strtoul(date, &end, 10);
if (*end != ' ' || stamp == ULONG_MAX || (end[1] != '+' && end[1] != '-'))
diff --git a/diff.c b/diff.c
index 208096fe4..62cbe141e 100644
--- a/diff.c
+++ b/diff.c
@@ -2992,9 +2992,8 @@ static void run_diff_cmd(const char *pgm,
int complete_rewrite = (p->status == DIFF_STATUS_MODIFIED) && p->score;
int must_show_header = 0;
- if (!DIFF_OPT_TST(o, ALLOW_EXTERNAL))
- pgm = NULL;
- else {
+
+ if (DIFF_OPT_TST(o, ALLOW_EXTERNAL)) {
struct userdiff_driver *drv = userdiff_find_by_path(attr_path);
if (drv && drv->external)
pgm = drv->external;
@@ -3074,6 +3073,9 @@ static void run_diff(struct diff_filepair *p, struct diff_options *o)
if (o->prefix_length)
strip_prefix(o->prefix_length, &name, &other);
+ if (!DIFF_OPT_TST(o, ALLOW_EXTERNAL))
+ pgm = NULL;
+
if (DIFF_PAIR_UNMERGED(p)) {
run_diff_cmd(pgm, name, NULL, attr_path,
NULL, NULL, NULL, o, p);
diff --git a/dir.c b/dir.c
index a772c6dc6..240bf0c49 100644
--- a/dir.c
+++ b/dir.c
@@ -1313,7 +1313,7 @@ void setup_standard_excludes(struct dir_struct *dir)
}
if (!access(path, R_OK))
add_excludes_from_file(dir, path);
- if (!access(excludes_file, R_OK))
+ if (excludes_file && !access(excludes_file, R_OK))
add_excludes_from_file(dir, excludes_file);
}
diff --git a/git-am.sh b/git-am.sh
index a1f3f3055..bd9620c6c 100755
--- a/git-am.sh
+++ b/git-am.sh
@@ -853,6 +853,11 @@ did you forget to use 'git add'?"
if test $apply_status != 0
then
eval_gettextln 'Patch failed at $msgnum $FIRSTLINE'
+ if test "$(git config --bool advice.amworkdir)" != false
+ then
+ eval_gettextln "The copy of the patch that failed is found in:
+ $dotest/patch"
+ fi
stop_here_user_resolve $this
fi
diff --git a/git-difftool.perl b/git-difftool.perl
index ae1e0525d..c0798540a 100755
--- a/git-difftool.perl
+++ b/git-difftool.perl
@@ -15,6 +15,7 @@ use strict;
use warnings;
use File::Basename qw(dirname);
use File::Copy;
+use File::Compare;
use File::Find;
use File::stat;
use File::Path qw(mkpath);
@@ -336,8 +337,10 @@ if (defined($dirdiff)) {
# files were modified during the diff, then the changes
# should be copied back to the working tree
for my $file (@working_tree) {
- copy("$b/$file", "$workdir/$file") or die $!;
- chmod(stat("$b/$file")->mode, "$workdir/$file") or die $!;
+ if (-e "$b/$file" && compare("$b/$file", "$workdir/$file")) {
+ copy("$b/$file", "$workdir/$file") or die $!;
+ chmod(stat("$b/$file")->mode, "$workdir/$file") or die $!;
+ }
}
} else {
if (defined($prompt)) {
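File::Compare's compare() returns 0 when two files are byte-identical (1 if they differ, -1 on error), so the dir-diff loop above now copies a file back from the temporary tree only when it still exists there and actually changed, instead of unconditionally overwriting the working tree. A tiny self-contained illustration of that guard, with hypothetical paths:

    use strict;
    use warnings;
    use File::Compare;
    use File::Copy;
    use File::stat;

    # Copy $src over $dst only if $src exists and their contents differ.
    sub copy_back_if_changed {
        my ($src, $dst) = @_;
        return unless -e $src;
        return if compare($src, $dst) == 0;   # unchanged: leave the worktree alone
        copy($src, $dst) or die "copy $src -> $dst: $!";
        chmod(stat($src)->mode, $dst) or die "chmod $dst: $!";
    }

    copy_back_if_changed("/tmp/difftool-right/README", "README");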
diff --git a/git-filter-branch.sh b/git-filter-branch.sh
index add2c0247..178e45305 100755
--- a/git-filter-branch.sh
+++ b/git-filter-branch.sh
@@ -84,7 +84,7 @@ set_ident () {
s/.*/GIT_'$uid'_EMAIL='\''&'\''; export GIT_'$uid'_EMAIL/p
g
- s/^'$lid' [^<]* <[^>]*> \(.*\)$/\1/
+ s/^'$lid' [^<]* <[^>]*> \(.*\)$/@\1/
s/'\''/'\''\'\'\''/g
s/.*/GIT_'$uid'_DATE='\''&'\''; export GIT_'$uid'_DATE/p
diff --git a/git-mergetool--lib.sh b/git-mergetool--lib.sh
index ed630b208..f730253c0 100644
--- a/git-mergetool--lib.sh
+++ b/git-mergetool--lib.sh
@@ -111,7 +111,7 @@ run_merge_tool () {
return $status
}
-guess_merge_tool () {
+list_merge_tool_candidates () {
if merge_mode
then
tools="tortoisemerge"
@@ -136,6 +136,10 @@ guess_merge_tool () {
tools="$tools emerge vimdiff"
;;
esac
+}
+
+guess_merge_tool () {
+ list_merge_tool_candidates
echo >&2 "merge tool candidates: $tools"
# Loop over each candidate and stop when a valid merge tool is found.
diff --git a/git-mergetool.sh b/git-mergetool.sh
index a9f23f7fc..0db0c4484 100755
--- a/git-mergetool.sh
+++ b/git-mergetool.sh
@@ -8,7 +8,7 @@
# at the discretion of Junio C Hamano.
#
-USAGE='[--tool=tool] [-y|--no-prompt|--prompt] [file to merge] ...'
+USAGE='[--tool=tool] [--tool-help] [-y|--no-prompt|--prompt] [file to merge] ...'
SUBDIRECTORY_OK=Yes
OPTIONS_SPEC=
TOOL_MODE=merge
@@ -284,11 +284,51 @@ merge_file () {
return 0
}
+show_tool_help () {
+ TOOL_MODE=merge
+ list_merge_tool_candidates
+ unavailable= available= LF='
+'
+ for i in $tools
+ do
+ merge_tool_path=$(translate_merge_tool_path "$i")
+ if type "$merge_tool_path" >/dev/null 2>&1
+ then
+ available="$available$i$LF"
+ else
+ unavailable="$unavailable$i$LF"
+ fi
+ done
+ if test -n "$available"
+ then
+ echo "'git mergetool --tool=<tool>' may be set to one of the following:"
+ echo "$available" | sort | sed -e 's/^/ /'
+ else
+ echo "No suitable tool for 'git mergetool --tool=<tool>' found."
+ fi
+ if test -n "$unavailable"
+ then
+ echo
+ echo 'The following tools are valid, but not currently available:'
+ echo "$unavailable" | sort | sed -e 's/^/ /'
+ fi
+ if test -n "$unavailable$available"
+ then
+ echo
+ echo "Some of the tools listed above only work in a windowed"
+ echo "environment. If run in a terminal-only session, they will fail."
+ fi
+ exit 0
+}
+
prompt=$(git config --bool mergetool.prompt || echo true)
while test $# != 0
do
case "$1" in
+ --tool-help)
+ show_tool_help
+ ;;
-t|--tool*)
case "$#,$1" in
*,*=*)
diff --git a/git-rebase--interactive.sh b/git-rebase--interactive.sh
index bef7bc044..0d2056f02 100644
--- a/git-rebase--interactive.sh
+++ b/git-rebase--interactive.sh
@@ -493,25 +493,28 @@ do_next () {
author_script_content=$(get_author_ident_from_commit HEAD)
echo "$author_script_content" > "$author_script"
eval "$author_script_content"
- output git reset --soft HEAD^
- pick_one -n $sha1 || die_failed_squash $sha1 "$rest"
+ if ! pick_one -n $sha1
+ then
+ git rev-parse --verify HEAD >"$amend"
+ die_failed_squash $sha1 "$rest"
+ fi
case "$(peek_next_command)" in
squash|s|fixup|f)
# This is an intermediate commit; its message will only be
# used in case of trouble. So use the long version:
- do_with_author output git commit --no-verify -F "$squash_msg" ||
+ do_with_author output git commit --amend --no-verify -F "$squash_msg" ||
die_failed_squash $sha1 "$rest"
;;
*)
# This is the final command of this squash/fixup group
if test -f "$fixup_msg"
then
- do_with_author git commit --no-verify -F "$fixup_msg" ||
+ do_with_author git commit --amend --no-verify -F "$fixup_msg" ||
die_failed_squash $sha1 "$rest"
else
cp "$squash_msg" "$GIT_DIR"/SQUASH_MSG || exit
rm -f "$GIT_DIR"/MERGE_MSG
- do_with_author git commit --no-verify -e ||
+ do_with_author git commit --amend --no-verify -F "$GIT_DIR"/SQUASH_MSG -e ||
die_failed_squash $sha1 "$rest"
fi
rm -f "$squash_msg" "$fixup_msg"
@@ -748,7 +751,6 @@ In both case, once you're done, continue with:
fi
. "$author_script" ||
die "Error trying to find the author identity to amend commit"
- current_head=
if test -f "$amend"
then
current_head=$(git rev-parse --verify HEAD)
@@ -756,13 +758,12 @@ In both case, once you're done, continue with:
die "\
You have uncommitted changes in your working tree. Please, commit them
first and then run 'git rebase --continue' again."
- git reset --soft HEAD^ ||
- die "Cannot rewind the HEAD"
+ do_with_author git commit --amend --no-verify -F "$msg" -e ||
+ die "Could not commit staged changes."
+ else
+ do_with_author git commit --no-verify -F "$msg" -e ||
+ die "Could not commit staged changes."
fi
- do_with_author git commit --no-verify -F "$msg" -e || {
- test -n "$current_head" && git reset --soft $current_head
- die "Could not commit staged changes."
- }
fi
record_in_rewritten "$(cat "$state_dir"/stopped-sha)"
diff --git a/git-submodule.sh b/git-submodule.sh
index 57f98945c..aac575e74 100755
--- a/git-submodule.sh
+++ b/git-submodule.sh
@@ -181,13 +181,18 @@ module_clone()
rm -f "$gitdir/index"
else
mkdir -p "$gitdir_base"
- git clone $quiet -n ${reference:+"$reference"} \
- --separate-git-dir "$gitdir" "$url" "$sm_path" ||
+ (
+ clear_local_git_env
+ git clone $quiet -n ${reference:+"$reference"} \
+ --separate-git-dir "$gitdir" "$url" "$sm_path"
+ ) ||
die "$(eval_gettext "Clone of '\$url' into submodule path '\$sm_path' failed")"
fi
- a=$(cd "$gitdir" && pwd)/
- b=$(cd "$sm_path" && pwd)/
+	# We are already at the root of the work tree, but cd_to_toplevel will
+ # resolve any symlinks that might be present in $PWD
+ a=$(cd_to_toplevel && cd "$gitdir" && pwd)/
+ b=$(cd_to_toplevel && cd "$sm_path" && pwd)/
# normalize Windows-style absolute paths to POSIX-style absolute paths
case $a in [a-zA-Z]:/*) a=/${a%%:*}${a#*:} ;; esac
case $b in [a-zA-Z]:/*) b=/${b%%:*}${b#*:} ;; esac
diff --git a/git-svn.perl b/git-svn.perl
index 0b074c4c6..5711c5719 100755
--- a/git-svn.perl
+++ b/git-svn.perl
@@ -10,6 +10,43 @@ use vars qw/ $AUTHOR $VERSION
$AUTHOR = 'Eric Wong <normalperson@yhbt.net>';
$VERSION = '@@GIT_VERSION@@';
+use Carp qw/croak/;
+use Digest::MD5;
+use IO::File qw//;
+use File::Basename qw/dirname basename/;
+use File::Path qw/mkpath/;
+use File::Spec;
+use File::Find;
+use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/;
+use IPC::Open3;
+use Memoize;
+
+use Git::SVN;
+use Git::SVN::Editor;
+use Git::SVN::Fetcher;
+use Git::SVN::Ra;
+use Git::SVN::Prompt;
+use Git::SVN::Log;
+use Git::SVN::Migration;
+
+use Git::SVN::Utils qw(fatal can_compress);
+use Git qw(
+ git_cmd_try
+ command
+ command_oneline
+ command_noisy
+ command_output_pipe
+ command_close_pipe
+ command_bidi_pipe
+ command_close_bidi_pipe
+);
+
+BEGIN {
+ Memoize::memoize 'Git::config';
+ Memoize::memoize 'Git::config_bool';
+}
+
+
# From which subdir have we been invoked?
my $cmd_dir_prefix = eval {
command_oneline([qw/rev-parse --show-prefix/], STDERR => 0)
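The en-masse glob aliasing that used to copy Git.pm's command helpers into Git::SVN, Git::SVN::Log and Git::SVN::Migration at BEGIN time is replaced by ordinary explicit imports, which is what lets those packages move into their own files under perl/Git/. A minimal sketch of the import style used here; the package name is hypothetical:

    package My::SVN::Helper;   # hypothetical module following the same pattern
    use strict;
    use warnings;

    # Import only the Git plumbing wrappers this module actually calls,
    # instead of relying on a caller to alias them into our namespace.
    use Git qw(command command_oneline command_output_pipe command_close_pipe);

    sub current_branch {
        # command_oneline() comes from Git.pm and dies if the command fails.
        return command_oneline(qw/symbolic-ref --short HEAD/);
    }

    1;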
@@ -17,10 +54,7 @@ my $cmd_dir_prefix = eval {
my $git_dir_user_set = 1 if defined $ENV{GIT_DIR};
$ENV{GIT_DIR} ||= '.git';
-$Git::SVN::default_repo_id = 'svn';
-$Git::SVN::default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn';
$Git::SVN::Ra::_log_window_size = 100;
-$Git::SVN::_minimize_url = 'unset';
if (! exists $ENV{SVN_SSH} && exists $ENV{GIT_SSH}) {
$ENV{SVN_SSH} = $ENV{GIT_SSH};
@@ -35,8 +69,6 @@ $Git::SVN::Log::TZ = $ENV{TZ};
$ENV{TZ} = 'UTC';
$| = 1; # unbuffer STDOUT
-sub fatal (@) { print STDERR "@_\n"; exit 1 }
-
# All SVN commands do it. Otherwise we may die on SIGPIPE when the remote
# repository decides to close the connection which we expect to be kept alive.
$SIG{PIPE} = 'IGNORE';
@@ -66,39 +98,6 @@ sub _req_svn {
fatal "Need SVN::Core 1.1.0 or better (got $SVN::Core::VERSION)";
}
}
-my $can_compress = eval { require Compress::Zlib; 1};
-use Carp qw/croak/;
-use Digest::MD5;
-use IO::File qw//;
-use File::Basename qw/dirname basename/;
-use File::Path qw/mkpath/;
-use File::Spec;
-use File::Find;
-use Getopt::Long qw/:config gnu_getopt no_ignore_case auto_abbrev/;
-use IPC::Open3;
-use Git;
-use Git::SVN::Editor qw//;
-use Git::SVN::Fetcher qw//;
-use Git::SVN::Ra qw//;
-use Git::SVN::Prompt qw//;
-use Memoize; # core since 5.8.0, Jul 2002
-
-BEGIN {
- # import functions from Git into our packages, en masse
- no strict 'refs';
- foreach (qw/command command_oneline command_noisy command_output_pipe
- command_input_pipe command_close_pipe
- command_bidi_pipe command_close_bidi_pipe/) {
- for my $package ( qw(Git::SVN::Migration Git::SVN::Log Git::SVN),
- __PACKAGE__) {
- *{"${package}::$_"} = \&{"Git::$_"};
- }
- }
- Memoize::memoize 'Git::config';
- Memoize::memoize 'Git::config_bool';
-}
-
-my ($SVN);
$sha1 = qr/[a-f\d]{40}/;
$sha1_short = qr/[a-f\d]{4,40}/;
@@ -108,8 +107,11 @@ my ($_stdin, $_help, $_edit,
$_version, $_fetch_all, $_no_rebase, $_fetch_parent,
$_merge, $_strategy, $_preserve_merges, $_dry_run, $_local,
$_prefix, $_no_checkout, $_url, $_verbose,
- $_git_format, $_commit_url, $_tag, $_merge_info, $_interactive);
-$Git::SVN::_follow_parent = 1;
+ $_commit_url, $_tag, $_merge_info, $_interactive);
+
+# This is a refactoring artifact so Git::SVN can get at this git-svn switch.
+sub opt_prefix { return $_prefix || '' }
+
$Git::SVN::Fetcher::_placeholder_filename = ".gitignore";
$_q ||= 0;
my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username,
@@ -269,7 +271,7 @@ my %cmd = (
{ 'url' => \$_url, } ],
'blame' => [ \&Git::SVN::Log::cmd_blame,
"Show what revision and author last modified each line of a file",
- { 'git-format' => \$_git_format } ],
+ { 'git-format' => \$Git::SVN::Log::_git_format } ],
'reset' => [ \&cmd_reset,
"Undo fetches back to the specified SVN revision",
{ 'revision|r=s' => \$_revision,
@@ -367,9 +369,9 @@ Git::SVN::init_vars();
eval {
Git::SVN::verify_remotes_sanity();
$cmd{$cmd}->[0]->(@ARGV);
+ post_fetch_checkout();
};
fatal $@ if $@;
-post_fetch_checkout();
exit 0;
####################### primary functions ######################
@@ -1578,7 +1580,7 @@ sub cmd_reset {
}
sub cmd_gc {
- if (!$can_compress) {
+ if (!can_compress()) {
warn "Compress::Zlib could not be found; unhandled.log " .
"files will not be compressed.\n";
}
@@ -1598,8 +1600,8 @@ sub rebase_cmd {
sub post_fetch_checkout {
return if $_no_checkout;
+ return if verify_ref('HEAD^0');
my $gs = $Git::SVN::_head or return;
- return if verify_ref('refs/heads/master^0');
# look for "trunk" ref if it exists
my $remote = Git::SVN::read_all_remotes()->{$gs->{repo_id}};
@@ -1612,9 +1614,8 @@ sub post_fetch_checkout {
}
}
- my $valid_head = verify_ref('HEAD^0');
- command_noisy(qw(update-ref refs/heads/master), $gs->refname);
- return if ($valid_head || !verify_ref('HEAD^0'));
+ command_noisy(qw(update-ref HEAD), $gs->refname);
+ return unless verify_ref('HEAD^0');
return if $ENV{GIT_DIR} !~ m#^(?:.*/)?\.git$#;
my $index = $ENV{GIT_INDEX_FILE} || "$ENV{GIT_DIR}/index";
@@ -2014,13 +2015,13 @@ sub md5sum {
} elsif (!$ref) {
$md5->add($arg) or croak $!;
} else {
- ::fatal "Can't provide MD5 hash for unknown ref type: '", $ref, "'";
+ fatal "Can't provide MD5 hash for unknown ref type: '", $ref, "'";
}
return $md5->hexdigest();
}
sub gc_directory {
- if ($can_compress && -f $_ && basename($_) eq "unhandled.log") {
+ if (can_compress() && -f $_ && basename($_) eq "unhandled.log") {
my $out_filename = $_ . ".gz";
open my $in_fh, "<", $_ or die "Unable to open $_: $!\n";
binmode $in_fh;
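can_compress() from Git::SVN::Utils replaces the old file-scoped $can_compress flag, so cmd_gc and gc_directory share one lazily computed probe for Compress::Zlib. A hedged sketch of that kind of memoized feature test (the real helper lives in perl/Git/SVN/Utils.pm; this package name is only a stand-in):

    package SVN::Utils::Sketch;   # hypothetical stand-in for Git::SVN::Utils
    use strict;
    use warnings;

    my $can_compress;   # computed once on first use, then reused

    sub can_compress {
        $can_compress = eval { require Compress::Zlib; 1 } ? 1 : 0
            unless defined $can_compress;
        return $can_compress;
    }

    1;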
@@ -2038,3035 +2039,6 @@ sub gc_directory {
}
}
-package Git::SVN;
-use strict;
-use warnings;
-use Fcntl qw/:DEFAULT :seek/;
-use constant rev_map_fmt => 'NH40';
-use vars qw/$default_repo_id $default_ref_id $_no_metadata $_follow_parent
- $_repack $_repack_flags $_use_svm_props $_head
- $_use_svnsync_props $no_reuse_existing $_minimize_url
- $_use_log_author $_add_author_from $_localtime/;
-use Carp qw/croak/;
-use File::Path qw/mkpath/;
-use File::Copy qw/copy/;
-use IPC::Open3;
-use Time::Local;
-use Memoize; # core since 5.8.0, Jul 2002
-use Memoize::Storable;
-use POSIX qw(:signal_h);
-my $can_use_yaml;
-BEGIN {
- $can_use_yaml = eval { require Git::SVN::Memoize::YAML; 1};
-}
-
-my ($_gc_nr, $_gc_period);
-
-# properties that we do not log:
-my %SKIP_PROP;
-BEGIN {
- %SKIP_PROP = map { $_ => 1 } qw/svn:wc:ra_dav:version-url
- svn:special svn:executable
- svn:entry:committed-rev
- svn:entry:last-author
- svn:entry:uuid
- svn:entry:committed-date/;
-
- # some options are read globally, but can be overridden locally
- # per [svn-remote "..."] section. Command-line options will *NOT*
- # override options set in an [svn-remote "..."] section
- no strict 'refs';
- for my $option (qw/follow_parent no_metadata use_svm_props
- use_svnsync_props/) {
- my $key = $option;
- $key =~ tr/_//d;
- my $prop = "-$option";
- *$option = sub {
- my ($self) = @_;
- return $self->{$prop} if exists $self->{$prop};
- my $k = "svn-remote.$self->{repo_id}.$key";
- eval { command_oneline(qw/config --get/, $k) };
- if ($@) {
- $self->{$prop} = ${"Git::SVN::_$option"};
- } else {
- my $v = command_oneline(qw/config --bool/,$k);
- $self->{$prop} = $v eq 'false' ? 0 : 1;
- }
- return $self->{$prop};
- }
- }
-}
-
-
-my (%LOCKFILES, %INDEX_FILES);
-END {
- unlink keys %LOCKFILES if %LOCKFILES;
- unlink keys %INDEX_FILES if %INDEX_FILES;
-}
-
-sub resolve_local_globs {
- my ($url, $fetch, $glob_spec) = @_;
- return unless defined $glob_spec;
- my $ref = $glob_spec->{ref};
- my $path = $glob_spec->{path};
- foreach (command(qw#for-each-ref --format=%(refname) refs/#)) {
- next unless m#^$ref->{regex}$#;
- my $p = $1;
- my $pathname = desanitize_refname($path->full_path($p));
- my $refname = desanitize_refname($ref->full_path($p));
- if (my $existing = $fetch->{$pathname}) {
- if ($existing ne $refname) {
- die "Refspec conflict:\n",
- "existing: $existing\n",
- " globbed: $refname\n";
- }
- my $u = (::cmt_metadata("$refname"))[0];
- $u =~ s!^\Q$url\E(/|$)!! or die
- "$refname: '$url' not found in '$u'\n";
- if ($pathname ne $u) {
- warn "W: Refspec glob conflict ",
- "(ref: $refname):\n",
- "expected path: $pathname\n",
- " real path: $u\n",
- "Continuing ahead with $u\n";
- next;
- }
- } else {
- $fetch->{$pathname} = $refname;
- }
- }
-}
-
-sub parse_revision_argument {
- my ($base, $head) = @_;
- if (!defined $::_revision || $::_revision eq 'BASE:HEAD') {
- return ($base, $head);
- }
- return ($1, $2) if ($::_revision =~ /^(\d+):(\d+)$/);
- return ($::_revision, $::_revision) if ($::_revision =~ /^\d+$/);
- return ($head, $head) if ($::_revision eq 'HEAD');
- return ($base, $1) if ($::_revision =~ /^BASE:(\d+)$/);
- return ($1, $head) if ($::_revision =~ /^(\d+):HEAD$/);
- die "revision argument: $::_revision not understood by git-svn\n";
-}
-
-sub fetch_all {
- my ($repo_id, $remotes) = @_;
- if (ref $repo_id) {
- my $gs = $repo_id;
- $repo_id = undef;
- $repo_id = $gs->{repo_id};
- }
- $remotes ||= read_all_remotes();
- my $remote = $remotes->{$repo_id} or
- die "[svn-remote \"$repo_id\"] unknown\n";
- my $fetch = $remote->{fetch};
- my $url = $remote->{url} or die "svn-remote.$repo_id.url not defined\n";
- my (@gs, @globs);
- my $ra = Git::SVN::Ra->new($url);
- my $uuid = $ra->get_uuid;
- my $head = $ra->get_latest_revnum;
-
- # ignore errors, $head revision may not even exist anymore
- eval { $ra->get_log("", $head, 0, 1, 0, 1, sub { $head = $_[1] }) };
- warn "W: $@\n" if $@;
-
- my $base = defined $fetch ? $head : 0;
-
- # read the max revs for wildcard expansion (branches/*, tags/*)
- foreach my $t (qw/branches tags/) {
- defined $remote->{$t} or next;
- push @globs, @{$remote->{$t}};
-
- my $max_rev = eval { tmp_config(qw/--int --get/,
- "svn-remote.$repo_id.${t}-maxRev") };
- if (defined $max_rev && ($max_rev < $base)) {
- $base = $max_rev;
- } elsif (!defined $max_rev) {
- $base = 0;
- }
- }
-
- if ($fetch) {
- foreach my $p (sort keys %$fetch) {
- my $gs = Git::SVN->new($fetch->{$p}, $repo_id, $p);
- my $lr = $gs->rev_map_max;
- if (defined $lr) {
- $base = $lr if ($lr < $base);
- }
- push @gs, $gs;
- }
- }
-
- ($base, $head) = parse_revision_argument($base, $head);
- $ra->gs_fetch_loop_common($base, $head, \@gs, \@globs);
-}
-
-sub read_all_remotes {
- my $r = {};
- my $use_svm_props = eval { command_oneline(qw/config --bool
- svn.useSvmProps/) };
- $use_svm_props = $use_svm_props eq 'true' if $use_svm_props;
- my $svn_refspec = qr{\s*(.*?)\s*:\s*(.+?)\s*};
- foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) {
- if (m!^(.+)\.fetch=$svn_refspec$!) {
- my ($remote, $local_ref, $remote_ref) = ($1, $2, $3);
- die("svn-remote.$remote: remote ref '$remote_ref' "
- . "must start with 'refs/'\n")
- unless $remote_ref =~ m{^refs/};
- $local_ref = uri_decode($local_ref);
- $r->{$remote}->{fetch}->{$local_ref} = $remote_ref;
- $r->{$remote}->{svm} = {} if $use_svm_props;
- } elsif (m!^(.+)\.usesvmprops=\s*(.*)\s*$!) {
- $r->{$1}->{svm} = {};
- } elsif (m!^(.+)\.url=\s*(.*)\s*$!) {
- $r->{$1}->{url} = $2;
- } elsif (m!^(.+)\.pushurl=\s*(.*)\s*$!) {
- $r->{$1}->{pushurl} = $2;
- } elsif (m!^(.+)\.ignore-refs=\s*(.*)\s*$!) {
- $r->{$1}->{ignore_refs_regex} = $2;
- } elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) {
- my ($remote, $t, $local_ref, $remote_ref) =
- ($1, $2, $3, $4);
- die("svn-remote.$remote: remote ref '$remote_ref' ($t) "
- . "must start with 'refs/'\n")
- unless $remote_ref =~ m{^refs/};
- $local_ref = uri_decode($local_ref);
- my $rs = {
- t => $t,
- remote => $remote,
- path => Git::SVN::GlobSpec->new($local_ref, 1),
- ref => Git::SVN::GlobSpec->new($remote_ref, 0) };
- if (length($rs->{ref}->{right}) != 0) {
- die "The '*' glob character must be the last ",
- "character of '$remote_ref'\n";
- }
- push @{ $r->{$remote}->{$t} }, $rs;
- }
- }
-
- map {
- if (defined $r->{$_}->{svm}) {
- my $svm;
- eval {
- my $section = "svn-remote.$_";
- $svm = {
- source => tmp_config('--get',
- "$section.svm-source"),
- replace => tmp_config('--get',
- "$section.svm-replace"),
- }
- };
- $r->{$_}->{svm} = $svm;
- }
- } keys %$r;
-
- foreach my $remote (keys %$r) {
- foreach ( grep { defined $_ }
- map { $r->{$remote}->{$_} } qw(branches tags) ) {
- foreach my $rs ( @$_ ) {
- $rs->{ignore_refs_regex} =
- $r->{$remote}->{ignore_refs_regex};
- }
- }
- }
-
- $r;
-}
-
-sub init_vars {
- $_gc_nr = $_gc_period = 1000;
- if (defined $_repack || defined $_repack_flags) {
- warn "Repack options are obsolete; they have no effect.\n";
- }
-}
-
-sub verify_remotes_sanity {
- return unless -d $ENV{GIT_DIR};
- my %seen;
- foreach (command(qw/config -l/)) {
- if (m!^svn-remote\.(?:.+)\.fetch=.*:refs/remotes/(\S+)\s*$!) {
- if ($seen{$1}) {
- die "Remote ref refs/remote/$1 is tracked by",
- "\n \"$_\"\nand\n \"$seen{$1}\"\n",
- "Please resolve this ambiguity in ",
- "your git configuration file before ",
- "continuing\n";
- }
- $seen{$1} = $_;
- }
- }
-}
-
-sub find_existing_remote {
- my ($url, $remotes) = @_;
- return undef if $no_reuse_existing;
- my $existing;
- foreach my $repo_id (keys %$remotes) {
- my $u = $remotes->{$repo_id}->{url} or next;
- next if $u ne $url;
- $existing = $repo_id;
- last;
- }
- $existing;
-}
-
-sub init_remote_config {
- my ($self, $url, $no_write) = @_;
- $url =~ s!/+$!!; # strip trailing slash
- my $r = read_all_remotes();
- my $existing = find_existing_remote($url, $r);
- if ($existing) {
- unless ($no_write) {
- print STDERR "Using existing ",
- "[svn-remote \"$existing\"]\n";
- }
- $self->{repo_id} = $existing;
- } elsif ($_minimize_url) {
- my $min_url = Git::SVN::Ra->new($url)->minimize_url;
- $existing = find_existing_remote($min_url, $r);
- if ($existing) {
- unless ($no_write) {
- print STDERR "Using existing ",
- "[svn-remote \"$existing\"]\n";
- }
- $self->{repo_id} = $existing;
- }
- if ($min_url ne $url) {
- unless ($no_write) {
- print STDERR "Using higher level of URL: ",
- "$url => $min_url\n";
- }
- my $old_path = $self->{path};
- $self->{path} = $url;
- $self->{path} =~ s!^\Q$min_url\E(/|$)!!;
- if (length $old_path) {
- $self->{path} .= "/$old_path";
- }
- $url = $min_url;
- }
- }
- my $orig_url;
- if (!$existing) {
- # verify that we aren't overwriting anything:
- $orig_url = eval {
- command_oneline('config', '--get',
- "svn-remote.$self->{repo_id}.url")
- };
- if ($orig_url && ($orig_url ne $url)) {
- die "svn-remote.$self->{repo_id}.url already set: ",
- "$orig_url\nwanted to set to: $url\n";
- }
- }
- my ($xrepo_id, $xpath) = find_ref($self->refname);
- if (!$no_write && defined $xpath) {
- die "svn-remote.$xrepo_id.fetch already set to track ",
- "$xpath:", $self->refname, "\n";
- }
- unless ($no_write) {
- command_noisy('config',
- "svn-remote.$self->{repo_id}.url", $url);
- $self->{path} =~ s{^/}{};
- $self->{path} =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
- command_noisy('config', '--add',
- "svn-remote.$self->{repo_id}.fetch",
- "$self->{path}:".$self->refname);
- }
- $self->{url} = $url;
-}
-
-sub find_by_url { # repos_root and, path are optional
- my ($class, $full_url, $repos_root, $path) = @_;
-
- return undef unless defined $full_url;
- remove_username($full_url);
- remove_username($repos_root) if defined $repos_root;
- my $remotes = read_all_remotes();
- if (defined $full_url && defined $repos_root && !defined $path) {
- $path = $full_url;
- $path =~ s#^\Q$repos_root\E(?:/|$)##;
- }
- foreach my $repo_id (keys %$remotes) {
- my $u = $remotes->{$repo_id}->{url} or next;
- remove_username($u);
- next if defined $repos_root && $repos_root ne $u;
-
- my $fetch = $remotes->{$repo_id}->{fetch} || {};
- foreach my $t (qw/branches tags/) {
- foreach my $globspec (@{$remotes->{$repo_id}->{$t}}) {
- resolve_local_globs($u, $fetch, $globspec);
- }
- }
- my $p = $path;
- my $rwr = rewrite_root({repo_id => $repo_id});
- my $svm = $remotes->{$repo_id}->{svm}
- if defined $remotes->{$repo_id}->{svm};
- unless (defined $p) {
- $p = $full_url;
- my $z = $u;
- my $prefix = '';
- if ($rwr) {
- $z = $rwr;
- remove_username($z);
- } elsif (defined $svm) {
- $z = $svm->{source};
- $prefix = $svm->{replace};
- $prefix =~ s#^\Q$u\E(?:/|$)##;
- $prefix =~ s#/$##;
- }
- $p =~ s#^\Q$z\E(?:/|$)#$prefix# or next;
- }
- foreach my $f (keys %$fetch) {
- next if $f ne $p;
- return Git::SVN->new($fetch->{$f}, $repo_id, $f);
- }
- }
- undef;
-}
-
-sub init {
- my ($class, $url, $path, $repo_id, $ref_id, $no_write) = @_;
- my $self = _new($class, $repo_id, $ref_id, $path);
- if (defined $url) {
- $self->init_remote_config($url, $no_write);
- }
- $self;
-}
-
-sub find_ref {
- my ($ref_id) = @_;
- foreach (command(qw/config -l/)) {
- next unless m!^svn-remote\.(.+)\.fetch=
- \s*(.*?)\s*:\s*(.+?)\s*$!x;
- my ($repo_id, $path, $ref) = ($1, $2, $3);
- if ($ref eq $ref_id) {
- $path = '' if ($path =~ m#^\./?#);
- return ($repo_id, $path);
- }
- }
- (undef, undef, undef);
-}
-
-sub new {
- my ($class, $ref_id, $repo_id, $path) = @_;
- if (defined $ref_id && !defined $repo_id && !defined $path) {
- ($repo_id, $path) = find_ref($ref_id);
- if (!defined $repo_id) {
- die "Could not find a \"svn-remote.*.fetch\" key ",
- "in the repository configuration matching: ",
- "$ref_id\n";
- }
- }
- my $self = _new($class, $repo_id, $ref_id, $path);
- if (!defined $self->{path} || !length $self->{path}) {
- my $fetch = command_oneline('config', '--get',
- "svn-remote.$repo_id.fetch",
- ":$ref_id\$") or
- die "Failed to read \"svn-remote.$repo_id.fetch\" ",
- "\":$ref_id\$\" in config\n";
- ($self->{path}, undef) = split(/\s*:\s*/, $fetch);
- }
- $self->{path} =~ s{/+}{/}g;
- $self->{path} =~ s{\A/}{};
- $self->{path} =~ s{/\z}{};
- $self->{url} = command_oneline('config', '--get',
- "svn-remote.$repo_id.url") or
- die "Failed to read \"svn-remote.$repo_id.url\" in config\n";
- $self->{pushurl} = eval { command_oneline('config', '--get',
- "svn-remote.$repo_id.pushurl") };
- $self->rebuild;
- $self;
-}
-
-sub refname {
- my ($refname) = $_[0]->{ref_id} ;
-
- # It cannot end with a slash /, we'll throw up on this because
- # SVN can't have directories with a slash in their name, either:
- if ($refname =~ m{/$}) {
- die "ref: '$refname' ends with a trailing slash, this is ",
- "not permitted by git nor Subversion\n";
- }
-
- # It cannot have ASCII control character space, tilde ~, caret ^,
- # colon :, question-mark ?, asterisk *, space, or open bracket [
- # anywhere.
- #
- # Additionally, % must be escaped because it is used for escaping
- # and we want our escaped refname to be reversible
- $refname =~ s{([ \%~\^:\?\*\[\t])}{uc sprintf('%%%02x',ord($1))}eg;
-
- # no slash-separated component can begin with a dot .
- # /.* becomes /%2E*
- $refname =~ s{/\.}{/%2E}g;
-
- # It cannot have two consecutive dots .. anywhere
- # .. becomes %2E%2E
- $refname =~ s{\.\.}{%2E%2E}g;
-
- # trailing dots and .lock are not allowed
- # .$ becomes %2E and .lock becomes %2Elock
- $refname =~ s{\.(?=$|lock$)}{%2E};
-
- # the sequence @{ is used to access the reflog
- # @{ becomes %40{
- $refname =~ s{\@\{}{%40\{}g;
-
- return $refname;
-}
-
-sub desanitize_refname {
- my ($refname) = @_;
- $refname =~ s{%(?:([0-9A-F]{2}))}{chr hex($1)}eg;
- return $refname;
-}
-
-sub svm_uuid {
- my ($self) = @_;
- return $self->{svm}->{uuid} if $self->svm;
- $self->ra;
- unless ($self->{svm}) {
- die "SVM UUID not cached, and reading remotely failed\n";
- }
- $self->{svm}->{uuid};
-}
-
-sub svm {
- my ($self) = @_;
- return $self->{svm} if $self->{svm};
- my $svm;
- # see if we have it in our config, first:
- eval {
- my $section = "svn-remote.$self->{repo_id}";
- $svm = {
- source => tmp_config('--get', "$section.svm-source"),
- uuid => tmp_config('--get', "$section.svm-uuid"),
- replace => tmp_config('--get', "$section.svm-replace"),
- }
- };
- if ($svm && $svm->{source} && $svm->{uuid} && $svm->{replace}) {
- $self->{svm} = $svm;
- }
- $self->{svm};
-}
-
-sub _set_svm_vars {
- my ($self, $ra) = @_;
- return $ra if $self->svm;
-
- my @err = ( "useSvmProps set, but failed to read SVM properties\n",
- "(svm:source, svm:uuid) ",
- "from the following URLs:\n" );
- sub read_svm_props {
- my ($self, $ra, $path, $r) = @_;
- my $props = ($ra->get_dir($path, $r))[2];
- my $src = $props->{'svm:source'};
- my $uuid = $props->{'svm:uuid'};
- return undef if (!$src || !$uuid);
-
- chomp($src, $uuid);
-
- $uuid =~ m{^[0-9a-f\-]{30,}$}i
- or die "doesn't look right - svm:uuid is '$uuid'\n";
-
- # the '!' is used to mark the repos_root!/relative/path
- $src =~ s{/?!/?}{/};
- $src =~ s{/+$}{}; # no trailing slashes please
- # username is of no interest
- $src =~ s{(^[a-z\+]*://)[^/@]*@}{$1};
-
- my $replace = $ra->{url};
- $replace .= "/$path" if length $path;
-
- my $section = "svn-remote.$self->{repo_id}";
- tmp_config("$section.svm-source", $src);
- tmp_config("$section.svm-replace", $replace);
- tmp_config("$section.svm-uuid", $uuid);
- $self->{svm} = {
- source => $src,
- uuid => $uuid,
- replace => $replace
- };
- }
-
- my $r = $ra->get_latest_revnum;
- my $path = $self->{path};
- my %tried;
- while (length $path) {
- unless ($tried{"$self->{url}/$path"}) {
- return $ra if $self->read_svm_props($ra, $path, $r);
- $tried{"$self->{url}/$path"} = 1;
- }
- $path =~ s#/?[^/]+$##;
- }
- die "Path: '$path' should be ''\n" if $path ne '';
- return $ra if $self->read_svm_props($ra, $path, $r);
- $tried{"$self->{url}/$path"} = 1;
-
- if ($ra->{repos_root} eq $self->{url}) {
- die @err, (map { " $_\n" } keys %tried), "\n";
- }
-
- # nope, make sure we're connected to the repository root:
- my $ok;
- my @tried_b;
- $path = $ra->{svn_path};
- $ra = Git::SVN::Ra->new($ra->{repos_root});
- while (length $path) {
- unless ($tried{"$ra->{url}/$path"}) {
- $ok = $self->read_svm_props($ra, $path, $r);
- last if $ok;
- $tried{"$ra->{url}/$path"} = 1;
- }
- $path =~ s#/?[^/]+$##;
- }
- die "Path: '$path' should be ''\n" if $path ne '';
- $ok ||= $self->read_svm_props($ra, $path, $r);
- $tried{"$ra->{url}/$path"} = 1;
- if (!$ok) {
- die @err, (map { " $_\n" } keys %tried), "\n";
- }
- Git::SVN::Ra->new($self->{url});
-}
-
-sub svnsync {
- my ($self) = @_;
- return $self->{svnsync} if $self->{svnsync};
-
- if ($self->no_metadata) {
- die "Can't have both 'noMetadata' and ",
- "'useSvnsyncProps' options set!\n";
- }
- if ($self->rewrite_root) {
- die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ",
- "options set!\n";
- }
- if ($self->rewrite_uuid) {
- die "Can't have both 'useSvnsyncProps' and 'rewriteUUID' ",
- "options set!\n";
- }
-
- my $svnsync;
- # see if we have it in our config, first:
- eval {
- my $section = "svn-remote.$self->{repo_id}";
-
- my $url = tmp_config('--get', "$section.svnsync-url");
- ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
- die "doesn't look right - svn:sync-from-url is '$url'\n";
-
- my $uuid = tmp_config('--get', "$section.svnsync-uuid");
- ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
- die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
-
- $svnsync = { url => $url, uuid => $uuid }
- };
- if ($svnsync && $svnsync->{url} && $svnsync->{uuid}) {
- return $self->{svnsync} = $svnsync;
- }
-
- my $err = "useSvnsyncProps set, but failed to read " .
- "svnsync property: svn:sync-from-";
- my $rp = $self->ra->rev_proplist(0);
-
- my $url = $rp->{'svn:sync-from-url'} or die $err . "url\n";
- ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
- die "doesn't look right - svn:sync-from-url is '$url'\n";
-
- my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n";
- ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
- die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
-
- my $section = "svn-remote.$self->{repo_id}";
- tmp_config('--add', "$section.svnsync-uuid", $uuid);
- tmp_config('--add', "$section.svnsync-url", $url);
- return $self->{svnsync} = { url => $url, uuid => $uuid };
-}
-
-# this allows us to memoize our SVN::Ra UUID locally and avoid a
-# remote lookup (useful for 'git svn log').
-sub ra_uuid {
- my ($self) = @_;
- unless ($self->{ra_uuid}) {
- my $key = "svn-remote.$self->{repo_id}.uuid";
- my $uuid = eval { tmp_config('--get', $key) };
- if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/i) {
- $self->{ra_uuid} = $uuid;
- } else {
- die "ra_uuid called without URL\n" unless $self->{url};
- $self->{ra_uuid} = $self->ra->get_uuid;
- tmp_config('--add', $key, $self->{ra_uuid});
- }
- }
- $self->{ra_uuid};
-}
-
-sub _set_repos_root {
- my ($self, $repos_root) = @_;
- my $k = "svn-remote.$self->{repo_id}.reposRoot";
- $repos_root ||= $self->ra->{repos_root};
- tmp_config($k, $repos_root);
- $repos_root;
-}
-
-sub repos_root {
- my ($self) = @_;
- my $k = "svn-remote.$self->{repo_id}.reposRoot";
- eval { tmp_config('--get', $k) } || $self->_set_repos_root;
-}
-
-sub ra {
- my ($self) = shift;
- my $ra = Git::SVN::Ra->new($self->{url});
- $self->_set_repos_root($ra->{repos_root});
- if ($self->use_svm_props && !$self->{svm}) {
- if ($self->no_metadata) {
- die "Can't have both 'noMetadata' and ",
- "'useSvmProps' options set!\n";
- } elsif ($self->use_svnsync_props) {
- die "Can't have both 'useSvnsyncProps' and ",
- "'useSvmProps' options set!\n";
- }
- $ra = $self->_set_svm_vars($ra);
- $self->{-want_revprops} = 1;
- }
- $ra;
-}
-
-# prop_walk(PATH, REV, SUB)
-# -------------------------
-# Recursively traverse PATH at revision REV and invoke SUB for each
-# directory that contains a SVN property. SUB will be invoked as
-# follows: &SUB(gs, path, props); where `gs' is this instance of
-# Git::SVN, `path' the path to the directory where the properties
-# `props' were found. The `path' will be relative to point of checkout,
-# that is, if url://repo/trunk is the current Git branch, and that
-# directory contains a sub-directory `d', SUB will be invoked with `/d/'
-# as `path' (note the trailing `/').
-sub prop_walk {
- my ($self, $path, $rev, $sub) = @_;
-
- $path =~ s#^/##;
- my ($dirent, undef, $props) = $self->ra->get_dir($path, $rev);
- $path =~ s#^/*#/#g;
- my $p = $path;
- # Strip the irrelevant part of the path.
- $p =~ s#^/+\Q$self->{path}\E(/|$)#/#;
- # Ensure the path is terminated by a `/'.
- $p =~ s#/*$#/#;
-
- # The properties contain all the internal SVN stuff nobody
- # (usually) cares about.
- my $interesting_props = 0;
- foreach (keys %{$props}) {
- # If it doesn't start with `svn:', it must be a
- # user-defined property.
- ++$interesting_props and next if $_ !~ /^svn:/;
- # FIXME: Fragile, if SVN adds new public properties,
- # this needs to be updated.
- ++$interesting_props if /^svn:(?:ignore|keywords|executable
- |eol-style|mime-type
- |externals|needs-lock)$/x;
- }
- &$sub($self, $p, $props) if $interesting_props;
-
- foreach (sort keys %$dirent) {
- next if $dirent->{$_}->{kind} != $SVN::Node::dir;
- $self->prop_walk($self->{path} . $p . $_, $rev, $sub);
- }
-}
-
-sub last_rev { ($_[0]->last_rev_commit)[0] }
-sub last_commit { ($_[0]->last_rev_commit)[1] }
-
-# returns the newest SVN revision number and newest commit SHA1
-sub last_rev_commit {
- my ($self) = @_;
- if (defined $self->{last_rev} && defined $self->{last_commit}) {
- return ($self->{last_rev}, $self->{last_commit});
- }
- my $c = ::verify_ref($self->refname.'^0');
- if ($c && !$self->use_svm_props && !$self->no_metadata) {
- my $rev = (::cmt_metadata($c))[1];
- if (defined $rev) {
- ($self->{last_rev}, $self->{last_commit}) = ($rev, $c);
- return ($rev, $c);
- }
- }
- my $map_path = $self->map_path;
- unless (-e $map_path) {
- ($self->{last_rev}, $self->{last_commit}) = (undef, undef);
- return (undef, undef);
- }
- my ($rev, $commit) = $self->rev_map_max(1);
- ($self->{last_rev}, $self->{last_commit}) = ($rev, $commit);
- return ($rev, $commit);
-}
-
-sub get_fetch_range {
- my ($self, $min, $max) = @_;
- $max ||= $self->ra->get_latest_revnum;
- $min ||= $self->rev_map_max;
- (++$min, $max);
-}
-
-sub tmp_config {
- my (@args) = @_;
- my $old_def_config = "$ENV{GIT_DIR}/svn/config";
- my $config = "$ENV{GIT_DIR}/svn/.metadata";
- if (! -f $config && -f $old_def_config) {
- rename $old_def_config, $config or
- die "Failed rename $old_def_config => $config: $!\n";
- }
- my $old_config = $ENV{GIT_CONFIG};
- $ENV{GIT_CONFIG} = $config;
- $@ = undef;
- my @ret = eval {
- unless (-f $config) {
- mkfile($config);
- open my $fh, '>', $config or
- die "Can't open $config: $!\n";
- print $fh "; This file is used internally by ",
- "git-svn\n" or die
- "Couldn't write to $config: $!\n";
- print $fh "; You should not have to edit it\n" or
- die "Couldn't write to $config: $!\n";
- close $fh or die "Couldn't close $config: $!\n";
- }
- command('config', @args);
- };
- my $err = $@;
- if (defined $old_config) {
- $ENV{GIT_CONFIG} = $old_config;
- } else {
- delete $ENV{GIT_CONFIG};
- }
- die $err if $err;
- wantarray ? @ret : $ret[0];
-}
-
-sub tmp_index_do {
- my ($self, $sub) = @_;
- my $old_index = $ENV{GIT_INDEX_FILE};
- $ENV{GIT_INDEX_FILE} = $self->{index};
- $@ = undef;
- my @ret = eval {
- my ($dir, $base) = ($self->{index} =~ m#^(.*?)/?([^/]+)$#);
- mkpath([$dir]) unless -d $dir;
- &$sub;
- };
- my $err = $@;
- if (defined $old_index) {
- $ENV{GIT_INDEX_FILE} = $old_index;
- } else {
- delete $ENV{GIT_INDEX_FILE};
- }
- die $err if $err;
- wantarray ? @ret : $ret[0];
-}
-
-sub assert_index_clean {
- my ($self, $treeish) = @_;
-
- $self->tmp_index_do(sub {
- command_noisy('read-tree', $treeish) unless -e $self->{index};
- my $x = command_oneline('write-tree');
- my ($y) = (command(qw/cat-file commit/, $treeish) =~
- /^tree ($::sha1)/mo);
- return if $y eq $x;
-
- warn "Index mismatch: $y != $x\nrereading $treeish\n";
- unlink $self->{index} or die "unlink $self->{index}: $!\n";
- command_noisy('read-tree', $treeish);
- $x = command_oneline('write-tree');
- if ($y ne $x) {
- ::fatal "trees ($treeish) $y != $x\n",
- "Something is seriously wrong...";
- }
- });
-}
-
-sub get_commit_parents {
- my ($self, $log_entry) = @_;
- my (%seen, @ret, @tmp);
- # legacy support for 'set-tree'; this is only used by set_tree_cb:
- if (my $ip = $self->{inject_parents}) {
- if (my $commit = delete $ip->{$log_entry->{revision}}) {
- push @tmp, $commit;
- }
- }
- if (my $cur = ::verify_ref($self->refname.'^0')) {
- push @tmp, $cur;
- }
- if (my $ipd = $self->{inject_parents_dcommit}) {
- if (my $commit = delete $ipd->{$log_entry->{revision}}) {
- push @tmp, @$commit;
- }
- }
- push @tmp, $_ foreach (@{$log_entry->{parents}}, @tmp);
- while (my $p = shift @tmp) {
- next if $seen{$p};
- $seen{$p} = 1;
- push @ret, $p;
- }
- @ret;
-}
-
-sub rewrite_root {
- my ($self) = @_;
- return $self->{-rewrite_root} if exists $self->{-rewrite_root};
- my $k = "svn-remote.$self->{repo_id}.rewriteRoot";
- my $rwr = eval { command_oneline(qw/config --get/, $k) };
- if ($rwr) {
- $rwr =~ s#/+$##;
- if ($rwr !~ m#^[a-z\+]+://#) {
- die "$rwr is not a valid URL (key: $k)\n";
- }
- }
- $self->{-rewrite_root} = $rwr;
-}
-
-sub rewrite_uuid {
- my ($self) = @_;
- return $self->{-rewrite_uuid} if exists $self->{-rewrite_uuid};
- my $k = "svn-remote.$self->{repo_id}.rewriteUUID";
- my $rwid = eval { command_oneline(qw/config --get/, $k) };
- if ($rwid) {
- $rwid =~ s#/+$##;
- if ($rwid !~ m#^[a-f0-9]{8}-(?:[a-f0-9]{4}-){3}[a-f0-9]{12}$#) {
- die "$rwid is not a valid UUID (key: $k)\n";
- }
- }
- $self->{-rewrite_uuid} = $rwid;
-}
-
-sub metadata_url {
- my ($self) = @_;
- ($self->rewrite_root || $self->{url}) .
- (length $self->{path} ? '/' . $self->{path} : '');
-}
-
-sub full_url {
- my ($self) = @_;
- $self->{url} . (length $self->{path} ? '/' . $self->{path} : '');
-}
-
-sub full_pushurl {
- my ($self) = @_;
- if ($self->{pushurl}) {
- return $self->{pushurl} . (length $self->{path} ? '/' .
- $self->{path} : '');
- } else {
- return $self->full_url;
- }
-}
-
-sub set_commit_header_env {
- my ($log_entry) = @_;
- my %env;
- foreach my $ned (qw/NAME EMAIL DATE/) {
- foreach my $ac (qw/AUTHOR COMMITTER/) {
- $env{"GIT_${ac}_${ned}"} = $ENV{"GIT_${ac}_${ned}"};
- }
- }
-
- $ENV{GIT_AUTHOR_NAME} = $log_entry->{name};
- $ENV{GIT_AUTHOR_EMAIL} = $log_entry->{email};
- $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_entry->{date};
-
- $ENV{GIT_COMMITTER_NAME} = (defined $log_entry->{commit_name})
- ? $log_entry->{commit_name}
- : $log_entry->{name};
- $ENV{GIT_COMMITTER_EMAIL} = (defined $log_entry->{commit_email})
- ? $log_entry->{commit_email}
- : $log_entry->{email};
- \%env;
-}
-
-sub restore_commit_header_env {
- my ($env) = @_;
- foreach my $ned (qw/NAME EMAIL DATE/) {
- foreach my $ac (qw/AUTHOR COMMITTER/) {
- my $k = "GIT_${ac}_${ned}";
- if (defined $env->{$k}) {
- $ENV{$k} = $env->{$k};
- } else {
- delete $ENV{$k};
- }
- }
- }
-}
-
-sub gc {
- command_noisy('gc', '--auto');
-};
-
-sub do_git_commit {
- my ($self, $log_entry) = @_;
- my $lr = $self->last_rev;
- if (defined $lr && $lr >= $log_entry->{revision}) {
- die "Last fetched revision of ", $self->refname,
- " was r$lr, but we are about to fetch: ",
- "r$log_entry->{revision}!\n";
- }
- if (my $c = $self->rev_map_get($log_entry->{revision})) {
- croak "$log_entry->{revision} = $c already exists! ",
- "Why are we refetching it?\n";
- }
- my $old_env = set_commit_header_env($log_entry);
- my $tree = $log_entry->{tree};
- if (!defined $tree) {
- $tree = $self->tmp_index_do(sub {
- command_oneline('write-tree') });
- }
- die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o;
-
- my @exec = ('git', 'commit-tree', $tree);
- foreach ($self->get_commit_parents($log_entry)) {
- push @exec, '-p', $_;
- }
- defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec))
- or croak $!;
- binmode $msg_fh;
-
- # we always get UTF-8 from SVN, but we may want our commits in
- # a different encoding.
- if (my $enc = Git::config('i18n.commitencoding')) {
- require Encode;
- Encode::from_to($log_entry->{log}, 'UTF-8', $enc);
- }
- print $msg_fh $log_entry->{log} or croak $!;
- restore_commit_header_env($old_env);
- unless ($self->no_metadata) {
- print $msg_fh "\ngit-svn-id: $log_entry->{metadata}\n"
- or croak $!;
- }
- $msg_fh->flush == 0 or croak $!;
- close $msg_fh or croak $!;
- chomp(my $commit = do { local $/; <$out_fh> });
- close $out_fh or croak $!;
- waitpid $pid, 0;
- croak $? if $?;
- if ($commit !~ /^$::sha1$/o) {
- die "Failed to commit, invalid sha1: $commit\n";
- }
-
- $self->rev_map_set($log_entry->{revision}, $commit, 1);
-
- $self->{last_rev} = $log_entry->{revision};
- $self->{last_commit} = $commit;
- print "r$log_entry->{revision}" unless $::_q > 1;
- if (defined $log_entry->{svm_revision}) {
- print " (\@$log_entry->{svm_revision})" unless $::_q > 1;
- $self->rev_map_set($log_entry->{svm_revision}, $commit,
- 0, $self->svm_uuid);
- }
- print " = $commit ($self->{ref_id})\n" unless $::_q > 1;
- if (--$_gc_nr == 0) {
- $_gc_nr = $_gc_period;
- gc();
- }
- return $commit;
-}
-
-sub match_paths {
- my ($self, $paths, $r) = @_;
- return 1 if $self->{path} eq '';
- if (my $path = $paths->{"/$self->{path}"}) {
- return ($path->{action} eq 'D') ? 0 : 1;
- }
- $self->{path_regex} ||= qr/^\/\Q$self->{path}\E\//;
- if (grep /$self->{path_regex}/, keys %$paths) {
- return 1;
- }
- my $c = '';
- foreach (split m#/#, $self->{path}) {
- $c .= "/$_";
- next unless ($paths->{$c} &&
- ($paths->{$c}->{action} =~ /^[AR]$/));
- if ($self->ra->check_path($self->{path}, $r) ==
- $SVN::Node::dir) {
- return 1;
- }
- }
- return 0;
-}
-
-sub find_parent_branch {
- my ($self, $paths, $rev) = @_;
- return undef unless $self->follow_parent;
- unless (defined $paths) {
- my $err_handler = $SVN::Error::handler;
- $SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs;
- $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1,
- sub { $paths = $_[0] });
- $SVN::Error::handler = $err_handler;
- }
- return undef unless defined $paths;
-
- # look for a parent from another branch:
- my @b_path_components = split m#/#, $self->{path};
- my @a_path_components;
- my $i;
- while (@b_path_components) {
- $i = $paths->{'/'.join('/', @b_path_components)};
- last if $i && defined $i->{copyfrom_path};
- unshift(@a_path_components, pop(@b_path_components));
- }
- return undef unless defined $i && defined $i->{copyfrom_path};
- my $branch_from = $i->{copyfrom_path};
- if (@a_path_components) {
- print STDERR "branch_from: $branch_from => ";
- $branch_from .= '/'.join('/', @a_path_components);
- print STDERR $branch_from, "\n";
- }
- my $r = $i->{copyfrom_rev};
- my $repos_root = $self->ra->{repos_root};
- my $url = $self->ra->{url};
- my $new_url = $url . $branch_from;
- print STDERR "Found possible branch point: ",
- "$new_url => ", $self->full_url, ", $r\n"
- unless $::_q > 1;
- $branch_from =~ s#^/##;
- my $gs = $self->other_gs($new_url, $url,
- $branch_from, $r, $self->{ref_id});
- my ($r0, $parent) = $gs->find_rev_before($r, 1);
- {
- my ($base, $head);
- if (!defined $r0 || !defined $parent) {
- ($base, $head) = parse_revision_argument(0, $r);
- } else {
- if ($r0 < $r) {
- $gs->ra->get_log([$gs->{path}], $r0 + 1, $r, 1,
- 0, 1, sub { $base = $_[1] - 1 });
- }
- }
- if (defined $base && $base <= $r) {
- $gs->fetch($base, $r);
- }
- ($r0, $parent) = $gs->find_rev_before($r, 1);
- }
- if (defined $r0 && defined $parent) {
- print STDERR "Found branch parent: ($self->{ref_id}) $parent\n"
- unless $::_q > 1;
- my $ed;
- if ($self->ra->can_do_switch) {
- $self->assert_index_clean($parent);
- print STDERR "Following parent with do_switch\n"
- unless $::_q > 1;
- # do_switch works with svn/trunk >= r22312, but that
- # is not included with SVN 1.4.3 (the latest version
- # at the moment), so we can't rely on it
- $self->{last_rev} = $r0;
- $self->{last_commit} = $parent;
- $ed = Git::SVN::Fetcher->new($self, $gs->{path});
- $gs->ra->gs_do_switch($r0, $rev, $gs,
- $self->full_url, $ed)
- or die "SVN connection failed somewhere...\n";
- } elsif ($self->ra->trees_match($new_url, $r0,
- $self->full_url, $rev)) {
- print STDERR "Trees match:\n",
- " $new_url\@$r0\n",
- " ${\$self->full_url}\@$rev\n",
- "Following parent with no changes\n"
- unless $::_q > 1;
- $self->tmp_index_do(sub {
- command_noisy('read-tree', $parent);
- });
- $self->{last_commit} = $parent;
- } else {
- print STDERR "Following parent with do_update\n"
- unless $::_q > 1;
- $ed = Git::SVN::Fetcher->new($self);
- $self->ra->gs_do_update($rev, $rev, $self, $ed)
- or die "SVN connection failed somewhere...\n";
- }
- print STDERR "Successfully followed parent\n" unless $::_q > 1;
- return $self->make_log_entry($rev, [$parent], $ed);
- }
- return undef;
-}
-
-sub do_fetch {
- my ($self, $paths, $rev) = @_;
- my $ed;
- my ($last_rev, @parents);
- if (my $lc = $self->last_commit) {
- # we can have a branch that was deleted, then re-added
- # under the same name but copied from another path, in
- # which case we'll have multiple parents (we don't
- # want to break the original ref, nor lose copypath info):
- if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
- push @{$log_entry->{parents}}, $lc;
- return $log_entry;
- }
- $ed = Git::SVN::Fetcher->new($self);
- $last_rev = $self->{last_rev};
- $ed->{c} = $lc;
- @parents = ($lc);
- } else {
- $last_rev = $rev;
- if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
- return $log_entry;
- }
- $ed = Git::SVN::Fetcher->new($self);
- }
- unless ($self->ra->gs_do_update($last_rev, $rev, $self, $ed)) {
- die "SVN connection failed somewhere...\n";
- }
- $self->make_log_entry($rev, \@parents, $ed);
-}
-
-sub mkemptydirs {
- my ($self, $r) = @_;
-
- sub scan {
- my ($r, $empty_dirs, $line) = @_;
- if (defined $r && $line =~ /^r(\d+)$/) {
- return 0 if $1 > $r;
- } elsif ($line =~ /^ \+empty_dir: (.+)$/) {
- $empty_dirs->{$1} = 1;
- } elsif ($line =~ /^ \-empty_dir: (.+)$/) {
- my @d = grep {m[^\Q$1\E(/|$)]} (keys %$empty_dirs);
- delete @$empty_dirs{@d};
- }
- 1; # continue
- };
-
- my %empty_dirs = ();
- my $gz_file = "$self->{dir}/unhandled.log.gz";
- if (-f $gz_file) {
- if (!$can_compress) {
- warn "Compress::Zlib could not be found; ",
- "empty directories in $gz_file will not be read\n";
- } else {
- my $gz = Compress::Zlib::gzopen($gz_file, "rb") or
- die "Unable to open $gz_file: $!\n";
- my $line;
- while ($gz->gzreadline($line) > 0) {
- scan($r, \%empty_dirs, $line) or last;
- }
- $gz->gzclose;
- }
- }
-
- if (open my $fh, '<', "$self->{dir}/unhandled.log") {
- binmode $fh or croak "binmode: $!";
- while (<$fh>) {
- scan($r, \%empty_dirs, $_) or last;
- }
- close $fh;
- }
-
- my $strip = qr/\A\Q$self->{path}\E(?:\/|$)/;
- foreach my $d (sort keys %empty_dirs) {
- $d = uri_decode($d);
- $d =~ s/$strip//;
- next unless length($d);
- next if -d $d;
- if (-e $d) {
- warn "$d exists but is not a directory\n";
- } else {
- print "creating empty directory: $d\n";
- mkpath([$d]);
- }
- }
-}
-
-sub get_untracked {
- my ($self, $ed) = @_;
- my @out;
- my $h = $ed->{empty};
- foreach (sort keys %$h) {
- my $act = $h->{$_} ? '+empty_dir' : '-empty_dir';
- push @out, " $act: " . uri_encode($_);
- warn "W: $act: $_\n";
- }
- foreach my $t (qw/dir_prop file_prop/) {
- $h = $ed->{$t} or next;
- foreach my $path (sort keys %$h) {
- my $ppath = $path eq '' ? '.' : $path;
- foreach my $prop (sort keys %{$h->{$path}}) {
- next if $SKIP_PROP{$prop};
- my $v = $h->{$path}->{$prop};
- my $t_ppath_prop = "$t: " .
- uri_encode($ppath) . ' ' .
- uri_encode($prop);
- if (defined $v) {
- push @out, " +$t_ppath_prop " .
- uri_encode($v);
- } else {
- push @out, " -$t_ppath_prop";
- }
- }
- }
- }
- foreach my $t (qw/absent_file absent_directory/) {
- $h = $ed->{$t} or next;
- foreach my $parent (sort keys %$h) {
- foreach my $path (sort @{$h->{$parent}}) {
- push @out, " $t: " .
- uri_encode("$parent/$path");
- warn "W: $t: $parent/$path ",
- "Insufficient permissions?\n";
- }
- }
- }
- \@out;
-}
-
-sub get_tz {
-	# some systems don't handle or mishandle %z, so be creative.
- my $t = shift || time;
- my $gm = timelocal(gmtime($t));
- my $sign = qw( + + - )[ $t <=> $gm ];
- return sprintf("%s%02d%02d", $sign, (gmtime(abs($t - $gm)))[2,1]);
-}
-
-# parse_svn_date(DATE)
-# --------------------
-# Given a date (in UTC) from Subversion, return a string in the format
-# "<TZ Offset> <local date/time>" that Git will use.
-#
-# By default the parsed date will be in UTC; if $Git::SVN::_localtime
-# is true we'll convert it to the local timezone instead.
-sub parse_svn_date {
- my $date = shift || return '+0000 1970-01-01 00:00:00';
- my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
- (\d\d)\:(\d\d)\:(\d\d)\.\d*Z$/x) or
- croak "Unable to parse date: $date\n";
- my $parsed_date; # Set next.
-
- if ($Git::SVN::_localtime) {
- # Translate the Subversion datetime to an epoch time.
- # Begin by switching ourselves to $date's timezone, UTC.
- my $old_env_TZ = $ENV{TZ};
- $ENV{TZ} = 'UTC';
-
- my $epoch_in_UTC =
- POSIX::strftime('%s', $S, $M, $H, $d, $m - 1, $Y - 1900);
-
- # Determine our local timezone (including DST) at the
- # time of $epoch_in_UTC. $Git::SVN::Log::TZ stored the
- # value of TZ, if any, at the time we were run.
- if (defined $Git::SVN::Log::TZ) {
- $ENV{TZ} = $Git::SVN::Log::TZ;
- } else {
- delete $ENV{TZ};
- }
-
- my $our_TZ = get_tz();
-
- # This converts $epoch_in_UTC into our local timezone.
- my ($sec, $min, $hour, $mday, $mon, $year,
- $wday, $yday, $isdst) = localtime($epoch_in_UTC);
-
- $parsed_date = sprintf('%s %04d-%02d-%02d %02d:%02d:%02d',
- $our_TZ, $year + 1900, $mon + 1,
- $mday, $hour, $min, $sec);
-
- # Reset us to the timezone in effect when we entered
- # this routine.
- if (defined $old_env_TZ) {
- $ENV{TZ} = $old_env_TZ;
- } else {
- delete $ENV{TZ};
- }
- } else {
- $parsed_date = "+0000 $Y-$m-$d $H:$M:$S";
- }
-
- return $parsed_date;
-}
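(An illustrative aside, not part of the patch: when $Git::SVN::_localtime is unset, parse_svn_date() above reduces to a regex capture plus a reformat. A minimal sketch with a made-up Subversion timestamp:)

        use strict;
        use warnings;

        # Sketch of the non-localtime branch of parse_svn_date().
        my $date = '2012-07-30T12:34:56.000000Z';        # example SVN timestamp
        my ($Y, $m, $d, $H, $M, $S) =
                ($date =~ /^(\d{4})-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)\.\d*Z$/)
                or die "Unable to parse date: $date\n";
        print "+0000 $Y-$m-$d $H:$M:$S\n";               # +0000 2012-07-30 12:34:56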
-
-sub other_gs {
- my ($self, $new_url, $url,
- $branch_from, $r, $old_ref_id) = @_;
- my $gs = Git::SVN->find_by_url($new_url, $url, $branch_from);
- unless ($gs) {
- my $ref_id = $old_ref_id;
- $ref_id =~ s/\@\d+-*$//;
- $ref_id .= "\@$r";
- # just grow a tail if we're not unique enough :x
- $ref_id .= '-' while find_ref($ref_id);
- my ($u, $p, $repo_id) = ($new_url, '', $ref_id);
- if ($u =~ s#^\Q$url\E(/|$)##) {
- $p = $u;
- $u = $url;
- $repo_id = $self->{repo_id};
- }
- while (1) {
- # It is possible to tag two different subdirectories at
- # the same revision. If the url for an existing ref
- # does not match, we must either find a ref with a
- # matching url or create a new ref by growing a tail.
- $gs = Git::SVN->init($u, $p, $repo_id, $ref_id, 1);
- my (undef, $max_commit) = $gs->rev_map_max(1);
- last if (!$max_commit);
- my ($url) = ::cmt_metadata($max_commit);
- last if ($url eq $gs->metadata_url);
- $ref_id .= '-';
- }
- print STDERR "Initializing parent: $ref_id\n" unless $::_q > 1;
- }
- $gs
-}
-
-sub call_authors_prog {
- my ($orig_author) = @_;
- $orig_author = command_oneline('rev-parse', '--sq-quote', $orig_author);
- my $author = `$::_authors_prog $orig_author`;
- if ($? != 0) {
- die "$::_authors_prog failed with exit code $?\n"
- }
- if ($author =~ /^\s*(.+?)\s*<(.*)>\s*$/) {
- my ($name, $email) = ($1, $2);
- $email = undef if length $2 == 0;
- return [$name, $email];
- } else {
- die "Author: $orig_author: $::_authors_prog returned "
- . "invalid author format: $author\n";
- }
-}
-
-sub check_author {
- my ($author) = @_;
- if (!defined $author || length $author == 0) {
- $author = '(no author)';
- }
- if (!defined $::users{$author}) {
- if (defined $::_authors_prog) {
- $::users{$author} = call_authors_prog($author);
- } elsif (defined $::_authors) {
- die "Author: $author not defined in $::_authors file\n";
- }
- }
- $author;
-}
-
-sub find_extra_svk_parents {
- my ($self, $ed, $tickets, $parents) = @_;
- # aha! svk:merge property changed...
- my @tickets = split "\n", $tickets;
- my @known_parents;
- for my $ticket ( @tickets ) {
- my ($uuid, $path, $rev) = split /:/, $ticket;
- if ( $uuid eq $self->ra_uuid ) {
- my $url = $self->{url};
- my $repos_root = $url;
- my $branch_from = $path;
- $branch_from =~ s{^/}{};
- my $gs = $self->other_gs($repos_root."/".$branch_from,
- $url,
- $branch_from,
- $rev,
- $self->{ref_id});
- if ( my $commit = $gs->rev_map_get($rev, $uuid) ) {
- # wahey! we found it, but it might be
- # an old one (!)
- push @known_parents, [ $rev, $commit ];
- }
- }
- }
- # Ordering matters; highest-numbered commit merge tickets
- # first, as they may account for later merge ticket additions
- # or changes.
- @known_parents = map {$_->[1]} sort {$b->[0] <=> $a->[0]} @known_parents;
- for my $parent ( @known_parents ) {
- my @cmd = ('rev-list', $parent, map { "^$_" } @$parents );
- my ($msg_fh, $ctx) = command_output_pipe(@cmd);
- my $new;
- while ( <$msg_fh> ) {
- $new=1;last;
- }
- command_close_pipe($msg_fh, $ctx);
- if ( $new ) {
- print STDERR
- "Found merge parent (svk:merge ticket): $parent\n";
- push @$parents, $parent;
- }
- }
-}
-
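(An illustrative aside, not part of the patch: each svk:merge ticket handled above has the form "<repository uuid>:<branch path>:<revision>". The values in this sketch are made up:)

        use strict;
        use warnings;

        # Sketch of how a single svk:merge ticket decomposes.
        my $ticket = 'b48289b2-9c08-4d72-af37-0358a40b9c15:/branches/topic:1234';
        my ($uuid, $path, $rev) = split /:/, $ticket;
        (my $branch_from = $path) =~ s{^/}{};
        print "$uuid $branch_from\@r$rev\n";   # b48289b2-... branches/topic@r1234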
-sub lookup_svn_merge {
- my $uuid = shift;
- my $url = shift;
- my $merge = shift;
-
- my ($source, $revs) = split ":", $merge;
- my $path = $source;
- $path =~ s{^/}{};
- my $gs = Git::SVN->find_by_url($url.$source, $url, $path);
- if ( !$gs ) {
- warn "Couldn't find revmap for $url$source\n";
- return;
- }
- my @ranges = split ",", $revs;
- my ($tip, $tip_commit);
- my @merged_commit_ranges;
- # find the tip
- for my $range ( @ranges ) {
- my ($bottom, $top) = split "-", $range;
- $top ||= $bottom;
- my $bottom_commit = $gs->find_rev_after( $bottom, 1, $top );
- my $top_commit = $gs->find_rev_before( $top, 1, $bottom );
-
- unless ($top_commit and $bottom_commit) {
- warn "W:unknown path/rev in svn:mergeinfo "
- ."dirprop: $source:$range\n";
- next;
- }
-
- if (scalar(command('rev-parse', "$bottom_commit^@"))) {
- push @merged_commit_ranges,
- "$bottom_commit^..$top_commit";
- } else {
- push @merged_commit_ranges, "$top_commit";
- }
-
- if ( !defined $tip or $top > $tip ) {
- $tip = $top;
- $tip_commit = $top_commit;
- }
- }
- return ($tip_commit, @merged_commit_ranges);
-}
-
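(An illustrative aside, not part of the patch: the svn:mergeinfo lines consumed by lookup_svn_merge() look like "<source path>:<revision ranges>". A sketch with made-up values:)

        use strict;
        use warnings;

        # Sketch of how one svn:mergeinfo line decomposes.
        my $merge = '/branches/topic:1000-1050,1100';
        my ($source, $revs) = split ":", $merge;
        for my $range (split ",", $revs) {
                my ($bottom, $top) = split "-", $range;
                $top ||= $bottom;
                print "$source: r$bottom .. r$top\n";
        }
        # /branches/topic: r1000 .. r1050
        # /branches/topic: r1100 .. r1100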
-sub _rev_list {
- my ($msg_fh, $ctx) = command_output_pipe(
- "rev-list", @_,
- );
- my @rv;
- while ( <$msg_fh> ) {
- chomp;
- push @rv, $_;
- }
- command_close_pipe($msg_fh, $ctx);
- @rv;
-}
-
-sub check_cherry_pick {
- my $base = shift;
- my $tip = shift;
- my $parents = shift;
- my @ranges = @_;
- my %commits = map { $_ => 1 }
- _rev_list("--no-merges", $tip, "--not", $base, @$parents, "--");
- for my $range ( @ranges ) {
- delete @commits{_rev_list($range, "--")};
- }
- for my $commit (keys %commits) {
- if (has_no_changes($commit)) {
- delete $commits{$commit};
- }
- }
- return (keys %commits);
-}
-
-sub has_no_changes {
- my $commit = shift;
-
- my @revs = split / /, command_oneline(
- qw(rev-list --parents -1 -m), $commit);
-
- # Commits with no parents, e.g. the start of a partial branch,
- # have changes by definition.
- return 1 if (@revs < 2);
-
-	# Commits with multiple parents, e.g. a merge, have no changes
- # by definition.
- return 0 if (@revs > 2);
-
- return (command_oneline("rev-parse", "$commit^{tree}") eq
- command_oneline("rev-parse", "$commit~1^{tree}"));
-}
-
-sub tie_for_persistent_memoization {
- my $hash = shift;
- my $path = shift;
-
- if ($can_use_yaml) {
- tie %$hash => 'Git::SVN::Memoize::YAML', "$path.yaml";
- } else {
- tie %$hash => 'Memoize::Storable', "$path.db", 'nstore';
- }
-}
-
-# The GIT_DIR environment variable is not always set until after the command
-# line arguments are processed, so we can't memoize in a BEGIN block.
-{
- my $memoized = 0;
-
- sub memoize_svn_mergeinfo_functions {
- return if $memoized;
- $memoized = 1;
-
- my $cache_path = "$ENV{GIT_DIR}/svn/.caches/";
- mkpath([$cache_path]) unless -d $cache_path;
-
- my %lookup_svn_merge_cache;
- my %check_cherry_pick_cache;
- my %has_no_changes_cache;
-
- tie_for_persistent_memoization(\%lookup_svn_merge_cache,
- "$cache_path/lookup_svn_merge");
- memoize 'lookup_svn_merge',
- SCALAR_CACHE => 'FAULT',
- LIST_CACHE => ['HASH' => \%lookup_svn_merge_cache],
- ;
-
- tie_for_persistent_memoization(\%check_cherry_pick_cache,
- "$cache_path/check_cherry_pick");
- memoize 'check_cherry_pick',
- SCALAR_CACHE => 'FAULT',
- LIST_CACHE => ['HASH' => \%check_cherry_pick_cache],
- ;
-
- tie_for_persistent_memoization(\%has_no_changes_cache,
- "$cache_path/has_no_changes");
- memoize 'has_no_changes',
- SCALAR_CACHE => ['HASH' => \%has_no_changes_cache],
- LIST_CACHE => 'FAULT',
- ;
- }
-
- sub unmemoize_svn_mergeinfo_functions {
- return if not $memoized;
- $memoized = 0;
-
- Memoize::unmemoize 'lookup_svn_merge';
- Memoize::unmemoize 'check_cherry_pick';
- Memoize::unmemoize 'has_no_changes';
- }
-
- Memoize::memoize 'Git::SVN::repos_root';
-}
-
-END {
- # Force cache writeout explicitly instead of waiting for
- # global destruction to avoid segfault in Storable:
- # http://rt.cpan.org/Public/Bug/Display.html?id=36087
- unmemoize_svn_mergeinfo_functions();
-}
-
-sub parents_exclude {
- my $parents = shift;
- my @commits = @_;
- return unless @commits;
-
- my @excluded;
- my $excluded;
- do {
- my @cmd = ('rev-list', "-1", @commits, "--not", @$parents );
- $excluded = command_oneline(@cmd);
- if ( $excluded ) {
- my @new;
- my $found;
- for my $commit ( @commits ) {
- if ( $commit eq $excluded ) {
- push @excluded, $commit;
- $found++;
- last;
- }
- else {
- push @new, $commit;
- }
- }
- die "saw commit '$excluded' in rev-list output, "
- ."but we didn't ask for that commit (wanted: @commits --not @$parents)"
- unless $found;
- @commits = @new;
- }
- }
- while ($excluded and @commits);
-
- return @excluded;
-}
-
-
-# note: this function should only be called if the various dirprops
-# have actually changed
-sub find_extra_svn_parents {
- my ($self, $ed, $mergeinfo, $parents) = @_;
- # aha! svk:merge property changed...
-
- memoize_svn_mergeinfo_functions();
-
- # We first search for merged tips which are not in our
- # history. Then, we figure out which git revisions are in
- # that tip, but not this revision. If all of those revisions
- # are now marked as merge, we can add the tip as a parent.
- my @merges = split "\n", $mergeinfo;
- my @merge_tips;
- my $url = $self->{url};
- my $uuid = $self->ra_uuid;
- my %ranges;
- for my $merge ( @merges ) {
- my ($tip_commit, @ranges) =
- lookup_svn_merge( $uuid, $url, $merge );
- unless (!$tip_commit or
- grep { $_ eq $tip_commit } @$parents ) {
- push @merge_tips, $tip_commit;
- $ranges{$tip_commit} = \@ranges;
- } else {
- push @merge_tips, undef;
- }
- }
-
- my %excluded = map { $_ => 1 }
- parents_exclude($parents, grep { defined } @merge_tips);
-
- # check merge tips for new parents
- my @new_parents;
- for my $merge_tip ( @merge_tips ) {
- my $spec = shift @merges;
- next unless $merge_tip and $excluded{$merge_tip};
-
- my $ranges = $ranges{$merge_tip};
-
- # check out 'new' tips
- my $merge_base;
- eval {
- $merge_base = command_oneline(
- "merge-base",
- @$parents, $merge_tip,
- );
- };
- if ($@) {
- die "An error occurred during merge-base"
- unless $@->isa("Git::Error::Command");
-
- warn "W: Cannot find common ancestor between ".
- "@$parents and $merge_tip. Ignoring merge info.\n";
- next;
- }
-
- # double check that there are no missing non-merge commits
- my (@incomplete) = check_cherry_pick(
- $merge_base, $merge_tip,
- $parents,
- @$ranges,
- );
-
- if ( @incomplete ) {
- warn "W:svn cherry-pick ignored ($spec) - missing "
- .@incomplete." commit(s) (eg $incomplete[0])\n";
- } else {
- warn
- "Found merge parent (svn:mergeinfo prop): ",
- $merge_tip, "\n";
- push @new_parents, $merge_tip;
- }
- }
-
- # cater for merges which merge commits from multiple branches
- if ( @new_parents > 1 ) {
- for ( my $i = 0; $i <= $#new_parents; $i++ ) {
- for ( my $j = 0; $j <= $#new_parents; $j++ ) {
- next if $i == $j;
- next unless $new_parents[$i];
- next unless $new_parents[$j];
- my $revs = command_oneline(
- "rev-list", "-1",
- "$new_parents[$i]..$new_parents[$j]",
- );
- if ( !$revs ) {
- undef($new_parents[$j]);
- }
- }
- }
- }
- push @$parents, grep { defined } @new_parents;
-}
-
-sub make_log_entry {
- my ($self, $rev, $parents, $ed) = @_;
- my $untracked = $self->get_untracked($ed);
-
- my @parents = @$parents;
- my $ps = $ed->{path_strip} || "";
- for my $path ( grep { m/$ps/ } %{$ed->{dir_prop}} ) {
- my $props = $ed->{dir_prop}{$path};
- if ( $props->{"svk:merge"} ) {
- $self->find_extra_svk_parents
- ($ed, $props->{"svk:merge"}, \@parents);
- }
- if ( $props->{"svn:mergeinfo"} ) {
- $self->find_extra_svn_parents
- ($ed,
- $props->{"svn:mergeinfo"},
- \@parents);
- }
- }
-
- open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!;
- print $un "r$rev\n" or croak $!;
- print $un $_, "\n" foreach @$untracked;
- my %log_entry = ( parents => \@parents, revision => $rev,
- log => '');
-
- my $headrev;
- my $logged = delete $self->{logged_rev_props};
- if (!$logged || $self->{-want_revprops}) {
- my $rp = $self->ra->rev_proplist($rev);
- foreach (sort keys %$rp) {
- my $v = $rp->{$_};
- if (/^svn:(author|date|log)$/) {
- $log_entry{$1} = $v;
- } elsif ($_ eq 'svm:headrev') {
- $headrev = $v;
- } else {
- print $un " rev_prop: ", uri_encode($_), ' ',
- uri_encode($v), "\n";
- }
- }
- } else {
- map { $log_entry{$_} = $logged->{$_} } keys %$logged;
- }
- close $un or croak $!;
-
- $log_entry{date} = parse_svn_date($log_entry{date});
- $log_entry{log} .= "\n";
- my $author = $log_entry{author} = check_author($log_entry{author});
- my ($name, $email) = defined $::users{$author} ? @{$::users{$author}}
- : ($author, undef);
-
- my ($commit_name, $commit_email) = ($name, $email);
- if ($_use_log_author) {
- my $name_field;
- if ($log_entry{log} =~ /From:\s+(.*\S)\s*\n/i) {
- $name_field = $1;
- } elsif ($log_entry{log} =~ /Signed-off-by:\s+(.*\S)\s*\n/i) {
- $name_field = $1;
- }
- if (!defined $name_field) {
- if (!defined $email) {
- $email = $name;
- }
- } elsif ($name_field =~ /(.*?)\s+<(.*)>/) {
- ($name, $email) = ($1, $2);
- } elsif ($name_field =~ /(.*)@/) {
- ($name, $email) = ($1, $name_field);
- } else {
- ($name, $email) = ($name_field, $name_field);
- }
- }
- if (defined $headrev && $self->use_svm_props) {
- if ($self->rewrite_root) {
- die "Can't have both 'useSvmProps' and 'rewriteRoot' ",
- "options set!\n";
- }
- if ($self->rewrite_uuid) {
- die "Can't have both 'useSvmProps' and 'rewriteUUID' ",
- "options set!\n";
- }
- my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}i;
- # we don't want "SVM: initializing mirror for junk" ...
- return undef if $r == 0;
- my $svm = $self->svm;
- if ($uuid ne $svm->{uuid}) {
- die "UUID mismatch on SVM path:\n",
- "expected: $svm->{uuid}\n",
- " got: $uuid\n";
- }
- my $full_url = $self->full_url;
- $full_url =~ s#^\Q$svm->{replace}\E(/|$)#$svm->{source}$1# or
- die "Failed to replace '$svm->{replace}' with ",
- "'$svm->{source}' in $full_url\n";
- # throw away username for storing in records
- remove_username($full_url);
- $log_entry{metadata} = "$full_url\@$r $uuid";
- $log_entry{svm_revision} = $r;
- $email ||= "$author\@$uuid";
- $commit_email ||= "$author\@$uuid";
- } elsif ($self->use_svnsync_props) {
- my $full_url = $self->svnsync->{url};
- $full_url .= "/$self->{path}" if length $self->{path};
- remove_username($full_url);
- my $uuid = $self->svnsync->{uuid};
- $log_entry{metadata} = "$full_url\@$rev $uuid";
- $email ||= "$author\@$uuid";
- $commit_email ||= "$author\@$uuid";
- } else {
- my $url = $self->metadata_url;
- remove_username($url);
- my $uuid = $self->rewrite_uuid || $self->ra->get_uuid;
- $log_entry{metadata} = "$url\@$rev " . $uuid;
- $email ||= "$author\@" . $uuid;
- $commit_email ||= "$author\@" . $uuid;
- }
- $log_entry{name} = $name;
- $log_entry{email} = $email;
- $log_entry{commit_name} = $commit_name;
- $log_entry{commit_email} = $commit_email;
- \%log_entry;
-}
-
-sub fetch {
- my ($self, $min_rev, $max_rev, @parents) = @_;
- my ($last_rev, $last_commit) = $self->last_rev_commit;
- my ($base, $head) = $self->get_fetch_range($min_rev, $max_rev);
- $self->ra->gs_fetch_loop_common($base, $head, [$self]);
-}
-
-sub set_tree_cb {
- my ($self, $log_entry, $tree, $rev, $date, $author) = @_;
- $self->{inject_parents} = { $rev => $tree };
- $self->fetch(undef, undef);
-}
-
-sub set_tree {
- my ($self, $tree) = (shift, shift);
- my $log_entry = ::get_commit_entry($tree);
- unless ($self->{last_rev}) {
- ::fatal("Must have an existing revision to commit");
- }
- my %ed_opts = ( r => $self->{last_rev},
- log => $log_entry->{log},
- ra => $self->ra,
- tree_a => $self->{last_commit},
- tree_b => $tree,
- editor_cb => sub {
- $self->set_tree_cb($log_entry, $tree, @_) },
- svn_path => $self->{path} );
- if (!Git::SVN::Editor->new(\%ed_opts)->apply_diff) {
- print "No changes\nr$self->{last_rev} = $tree\n";
- }
-}
-
-sub rebuild_from_rev_db {
- my ($self, $path) = @_;
- my $r = -1;
- open my $fh, '<', $path or croak "open: $!";
- binmode $fh or croak "binmode: $!";
- while (<$fh>) {
- length($_) == 41 or croak "inconsistent size in ($_) != 41";
- chomp($_);
- ++$r;
- next if $_ eq ('0' x 40);
- $self->rev_map_set($r, $_);
- print "r$r = $_\n";
- }
- close $fh or croak "close: $!";
- unlink $path or croak "unlink: $!";
-}
-
-sub rebuild {
- my ($self) = @_;
- my $map_path = $self->map_path;
- my $partial = (-e $map_path && ! -z $map_path);
- return unless ::verify_ref($self->refname.'^0');
- if (!$partial && ($self->use_svm_props || $self->no_metadata)) {
- my $rev_db = $self->rev_db_path;
- $self->rebuild_from_rev_db($rev_db);
- if ($self->use_svm_props) {
- my $svm_rev_db = $self->rev_db_path($self->svm_uuid);
- $self->rebuild_from_rev_db($svm_rev_db);
- }
- $self->unlink_rev_db_symlink;
- return;
- }
- print "Rebuilding $map_path ...\n" if (!$partial);
- my ($base_rev, $head) = ($partial ? $self->rev_map_max_norebuild(1) :
- (undef, undef));
- my ($log, $ctx) =
- command_output_pipe(qw/rev-list --pretty=raw --reverse/,
- ($head ? "$head.." : "") . $self->refname,
- '--');
- my $metadata_url = $self->metadata_url;
- remove_username($metadata_url);
- my $svn_uuid = $self->rewrite_uuid || $self->ra_uuid;
- my $c;
- while (<$log>) {
- if ( m{^commit ($::sha1)$} ) {
- $c = $1;
- next;
- }
- next unless s{^\s*(git-svn-id:)}{$1};
- my ($url, $rev, $uuid) = ::extract_metadata($_);
- remove_username($url);
-
- # ignore merges (from set-tree)
- next if (!defined $rev || !$uuid);
-
- # if we merged or otherwise started elsewhere, this is
- # how we break out of it
- if (($uuid ne $svn_uuid) ||
- ($metadata_url && $url && ($url ne $metadata_url))) {
- next;
- }
- if ($partial && $head) {
- print "Partial-rebuilding $map_path ...\n";
- print "Currently at $base_rev = $head\n";
- $head = undef;
- }
-
- $self->rev_map_set($rev, $c);
- print "r$rev = $c\n";
- }
- command_close_pipe($log, $ctx);
- print "Done rebuilding $map_path\n" if (!$partial || !$head);
- my $rev_db_path = $self->rev_db_path;
- if (-f $self->rev_db_path) {
- unlink $self->rev_db_path or croak "unlink: $!";
- }
- $self->unlink_rev_db_symlink;
-}
-
-# rev_map:
-# Tie::File seems to be prone to offset errors if revisions get sparse,
-# it's not that fast, either. Tie::File is also not in Perl 5.6. So
-# one of my favorite modules is out :< Next up would be one of the DBM
-# modules, but I'm not sure which is most portable...
-#
-# This is the replacement for the rev_db format, which was too big
-# and inefficient for large repositories with a lot of sparse history
-# (mainly tags)
-#
-# The format is this:
-# - 24 bytes for every record,
-# * 4 bytes for the integer representing an SVN revision number
-# * 20 bytes representing the sha1 of a git commit
-# - No empty padding records like the old format
-# (except the last record, which can be overwritten)
-# - new records are written append-only since SVN revision numbers
-# increase monotonically
-# - lookups on SVN revision number are done via a binary search
-# - Piping the file to xxd -c24 is a good way of dumping it for
-# viewing or editing (piped back through xxd -r), should the need
-# ever arise.
-# - The last record can be a padding revision with an all-zero sha1
-# This is used to optimize fetch performance when using multiple
-# "fetch" directives in .git/config
-#
-# These files are disposable unless noMetadata or useSvmProps is set
-
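(An illustrative aside, not part of the patch: the 24-byte records described above can be dumped with a few lines of Perl; the default path below is only an example of where such a map might live:)

        use strict;
        use warnings;

        # Sketch: walk the fixed-size records of a .rev_map file.
        my $path = shift || '.git/svn/refs/remotes/git-svn/.rev_map.UUID';  # example
        open my $fh, '<', $path or die "open $path: $!\n";
        binmode $fh or die "binmode: $!\n";
        while (read($fh, my $buf, 24) == 24) {
                my ($rev, $sha1) = unpack('NH40', $buf);  # 4-byte rev, 20-byte sha1
                next if $sha1 eq ('0' x 40);              # trailing padding record
                print "r$rev = $sha1\n";
        }
        close $fh;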
-sub _rev_map_set {
- my ($fh, $rev, $commit) = @_;
-
- binmode $fh or croak "binmode: $!";
- my $size = (stat($fh))[7];
- ($size % 24) == 0 or croak "inconsistent size: $size";
-
- my $wr_offset = 0;
- if ($size > 0) {
- sysseek($fh, -24, SEEK_END) or croak "seek: $!";
- my $read = sysread($fh, my $buf, 24) or croak "read: $!";
- $read == 24 or croak "read only $read bytes (!= 24)";
- my ($last_rev, $last_commit) = unpack(rev_map_fmt, $buf);
- if ($last_commit eq ('0' x40)) {
- if ($size >= 48) {
- sysseek($fh, -48, SEEK_END) or croak "seek: $!";
- $read = sysread($fh, $buf, 24) or
- croak "read: $!";
- $read == 24 or
- croak "read only $read bytes (!= 24)";
- ($last_rev, $last_commit) =
- unpack(rev_map_fmt, $buf);
- if ($last_commit eq ('0' x40)) {
- croak "inconsistent .rev_map\n";
- }
- }
- if ($last_rev >= $rev) {
- croak "last_rev is higher!: $last_rev >= $rev";
- }
- $wr_offset = -24;
- }
- }
- sysseek($fh, $wr_offset, SEEK_END) or croak "seek: $!";
- syswrite($fh, pack(rev_map_fmt, $rev, $commit), 24) == 24 or
- croak "write: $!";
-}
-
-sub _rev_map_reset {
- my ($fh, $rev, $commit) = @_;
- my $c = _rev_map_get($fh, $rev);
- $c eq $commit or die "_rev_map_reset(@_) commit $c does not match!\n";
- my $offset = sysseek($fh, 0, SEEK_CUR) or croak "seek: $!";
- truncate $fh, $offset or croak "truncate: $!";
-}
-
-sub mkfile {
- my ($path) = @_;
- unless (-e $path) {
- my ($dir, $base) = ($path =~ m#^(.*?)/?([^/]+)$#);
- mkpath([$dir]) unless -d $dir;
- open my $fh, '>>', $path or die "Couldn't create $path: $!\n";
- close $fh or die "Couldn't close (create) $path: $!\n";
- }
-}
-
-sub rev_map_set {
- my ($self, $rev, $commit, $update_ref, $uuid) = @_;
- defined $commit or die "missing arg3\n";
- length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n";
- my $db = $self->map_path($uuid);
- my $db_lock = "$db.lock";
- my $sigmask;
- $update_ref ||= 0;
- if ($update_ref) {
- $sigmask = POSIX::SigSet->new();
- my $signew = POSIX::SigSet->new(SIGINT, SIGHUP, SIGTERM,
- SIGALRM, SIGUSR1, SIGUSR2);
- sigprocmask(SIG_BLOCK, $signew, $sigmask) or
- croak "Can't block signals: $!";
- }
- mkfile($db);
-
- $LOCKFILES{$db_lock} = 1;
- my $sync;
- # both of these options make our .rev_db file very, very important
- # and we can't afford to lose it because rebuild() won't work
- if ($self->use_svm_props || $self->no_metadata) {
- $sync = 1;
- copy($db, $db_lock) or die "rev_map_set(@_): ",
- "Failed to copy: ",
- "$db => $db_lock ($!)\n";
- } else {
- rename $db, $db_lock or die "rev_map_set(@_): ",
- "Failed to rename: ",
- "$db => $db_lock ($!)\n";
- }
-
- sysopen(my $fh, $db_lock, O_RDWR | O_CREAT)
- or croak "Couldn't open $db_lock: $!\n";
- $update_ref eq 'reset' ? _rev_map_reset($fh, $rev, $commit) :
- _rev_map_set($fh, $rev, $commit);
- if ($sync) {
- $fh->flush or die "Couldn't flush $db_lock: $!\n";
- $fh->sync or die "Couldn't sync $db_lock: $!\n";
- }
- close $fh or croak $!;
- if ($update_ref) {
- $_head = $self;
- my $note = "";
- $note = " ($update_ref)" if ($update_ref !~ /^\d*$/);
- command_noisy('update-ref', '-m', "r$rev$note",
- $self->refname, $commit);
- }
- rename $db_lock, $db or die "rev_map_set(@_): ", "Failed to rename: ",
- "$db_lock => $db ($!)\n";
- delete $LOCKFILES{$db_lock};
- if ($update_ref) {
- sigprocmask(SIG_SETMASK, $sigmask) or
- croak "Can't restore signal mask: $!";
- }
-}
-
-# If want_commit, this will return an array of (rev, commit) where
-# commit _must_ be a valid commit in the archive.
-# Otherwise, it'll return the max revision (whether or not the
-# commit is valid or just a 0x40 placeholder).
-sub rev_map_max {
- my ($self, $want_commit) = @_;
- $self->rebuild;
- my ($r, $c) = $self->rev_map_max_norebuild($want_commit);
- $want_commit ? ($r, $c) : $r;
-}
-
-sub rev_map_max_norebuild {
- my ($self, $want_commit) = @_;
- my $map_path = $self->map_path;
- stat $map_path or return $want_commit ? (0, undef) : 0;
- sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
- binmode $fh or croak "binmode: $!";
- my $size = (stat($fh))[7];
- ($size % 24) == 0 or croak "inconsistent size: $size";
-
- if ($size == 0) {
- close $fh or croak "close: $!";
- return $want_commit ? (0, undef) : 0;
- }
-
- sysseek($fh, -24, SEEK_END) or croak "seek: $!";
- sysread($fh, my $buf, 24) == 24 or croak "read: $!";
- my ($r, $c) = unpack(rev_map_fmt, $buf);
- if ($want_commit && $c eq ('0' x40)) {
- if ($size < 48) {
- return $want_commit ? (0, undef) : 0;
- }
- sysseek($fh, -48, SEEK_END) or croak "seek: $!";
- sysread($fh, $buf, 24) == 24 or croak "read: $!";
- ($r, $c) = unpack(rev_map_fmt, $buf);
- if ($c eq ('0'x40)) {
- croak "Penultimate record is all-zeroes in $map_path";
- }
- }
- close $fh or croak "close: $!";
- $want_commit ? ($r, $c) : $r;
-}
-
-sub rev_map_get {
- my ($self, $rev, $uuid) = @_;
- my $map_path = $self->map_path($uuid);
- return undef unless -e $map_path;
-
- sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
- my $c = _rev_map_get($fh, $rev);
- close($fh) or croak "close: $!";
- $c
-}
-
-sub _rev_map_get {
- my ($fh, $rev) = @_;
-
- binmode $fh or croak "binmode: $!";
- my $size = (stat($fh))[7];
- ($size % 24) == 0 or croak "inconsistent size: $size";
-
- if ($size == 0) {
- return undef;
- }
-
- my ($l, $u) = (0, $size - 24);
- my ($r, $c, $buf);
-
- while ($l <= $u) {
- my $i = int(($l/24 + $u/24) / 2) * 24;
- sysseek($fh, $i, SEEK_SET) or croak "seek: $!";
- sysread($fh, my $buf, 24) == 24 or croak "read: $!";
- my ($r, $c) = unpack(rev_map_fmt, $buf);
-
- if ($r < $rev) {
- $l = $i + 24;
- } elsif ($r > $rev) {
- $u = $i - 24;
- } else { # $r == $rev
- return $c eq ('0' x 40) ? undef : $c;
- }
- }
- undef;
-}
-
-# Finds the first svn revision that exists on (if $eq_ok is true) or
-# before $rev for the current branch. It will not search any lower
-# than $min_rev. Returns the git commit hash and svn revision number
-# if found, else (undef, undef).
-sub find_rev_before {
- my ($self, $rev, $eq_ok, $min_rev) = @_;
- --$rev unless $eq_ok;
- $min_rev ||= 1;
- my $max_rev = $self->rev_map_max;
- $rev = $max_rev if ($rev > $max_rev);
- while ($rev >= $min_rev) {
- if (my $c = $self->rev_map_get($rev)) {
- return ($rev, $c);
- }
- --$rev;
- }
- return (undef, undef);
-}
-
-# Finds the first svn revision that exists on (if $eq_ok is true) or
-# after $rev for the current branch. It will not search any higher
-# than $max_rev. Returns the git commit hash and svn revision number
-# if found, else (undef, undef).
-sub find_rev_after {
- my ($self, $rev, $eq_ok, $max_rev) = @_;
- ++$rev unless $eq_ok;
- $max_rev ||= $self->rev_map_max;
- while ($rev <= $max_rev) {
- if (my $c = $self->rev_map_get($rev)) {
- return ($rev, $c);
- }
- ++$rev;
- }
- return (undef, undef);
-}
-
-sub _new {
- my ($class, $repo_id, $ref_id, $path) = @_;
- unless (defined $repo_id && length $repo_id) {
- $repo_id = $Git::SVN::default_repo_id;
- }
- unless (defined $ref_id && length $ref_id) {
- $_prefix = '' unless defined($_prefix);
- $_[2] = $ref_id =
- "refs/remotes/$_prefix$Git::SVN::default_ref_id";
- }
- $_[1] = $repo_id;
- my $dir = "$ENV{GIT_DIR}/svn/$ref_id";
-
- # Older repos imported by us used $GIT_DIR/svn/foo instead of
- # $GIT_DIR/svn/refs/remotes/foo when tracking refs/remotes/foo
- if ($ref_id =~ m{^refs/remotes/(.*)}) {
- my $old_dir = "$ENV{GIT_DIR}/svn/$1";
- if (-d $old_dir && ! -d $dir) {
- $dir = $old_dir;
- }
- }
-
- $_[3] = $path = '' unless (defined $path);
- mkpath([$dir]);
- bless {
- ref_id => $ref_id, dir => $dir, index => "$dir/index",
- path => $path, config => "$ENV{GIT_DIR}/svn/config",
- map_root => "$dir/.rev_map", repo_id => $repo_id }, $class;
-}
-
-# for read-only access of old .rev_db formats
-sub unlink_rev_db_symlink {
- my ($self) = @_;
- my $link = $self->rev_db_path;
- $link =~ s/\.[\w-]+$// or croak "missing UUID at the end of $link";
- if (-l $link) {
- unlink $link or croak "unlink: $link failed!";
- }
-}
-
-sub rev_db_path {
- my ($self, $uuid) = @_;
- my $db_path = $self->map_path($uuid);
- $db_path =~ s{/\.rev_map\.}{/\.rev_db\.}
- or croak "map_path: $db_path does not contain '/.rev_map.' !";
- $db_path;
-}
-
-# the new replacement for .rev_db
-sub map_path {
- my ($self, $uuid) = @_;
- $uuid ||= $self->ra_uuid;
- "$self->{map_root}.$uuid";
-}
-
-sub uri_encode {
- my ($f) = @_;
- $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg;
- $f
-}
-
-sub uri_decode {
- my ($f) = @_;
- $f =~ s#%([0-9a-fA-F]{2})#chr(hex($1))#eg;
- $f
-}
-
-sub remove_username {
- $_[0] =~ s{^([^:]*://)[^@]+@}{$1};
-}
-
-package Git::SVN::Log;
-use strict;
-use warnings;
-use POSIX qw/strftime/;
-use constant commit_log_separator => ('-' x 72) . "\n";
-use vars qw/$TZ $limit $color $pager $non_recursive $verbose $oneline
- %rusers $show_commit $incremental/;
-my $l_fmt;
-
-sub cmt_showable {
- my ($c) = @_;
- return 1 if defined $c->{r};
-
- # big commit message got truncated by the 16k pretty buffer in rev-list
- if ($c->{l} && $c->{l}->[-1] eq "...\n" &&
- $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) {
- @{$c->{l}} = ();
- my @log = command(qw/cat-file commit/, $c->{c});
-
- # shift off the headers
- shift @log while ($log[0] ne '');
- shift @log;
-
- # TODO: make $c->{l} not have a trailing newline in the future
- @{$c->{l}} = map { "$_\n" } grep !/^git-svn-id: /, @log;
-
- (undef, $c->{r}, undef) = ::extract_metadata(
- (grep(/^git-svn-id: /, @log))[-1]);
- }
- return defined $c->{r};
-}
-
-sub log_use_color {
- return $color || Git->repository->get_colorbool('color.diff');
-}
-
-sub git_svn_log_cmd {
- my ($r_min, $r_max, @args) = @_;
- my $head = 'HEAD';
- my (@files, @log_opts);
- foreach my $x (@args) {
- if ($x eq '--' || @files) {
- push @files, $x;
- } else {
- if (::verify_ref("$x^0")) {
- $head = $x;
- } else {
- push @log_opts, $x;
- }
- }
- }
-
- my ($url, $rev, $uuid, $gs) = ::working_head_info($head);
- $gs ||= Git::SVN->_new;
- my @cmd = (qw/log --abbrev-commit --pretty=raw --default/,
- $gs->refname);
- push @cmd, '-r' unless $non_recursive;
- push @cmd, qw/--raw --name-status/ if $verbose;
- push @cmd, '--color' if log_use_color();
- push @cmd, @log_opts;
- if (defined $r_max && $r_max == $r_min) {
- push @cmd, '--max-count=1';
- if (my $c = $gs->rev_map_get($r_max)) {
- push @cmd, $c;
- }
- } elsif (defined $r_max) {
- if ($r_max < $r_min) {
- ($r_min, $r_max) = ($r_max, $r_min);
- }
- my (undef, $c_max) = $gs->find_rev_before($r_max, 1, $r_min);
- my (undef, $c_min) = $gs->find_rev_after($r_min, 1, $r_max);
- # If there are no commits in the range, both $c_max and $c_min
- # will be undefined. If there is at least 1 commit in the
- # range, both will be defined.
- return () if !defined $c_min || !defined $c_max;
- if ($c_min eq $c_max) {
- push @cmd, '--max-count=1', $c_min;
- } else {
- push @cmd, '--boundary', "$c_min..$c_max";
- }
- }
- return (@cmd, @files);
-}
-
-# adapted from pager.c
-sub config_pager {
- if (! -t *STDOUT) {
- $ENV{GIT_PAGER_IN_USE} = 'false';
- $pager = undef;
- return;
- }
- chomp($pager = command_oneline(qw(var GIT_PAGER)));
- if ($pager eq 'cat') {
- $pager = undef;
- }
- $ENV{GIT_PAGER_IN_USE} = defined($pager);
-}
-
-sub run_pager {
- return unless defined $pager;
- pipe my ($rfd, $wfd) or return;
- defined(my $pid = fork) or ::fatal "Can't fork: $!";
- if (!$pid) {
- open STDOUT, '>&', $wfd or
- ::fatal "Can't redirect to stdout: $!";
- return;
- }
- open STDIN, '<&', $rfd or ::fatal "Can't redirect stdin: $!";
- $ENV{LESS} ||= 'FRSX';
- exec $pager or ::fatal "Can't run pager: $! ($pager)";
-}
-
-sub format_svn_date {
- my $t = shift || time;
- my $gmoff = Git::SVN::get_tz($t);
- return strftime("%Y-%m-%d %H:%M:%S $gmoff (%a, %d %b %Y)", localtime($t));
-}
-
-sub parse_git_date {
- my ($t, $tz) = @_;
- # Date::Parse isn't in the standard Perl distro :(
- if ($tz =~ s/^\+//) {
- $t += tz_to_s_offset($tz);
- } elsif ($tz =~ s/^\-//) {
- $t -= tz_to_s_offset($tz);
- }
- return $t;
-}
-
-sub set_local_timezone {
- if (defined $TZ) {
- $ENV{TZ} = $TZ;
- } else {
- delete $ENV{TZ};
- }
-}
-
-sub tz_to_s_offset {
- my ($tz) = @_;
- $tz =~ s/(\d\d)$//;
- return ($1 * 60) + ($tz * 3600);
-}
-
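(A small worked example, not part of the patch: for a timezone string whose sign has already been stripped by parse_git_date(), e.g. '0530', the substitution above leaves '05' in $tz and captures '30', so the offset is 30*60 + 5*3600 = 19800 seconds:)

        use strict;
        use warnings;

        # Sketch of tz_to_s_offset() on a made-up offset.
        my $tz = '0530';
        $tz =~ s/(\d\d)$//;
        my $seconds = ($1 * 60) + ($tz * 3600);
        print "$seconds\n";   # 19800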
-sub get_author_info {
- my ($dest, $author, $t, $tz) = @_;
- $author =~ s/(?:^\s*|\s*$)//g;
- $dest->{a_raw} = $author;
- my $au;
- if ($::_authors) {
- $au = $rusers{$author} || undef;
- }
- if (!$au) {
- ($au) = ($author =~ /<([^>]+)\@[^>]+>$/);
- }
- $dest->{t} = $t;
- $dest->{tz} = $tz;
- $dest->{a} = $au;
- $dest->{t_utc} = parse_git_date($t, $tz);
-}
-
-sub process_commit {
- my ($c, $r_min, $r_max, $defer) = @_;
- if (defined $r_min && defined $r_max) {
- if ($r_min == $c->{r} && $r_min == $r_max) {
- show_commit($c);
- return 0;
- }
- return 1 if $r_min == $r_max;
- if ($r_min < $r_max) {
- # we need to reverse the print order
- return 0 if (defined $limit && --$limit < 0);
- push @$defer, $c;
- return 1;
- }
- if ($r_min != $r_max) {
- return 1 if ($r_min < $c->{r});
- return 1 if ($r_max > $c->{r});
- }
- }
- return 0 if (defined $limit && --$limit < 0);
- show_commit($c);
- return 1;
-}
-
-sub show_commit {
- my $c = shift;
- if ($oneline) {
- my $x = "\n";
- if (my $l = $c->{l}) {
- while ($l->[0] =~ /^\s*$/) { shift @$l }
- $x = $l->[0];
- }
- $l_fmt ||= 'A' . length($c->{r});
- print 'r',pack($l_fmt, $c->{r}),' | ';
- print "$c->{c} | " if $show_commit;
- print $x;
- } else {
- show_commit_normal($c);
- }
-}
-
-sub show_commit_changed_paths {
- my ($c) = @_;
- return unless $c->{changed};
- print "Changed paths:\n", @{$c->{changed}};
-}
-
-sub show_commit_normal {
- my ($c) = @_;
- print commit_log_separator, "r$c->{r} | ";
- print "$c->{c} | " if $show_commit;
- print "$c->{a} | ", format_svn_date($c->{t_utc}), ' | ';
- my $nr_line = 0;
-
- if (my $l = $c->{l}) {
- while ($l->[$#$l] eq "\n" && $#$l > 0
- && $l->[($#$l - 1)] eq "\n") {
- pop @$l;
- }
- $nr_line = scalar @$l;
- if (!$nr_line) {
- print "1 line\n\n\n";
- } else {
- if ($nr_line == 1) {
- $nr_line = '1 line';
- } else {
- $nr_line .= ' lines';
- }
- print $nr_line, "\n";
- show_commit_changed_paths($c);
- print "\n";
- print $_ foreach @$l;
- }
- } else {
- print "1 line\n";
- show_commit_changed_paths($c);
- print "\n";
-
- }
- foreach my $x (qw/raw stat diff/) {
- if ($c->{$x}) {
- print "\n";
- print $_ foreach @{$c->{$x}}
- }
- }
-}
-
-sub cmd_show_log {
- my (@args) = @_;
- my ($r_min, $r_max);
- my $r_last = -1; # prevent dupes
- set_local_timezone();
- if (defined $::_revision) {
- if ($::_revision =~ /^(\d+):(\d+)$/) {
- ($r_min, $r_max) = ($1, $2);
- } elsif ($::_revision =~ /^\d+$/) {
- $r_min = $r_max = $::_revision;
- } else {
- ::fatal "-r$::_revision is not supported, use ",
- "standard 'git log' arguments instead";
- }
- }
-
- config_pager();
- @args = git_svn_log_cmd($r_min, $r_max, @args);
- if (!@args) {
- print commit_log_separator unless $incremental || $oneline;
- return;
- }
- my $log = command_output_pipe(@args);
- run_pager();
- my (@k, $c, $d, $stat);
- my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/;
- while (<$log>) {
- if (/^${esc_color}commit (?:- )?($::sha1_short)/o) {
- my $cmt = $1;
- if ($c && cmt_showable($c) && $c->{r} != $r_last) {
- $r_last = $c->{r};
- process_commit($c, $r_min, $r_max, \@k) or
- goto out;
- }
- $d = undef;
- $c = { c => $cmt };
- } elsif (/^${esc_color}author (.+) (\d+) ([\-\+]?\d+)$/o) {
- get_author_info($c, $1, $2, $3);
- } elsif (/^${esc_color}(?:tree|parent|committer) /o) {
- # ignore
- } elsif (/^${esc_color}:\d{6} \d{6} $::sha1_short/o) {
- push @{$c->{raw}}, $_;
- } elsif (/^${esc_color}[ACRMDT]\t/) {
- # we could add $SVN->{svn_path} here, but that requires
- # remote access at the moment (repo_path_split)...
- s#^(${esc_color})([ACRMDT])\t#$1 $2 #o;
- push @{$c->{changed}}, $_;
- } elsif (/^${esc_color}diff /o) {
- $d = 1;
- push @{$c->{diff}}, $_;
- } elsif ($d) {
- push @{$c->{diff}}, $_;
- } elsif (/^\ .+\ \|\s*\d+\ $esc_color[\+\-]*
- $esc_color*[\+\-]*$esc_color$/x) {
- $stat = 1;
- push @{$c->{stat}}, $_;
- } elsif ($stat && /^ \d+ files changed, \d+ insertions/) {
- push @{$c->{stat}}, $_;
- $stat = undef;
- } elsif (/^${esc_color} (git-svn-id:.+)$/o) {
- ($c->{url}, $c->{r}, undef) = ::extract_metadata($1);
- } elsif (s/^${esc_color} //o) {
- push @{$c->{l}}, $_;
- }
- }
- if ($c && defined $c->{r} && $c->{r} != $r_last) {
- $r_last = $c->{r};
- process_commit($c, $r_min, $r_max, \@k);
- }
- if (@k) {
- ($r_min, $r_max) = ($r_max, $r_min);
- process_commit($_, $r_min, $r_max) foreach reverse @k;
- }
-out:
- close $log;
- print commit_log_separator unless $incremental || $oneline;
-}
-
-sub cmd_blame {
- my $path = pop;
-
- config_pager();
- run_pager();
-
- my ($fh, $ctx, $rev);
-
- if ($_git_format) {
- ($fh, $ctx) = command_output_pipe('blame', @_, $path);
- while (my $line = <$fh>) {
- if ($line =~ /^\^?([[:xdigit:]]+)\s/) {
- # Uncommitted edits show up as a rev ID of
- # all zeros, which we can't look up with
- # cmt_metadata
- if ($1 !~ /^0+$/) {
- (undef, $rev, undef) =
- ::cmt_metadata($1);
- $rev = '0' if (!$rev);
- } else {
- $rev = '0';
- }
- $rev = sprintf('%-10s', $rev);
- $line =~ s/^\^?[[:xdigit:]]+(\s)/$rev$1/;
- }
- print $line;
- }
- } else {
- ($fh, $ctx) = command_output_pipe('blame', '-p', @_, 'HEAD',
- '--', $path);
- my ($sha1);
- my %authors;
- my @buffer;
- my %dsha; #distinct sha keys
-
- while (my $line = <$fh>) {
- push @buffer, $line;
- if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
- $dsha{$1} = 1;
- }
- }
-
- my $s2r = ::cmt_sha2rev_batch([keys %dsha]);
-
- foreach my $line (@buffer) {
- if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
- $rev = $s2r->{$1};
- $rev = '0' if (!$rev)
- }
- elsif ($line =~ /^author (.*)/) {
- $authors{$rev} = $1;
- $authors{$rev} =~ s/\s/_/g;
- }
- elsif ($line =~ /^\t(.*)$/) {
- printf("%6s %10s %s\n", $rev, $authors{$rev}, $1);
- }
- }
- }
- command_close_pipe($fh, $ctx);
-}
-
-package Git::SVN::Migration;
-# these version numbers do NOT correspond to actual version numbers
-# of git nor git-svn. They are just relative.
-#
-# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD
-#
-# v1 layout: .git/$id/info/url, refs/remotes/$id
-#
-# v2 layout: .git/svn/$id/info/url, refs/remotes/$id
-#
-# v3 layout: .git/svn/$id, refs/remotes/$id
-# - info/url may remain for backwards compatibility
-# - this is what we migrate up to this layout automatically,
-# - this will be used by git svn init on single branches
-# v3.1 layout (auto migrated):
-# - .rev_db => .rev_db.$UUID, .rev_db will remain as a symlink
-# for backwards compatibility
-#
-# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id
-# - this is only created for newly multi-init-ed
-# repositories. Similar in spirit to the
-# --use-separate-remotes option in git-clone (now default)
-# - we do not automatically migrate to this (following
-# the example set by core git)
-#
-# v5 layout: .rev_db.$UUID => .rev_map.$UUID
-# - newer, more-efficient format that uses 24-bytes per record
-# with no filler space.
-# - use xxd -c24 < .rev_map.$UUID to view and debug
-# - This is a one-way migration, repositories updated to the
-# new format will not be able to use old git-svn without
-# rebuilding the .rev_db. Rebuilding the rev_db is not
-# possible if noMetadata or useSvmProps are set; but should
-# be no problem for users that use the (sensible) defaults.
-use strict;
-use warnings;
-use Carp qw/croak/;
-use File::Path qw/mkpath/;
-use File::Basename qw/dirname basename/;
-use vars qw/$_minimize/;
-
-sub migrate_from_v0 {
- my $git_dir = $ENV{GIT_DIR};
- return undef unless -d $git_dir;
- my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
- my $migrated = 0;
- while (<$fh>) {
- chomp;
- my ($id, $orig_ref) = ($_, $_);
- next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#;
- next unless -f "$git_dir/$id/info/url";
- my $new_ref = "refs/remotes/$id";
- if (::verify_ref("$new_ref^0")) {
- print STDERR "W: $orig_ref is probably an old ",
- "branch used by an ancient version of ",
- "git-svn.\n",
- "However, $new_ref also exists.\n",
- "We will not be able ",
- "to use this branch until this ",
- "ambiguity is resolved.\n";
- next;
- }
- print STDERR "Migrating from v0 layout...\n" if !$migrated;
- print STDERR "Renaming ref: $orig_ref => $new_ref\n";
- command_noisy('update-ref', $new_ref, $orig_ref);
- command_noisy('update-ref', '-d', $orig_ref, $orig_ref);
- $migrated++;
- }
- command_close_pipe($fh, $ctx);
- print STDERR "Done migrating from v0 layout...\n" if $migrated;
- $migrated;
-}
-
-sub migrate_from_v1 {
- my $git_dir = $ENV{GIT_DIR};
- my $migrated = 0;
- return $migrated unless -d $git_dir;
- my $svn_dir = "$git_dir/svn";
-
- # just in case somebody used 'svn' as their $id at some point...
- return $migrated if -d $svn_dir && ! -f "$svn_dir/info/url";
-
- print STDERR "Migrating from a git-svn v1 layout...\n";
- mkpath([$svn_dir]);
- print STDERR "Data from a previous version of git-svn exists, but\n\t",
- "$svn_dir\n\t(required for this version ",
- "($::VERSION) of git-svn) does not exist.\n";
- my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
- while (<$fh>) {
- my $x = $_;
- next unless $x =~ s#^refs/remotes/##;
- chomp $x;
- next unless -f "$git_dir/$x/info/url";
- my $u = eval { ::file_to_s("$git_dir/$x/info/url") };
- next unless $u;
- my $dn = dirname("$git_dir/svn/$x");
- mkpath([$dn]) unless -d $dn;
- if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID:
- mkpath(["$git_dir/svn/svn"]);
- print STDERR " - $git_dir/$x/info => ",
- "$git_dir/svn/$x/info\n";
- rename "$git_dir/$x/info", "$git_dir/svn/$x/info" or
- croak "$!: $x";
- # don't worry too much about these, they probably
- # don't exist with repos this old (save for index,
- # and we can easily regenerate that)
- foreach my $f (qw/unhandled.log index .rev_db/) {
- rename "$git_dir/$x/$f", "$git_dir/svn/$x/$f";
- }
- } else {
- print STDERR " - $git_dir/$x => $git_dir/svn/$x\n";
- rename "$git_dir/$x", "$git_dir/svn/$x" or
- croak "$!: $x";
- }
- $migrated++;
- }
- command_close_pipe($fh, $ctx);
- print STDERR "Done migrating from a git-svn v1 layout\n";
- $migrated;
-}
-
-sub read_old_urls {
- my ($l_map, $pfx, $path) = @_;
- my @dir;
- foreach (<$path/*>) {
- if (-r "$_/info/url") {
- $pfx .= '/' if $pfx && $pfx !~ m!/$!;
- my $ref_id = $pfx . basename $_;
- my $url = ::file_to_s("$_/info/url");
- $l_map->{$ref_id} = $url;
- } elsif (-d $_) {
- push @dir, $_;
- }
- }
- foreach (@dir) {
- my $x = $_;
- $x =~ s!^\Q$ENV{GIT_DIR}\E/svn/!!o;
- read_old_urls($l_map, $x, $_);
- }
-}
-
-sub migrate_from_v2 {
- my @cfg = command(qw/config -l/);
- return if grep /^svn-remote\..+\.url=/, @cfg;
- my %l_map;
- read_old_urls(\%l_map, '', "$ENV{GIT_DIR}/svn");
- my $migrated = 0;
-
- foreach my $ref_id (sort keys %l_map) {
- eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) };
- if ($@) {
- Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id);
- }
- $migrated++;
- }
- $migrated;
-}
-
-sub minimize_connections {
- my $r = Git::SVN::read_all_remotes();
- my $new_urls = {};
- my $root_repos = {};
- foreach my $repo_id (keys %$r) {
- my $url = $r->{$repo_id}->{url} or next;
- my $fetch = $r->{$repo_id}->{fetch} or next;
- my $ra = Git::SVN::Ra->new($url);
-
- # skip existing cases where we already connect to the root
- if (($ra->{url} eq $ra->{repos_root}) ||
- ($ra->{repos_root} eq $repo_id)) {
- $root_repos->{$ra->{url}} = $repo_id;
- next;
- }
-
- my $root_ra = Git::SVN::Ra->new($ra->{repos_root});
- my $root_path = $ra->{url};
- $root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##;
- foreach my $path (keys %$fetch) {
- my $ref_id = $fetch->{$path};
- my $gs = Git::SVN->new($ref_id, $repo_id, $path);
-
- # make sure we can read when connecting to
- # a higher level of a repository
- my ($last_rev, undef) = $gs->last_rev_commit;
- if (!defined $last_rev) {
- $last_rev = eval {
- $root_ra->get_latest_revnum;
- };
- next if $@;
- }
- my $new = $root_path;
- $new .= length $path ? "/$path" : '';
- eval {
- $root_ra->get_log([$new], $last_rev, $last_rev,
- 0, 0, 1, sub { });
- };
- next if $@;
- $new_urls->{$ra->{repos_root}}->{$new} =
- { ref_id => $ref_id,
- old_repo_id => $repo_id,
- old_path => $path };
- }
- }
-
- my @emptied;
- foreach my $url (keys %$new_urls) {
- # see if we can re-use an existing [svn-remote "repo_id"]
- # instead of creating a(n ugly) new section:
- my $repo_id = $root_repos->{$url} || $url;
-
- my $fetch = $new_urls->{$url};
- foreach my $path (keys %$fetch) {
- my $x = $fetch->{$path};
- Git::SVN->init($url, $path, $repo_id, $x->{ref_id});
- my $pfx = "svn-remote.$x->{old_repo_id}";
-
- my $old_fetch = quotemeta("$x->{old_path}:".
- "$x->{ref_id}");
- command_noisy(qw/config --unset/,
- "$pfx.fetch", '^'. $old_fetch . '$');
- delete $r->{$x->{old_repo_id}}->
- {fetch}->{$x->{old_path}};
- if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) {
- command_noisy(qw/config --unset/,
- "$pfx.url");
- push @emptied, $x->{old_repo_id}
- }
- }
- }
- if (@emptied) {
- my $file = $ENV{GIT_CONFIG} || "$ENV{GIT_DIR}/config";
- print STDERR <<EOF;
-The following [svn-remote] sections in your config file ($file) are empty
-and can be safely removed:
-EOF
- print STDERR "[svn-remote \"$_\"]\n" foreach @emptied;
- }
-}
-
-sub migration_check {
- migrate_from_v0();
- migrate_from_v1();
- migrate_from_v2();
- minimize_connections() if $_minimize;
-}
-
-package Git::IndexInfo;
-use strict;
-use warnings;
-use Git qw/command_input_pipe command_close_pipe/;
-
-sub new {
- my ($class) = @_;
- my ($gui, $ctx) = command_input_pipe(qw/update-index -z --index-info/);
- bless { gui => $gui, ctx => $ctx, nr => 0}, $class;
-}
-
-sub remove {
- my ($self, $path) = @_;
- if (print { $self->{gui} } '0 ', 0 x 40, "\t", $path, "\0") {
- return ++$self->{nr};
- }
- undef;
-}
-
-sub update {
- my ($self, $mode, $hash, $path) = @_;
- if (print { $self->{gui} } $mode, ' ', $hash, "\t", $path, "\0") {
- return ++$self->{nr};
- }
- undef;
-}
-
-sub DESTROY {
- my ($self) = @_;
- command_close_pipe($self->{gui}, $self->{ctx});
-}
-
-package Git::SVN::GlobSpec;
-use strict;
-use warnings;
-
-sub new {
- my ($class, $glob, $pattern_ok) = @_;
- my $re = $glob;
- $re =~ s!/+$!!g; # no need for trailing slashes
- my (@left, @right, @patterns);
- my $state = "left";
- my $die_msg = "Only one set of wildcard directories " .
- "(e.g. '*' or '*/*/*') is supported: '$glob'\n";
- for my $part (split(m|/|, $glob)) {
- if ($part =~ /\*/ && $part ne "*") {
- die "Invalid pattern in '$glob': $part\n";
- } elsif ($pattern_ok && $part =~ /[{}]/ &&
- $part !~ /^\{[^{}]+\}/) {
- die "Invalid pattern in '$glob': $part\n";
- }
- if ($part eq "*") {
- die $die_msg if $state eq "right";
- $state = "pattern";
- push(@patterns, "[^/]*");
- } elsif ($pattern_ok && $part =~ /^\{(.*)\}$/) {
- die $die_msg if $state eq "right";
- $state = "pattern";
- my $p = quotemeta($1);
- $p =~ s/\\,/|/g;
- push(@patterns, "(?:$p)");
- } else {
- if ($state eq "left") {
- push(@left, $part);
- } else {
- push(@right, $part);
- $state = "right";
- }
- }
- }
- my $depth = @patterns;
- if ($depth == 0) {
- die "One '*' is needed in glob: '$glob'\n";
- }
- my $left = join('/', @left);
- my $right = join('/', @right);
- $re = join('/', @patterns);
- $re = join('\/',
- grep(length, quotemeta($left), "($re)", quotemeta($right)));
- my $left_re = qr/^\/\Q$left\E(\/|$)/;
- bless { left => $left, right => $right, left_regex => $left_re,
- regex => qr/$re/, glob => $glob, depth => $depth }, $class;
-}
-
-sub full_path {
- my ($self, $path) = @_;
- return (length $self->{left} ? "$self->{left}/" : '') .
- $path . (length $self->{right} ? "/$self->{right}" : '');
-}
-
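(An illustrative aside, not part of the patch: for a refspec glob such as branches/*:refs/remotes/*, the local side parses as sketched below, assuming git's Perl modules, including the Git::SVN::GlobSpec split out by this series, are on @INC:)

        use strict;
        use warnings;
        use Git::SVN::GlobSpec;

        # Sketch: parse the local half of "branches/*:refs/remotes/*".
        my $glob = Git::SVN::GlobSpec->new('branches/*', 1);
        print $glob->{left}, "\n";               # branches
        print $glob->{depth}, "\n";              # 1
        print $glob->full_path('topic'), "\n";   # branches/topic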
__END__
Data structures:
diff --git a/gitweb/gitweb.perl b/gitweb/gitweb.perl
index 55e0e9ea3..3d6a70538 100755
--- a/gitweb/gitweb.perl
+++ b/gitweb/gitweb.perl
@@ -4484,30 +4484,33 @@ sub git_print_log {
}
# print log
- my $signoff = 0;
- my $empty = 0;
+ my $skip_blank_line = 0;
foreach my $line (@$log) {
- if ($line =~ m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i) {
- $signoff = 1;
- $empty = 0;
+ if ($line =~ m/^\s*([A-Z][-A-Za-z]*-[Bb]y|C[Cc]): /) {
if (! $opts{'-remove_signoff'}) {
print "<span class=\"signoff\">" . esc_html($line) . "</span><br/>\n";
- next;
- } else {
- # remove signoff lines
- next;
+ $skip_blank_line = 1;
}
- } else {
- $signoff = 0;
+ next;
+ }
+
+ if ($line =~ m,\s*([a-z]*link): (https?://\S+),i) {
+ if (! $opts{'-remove_signoff'}) {
+ print "<span class=\"signoff\">" . esc_html($1) . ": " .
+ "<a href=\"" . esc_html($2) . "\">" . esc_html($2) . "</a>" .
+ "</span><br/>\n";
+ $skip_blank_line = 1;
+ }
+ next;
}
# print only one empty line
# do not print empty line after signoff
if ($line eq "") {
- next if ($empty || $signoff);
- $empty = 1;
+ next if ($skip_blank_line);
+ $skip_blank_line = 1;
} else {
- $empty = 0;
+ $skip_blank_line = 0;
}
print format_log_line_html($line) . "<br/>\n";
@@ -4515,7 +4518,7 @@ sub git_print_log {
if ($opts{'-final_empty_line'}) {
# end with single empty line
- print "<br/>\n" unless $empty;
+ print "<br/>\n" unless $skip_blank_line;
}
}
diff --git a/help.c b/help.c
index 662349dd5..2a42ec6d1 100644
--- a/help.c
+++ b/help.c
@@ -44,9 +44,12 @@ static void uniq(struct cmdnames *cmds)
if (!cmds->cnt)
return;
- for (i = j = 1; i < cmds->cnt; i++)
- if (strcmp(cmds->names[i]->name, cmds->names[i-1]->name))
+ for (i = j = 1; i < cmds->cnt; i++) {
+ if (!strcmp(cmds->names[i]->name, cmds->names[j-1]->name))
+ free(cmds->names[i]);
+ else
cmds->names[j++] = cmds->names[i];
+ }
cmds->cnt = j;
}
@@ -61,9 +64,10 @@ void exclude_cmds(struct cmdnames *cmds, struct cmdnames *excludes)
cmp = strcmp(cmds->names[ci]->name, excludes->names[ei]->name);
if (cmp < 0)
cmds->names[cj++] = cmds->names[ci++];
- else if (cmp == 0)
- ci++, ei++;
- else if (cmp > 0)
+ else if (cmp == 0) {
+ ei++;
+ free(cmds->names[ci++]);
+ } else if (cmp > 0)
ei++;
}
diff --git a/perl/.gitignore b/perl/.gitignore
index d5c6e22d0..0f1fc27f8 100644
--- a/perl/.gitignore
+++ b/perl/.gitignore
@@ -5,3 +5,4 @@ MYMETA.yml
blib
blibdirs
pm_to_blib
+PM.stamp
diff --git a/perl/Git/IndexInfo.pm b/perl/Git/IndexInfo.pm
new file mode 100644
index 000000000..a43108c98
--- /dev/null
+++ b/perl/Git/IndexInfo.pm
@@ -0,0 +1,33 @@
+package Git::IndexInfo;
+use strict;
+use warnings;
+use Git qw/command_input_pipe command_close_pipe/;
+
+sub new {
+ my ($class) = @_;
+ my ($gui, $ctx) = command_input_pipe(qw/update-index -z --index-info/);
+ bless { gui => $gui, ctx => $ctx, nr => 0}, $class;
+}
+
+sub remove {
+ my ($self, $path) = @_;
+ if (print { $self->{gui} } '0 ', 0 x 40, "\t", $path, "\0") {
+ return ++$self->{nr};
+ }
+ undef;
+}
+
+sub update {
+ my ($self, $mode, $hash, $path) = @_;
+ if (print { $self->{gui} } $mode, ' ', $hash, "\t", $path, "\0") {
+ return ++$self->{nr};
+ }
+ undef;
+}
+
+sub DESTROY {
+ my ($self) = @_;
+ command_close_pipe($self->{gui}, $self->{ctx});
+}
+
+1;
diff --git a/perl/Git/SVN.pm b/perl/Git/SVN.pm
new file mode 100644
index 000000000..b8b34744e
--- /dev/null
+++ b/perl/Git/SVN.pm
@@ -0,0 +1,2326 @@
+package Git::SVN;
+use strict;
+use warnings;
+use Fcntl qw/:DEFAULT :seek/;
+use constant rev_map_fmt => 'NH40';
+use vars qw/$_no_metadata
+ $_repack $_repack_flags $_use_svm_props $_head
+ $_use_svnsync_props $no_reuse_existing
+ $_use_log_author $_add_author_from $_localtime/;
+use Carp qw/croak/;
+use File::Path qw/mkpath/;
+use File::Copy qw/copy/;
+use IPC::Open3;
+use Time::Local;
+use Memoize; # core since 5.8.0, Jul 2002
+use Memoize::Storable;
+use POSIX qw(:signal_h);
+
+use Git qw(
+ command
+ command_oneline
+ command_noisy
+ command_output_pipe
+ command_close_pipe
+);
+use Git::SVN::Utils qw(fatal can_compress);
+
+my $can_use_yaml;
+BEGIN {
+ $can_use_yaml = eval { require Git::SVN::Memoize::YAML; 1};
+}
+
+our $_follow_parent = 1;
+our $_minimize_url = 'unset';
+our $default_repo_id = 'svn';
+our $default_ref_id = $ENV{GIT_SVN_ID} || 'git-svn';
+
+my ($_gc_nr, $_gc_period);
+
+# properties that we do not log:
+my %SKIP_PROP;
+BEGIN {
+ %SKIP_PROP = map { $_ => 1 } qw/svn:wc:ra_dav:version-url
+ svn:special svn:executable
+ svn:entry:committed-rev
+ svn:entry:last-author
+ svn:entry:uuid
+ svn:entry:committed-date/;
+
+ # some options are read globally, but can be overridden locally
+ # per [svn-remote "..."] section. Command-line options will *NOT*
+ # override options set in an [svn-remote "..."] section
+ no strict 'refs';
+ for my $option (qw/follow_parent no_metadata use_svm_props
+ use_svnsync_props/) {
+ my $key = $option;
+ $key =~ tr/_//d;
+ my $prop = "-$option";
+ *$option = sub {
+ my ($self) = @_;
+ return $self->{$prop} if exists $self->{$prop};
+ my $k = "svn-remote.$self->{repo_id}.$key";
+ eval { command_oneline(qw/config --get/, $k) };
+ if ($@) {
+ $self->{$prop} = ${"Git::SVN::_$option"};
+ } else {
+ my $v = command_oneline(qw/config --bool/,$k);
+ $self->{$prop} = $v eq 'false' ? 0 : 1;
+ }
+ return $self->{$prop};
+ }
+ }
+}
+
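(An illustrative aside, not part of the patch: the accessors generated above mean a per-remote entry in .git/config, such as the made-up one below, takes precedence over the corresponding global default set from the command line:)

        [svn-remote "svn"]
                url = https://svn.example.com/repo
                fetch = trunk:refs/remotes/git-svn
                noMetadata = true

(With that entry, $gs->no_metadata returns 1 for objects belonging to that remote; $Git::SVN::_no_metadata is consulted only when the key is absent.)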
+
+my (%LOCKFILES, %INDEX_FILES);
+END {
+ unlink keys %LOCKFILES if %LOCKFILES;
+ unlink keys %INDEX_FILES if %INDEX_FILES;
+}
+
+sub resolve_local_globs {
+ my ($url, $fetch, $glob_spec) = @_;
+ return unless defined $glob_spec;
+ my $ref = $glob_spec->{ref};
+ my $path = $glob_spec->{path};
+ foreach (command(qw#for-each-ref --format=%(refname) refs/#)) {
+ next unless m#^$ref->{regex}$#;
+ my $p = $1;
+ my $pathname = desanitize_refname($path->full_path($p));
+ my $refname = desanitize_refname($ref->full_path($p));
+ if (my $existing = $fetch->{$pathname}) {
+ if ($existing ne $refname) {
+ die "Refspec conflict:\n",
+ "existing: $existing\n",
+ " globbed: $refname\n";
+ }
+ my $u = (::cmt_metadata("$refname"))[0];
+ $u =~ s!^\Q$url\E(/|$)!! or die
+ "$refname: '$url' not found in '$u'\n";
+ if ($pathname ne $u) {
+ warn "W: Refspec glob conflict ",
+ "(ref: $refname):\n",
+ "expected path: $pathname\n",
+ " real path: $u\n",
+ "Continuing ahead with $u\n";
+ next;
+ }
+ } else {
+ $fetch->{$pathname} = $refname;
+ }
+ }
+}
+
+sub parse_revision_argument {
+ my ($base, $head) = @_;
+ if (!defined $::_revision || $::_revision eq 'BASE:HEAD') {
+ return ($base, $head);
+ }
+ return ($1, $2) if ($::_revision =~ /^(\d+):(\d+)$/);
+ return ($::_revision, $::_revision) if ($::_revision =~ /^\d+$/);
+ return ($head, $head) if ($::_revision eq 'HEAD');
+ return ($base, $1) if ($::_revision =~ /^BASE:(\d+)$/);
+ return ($1, $head) if ($::_revision =~ /^(\d+):HEAD$/);
+ die "revision argument: $::_revision not understood by git-svn\n";
+}
+
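(An illustrative aside, not part of the patch: with $base = 1 and $head = 500, the -r/--revision forms accepted above map as follows:)

        undef or 'BASE:HEAD'  ->  (1, 500)
        '100:200'             ->  (100, 200)
        '42'                  ->  (42, 42)
        'HEAD'                ->  (500, 500)
        'BASE:200'            ->  (1, 200)
        '100:HEAD'            ->  (100, 500)
        anything else         ->  dies ("revision argument: ... not understood by git-svn")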
+sub fetch_all {
+ my ($repo_id, $remotes) = @_;
+ if (ref $repo_id) {
+ my $gs = $repo_id;
+ $repo_id = undef;
+ $repo_id = $gs->{repo_id};
+ }
+ $remotes ||= read_all_remotes();
+ my $remote = $remotes->{$repo_id} or
+ die "[svn-remote \"$repo_id\"] unknown\n";
+ my $fetch = $remote->{fetch};
+ my $url = $remote->{url} or die "svn-remote.$repo_id.url not defined\n";
+ my (@gs, @globs);
+ my $ra = Git::SVN::Ra->new($url);
+ my $uuid = $ra->get_uuid;
+ my $head = $ra->get_latest_revnum;
+
+ # ignore errors, $head revision may not even exist anymore
+ eval { $ra->get_log("", $head, 0, 1, 0, 1, sub { $head = $_[1] }) };
+ warn "W: $@\n" if $@;
+
+ my $base = defined $fetch ? $head : 0;
+
+ # read the max revs for wildcard expansion (branches/*, tags/*)
+ foreach my $t (qw/branches tags/) {
+ defined $remote->{$t} or next;
+ push @globs, @{$remote->{$t}};
+
+ my $max_rev = eval { tmp_config(qw/--int --get/,
+ "svn-remote.$repo_id.${t}-maxRev") };
+ if (defined $max_rev && ($max_rev < $base)) {
+ $base = $max_rev;
+ } elsif (!defined $max_rev) {
+ $base = 0;
+ }
+ }
+
+ if ($fetch) {
+ foreach my $p (sort keys %$fetch) {
+ my $gs = Git::SVN->new($fetch->{$p}, $repo_id, $p);
+ my $lr = $gs->rev_map_max;
+ if (defined $lr) {
+ $base = $lr if ($lr < $base);
+ }
+ push @gs, $gs;
+ }
+ }
+
+ ($base, $head) = parse_revision_argument($base, $head);
+ $ra->gs_fetch_loop_common($base, $head, \@gs, \@globs);
+}
+
+sub read_all_remotes {
+ my $r = {};
+ my $use_svm_props = eval { command_oneline(qw/config --bool
+ svn.useSvmProps/) };
+ $use_svm_props = $use_svm_props eq 'true' if $use_svm_props;
+ my $svn_refspec = qr{\s*(.*?)\s*:\s*(.+?)\s*};
+ foreach (grep { s/^svn-remote\.// } command(qw/config -l/)) {
+ if (m!^(.+)\.fetch=$svn_refspec$!) {
+ my ($remote, $local_ref, $remote_ref) = ($1, $2, $3);
+ die("svn-remote.$remote: remote ref '$remote_ref' "
+ . "must start with 'refs/'\n")
+ unless $remote_ref =~ m{^refs/};
+ $local_ref = uri_decode($local_ref);
+ $r->{$remote}->{fetch}->{$local_ref} = $remote_ref;
+ $r->{$remote}->{svm} = {} if $use_svm_props;
+ } elsif (m!^(.+)\.usesvmprops=\s*(.*)\s*$!) {
+ $r->{$1}->{svm} = {};
+ } elsif (m!^(.+)\.url=\s*(.*)\s*$!) {
+ $r->{$1}->{url} = $2;
+ } elsif (m!^(.+)\.pushurl=\s*(.*)\s*$!) {
+ $r->{$1}->{pushurl} = $2;
+ } elsif (m!^(.+)\.ignore-refs=\s*(.*)\s*$!) {
+ $r->{$1}->{ignore_refs_regex} = $2;
+ } elsif (m!^(.+)\.(branches|tags)=$svn_refspec$!) {
+ my ($remote, $t, $local_ref, $remote_ref) =
+ ($1, $2, $3, $4);
+ die("svn-remote.$remote: remote ref '$remote_ref' ($t) "
+ . "must start with 'refs/'\n")
+ unless $remote_ref =~ m{^refs/};
+ $local_ref = uri_decode($local_ref);
+
+ require Git::SVN::GlobSpec;
+ my $rs = {
+ t => $t,
+ remote => $remote,
+ path => Git::SVN::GlobSpec->new($local_ref, 1),
+ ref => Git::SVN::GlobSpec->new($remote_ref, 0) };
+ if (length($rs->{ref}->{right}) != 0) {
+ die "The '*' glob character must be the last ",
+ "character of '$remote_ref'\n";
+ }
+ push @{ $r->{$remote}->{$t} }, $rs;
+ }
+ }
+
+ map {
+ if (defined $r->{$_}->{svm}) {
+ my $svm;
+ eval {
+ my $section = "svn-remote.$_";
+ $svm = {
+ source => tmp_config('--get',
+ "$section.svm-source"),
+ replace => tmp_config('--get',
+ "$section.svm-replace"),
+ }
+ };
+ $r->{$_}->{svm} = $svm;
+ }
+ } keys %$r;
+
+ foreach my $remote (keys %$r) {
+ foreach ( grep { defined $_ }
+ map { $r->{$remote}->{$_} } qw(branches tags) ) {
+ foreach my $rs ( @$_ ) {
+ $rs->{ignore_refs_regex} =
+ $r->{$remote}->{ignore_refs_regex};
+ }
+ }
+ }
+
+ $r;
+}
+
+sub init_vars {
+ $_gc_nr = $_gc_period = 1000;
+ if (defined $_repack || defined $_repack_flags) {
+ warn "Repack options are obsolete; they have no effect.\n";
+ }
+}
+
+sub verify_remotes_sanity {
+ return unless -d $ENV{GIT_DIR};
+ my %seen;
+ foreach (command(qw/config -l/)) {
+ if (m!^svn-remote\.(?:.+)\.fetch=.*:refs/remotes/(\S+)\s*$!) {
+ if ($seen{$1}) {
+			die "Remote ref refs/remotes/$1 is tracked by",
+ "\n \"$_\"\nand\n \"$seen{$1}\"\n",
+ "Please resolve this ambiguity in ",
+ "your git configuration file before ",
+ "continuing\n";
+ }
+ $seen{$1} = $_;
+ }
+ }
+}
+
+sub find_existing_remote {
+ my ($url, $remotes) = @_;
+ return undef if $no_reuse_existing;
+ my $existing;
+ foreach my $repo_id (keys %$remotes) {
+ my $u = $remotes->{$repo_id}->{url} or next;
+ next if $u ne $url;
+ $existing = $repo_id;
+ last;
+ }
+ $existing;
+}
+
+sub init_remote_config {
+ my ($self, $url, $no_write) = @_;
+ $url =~ s!/+$!!; # strip trailing slash
+ my $r = read_all_remotes();
+ my $existing = find_existing_remote($url, $r);
+ if ($existing) {
+ unless ($no_write) {
+ print STDERR "Using existing ",
+ "[svn-remote \"$existing\"]\n";
+ }
+ $self->{repo_id} = $existing;
+ } elsif ($_minimize_url) {
+ my $min_url = Git::SVN::Ra->new($url)->minimize_url;
+ $existing = find_existing_remote($min_url, $r);
+ if ($existing) {
+ unless ($no_write) {
+ print STDERR "Using existing ",
+ "[svn-remote \"$existing\"]\n";
+ }
+ $self->{repo_id} = $existing;
+ }
+ if ($min_url ne $url) {
+ unless ($no_write) {
+ print STDERR "Using higher level of URL: ",
+ "$url => $min_url\n";
+ }
+ my $old_path = $self->{path};
+ $self->{path} = $url;
+ $self->{path} =~ s!^\Q$min_url\E(/|$)!!;
+ if (length $old_path) {
+ $self->{path} .= "/$old_path";
+ }
+ $url = $min_url;
+ }
+ }
+ my $orig_url;
+ if (!$existing) {
+ # verify that we aren't overwriting anything:
+ $orig_url = eval {
+ command_oneline('config', '--get',
+ "svn-remote.$self->{repo_id}.url")
+ };
+ if ($orig_url && ($orig_url ne $url)) {
+ die "svn-remote.$self->{repo_id}.url already set: ",
+ "$orig_url\nwanted to set to: $url\n";
+ }
+ }
+ my ($xrepo_id, $xpath) = find_ref($self->refname);
+ if (!$no_write && defined $xpath) {
+ die "svn-remote.$xrepo_id.fetch already set to track ",
+ "$xpath:", $self->refname, "\n";
+ }
+ unless ($no_write) {
+ command_noisy('config',
+ "svn-remote.$self->{repo_id}.url", $url);
+ $self->{path} =~ s{^/}{};
+ $self->{path} =~ s{%([0-9A-F]{2})}{chr hex($1)}ieg;
+ command_noisy('config', '--add',
+ "svn-remote.$self->{repo_id}.fetch",
+ "$self->{path}:".$self->refname);
+ }
+ $self->{url} = $url;
+}
+
+sub find_by_url { # repos_root and path are optional
+ my ($class, $full_url, $repos_root, $path) = @_;
+
+ return undef unless defined $full_url;
+ remove_username($full_url);
+ remove_username($repos_root) if defined $repos_root;
+ my $remotes = read_all_remotes();
+ if (defined $full_url && defined $repos_root && !defined $path) {
+ $path = $full_url;
+ $path =~ s#^\Q$repos_root\E(?:/|$)##;
+ }
+ foreach my $repo_id (keys %$remotes) {
+ my $u = $remotes->{$repo_id}->{url} or next;
+ remove_username($u);
+ next if defined $repos_root && $repos_root ne $u;
+
+ my $fetch = $remotes->{$repo_id}->{fetch} || {};
+ foreach my $t (qw/branches tags/) {
+ foreach my $globspec (@{$remotes->{$repo_id}->{$t}}) {
+ resolve_local_globs($u, $fetch, $globspec);
+ }
+ }
+ my $p = $path;
+ my $rwr = rewrite_root({repo_id => $repo_id});
+		my $svm;
+		$svm = $remotes->{$repo_id}->{svm}
+			if defined $remotes->{$repo_id}->{svm};
+ unless (defined $p) {
+ $p = $full_url;
+ my $z = $u;
+ my $prefix = '';
+ if ($rwr) {
+ $z = $rwr;
+ remove_username($z);
+ } elsif (defined $svm) {
+ $z = $svm->{source};
+ $prefix = $svm->{replace};
+ $prefix =~ s#^\Q$u\E(?:/|$)##;
+ $prefix =~ s#/$##;
+ }
+ $p =~ s#^\Q$z\E(?:/|$)#$prefix# or next;
+ }
+ foreach my $f (keys %$fetch) {
+ next if $f ne $p;
+ return Git::SVN->new($fetch->{$f}, $repo_id, $f);
+ }
+ }
+ undef;
+}
+
+sub init {
+ my ($class, $url, $path, $repo_id, $ref_id, $no_write) = @_;
+ my $self = _new($class, $repo_id, $ref_id, $path);
+ if (defined $url) {
+ $self->init_remote_config($url, $no_write);
+ }
+ $self;
+}
+
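+# find_ref(REF_ID)
+# ----------------
+# Scans every svn-remote.*.fetch entry for one whose remote ref matches
+# REF_ID and returns the corresponding (repo_id, svn path) pair, or
+# undefs if no entry matches.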
+sub find_ref {
+ my ($ref_id) = @_;
+ foreach (command(qw/config -l/)) {
+ next unless m!^svn-remote\.(.+)\.fetch=
+ \s*(.*?)\s*:\s*(.+?)\s*$!x;
+ my ($repo_id, $path, $ref) = ($1, $2, $3);
+ if ($ref eq $ref_id) {
+ $path = '' if ($path =~ m#^\./?#);
+ return ($repo_id, $path);
+ }
+ }
+ (undef, undef, undef);
+}
+
+sub new {
+ my ($class, $ref_id, $repo_id, $path) = @_;
+ if (defined $ref_id && !defined $repo_id && !defined $path) {
+ ($repo_id, $path) = find_ref($ref_id);
+ if (!defined $repo_id) {
+ die "Could not find a \"svn-remote.*.fetch\" key ",
+ "in the repository configuration matching: ",
+ "$ref_id\n";
+ }
+ }
+ my $self = _new($class, $repo_id, $ref_id, $path);
+ if (!defined $self->{path} || !length $self->{path}) {
+ my $fetch = command_oneline('config', '--get',
+ "svn-remote.$repo_id.fetch",
+ ":$ref_id\$") or
+ die "Failed to read \"svn-remote.$repo_id.fetch\" ",
+ "\":$ref_id\$\" in config\n";
+ ($self->{path}, undef) = split(/\s*:\s*/, $fetch);
+ }
+ $self->{path} =~ s{/+}{/}g;
+ $self->{path} =~ s{\A/}{};
+ $self->{path} =~ s{/\z}{};
+ $self->{url} = command_oneline('config', '--get',
+ "svn-remote.$repo_id.url") or
+ die "Failed to read \"svn-remote.$repo_id.url\" in config\n";
+ $self->{pushurl} = eval { command_oneline('config', '--get',
+ "svn-remote.$repo_id.pushurl") };
+ $self->rebuild;
+ $self;
+}
+
+sub refname {
+ my ($refname) = $_[0]->{ref_id} ;
+
+	# It cannot end with a slash /; we'll throw up on this because
+	# SVN can't have directories with a slash in their name, either:
+ if ($refname =~ m{/$}) {
+ die "ref: '$refname' ends with a trailing slash, this is ",
+ "not permitted by git nor Subversion\n";
+ }
+
+	# It cannot have ASCII control characters, space, tilde ~, caret ^,
+	# colon :, question-mark ?, asterisk *, or open bracket [
+	# anywhere.
+ #
+ # Additionally, % must be escaped because it is used for escaping
+ # and we want our escaped refname to be reversible
+ $refname =~ s{([ \%~\^:\?\*\[\t])}{uc sprintf('%%%02x',ord($1))}eg;
+
+ # no slash-separated component can begin with a dot .
+ # /.* becomes /%2E*
+ $refname =~ s{/\.}{/%2E}g;
+
+ # It cannot have two consecutive dots .. anywhere
+ # .. becomes %2E%2E
+ $refname =~ s{\.\.}{%2E%2E}g;
+
+ # trailing dots and .lock are not allowed
+ # .$ becomes %2E and .lock becomes %2Elock
+ $refname =~ s{\.(?=$|lock$)}{%2E};
+
+ # the sequence @{ is used to access the reflog
+ # @{ becomes %40{
+ $refname =~ s{\@\{}{%40\{}g;
+
+ return $refname;
+}
+
+sub desanitize_refname {
+ my ($refname) = @_;
+ $refname =~ s{%(?:([0-9A-F]{2}))}{chr hex($1)}eg;
+ return $refname;
+}
+
+sub svm_uuid {
+ my ($self) = @_;
+ return $self->{svm}->{uuid} if $self->svm;
+ $self->ra;
+ unless ($self->{svm}) {
+ die "SVM UUID not cached, and reading remotely failed\n";
+ }
+ $self->{svm}->{uuid};
+}
+
+sub svm {
+ my ($self) = @_;
+ return $self->{svm} if $self->{svm};
+ my $svm;
+ # see if we have it in our config, first:
+ eval {
+ my $section = "svn-remote.$self->{repo_id}";
+ $svm = {
+ source => tmp_config('--get', "$section.svm-source"),
+ uuid => tmp_config('--get', "$section.svm-uuid"),
+ replace => tmp_config('--get', "$section.svm-replace"),
+ }
+ };
+ if ($svm && $svm->{source} && $svm->{uuid} && $svm->{replace}) {
+ $self->{svm} = $svm;
+ }
+ $self->{svm};
+}
+
+sub _set_svm_vars {
+ my ($self, $ra) = @_;
+ return $ra if $self->svm;
+
+ my @err = ( "useSvmProps set, but failed to read SVM properties\n",
+ "(svm:source, svm:uuid) ",
+ "from the following URLs:\n" );
+ sub read_svm_props {
+ my ($self, $ra, $path, $r) = @_;
+ my $props = ($ra->get_dir($path, $r))[2];
+ my $src = $props->{'svm:source'};
+ my $uuid = $props->{'svm:uuid'};
+ return undef if (!$src || !$uuid);
+
+ chomp($src, $uuid);
+
+ $uuid =~ m{^[0-9a-f\-]{30,}$}i
+ or die "doesn't look right - svm:uuid is '$uuid'\n";
+
+ # the '!' is used to mark the repos_root!/relative/path
+ $src =~ s{/?!/?}{/};
+ $src =~ s{/+$}{}; # no trailing slashes please
+ # username is of no interest
+ $src =~ s{(^[a-z\+]*://)[^/@]*@}{$1};
+
+ my $replace = $ra->{url};
+ $replace .= "/$path" if length $path;
+
+ my $section = "svn-remote.$self->{repo_id}";
+ tmp_config("$section.svm-source", $src);
+ tmp_config("$section.svm-replace", $replace);
+ tmp_config("$section.svm-uuid", $uuid);
+ $self->{svm} = {
+ source => $src,
+ uuid => $uuid,
+ replace => $replace
+ };
+ }
+
+ my $r = $ra->get_latest_revnum;
+ my $path = $self->{path};
+ my %tried;
+ while (length $path) {
+ unless ($tried{"$self->{url}/$path"}) {
+ return $ra if $self->read_svm_props($ra, $path, $r);
+ $tried{"$self->{url}/$path"} = 1;
+ }
+ $path =~ s#/?[^/]+$##;
+ }
+ die "Path: '$path' should be ''\n" if $path ne '';
+ return $ra if $self->read_svm_props($ra, $path, $r);
+ $tried{"$self->{url}/$path"} = 1;
+
+ if ($ra->{repos_root} eq $self->{url}) {
+ die @err, (map { " $_\n" } keys %tried), "\n";
+ }
+
+ # nope, make sure we're connected to the repository root:
+ my $ok;
+ my @tried_b;
+ $path = $ra->{svn_path};
+ $ra = Git::SVN::Ra->new($ra->{repos_root});
+ while (length $path) {
+ unless ($tried{"$ra->{url}/$path"}) {
+ $ok = $self->read_svm_props($ra, $path, $r);
+ last if $ok;
+ $tried{"$ra->{url}/$path"} = 1;
+ }
+ $path =~ s#/?[^/]+$##;
+ }
+ die "Path: '$path' should be ''\n" if $path ne '';
+ $ok ||= $self->read_svm_props($ra, $path, $r);
+ $tried{"$ra->{url}/$path"} = 1;
+ if (!$ok) {
+ die @err, (map { " $_\n" } keys %tried), "\n";
+ }
+ Git::SVN::Ra->new($self->{url});
+}
+
+sub svnsync {
+ my ($self) = @_;
+ return $self->{svnsync} if $self->{svnsync};
+
+ if ($self->no_metadata) {
+ die "Can't have both 'noMetadata' and ",
+ "'useSvnsyncProps' options set!\n";
+ }
+ if ($self->rewrite_root) {
+ die "Can't have both 'useSvnsyncProps' and 'rewriteRoot' ",
+ "options set!\n";
+ }
+ if ($self->rewrite_uuid) {
+ die "Can't have both 'useSvnsyncProps' and 'rewriteUUID' ",
+ "options set!\n";
+ }
+
+ my $svnsync;
+ # see if we have it in our config, first:
+ eval {
+ my $section = "svn-remote.$self->{repo_id}";
+
+ my $url = tmp_config('--get', "$section.svnsync-url");
+ ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
+ die "doesn't look right - svn:sync-from-url is '$url'\n";
+
+ my $uuid = tmp_config('--get', "$section.svnsync-uuid");
+ ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
+ die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
+
+ $svnsync = { url => $url, uuid => $uuid }
+ };
+ if ($svnsync && $svnsync->{url} && $svnsync->{uuid}) {
+ return $self->{svnsync} = $svnsync;
+ }
+
+ my $err = "useSvnsyncProps set, but failed to read " .
+ "svnsync property: svn:sync-from-";
+ my $rp = $self->ra->rev_proplist(0);
+
+ my $url = $rp->{'svn:sync-from-url'} or die $err . "url\n";
+ ($url) = ($url =~ m{^([a-z\+]+://\S+)$}) or
+ die "doesn't look right - svn:sync-from-url is '$url'\n";
+
+ my $uuid = $rp->{'svn:sync-from-uuid'} or die $err . "uuid\n";
+ ($uuid) = ($uuid =~ m{^([0-9a-f\-]{30,})$}i) or
+ die "doesn't look right - svn:sync-from-uuid is '$uuid'\n";
+
+ my $section = "svn-remote.$self->{repo_id}";
+ tmp_config('--add', "$section.svnsync-uuid", $uuid);
+ tmp_config('--add', "$section.svnsync-url", $url);
+ return $self->{svnsync} = { url => $url, uuid => $uuid };
+}
+
+# this allows us to memoize our SVN::Ra UUID locally and avoid a
+# remote lookup (useful for 'git svn log').
+sub ra_uuid {
+ my ($self) = @_;
+ unless ($self->{ra_uuid}) {
+ my $key = "svn-remote.$self->{repo_id}.uuid";
+ my $uuid = eval { tmp_config('--get', $key) };
+ if (!$@ && $uuid && $uuid =~ /^([a-f\d\-]{30,})$/i) {
+ $self->{ra_uuid} = $uuid;
+ } else {
+ die "ra_uuid called without URL\n" unless $self->{url};
+ $self->{ra_uuid} = $self->ra->get_uuid;
+ tmp_config('--add', $key, $self->{ra_uuid});
+ }
+ }
+ $self->{ra_uuid};
+}
+
+sub _set_repos_root {
+ my ($self, $repos_root) = @_;
+ my $k = "svn-remote.$self->{repo_id}.reposRoot";
+ $repos_root ||= $self->ra->{repos_root};
+ tmp_config($k, $repos_root);
+ $repos_root;
+}
+
+sub repos_root {
+ my ($self) = @_;
+ my $k = "svn-remote.$self->{repo_id}.reposRoot";
+ eval { tmp_config('--get', $k) } || $self->_set_repos_root;
+}
+
+sub ra {
+ my ($self) = shift;
+ my $ra = Git::SVN::Ra->new($self->{url});
+ $self->_set_repos_root($ra->{repos_root});
+ if ($self->use_svm_props && !$self->{svm}) {
+ if ($self->no_metadata) {
+ die "Can't have both 'noMetadata' and ",
+ "'useSvmProps' options set!\n";
+ } elsif ($self->use_svnsync_props) {
+ die "Can't have both 'useSvnsyncProps' and ",
+ "'useSvmProps' options set!\n";
+ }
+ $ra = $self->_set_svm_vars($ra);
+ $self->{-want_revprops} = 1;
+ }
+ $ra;
+}
+
+# prop_walk(PATH, REV, SUB)
+# -------------------------
+# Recursively traverse PATH at revision REV and invoke SUB for each
+# directory that contains an SVN property. SUB will be invoked as
+# follows: &SUB(gs, path, props); where `gs' is this instance of
+# Git::SVN, `path' is the path to the directory where the properties
+# `props' were found. The `path' is relative to the point of checkout;
+# that is, if url://repo/trunk is the current Git branch and that
+# directory contains a sub-directory `d', SUB will be invoked with `/d/'
+# as `path' (note the trailing `/').
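+#
+# An illustrative (hypothetical) invocation, printing any svn:ignore
+# property found below the current branch:
+#
+#	$gs->prop_walk($gs->{path}, $rev, sub {
+#		my ($gs, $path, $props) = @_;
+#		print "# $path\n$props->{'svn:ignore'}\n"
+#			if defined $props->{'svn:ignore'};
+#	});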
+sub prop_walk {
+ my ($self, $path, $rev, $sub) = @_;
+
+ $path =~ s#^/##;
+ my ($dirent, undef, $props) = $self->ra->get_dir($path, $rev);
+ $path =~ s#^/*#/#g;
+ my $p = $path;
+ # Strip the irrelevant part of the path.
+ $p =~ s#^/+\Q$self->{path}\E(/|$)#/#;
+ # Ensure the path is terminated by a `/'.
+ $p =~ s#/*$#/#;
+
+ # The properties contain all the internal SVN stuff nobody
+ # (usually) cares about.
+ my $interesting_props = 0;
+ foreach (keys %{$props}) {
+ # If it doesn't start with `svn:', it must be a
+ # user-defined property.
+ ++$interesting_props and next if $_ !~ /^svn:/;
+ # FIXME: Fragile, if SVN adds new public properties,
+ # this needs to be updated.
+ ++$interesting_props if /^svn:(?:ignore|keywords|executable
+ |eol-style|mime-type
+ |externals|needs-lock)$/x;
+ }
+ &$sub($self, $p, $props) if $interesting_props;
+
+ foreach (sort keys %$dirent) {
+ next if $dirent->{$_}->{kind} != $SVN::Node::dir;
+ $self->prop_walk($self->{path} . $p . $_, $rev, $sub);
+ }
+}
+
+sub last_rev { ($_[0]->last_rev_commit)[0] }
+sub last_commit { ($_[0]->last_rev_commit)[1] }
+
+# returns the newest SVN revision number and newest commit SHA1
+sub last_rev_commit {
+ my ($self) = @_;
+ if (defined $self->{last_rev} && defined $self->{last_commit}) {
+ return ($self->{last_rev}, $self->{last_commit});
+ }
+ my $c = ::verify_ref($self->refname.'^0');
+ if ($c && !$self->use_svm_props && !$self->no_metadata) {
+ my $rev = (::cmt_metadata($c))[1];
+ if (defined $rev) {
+ ($self->{last_rev}, $self->{last_commit}) = ($rev, $c);
+ return ($rev, $c);
+ }
+ }
+ my $map_path = $self->map_path;
+ unless (-e $map_path) {
+ ($self->{last_rev}, $self->{last_commit}) = (undef, undef);
+ return (undef, undef);
+ }
+ my ($rev, $commit) = $self->rev_map_max(1);
+ ($self->{last_rev}, $self->{last_commit}) = ($rev, $commit);
+ return ($rev, $commit);
+}
+
+sub get_fetch_range {
+ my ($self, $min, $max) = @_;
+ $max ||= $self->ra->get_latest_revnum;
+ $min ||= $self->rev_map_max;
+ (++$min, $max);
+}
+
+sub tmp_config {
+ my (@args) = @_;
+ my $old_def_config = "$ENV{GIT_DIR}/svn/config";
+ my $config = "$ENV{GIT_DIR}/svn/.metadata";
+ if (! -f $config && -f $old_def_config) {
+ rename $old_def_config, $config or
+ die "Failed rename $old_def_config => $config: $!\n";
+ }
+ my $old_config = $ENV{GIT_CONFIG};
+ $ENV{GIT_CONFIG} = $config;
+ $@ = undef;
+ my @ret = eval {
+ unless (-f $config) {
+ mkfile($config);
+ open my $fh, '>', $config or
+ die "Can't open $config: $!\n";
+ print $fh "; This file is used internally by ",
+ "git-svn\n" or die
+ "Couldn't write to $config: $!\n";
+ print $fh "; You should not have to edit it\n" or
+ die "Couldn't write to $config: $!\n";
+ close $fh or die "Couldn't close $config: $!\n";
+ }
+ command('config', @args);
+ };
+ my $err = $@;
+ if (defined $old_config) {
+ $ENV{GIT_CONFIG} = $old_config;
+ } else {
+ delete $ENV{GIT_CONFIG};
+ }
+ die $err if $err;
+ wantarray ? @ret : $ret[0];
+}
+
+sub tmp_index_do {
+ my ($self, $sub) = @_;
+ my $old_index = $ENV{GIT_INDEX_FILE};
+ $ENV{GIT_INDEX_FILE} = $self->{index};
+ $@ = undef;
+ my @ret = eval {
+ my ($dir, $base) = ($self->{index} =~ m#^(.*?)/?([^/]+)$#);
+ mkpath([$dir]) unless -d $dir;
+ &$sub;
+ };
+ my $err = $@;
+ if (defined $old_index) {
+ $ENV{GIT_INDEX_FILE} = $old_index;
+ } else {
+ delete $ENV{GIT_INDEX_FILE};
+ }
+ die $err if $err;
+ wantarray ? @ret : $ret[0];
+}
+
+sub assert_index_clean {
+ my ($self, $treeish) = @_;
+
+ $self->tmp_index_do(sub {
+ command_noisy('read-tree', $treeish) unless -e $self->{index};
+ my $x = command_oneline('write-tree');
+ my ($y) = (command(qw/cat-file commit/, $treeish) =~
+ /^tree ($::sha1)/mo);
+ return if $y eq $x;
+
+ warn "Index mismatch: $y != $x\nrereading $treeish\n";
+ unlink $self->{index} or die "unlink $self->{index}: $!\n";
+ command_noisy('read-tree', $treeish);
+ $x = command_oneline('write-tree');
+ if ($y ne $x) {
+ fatal "trees ($treeish) $y != $x\n",
+ "Something is seriously wrong...";
+ }
+ });
+}
+
+sub get_commit_parents {
+ my ($self, $log_entry) = @_;
+ my (%seen, @ret, @tmp);
+ # legacy support for 'set-tree'; this is only used by set_tree_cb:
+ if (my $ip = $self->{inject_parents}) {
+ if (my $commit = delete $ip->{$log_entry->{revision}}) {
+ push @tmp, $commit;
+ }
+ }
+ if (my $cur = ::verify_ref($self->refname.'^0')) {
+ push @tmp, $cur;
+ }
+ if (my $ipd = $self->{inject_parents_dcommit}) {
+ if (my $commit = delete $ipd->{$log_entry->{revision}}) {
+ push @tmp, @$commit;
+ }
+ }
+ push @tmp, $_ foreach (@{$log_entry->{parents}}, @tmp);
+ while (my $p = shift @tmp) {
+ next if $seen{$p};
+ $seen{$p} = 1;
+ push @ret, $p;
+ }
+ @ret;
+}
+
+sub rewrite_root {
+ my ($self) = @_;
+ return $self->{-rewrite_root} if exists $self->{-rewrite_root};
+ my $k = "svn-remote.$self->{repo_id}.rewriteRoot";
+ my $rwr = eval { command_oneline(qw/config --get/, $k) };
+ if ($rwr) {
+ $rwr =~ s#/+$##;
+ if ($rwr !~ m#^[a-z\+]+://#) {
+ die "$rwr is not a valid URL (key: $k)\n";
+ }
+ }
+ $self->{-rewrite_root} = $rwr;
+}
+
+sub rewrite_uuid {
+ my ($self) = @_;
+ return $self->{-rewrite_uuid} if exists $self->{-rewrite_uuid};
+ my $k = "svn-remote.$self->{repo_id}.rewriteUUID";
+ my $rwid = eval { command_oneline(qw/config --get/, $k) };
+ if ($rwid) {
+ $rwid =~ s#/+$##;
+ if ($rwid !~ m#^[a-f0-9]{8}-(?:[a-f0-9]{4}-){3}[a-f0-9]{12}$#) {
+ die "$rwid is not a valid UUID (key: $k)\n";
+ }
+ }
+ $self->{-rewrite_uuid} = $rwid;
+}
+
+sub metadata_url {
+ my ($self) = @_;
+ ($self->rewrite_root || $self->{url}) .
+ (length $self->{path} ? '/' . $self->{path} : '');
+}
+
+sub full_url {
+ my ($self) = @_;
+ $self->{url} . (length $self->{path} ? '/' . $self->{path} : '');
+}
+
+sub full_pushurl {
+ my ($self) = @_;
+ if ($self->{pushurl}) {
+ return $self->{pushurl} . (length $self->{path} ? '/' .
+ $self->{path} : '');
+ } else {
+ return $self->full_url;
+ }
+}
+
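+# Point the GIT_AUTHOR_*/GIT_COMMITTER_* environment at the values from
+# the SVN log entry and return the previous environment so that
+# restore_commit_header_env() can put it back after the commit is made.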
+sub set_commit_header_env {
+ my ($log_entry) = @_;
+ my %env;
+ foreach my $ned (qw/NAME EMAIL DATE/) {
+ foreach my $ac (qw/AUTHOR COMMITTER/) {
+ $env{"GIT_${ac}_${ned}"} = $ENV{"GIT_${ac}_${ned}"};
+ }
+ }
+
+ $ENV{GIT_AUTHOR_NAME} = $log_entry->{name};
+ $ENV{GIT_AUTHOR_EMAIL} = $log_entry->{email};
+ $ENV{GIT_AUTHOR_DATE} = $ENV{GIT_COMMITTER_DATE} = $log_entry->{date};
+
+ $ENV{GIT_COMMITTER_NAME} = (defined $log_entry->{commit_name})
+ ? $log_entry->{commit_name}
+ : $log_entry->{name};
+ $ENV{GIT_COMMITTER_EMAIL} = (defined $log_entry->{commit_email})
+ ? $log_entry->{commit_email}
+ : $log_entry->{email};
+ \%env;
+}
+
+sub restore_commit_header_env {
+ my ($env) = @_;
+ foreach my $ned (qw/NAME EMAIL DATE/) {
+ foreach my $ac (qw/AUTHOR COMMITTER/) {
+ my $k = "GIT_${ac}_${ned}";
+ if (defined $env->{$k}) {
+ $ENV{$k} = $env->{$k};
+ } else {
+ delete $ENV{$k};
+ }
+ }
+ }
+}
+
+sub gc {
+ command_noisy('gc', '--auto');
+};
+
+sub do_git_commit {
+ my ($self, $log_entry) = @_;
+ my $lr = $self->last_rev;
+ if (defined $lr && $lr >= $log_entry->{revision}) {
+ die "Last fetched revision of ", $self->refname,
+ " was r$lr, but we are about to fetch: ",
+ "r$log_entry->{revision}!\n";
+ }
+ if (my $c = $self->rev_map_get($log_entry->{revision})) {
+ croak "$log_entry->{revision} = $c already exists! ",
+ "Why are we refetching it?\n";
+ }
+ my $old_env = set_commit_header_env($log_entry);
+ my $tree = $log_entry->{tree};
+ if (!defined $tree) {
+ $tree = $self->tmp_index_do(sub {
+ command_oneline('write-tree') });
+ }
+ die "Tree is not a valid sha1: $tree\n" if $tree !~ /^$::sha1$/o;
+
+ my @exec = ('git', 'commit-tree', $tree);
+ foreach ($self->get_commit_parents($log_entry)) {
+ push @exec, '-p', $_;
+ }
+ defined(my $pid = open3(my $msg_fh, my $out_fh, '>&STDERR', @exec))
+ or croak $!;
+ binmode $msg_fh;
+
+ # we always get UTF-8 from SVN, but we may want our commits in
+ # a different encoding.
+ if (my $enc = Git::config('i18n.commitencoding')) {
+ require Encode;
+ Encode::from_to($log_entry->{log}, 'UTF-8', $enc);
+ }
+ print $msg_fh $log_entry->{log} or croak $!;
+ restore_commit_header_env($old_env);
+ unless ($self->no_metadata) {
+ print $msg_fh "\ngit-svn-id: $log_entry->{metadata}\n"
+ or croak $!;
+ }
+ $msg_fh->flush == 0 or croak $!;
+ close $msg_fh or croak $!;
+ chomp(my $commit = do { local $/; <$out_fh> });
+ close $out_fh or croak $!;
+ waitpid $pid, 0;
+ croak $? if $?;
+ if ($commit !~ /^$::sha1$/o) {
+ die "Failed to commit, invalid sha1: $commit\n";
+ }
+
+ $self->rev_map_set($log_entry->{revision}, $commit, 1);
+
+ $self->{last_rev} = $log_entry->{revision};
+ $self->{last_commit} = $commit;
+ print "r$log_entry->{revision}" unless $::_q > 1;
+ if (defined $log_entry->{svm_revision}) {
+ print " (\@$log_entry->{svm_revision})" unless $::_q > 1;
+ $self->rev_map_set($log_entry->{svm_revision}, $commit,
+ 0, $self->svm_uuid);
+ }
+ print " = $commit ($self->{ref_id})\n" unless $::_q > 1;
+ if (--$_gc_nr == 0) {
+ $_gc_nr = $_gc_period;
+ gc();
+ }
+ return $commit;
+}
+
+sub match_paths {
+ my ($self, $paths, $r) = @_;
+ return 1 if $self->{path} eq '';
+ if (my $path = $paths->{"/$self->{path}"}) {
+ return ($path->{action} eq 'D') ? 0 : 1;
+ }
+ $self->{path_regex} ||= qr/^\/\Q$self->{path}\E\//;
+ if (grep /$self->{path_regex}/, keys %$paths) {
+ return 1;
+ }
+ my $c = '';
+ foreach (split m#/#, $self->{path}) {
+ $c .= "/$_";
+ next unless ($paths->{$c} &&
+ ($paths->{$c}->{action} =~ /^[AR]$/));
+ if ($self->ra->check_path($self->{path}, $r) ==
+ $SVN::Node::dir) {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+sub find_parent_branch {
+ my ($self, $paths, $rev) = @_;
+ return undef unless $self->follow_parent;
+ unless (defined $paths) {
+ my $err_handler = $SVN::Error::handler;
+ $SVN::Error::handler = \&Git::SVN::Ra::skip_unknown_revs;
+ $self->ra->get_log([$self->{path}], $rev, $rev, 0, 1, 1,
+ sub { $paths = $_[0] });
+ $SVN::Error::handler = $err_handler;
+ }
+ return undef unless defined $paths;
+
+ # look for a parent from another branch:
+ my @b_path_components = split m#/#, $self->{path};
+ my @a_path_components;
+ my $i;
+ while (@b_path_components) {
+ $i = $paths->{'/'.join('/', @b_path_components)};
+ last if $i && defined $i->{copyfrom_path};
+ unshift(@a_path_components, pop(@b_path_components));
+ }
+ return undef unless defined $i && defined $i->{copyfrom_path};
+ my $branch_from = $i->{copyfrom_path};
+ if (@a_path_components) {
+ print STDERR "branch_from: $branch_from => ";
+ $branch_from .= '/'.join('/', @a_path_components);
+ print STDERR $branch_from, "\n";
+ }
+ my $r = $i->{copyfrom_rev};
+ my $repos_root = $self->ra->{repos_root};
+ my $url = $self->ra->{url};
+ my $new_url = $url . $branch_from;
+ print STDERR "Found possible branch point: ",
+ "$new_url => ", $self->full_url, ", $r\n"
+ unless $::_q > 1;
+ $branch_from =~ s#^/##;
+ my $gs = $self->other_gs($new_url, $url,
+ $branch_from, $r, $self->{ref_id});
+ my ($r0, $parent) = $gs->find_rev_before($r, 1);
+ {
+ my ($base, $head);
+ if (!defined $r0 || !defined $parent) {
+ ($base, $head) = parse_revision_argument(0, $r);
+ } else {
+ if ($r0 < $r) {
+ $gs->ra->get_log([$gs->{path}], $r0 + 1, $r, 1,
+ 0, 1, sub { $base = $_[1] - 1 });
+ }
+ }
+ if (defined $base && $base <= $r) {
+ $gs->fetch($base, $r);
+ }
+ ($r0, $parent) = $gs->find_rev_before($r, 1);
+ }
+ if (defined $r0 && defined $parent) {
+ print STDERR "Found branch parent: ($self->{ref_id}) $parent\n"
+ unless $::_q > 1;
+ my $ed;
+ if ($self->ra->can_do_switch) {
+ $self->assert_index_clean($parent);
+ print STDERR "Following parent with do_switch\n"
+ unless $::_q > 1;
+ # do_switch works with svn/trunk >= r22312, but that
+ # is not included with SVN 1.4.3 (the latest version
+ # at the moment), so we can't rely on it
+ $self->{last_rev} = $r0;
+ $self->{last_commit} = $parent;
+ $ed = Git::SVN::Fetcher->new($self, $gs->{path});
+ $gs->ra->gs_do_switch($r0, $rev, $gs,
+ $self->full_url, $ed)
+ or die "SVN connection failed somewhere...\n";
+ } elsif ($self->ra->trees_match($new_url, $r0,
+ $self->full_url, $rev)) {
+ print STDERR "Trees match:\n",
+ " $new_url\@$r0\n",
+ " ${\$self->full_url}\@$rev\n",
+ "Following parent with no changes\n"
+ unless $::_q > 1;
+ $self->tmp_index_do(sub {
+ command_noisy('read-tree', $parent);
+ });
+ $self->{last_commit} = $parent;
+ } else {
+ print STDERR "Following parent with do_update\n"
+ unless $::_q > 1;
+ $ed = Git::SVN::Fetcher->new($self);
+ $self->ra->gs_do_update($rev, $rev, $self, $ed)
+ or die "SVN connection failed somewhere...\n";
+ }
+ print STDERR "Successfully followed parent\n" unless $::_q > 1;
+ return $self->make_log_entry($rev, [$parent], $ed);
+ }
+ return undef;
+}
+
+sub do_fetch {
+ my ($self, $paths, $rev) = @_;
+ my $ed;
+ my ($last_rev, @parents);
+ if (my $lc = $self->last_commit) {
+ # we can have a branch that was deleted, then re-added
+ # under the same name but copied from another path, in
+ # which case we'll have multiple parents (we don't
+ # want to break the original ref, nor lose copypath info):
+ if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
+ push @{$log_entry->{parents}}, $lc;
+ return $log_entry;
+ }
+ $ed = Git::SVN::Fetcher->new($self);
+ $last_rev = $self->{last_rev};
+ $ed->{c} = $lc;
+ @parents = ($lc);
+ } else {
+ $last_rev = $rev;
+ if (my $log_entry = $self->find_parent_branch($paths, $rev)) {
+ return $log_entry;
+ }
+ $ed = Git::SVN::Fetcher->new($self);
+ }
+ unless ($self->ra->gs_do_update($last_rev, $rev, $self, $ed)) {
+ die "SVN connection failed somewhere...\n";
+ }
+ $self->make_log_entry($rev, \@parents, $ed);
+}
+
+sub mkemptydirs {
+ my ($self, $r) = @_;
+
+ sub scan {
+ my ($r, $empty_dirs, $line) = @_;
+ if (defined $r && $line =~ /^r(\d+)$/) {
+ return 0 if $1 > $r;
+ } elsif ($line =~ /^ \+empty_dir: (.+)$/) {
+ $empty_dirs->{$1} = 1;
+ } elsif ($line =~ /^ \-empty_dir: (.+)$/) {
+ my @d = grep {m[^\Q$1\E(/|$)]} (keys %$empty_dirs);
+ delete @$empty_dirs{@d};
+ }
+ 1; # continue
+ };
+
+ my %empty_dirs = ();
+ my $gz_file = "$self->{dir}/unhandled.log.gz";
+ if (-f $gz_file) {
+ if (!can_compress()) {
+ warn "Compress::Zlib could not be found; ",
+ "empty directories in $gz_file will not be read\n";
+ } else {
+ my $gz = Compress::Zlib::gzopen($gz_file, "rb") or
+ die "Unable to open $gz_file: $!\n";
+ my $line;
+ while ($gz->gzreadline($line) > 0) {
+ scan($r, \%empty_dirs, $line) or last;
+ }
+ $gz->gzclose;
+ }
+ }
+
+ if (open my $fh, '<', "$self->{dir}/unhandled.log") {
+ binmode $fh or croak "binmode: $!";
+ while (<$fh>) {
+ scan($r, \%empty_dirs, $_) or last;
+ }
+ close $fh;
+ }
+
+ my $strip = qr/\A\Q$self->{path}\E(?:\/|$)/;
+ foreach my $d (sort keys %empty_dirs) {
+ $d = uri_decode($d);
+ $d =~ s/$strip//;
+ next unless length($d);
+ next if -d $d;
+ if (-e $d) {
+ warn "$d exists but is not a directory\n";
+ } else {
+ print "creating empty directory: $d\n";
+ mkpath([$d]);
+ }
+ }
+}
+
+sub get_untracked {
+ my ($self, $ed) = @_;
+ my @out;
+ my $h = $ed->{empty};
+ foreach (sort keys %$h) {
+ my $act = $h->{$_} ? '+empty_dir' : '-empty_dir';
+ push @out, " $act: " . uri_encode($_);
+ warn "W: $act: $_\n";
+ }
+ foreach my $t (qw/dir_prop file_prop/) {
+ $h = $ed->{$t} or next;
+ foreach my $path (sort keys %$h) {
+ my $ppath = $path eq '' ? '.' : $path;
+ foreach my $prop (sort keys %{$h->{$path}}) {
+ next if $SKIP_PROP{$prop};
+ my $v = $h->{$path}->{$prop};
+ my $t_ppath_prop = "$t: " .
+ uri_encode($ppath) . ' ' .
+ uri_encode($prop);
+ if (defined $v) {
+ push @out, " +$t_ppath_prop " .
+ uri_encode($v);
+ } else {
+ push @out, " -$t_ppath_prop";
+ }
+ }
+ }
+ }
+ foreach my $t (qw/absent_file absent_directory/) {
+ $h = $ed->{$t} or next;
+ foreach my $parent (sort keys %$h) {
+ foreach my $path (sort @{$h->{$parent}}) {
+ push @out, " $t: " .
+ uri_encode("$parent/$path");
+ warn "W: $t: $parent/$path ",
+ "Insufficient permissions?\n";
+ }
+ }
+ }
+ \@out;
+}
+
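+# get_tz([EPOCH])
+# ---------------
+# Returns the local timezone offset in [+-]HHMM form for EPOCH (default:
+# the current time), e.g. '-0500' when TZ is US Eastern Standard Time.
+# Computed from the difference between gmtime() and timelocal() rather
+# than strftime('%z'), which some platforms mishandle.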
+sub get_tz {
+	# some systems don't handle %z, or mishandle it, so be creative.
+ my $t = shift || time;
+ my $gm = timelocal(gmtime($t));
+ my $sign = qw( + + - )[ $t <=> $gm ];
+ return sprintf("%s%02d%02d", $sign, (gmtime(abs($t - $gm)))[2,1]);
+}
+
+# parse_svn_date(DATE)
+# --------------------
+# Given a date (in UTC) from Subversion, return a string in the format
+# "<TZ Offset> <local date/time>" that Git will use.
+#
+# By default the parsed date will be in UTC; if $Git::SVN::_localtime
+# is true we'll convert it to the local timezone instead.
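+#
+# For example, with $Git::SVN::_localtime unset:
+#   parse_svn_date('2007-01-14T01:23:45.678901Z')
+# returns '+0000 2007-01-14 01:23:45'.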
+sub parse_svn_date {
+ my $date = shift || return '+0000 1970-01-01 00:00:00';
+ my ($Y,$m,$d,$H,$M,$S) = ($date =~ /^(\d{4})\-(\d\d)\-(\d\d)T
+ (\d\d)\:(\d\d)\:(\d\d)\.\d*Z$/x) or
+ croak "Unable to parse date: $date\n";
+ my $parsed_date; # Set next.
+
+ if ($Git::SVN::_localtime) {
+ # Translate the Subversion datetime to an epoch time.
+ # Begin by switching ourselves to $date's timezone, UTC.
+ my $old_env_TZ = $ENV{TZ};
+ $ENV{TZ} = 'UTC';
+
+ my $epoch_in_UTC =
+ POSIX::strftime('%s', $S, $M, $H, $d, $m - 1, $Y - 1900);
+
+ # Determine our local timezone (including DST) at the
+ # time of $epoch_in_UTC. $Git::SVN::Log::TZ stored the
+ # value of TZ, if any, at the time we were run.
+ if (defined $Git::SVN::Log::TZ) {
+ $ENV{TZ} = $Git::SVN::Log::TZ;
+ } else {
+ delete $ENV{TZ};
+ }
+
+ my $our_TZ = get_tz();
+
+ # This converts $epoch_in_UTC into our local timezone.
+ my ($sec, $min, $hour, $mday, $mon, $year,
+ $wday, $yday, $isdst) = localtime($epoch_in_UTC);
+
+ $parsed_date = sprintf('%s %04d-%02d-%02d %02d:%02d:%02d',
+ $our_TZ, $year + 1900, $mon + 1,
+ $mday, $hour, $min, $sec);
+
+ # Reset us to the timezone in effect when we entered
+ # this routine.
+ if (defined $old_env_TZ) {
+ $ENV{TZ} = $old_env_TZ;
+ } else {
+ delete $ENV{TZ};
+ }
+ } else {
+ $parsed_date = "+0000 $Y-$m-$d $H:$M:$S";
+ }
+
+ return $parsed_date;
+}
+
+sub other_gs {
+ my ($self, $new_url, $url,
+ $branch_from, $r, $old_ref_id) = @_;
+ my $gs = Git::SVN->find_by_url($new_url, $url, $branch_from);
+ unless ($gs) {
+ my $ref_id = $old_ref_id;
+ $ref_id =~ s/\@\d+-*$//;
+ $ref_id .= "\@$r";
+ # just grow a tail if we're not unique enough :x
+ $ref_id .= '-' while find_ref($ref_id);
+ my ($u, $p, $repo_id) = ($new_url, '', $ref_id);
+ if ($u =~ s#^\Q$url\E(/|$)##) {
+ $p = $u;
+ $u = $url;
+ $repo_id = $self->{repo_id};
+ }
+ while (1) {
+ # It is possible to tag two different subdirectories at
+ # the same revision. If the url for an existing ref
+ # does not match, we must either find a ref with a
+ # matching url or create a new ref by growing a tail.
+ $gs = Git::SVN->init($u, $p, $repo_id, $ref_id, 1);
+ my (undef, $max_commit) = $gs->rev_map_max(1);
+ last if (!$max_commit);
+ my ($url) = ::cmt_metadata($max_commit);
+ last if ($url eq $gs->metadata_url);
+ $ref_id .= '-';
+ }
+ print STDERR "Initializing parent: $ref_id\n" unless $::_q > 1;
+ }
+ $gs
+}
+
+sub call_authors_prog {
+ my ($orig_author) = @_;
+ $orig_author = command_oneline('rev-parse', '--sq-quote', $orig_author);
+ my $author = `$::_authors_prog $orig_author`;
+ if ($? != 0) {
+ die "$::_authors_prog failed with exit code $?\n"
+ }
+ if ($author =~ /^\s*(.+?)\s*<(.*)>\s*$/) {
+ my ($name, $email) = ($1, $2);
+ $email = undef if length $2 == 0;
+ return [$name, $email];
+ } else {
+ die "Author: $orig_author: $::_authors_prog returned "
+ . "invalid author format: $author\n";
+ }
+}
+
+sub check_author {
+ my ($author) = @_;
+ if (!defined $author || length $author == 0) {
+ $author = '(no author)';
+ }
+ if (!defined $::users{$author}) {
+ if (defined $::_authors_prog) {
+ $::users{$author} = call_authors_prog($author);
+ } elsif (defined $::_authors) {
+ die "Author: $author not defined in $::_authors file\n";
+ }
+ }
+ $author;
+}
+
+sub find_extra_svk_parents {
+ my ($self, $ed, $tickets, $parents) = @_;
+ # aha! svk:merge property changed...
+ my @tickets = split "\n", $tickets;
+ my @known_parents;
+ for my $ticket ( @tickets ) {
+ my ($uuid, $path, $rev) = split /:/, $ticket;
+ if ( $uuid eq $self->ra_uuid ) {
+ my $url = $self->{url};
+ my $repos_root = $url;
+ my $branch_from = $path;
+ $branch_from =~ s{^/}{};
+ my $gs = $self->other_gs($repos_root."/".$branch_from,
+ $url,
+ $branch_from,
+ $rev,
+ $self->{ref_id});
+ if ( my $commit = $gs->rev_map_get($rev, $uuid) ) {
+ # wahey! we found it, but it might be
+ # an old one (!)
+ push @known_parents, [ $rev, $commit ];
+ }
+ }
+ }
+	# Ordering matters: merge tickets with the highest revision
+	# numbers come first, as they may account for later merge
+	# ticket additions or changes.
+ @known_parents = map {$_->[1]} sort {$b->[0] <=> $a->[0]} @known_parents;
+ for my $parent ( @known_parents ) {
+ my @cmd = ('rev-list', $parent, map { "^$_" } @$parents );
+ my ($msg_fh, $ctx) = command_output_pipe(@cmd);
+ my $new;
+ while ( <$msg_fh> ) {
+			$new = 1;
+			last;
+ }
+ command_close_pipe($msg_fh, $ctx);
+ if ( $new ) {
+ print STDERR
+ "Found merge parent (svk:merge ticket): $parent\n";
+ push @$parents, $parent;
+ }
+ }
+}
+
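+# lookup_svn_merge(UUID, URL, MERGE)
+# ----------------------------------
+# MERGE is a single svn:mergeinfo line such as '/branches/foo:10-20,25'.
+# Returns the git commit of the highest merged revision (the "tip") plus
+# a list of rev-list range expressions covering the merged revisions, or
+# an empty list if no rev map exists for the source path.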
+sub lookup_svn_merge {
+ my $uuid = shift;
+ my $url = shift;
+ my $merge = shift;
+
+ my ($source, $revs) = split ":", $merge;
+ my $path = $source;
+ $path =~ s{^/}{};
+ my $gs = Git::SVN->find_by_url($url.$source, $url, $path);
+ if ( !$gs ) {
+ warn "Couldn't find revmap for $url$source\n";
+ return;
+ }
+ my @ranges = split ",", $revs;
+ my ($tip, $tip_commit);
+ my @merged_commit_ranges;
+ # find the tip
+ for my $range ( @ranges ) {
+ my ($bottom, $top) = split "-", $range;
+ $top ||= $bottom;
+ my $bottom_commit = $gs->find_rev_after( $bottom, 1, $top );
+ my $top_commit = $gs->find_rev_before( $top, 1, $bottom );
+
+ unless ($top_commit and $bottom_commit) {
+			warn "W: unknown path/rev in svn:mergeinfo "
+ ."dirprop: $source:$range\n";
+ next;
+ }
+
+ if (scalar(command('rev-parse', "$bottom_commit^@"))) {
+ push @merged_commit_ranges,
+ "$bottom_commit^..$top_commit";
+ } else {
+ push @merged_commit_ranges, "$top_commit";
+ }
+
+ if ( !defined $tip or $top > $tip ) {
+ $tip = $top;
+ $tip_commit = $top_commit;
+ }
+ }
+ return ($tip_commit, @merged_commit_ranges);
+}
+
+sub _rev_list {
+ my ($msg_fh, $ctx) = command_output_pipe(
+ "rev-list", @_,
+ );
+ my @rv;
+ while ( <$msg_fh> ) {
+ chomp;
+ push @rv, $_;
+ }
+ command_close_pipe($msg_fh, $ctx);
+ @rv;
+}
+
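+# check_cherry_pick(BASE, TIP, PARENTS, RANGES...)
+# ------------------------------------------------
+# Returns the non-merge, non-empty commits reachable from TIP (but not
+# from BASE or PARENTS) that are not covered by any of the merged-commit
+# RANGES.  A non-empty result means the svn:mergeinfo only describes a
+# partial (cherry-picked) merge.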
+sub check_cherry_pick {
+ my $base = shift;
+ my $tip = shift;
+ my $parents = shift;
+ my @ranges = @_;
+ my %commits = map { $_ => 1 }
+ _rev_list("--no-merges", $tip, "--not", $base, @$parents, "--");
+ for my $range ( @ranges ) {
+ delete @commits{_rev_list($range, "--")};
+ }
+ for my $commit (keys %commits) {
+ if (has_no_changes($commit)) {
+ delete $commits{$commit};
+ }
+ }
+ return (keys %commits);
+}
+
+sub has_no_changes {
+ my $commit = shift;
+
+ my @revs = split / /, command_oneline(
+ qw(rev-list --parents -1 -m), $commit);
+
+ # Commits with no parents, e.g. the start of a partial branch,
+ # have changes by definition.
+ return 1 if (@revs < 2);
+
+	# Commits with multiple parents, e.g. a merge, have no changes
+ # by definition.
+ return 0 if (@revs > 2);
+
+ return (command_oneline("rev-parse", "$commit^{tree}") eq
+ command_oneline("rev-parse", "$commit~1^{tree}"));
+}
+
+sub tie_for_persistent_memoization {
+ my $hash = shift;
+ my $path = shift;
+
+ if ($can_use_yaml) {
+ tie %$hash => 'Git::SVN::Memoize::YAML', "$path.yaml";
+ } else {
+ tie %$hash => 'Memoize::Storable', "$path.db", 'nstore';
+ }
+}
+
+# The GIT_DIR environment variable is not always set until after the command
+# line arguments are processed, so we can't memoize in a BEGIN block.
+{
+ my $memoized = 0;
+
+ sub memoize_svn_mergeinfo_functions {
+ return if $memoized;
+ $memoized = 1;
+
+ my $cache_path = "$ENV{GIT_DIR}/svn/.caches/";
+ mkpath([$cache_path]) unless -d $cache_path;
+
+ my %lookup_svn_merge_cache;
+ my %check_cherry_pick_cache;
+ my %has_no_changes_cache;
+
+ tie_for_persistent_memoization(\%lookup_svn_merge_cache,
+ "$cache_path/lookup_svn_merge");
+ memoize 'lookup_svn_merge',
+ SCALAR_CACHE => 'FAULT',
+ LIST_CACHE => ['HASH' => \%lookup_svn_merge_cache],
+ ;
+
+ tie_for_persistent_memoization(\%check_cherry_pick_cache,
+ "$cache_path/check_cherry_pick");
+ memoize 'check_cherry_pick',
+ SCALAR_CACHE => 'FAULT',
+ LIST_CACHE => ['HASH' => \%check_cherry_pick_cache],
+ ;
+
+ tie_for_persistent_memoization(\%has_no_changes_cache,
+ "$cache_path/has_no_changes");
+ memoize 'has_no_changes',
+ SCALAR_CACHE => ['HASH' => \%has_no_changes_cache],
+ LIST_CACHE => 'FAULT',
+ ;
+ }
+
+ sub unmemoize_svn_mergeinfo_functions {
+ return if not $memoized;
+ $memoized = 0;
+
+ Memoize::unmemoize 'lookup_svn_merge';
+ Memoize::unmemoize 'check_cherry_pick';
+ Memoize::unmemoize 'has_no_changes';
+ }
+
+ Memoize::memoize 'Git::SVN::repos_root';
+}
+
+END {
+ # Force cache writeout explicitly instead of waiting for
+ # global destruction to avoid segfault in Storable:
+ # http://rt.cpan.org/Public/Bug/Display.html?id=36087
+ unmemoize_svn_mergeinfo_functions();
+}
+
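+# parents_exclude(PARENTS, COMMITS...)
+# ------------------------------------
+# Returns the subset of COMMITS that are not already reachable from
+# PARENTS, peeling them off one at a time with repeated
+# 'rev-list -1 COMMITS --not PARENTS' calls.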
+sub parents_exclude {
+ my $parents = shift;
+ my @commits = @_;
+ return unless @commits;
+
+ my @excluded;
+ my $excluded;
+ do {
+ my @cmd = ('rev-list', "-1", @commits, "--not", @$parents );
+ $excluded = command_oneline(@cmd);
+ if ( $excluded ) {
+ my @new;
+ my $found;
+ for my $commit ( @commits ) {
+ if ( $commit eq $excluded ) {
+ push @excluded, $commit;
+ $found++;
+ last;
+ }
+ else {
+ push @new, $commit;
+ }
+ }
+ die "saw commit '$excluded' in rev-list output, "
+ ."but we didn't ask for that commit (wanted: @commits --not @$parents)"
+ unless $found;
+ @commits = @new;
+ }
+ }
+ while ($excluded and @commits);
+
+ return @excluded;
+}
+
+
+# note: this function should only be called if the various dirprops
+# have actually changed
+sub find_extra_svn_parents {
+ my ($self, $ed, $mergeinfo, $parents) = @_;
+	# the svn:mergeinfo property changed...
+
+ memoize_svn_mergeinfo_functions();
+
+	# We first search for merged tips which are not in our
+	# history. Then, we figure out which git revisions are in
+	# that tip, but not in this revision. If all of those revisions
+	# are covered by the merge info, we can add the tip as a parent.
+ my @merges = split "\n", $mergeinfo;
+ my @merge_tips;
+ my $url = $self->{url};
+ my $uuid = $self->ra_uuid;
+ my %ranges;
+ for my $merge ( @merges ) {
+ my ($tip_commit, @ranges) =
+ lookup_svn_merge( $uuid, $url, $merge );
+ unless (!$tip_commit or
+ grep { $_ eq $tip_commit } @$parents ) {
+ push @merge_tips, $tip_commit;
+ $ranges{$tip_commit} = \@ranges;
+ } else {
+ push @merge_tips, undef;
+ }
+ }
+
+ my %excluded = map { $_ => 1 }
+ parents_exclude($parents, grep { defined } @merge_tips);
+
+ # check merge tips for new parents
+ my @new_parents;
+ for my $merge_tip ( @merge_tips ) {
+ my $spec = shift @merges;
+ next unless $merge_tip and $excluded{$merge_tip};
+
+ my $ranges = $ranges{$merge_tip};
+
+ # check out 'new' tips
+ my $merge_base;
+ eval {
+ $merge_base = command_oneline(
+ "merge-base",
+ @$parents, $merge_tip,
+ );
+ };
+ if ($@) {
+ die "An error occurred during merge-base"
+ unless $@->isa("Git::Error::Command");
+
+ warn "W: Cannot find common ancestor between ".
+ "@$parents and $merge_tip. Ignoring merge info.\n";
+ next;
+ }
+
+ # double check that there are no missing non-merge commits
+ my (@incomplete) = check_cherry_pick(
+ $merge_base, $merge_tip,
+ $parents,
+ @$ranges,
+ );
+
+ if ( @incomplete ) {
+			warn "W: svn cherry-pick ignored ($spec) - missing "
+				.@incomplete." commit(s) (e.g. $incomplete[0])\n";
+ } else {
+ warn
+ "Found merge parent (svn:mergeinfo prop): ",
+ $merge_tip, "\n";
+ push @new_parents, $merge_tip;
+ }
+ }
+
+ # cater for merges which merge commits from multiple branches
+ if ( @new_parents > 1 ) {
+ for ( my $i = 0; $i <= $#new_parents; $i++ ) {
+ for ( my $j = 0; $j <= $#new_parents; $j++ ) {
+ next if $i == $j;
+ next unless $new_parents[$i];
+ next unless $new_parents[$j];
+ my $revs = command_oneline(
+ "rev-list", "-1",
+ "$new_parents[$i]..$new_parents[$j]",
+ );
+ if ( !$revs ) {
+ undef($new_parents[$j]);
+ }
+ }
+ }
+ }
+ push @$parents, grep { defined } @new_parents;
+}
+
+sub make_log_entry {
+ my ($self, $rev, $parents, $ed) = @_;
+ my $untracked = $self->get_untracked($ed);
+
+ my @parents = @$parents;
+ my $ps = $ed->{path_strip} || "";
+ for my $path ( grep { m/$ps/ } %{$ed->{dir_prop}} ) {
+ my $props = $ed->{dir_prop}{$path};
+ if ( $props->{"svk:merge"} ) {
+ $self->find_extra_svk_parents
+ ($ed, $props->{"svk:merge"}, \@parents);
+ }
+ if ( $props->{"svn:mergeinfo"} ) {
+ $self->find_extra_svn_parents
+ ($ed,
+ $props->{"svn:mergeinfo"},
+ \@parents);
+ }
+ }
+
+ open my $un, '>>', "$self->{dir}/unhandled.log" or croak $!;
+ print $un "r$rev\n" or croak $!;
+ print $un $_, "\n" foreach @$untracked;
+ my %log_entry = ( parents => \@parents, revision => $rev,
+ log => '');
+
+ my $headrev;
+ my $logged = delete $self->{logged_rev_props};
+ if (!$logged || $self->{-want_revprops}) {
+ my $rp = $self->ra->rev_proplist($rev);
+ foreach (sort keys %$rp) {
+ my $v = $rp->{$_};
+ if (/^svn:(author|date|log)$/) {
+ $log_entry{$1} = $v;
+ } elsif ($_ eq 'svm:headrev') {
+ $headrev = $v;
+ } else {
+ print $un " rev_prop: ", uri_encode($_), ' ',
+ uri_encode($v), "\n";
+ }
+ }
+ } else {
+ map { $log_entry{$_} = $logged->{$_} } keys %$logged;
+ }
+ close $un or croak $!;
+
+ $log_entry{date} = parse_svn_date($log_entry{date});
+ $log_entry{log} .= "\n";
+ my $author = $log_entry{author} = check_author($log_entry{author});
+ my ($name, $email) = defined $::users{$author} ? @{$::users{$author}}
+ : ($author, undef);
+
+ my ($commit_name, $commit_email) = ($name, $email);
+ if ($_use_log_author) {
+ my $name_field;
+ if ($log_entry{log} =~ /From:\s+(.*\S)\s*\n/i) {
+ $name_field = $1;
+ } elsif ($log_entry{log} =~ /Signed-off-by:\s+(.*\S)\s*\n/i) {
+ $name_field = $1;
+ }
+ if (!defined $name_field) {
+ if (!defined $email) {
+ $email = $name;
+ }
+ } elsif ($name_field =~ /(.*?)\s+<(.*)>/) {
+ ($name, $email) = ($1, $2);
+ } elsif ($name_field =~ /(.*)@/) {
+ ($name, $email) = ($1, $name_field);
+ } else {
+ ($name, $email) = ($name_field, $name_field);
+ }
+ }
+ if (defined $headrev && $self->use_svm_props) {
+ if ($self->rewrite_root) {
+ die "Can't have both 'useSvmProps' and 'rewriteRoot' ",
+ "options set!\n";
+ }
+ if ($self->rewrite_uuid) {
+ die "Can't have both 'useSvmProps' and 'rewriteUUID' ",
+ "options set!\n";
+ }
+ my ($uuid, $r) = $headrev =~ m{^([a-f\d\-]{30,}):(\d+)$}i;
+ # we don't want "SVM: initializing mirror for junk" ...
+ return undef if $r == 0;
+ my $svm = $self->svm;
+ if ($uuid ne $svm->{uuid}) {
+ die "UUID mismatch on SVM path:\n",
+ "expected: $svm->{uuid}\n",
+ " got: $uuid\n";
+ }
+ my $full_url = $self->full_url;
+ $full_url =~ s#^\Q$svm->{replace}\E(/|$)#$svm->{source}$1# or
+ die "Failed to replace '$svm->{replace}' with ",
+ "'$svm->{source}' in $full_url\n";
+ # throw away username for storing in records
+ remove_username($full_url);
+ $log_entry{metadata} = "$full_url\@$r $uuid";
+ $log_entry{svm_revision} = $r;
+ $email ||= "$author\@$uuid";
+ $commit_email ||= "$author\@$uuid";
+ } elsif ($self->use_svnsync_props) {
+ my $full_url = $self->svnsync->{url};
+ $full_url .= "/$self->{path}" if length $self->{path};
+ remove_username($full_url);
+ my $uuid = $self->svnsync->{uuid};
+ $log_entry{metadata} = "$full_url\@$rev $uuid";
+ $email ||= "$author\@$uuid";
+ $commit_email ||= "$author\@$uuid";
+ } else {
+ my $url = $self->metadata_url;
+ remove_username($url);
+ my $uuid = $self->rewrite_uuid || $self->ra->get_uuid;
+ $log_entry{metadata} = "$url\@$rev " . $uuid;
+ $email ||= "$author\@" . $uuid;
+ $commit_email ||= "$author\@" . $uuid;
+ }
+ $log_entry{name} = $name;
+ $log_entry{email} = $email;
+ $log_entry{commit_name} = $commit_name;
+ $log_entry{commit_email} = $commit_email;
+ \%log_entry;
+}
+
+sub fetch {
+ my ($self, $min_rev, $max_rev, @parents) = @_;
+ my ($last_rev, $last_commit) = $self->last_rev_commit;
+ my ($base, $head) = $self->get_fetch_range($min_rev, $max_rev);
+ $self->ra->gs_fetch_loop_common($base, $head, [$self]);
+}
+
+sub set_tree_cb {
+ my ($self, $log_entry, $tree, $rev, $date, $author) = @_;
+ $self->{inject_parents} = { $rev => $tree };
+ $self->fetch(undef, undef);
+}
+
+sub set_tree {
+ my ($self, $tree) = (shift, shift);
+ my $log_entry = ::get_commit_entry($tree);
+ unless ($self->{last_rev}) {
+ fatal("Must have an existing revision to commit");
+ }
+ my %ed_opts = ( r => $self->{last_rev},
+ log => $log_entry->{log},
+ ra => $self->ra,
+ tree_a => $self->{last_commit},
+ tree_b => $tree,
+ editor_cb => sub {
+ $self->set_tree_cb($log_entry, $tree, @_) },
+ svn_path => $self->{path} );
+ if (!Git::SVN::Editor->new(\%ed_opts)->apply_diff) {
+ print "No changes\nr$self->{last_rev} = $tree\n";
+ }
+}
+
+sub rebuild_from_rev_db {
+ my ($self, $path) = @_;
+ my $r = -1;
+ open my $fh, '<', $path or croak "open: $!";
+ binmode $fh or croak "binmode: $!";
+ while (<$fh>) {
+ length($_) == 41 or croak "inconsistent size in ($_) != 41";
+ chomp($_);
+ ++$r;
+ next if $_ eq ('0' x 40);
+ $self->rev_map_set($r, $_);
+ print "r$r = $_\n";
+ }
+ close $fh or croak "close: $!";
+ unlink $path or croak "unlink: $!";
+}
+
+sub rebuild {
+ my ($self) = @_;
+ my $map_path = $self->map_path;
+ my $partial = (-e $map_path && ! -z $map_path);
+ return unless ::verify_ref($self->refname.'^0');
+ if (!$partial && ($self->use_svm_props || $self->no_metadata)) {
+ my $rev_db = $self->rev_db_path;
+ $self->rebuild_from_rev_db($rev_db);
+ if ($self->use_svm_props) {
+ my $svm_rev_db = $self->rev_db_path($self->svm_uuid);
+ $self->rebuild_from_rev_db($svm_rev_db);
+ }
+ $self->unlink_rev_db_symlink;
+ return;
+ }
+ print "Rebuilding $map_path ...\n" if (!$partial);
+ my ($base_rev, $head) = ($partial ? $self->rev_map_max_norebuild(1) :
+ (undef, undef));
+ my ($log, $ctx) =
+ command_output_pipe(qw/rev-list --pretty=raw --reverse/,
+ ($head ? "$head.." : "") . $self->refname,
+ '--');
+ my $metadata_url = $self->metadata_url;
+ remove_username($metadata_url);
+ my $svn_uuid = $self->rewrite_uuid || $self->ra_uuid;
+ my $c;
+ while (<$log>) {
+ if ( m{^commit ($::sha1)$} ) {
+ $c = $1;
+ next;
+ }
+ next unless s{^\s*(git-svn-id:)}{$1};
+ my ($url, $rev, $uuid) = ::extract_metadata($_);
+ remove_username($url);
+
+ # ignore merges (from set-tree)
+ next if (!defined $rev || !$uuid);
+
+ # if we merged or otherwise started elsewhere, this is
+ # how we break out of it
+ if (($uuid ne $svn_uuid) ||
+ ($metadata_url && $url && ($url ne $metadata_url))) {
+ next;
+ }
+ if ($partial && $head) {
+ print "Partial-rebuilding $map_path ...\n";
+ print "Currently at $base_rev = $head\n";
+ $head = undef;
+ }
+
+ $self->rev_map_set($rev, $c);
+ print "r$rev = $c\n";
+ }
+ command_close_pipe($log, $ctx);
+ print "Done rebuilding $map_path\n" if (!$partial || !$head);
+ my $rev_db_path = $self->rev_db_path;
+ if (-f $self->rev_db_path) {
+ unlink $self->rev_db_path or croak "unlink: $!";
+ }
+ $self->unlink_rev_db_symlink;
+}
+
+# rev_map:
+# Tie::File seems to be prone to offset errors if revisions get sparse,
+# and it's not that fast, either. Tie::File is also not in Perl 5.6, so
+# one of my favorite modules is out :< Next up would be one of the DBM
+# modules, but I'm not sure which is most portable...
+#
+# This is the replacement for the rev_db format, which was too big
+# and inefficient for large repositories with a lot of sparse history
+# (mainly tags)
+#
+# The format is this:
+# - 24 bytes for every record,
+# * 4 bytes for the integer representing an SVN revision number
+# * 20 bytes representing the sha1 of a git commit
+# - No empty padding records like the old format
+# (except the last record, which can be overwritten)
+# - new records are written append-only since SVN revision numbers
+# increase monotonically
+# - lookups on SVN revision number are done via a binary search
+# - Piping the file to xxd -c24 is a good way of dumping it for
+# viewing or editing (piped back through xxd -r), should the need
+# ever arise.
+# - The last record can be a padding revision with an all-zero sha1
+# This is used to optimize fetch performance when using multiple
+# "fetch" directives in .git/config
+#
+# These files are disposable unless noMetadata or useSvmProps is set
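+#
+# Illustrative record layout, assuming the usual rev_map_fmt of 'NH40'
+# (a 32-bit big-endian revision followed by 40 hex digits packed into
+# 20 bytes):
+#
+#	my $rec = pack(rev_map_fmt, 1234,
+#	               'da39a3ee5e6b4b0d3255bfef95601890afd80709');
+#	# length($rec) == 24
+#	my ($rev, $sha1) = unpack(rev_map_fmt, $rec); # (1234, 'da39a3ee...')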
+
+sub _rev_map_set {
+ my ($fh, $rev, $commit) = @_;
+
+ binmode $fh or croak "binmode: $!";
+ my $size = (stat($fh))[7];
+ ($size % 24) == 0 or croak "inconsistent size: $size";
+
+ my $wr_offset = 0;
+ if ($size > 0) {
+ sysseek($fh, -24, SEEK_END) or croak "seek: $!";
+ my $read = sysread($fh, my $buf, 24) or croak "read: $!";
+ $read == 24 or croak "read only $read bytes (!= 24)";
+ my ($last_rev, $last_commit) = unpack(rev_map_fmt, $buf);
+ if ($last_commit eq ('0' x40)) {
+ if ($size >= 48) {
+ sysseek($fh, -48, SEEK_END) or croak "seek: $!";
+ $read = sysread($fh, $buf, 24) or
+ croak "read: $!";
+ $read == 24 or
+ croak "read only $read bytes (!= 24)";
+ ($last_rev, $last_commit) =
+ unpack(rev_map_fmt, $buf);
+ if ($last_commit eq ('0' x40)) {
+ croak "inconsistent .rev_map\n";
+ }
+ }
+ if ($last_rev >= $rev) {
+ croak "last_rev is higher!: $last_rev >= $rev";
+ }
+ $wr_offset = -24;
+ }
+ }
+ sysseek($fh, $wr_offset, SEEK_END) or croak "seek: $!";
+ syswrite($fh, pack(rev_map_fmt, $rev, $commit), 24) == 24 or
+ croak "write: $!";
+}
+
+sub _rev_map_reset {
+ my ($fh, $rev, $commit) = @_;
+ my $c = _rev_map_get($fh, $rev);
+ $c eq $commit or die "_rev_map_reset(@_) commit $c does not match!\n";
+ my $offset = sysseek($fh, 0, SEEK_CUR) or croak "seek: $!";
+ truncate $fh, $offset or croak "truncate: $!";
+}
+
+sub mkfile {
+ my ($path) = @_;
+ unless (-e $path) {
+ my ($dir, $base) = ($path =~ m#^(.*?)/?([^/]+)$#);
+ mkpath([$dir]) unless -d $dir;
+ open my $fh, '>>', $path or die "Couldn't create $path: $!\n";
+ close $fh or die "Couldn't close (create) $path: $!\n";
+ }
+}
+
+sub rev_map_set {
+ my ($self, $rev, $commit, $update_ref, $uuid) = @_;
+ defined $commit or die "missing arg3\n";
+ length $commit == 40 or die "arg3 must be a full SHA1 hexsum\n";
+ my $db = $self->map_path($uuid);
+ my $db_lock = "$db.lock";
+ my $sigmask;
+ $update_ref ||= 0;
+ if ($update_ref) {
+ $sigmask = POSIX::SigSet->new();
+ my $signew = POSIX::SigSet->new(SIGINT, SIGHUP, SIGTERM,
+ SIGALRM, SIGUSR1, SIGUSR2);
+ sigprocmask(SIG_BLOCK, $signew, $sigmask) or
+ croak "Can't block signals: $!";
+ }
+ mkfile($db);
+
+ $LOCKFILES{$db_lock} = 1;
+ my $sync;
+	# both of these options make our rev_map file very, very important
+	# and we can't afford to lose it because rebuild() won't work
+ if ($self->use_svm_props || $self->no_metadata) {
+ $sync = 1;
+ copy($db, $db_lock) or die "rev_map_set(@_): ",
+ "Failed to copy: ",
+ "$db => $db_lock ($!)\n";
+ } else {
+ rename $db, $db_lock or die "rev_map_set(@_): ",
+ "Failed to rename: ",
+ "$db => $db_lock ($!)\n";
+ }
+
+ sysopen(my $fh, $db_lock, O_RDWR | O_CREAT)
+ or croak "Couldn't open $db_lock: $!\n";
+ $update_ref eq 'reset' ? _rev_map_reset($fh, $rev, $commit) :
+ _rev_map_set($fh, $rev, $commit);
+ if ($sync) {
+ $fh->flush or die "Couldn't flush $db_lock: $!\n";
+ $fh->sync or die "Couldn't sync $db_lock: $!\n";
+ }
+ close $fh or croak $!;
+ if ($update_ref) {
+ $_head = $self;
+ my $note = "";
+ $note = " ($update_ref)" if ($update_ref !~ /^\d*$/);
+ command_noisy('update-ref', '-m', "r$rev$note",
+ $self->refname, $commit);
+ }
+ rename $db_lock, $db or die "rev_map_set(@_): ", "Failed to rename: ",
+ "$db_lock => $db ($!)\n";
+ delete $LOCKFILES{$db_lock};
+ if ($update_ref) {
+ sigprocmask(SIG_SETMASK, $sigmask) or
+ croak "Can't restore signal mask: $!";
+ }
+}
+
+# If want_commit, this will return an array of (rev, commit) where
+# commit _must_ be a valid commit in the archive.
+# Otherwise, it'll return the max revision (whether or not the
+# commit is valid or just an all-zero '0' x 40 placeholder).
+sub rev_map_max {
+ my ($self, $want_commit) = @_;
+ $self->rebuild;
+ my ($r, $c) = $self->rev_map_max_norebuild($want_commit);
+ $want_commit ? ($r, $c) : $r;
+}
+
+sub rev_map_max_norebuild {
+ my ($self, $want_commit) = @_;
+ my $map_path = $self->map_path;
+ stat $map_path or return $want_commit ? (0, undef) : 0;
+ sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
+ binmode $fh or croak "binmode: $!";
+ my $size = (stat($fh))[7];
+ ($size % 24) == 0 or croak "inconsistent size: $size";
+
+ if ($size == 0) {
+ close $fh or croak "close: $!";
+ return $want_commit ? (0, undef) : 0;
+ }
+
+ sysseek($fh, -24, SEEK_END) or croak "seek: $!";
+ sysread($fh, my $buf, 24) == 24 or croak "read: $!";
+ my ($r, $c) = unpack(rev_map_fmt, $buf);
+ if ($want_commit && $c eq ('0' x40)) {
+ if ($size < 48) {
+ return $want_commit ? (0, undef) : 0;
+ }
+ sysseek($fh, -48, SEEK_END) or croak "seek: $!";
+ sysread($fh, $buf, 24) == 24 or croak "read: $!";
+ ($r, $c) = unpack(rev_map_fmt, $buf);
+ if ($c eq ('0'x40)) {
+ croak "Penultimate record is all-zeroes in $map_path";
+ }
+ }
+ close $fh or croak "close: $!";
+ $want_commit ? ($r, $c) : $r;
+}
+
+sub rev_map_get {
+ my ($self, $rev, $uuid) = @_;
+ my $map_path = $self->map_path($uuid);
+ return undef unless -e $map_path;
+
+ sysopen(my $fh, $map_path, O_RDONLY) or croak "open: $!";
+ my $c = _rev_map_get($fh, $rev);
+ close($fh) or croak "close: $!";
+ $c
+}
+
+sub _rev_map_get {
+ my ($fh, $rev) = @_;
+
+ binmode $fh or croak "binmode: $!";
+ my $size = (stat($fh))[7];
+ ($size % 24) == 0 or croak "inconsistent size: $size";
+
+ if ($size == 0) {
+ return undef;
+ }
+
+ my ($l, $u) = (0, $size - 24);
+ my ($r, $c, $buf);
+
+ while ($l <= $u) {
+ my $i = int(($l/24 + $u/24) / 2) * 24;
+ sysseek($fh, $i, SEEK_SET) or croak "seek: $!";
+ sysread($fh, my $buf, 24) == 24 or croak "read: $!";
+ my ($r, $c) = unpack(rev_map_fmt, $buf);
+
+ if ($r < $rev) {
+ $l = $i + 24;
+ } elsif ($r > $rev) {
+ $u = $i - 24;
+ } else { # $r == $rev
+ return $c eq ('0' x 40) ? undef : $c;
+ }
+ }
+ undef;
+}
+
+# Finds the first svn revision that exists on (if $eq_ok is true) or
+# before $rev for the current branch. It will not search any lower
+# than $min_rev. Returns the git commit hash and svn revision number
+# if found, else (undef, undef).
+sub find_rev_before {
+ my ($self, $rev, $eq_ok, $min_rev) = @_;
+ --$rev unless $eq_ok;
+ $min_rev ||= 1;
+ my $max_rev = $self->rev_map_max;
+ $rev = $max_rev if ($rev > $max_rev);
+ while ($rev >= $min_rev) {
+ if (my $c = $self->rev_map_get($rev)) {
+ return ($rev, $c);
+ }
+ --$rev;
+ }
+ return (undef, undef);
+}
+
+# Finds the first svn revision that exists on (if $eq_ok is true) or
+# after $rev for the current branch. It will not search any higher
+# than $max_rev. Returns the git commit hash and svn revision number
+# if found, else (undef, undef).
+sub find_rev_after {
+ my ($self, $rev, $eq_ok, $max_rev) = @_;
+ ++$rev unless $eq_ok;
+ $max_rev ||= $self->rev_map_max;
+ while ($rev <= $max_rev) {
+ if (my $c = $self->rev_map_get($rev)) {
+ return ($rev, $c);
+ }
+ ++$rev;
+ }
+ return (undef, undef);
+}
+
+sub _new {
+ my ($class, $repo_id, $ref_id, $path) = @_;
+ unless (defined $repo_id && length $repo_id) {
+ $repo_id = $default_repo_id;
+ }
+ unless (defined $ref_id && length $ref_id) {
+ # Access the prefix option from the git-svn main program if it's loaded.
+ my $prefix = defined &::opt_prefix ? ::opt_prefix() : "";
+ $_[2] = $ref_id =
+ "refs/remotes/$prefix$default_ref_id";
+ }
+ $_[1] = $repo_id;
+ my $dir = "$ENV{GIT_DIR}/svn/$ref_id";
+
+ # Older repos imported by us used $GIT_DIR/svn/foo instead of
+ # $GIT_DIR/svn/refs/remotes/foo when tracking refs/remotes/foo
+ if ($ref_id =~ m{^refs/remotes/(.*)}) {
+ my $old_dir = "$ENV{GIT_DIR}/svn/$1";
+ if (-d $old_dir && ! -d $dir) {
+ $dir = $old_dir;
+ }
+ }
+
+ $_[3] = $path = '' unless (defined $path);
+ mkpath([$dir]);
+ bless {
+ ref_id => $ref_id, dir => $dir, index => "$dir/index",
+ path => $path, config => "$ENV{GIT_DIR}/svn/config",
+ map_root => "$dir/.rev_map", repo_id => $repo_id }, $class;
+}
+
+# for read-only access of old .rev_db formats
+sub unlink_rev_db_symlink {
+ my ($self) = @_;
+ my $link = $self->rev_db_path;
+ $link =~ s/\.[\w-]+$// or croak "missing UUID at the end of $link";
+ if (-l $link) {
+ unlink $link or croak "unlink: $link failed!";
+ }
+}
+
+sub rev_db_path {
+ my ($self, $uuid) = @_;
+ my $db_path = $self->map_path($uuid);
+ $db_path =~ s{/\.rev_map\.}{/\.rev_db\.}
+ or croak "map_path: $db_path does not contain '/.rev_map.' !";
+ $db_path;
+}
+
+# the new replacement for .rev_db
+sub map_path {
+ my ($self, $uuid) = @_;
+ $uuid ||= $self->ra_uuid;
+ "$self->{map_root}.$uuid";
+}
+
+sub uri_encode {
+ my ($f) = @_;
+ $f =~ s#([^a-zA-Z0-9\*!\:_\./\-])#uc sprintf("%%%02x",ord($1))#eg;
+ $f
+}
+
+sub uri_decode {
+ my ($f) = @_;
+ $f =~ s#%([0-9a-fA-F]{2})#chr(hex($1))#eg;
+ $f
+}
+
+sub remove_username {
+ $_[0] =~ s{^([^:]*://)[^@]+@}{$1};
+}
+
+1;
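
Editorial note, not part of the patch: the rev_map helpers above work on fixed 24-byte records. The following hedged sketch dumps a .rev_map.$UUID file as "rREV => SHA1" pairs; it assumes the unpack format used above (rev_map_fmt) is 'NH40', i.e. a 4-byte big-endian SVN revision followed by a 20-byte SHA-1.

#!/usr/bin/perl
# Hedged sketch: dump a .rev_map.$UUID file outside of git-svn.
# Assumes the 24-byte record layout used by rev_map_max_norebuild()
# above, taken here to be unpack format 'NH40'.
use strict;
use warnings;

my $path = shift or die "usage: $0 .rev_map.UUID\n";
open my $fh, '<', $path or die "open $path: $!\n";
binmode $fh;
my $size = -s $fh;
die "inconsistent size: $size\n" if $size % 24;
while (read($fh, my $buf, 24) == 24) {
	my ($rev, $sha1) = unpack('NH40', $buf);
	next if $sha1 eq '0' x 40;	# placeholder record, no commit yet
	print "r$rev => $sha1\n";
}
close $fh;
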
diff --git a/perl/Git/SVN/Fetcher.pm b/perl/Git/SVN/Fetcher.pm
index ef8e9ed2a..76fae9bce 100644
--- a/perl/Git/SVN/Fetcher.pm
+++ b/perl/Git/SVN/Fetcher.pm
@@ -57,6 +57,7 @@ sub new {
$self->{file_prop} = {};
$self->{absent_dir} = {};
$self->{absent_file} = {};
+ require Git::IndexInfo;
$self->{gii} = $git_svn->tmp_index_do(sub { Git::IndexInfo->new });
$self->{pathnameencoding} = Git::config('svn.pathnameencoding');
$self;
diff --git a/perl/Git/SVN/GlobSpec.pm b/perl/Git/SVN/GlobSpec.pm
new file mode 100644
index 000000000..96cfd9896
--- /dev/null
+++ b/perl/Git/SVN/GlobSpec.pm
@@ -0,0 +1,59 @@
+package Git::SVN::GlobSpec;
+use strict;
+use warnings;
+
+sub new {
+ my ($class, $glob, $pattern_ok) = @_;
+ my $re = $glob;
+ $re =~ s!/+$!!g; # no need for trailing slashes
+ my (@left, @right, @patterns);
+ my $state = "left";
+ my $die_msg = "Only one set of wildcard directories " .
+ "(e.g. '*' or '*/*/*') is supported: '$glob'\n";
+ for my $part (split(m|/|, $glob)) {
+ if ($part =~ /\*/ && $part ne "*") {
+ die "Invalid pattern in '$glob': $part\n";
+ } elsif ($pattern_ok && $part =~ /[{}]/ &&
+ $part !~ /^\{[^{}]+\}/) {
+ die "Invalid pattern in '$glob': $part\n";
+ }
+ if ($part eq "*") {
+ die $die_msg if $state eq "right";
+ $state = "pattern";
+ push(@patterns, "[^/]*");
+ } elsif ($pattern_ok && $part =~ /^\{(.*)\}$/) {
+ die $die_msg if $state eq "right";
+ $state = "pattern";
+ my $p = quotemeta($1);
+ $p =~ s/\\,/|/g;
+ push(@patterns, "(?:$p)");
+ } else {
+ if ($state eq "left") {
+ push(@left, $part);
+ } else {
+ push(@right, $part);
+ $state = "right";
+ }
+ }
+ }
+ my $depth = @patterns;
+ if ($depth == 0) {
+ die "One '*' is needed in glob: '$glob'\n";
+ }
+ my $left = join('/', @left);
+ my $right = join('/', @right);
+ $re = join('/', @patterns);
+ $re = join('\/',
+ grep(length, quotemeta($left), "($re)", quotemeta($right)));
+ my $left_re = qr/^\/\Q$left\E(\/|$)/;
+ bless { left => $left, right => $right, left_regex => $left_re,
+ regex => qr/$re/, glob => $glob, depth => $depth }, $class;
+}
+
+sub full_path {
+ my ($self, $path) = @_;
+ return (length $self->{left} ? "$self->{left}/" : '') .
+ $path . (length $self->{right} ? "/$self->{right}" : '');
+}
+
+1;
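
Editorial note, not part of the patch: a brief, hedged usage sketch for the class above (it assumes perl/ is on @INC so the module loads standalone), showing how a branch glob becomes the stored regex and how full_path() re-expands a matched component.

# Hedged usage sketch for Git::SVN::GlobSpec.
use strict;
use warnings;
use Git::SVN::GlobSpec;	# assumes perl/ is on @INC

my $spec = Git::SVN::GlobSpec->new('branches/*/project', 1);
print "regex: $spec->{regex}\n";	# compiled pattern matching branches/<anything>/project
print "left:  $spec->{left}\n";		# branches
print "path:  ", $spec->full_path('1.6.x'), "\n";	# branches/1.6.x/project
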
diff --git a/perl/Git/SVN/Log.pm b/perl/Git/SVN/Log.pm
new file mode 100644
index 000000000..3cc1c6f08
--- /dev/null
+++ b/perl/Git/SVN/Log.pm
@@ -0,0 +1,395 @@
+package Git::SVN::Log;
+use strict;
+use warnings;
+use Git::SVN::Utils qw(fatal);
+use Git qw(command command_oneline command_output_pipe command_close_pipe);
+use POSIX qw/strftime/;
+use constant commit_log_separator => ('-' x 72) . "\n";
+use vars qw/$TZ $limit $color $pager $non_recursive $verbose $oneline
+ %rusers $show_commit $incremental/;
+
+# Option set in git-svn
+our $_git_format;
+
+sub cmt_showable {
+ my ($c) = @_;
+ return 1 if defined $c->{r};
+
+ # big commit message got truncated by the 16k pretty buffer in rev-list
+ if ($c->{l} && $c->{l}->[-1] eq "...\n" &&
+ $c->{a_raw} =~ /\@([a-f\d\-]+)>$/) {
+ @{$c->{l}} = ();
+ my @log = command(qw/cat-file commit/, $c->{c});
+
+ # shift off the headers
+ shift @log while ($log[0] ne '');
+ shift @log;
+
+ # TODO: make $c->{l} not have a trailing newline in the future
+ @{$c->{l}} = map { "$_\n" } grep !/^git-svn-id: /, @log;
+
+ (undef, $c->{r}, undef) = ::extract_metadata(
+ (grep(/^git-svn-id: /, @log))[-1]);
+ }
+ return defined $c->{r};
+}
+
+sub log_use_color {
+ return $color || Git->repository->get_colorbool('color.diff');
+}
+
+sub git_svn_log_cmd {
+ my ($r_min, $r_max, @args) = @_;
+ my $head = 'HEAD';
+ my (@files, @log_opts);
+ foreach my $x (@args) {
+ if ($x eq '--' || @files) {
+ push @files, $x;
+ } else {
+ if (::verify_ref("$x^0")) {
+ $head = $x;
+ } else {
+ push @log_opts, $x;
+ }
+ }
+ }
+
+ my ($url, $rev, $uuid, $gs) = ::working_head_info($head);
+
+ require Git::SVN;
+ $gs ||= Git::SVN->_new;
+ my @cmd = (qw/log --abbrev-commit --pretty=raw --default/,
+ $gs->refname);
+ push @cmd, '-r' unless $non_recursive;
+ push @cmd, qw/--raw --name-status/ if $verbose;
+ push @cmd, '--color' if log_use_color();
+ push @cmd, @log_opts;
+ if (defined $r_max && $r_max == $r_min) {
+ push @cmd, '--max-count=1';
+ if (my $c = $gs->rev_map_get($r_max)) {
+ push @cmd, $c;
+ }
+ } elsif (defined $r_max) {
+ if ($r_max < $r_min) {
+ ($r_min, $r_max) = ($r_max, $r_min);
+ }
+ my (undef, $c_max) = $gs->find_rev_before($r_max, 1, $r_min);
+ my (undef, $c_min) = $gs->find_rev_after($r_min, 1, $r_max);
+ # If there are no commits in the range, both $c_max and $c_min
+ # will be undefined. If there is at least 1 commit in the
+ # range, both will be defined.
+ return () if !defined $c_min || !defined $c_max;
+ if ($c_min eq $c_max) {
+ push @cmd, '--max-count=1', $c_min;
+ } else {
+ push @cmd, '--boundary', "$c_min..$c_max";
+ }
+ }
+ return (@cmd, @files);
+}
+
+# adapted from pager.c
+sub config_pager {
+ if (! -t *STDOUT) {
+ $ENV{GIT_PAGER_IN_USE} = 'false';
+ $pager = undef;
+ return;
+ }
+ chomp($pager = command_oneline(qw(var GIT_PAGER)));
+ if ($pager eq 'cat') {
+ $pager = undef;
+ }
+ $ENV{GIT_PAGER_IN_USE} = defined($pager);
+}
+
+sub run_pager {
+ return unless defined $pager;
+ pipe my ($rfd, $wfd) or return;
+ defined(my $pid = fork) or fatal "Can't fork: $!";
+ if (!$pid) {
+ open STDOUT, '>&', $wfd or
+ fatal "Can't redirect to stdout: $!";
+ return;
+ }
+ open STDIN, '<&', $rfd or fatal "Can't redirect stdin: $!";
+ $ENV{LESS} ||= 'FRSX';
+ exec $pager or fatal "Can't run pager: $! ($pager)";
+}
+
+sub format_svn_date {
+ my $t = shift || time;
+ require Git::SVN;
+ my $gmoff = Git::SVN::get_tz($t);
+ return strftime("%Y-%m-%d %H:%M:%S $gmoff (%a, %d %b %Y)", localtime($t));
+}
+
+sub parse_git_date {
+ my ($t, $tz) = @_;
+ # Date::Parse isn't in the standard Perl distro :(
+ if ($tz =~ s/^\+//) {
+ $t += tz_to_s_offset($tz);
+ } elsif ($tz =~ s/^\-//) {
+ $t -= tz_to_s_offset($tz);
+ }
+ return $t;
+}
+
+sub set_local_timezone {
+ if (defined $TZ) {
+ $ENV{TZ} = $TZ;
+ } else {
+ delete $ENV{TZ};
+ }
+}
+
+sub tz_to_s_offset {
+ my ($tz) = @_;
+ $tz =~ s/(\d\d)$//;
+ return ($1 * 60) + ($tz * 3600);
+}
+
+sub get_author_info {
+ my ($dest, $author, $t, $tz) = @_;
+ $author =~ s/(?:^\s*|\s*$)//g;
+ $dest->{a_raw} = $author;
+ my $au;
+ if ($::_authors) {
+ $au = $rusers{$author} || undef;
+ }
+ if (!$au) {
+ ($au) = ($author =~ /<([^>]+)\@[^>]+>$/);
+ }
+ $dest->{t} = $t;
+ $dest->{tz} = $tz;
+ $dest->{a} = $au;
+ $dest->{t_utc} = parse_git_date($t, $tz);
+}
+
+sub process_commit {
+ my ($c, $r_min, $r_max, $defer) = @_;
+ if (defined $r_min && defined $r_max) {
+ if ($r_min == $c->{r} && $r_min == $r_max) {
+ show_commit($c);
+ return 0;
+ }
+ return 1 if $r_min == $r_max;
+ if ($r_min < $r_max) {
+ # we need to reverse the print order
+ return 0 if (defined $limit && --$limit < 0);
+ push @$defer, $c;
+ return 1;
+ }
+ if ($r_min != $r_max) {
+ return 1 if ($r_min < $c->{r});
+ return 1 if ($r_max > $c->{r});
+ }
+ }
+ return 0 if (defined $limit && --$limit < 0);
+ show_commit($c);
+ return 1;
+}
+
+my $l_fmt;
+sub show_commit {
+ my $c = shift;
+ if ($oneline) {
+ my $x = "\n";
+ if (my $l = $c->{l}) {
+ while ($l->[0] =~ /^\s*$/) { shift @$l }
+ $x = $l->[0];
+ }
+ $l_fmt ||= 'A' . length($c->{r});
+ print 'r',pack($l_fmt, $c->{r}),' | ';
+ print "$c->{c} | " if $show_commit;
+ print $x;
+ } else {
+ show_commit_normal($c);
+ }
+}
+
+sub show_commit_changed_paths {
+ my ($c) = @_;
+ return unless $c->{changed};
+ print "Changed paths:\n", @{$c->{changed}};
+}
+
+sub show_commit_normal {
+ my ($c) = @_;
+ print commit_log_separator, "r$c->{r} | ";
+ print "$c->{c} | " if $show_commit;
+ print "$c->{a} | ", format_svn_date($c->{t_utc}), ' | ';
+ my $nr_line = 0;
+
+ if (my $l = $c->{l}) {
+ while ($l->[$#$l] eq "\n" && $#$l > 0
+ && $l->[($#$l - 1)] eq "\n") {
+ pop @$l;
+ }
+ $nr_line = scalar @$l;
+ if (!$nr_line) {
+ print "1 line\n\n\n";
+ } else {
+ if ($nr_line == 1) {
+ $nr_line = '1 line';
+ } else {
+ $nr_line .= ' lines';
+ }
+ print $nr_line, "\n";
+ show_commit_changed_paths($c);
+ print "\n";
+ print $_ foreach @$l;
+ }
+ } else {
+ print "1 line\n";
+ show_commit_changed_paths($c);
+ print "\n";
+
+ }
+ foreach my $x (qw/raw stat diff/) {
+ if ($c->{$x}) {
+ print "\n";
+ print $_ foreach @{$c->{$x}}
+ }
+ }
+}
+
+sub cmd_show_log {
+ my (@args) = @_;
+ my ($r_min, $r_max);
+ my $r_last = -1; # prevent dupes
+ set_local_timezone();
+ if (defined $::_revision) {
+ if ($::_revision =~ /^(\d+):(\d+)$/) {
+ ($r_min, $r_max) = ($1, $2);
+ } elsif ($::_revision =~ /^\d+$/) {
+ $r_min = $r_max = $::_revision;
+ } else {
+ fatal "-r$::_revision is not supported, use ",
+ "standard 'git log' arguments instead";
+ }
+ }
+
+ config_pager();
+ @args = git_svn_log_cmd($r_min, $r_max, @args);
+ if (!@args) {
+ print commit_log_separator unless $incremental || $oneline;
+ return;
+ }
+ my $log = command_output_pipe(@args);
+ run_pager();
+ my (@k, $c, $d, $stat);
+ my $esc_color = qr/(?:\033\[(?:(?:\d+;)*\d*)?m)*/;
+ while (<$log>) {
+ if (/^${esc_color}commit (?:- )?($::sha1_short)/o) {
+ my $cmt = $1;
+ if ($c && cmt_showable($c) && $c->{r} != $r_last) {
+ $r_last = $c->{r};
+ process_commit($c, $r_min, $r_max, \@k) or
+ goto out;
+ }
+ $d = undef;
+ $c = { c => $cmt };
+ } elsif (/^${esc_color}author (.+) (\d+) ([\-\+]?\d+)$/o) {
+ get_author_info($c, $1, $2, $3);
+ } elsif (/^${esc_color}(?:tree|parent|committer) /o) {
+ # ignore
+ } elsif (/^${esc_color}:\d{6} \d{6} $::sha1_short/o) {
+ push @{$c->{raw}}, $_;
+ } elsif (/^${esc_color}[ACRMDT]\t/) {
+ # we could add $SVN->{svn_path} here, but that requires
+ # remote access at the moment (repo_path_split)...
+ s#^(${esc_color})([ACRMDT])\t#$1 $2 #o;
+ push @{$c->{changed}}, $_;
+ } elsif (/^${esc_color}diff /o) {
+ $d = 1;
+ push @{$c->{diff}}, $_;
+ } elsif ($d) {
+ push @{$c->{diff}}, $_;
+ } elsif (/^\ .+\ \|\s*\d+\ $esc_color[\+\-]*
+ $esc_color*[\+\-]*$esc_color$/x) {
+ $stat = 1;
+ push @{$c->{stat}}, $_;
+ } elsif ($stat && /^ \d+ files changed, \d+ insertions/) {
+ push @{$c->{stat}}, $_;
+ $stat = undef;
+ } elsif (/^${esc_color} (git-svn-id:.+)$/o) {
+ ($c->{url}, $c->{r}, undef) = ::extract_metadata($1);
+ } elsif (s/^${esc_color} //o) {
+ push @{$c->{l}}, $_;
+ }
+ }
+ if ($c && defined $c->{r} && $c->{r} != $r_last) {
+ $r_last = $c->{r};
+ process_commit($c, $r_min, $r_max, \@k);
+ }
+ if (@k) {
+ ($r_min, $r_max) = ($r_max, $r_min);
+ process_commit($_, $r_min, $r_max) foreach reverse @k;
+ }
+out:
+ close $log;
+ print commit_log_separator unless $incremental || $oneline;
+}
+
+sub cmd_blame {
+ my $path = pop;
+
+ config_pager();
+ run_pager();
+
+ my ($fh, $ctx, $rev);
+
+ if ($_git_format) {
+ ($fh, $ctx) = command_output_pipe('blame', @_, $path);
+ while (my $line = <$fh>) {
+ if ($line =~ /^\^?([[:xdigit:]]+)\s/) {
+ # Uncommitted edits show up as a rev ID of
+ # all zeros, which we can't look up with
+ # cmt_metadata
+ if ($1 !~ /^0+$/) {
+ (undef, $rev, undef) =
+ ::cmt_metadata($1);
+ $rev = '0' if (!$rev);
+ } else {
+ $rev = '0';
+ }
+ $rev = sprintf('%-10s', $rev);
+ $line =~ s/^\^?[[:xdigit:]]+(\s)/$rev$1/;
+ }
+ print $line;
+ }
+ } else {
+ ($fh, $ctx) = command_output_pipe('blame', '-p', @_, 'HEAD',
+ '--', $path);
+ my ($sha1);
+ my %authors;
+ my @buffer;
+	my %dsha; # distinct sha keys
+
+ while (my $line = <$fh>) {
+ push @buffer, $line;
+ if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
+ $dsha{$1} = 1;
+ }
+ }
+
+ my $s2r = ::cmt_sha2rev_batch([keys %dsha]);
+
+ foreach my $line (@buffer) {
+ if ($line =~ /^([[:xdigit:]]{40})\s\d+\s\d+/) {
+ $rev = $s2r->{$1};
+ $rev = '0' if (!$rev)
+ }
+ elsif ($line =~ /^author (.*)/) {
+ $authors{$rev} = $1;
+ $authors{$rev} =~ s/\s/_/g;
+ }
+ elsif ($line =~ /^\t(.*)$/) {
+ printf("%6s %10s %s\n", $rev, $authors{$rev}, $1);
+ }
+ }
+ }
+ command_close_pipe($fh, $ctx);
+}
+
+1;
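
Editorial note, not part of the patch: the timezone handling above is plain arithmetic on the +-HHMM suffix of git's raw dates. The subroutine is copied locally here so this hedged illustration runs standalone, without loading git-svn.

# Hedged illustration of the +-HHMM arithmetic used by tz_to_s_offset()
# and parse_git_date() above; copied locally so the snippet is standalone.
use strict;
use warnings;

sub tz_to_s_offset {
	my ($tz) = @_;
	$tz =~ s/(\d\d)$//;			# trailing two digits are minutes
	return ($1 * 60) + ($tz * 3600);	# what remains is whole hours
}

printf "+0530 => %d seconds\n", tz_to_s_offset('0530');	# 19800
printf "-0800 => %d seconds\n", tz_to_s_offset('0800');	# 28800 (caller applies the sign)
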
diff --git a/perl/Git/SVN/Migration.pm b/perl/Git/SVN/Migration.pm
new file mode 100644
index 000000000..75d74298e
--- /dev/null
+++ b/perl/Git/SVN/Migration.pm
@@ -0,0 +1,258 @@
+package Git::SVN::Migration;
+# these version numbers do NOT correspond to actual version numbers
+# of git or git-svn. They are just relative.
+#
+# v0 layout: .git/$id/info/url, refs/heads/$id-HEAD
+#
+# v1 layout: .git/$id/info/url, refs/remotes/$id
+#
+# v2 layout: .git/svn/$id/info/url, refs/remotes/$id
+#
+# v3 layout: .git/svn/$id, refs/remotes/$id
+# - info/url may remain for backwards compatibility
+# - this is what we migrate up to this layout automatically,
+# - this will be used by git svn init on single branches
+# v3.1 layout (auto migrated):
+# - .rev_db => .rev_db.$UUID, .rev_db will remain as a symlink
+# for backwards compatibility
+#
+# v4 layout: .git/svn/$repo_id/$id, refs/remotes/$repo_id/$id
+# - this is only created for newly multi-init-ed
+# repositories. Similar in spirit to the
+# --use-separate-remotes option in git-clone (now default)
+# - we do not automatically migrate to this (following
+# the example set by core git)
+#
+# v5 layout: .rev_db.$UUID => .rev_map.$UUID
+# - newer, more-efficient format that uses 24 bytes per record
+# with no filler space.
+# - use xxd -c24 < .rev_map.$UUID to view and debug
+# - This is a one-way migration, repositories updated to the
+# new format will not be able to use old git-svn without
+# rebuilding the .rev_db. Rebuilding the rev_db is not
+# possible if noMetadata or useSvmProps are set; but should
+# be no problem for users that use the (sensible) defaults.
+use strict;
+use warnings;
+use Carp qw/croak/;
+use File::Path qw/mkpath/;
+use File::Basename qw/dirname basename/;
+
+our $_minimize;
+use Git qw(
+ command
+ command_noisy
+ command_output_pipe
+ command_close_pipe
+);
+
+sub migrate_from_v0 {
+ my $git_dir = $ENV{GIT_DIR};
+ return undef unless -d $git_dir;
+ my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
+ my $migrated = 0;
+ while (<$fh>) {
+ chomp;
+ my ($id, $orig_ref) = ($_, $_);
+ next unless $id =~ s#^refs/heads/(.+)-HEAD$#$1#;
+ next unless -f "$git_dir/$id/info/url";
+ my $new_ref = "refs/remotes/$id";
+ if (::verify_ref("$new_ref^0")) {
+ print STDERR "W: $orig_ref is probably an old ",
+ "branch used by an ancient version of ",
+ "git-svn.\n",
+ "However, $new_ref also exists.\n",
+ "We will not be able ",
+ "to use this branch until this ",
+ "ambiguity is resolved.\n";
+ next;
+ }
+ print STDERR "Migrating from v0 layout...\n" if !$migrated;
+ print STDERR "Renaming ref: $orig_ref => $new_ref\n";
+ command_noisy('update-ref', $new_ref, $orig_ref);
+ command_noisy('update-ref', '-d', $orig_ref, $orig_ref);
+ $migrated++;
+ }
+ command_close_pipe($fh, $ctx);
+ print STDERR "Done migrating from v0 layout...\n" if $migrated;
+ $migrated;
+}
+
+sub migrate_from_v1 {
+ my $git_dir = $ENV{GIT_DIR};
+ my $migrated = 0;
+ return $migrated unless -d $git_dir;
+ my $svn_dir = "$git_dir/svn";
+
+ # just in case somebody used 'svn' as their $id at some point...
+ return $migrated if -d $svn_dir && ! -f "$svn_dir/info/url";
+
+ print STDERR "Migrating from a git-svn v1 layout...\n";
+ mkpath([$svn_dir]);
+ print STDERR "Data from a previous version of git-svn exists, but\n\t",
+ "$svn_dir\n\t(required for this version ",
+ "($::VERSION) of git-svn) does not exist.\n";
+ my ($fh, $ctx) = command_output_pipe(qw/rev-parse --symbolic --all/);
+ while (<$fh>) {
+ my $x = $_;
+ next unless $x =~ s#^refs/remotes/##;
+ chomp $x;
+ next unless -f "$git_dir/$x/info/url";
+ my $u = eval { ::file_to_s("$git_dir/$x/info/url") };
+ next unless $u;
+ my $dn = dirname("$git_dir/svn/$x");
+ mkpath([$dn]) unless -d $dn;
+ if ($x eq 'svn') { # they used 'svn' as GIT_SVN_ID:
+ mkpath(["$git_dir/svn/svn"]);
+ print STDERR " - $git_dir/$x/info => ",
+ "$git_dir/svn/$x/info\n";
+ rename "$git_dir/$x/info", "$git_dir/svn/$x/info" or
+ croak "$!: $x";
+ # don't worry too much about these, they probably
+ # don't exist with repos this old (save for index,
+ # and we can easily regenerate that)
+ foreach my $f (qw/unhandled.log index .rev_db/) {
+ rename "$git_dir/$x/$f", "$git_dir/svn/$x/$f";
+ }
+ } else {
+ print STDERR " - $git_dir/$x => $git_dir/svn/$x\n";
+ rename "$git_dir/$x", "$git_dir/svn/$x" or
+ croak "$!: $x";
+ }
+ $migrated++;
+ }
+ command_close_pipe($fh, $ctx);
+ print STDERR "Done migrating from a git-svn v1 layout\n";
+ $migrated;
+}
+
+sub read_old_urls {
+ my ($l_map, $pfx, $path) = @_;
+ my @dir;
+ foreach (<$path/*>) {
+ if (-r "$_/info/url") {
+ $pfx .= '/' if $pfx && $pfx !~ m!/$!;
+ my $ref_id = $pfx . basename $_;
+ my $url = ::file_to_s("$_/info/url");
+ $l_map->{$ref_id} = $url;
+ } elsif (-d $_) {
+ push @dir, $_;
+ }
+ }
+ foreach (@dir) {
+ my $x = $_;
+ $x =~ s!^\Q$ENV{GIT_DIR}\E/svn/!!o;
+ read_old_urls($l_map, $x, $_);
+ }
+}
+
+sub migrate_from_v2 {
+ my @cfg = command(qw/config -l/);
+ return if grep /^svn-remote\..+\.url=/, @cfg;
+ my %l_map;
+ read_old_urls(\%l_map, '', "$ENV{GIT_DIR}/svn");
+ my $migrated = 0;
+
+ require Git::SVN;
+ foreach my $ref_id (sort keys %l_map) {
+ eval { Git::SVN->init($l_map{$ref_id}, '', undef, $ref_id) };
+ if ($@) {
+ Git::SVN->init($l_map{$ref_id}, '', $ref_id, $ref_id);
+ }
+ $migrated++;
+ }
+ $migrated;
+}
+
+sub minimize_connections {
+ require Git::SVN;
+ require Git::SVN::Ra;
+
+ my $r = Git::SVN::read_all_remotes();
+ my $new_urls = {};
+ my $root_repos = {};
+ foreach my $repo_id (keys %$r) {
+ my $url = $r->{$repo_id}->{url} or next;
+ my $fetch = $r->{$repo_id}->{fetch} or next;
+ my $ra = Git::SVN::Ra->new($url);
+
+ # skip existing cases where we already connect to the root
+ if (($ra->{url} eq $ra->{repos_root}) ||
+ ($ra->{repos_root} eq $repo_id)) {
+ $root_repos->{$ra->{url}} = $repo_id;
+ next;
+ }
+
+ my $root_ra = Git::SVN::Ra->new($ra->{repos_root});
+ my $root_path = $ra->{url};
+ $root_path =~ s#^\Q$ra->{repos_root}\E(/|$)##;
+ foreach my $path (keys %$fetch) {
+ my $ref_id = $fetch->{$path};
+ my $gs = Git::SVN->new($ref_id, $repo_id, $path);
+
+ # make sure we can read when connecting to
+ # a higher level of a repository
+ my ($last_rev, undef) = $gs->last_rev_commit;
+ if (!defined $last_rev) {
+ $last_rev = eval {
+ $root_ra->get_latest_revnum;
+ };
+ next if $@;
+ }
+ my $new = $root_path;
+ $new .= length $path ? "/$path" : '';
+ eval {
+ $root_ra->get_log([$new], $last_rev, $last_rev,
+ 0, 0, 1, sub { });
+ };
+ next if $@;
+ $new_urls->{$ra->{repos_root}}->{$new} =
+ { ref_id => $ref_id,
+ old_repo_id => $repo_id,
+ old_path => $path };
+ }
+ }
+
+ my @emptied;
+ foreach my $url (keys %$new_urls) {
+ # see if we can re-use an existing [svn-remote "repo_id"]
+ # instead of creating a(n ugly) new section:
+ my $repo_id = $root_repos->{$url} || $url;
+
+ my $fetch = $new_urls->{$url};
+ foreach my $path (keys %$fetch) {
+ my $x = $fetch->{$path};
+ Git::SVN->init($url, $path, $repo_id, $x->{ref_id});
+ my $pfx = "svn-remote.$x->{old_repo_id}";
+
+ my $old_fetch = quotemeta("$x->{old_path}:".
+ "$x->{ref_id}");
+ command_noisy(qw/config --unset/,
+ "$pfx.fetch", '^'. $old_fetch . '$');
+ delete $r->{$x->{old_repo_id}}->
+ {fetch}->{$x->{old_path}};
+ if (!keys %{$r->{$x->{old_repo_id}}->{fetch}}) {
+ command_noisy(qw/config --unset/,
+ "$pfx.url");
+ push @emptied, $x->{old_repo_id}
+ }
+ }
+ }
+ if (@emptied) {
+ my $file = $ENV{GIT_CONFIG} || "$ENV{GIT_DIR}/config";
+ print STDERR <<EOF;
+The following [svn-remote] sections in your config file ($file) are empty
+and can be safely removed:
+EOF
+ print STDERR "[svn-remote \"$_\"]\n" foreach @emptied;
+ }
+}
+
+sub migration_check {
+ migrate_from_v0();
+ migrate_from_v1();
+ migrate_from_v2();
+ minimize_connections() if $_minimize;
+}
+
+1;
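
Editorial note, not part of the patch: as a hedged illustration of the v0..v5 layout history documented at the top of this module, the standalone probe below guesses which generation of git-svn metadata a repository carries. The heuristics only mirror the path names in that comment; they are not the logic migration_check() uses.

# Hedged, illustrative sketch: guess the git-svn metadata layout from
# the paths described in the v0..v5 notes above.
use strict;
use warnings;
use File::Find;

my $git_dir = $ENV{GIT_DIR} || '.git';
my %seen;
if (-d "$git_dir/svn") {
	find(sub {
		$seen{rev_map} = 1 if /^\.rev_map\./;	# v5
		$seen{rev_db}  = 1 if /^\.rev_db/;	# pre-v5 (.rev_db*)
	}, "$git_dir/svn");
}

my $layout = $seen{rev_map} ? "v5 (.rev_map.\$UUID present)"
	   : $seen{rev_db}  ? "pre-v5 (.rev_db still in use)"
	   : -d "$git_dir/svn" ? "v2 or later (.git/svn exists, no rev maps yet)"
	   : "no git-svn metadata found";
print "$layout\n";
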
diff --git a/perl/Git/SVN/Utils.pm b/perl/Git/SVN/Utils.pm
new file mode 100644
index 000000000..496006bc7
--- /dev/null
+++ b/perl/Git/SVN/Utils.pm
@@ -0,0 +1,59 @@
+package Git::SVN::Utils;
+
+use strict;
+use warnings;
+
+use base qw(Exporter);
+
+our @EXPORT_OK = qw(fatal can_compress);
+
+
+=head1 NAME
+
+Git::SVN::Utils - utility functions used across Git::SVN
+
+=head1 SYNOPSIS
+
+ use Git::SVN::Utils qw(functions to import);
+
+=head1 DESCRIPTION
+
+This module contains functions which are useful across many different
+parts of Git::SVN. Mostly it's a place to put utility functions
+rather than duplicate the code or have classes grabbing at other
+classes.
+
+=head1 FUNCTIONS
+
+All functions can be imported only on request.
+
+=head3 fatal
+
+ fatal(@message);
+
+Display a message and exit with a fatal error code.
+
+=cut
+
+# Note: not certain why this is in use instead of die. Probably because
+# the exit code of die is 255? Doesn't appear to be used consistently.
+sub fatal (@) { print STDERR "@_\n"; exit 1 }
+
+
+=head3 can_compress
+
+ my $can_compress = can_compress;
+
+Returns true if Compress::Zlib is available, false otherwise.
+
+=cut
+
+my $can_compress;
+sub can_compress {
+ return $can_compress if defined $can_compress;
+
+ return $can_compress = eval { require Compress::Zlib; };
+}
+
+
+1;
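
Editorial note, not part of the patch: a short, hedged usage sketch for the two helpers documented above (it assumes perl/ is on @INC).

# Hedged usage sketch for Git::SVN::Utils.
use strict;
use warnings;
use Git::SVN::Utils qw(fatal can_compress);

print can_compress()
	? "Compress::Zlib is available\n"
	: "Compress::Zlib is not available\n";

# fatal("some error") would print "some error" to STDERR and exit with status 1.
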
diff --git a/perl/Makefile b/perl/Makefile
index 6ca7d472e..15d96fcc7 100644
--- a/perl/Makefile
+++ b/perl/Makefile
@@ -20,17 +20,26 @@ clean:
$(RM) ppport.h
$(RM) $(makfile)
$(RM) $(makfile).old
+ $(RM) PM.stamp
+
+$(makfile): PM.stamp
ifdef NO_PERL_MAKEMAKER
instdir_SQ = $(subst ','\'',$(prefix)/lib)
modules += Git
modules += Git/I18N
+modules += Git/IndexInfo
+modules += Git/SVN
modules += Git/SVN/Memoize/YAML
modules += Git/SVN/Fetcher
modules += Git/SVN/Editor
+modules += Git/SVN/GlobSpec
+modules += Git/SVN/Log
+modules += Git/SVN/Migration
modules += Git/SVN/Prompt
modules += Git/SVN/Ra
+modules += Git/SVN/Utils
$(makfile): ../GIT-CFLAGS Makefile
echo all: private-Error.pm Git.pm Git/I18N.pm > $@
diff --git a/perl/Makefile.PL b/perl/Makefile.PL
index b54b04a61..3f29ba98a 100644
--- a/perl/Makefile.PL
+++ b/perl/Makefile.PL
@@ -2,11 +2,16 @@ use strict;
use warnings;
use ExtUtils::MakeMaker;
use Getopt::Long;
+use File::Find;
+
+# Don't forget to update the perl/Makefile, too.
+# Don't forget to test with NO_PERL_MAKEMAKER=YesPlease
# Sanity: die at first unknown option
Getopt::Long::Configure qw/ pass_through /;
-GetOptions("localedir=s" => \my $localedir);
+my $localedir = '';
+GetOptions("localedir=s" => \$localedir);
sub MY::postamble {
return <<'MAKE_FRAG';
@@ -24,24 +29,22 @@ endif
MAKE_FRAG
}
-# XXX. When editing this list:
-#
-# * Please update perl/Makefile, too.
-# * Don't forget to test with NO_PERL_MAKEMAKER=YesPlease
-my %pm = (
- 'Git.pm' => '$(INST_LIBDIR)/Git.pm',
- 'Git/I18N.pm' => '$(INST_LIBDIR)/Git/I18N.pm',
- 'Git/SVN/Memoize/YAML.pm' => '$(INST_LIBDIR)/Git/SVN/Memoize/YAML.pm',
- 'Git/SVN/Fetcher.pm' => '$(INST_LIBDIR)/Git/SVN/Fetcher.pm',
- 'Git/SVN/Editor.pm' => '$(INST_LIBDIR)/Git/SVN/Editor.pm',
- 'Git/SVN/Prompt.pm' => '$(INST_LIBDIR)/Git/SVN/Prompt.pm',
- 'Git/SVN/Ra.pm' => '$(INST_LIBDIR)/Git/SVN/Ra.pm',
-);
+# Find all the .pm files in "Git/" and Git.pm
+my %pm;
+find sub {
+ return unless /\.pm$/;
+
+	# sometimes File::Find prepends a "./"; strip it.
+ my $pm_path = $File::Find::name;
+ $pm_path =~ s{^\./}{};
+
+ $pm{$pm_path} = '$(INST_LIBDIR)/'.$pm_path;
+}, "Git", "Git.pm";
+
# We come with our own bundled Error.pm. It's not in the set of default
# Perl modules so install it if it's not available on the system yet.
-eval { require Error };
-if ($@ || $Error::VERSION < 0.15009) {
+if ( !eval { require Error } || $Error::VERSION < 0.15009) {
$pm{'private-Error.pm'} = '$(INST_LIBDIR)/Error.pm';
}
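
Editorial note, not part of the patch: to see what the File::Find loop above hands to MakeMaker, this hedged snippet (run from perl/) prints the resulting module-to-install-path map on its own.

# Hedged sketch: print the %pm map that the File::Find loop in
# Makefile.PL above would build.  Run from the perl/ directory.
use strict;
use warnings;
use File::Find;

my %pm;
find(sub {
	return unless /\.pm$/;
	(my $pm_path = $File::Find::name) =~ s{^\./}{};	# File::Find may prepend ./
	$pm{$pm_path} = '$(INST_LIBDIR)/' . $pm_path;
}, 'Git', 'Git.pm');

print "$_ => $pm{$_}\n" for sort keys %pm;
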
diff --git a/read-cache.c b/read-cache.c
index 2357afaa6..2f8159fb1 100644
--- a/read-cache.c
+++ b/read-cache.c
@@ -17,6 +17,10 @@
static struct cache_entry *refresh_cache_entry(struct cache_entry *ce, int really);
+/* Mask for the name length in ce_flags in the on-disk index */
+
+#define CE_NAMEMASK (0x0fff)
+
/* Index extensions.
*
* The first letter should be 'A'..'Z' for extensions that are not
@@ -54,8 +58,8 @@ void rename_index_entry_at(struct index_state *istate, int nr, const char *new_n
new = xmalloc(cache_entry_size(namelen));
copy_cache_entry(new, old);
- new->ce_flags &= ~(CE_STATE_MASK | CE_NAMEMASK);
- new->ce_flags |= (namelen >= CE_NAMEMASK ? CE_NAMEMASK : namelen);
+ new->ce_flags &= ~CE_STATE_MASK;
+ new->ce_namelen = namelen;
memcpy(new->name, new_name, namelen + 1);
cache_tree_invalidate_path(istate->cache_tree, old->name);
@@ -395,17 +399,10 @@ int df_name_compare(const char *name1, int len1, int mode1,
return c1 - c2;
}
-int cache_name_compare(const char *name1, int flags1, const char *name2, int flags2)
+int cache_name_stage_compare(const char *name1, int len1, int stage1, const char *name2, int len2, int stage2)
{
- int len1, len2, len, cmp;
-
- len1 = flags1 & CE_NAMEMASK;
- if (CE_NAMEMASK <= len1)
- len1 = strlen(name1 + CE_NAMEMASK) + CE_NAMEMASK;
- len2 = flags2 & CE_NAMEMASK;
- if (CE_NAMEMASK <= len2)
- len2 = strlen(name2 + CE_NAMEMASK) + CE_NAMEMASK;
- len = len1 < len2 ? len1 : len2;
+ int len = len1 < len2 ? len1 : len2;
+ int cmp;
cmp = memcmp(name1, name2, len);
if (cmp)
@@ -415,18 +412,19 @@ int cache_name_compare(const char *name1, int flags1, const char *name2, int fla
if (len1 > len2)
return 1;
- /* Compare stages */
- flags1 &= CE_STAGEMASK;
- flags2 &= CE_STAGEMASK;
-
- if (flags1 < flags2)
+ if (stage1 < stage2)
return -1;
- if (flags1 > flags2)
+ if (stage1 > stage2)
return 1;
return 0;
}
-int index_name_pos(const struct index_state *istate, const char *name, int namelen)
+int cache_name_compare(const char *name1, int len1, const char *name2, int len2)
+{
+ return cache_name_stage_compare(name1, len1, 0, name2, len2, 0);
+}
+
+int index_name_stage_pos(const struct index_state *istate, const char *name, int namelen, int stage)
{
int first, last;
@@ -435,7 +433,7 @@ int index_name_pos(const struct index_state *istate, const char *name, int namel
while (last > first) {
int next = (last + first) >> 1;
struct cache_entry *ce = istate->cache[next];
- int cmp = cache_name_compare(name, namelen, ce->name, ce->ce_flags);
+ int cmp = cache_name_stage_compare(name, namelen, stage, ce->name, ce_namelen(ce), ce_stage(ce));
if (!cmp)
return next;
if (cmp < 0) {
@@ -447,6 +445,11 @@ int index_name_pos(const struct index_state *istate, const char *name, int namel
return -first-1;
}
+int index_name_pos(const struct index_state *istate, const char *name, int namelen)
+{
+ return index_name_stage_pos(istate, name, namelen, 0);
+}
+
/* Remove entry, return true if there are more entries to go.. */
int remove_index_entry_at(struct index_state *istate, int pos)
{
@@ -586,7 +589,7 @@ int add_to_index(struct index_state *istate, const char *path, struct stat *st,
size = cache_entry_size(namelen);
ce = xcalloc(1, size);
memcpy(ce->name, path, namelen);
- ce->ce_flags = namelen;
+ ce->ce_namelen = namelen;
if (!intent_only)
fill_stat_cache_info(ce, st);
else
@@ -688,7 +691,8 @@ struct cache_entry *make_cache_entry(unsigned int mode,
hashcpy(ce->sha1, sha1);
memcpy(ce->name, path, len);
- ce->ce_flags = create_ce_flags(len, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = len;
ce->ce_mode = create_ce_mode(mode);
if (refresh)
@@ -825,7 +829,7 @@ static int has_dir_name(struct index_state *istate,
}
len = slash - name;
- pos = index_name_pos(istate, name, create_ce_flags(len, stage));
+ pos = index_name_stage_pos(istate, name, len, stage);
if (pos >= 0) {
/*
* Found one, but not so fast. This could
@@ -915,7 +919,7 @@ static int add_index_entry_with_check(struct index_state *istate, struct cache_e
int new_only = option & ADD_CACHE_NEW_ONLY;
cache_tree_invalidate_path(istate->cache_tree, ce->name);
- pos = index_name_pos(istate, ce->name, ce->ce_flags);
+ pos = index_name_stage_pos(istate, ce->name, ce_namelen(ce), ce_stage(ce));
/* existing match? Just replace it. */
if (pos >= 0) {
@@ -947,7 +951,7 @@ static int add_index_entry_with_check(struct index_state *istate, struct cache_e
if (!ok_to_replace)
return error("'%s' appears as both a file and as a directory",
ce->name);
- pos = index_name_pos(istate, ce->name, ce->ce_flags);
+ pos = index_name_stage_pos(istate, ce->name, ce_namelen(ce), ce_stage(ce));
pos = -pos-1;
}
return pos + 1;
@@ -1324,7 +1328,8 @@ static struct cache_entry *cache_entry_from_ondisk(struct ondisk_cache_entry *on
ce->ce_uid = ntoh_l(ondisk->uid);
ce->ce_gid = ntoh_l(ondisk->gid);
ce->ce_size = ntoh_l(ondisk->size);
- ce->ce_flags = flags;
+ ce->ce_flags = flags & ~CE_NAMEMASK;
+ ce->ce_namelen = len;
hashcpy(ce->sha1, ondisk->sha1);
memcpy(ce->name, name, len);
ce->name[len] = '\0';
@@ -1651,6 +1656,8 @@ static void ce_smudge_racily_clean_entry(struct cache_entry *ce)
static char *copy_cache_entry_to_ondisk(struct ondisk_cache_entry *ondisk,
struct cache_entry *ce)
{
+ short flags;
+
ondisk->ctime.sec = htonl(ce->ce_ctime.sec);
ondisk->mtime.sec = htonl(ce->ce_mtime.sec);
ondisk->ctime.nsec = htonl(ce->ce_ctime.nsec);
@@ -1662,7 +1669,10 @@ static char *copy_cache_entry_to_ondisk(struct ondisk_cache_entry *ondisk,
ondisk->gid = htonl(ce->ce_gid);
ondisk->size = htonl(ce->ce_size);
hashcpy(ondisk->sha1, ce->sha1);
- ondisk->flags = htons(ce->ce_flags);
+
+ flags = ce->ce_flags;
+ flags |= (ce_namelen(ce) >= CE_NAMEMASK ? CE_NAMEMASK : ce_namelen(ce));
+ ondisk->flags = htons(flags);
if (ce->ce_flags & CE_EXTENDED) {
struct ondisk_cache_entry_extended *ondisk2;
ondisk2 = (struct ondisk_cache_entry_extended *)ondisk;
@@ -1850,7 +1860,8 @@ int read_index_unmerged(struct index_state *istate)
size = cache_entry_size(len);
new_ce = xcalloc(1, size);
memcpy(new_ce->name, ce->name, len);
- new_ce->ce_flags = create_ce_flags(len, 0) | CE_CONFLICTED;
+ new_ce->ce_flags = create_ce_flags(0) | CE_CONFLICTED;
+ new_ce->ce_namelen = len;
new_ce->ce_mode = ce->ce_mode;
if (add_index_entry(istate, new_ce, 0))
return error("%s: cannot drop to stage #0",
diff --git a/revision.c b/revision.c
index 5b81a92e3..9e8f47a25 100644
--- a/revision.c
+++ b/revision.c
@@ -1000,7 +1000,7 @@ static int add_parents_only(struct rev_info *revs, const char *arg_, int flags)
flags ^= UNINTERESTING;
arg++;
}
- if (get_sha1(arg, sha1))
+ if (get_sha1_committish(arg, sha1))
return 0;
while (1) {
it = get_reference(revs, arg, sha1, 0);
@@ -1114,16 +1114,16 @@ static void prepare_show_merge(struct rev_info *revs)
revs->limited = 1;
}
-int handle_revision_arg(const char *arg_, struct rev_info *revs,
- int flags,
- int cant_be_filename)
+int handle_revision_arg(const char *arg_, struct rev_info *revs, int flags, unsigned revarg_opt)
{
- unsigned mode;
+ struct object_context oc;
char *dotdot;
struct object *object;
unsigned char sha1[20];
int local_flags;
const char *arg = arg_;
+ int cant_be_filename = revarg_opt & REVARG_CANNOT_BE_FILENAME;
+ unsigned get_sha1_flags = 0;
dotdot = strstr(arg, "..");
if (dotdot) {
@@ -1141,8 +1141,8 @@ int handle_revision_arg(const char *arg_, struct rev_info *revs,
next = "HEAD";
if (dotdot == arg)
this = "HEAD";
- if (!get_sha1(this, from_sha1) &&
- !get_sha1(next, sha1)) {
+ if (!get_sha1_committish(this, from_sha1) &&
+ !get_sha1_committish(next, sha1)) {
struct commit *a, *b;
struct commit_list *exclude;
@@ -1201,13 +1201,17 @@ int handle_revision_arg(const char *arg_, struct rev_info *revs,
local_flags = UNINTERESTING;
arg++;
}
- if (get_sha1_with_mode(arg, sha1, &mode))
+
+ if (revarg_opt & REVARG_COMMITTISH)
+ get_sha1_flags = GET_SHA1_COMMITTISH;
+
+ if (get_sha1_with_context(arg, get_sha1_flags, sha1, &oc))
return revs->ignore_missing ? 0 : -1;
if (!cant_be_filename)
verify_non_filename(revs->prefix, arg);
object = get_reference(revs, arg, sha1, flags ^ local_flags);
add_rev_cmdline(revs, object, arg_, REV_CMD_REV, flags ^ local_flags);
- add_pending_object_with_mode(revs, object, arg, mode);
+ add_pending_object_with_mode(revs, object, arg, oc.mode);
return 0;
}
@@ -1257,7 +1261,7 @@ static void read_revisions_from_stdin(struct rev_info *revs,
}
die("options not supported in --stdin mode");
}
- if (handle_revision_arg(sb.buf, revs, 0, 1))
+ if (handle_revision_arg(sb.buf, revs, 0, REVARG_CANNOT_BE_FILENAME))
die("bad revision '%s'", sb.buf);
}
if (seen_dashdash)
@@ -1708,7 +1712,7 @@ static int handle_revision_pseudo_opt(const char *submodule,
*/
int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct setup_revision_opt *opt)
{
- int i, flags, left, seen_dashdash, read_from_stdin, got_rev_arg = 0;
+ int i, flags, left, seen_dashdash, read_from_stdin, got_rev_arg = 0, revarg_opt;
struct cmdline_pathspec prune_data;
const char *submodule = NULL;
@@ -1736,6 +1740,9 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
/* Second, deal with arguments and options */
flags = 0;
+ revarg_opt = opt ? opt->revarg_opt : 0;
+ if (seen_dashdash)
+ revarg_opt |= REVARG_CANNOT_BE_FILENAME;
read_from_stdin = 0;
for (left = i = 1; i < argc; i++) {
const char *arg = argv[i];
@@ -1771,7 +1778,8 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
continue;
}
- if (handle_revision_arg(arg, revs, flags, seen_dashdash)) {
+
+ if (handle_revision_arg(arg, revs, flags, revarg_opt)) {
int j;
if (seen_dashdash || *arg == '^')
die("bad revision '%s'", arg);
@@ -1822,11 +1830,11 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, struct s
if (revs->def && !revs->pending.nr && !got_rev_arg) {
unsigned char sha1[20];
struct object *object;
- unsigned mode;
- if (get_sha1_with_mode(revs->def, sha1, &mode))
+ struct object_context oc;
+ if (get_sha1_with_context(revs->def, 0, sha1, &oc))
die("bad default revision '%s'", revs->def);
object = get_reference(revs, revs->def, sha1, 0);
- add_pending_object_with_mode(revs, object, revs->def, mode);
+ add_pending_object_with_mode(revs, object, revs->def, oc.mode);
}
/* Did the user ask for any diff output? Run the diff! */
@@ -2361,29 +2369,28 @@ static struct commit *get_revision_internal(struct rev_info *revs)
}
/*
- * Now pick up what they want to give us
+ * If our max_count counter has reached zero, then we are done. We
+ * don't simply return NULL because we still might need to show
+ * boundary commits. But we want to avoid calling get_revision_1, which
+ * might do a considerable amount of work finding the next commit only
+ * for us to throw it away.
+ *
+ * If it is non-zero, then either we don't have a max_count at all
+ * (-1), or it is still counting, in which case we decrement.
*/
- c = get_revision_1(revs);
- if (c) {
- while (0 < revs->skip_count) {
- revs->skip_count--;
- c = get_revision_1(revs);
- if (!c)
- break;
+ if (revs->max_count) {
+ c = get_revision_1(revs);
+ if (c) {
+ while (0 < revs->skip_count) {
+ revs->skip_count--;
+ c = get_revision_1(revs);
+ if (!c)
+ break;
+ }
}
- }
- /*
- * Check the max_count.
- */
- switch (revs->max_count) {
- case -1:
- break;
- case 0:
- c = NULL;
- break;
- default:
- revs->max_count--;
+ if (revs->max_count > 0)
+ revs->max_count--;
}
if (c)
diff --git a/revision.h b/revision.h
index 863f4f645..cb5ab3513 100644
--- a/revision.h
+++ b/revision.h
@@ -184,6 +184,7 @@ struct setup_revision_opt {
void (*tweak)(struct rev_info *, struct setup_revision_opt *);
const char *submodule;
int assume_dashdash;
+ unsigned revarg_opt;
};
extern void init_revisions(struct rev_info *revs, const char *prefix);
@@ -191,7 +192,9 @@ extern int setup_revisions(int argc, const char **argv, struct rev_info *revs, s
extern void parse_revision_opt(struct rev_info *revs, struct parse_opt_ctx_t *ctx,
const struct option *options,
const char * const usagestr[]);
-extern int handle_revision_arg(const char *arg, struct rev_info *revs,int flags,int cant_be_filename);
+#define REVARG_CANNOT_BE_FILENAME 01
+#define REVARG_COMMITTISH 02
+extern int handle_revision_arg(const char *arg, struct rev_info *revs, int flags, unsigned revarg_opt);
extern void reset_revision_walk(void);
extern int prepare_revision_walk(struct rev_info *revs);
diff --git a/setup.c b/setup.c
index e11497720..9139beefc 100644
--- a/setup.c
+++ b/setup.c
@@ -77,9 +77,6 @@ static void NORETURN die_verify_filename(const char *prefix,
const char *arg,
int diagnose_misspelt_rev)
{
- unsigned char sha1[20];
- unsigned mode;
-
if (!diagnose_misspelt_rev)
die("%s: no such path in the working tree.\n"
"Use '-- <path>...' to specify paths that do not exist locally.",
@@ -88,11 +85,10 @@ static void NORETURN die_verify_filename(const char *prefix,
* Saying "'(icase)foo' does not exist in the index" when the
* user gave us ":(icase)foo" is just stupid. A magic pathspec
* begins with a colon and is followed by a non-alnum; do not
- * let get_sha1_with_mode_1(only_to_die=1) to even trigger.
+ * let maybe_die_on_misspelt_object_name() even trigger.
*/
if (!(arg[0] == ':' && !isalnum(arg[1])))
- /* try a detailed diagnostic ... */
- get_sha1_with_mode_1(arg, sha1, &mode, 1, prefix);
+ maybe_die_on_misspelt_object_name(arg, prefix);
/* ... or fall back the most general message. */
die("ambiguous argument '%s': unknown revision or path not in the working tree.\n"
diff --git a/sha1_file.c b/sha1_file.c
index 4ccaf7ac1..af5cfbde6 100644
--- a/sha1_file.c
+++ b/sha1_file.c
@@ -298,7 +298,7 @@ static int link_alt_odb_entry(const char * entry, int len, const char * relative
return -1;
}
}
- if (!memcmp(ent->base, objdir, pfxlen)) {
+ if (!strcmp(ent->base, objdir)) {
free(ent);
return -1;
}
diff --git a/sha1_name.c b/sha1_name.c
index 5d81ea056..95003c77e 100644
--- a/sha1_name.c
+++ b/sha1_name.c
@@ -9,14 +9,82 @@
static int get_sha1_oneline(const char *, unsigned char *, struct commit_list *);
-static int find_short_object_filename(int len, const char *name, unsigned char *sha1)
+typedef int (*disambiguate_hint_fn)(const unsigned char *, void *);
+
+struct disambiguate_state {
+ disambiguate_hint_fn fn;
+ void *cb_data;
+ unsigned char candidate[20];
+ unsigned candidate_exists:1;
+ unsigned candidate_checked:1;
+ unsigned candidate_ok:1;
+ unsigned disambiguate_fn_used:1;
+ unsigned ambiguous:1;
+ unsigned always_call_fn:1;
+};
+
+static void update_candidates(struct disambiguate_state *ds, const unsigned char *current)
+{
+ if (ds->always_call_fn) {
+ ds->ambiguous = ds->fn(current, ds->cb_data) ? 1 : 0;
+ return;
+ }
+ if (!ds->candidate_exists) {
+ /* this is the first candidate */
+ hashcpy(ds->candidate, current);
+ ds->candidate_exists = 1;
+ return;
+ } else if (!hashcmp(ds->candidate, current)) {
+ /* the same as what we already have seen */
+ return;
+ }
+
+ if (!ds->fn) {
+ /* cannot disambiguate between ds->candidate and current */
+ ds->ambiguous = 1;
+ return;
+ }
+
+ if (!ds->candidate_checked) {
+ ds->candidate_ok = ds->fn(ds->candidate, ds->cb_data);
+ ds->disambiguate_fn_used = 1;
+ ds->candidate_checked = 1;
+ }
+
+ if (!ds->candidate_ok) {
+		/* discard the candidate; we know it does not satisfy fn */
+ hashcpy(ds->candidate, current);
+ ds->candidate_checked = 0;
+ return;
+ }
+
+ /* if we reach this point, we know ds->candidate satisfies fn */
+ if (ds->fn(current, ds->cb_data)) {
+ /*
+ * if both current and candidate satisfy fn, we cannot
+ * disambiguate.
+ */
+ ds->candidate_ok = 0;
+ ds->ambiguous = 1;
+ }
+
+ /* otherwise, current can be discarded and candidate is still good */
+}
+
+static void find_short_object_filename(int len, const char *hex_pfx, struct disambiguate_state *ds)
{
struct alternate_object_database *alt;
char hex[40];
- int found = 0;
static struct alternate_object_database *fakeent;
if (!fakeent) {
+ /*
+ * Create a "fake" alternate object database that
+ * points to our own object database, to make it
+ * easier to get a temporary working space in
+ * alt->name/alt->base while iterating over the
+ * object databases including our own.
+ */
const char *objdir = get_object_directory();
int objdir_len = strlen(objdir);
int entlen = objdir_len + 43;
@@ -27,33 +95,28 @@ static int find_short_object_filename(int len, const char *name, unsigned char *
}
fakeent->next = alt_odb_list;
- sprintf(hex, "%.2s", name);
- for (alt = fakeent; alt && found < 2; alt = alt->next) {
+ sprintf(hex, "%.2s", hex_pfx);
+ for (alt = fakeent; alt && !ds->ambiguous; alt = alt->next) {
struct dirent *de;
DIR *dir;
- sprintf(alt->name, "%.2s/", name);
+ sprintf(alt->name, "%.2s/", hex_pfx);
dir = opendir(alt->base);
if (!dir)
continue;
- while ((de = readdir(dir)) != NULL) {
+
+ while (!ds->ambiguous && (de = readdir(dir)) != NULL) {
+ unsigned char sha1[20];
+
if (strlen(de->d_name) != 38)
continue;
- if (memcmp(de->d_name, name + 2, len - 2))
+ if (memcmp(de->d_name, hex_pfx + 2, len - 2))
continue;
- if (!found) {
- memcpy(hex + 2, de->d_name, 38);
- found++;
- }
- else if (memcmp(hex + 2, de->d_name, 38)) {
- found = 2;
- break;
- }
+ memcpy(hex + 2, de->d_name, 38);
+ if (!get_sha1_hex(hex, sha1))
+ update_candidates(ds, sha1);
}
closedir(dir);
}
- if (found == 1)
- return get_sha1_hex(hex, sha1) == 0;
- return found;
}
static int match_sha(unsigned len, const unsigned char *a, const unsigned char *b)
@@ -71,103 +134,157 @@ static int match_sha(unsigned len, const unsigned char *a, const unsigned char *
return 1;
}
-static int find_short_packed_object(int len, const unsigned char *match, unsigned char *sha1)
+static void unique_in_pack(int len,
+ const unsigned char *bin_pfx,
+ struct packed_git *p,
+ struct disambiguate_state *ds)
{
- struct packed_git *p;
- const unsigned char *found_sha1 = NULL;
- int found = 0;
-
- prepare_packed_git();
- for (p = packed_git; p && found < 2; p = p->next) {
- uint32_t num, last;
- uint32_t first = 0;
- open_pack_index(p);
- num = p->num_objects;
- last = num;
- while (first < last) {
- uint32_t mid = (first + last) / 2;
- const unsigned char *now;
- int cmp;
-
- now = nth_packed_object_sha1(p, mid);
- cmp = hashcmp(match, now);
- if (!cmp) {
- first = mid;
- break;
- }
- if (cmp > 0) {
- first = mid+1;
- continue;
- }
- last = mid;
+ uint32_t num, last, i, first = 0;
+ const unsigned char *current = NULL;
+
+ open_pack_index(p);
+ num = p->num_objects;
+ last = num;
+ while (first < last) {
+ uint32_t mid = (first + last) / 2;
+ const unsigned char *current;
+ int cmp;
+
+ current = nth_packed_object_sha1(p, mid);
+ cmp = hashcmp(bin_pfx, current);
+ if (!cmp) {
+ first = mid;
+ break;
}
- if (first < num) {
- const unsigned char *now, *next;
- now = nth_packed_object_sha1(p, first);
- if (match_sha(len, match, now)) {
- next = nth_packed_object_sha1(p, first+1);
- if (!next|| !match_sha(len, match, next)) {
- /* unique within this pack */
- if (!found) {
- found_sha1 = now;
- found++;
- }
- else if (hashcmp(found_sha1, now)) {
- found = 2;
- break;
- }
- }
- else {
- /* not even unique within this pack */
- found = 2;
- break;
- }
- }
+ if (cmp > 0) {
+ first = mid+1;
+ continue;
}
+ last = mid;
+ }
+
+ /*
+ * At this point, "first" is the location of the lowest object
+ * with an object name that could match "bin_pfx". See if we have
+	 * zero, one, or more objects that actually match.
+ */
+ for (i = first; i < num && !ds->ambiguous; i++) {
+ current = nth_packed_object_sha1(p, i);
+ if (!match_sha(len, bin_pfx, current))
+ break;
+ update_candidates(ds, current);
}
- if (found == 1)
- hashcpy(sha1, found_sha1);
- return found;
+}
+
+static void find_short_packed_object(int len, const unsigned char *bin_pfx,
+ struct disambiguate_state *ds)
+{
+ struct packed_git *p;
+
+ prepare_packed_git();
+ for (p = packed_git; p && !ds->ambiguous; p = p->next)
+ unique_in_pack(len, bin_pfx, p, ds);
}
#define SHORT_NAME_NOT_FOUND (-1)
#define SHORT_NAME_AMBIGUOUS (-2)
-static int find_unique_short_object(int len, char *canonical,
- unsigned char *res, unsigned char *sha1)
+static int finish_object_disambiguation(struct disambiguate_state *ds,
+ unsigned char *sha1)
{
- int has_unpacked, has_packed;
- unsigned char unpacked_sha1[20], packed_sha1[20];
+ if (ds->ambiguous)
+ return SHORT_NAME_AMBIGUOUS;
- prepare_alt_odb();
- has_unpacked = find_short_object_filename(len, canonical, unpacked_sha1);
- has_packed = find_short_packed_object(len, res, packed_sha1);
- if (!has_unpacked && !has_packed)
+ if (!ds->candidate_exists)
return SHORT_NAME_NOT_FOUND;
- if (1 < has_unpacked || 1 < has_packed)
+
+ if (!ds->candidate_checked)
+ /*
+ * If this is the only candidate, there is no point
+ * calling the disambiguation hint callback.
+ *
+ * On the other hand, if the current candidate
+ * replaced an earlier candidate that did _not_ pass
+ * the disambiguation hint callback, then we do have
+	 * more than one object that matches the short name
+ * given, so we should make sure this one matches;
+ * otherwise, if we discovered this one and the one
+ * that we previously discarded in the reverse order,
+ * we would end up showing different results in the
+ * same repository!
+ */
+ ds->candidate_ok = (!ds->disambiguate_fn_used ||
+ ds->fn(ds->candidate, ds->cb_data));
+
+ if (!ds->candidate_ok)
return SHORT_NAME_AMBIGUOUS;
- if (has_unpacked != has_packed) {
- hashcpy(sha1, (has_packed ? packed_sha1 : unpacked_sha1));
+
+ hashcpy(sha1, ds->candidate);
+ return 0;
+}
+
+static int disambiguate_commit_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ int kind = sha1_object_info(sha1, NULL);
+ return kind == OBJ_COMMIT;
+}
+
+static int disambiguate_committish_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ struct object *obj;
+ int kind;
+
+ kind = sha1_object_info(sha1, NULL);
+ if (kind == OBJ_COMMIT)
+ return 1;
+ if (kind != OBJ_TAG)
return 0;
- }
- /* Both have unique ones -- do they match? */
- if (hashcmp(packed_sha1, unpacked_sha1))
- return SHORT_NAME_AMBIGUOUS;
- hashcpy(sha1, packed_sha1);
+
+ /* We need to do this the hard way... */
+ obj = deref_tag(lookup_object(sha1), NULL, 0);
+ if (obj && obj->type == OBJ_COMMIT)
+ return 1;
return 0;
}
-static int get_short_sha1(const char *name, int len, unsigned char *sha1,
- int quietly)
+static int disambiguate_tree_only(const unsigned char *sha1, void *cb_data_unused)
{
- int i, status;
- char canonical[40];
- unsigned char res[20];
+ int kind = sha1_object_info(sha1, NULL);
+ return kind == OBJ_TREE;
+}
- if (len < MINIMUM_ABBREV || len > 40)
- return -1;
- hashclr(res);
- memset(canonical, 'x', 40);
+static int disambiguate_treeish_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ struct object *obj;
+ int kind;
+
+ kind = sha1_object_info(sha1, NULL);
+ if (kind == OBJ_TREE || kind == OBJ_COMMIT)
+ return 1;
+ if (kind != OBJ_TAG)
+ return 0;
+
+ /* We need to do this the hard way... */
+ obj = deref_tag(lookup_object(sha1), NULL, 0);
+ if (obj && (obj->type == OBJ_TREE || obj->type == OBJ_COMMIT))
+ return 1;
+ return 0;
+}
+
+static int disambiguate_blob_only(const unsigned char *sha1, void *cb_data_unused)
+{
+ int kind = sha1_object_info(sha1, NULL);
+ return kind == OBJ_BLOB;
+}
+
+static int prepare_prefixes(const char *name, int len,
+ unsigned char *bin_pfx,
+ char *hex_pfx)
+{
+ int i;
+
+ hashclr(bin_pfx);
+ memset(hex_pfx, 'x', 40);
for (i = 0; i < len ;i++) {
unsigned char c = name[i];
unsigned char val;
@@ -181,18 +298,76 @@ static int get_short_sha1(const char *name, int len, unsigned char *sha1,
}
else
return -1;
- canonical[i] = c;
+ hex_pfx[i] = c;
if (!(i & 1))
val <<= 4;
- res[i >> 1] |= val;
+ bin_pfx[i >> 1] |= val;
}
+ return 0;
+}
+
+static int get_short_sha1(const char *name, int len, unsigned char *sha1,
+ unsigned flags)
+{
+ int status;
+ char hex_pfx[40];
+ unsigned char bin_pfx[20];
+ struct disambiguate_state ds;
+ int quietly = !!(flags & GET_SHA1_QUIETLY);
+
+ if (len < MINIMUM_ABBREV || len > 40)
+ return -1;
+ if (prepare_prefixes(name, len, bin_pfx, hex_pfx) < 0)
+ return -1;
+
+ prepare_alt_odb();
+
+ memset(&ds, 0, sizeof(ds));
+ if (flags & GET_SHA1_COMMIT)
+ ds.fn = disambiguate_commit_only;
+ else if (flags & GET_SHA1_COMMITTISH)
+ ds.fn = disambiguate_committish_only;
+ else if (flags & GET_SHA1_TREE)
+ ds.fn = disambiguate_tree_only;
+ else if (flags & GET_SHA1_TREEISH)
+ ds.fn = disambiguate_treeish_only;
+ else if (flags & GET_SHA1_BLOB)
+ ds.fn = disambiguate_blob_only;
+
+ find_short_object_filename(len, hex_pfx, &ds);
+ find_short_packed_object(len, bin_pfx, &ds);
+ status = finish_object_disambiguation(&ds, sha1);
- status = find_unique_short_object(i, canonical, res, sha1);
if (!quietly && (status == SHORT_NAME_AMBIGUOUS))
- return error("short SHA1 %.*s is ambiguous.", len, canonical);
+ return error("short SHA1 %.*s is ambiguous.", len, hex_pfx);
return status;
}
+
+int for_each_abbrev(const char *prefix, each_abbrev_fn fn, void *cb_data)
+{
+ char hex_pfx[40];
+ unsigned char bin_pfx[20];
+ struct disambiguate_state ds;
+ int len = strlen(prefix);
+
+ if (len < MINIMUM_ABBREV || len > 40)
+ return -1;
+ if (prepare_prefixes(prefix, len, bin_pfx, hex_pfx) < 0)
+ return -1;
+
+ prepare_alt_odb();
+
+ memset(&ds, 0, sizeof(ds));
+ ds.always_call_fn = 1;
+ ds.cb_data = cb_data;
+ ds.fn = fn;
+
+ find_short_object_filename(len, hex_pfx, &ds);
+ find_short_packed_object(len, bin_pfx, &ds);
+ return ds.ambiguous;
+}
+
const char *find_unique_abbrev(const unsigned char *sha1, int len)
{
int status, exists;
@@ -204,7 +379,7 @@ const char *find_unique_abbrev(const unsigned char *sha1, int len)
return hex;
while (len < 40) {
unsigned char sha1_ret[20];
- status = get_short_sha1(hex, len, sha1_ret, 1);
+ status = get_short_sha1(hex, len, sha1_ret, GET_SHA1_QUIETLY);
if (exists
? !status
: status == SHORT_NAME_NOT_FOUND) {
@@ -255,7 +430,7 @@ static inline int upstream_mark(const char *string, int len)
return 0;
}
-static int get_sha1_1(const char *name, int len, unsigned char *sha1);
+static int get_sha1_1(const char *name, int len, unsigned char *sha1, unsigned lookup_flags);
static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
{
@@ -292,7 +467,7 @@ static int get_sha1_basic(const char *str, int len, unsigned char *sha1)
ret = interpret_branch_name(str+at, &buf);
if (ret > 0) {
/* substitute this branch name and restart */
- return get_sha1_1(buf.buf, buf.len, sha1);
+ return get_sha1_1(buf.buf, buf.len, sha1, 0);
} else if (ret == 0) {
return -1;
}
@@ -362,7 +537,7 @@ static int get_parent(const char *name, int len,
unsigned char *result, int idx)
{
unsigned char sha1[20];
- int ret = get_sha1_1(name, len, sha1);
+ int ret = get_sha1_1(name, len, sha1, GET_SHA1_COMMITTISH);
struct commit *commit;
struct commit_list *p;
@@ -395,7 +570,7 @@ static int get_nth_ancestor(const char *name, int len,
struct commit *commit;
int ret;
- ret = get_sha1_1(name, len, sha1);
+ ret = get_sha1_1(name, len, sha1, GET_SHA1_COMMITTISH);
if (ret)
return ret;
commit = lookup_commit_reference(sha1);
@@ -441,6 +616,7 @@ static int peel_onion(const char *name, int len, unsigned char *sha1)
unsigned char outer[20];
const char *sp;
unsigned int expected_type = 0;
+ unsigned lookup_flags = 0;
struct object *o;
/*
@@ -476,7 +652,10 @@ static int peel_onion(const char *name, int len, unsigned char *sha1)
else
return -1;
- if (get_sha1_1(name, sp - name - 2, outer))
+ if (expected_type == OBJ_COMMIT)
+ lookup_flags = GET_SHA1_COMMITTISH;
+
+ if (get_sha1_1(name, sp - name - 2, outer, lookup_flags))
return -1;
o = parse_object(outer);
@@ -525,6 +704,7 @@ static int peel_onion(const char *name, int len, unsigned char *sha1)
static int get_describe_name(const char *name, int len, unsigned char *sha1)
{
const char *cp;
+ unsigned flags = GET_SHA1_QUIETLY | GET_SHA1_COMMIT;
for (cp = name + len - 1; name + 2 <= cp; cp--) {
char ch = *cp;
@@ -535,14 +715,14 @@ static int get_describe_name(const char *name, int len, unsigned char *sha1)
if (ch == 'g' && cp[-1] == '-') {
cp++;
len -= cp - name;
- return get_short_sha1(cp, len, sha1, 1);
+ return get_short_sha1(cp, len, sha1, flags);
}
}
}
return -1;
}
-static int get_sha1_1(const char *name, int len, unsigned char *sha1)
+static int get_sha1_1(const char *name, int len, unsigned char *sha1, unsigned lookup_flags)
{
int ret, has_suffix;
const char *cp;
@@ -587,7 +767,7 @@ static int get_sha1_1(const char *name, int len, unsigned char *sha1)
if (!ret)
return 0;
- return get_short_sha1(name, len, sha1, 0);
+ return get_short_sha1(name, len, sha1, lookup_flags);
}
/*
@@ -769,7 +949,7 @@ int get_sha1_mb(const char *name, unsigned char *sha1)
struct strbuf sb;
strbuf_init(&sb, dots - name);
strbuf_add(&sb, name, dots - name);
- st = get_sha1(sb.buf, sha1_tmp);
+ st = get_sha1_committish(sb.buf, sha1_tmp);
strbuf_release(&sb);
}
if (st)
@@ -778,7 +958,7 @@ int get_sha1_mb(const char *name, unsigned char *sha1)
if (!one)
return -1;
- if (get_sha1(dots[3] ? (dots + 3) : "HEAD", sha1_tmp))
+ if (get_sha1_committish(dots[3] ? (dots + 3) : "HEAD", sha1_tmp))
return -1;
two = lookup_commit_reference_gently(sha1_tmp, 0);
if (!two)
@@ -905,7 +1085,52 @@ int strbuf_check_branch_ref(struct strbuf *sb, const char *name)
int get_sha1(const char *name, unsigned char *sha1)
{
struct object_context unused;
- return get_sha1_with_context(name, sha1, &unused);
+ return get_sha1_with_context(name, 0, sha1, &unused);
+}
+
+/*
+ * Many callers know that the user meant to name a committish from the
+ * syntactic position where the object name appears.  Calling this
+ * function allows the machinery to disambiguate shorter-than-unique
+ * abbreviated object names between committish and others.
+ *
+ * Note that this does NOT error out when the named object is not a
+ * committish. It is merely to give a hint to the disambiguation
+ * machinery.
+ */
+int get_sha1_committish(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_COMMITTISH,
+ sha1, &unused);
+}
+
+int get_sha1_treeish(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_TREEISH,
+ sha1, &unused);
+}
+
+int get_sha1_commit(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_COMMIT,
+ sha1, &unused);
+}
+
+int get_sha1_tree(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_TREE,
+ sha1, &unused);
+}
+
+int get_sha1_blob(const char *name, unsigned char *sha1)
+{
+ struct object_context unused;
+ return get_sha1_with_context(name, GET_SHA1_BLOB,
+ sha1, &unused);
}
/* Must be called only when object_name:filename doesn't exist. */
@@ -1004,16 +1229,6 @@ static void diagnose_invalid_index_path(int stage,
}
-int get_sha1_with_mode_1(const char *name, unsigned char *sha1, unsigned *mode,
- int only_to_die, const char *prefix)
-{
- struct object_context oc;
- int ret;
- ret = get_sha1_with_context_1(name, sha1, &oc, only_to_die, prefix);
- *mode = oc.mode;
- return ret;
-}
-
static char *resolve_relative_path(const char *rel)
{
if (prefixcmp(rel, "./") && prefixcmp(rel, "../"))
@@ -1031,20 +1246,24 @@ static char *resolve_relative_path(const char *rel)
rel);
}
-int get_sha1_with_context_1(const char *name, unsigned char *sha1,
- struct object_context *oc,
- int only_to_die, const char *prefix)
+static int get_sha1_with_context_1(const char *name,
+ unsigned flags,
+ const char *prefix,
+ unsigned char *sha1,
+ struct object_context *oc)
{
int ret, bracket_depth;
int namelen = strlen(name);
const char *cp;
+ int only_to_die = flags & GET_SHA1_ONLY_TO_DIE;
memset(oc, 0, sizeof(*oc));
oc->mode = S_IFINVALID;
- ret = get_sha1_1(name, namelen, sha1);
+ ret = get_sha1_1(name, namelen, sha1, flags);
if (!ret)
return ret;
- /* sha1:path --> object name of path in ent sha1
+ /*
+ * sha1:path --> object name of path in ent sha1
* :path -> object name of absolute path in index
* :./path -> object name of path relative to cwd in index
* :[0-3]:path -> object name of path in index at stage
@@ -1119,7 +1338,7 @@ int get_sha1_with_context_1(const char *name, unsigned char *sha1,
strncpy(object_name, name, cp-name);
object_name[cp-name] = '\0';
}
- if (!get_sha1_1(name, cp-name, tree_sha1)) {
+ if (!get_sha1_1(name, cp-name, tree_sha1, GET_SHA1_TREEISH)) {
const char *filename = cp+1;
char *new_filename = NULL;
@@ -1146,3 +1365,22 @@ int get_sha1_with_context_1(const char *name, unsigned char *sha1,
}
return ret;
}
+
+/*
+ * Call this function when you know the "name" given by the end user must
+ * name an object but it does not; the function _may_ die with a better
+ * diagnostic message than "no such object 'name'", e.g. "Path 'doc' does
+ * not exist in 'HEAD'" when given "HEAD:doc". If it returns instead, the
+ * caller has a chance to diagnose the error further.
+ */
+void maybe_die_on_misspelt_object_name(const char *name, const char *prefix)
+{
+ struct object_context oc;
+ unsigned char sha1[20];
+ get_sha1_with_context_1(name, GET_SHA1_ONLY_TO_DIE, prefix, sha1, &oc);
+}
+
+int get_sha1_with_context(const char *str, unsigned flags, unsigned char *sha1, struct object_context *orc)
+{
+ return get_sha1_with_context_1(str, flags, NULL, sha1, orc);
+}
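The GET_SHA1_* hints above are internal, but their effect is user-visible: a short object name that is ambiguous on its own can still resolve when the surrounding syntax implies a type. A rough command-line sketch, assuming a repository laid out like the t1512 script further down (many objects sharing the prefix 000000000, a file named a0blgqsjc):

	# A bare prefix shared by a blob, a tree, a commit and a tag is
	# ambiguous, but a type hint narrows the candidates.
	git rev-parse --verify 000000000^{commit}    # only one commit matches
	git rev-parse --verify 000000000:a0blgqsjc   # only one tree-ish matches

	# List every object whose name starts with the given prefix.
	git rev-parse --disambiguate=000000000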
diff --git a/t/Git-SVN/00compile.t b/t/Git-SVN/00compile.t
new file mode 100644
index 000000000..c92fee453
--- /dev/null
+++ b/t/Git-SVN/00compile.t
@@ -0,0 +1,14 @@
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+
+use Test::More tests => 7;
+
+require_ok 'Git::SVN';
+require_ok 'Git::SVN::Utils';
+require_ok 'Git::SVN::Ra';
+require_ok 'Git::SVN::Log';
+require_ok 'Git::SVN::Migration';
+require_ok 'Git::IndexInfo';
+require_ok 'Git::SVN::GlobSpec';
diff --git a/t/Git-SVN/Utils/can_compress.t b/t/Git-SVN/Utils/can_compress.t
new file mode 100644
index 000000000..d7b49b8d5
--- /dev/null
+++ b/t/Git-SVN/Utils/can_compress.t
@@ -0,0 +1,11 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Test::More 'no_plan';
+
+use Git::SVN::Utils qw(can_compress);
+
+# !! is the "convert this to boolean" operator.
+is !!can_compress(), !!eval { require Compress::Zlib };
diff --git a/t/Git-SVN/Utils/fatal.t b/t/Git-SVN/Utils/fatal.t
new file mode 100644
index 000000000..49e143829
--- /dev/null
+++ b/t/Git-SVN/Utils/fatal.t
@@ -0,0 +1,34 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Test::More 'no_plan';
+
+BEGIN {
+ # Override exit at BEGIN time before Git::SVN::Utils is loaded
+ # so it will see our local exit later.
+ *CORE::GLOBAL::exit = sub(;$) {
+ return @_ ? CORE::exit($_[0]) : CORE::exit();
+ };
+}
+
+use Git::SVN::Utils qw(fatal);
+
+# fatal()
+{
+ # Capture the exit code and prevent exit.
+ my $exit_status;
+ no warnings 'redefine';
+ local *CORE::GLOBAL::exit = sub { $exit_status = $_[0] || 0 };
+
+ # Trap fatal's message to STDERR
+ my $stderr;
+ close STDERR;
+ ok open STDERR, ">", \$stderr;
+
+ fatal "Some", "Stuff", "Happened";
+
+ is $stderr, "Some Stuff Happened\n";
+ is $exit_status, 1;
+}
diff --git a/t/lib-httpd.sh b/t/lib-httpd.sh
index 094d49089..d77354268 100644
--- a/t/lib-httpd.sh
+++ b/t/lib-httpd.sh
@@ -43,6 +43,10 @@ TEST_PATH="$TEST_DIRECTORY"/lib-httpd
HTTPD_ROOT_PATH="$PWD"/httpd
HTTPD_DOCUMENT_ROOT_PATH=$HTTPD_ROOT_PATH/www
+# hack to suppress apache PassEnv warnings
+GIT_VALGRIND=$GIT_VALGRIND; export GIT_VALGRIND
+GIT_VALGRIND_OPTIONS=$GIT_VALGRIND_OPTIONS; export GIT_VALGRIND_OPTIONS
+
if ! test -x "$LIB_HTTPD_PATH"
then
skip_all="skipping test, no web server found at '$LIB_HTTPD_PATH'"
diff --git a/t/lib-httpd/apache.conf b/t/lib-httpd/apache.conf
index de3762e24..36b1596a1 100644
--- a/t/lib-httpd/apache.conf
+++ b/t/lib-httpd/apache.conf
@@ -42,6 +42,9 @@ ErrorLog error.log
</IfModule>
</IfVersion>
+PassEnv GIT_VALGRIND
+PassEnv GIT_VALGRIND_OPTIONS
+
Alias /dumb/ www/
Alias /auth/ www/auth/
@@ -62,7 +65,7 @@ ScriptAlias /smart/ ${GIT_EXEC_PATH}/git-http-backend/
ScriptAlias /smart_noexport/ ${GIT_EXEC_PATH}/git-http-backend/
ScriptAlias /smart_custom_env/ ${GIT_EXEC_PATH}/git-http-backend/
<Directory ${GIT_EXEC_PATH}>
- Options None
+ Options FollowSymlinks
</Directory>
<Files ${GIT_EXEC_PATH}/git-http-backend>
Options ExecCGI
diff --git a/t/t1100-commit-tree-options.sh b/t/t1100-commit-tree-options.sh
index c4414ff57..f8457f9d1 100755
--- a/t/t1100-commit-tree-options.sh
+++ b/t/t1100-commit-tree-options.sh
@@ -7,6 +7,9 @@ test_description='git commit-tree options test
This test checks that git commit-tree can create a specific commit
object by defining all environment variables that it understands.
+
+Also make sure that the command-line parser understands the normal
+"flags first and then non-flag arguments" command line.
'
. ./test-lib.sh
@@ -42,4 +45,19 @@ test_expect_success \
'compare commit' \
'test_cmp expected commit'
+
+test_expect_success 'flags and then non flags' '
+ test_tick &&
+ echo comment text |
+ git commit-tree $(cat treeid) >commitid &&
+ echo comment text |
+ git commit-tree $(cat treeid) -p $(cat commitid) >childid-1 &&
+ echo comment text |
+ git commit-tree -p $(cat commitid) $(cat treeid) >childid-2 &&
+ test_cmp childid-1 childid-2 &&
+ git commit-tree $(cat treeid) -m foo >childid-3 &&
+ git commit-tree -m foo $(cat treeid) >childid-4 &&
+ test_cmp childid-3 childid-4
+'
+
test_done
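The same behaviour can be exercised by hand outside the harness; a minimal sketch, assuming a repository with user.name and user.email configured (the fixed dates make the two commit objects byte-for-byte identical):

	GIT_AUTHOR_DATE='1234567890 +0000'
	GIT_COMMITTER_DATE='1234567890 +0000'
	export GIT_AUTHOR_DATE GIT_COMMITTER_DATE

	tree=$(git write-tree)
	parent=$(echo base | git commit-tree $tree)
	echo child | git commit-tree -p $parent $tree >one   # flag before the tree
	echo child | git commit-tree $tree -p $parent >two   # flag after the tree
	cmp one two                                          # same commit either way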
diff --git a/t/t1306-xdg-files.sh b/t/t1306-xdg-files.sh
index 3c75c3f2e..8b14ab187 100755
--- a/t/t1306-xdg-files.sh
+++ b/t/t1306-xdg-files.sh
@@ -38,6 +38,13 @@ test_expect_success 'read with --get: xdg file exists and ~/.gitconfig doesn'\''
test_cmp expected actual
'
+test_expect_success '$XDG_CONFIG_HOME overrides $HOME/.config/git' '
+ mkdir -p "$HOME"/xdg/git &&
+ echo "[user]name = in_xdg" >"$HOME"/xdg/git/config &&
+ echo in_xdg >expected &&
+ XDG_CONFIG_HOME="$HOME"/xdg git config --get-all user.name >actual &&
+ test_cmp expected actual
+'
test_expect_success 'read with --get: xdg file exists and ~/.gitconfig exists' '
>.gitconfig &&
@@ -80,6 +87,17 @@ test_expect_success 'Exclusion of a file in the XDG ignore file' '
test_must_fail git add to_be_excluded
'
+test_expect_success '$XDG_CONFIG_HOME overrides $HOME/.config/git/ignore' '
+ mkdir -p "$HOME"/xdg/git &&
+ echo content >excluded_by_xdg_only &&
+ echo excluded_by_xdg_only >"$HOME"/xdg/git/ignore &&
+ test_when_finished "git read-tree --empty" &&
+ (XDG_CONFIG_HOME="$HOME/xdg" &&
+ export XDG_CONFIG_HOME &&
+ git add to_be_excluded &&
+ test_must_fail git add excluded_by_xdg_only
+ )
+'
test_expect_success 'Exclusion in both XDG and local ignore files' '
echo to_be_excluded >.gitignore &&
@@ -95,6 +113,13 @@ test_expect_success 'Exclusion in a non-XDG global ignore file' '
test_must_fail git add to_be_excluded
'
+test_expect_success 'Checking XDG ignore file when HOME is unset' '
+ >expected &&
+ (sane_unset HOME &&
+ git config --unset core.excludesfile &&
+ git ls-files --exclude-standard --ignored >actual) &&
+ test_cmp expected actual
+'
test_expect_success 'Checking attributes in the XDG attributes file' '
echo foo >f &&
@@ -106,6 +131,20 @@ test_expect_success 'Checking attributes in the XDG attributes file' '
test_cmp expected actual
'
+test_expect_success 'Checking XDG attributes when HOME is unset' '
+ >expected &&
+ (sane_unset HOME &&
+ git check-attr -a f >actual) &&
+ test_cmp expected actual
+'
+
+test_expect_success '$XDG_CONFIG_HOME overrides $HOME/.config/git/attributes' '
+ mkdir -p "$HOME"/xdg/git &&
+ echo "f attr_f=xdg" >"$HOME"/xdg/git/attributes &&
+ echo "f: attr_f: xdg" >expected &&
+ XDG_CONFIG_HOME="$HOME/xdg" git check-attr -a f >actual &&
+ test_cmp expected actual
+'
test_expect_success 'Checking attributes in both XDG and local attributes files' '
echo "f -attr_f" >.gitattributes &&
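Taken together, these tests pin down the lookup order: when $XDG_CONFIG_HOME is set, it replaces $HOME/.config as the base directory for the per-user config, ignore and attributes files. A small sketch of the same behaviour outside the harness, assuming a fresh repository and no conflicting ~/.gitconfig entries:

	mkdir -p "$HOME/xdg/git"
	printf '[user]\n\tname = in_xdg\n' >"$HOME/xdg/git/config"
	echo excluded_by_xdg_only >"$HOME/xdg/git/ignore"

	XDG_CONFIG_HOME="$HOME/xdg" git config --get user.name    # prints in_xdg
	echo content >excluded_by_xdg_only
	XDG_CONFIG_HOME="$HOME/xdg" git add excluded_by_xdg_only  # refused: path is ignored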
diff --git a/t/t1512-rev-parse-disambiguation.sh b/t/t1512-rev-parse-disambiguation.sh
new file mode 100755
index 000000000..6b3d797ce
--- /dev/null
+++ b/t/t1512-rev-parse-disambiguation.sh
@@ -0,0 +1,264 @@
+#!/bin/sh
+
+test_description='object name disambiguation
+
+Create blobs, trees, commits and a tag that all share the same
+prefix, and make sure "git rev-parse" can take advantage of
+type information to disambiguate short object names that are
+not necessarily unique.
+
+The final history used in the test has five commits, with the bottom
+one tagged as v1.0.0. They all have one regular file each.
+
+ +-------------------------------------------+
+ | |
+ | .-------b3wettvi---- ad2uee |
+ | / / |
+ | a2onsxbvj---czy8f73t--ioiley5o |
+ | |
+ +-------------------------------------------+
+
+'
+
+. ./test-lib.sh
+
+test_expect_success 'blob and tree' '
+ test_tick &&
+ (
+ for i in 0 1 2 3 4 5 6 7 8 9
+ do
+ echo $i
+ done
+ echo
+ echo b1rwzyc3
+ ) >a0blgqsjc &&
+
+ # create one blob 0000000000b36
+ git add a0blgqsjc &&
+
+ # create one tree 0000000000cdc
+ git write-tree
+'
+
+test_expect_success 'warn ambiguity when no candidate matches type hint' '
+ test_must_fail git rev-parse --verify 000000000^{commit} 2>actual &&
+ grep "short SHA1 000000000 is ambiguous" actual
+'
+
+test_expect_success 'disambiguate tree-ish' '
+ # feed tree-ish in an unambiguous way
+ git rev-parse --verify 0000000000cdc:a0blgqsjc &&
+
+ # ambiguous at the object name level, but there is only one
+ # such tree-ish (the other is a blob)
+ git rev-parse --verify 000000000:a0blgqsjc
+'
+
+test_expect_success 'disambiguate blob' '
+ sed -e "s/|$//" >patch <<-EOF &&
+ diff --git a/frotz b/frotz
+ index 000000000..ffffff 100644
+ --- a/frotz
+ +++ b/frotz
+ @@ -10,3 +10,4 @@
+ 9
+ |
+ b1rwzyc3
+ +irwry
+ EOF
+ (
+ GIT_INDEX_FILE=frotz &&
+ export GIT_INDEX_FILE &&
+ git apply --build-fake-ancestor frotz patch &&
+ git cat-file blob :frotz >actual
+ ) &&
+ test_cmp a0blgqsjc actual
+'
+
+test_expect_success 'disambiguate tree' '
+ commit=$(echo "d7xm" | git commit-tree 000000000) &&
+ test $(git rev-parse $commit^{tree}) = $(git rev-parse 0000000000cdc)
+'
+
+test_expect_success 'first commit' '
+ # create one commit 0000000000e4f
+ git commit -m a2onsxbvj
+'
+
+test_expect_success 'disambiguate commit-ish' '
+ # feed commit-ish in an unambiguous way
+ git rev-parse --verify 0000000000e4f^{commit} &&
+
+ # ambiguous at the object name level, but there is only one
+ # such commit (the others are tree and blob)
+ git rev-parse --verify 000000000^{commit} &&
+
+ # likewise
+ git rev-parse --verify 000000000^0
+'
+
+test_expect_success 'disambiguate commit' '
+ commit=$(echo "hoaxj" | git commit-tree 0000000000cdc -p 000000000) &&
+ test $(git rev-parse $commit^) = $(git rev-parse 0000000000e4f)
+'
+
+test_expect_success 'log name1..name2 takes only commit-ishes on both ends' '
+ git log 000000000..000000000 &&
+ git log ..000000000 &&
+ git log 000000000.. &&
+ git log 000000000...000000000 &&
+ git log ...000000000 &&
+ git log 000000000...
+'
+
+test_expect_success 'rev-parse name1..name2 takes only commit-ishes on both ends' '
+ git rev-parse 000000000..000000000 &&
+ git rev-parse ..000000000 &&
+ git rev-parse 000000000..
+'
+
+test_expect_success 'git log takes only commit-ish' '
+ git log 000000000
+'
+
+test_expect_success 'git reset takes only commit-ish' '
+ git reset 000000000
+'
+
+test_expect_success 'first tag' '
+ # create one tag 0000000000f8f
+ git tag -a -m j7cp83um v1.0.0
+'
+
+test_expect_failure 'two semi-ambiguous commit-ish' '
+ # Once the parser becomes ultra-smart, it could notice that
+	# 110282 before ^{commit} names many different objects, but
+	# that only two (HEAD and the v1.0.0 tag) can be peeled to a commit,
+	# and that peeling them down to a commit yields the same commit
+ # without ambiguity.
+ git rev-parse --verify 110282^{commit} &&
+
+ # likewise
+ git log 000000000..000000000 &&
+ git log ..000000000 &&
+ git log 000000000.. &&
+ git log 000000000...000000000 &&
+ git log ...000000000 &&
+ git log 000000000...
+'
+
+test_expect_failure 'three semi-ambiguous tree-ish' '
+ # Likewise for tree-ish. HEAD, v1.0.0 and HEAD^{tree} share
+ # the prefix but peeling them to tree yields the same thing
+ git rev-parse --verify 000000000^{tree}
+'
+
+test_expect_success 'parse describe name' '
+ # feed an unambiguous describe name
+ git rev-parse --verify v1.0.0-0-g0000000000e4f &&
+
+ # ambiguous at the object name level, but there is only one
+ # such commit (others are blob, tree and tag)
+ git rev-parse --verify v1.0.0-0-g000000000
+'
+
+test_expect_success 'more history' '
+ # commit 0000000000043
+ git mv a0blgqsjc d12cr3h8t &&
+ echo h62xsjeu >>d12cr3h8t &&
+ git add d12cr3h8t &&
+
+ test_tick &&
+ git commit -m czy8f73t &&
+
+ # commit 00000000008ec
+ git mv d12cr3h8t j000jmpzn &&
+ echo j08bekfvt >>j000jmpzn &&
+ git add j000jmpzn &&
+
+ test_tick &&
+ git commit -m ioiley5o &&
+
+ # commit 0000000005b0
+ git checkout v1.0.0^0 &&
+ git mv a0blgqsjc f5518nwu &&
+
+ for i in h62xsjeu j08bekfvt kg7xflhm
+ do
+ echo $i
+ done >>f5518nwu &&
+ git add f5518nwu &&
+
+ test_tick &&
+ git commit -m b3wettvi &&
+ side=$(git rev-parse HEAD) &&
+
+ # commit 000000000066
+ git checkout master &&
+
+ # If you use recursive, merge will fail and you will need to
+ # clean up a0blgqsjc as well. If you use resolve, merge will
+ # succeed.
+ test_might_fail git merge --no-commit -s recursive $side &&
+ git rm -f f5518nwu j000jmpzn &&
+
+ test_might_fail git rm -f a0blgqsjc &&
+ (
+ git cat-file blob $side:f5518nwu
+ echo j3l0i9s6
+ ) >ab2gs879 &&
+ git add ab2gs879 &&
+
+ test_tick &&
+ git commit -m ad2uee
+
+'
+
+test_expect_failure 'parse describe name taking advantage of generation' '
+ # ambiguous at the object name level, but there is only one
+ # such commit at generation 0
+ git rev-parse --verify v1.0.0-0-g000000000 &&
+
+ # likewise for generation 2 and 4
+ git rev-parse --verify v1.0.0-2-g000000000 &&
+ git rev-parse --verify v1.0.0-4-g000000000
+'
+
+# Note: because rev-parse does not even try to disambiguate based on
+# the generation number, this test currently succeeds for a wrong
+# reason. When it learns to use the generation number, the previous
+# test should succeed, and also this test should fail because the
+# describe name used in the test with generation number can name two
+# commits. Make sure that such a future enhancement does not randomly
+# pick one.
+test_expect_success 'parse describe name not ignoring ambiguity' '
+ # ambiguous at the object name level, and there are two such
+ # commits at generation 1
+ test_must_fail git rev-parse --verify v1.0.0-1-g000000000
+'
+
+test_expect_success 'ambiguous commit-ish' '
+	# Now that there are many commits whose names begin with the
+	# common prefix, none of these commands should pick one at
+	# random; they should all result in ambiguity errors.
+ test_must_fail git rev-parse --verify 110282^{commit} &&
+
+ # likewise
+ test_must_fail git log 000000000..000000000 &&
+ test_must_fail git log ..000000000 &&
+ test_must_fail git log 000000000.. &&
+ test_must_fail git log 000000000...000000000 &&
+ test_must_fail git log ...000000000 &&
+ test_must_fail git log 000000000...
+'
+
+test_expect_success 'rev-parse --disambiguate' '
+	# The test has created 16 objects that share the prefix; the two
+	# commits created by commit-tree in earlier tests share a
+	# different prefix.
+ git rev-parse --disambiguate=000000000 >actual &&
+ test $(wc -l <actual) = 16 &&
+ test "$(sed -e "s/^\(.........\).*/\1/" actual | sort -u)" = 000000000
+'
+
+test_done
diff --git a/t/t3404-rebase-interactive.sh b/t/t3404-rebase-interactive.sh
index f206a36b0..7304b663c 100755
--- a/t/t3404-rebase-interactive.sh
+++ b/t/t3404-rebase-interactive.sh
@@ -903,4 +903,12 @@ test_expect_success 'rebase -i --root temporary sentinel commit' '
git rebase --abort
'
+test_expect_success 'rebase -i --root fixup root commit' '
+ git checkout B &&
+ FAKE_LINES="1 fixup 2" git rebase -i --root &&
+ test A = $(git cat-file commit HEAD | sed -ne \$p) &&
+ test B = $(git show HEAD:file1) &&
+ test 0 = $(git cat-file commit HEAD | grep -c ^parent\ )
+'
+
test_done
diff --git a/t/t4012-diff-binary.sh b/t/t4012-diff-binary.sh
index 6cebb3951..ec4deea19 100755
--- a/t/t4012-diff-binary.sh
+++ b/t/t4012-diff-binary.sh
@@ -15,13 +15,14 @@ cat >expect.binary-numstat <<\EOF
- - d
EOF
-test_expect_success 'prepare repository' \
- 'echo AIT >a && echo BIT >b && echo CIT >c && echo DIT >d &&
- git update-index --add a b c d &&
- echo git >a &&
- cat "$TEST_DIRECTORY"/test-binary-1.png >b &&
- echo git >c &&
- cat b b >d'
+test_expect_success 'prepare repository' '
+ echo AIT >a && echo BIT >b && echo CIT >c && echo DIT >d &&
+ git update-index --add a b c d &&
+ echo git >a &&
+ cat "$TEST_DIRECTORY"/test-binary-1.png >b &&
+ echo git >c &&
+ cat b b >d
+'
cat > expected <<\EOF
a | 2 +-
@@ -30,16 +31,16 @@ cat > expected <<\EOF
d | Bin
4 files changed, 2 insertions(+), 2 deletions(-)
EOF
-test_expect_success '"apply --stat" output for binary file change' '
+test_expect_success 'apply --stat output for binary file change' '
git diff >diff &&
git apply --stat --summary <diff >current &&
test_i18ncmp expected current
'
test_expect_success 'diff --shortstat output for binary file change' '
- echo " 4 files changed, 2 insertions(+), 2 deletions(-)" >expected &&
+ tail -n 1 expected >expect &&
git diff --shortstat >current &&
- test_i18ncmp expected current
+ test_i18ncmp expect current
'
test_expect_success 'diff --shortstat output for binary file change only' '
@@ -62,49 +63,42 @@ test_expect_success 'apply --numstat understands diff --binary format' '
# apply needs to be able to skip the binary material correctly
# in order to report the line number of a corrupt patch.
-test_expect_success 'apply detecting corrupt patch correctly' \
- 'git diff | sed -e 's/-CIT/xCIT/' >broken &&
- if git apply --stat --summary broken 2>detected
- then
- echo unhappy - should have detected an error
- (exit 1)
- else
- echo happy
- fi &&
- detected=`cat detected` &&
- detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
- detected=`sed -ne "${detected}p" broken` &&
- test "$detected" = xCIT'
-
-test_expect_success 'apply detecting corrupt patch correctly' \
- 'git diff --binary | sed -e 's/-CIT/xCIT/' >broken &&
- if git apply --stat --summary broken 2>detected
- then
- echo unhappy - should have detected an error
- (exit 1)
- else
- echo happy
- fi &&
- detected=`cat detected` &&
- detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
- detected=`sed -ne "${detected}p" broken` &&
- test "$detected" = xCIT'
+test_expect_success 'apply detecting corrupt patch correctly' '
+ git diff >output &&
+ sed -e "s/-CIT/xCIT/" <output >broken &&
+ test_must_fail git apply --stat --summary broken 2>detected &&
+ detected=`cat detected` &&
+ detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
+ detected=`sed -ne "${detected}p" broken` &&
+ test "$detected" = xCIT
+'
+
+test_expect_success 'apply detecting corrupt patch correctly' '
+ git diff --binary | sed -e "s/-CIT/xCIT/" >broken &&
+ test_must_fail git apply --stat --summary broken 2>detected &&
+ detected=`cat detected` &&
+ detected=`expr "$detected" : "fatal.*at line \\([0-9]*\\)\$"` &&
+ detected=`sed -ne "${detected}p" broken` &&
+ test "$detected" = xCIT
+'
test_expect_success 'initial commit' 'git commit -a -m initial'
# Try removal (b), modification (d), and creation (e).
-test_expect_success 'diff-index with --binary' \
- 'echo AIT >a && mv b e && echo CIT >c && cat e >d &&
- git update-index --add --remove a b c d e &&
- tree0=`git write-tree` &&
- git diff --cached --binary >current &&
- git apply --stat --summary current'
-
-test_expect_success 'apply binary patch' \
- 'git reset --hard &&
- git apply --binary --index <current &&
- tree1=`git write-tree` &&
- test "$tree1" = "$tree0"'
+test_expect_success 'diff-index with --binary' '
+ echo AIT >a && mv b e && echo CIT >c && cat e >d &&
+ git update-index --add --remove a b c d e &&
+ tree0=`git write-tree` &&
+ git diff --cached --binary >current &&
+ git apply --stat --summary current
+'
+
+test_expect_success 'apply binary patch' '
+ git reset --hard &&
+ git apply --binary --index <current &&
+ tree1=`git write-tree` &&
+ test "$tree1" = "$tree0"
+'
test_expect_success 'diff --no-index with binary creation' '
echo Q | q_to_nul >binary &&
@@ -125,7 +119,7 @@ cat >expect <<EOF
EOF
test_expect_success 'diff --stat with binary files and big change count' '
- echo X | dd of=binfile bs=1k seek=1 &&
+ printf "\01\00%1024d" 1 >binfile &&
git add binfile &&
i=0 &&
while test $i -lt 10000; do
diff --git a/t/t4020-diff-external.sh b/t/t4020-diff-external.sh
index 533afc118..2e7d73f09 100755
--- a/t/t4020-diff-external.sh
+++ b/t/t4020-diff-external.sh
@@ -48,7 +48,53 @@ test_expect_success 'GIT_EXTERNAL_DIFF environment and --no-ext-diff' '
'
+test_expect_success SYMLINKS 'typechange diff' '
+ rm -f file &&
+ ln -s elif file &&
+ GIT_EXTERNAL_DIFF=echo git diff | {
+ read path oldfile oldhex oldmode newfile newhex newmode &&
+ test "z$path" = zfile &&
+ test "z$oldmode" = z100644 &&
+ test "z$newhex" = "z$_z40" &&
+ test "z$newmode" = z120000 &&
+ oh=$(git rev-parse --verify HEAD:file) &&
+ test "z$oh" = "z$oldhex"
+ } &&
+ GIT_EXTERNAL_DIFF=echo git diff --no-ext-diff >actual &&
+ git diff >expect &&
+ test_cmp expect actual
+'
+
+test_expect_success 'diff.external' '
+ git reset --hard &&
+ echo third >file &&
+ test_config diff.external echo &&
+ git diff | {
+ read path oldfile oldhex oldmode newfile newhex newmode &&
+ test "z$path" = zfile &&
+ test "z$oldmode" = z100644 &&
+ test "z$newhex" = "z$_z40" &&
+ test "z$newmode" = z100644 &&
+ oh=$(git rev-parse --verify HEAD:file) &&
+ test "z$oh" = "z$oldhex"
+ }
+'
+
+test_expect_success 'diff.external should apply only to diff' '
+ test_config diff.external echo &&
+ git log -p -1 HEAD |
+ grep "^diff --git a/file b/file"
+'
+
+test_expect_success 'diff.external and --no-ext-diff' '
+ test_config diff.external echo &&
+ git diff --no-ext-diff |
+ grep "^diff --git a/file b/file"
+'
+
test_expect_success 'diff attribute' '
+ git reset --hard &&
+ echo third >file &&
git config diff.parrot.command echo &&
@@ -113,6 +159,19 @@ test_expect_success 'diff attribute and --no-ext-diff' '
'
+test_expect_success 'GIT_EXTERNAL_DIFF trumps diff.external' '
+ >.gitattributes &&
+ test_config diff.external "echo ext-global" &&
+ GIT_EXTERNAL_DIFF="echo ext-env" git diff | grep ext-env
+'
+
+test_expect_success 'attributes trump GIT_EXTERNAL_DIFF and diff.external' '
+ test_config diff.foo.command "echo ext-attribute" &&
+ test_config diff.external "echo ext-global" &&
+ echo "file diff=foo" >.gitattributes &&
+ GIT_EXTERNAL_DIFF="echo ext-env" git diff | grep ext-attribute
+'
+
test_expect_success 'no diff with -diff' '
echo >.gitattributes "file -diff" &&
git diff | grep Binary
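The tests above establish a clear precedence among the external-diff knobs: a diff attribute naming a driver (diff.<driver>.command) wins over the GIT_EXTERNAL_DIFF environment variable, which in turn wins over the diff.external configuration, and --no-ext-diff bypasses all of them. A sketch under those assumptions, with a tracked file named "file" carrying unstaged changes as in the script above:

	git config diff.external 'echo ext-global'
	git config diff.foo.command 'echo ext-attribute'
	echo 'file diff=foo' >.gitattributes

	GIT_EXTERNAL_DIFF='echo ext-env' git diff   # attribute driver wins
	rm .gitattributes
	GIT_EXTERNAL_DIFF='echo ext-env' git diff   # environment variable wins
	git diff                                    # falls back to diff.external
	git diff --no-ext-diff                      # built-in diff, no helper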
diff --git a/t/t7003-filter-branch.sh b/t/t7003-filter-branch.sh
index e0227730d..4d13e10de 100755
--- a/t/t7003-filter-branch.sh
+++ b/t/t7003-filter-branch.sh
@@ -5,7 +5,8 @@ test_description='git filter-branch'
test_expect_success 'setup' '
test_commit A &&
- test_commit B &&
+ GIT_COMMITTER_DATE="@0 +0000" GIT_AUTHOR_DATE="@0 +0000" &&
+ test_commit --notick B &&
git checkout -b branch B &&
test_commit D &&
mkdir dir &&
diff --git a/t/t7406-submodule-update.sh b/t/t7406-submodule-update.sh
index dcb195b4c..ce61d4c0f 100755
--- a/t/t7406-submodule-update.sh
+++ b/t/t7406-submodule-update.sh
@@ -636,4 +636,17 @@ test_expect_success 'submodule update properly revives a moved submodule' '
)
'
+test_expect_success SYMLINKS 'submodule update can handle symbolic links in pwd' '
+ mkdir -p linked/dir &&
+ ln -s linked/dir linkto &&
+ (
+ cd linkto &&
+ git clone "$TRASH_DIRECTORY"/super_update_r2 super &&
+ (
+ cd super &&
+ git submodule update --init --recursive
+ )
+ )
+'
+
test_done
diff --git a/t/t7409-submodule-detached-worktree.sh b/t/t7409-submodule-detached-worktree.sh
new file mode 100755
index 000000000..2fec13dcd
--- /dev/null
+++ b/t/t7409-submodule-detached-worktree.sh
@@ -0,0 +1,78 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Daniel GraƱa
+#
+
+test_description='Test submodules on detached working tree
+
+This test verifies that "git submodule" initialization, update and addition work
+on detached working trees
+'
+
+TEST_NO_CREATE_REPO=1
+. ./test-lib.sh
+
+test_expect_success 'submodule on detached working tree' '
+ git init --bare remote &&
+ test_create_repo bundle1 &&
+ (
+ cd bundle1 &&
+ test_commit "shoot" &&
+ git rev-parse --verify HEAD >../expect
+ ) &&
+ mkdir home &&
+ (
+ cd home &&
+ export GIT_WORK_TREE="$(pwd)" GIT_DIR="$(pwd)/.dotfiles" &&
+ git clone --bare ../remote .dotfiles &&
+ git submodule add ../bundle1 .vim/bundle/sogood &&
+ test_commit "sogood" &&
+ (
+ unset GIT_WORK_TREE GIT_DIR &&
+ cd .vim/bundle/sogood &&
+ git rev-parse --verify HEAD >actual &&
+ test_cmp ../../../../expect actual
+ ) &&
+ git push origin master
+ ) &&
+ mkdir home2 &&
+ (
+ cd home2 &&
+ git clone --bare ../remote .dotfiles &&
+ export GIT_WORK_TREE="$(pwd)" GIT_DIR="$(pwd)/.dotfiles" &&
+ git checkout master &&
+ git submodule update --init &&
+ (
+ unset GIT_WORK_TREE GIT_DIR &&
+ cd .vim/bundle/sogood &&
+ git rev-parse --verify HEAD >actual &&
+ test_cmp ../../../../expect actual
+ )
+ )
+'
+
+test_expect_success 'submodule on detached working tree pointed to by core.worktree' '
+ mkdir home3 &&
+ (
+ cd home3 &&
+ export GIT_DIR="$(pwd)/.dotfiles" &&
+ git clone --bare ../remote "$GIT_DIR" &&
+ git config core.bare false &&
+ git config core.worktree .. &&
+ git checkout master &&
+ git submodule add ../bundle1 .vim/bundle/dupe &&
+ test_commit "dupe" &&
+ git push origin master
+ ) &&
+ (
+ cd home &&
+ export GIT_DIR="$(pwd)/.dotfiles" &&
+ git config core.bare false &&
+ git config core.worktree .. &&
+ git pull &&
+ git submodule update --init &&
+ test -f .vim/bundle/dupe/shoot.t
+ )
+'
+
+test_done
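What the test models is the common "dotfiles" layout: a bare repository under $HOME with the work tree pointed at $HOME itself, either through GIT_DIR/GIT_WORK_TREE or through core.worktree. A sketch of the environment-variable variant; the URL and branch name are placeholders:

	cd "$HOME"
	git clone --bare https://example.com/dotfiles.git .dotfiles
	export GIT_DIR="$HOME/.dotfiles" GIT_WORK_TREE="$HOME"
	git checkout master
	git submodule update --init     # submodules land under $HOME, not under .dotfiles
	unset GIT_DIR GIT_WORK_TREE     # so other repositories are unaffected afterwards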
diff --git a/t/t7502-commit.sh b/t/t7502-commit.sh
index 181456aa9..deb187eb7 100755
--- a/t/t7502-commit.sh
+++ b/t/t7502-commit.sh
@@ -235,44 +235,56 @@ test_expect_success 'cleanup commit messages (strip,-F,-e): output' '
test_i18ncmp expect actual
'
-echo "#
-# Author: $GIT_AUTHOR_NAME <$GIT_AUTHOR_EMAIL>
-#" >> expect
-
-test_expect_success 'author different from committer' '
+test_expect_success 'message shows author when it is not equal to committer' '
echo >>negative &&
- test_might_fail git commit -e -m "sample" &&
- head -n 7 .git/COMMIT_EDITMSG >actual &&
- test_i18ncmp expect actual
+ git commit -e -m "sample" -a &&
+ test_i18ngrep \
+ "^# Author: *A U Thor <author@example.com>\$" \
+ .git/COMMIT_EDITMSG
'
-mv expect expect.tmp
-sed '$d' < expect.tmp > expect
-rm -f expect.tmp
-echo "# Committer:
-#" >> expect
+test_expect_success 'setup auto-ident prerequisite' '
+ if (sane_unset GIT_COMMITTER_EMAIL &&
+ sane_unset GIT_COMMITTER_NAME &&
+ git var GIT_COMMITTER_IDENT); then
+ test_set_prereq AUTOIDENT
+ else
+ test_set_prereq NOAUTOIDENT
+ fi
+'
-test_expect_success 'committer is automatic' '
+test_expect_success AUTOIDENT 'message shows committer when it is automatic' '
echo >>negative &&
(
sane_unset GIT_COMMITTER_EMAIL &&
sane_unset GIT_COMMITTER_NAME &&
- # must fail because there is no change
- test_must_fail git commit -e -m "sample"
+ git commit -e -m "sample" -a
) &&
- head -n 8 .git/COMMIT_EDITMSG | \
- sed "s/^# Committer: .*/# Committer:/" >actual
- test_i18ncmp expect actual
+ # the ident is calculated from the system, so we cannot
+ # check the actual value, only that it is there
+ test_i18ngrep "^# Committer: " .git/COMMIT_EDITMSG
'
-pwd=`pwd`
-cat >> .git/FAKE_EDITOR << EOF
-#! /bin/sh
-echo editor started > "$pwd/.git/result"
+write_script .git/FAKE_EDITOR <<EOF
+echo editor started > "$(pwd)/.git/result"
exit 0
EOF
-chmod +x .git/FAKE_EDITOR
+
+test_expect_success NOAUTOIDENT 'do not fire editor when committer is bogus' '
+	>.git/result &&
+ >expect &&
+
+ echo >>negative &&
+ (
+ sane_unset GIT_COMMITTER_EMAIL &&
+ sane_unset GIT_COMMITTER_NAME &&
+ GIT_EDITOR="\"$(pwd)/.git/FAKE_EDITOR\"" &&
+ export GIT_EDITOR &&
+ test_must_fail git commit -e -m sample -a
+ ) &&
+ test_cmp expect .git/result
+'
test_expect_success 'do not fire editor in the presence of conflicts' '
@@ -293,16 +305,14 @@ test_expect_success 'do not fire editor in the presence of conflicts' '
test_must_fail git cherry-pick -n master &&
echo "editor not started" >.git/result &&
(
- GIT_EDITOR="$(pwd)/.git/FAKE_EDITOR" &&
+ GIT_EDITOR="\"$(pwd)/.git/FAKE_EDITOR\"" &&
export GIT_EDITOR &&
test_must_fail git commit
) &&
test "$(cat .git/result)" = "editor not started"
'
-pwd=`pwd`
-cat >.git/FAKE_EDITOR <<EOF
-#! $SHELL_PATH
+write_script .git/FAKE_EDITOR <<EOF
# kill -TERM command added below.
EOF
@@ -339,13 +349,12 @@ test_expect_success 'A single-liner subject with a token plus colon is not a foo
'
-cat >.git/FAKE_EDITOR <<EOF
-#!$SHELL_PATH
-mv "\$1" "\$1.orig"
+write_script .git/FAKE_EDITOR <<\EOF
+mv "$1" "$1.orig"
(
echo message
- cat "\$1.orig"
-) >"\$1"
+ cat "$1.orig"
+) >"$1"
EOF
echo '## Custom template' >template
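write_script, used above in place of the cat/chmod pairs, turns its standard input into an executable script with an appropriate #! line. A brief sketch of how a converted test might use it; the test name, file name and message are made up:

	write_script fake-editor <<-\EOF
	echo from-fake-editor >"$1"
	EOF

	test_expect_success 'fake editor supplies the commit message' '
		GIT_EDITOR="\"$(pwd)/fake-editor\"" git commit --allow-empty &&
		grep from-fake-editor .git/COMMIT_EDITMSG
	'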
diff --git a/t/t7810-grep.sh b/t/t7810-grep.sh
index 24e9b1974..523d04123 100755
--- a/t/t7810-grep.sh
+++ b/t/t7810-grep.sh
@@ -399,17 +399,6 @@ test_expect_success 'grep -q, silently report matches' '
test_cmp empty actual
'
-# Create 1024 file names that sort between "y" and "z" to make sure
-# the two files are handled by different calls to an external grep.
-# This depends on MAXARGS in builtin-grep.c being 1024 or less.
-c32="0 1 2 3 4 5 6 7 8 9 a b c d e f g h i j k l m n o p q r s t u v"
-test_expect_success 'grep -C1, hunk mark between files' '
- for a in $c32; do for b in $c32; do : >y-$a$b; done; done &&
- git add y-?? &&
- git grep -C1 "^[yz]" >actual &&
- test_cmp expected actual
-'
-
test_expect_success 'grep -C1 hunk mark between files' '
git grep -C1 "^[yz]" >actual &&
test_cmp expected actual
diff --git a/t/test-lib-functions.sh b/t/test-lib-functions.sh
index 16397691d..80daaca78 100644
--- a/t/test-lib-functions.sh
+++ b/t/test-lib-functions.sh
@@ -143,10 +143,19 @@ test_pause () {
# Both <file> and <contents> default to <message>.
test_commit () {
- file=${2:-"$1.t"}
+ notick= &&
+ if test "z$1" = "z--notick"
+ then
+ notick=yes
+ shift
+ fi &&
+ file=${2:-"$1.t"} &&
echo "${3-$1}" > "$file" &&
git add "$file" &&
- test_tick &&
+ if test -z "$notick"
+ then
+ test_tick
+ fi &&
git commit -m "$1" &&
git tag "$1"
}
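With the new --notick option, test_commit skips the test_tick call, so a test can pin the author and committer dates itself before creating a commit. A short sketch of the intended use inside a test body, mirroring the t7003 change above:

	test_tick
	test_commit A

	# Pin both dates to the epoch; --notick keeps them for commit B.
	GIT_COMMITTER_DATE="@0 +0000" GIT_AUTHOR_DATE="@0 +0000"
	export GIT_COMMITTER_DATE GIT_AUTHOR_DATE
	test_commit --notick B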
diff --git a/t/test-lib.sh b/t/test-lib.sh
index acda33d17..bb4f8865b 100644
--- a/t/test-lib.sh
+++ b/t/test-lib.sh
@@ -34,6 +34,26 @@ esac
# Keep the original TERM for say_color
ORIGINAL_TERM=$TERM
+# Test the binaries we have just built. The tests are kept in
+# t/ subdirectory and are run in 'trash directory' subdirectory.
+if test -z "$TEST_DIRECTORY"
+then
+ # We allow tests to override this, in case they want to run tests
+ # outside of t/, e.g. for running tests on the test library
+ # itself.
+ TEST_DIRECTORY=$(pwd)
+fi
+if test -z "$TEST_OUTPUT_DIRECTORY"
+then
+ # Similarly, override this to store the test-results subdir
+ # elsewhere
+ TEST_OUTPUT_DIRECTORY=$TEST_DIRECTORY
+fi
+GIT_BUILD_DIR="$TEST_DIRECTORY"/..
+
+. "$GIT_BUILD_DIR"/GIT-BUILD-OPTIONS
+export PERL_PATH SHELL_PATH
+
# For repeatability, reset the environment to known value.
LANG=C
LC_ALL=C
@@ -46,7 +66,7 @@ EDITOR=:
# /usr/xpg4/bin/sh and /bin/ksh to bail out. So keep the unsets
# deriving from the command substitution clustered with the other
# ones.
-unset VISUAL EMAIL LANGUAGE COLUMNS $(perl -e '
+unset VISUAL EMAIL LANGUAGE COLUMNS $("$PERL_PATH" -e '
my @env = keys %ENV;
my $ok = join("|", qw(
TRACE
@@ -61,6 +81,7 @@ unset VISUAL EMAIL LANGUAGE COLUMNS $(perl -e '
my @vars = grep(/^GIT_/ && !/^GIT_($ok)/o, @env);
print join("\n", @vars);
')
+unset XDG_CONFIG_HOME
GIT_AUTHOR_EMAIL=author@example.com
GIT_AUTHOR_NAME='A U Thor'
GIT_COMMITTER_EMAIL=committer@example.com
@@ -229,7 +250,7 @@ trap 'die' EXIT
# The user-facing functions are loaded from a separate file so that
# test_perf subshells can have them too
-. "${TEST_DIRECTORY:-.}"/test-lib-functions.sh
+. "$TEST_DIRECTORY/test-lib-functions.sh"
# You are not expected to call test_ok_ and test_failure_ directly, use
# the test_expect_* functions instead.
@@ -380,23 +401,6 @@ test_done () {
esac
}
-# Test the binaries we have just built. The tests are kept in
-# t/ subdirectory and are run in 'trash directory' subdirectory.
-if test -z "$TEST_DIRECTORY"
-then
- # We allow tests to override this, in case they want to run tests
- # outside of t/, e.g. for running tests on the test library
- # itself.
- TEST_DIRECTORY=$(pwd)
-fi
-if test -z "$TEST_OUTPUT_DIRECTORY"
-then
- # Similarly, override this to store the test-results subdir
- # elsewhere
- TEST_OUTPUT_DIRECTORY=$TEST_DIRECTORY
-fi
-GIT_BUILD_DIR="$TEST_DIRECTORY"/..
-
if test -n "$valgrind"
then
make_symlink () {
@@ -492,10 +496,6 @@ GIT_CONFIG_NOSYSTEM=1
GIT_ATTR_NOSYSTEM=1
export PATH GIT_EXEC_PATH GIT_TEMPLATE_DIR GIT_CONFIG_NOSYSTEM GIT_ATTR_NOSYSTEM
-. "$GIT_BUILD_DIR"/GIT-BUILD-OPTIONS
-
-export PERL_PATH
-
if test -z "$GIT_TEST_CMP"
then
if test -n "$GIT_TEST_CMP_USE_COPIED_CONTEXT"
diff --git a/transport-helper.c b/transport-helper.c
index 61c928f6c..cfe098849 100644
--- a/transport-helper.c
+++ b/transport-helper.c
@@ -444,6 +444,21 @@ static int fetch_with_import(struct transport *transport,
free(fastimport.argv);
fastimport.argv = NULL;
+ /*
+ * The fast-import stream of a remote helper that advertises
+ * the "refspec" capability writes to the refs named after the
+ * right hand side of the first refspec matching each ref we
+ * were fetching.
+ *
+ * (If no "refspec" capability was specified, for historical
+ * reasons we default to *:*.)
+ *
+ * Store the result in to_fetch[i].old_sha1. Callers such
+ * as "git fetch" can use the value to write feedback to the
+ * terminal, populate FETCH_HEAD, and determine what new value
+ * should be written to peer_ref if the update is a
+ * fast-forward or this is a forced update.
+ */
for (i = 0; i < nr_heads; i++) {
char *private;
posn = to_fetch[i];
diff --git a/tree.c b/tree.c
index 676e9f710..62fed632d 100644
--- a/tree.c
+++ b/tree.c
@@ -22,7 +22,8 @@ static int read_one_entry_opt(const unsigned char *sha1, const char *base, int b
ce = xcalloc(1, size);
ce->ce_mode = create_ce_mode(mode);
- ce->ce_flags = create_ce_flags(baselen + len, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = baselen + len;
memcpy(ce->name, base, baselen);
memcpy(ce->name + baselen, pathname, len+1);
hashcpy(ce->sha1, sha1);
@@ -133,8 +134,8 @@ static int cmp_cache_name_compare(const void *a_, const void *b_)
ce1 = *((const struct cache_entry **)a_);
ce2 = *((const struct cache_entry **)b_);
- return cache_name_compare(ce1->name, ce1->ce_flags,
- ce2->name, ce2->ce_flags);
+ return cache_name_stage_compare(ce1->name, ce1->ce_namelen, ce_stage(ce1),
+ ce2->name, ce2->ce_namelen, ce_stage(ce2));
}
int read_tree(struct tree *tree, int stage, struct pathspec *match)
diff --git a/unpack-trees.c b/unpack-trees.c
index 29893bf65..6d9636623 100644
--- a/unpack-trees.c
+++ b/unpack-trees.c
@@ -539,7 +539,8 @@ static struct cache_entry *create_ce_entry(const struct traverse_info *info, con
struct cache_entry *ce = xcalloc(1, cache_entry_size(len));
ce->ce_mode = create_ce_mode(n->mode);
- ce->ce_flags = create_ce_flags(len, stage);
+ ce->ce_flags = create_ce_flags(stage);
+ ce->ce_namelen = len;
hashcpy(ce->sha1, n->sha1);
make_traverse_path(ce->name, info, n);
diff --git a/wt-status.c b/wt-status.c
index c749267c9..c110cbc12 100644
--- a/wt-status.c
+++ b/wt-status.c
@@ -24,7 +24,6 @@ static char default_wt_status_colors[][COLOR_MAXLEN] = {
GIT_COLOR_GREEN, /* WT_STATUS_LOCAL_BRANCH */
GIT_COLOR_RED, /* WT_STATUS_REMOTE_BRANCH */
GIT_COLOR_NIL, /* WT_STATUS_ONBRANCH */
- GIT_COLOR_NORMAL, /* WT_STATUS_IN_PROGRESS */
};
static const char *color(int slot, struct wt_status *s)
@@ -931,7 +930,7 @@ static void show_bisect_in_progress(struct wt_status *s,
static void wt_status_print_state(struct wt_status *s)
{
- const char *state_color = color(WT_STATUS_IN_PROGRESS, s);
+ const char *state_color = color(WT_STATUS_HEADER, s);
struct wt_status_state state;
struct stat st;
diff --git a/wt-status.h b/wt-status.h
index c1066a0ec..f8fc58cc0 100644
--- a/wt-status.h
+++ b/wt-status.h
@@ -15,7 +15,6 @@ enum color_wt_status {
WT_STATUS_LOCAL_BRANCH,
WT_STATUS_REMOTE_BRANCH,
WT_STATUS_ONBRANCH,
- WT_STATUS_IN_PROGRESS,
WT_STATUS_MAXSLOT
};