Diffstat (limited to 'third_party/git/contrib')
-rw-r--r--  third_party/git/contrib/README | 43
-rw-r--r--  third_party/git/contrib/buildsystems/Generators.pm | 42
-rw-r--r--  third_party/git/contrib/buildsystems/Generators/QMake.pm | 189
-rw-r--r--  third_party/git/contrib/buildsystems/Generators/Vcproj.pm | 579
-rw-r--r--  third_party/git/contrib/buildsystems/Generators/Vcxproj.pm | 392
-rwxr-xr-x  third_party/git/contrib/buildsystems/engine.pl | 394
-rwxr-xr-x  third_party/git/contrib/buildsystems/generate | 29
-rwxr-xr-x  third_party/git/contrib/buildsystems/parse.pl | 228
-rw-r--r--  third_party/git/contrib/coccinelle/.gitignore | 1
-rw-r--r--  third_party/git/contrib/coccinelle/README | 43
-rw-r--r--  third_party/git/contrib/coccinelle/array.cocci | 90
-rw-r--r--  third_party/git/contrib/coccinelle/commit.cocci | 34
-rw-r--r--  third_party/git/contrib/coccinelle/flex_alloc.cocci | 13
-rw-r--r--  third_party/git/contrib/coccinelle/free.cocci | 18
-rw-r--r--  third_party/git/contrib/coccinelle/hashmap.cocci | 16
-rw-r--r--  third_party/git/contrib/coccinelle/object_id.cocci | 87
-rw-r--r--  third_party/git/contrib/coccinelle/preincr.cocci | 5
-rw-r--r--  third_party/git/contrib/coccinelle/qsort.cocci | 37
-rw-r--r--  third_party/git/contrib/coccinelle/strbuf.cocci | 62
-rw-r--r--  third_party/git/contrib/coccinelle/swap.cocci | 28
-rw-r--r--  third_party/git/contrib/coccinelle/the_repository.pending.cocci | 144
-rw-r--r--  third_party/git/contrib/coccinelle/xstrdup_or_null.cocci | 13
-rw-r--r--  third_party/git/contrib/completion/.gitattributes | 1
-rw-r--r--  third_party/git/contrib/completion/git-completion.bash | 3381
-rw-r--r--  third_party/git/contrib/completion/git-completion.tcsh | 126
-rw-r--r--  third_party/git/contrib/completion/git-completion.zsh | 244
-rw-r--r--  third_party/git/contrib/completion/git-prompt.sh | 560
-rw-r--r--  third_party/git/contrib/contacts/.gitignore | 3
-rw-r--r--  third_party/git/contrib/contacts/Makefile | 71
-rwxr-xr-x  third_party/git/contrib/contacts/git-contacts | 203
-rw-r--r--  third_party/git/contrib/contacts/git-contacts.txt | 94
-rwxr-xr-x  third_party/git/contrib/coverage-diff.sh | 108
-rw-r--r--  third_party/git/contrib/credential/gnome-keyring/.gitignore | 1
-rw-r--r--  third_party/git/contrib/credential/gnome-keyring/Makefile | 25
-rw-r--r--  third_party/git/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c | 470
-rw-r--r--  third_party/git/contrib/credential/libsecret/Makefile | 25
-rw-r--r--  third_party/git/contrib/credential/libsecret/git-credential-libsecret.c | 369
-rw-r--r--  third_party/git/contrib/credential/netrc/.gitignore | 1
-rw-r--r--  third_party/git/contrib/credential/netrc/Makefile | 30
-rwxr-xr-x  third_party/git/contrib/credential/netrc/git-credential-netrc.perl | 440
-rwxr-xr-x  third_party/git/contrib/credential/netrc/t-git-credential-netrc.sh | 32
-rwxr-xr-x  third_party/git/contrib/credential/netrc/test.command-option-gpg | 2
-rwxr-xr-x  third_party/git/contrib/credential/netrc/test.git-config-gpg | 2
-rw-r--r--  third_party/git/contrib/credential/netrc/test.netrc | 13
-rw-r--r--  third_party/git/contrib/credential/netrc/test.netrc.gpg | 0
-rwxr-xr-x  third_party/git/contrib/credential/netrc/test.pl | 139
-rw-r--r--  third_party/git/contrib/credential/osxkeychain/.gitignore | 1
-rw-r--r--  third_party/git/contrib/credential/osxkeychain/Makefile | 17
-rw-r--r--  third_party/git/contrib/credential/osxkeychain/git-credential-osxkeychain.c | 183
-rw-r--r--  third_party/git/contrib/credential/wincred/Makefile | 22
-rw-r--r--  third_party/git/contrib/credential/wincred/git-credential-wincred.c | 327
-rw-r--r--  third_party/git/contrib/diff-highlight/.gitignore | 2
-rw-r--r--  third_party/git/contrib/diff-highlight/DiffHighlight.pm | 285
-rw-r--r--  third_party/git/contrib/diff-highlight/Makefile | 23
-rw-r--r--  third_party/git/contrib/diff-highlight/README | 223
-rw-r--r--  third_party/git/contrib/diff-highlight/diff-highlight.perl | 8
-rw-r--r--  third_party/git/contrib/diff-highlight/t/.gitignore | 2
-rw-r--r--  third_party/git/contrib/diff-highlight/t/Makefile | 22
-rwxr-xr-x  third_party/git/contrib/diff-highlight/t/t9400-diff-highlight.sh | 341
-rw-r--r--  third_party/git/contrib/emacs/README | 33
-rw-r--r--  third_party/git/contrib/emacs/git-blame.el | 6
-rw-r--r--  third_party/git/contrib/emacs/git.el | 6
-rw-r--r--  third_party/git/contrib/examples/README | 20
-rwxr-xr-x  third_party/git/contrib/fast-import/git-import.perl | 64
-rwxr-xr-x  third_party/git/contrib/fast-import/git-import.sh | 38
-rw-r--r--  third_party/git/contrib/fast-import/git-p4.README | 12
-rwxr-xr-x  third_party/git/contrib/fast-import/import-directories.perl | 416
-rwxr-xr-x  third_party/git/contrib/fast-import/import-tars.perl | 225
-rwxr-xr-x  third_party/git/contrib/fast-import/import-zips.py | 78
-rw-r--r--  third_party/git/contrib/git-jump/README | 112
-rwxr-xr-x  third_party/git/contrib/git-jump/git-jump | 78
-rwxr-xr-x  third_party/git/contrib/git-resurrect.sh | 182
-rw-r--r--  third_party/git/contrib/git-shell-commands/README | 18
-rwxr-xr-x  third_party/git/contrib/git-shell-commands/help | 18
-rwxr-xr-x  third_party/git/contrib/git-shell-commands/list | 10
-rwxr-xr-x  third_party/git/contrib/hg-to-git/hg-to-git.py | 254
-rw-r--r--  third_party/git/contrib/hg-to-git/hg-to-git.txt | 21
-rw-r--r--  third_party/git/contrib/hooks/multimail/CHANGES | 285
-rw-r--r--  third_party/git/contrib/hooks/multimail/CONTRIBUTING.rst | 60
-rw-r--r--  third_party/git/contrib/hooks/multimail/README.Git | 15
-rw-r--r--  third_party/git/contrib/hooks/multimail/README.migrate-from-post-receive-email | 145
-rw-r--r--  third_party/git/contrib/hooks/multimail/README.rst | 774
-rw-r--r--  third_party/git/contrib/hooks/multimail/doc/customizing-emails.rst | 56
-rw-r--r--  third_party/git/contrib/hooks/multimail/doc/gerrit.rst | 56
-rw-r--r--  third_party/git/contrib/hooks/multimail/doc/gitolite.rst | 118
-rw-r--r--  third_party/git/contrib/hooks/multimail/doc/troubleshooting.rst | 78
-rwxr-xr-x  third_party/git/contrib/hooks/multimail/git_multimail.py | 4346
-rwxr-xr-x  third_party/git/contrib/hooks/multimail/migrate-mailhook-config | 274
-rwxr-xr-x  third_party/git/contrib/hooks/multimail/post-receive.example | 101
-rwxr-xr-x  third_party/git/contrib/hooks/post-receive-email | 759
-rwxr-xr-x  third_party/git/contrib/hooks/pre-auto-gc-battery | 42
-rwxr-xr-x  third_party/git/contrib/hooks/setgitperms.perl | 214
-rwxr-xr-x  third_party/git/contrib/hooks/update-paranoid | 421
-rwxr-xr-x  third_party/git/contrib/long-running-filter/example.pl | 132
-rw-r--r--  third_party/git/contrib/mw-to-git/.gitignore | 2
-rw-r--r--  third_party/git/contrib/mw-to-git/.perlcriticrc | 28
-rw-r--r--  third_party/git/contrib/mw-to-git/Git/Mediawiki.pm | 101
-rw-r--r--  third_party/git/contrib/mw-to-git/Makefile | 58
-rwxr-xr-x  third_party/git/contrib/mw-to-git/bin-wrapper/git | 14
-rwxr-xr-x  third_party/git/contrib/mw-to-git/git-mw.perl | 368
-rwxr-xr-x  third_party/git/contrib/mw-to-git/git-remote-mediawiki.perl | 1374
-rw-r--r--  third_party/git/contrib/mw-to-git/git-remote-mediawiki.txt | 7
-rw-r--r--  third_party/git/contrib/mw-to-git/t/.gitignore | 4
-rw-r--r--  third_party/git/contrib/mw-to-git/t/Makefile | 31
-rw-r--r--  third_party/git/contrib/mw-to-git/t/README | 124
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/install-wiki.sh | 55
-rw-r--r--  third_party/git/contrib/mw-to-git/t/install-wiki/.gitignore | 1
-rw-r--r--  third_party/git/contrib/mw-to-git/t/install-wiki/LocalSettings.php | 129
-rw-r--r--  third_party/git/contrib/mw-to-git/t/install-wiki/db_install.php | 120
-rw-r--r--  third_party/git/contrib/mw-to-git/t/push-pull-tests.sh | 144
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh | 257
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh | 24
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh | 347
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh | 217
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/t9364-pull-by-rev.sh | 17
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/t9365-continuing-queries.sh | 23
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/test-gitmw-lib.sh | 432
-rwxr-xr-x  third_party/git/contrib/mw-to-git/t/test-gitmw.pl | 225
-rw-r--r--  third_party/git/contrib/mw-to-git/t/test.config | 37
-rw-r--r--  third_party/git/contrib/persistent-https/LICENSE | 202
-rw-r--r--  third_party/git/contrib/persistent-https/Makefile | 40
-rw-r--r--  third_party/git/contrib/persistent-https/README | 72
-rw-r--r--  third_party/git/contrib/persistent-https/client.go | 189
-rw-r--r--  third_party/git/contrib/persistent-https/main.go | 82
-rw-r--r--  third_party/git/contrib/persistent-https/proxy.go | 190
-rw-r--r--  third_party/git/contrib/persistent-https/socket.go | 97
-rw-r--r--  third_party/git/contrib/remote-helpers/README | 15
-rwxr-xr-x  third_party/git/contrib/remote-helpers/git-remote-bzr | 11
-rwxr-xr-x  third_party/git/contrib/remote-helpers/git-remote-hg | 11
-rwxr-xr-x  third_party/git/contrib/remotes2config.sh | 33
-rwxr-xr-x  third_party/git/contrib/rerere-train.sh | 102
-rwxr-xr-x  third_party/git/contrib/stats/git-common-hash | 26
-rwxr-xr-x  third_party/git/contrib/stats/mailmap.pl | 70
-rwxr-xr-x  third_party/git/contrib/stats/packinfo.pl | 212
-rw-r--r--  third_party/git/contrib/subtree/.gitignore | 7
-rw-r--r--  third_party/git/contrib/subtree/COPYING | 339
-rw-r--r--  third_party/git/contrib/subtree/INSTALL | 28
-rw-r--r--  third_party/git/contrib/subtree/Makefile | 101
-rw-r--r--  third_party/git/contrib/subtree/README | 8
-rwxr-xr-x  third_party/git/contrib/subtree/git-subtree.sh | 901
-rw-r--r--  third_party/git/contrib/subtree/git-subtree.txt | 351
-rw-r--r--  third_party/git/contrib/subtree/t/Makefile | 86
-rwxr-xr-x  third_party/git/contrib/subtree/t/t7900-subtree.sh | 1034
-rw-r--r--  third_party/git/contrib/subtree/todo | 48
-rw-r--r--  third_party/git/contrib/svn-fe/.gitignore | 4
-rw-r--r--  third_party/git/contrib/svn-fe/Makefile | 105
-rw-r--r--  third_party/git/contrib/svn-fe/svn-fe.c | 18
-rw-r--r--  third_party/git/contrib/svn-fe/svn-fe.txt | 71
-rwxr-xr-x  third_party/git/contrib/svn-fe/svnrdump_sim.py | 68
-rw-r--r--  third_party/git/contrib/thunderbird-patch-inline/README | 20
-rwxr-xr-x  third_party/git/contrib/thunderbird-patch-inline/appp.sh | 55
-rw-r--r--  third_party/git/contrib/update-unicode/.gitignore | 3
-rw-r--r--  third_party/git/contrib/update-unicode/README | 20
-rwxr-xr-x  third_party/git/contrib/update-unicode/update_unicode.sh | 33
-rw-r--r--  third_party/git/contrib/vscode/.gitattributes | 1
-rw-r--r--  third_party/git/contrib/vscode/README.md | 14
-rwxr-xr-x  third_party/git/contrib/vscode/init.sh | 375
-rw-r--r--  third_party/git/contrib/workdir/.gitattributes | 1
-rwxr-xr-x  third_party/git/contrib/workdir/git-new-workdir | 105
159 files changed, 29037 insertions, 0 deletions
diff --git a/third_party/git/contrib/README b/third_party/git/contrib/README
new file mode 100644
index 000000000000..05f291c1f1d3
--- /dev/null
+++ b/third_party/git/contrib/README
@@ -0,0 +1,43 @@
+Contributed Software
+
+Although these pieces are available as part of the official git
+source tree, they have a somewhat different status.  The
+intention is to keep interesting tools around git here, maybe
+even experimental ones, to give users easier access to them,
+and to give tools wider exposure, so that they can be improved
+faster.
+
+I am not expecting to touch these myself that much.  As far as
+my day-to-day operation is concerned, these subdirectories are
+owned by their respective primary authors.  I am willing to help
+if users of these components and the contrib/ subtree "owners"
+have technical/design issues to resolve, but the initiative to
+fix and/or enhance things _must_ be on the side of the subtree
+owners.  IOW, I won't be actively looking for bugs and room for
+enhancement in them as the git maintainer -- I may only do so
+just as one of the users when I want to scratch my own itch.  If
+you have patches to things in the contrib/ area, the patch should
+first be sent to the primary author, and then the primary author
+should ack and forward it to me (a git pull request is nicer).
+This is the same way I have been treating gitk, and to a
+lesser degree various foreign SCM interfaces, so you know the
+drill.
+
+I expect things that start their life in the contrib/ area
+to graduate out of contrib/ once they mature, either by becoming
+projects on their own, or moving to the toplevel directory.  On
+the other hand, I expect I'll be proposing removal of disused
+and inactive ones from time to time.
+
+If you have new things to add to this area, please first propose
+them on the git mailing list, and after a list discussion proves
+there is some general interest (it does not have to be a
+list-wide consensus for a tool targeted to a relatively narrow
+audience -- for example I do not work with projects whose
+upstream is svn, so I have no use for git-svn myself, but it is
+of general interest for people who need to interoperate with SVN
+repositories in cases where git-svn works better than git-svnimport),
+submit a patch to create a subdirectory of contrib/ and put your
+stuff there.
+
+-jc
diff --git a/third_party/git/contrib/buildsystems/Generators.pm b/third_party/git/contrib/buildsystems/Generators.pm
new file mode 100644
index 000000000000..aa4cbaa2adac
--- /dev/null
+++ b/third_party/git/contrib/buildsystems/Generators.pm
@@ -0,0 +1,42 @@
+package Generators;
+require Exporter;
+
+use strict;
+use File::Basename;
+no strict 'refs';
+use vars qw($VERSION @AVAILABLE);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+    local(*D);
+    my $me = $INC{"Generators.pm"};
+    die "Couldn't find myself in \@INC, which is required to load the generators!" if ("$me" eq "");
+    $me = dirname($me);
+    if (opendir(D,"$me/Generators")) {
+        foreach my $gen (readdir(D)) {
+            next unless ($gen  =~ /\.pm$/);
+            require "${me}/Generators/$gen";
+            $gen =~ s,\.pm,,;
+            push(@AVAILABLE, $gen);
+        }
+        closedir(D);
+        my $gens = join(', ', @AVAILABLE);
+    }
+
+    push @EXPORT_OK, qw(available);
+}
+
+sub available {
+    return @AVAILABLE;
+}
+
+sub generate {
+    my ($gen, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    return eval("Generators::${gen}::generate(\$git_dir, \$out_dir, \$rel_dir, \%build_structure)") if grep(/^$gen$/, @AVAILABLE);
+    die "Generator \"${gen}\" is not available!\nAvailable generators are: @AVAILABLE\n";
+}
+
+1;
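
As an illustration of how this loader is meant to be driven (engine.pl later
in this diff does exactly this after parsing the "make -n" output), here is a
minimal sketch.  It is not part of the diff, and it only lists the available
generators rather than populating a real %build_structure:

    #!/usr/bin/perl
    # Minimal driver sketch for Generators.pm; assumes it sits next to
    # Generators.pm and the Generators/ directory.
    use strict;
    use warnings;
    use FindBin;
    use lib $FindBin::Bin;
    use Generators;

    # The BEGIN block in Generators.pm has already scanned Generators/
    # and loaded every *.pm it found there.
    print "Available generators: ", join(', ', Generators::available()), "\n";

    # Once %build_structure has been filled in (LIBS, APPS,
    # LIBS_<name>_SOURCES and so on, as engine.pl builds them),
    # dispatch looks like this:
    # Generators::generate('Vcxproj', $git_dir, $out_dir, $rel_dir, %build_structure);
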
diff --git a/third_party/git/contrib/buildsystems/Generators/QMake.pm b/third_party/git/contrib/buildsystems/Generators/QMake.pm
new file mode 100644
index 000000000000..ff3b657e6105
--- /dev/null
+++ b/third_party/git/contrib/buildsystems/Generators/QMake.pm
@@ -0,0 +1,189 @@
+package Generators::QMake;
+require Exporter;
+
+use strict;
+use vars qw($VERSION);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+    push @EXPORT_OK, qw(generate);
+}
+
+sub generate {
+    my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+
+    my @libs = @{$build_structure{"LIBS"}};
+    foreach (@libs) {
+        createLibProject($_, $git_dir, $out_dir, $rel_dir, %build_structure);
+    }
+
+    my @apps = @{$build_structure{"APPS"}};
+    foreach (@apps) {
+        createAppProject($_, $git_dir, $out_dir, $rel_dir, %build_structure);
+    }
+
+    createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
+    return 0;
+}
+
+sub createLibProject {
+    my ($libname, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    print "Generate $libname lib project\n";
+    $rel_dir = "../$rel_dir";
+
+    my $sources = join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"LIBS_${libname}_SOURCES"}})));
+    my $defines = join(" \\\n\t", sort(@{$build_structure{"LIBS_${libname}_DEFINES"}}));
+    my $includes= join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"LIBS_${libname}_INCLUDES"}})));
+    my $cflags  = join(" ", sort(@{$build_structure{"LIBS_${libname}_CFLAGS"}}));
+
+    my $cflags_debug = $cflags;
+    $cflags_debug =~ s/-MT/-MTd/;
+    $cflags_debug =~ s/-O.//;
+
+    my $cflags_release = $cflags;
+    $cflags_release =~ s/-MTd/-MT/;
+
+    my @tmp  = @{$build_structure{"LIBS_${libname}_LFLAGS"}};
+    my @tmp2 = ();
+    foreach (@tmp) {
+        if (/^-LTCG/) {
+        } elsif (/^-L/) {
+            $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+        }
+        push(@tmp2, $_);
+    }
+    my $lflags = join(" ", sort(@tmp));
+
+    my $target = $libname;
+    $target =~ s/\//_/g;
+    $defines =~ s/-D//g;
+    $defines =~ s/"/\\\\"/g;
+    $includes =~ s/-I//g;
+    mkdir "$target" || die "Could not create the directory $target for lib project!\n";
+    open F, ">$target/$target.pro" || die "Could not open $target/$target.pro for writing!\n";
+    print F << "EOM";
+TEMPLATE = lib
+TARGET = $target
+DESTDIR = $rel_dir
+
+CONFIG -= qt
+CONFIG += static
+
+QMAKE_CFLAGS =
+QMAKE_CFLAGS_RELEASE = $cflags_release
+QMAKE_CFLAGS_DEBUG = $cflags_debug
+QMAKE_LIBFLAGS = $lflags
+
+DEFINES += \\
+        $defines
+
+INCLUDEPATH += \\
+        $includes
+
+SOURCES += \\
+        $sources
+EOM
+    close F;
+}
+
+sub createAppProject {
+    my ($appname, $git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    print "Generate $appname app project\n";
+    $rel_dir = "../$rel_dir";
+
+    my $sources = join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"APPS_${appname}_SOURCES"}})));
+    my $defines = join(" \\\n\t", sort(@{$build_structure{"APPS_${appname}_DEFINES"}}));
+    my $includes= join(" \\\n\t", sort(map("$rel_dir/$_", @{$build_structure{"APPS_${appname}_INCLUDES"}})));
+    my $cflags  = join(" ", sort(@{$build_structure{"APPS_${appname}_CFLAGS"}}));
+
+    my $cflags_debug = $cflags;
+    $cflags_debug =~ s/-MT/-MTd/;
+    $cflags_debug =~ s/-O.//;
+
+    my $cflags_release = $cflags;
+    $cflags_release =~ s/-MTd/-MT/;
+
+    my $libs;
+    foreach (sort(@{$build_structure{"APPS_${appname}_LIBS"}})) {
+        $_ =~ s/\//_/g;
+        $libs .= " $_";
+    }
+    my @tmp  = @{$build_structure{"APPS_${appname}_LFLAGS"}};
+    my @tmp2 = ();
+    foreach (@tmp) {
+        # next if ($_ eq "-NODEFAULTLIB:MSVCRT.lib");
+        if (/^-LTCG/) {
+        } elsif (/^-L/) {
+            $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+        }
+        push(@tmp2, $_);
+    }
+    my $lflags = join(" ", sort(@tmp));
+
+    my $target = $appname;
+    $target =~ s/\.exe//;
+    $target =~ s/\//_/g;
+    $defines =~ s/-D//g;
+    $defines =~ s/"/\\\\"/g;
+    $includes =~ s/-I//g;
+    mkdir "$target" || die "Could not create the directory $target for app project!\n";
+    open F, ">$target/$target.pro" || die "Could not open $target/$target.pro for writing!\n";
+    print F << "EOM";
+TEMPLATE = app
+TARGET = $target
+DESTDIR = $rel_dir
+
+CONFIG -= qt embed_manifest_exe
+CONFIG += console
+
+QMAKE_CFLAGS =
+QMAKE_CFLAGS_RELEASE = $cflags_release
+QMAKE_CFLAGS_DEBUG = $cflags_debug
+QMAKE_LFLAGS = $lflags
+LIBS   = $libs
+
+DEFINES += \\
+        $defines
+
+INCLUDEPATH += \\
+        $includes
+
+win32:QMAKE_LFLAGS += -LIBPATH:$rel_dir
+else: QMAKE_LFLAGS += -L$rel_dir
+
+SOURCES += \\
+        $sources
+EOM
+    close F;
+}
+
+sub createGlueProject {
+    my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    my $libs = join(" \\ \n", map("\t$_|$_.pro", @{$build_structure{"LIBS"}}));
+    my $apps = join(" \\ \n", map("\t$_|$_.pro", @{$build_structure{"APPS"}}));
+    $libs =~ s/\.a//g;
+    $libs =~ s/\//_/g;
+    $libs =~ s/\|/\//g;
+    $apps =~ s/\.exe//g;
+    $apps =~ s/\//_/g;
+    $apps =~ s/\|/\//g;
+
+    my $filename = $out_dir;
+    $filename =~ s/.*\/([^\/]+)$/$1/;
+    $filename =~ s/\/$//;
+    print "Generate glue project $filename.pro\n";
+    open F, ">$filename.pro" || die "Could not open $filename.pro for writing!\n";
+    print F << "EOM";
+TEMPLATE = subdirs
+CONFIG += ordered
+SUBDIRS += \\
+$libs \\
+$apps
+EOM
+    close F;
+}
+
+1;
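
A note on the "|" placeholder used by createGlueProject() above: the separator
survives the slash-to-underscore rewrite and only becomes a real path
separator in the last substitution.  The following standalone sketch (not part
of the diff; the library names are just examples) shows the resulting SUBDIRS
entries:

    #!/usr/bin/perl
    # Reproduces the name mangling from createGlueProject() on sample input.
    use strict;
    use warnings;

    my @libs = ('xdiff/lib', 'libgit');
    my $subdirs = join(" \\\n", map { "\t$_|$_.pro" } @libs);
    $subdirs =~ s/\.a//g;    # drop archive suffixes, if any
    $subdirs =~ s/\//_/g;    # fold directories into the project name
    $subdirs =~ s/\|/\//g;   # the placeholder becomes the directory separator
    print "$subdirs\n";
    # Output (tab-indented):
    #   xdiff_lib/xdiff_lib.pro \
    #   libgit/libgit.pro
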
diff --git a/third_party/git/contrib/buildsystems/Generators/Vcproj.pm b/third_party/git/contrib/buildsystems/Generators/Vcproj.pm
new file mode 100644
index 000000000000..737647e76afd
--- /dev/null
+++ b/third_party/git/contrib/buildsystems/Generators/Vcproj.pm
@@ -0,0 +1,579 @@
+package Generators::Vcproj;
+require Exporter;
+
+use strict;
+use vars qw($VERSION);
+use Digest::SHA qw(sha256_hex);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+    push @EXPORT_OK, qw(generate);
+}
+
+sub generate_guid ($) {
+    my $hex = sha256_hex($_[0]);
+    $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/;
+    $hex =~ tr/a-z/A-Z/;
+    return $hex;
+}
+
+sub generate {
+    my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    my @libs = @{$build_structure{"LIBS"}};
+    foreach (@libs) {
+        createLibProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure);
+    }
+
+    my @apps = @{$build_structure{"APPS"}};
+    foreach (@apps) {
+        createAppProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure);
+    }
+
+    createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
+    return 0;
+}
+
+sub createLibProject {
+    my ($libname, $git_dir, $out_dir, $rel_dir, $build_structure) = @_;
+    print "Generate $libname vcproj lib project\n";
+    $rel_dir = "..\\$rel_dir";
+    $rel_dir =~ s/\//\\/g;
+
+    my $target = $libname;
+    $target =~ s/\//_/g;
+    $target =~ s/\.a//;
+
+    my $uuid = generate_guid($libname);
+    $$build_structure{"LIBS_${target}_GUID"} = $uuid;
+
+    my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"LIBS_${libname}_SOURCES"}}));
+    my @sources;
+    foreach (@srcs) {
+        $_ =~ s/\//\\/g;
+        push(@sources, $_);
+    }
+    my $defines = join(",", sort(@{$$build_structure{"LIBS_${libname}_DEFINES"}}));
+    my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"LIBS_${libname}_INCLUDES"}})));
+    my $cflags  = join(" ", sort(@{$$build_structure{"LIBS_${libname}_CFLAGS"}}));
+    $cflags =~ s/\"/&quot;/g;
+    $cflags =~ s/</&lt;/g;
+    $cflags =~ s/>/&gt;/g;
+
+    my $cflags_debug = $cflags;
+    $cflags_debug =~ s/-MT/-MTd/;
+    $cflags_debug =~ s/-O.//;
+
+    my $cflags_release = $cflags;
+    $cflags_release =~ s/-MTd/-MT/;
+
+    my @tmp  = @{$$build_structure{"LIBS_${libname}_LFLAGS"}};
+    my @tmp2 = ();
+    foreach (@tmp) {
+        if (/^-LTCG/) {
+        } elsif (/^-L/) {
+            $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+        }
+        push(@tmp2, $_);
+    }
+    my $lflags = join(" ", sort(@tmp));
+
+    $defines =~ s/-D//g;
+    $defines =~ s/\"/\\&quot;/g;
+    $defines =~ s/</&lt;/g;
+    $defines =~ s/>/&gt;/g;
+    $defines =~ s/\'//g;
+    $includes =~ s/-I//g;
+    mkdir "$target" || die "Could not create the directory $target for lib project!\n";
$target/$target.vcproj"">
+    open F, ">$target/$target.vcproj" || die "Could not open $target/$target.vcproj for writing!\n";
+    binmode F, ":crlf";
+    print F << "EOM";
+<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+	ProjectType="Visual C++"
+	Version="9,00"
+	Name="$target"
+	ProjectGUID="$uuid">
+	<Platforms>
+		<Platform
+			Name="Win32"/>
+	</Platforms>
+	<ToolFiles>
+	</ToolFiles>
+	<Configurations>
+		<Configuration
+			Name="Debug|Win32"
+			OutputDirectory="$rel_dir"
+			ConfigurationType="4"
+			CharacterSet="0"
+			IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+			>
+			<Tool
+				Name="VCPreBuildEventTool"
+			/>
+			<Tool
+				Name="VCCustomBuildTool"
+			/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"
+			/>
+			<Tool
+				Name="VCMIDLTool"
+			/>
+			<Tool
+				Name="VCCLCompilerTool"
+				AdditionalOptions="$cflags_debug"
+				Optimization="0"
+				InlineFunctionExpansion="1"
+				AdditionalIncludeDirectories="$includes"
+				PreprocessorDefinitions="WIN32,_DEBUG,$defines"
+				MinimalRebuild="true"
+				RuntimeLibrary="1"
+				UsePrecompiledHeader="0"
+				ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+				WarningLevel="3"
+				DebugInformationFormat="3"
+			/>
+			<Tool
+				Name="VCManagedResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCPreLinkEventTool"
+			/>
+			<Tool
+				Name="VCLibrarianTool"
+				SuppressStartupBanner="true"
+			/>
+			<Tool
+				Name="VCALinkTool"
+			/>
+			<Tool
+				Name="VCXDCMakeTool"
+			/>
+			<Tool
+				Name="VCBscMakeTool"
+			/>
+			<Tool
+				Name="VCFxCopTool"
+			/>
+			<Tool
+				Name="VCPostBuildEventTool"
+			/>
+		</Configuration>
+		<Configuration
+			Name="Release|Win32"
+			OutputDirectory="$rel_dir"
+			ConfigurationType="4"
+			CharacterSet="0"
+			WholeProgramOptimization="1"
+			IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+			>
+			<Tool
+				Name="VCPreBuildEventTool"
+			/>
+			<Tool
+				Name="VCCustomBuildTool"
+			/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"
+			/>
+			<Tool
+				Name="VCMIDLTool"
+			/>
+			<Tool
+				Name="VCCLCompilerTool"
+				AdditionalOptions="$cflags_release"
+				Optimization="2"
+				InlineFunctionExpansion="1"
+				EnableIntrinsicFunctions="true"
+				AdditionalIncludeDirectories="$includes"
+				PreprocessorDefinitions="WIN32,NDEBUG,$defines"
+				RuntimeLibrary="0"
+				EnableFunctionLevelLinking="true"
+				UsePrecompiledHeader="0"
+				ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+				WarningLevel="3"
+				DebugInformationFormat="3"
+			/>
+			<Tool
+				Name="VCManagedResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCPreLinkEventTool"
+			/>
+			<Tool
+				Name="VCLibrarianTool"
+				SuppressStartupBanner="true"
+			/>
+			<Tool
+				Name="VCALinkTool"
+			/>
+			<Tool
+				Name="VCXDCMakeTool"
+			/>
+			<Tool
+				Name="VCBscMakeTool"
+			/>
+			<Tool
+				Name="VCFxCopTool"
+			/>
+			<Tool
+				Name="VCPostBuildEventTool"
+			/>
+		</Configuration>
+	</Configurations>
+	<Files>
+		<Filter
+			Name="Source Files"
+			Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx"
+			UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}">
+EOM
+    foreach(@sources) {
+        print F << "EOM";
+			<File
+				RelativePath="$_"/>
+EOM
+    }
+    print F << "EOM";
+		</Filter>
+	</Files>
+	<Globals>
+	</Globals>
+</VisualStudioProject>
+EOM
+    close F;
+}
+
+sub createAppProject {
+    my ($appname, $git_dir, $out_dir, $rel_dir, $build_structure) = @_;
+    print "Generate $appname vcproj app project\n";
+    $rel_dir = "..\\$rel_dir";
+    $rel_dir =~ s/\//\\/g;
+
+    my $target = $appname;
+    $target =~ s/\//_/g;
+    $target =~ s/\.exe//;
+
+    my $uuid = generate_guid($appname);
+    $$build_structure{"APPS_${target}_GUID"} = $uuid;
+
+    my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"APPS_${appname}_SOURCES"}}));
+    my @sources;
+    foreach (@srcs) {
+        $_ =~ s/\//\\/g;
+        push(@sources, $_);
+    }
+    my $defines = join(",", sort(@{$$build_structure{"APPS_${appname}_DEFINES"}}));
+    my $includes= join(";", sort(map("&quot;$rel_dir\\$_&quot;", @{$$build_structure{"APPS_${appname}_INCLUDES"}})));
+    my $cflags  = join(" ", sort(@{$$build_structure{"APPS_${appname}_CFLAGS"}}));
+    $cflags =~ s/\"/&quot;/g;
+    $cflags =~ s/</&lt;/g;
+    $cflags =~ s/>/&gt;/g;
+
+    my $cflags_debug = $cflags;
+    $cflags_debug =~ s/-MT/-MTd/;
+    $cflags_debug =~ s/-O.//;
+
+    my $cflags_release = $cflags;
+    $cflags_release =~ s/-MTd/-MT/;
+
+    my $libs;
+    foreach (sort(@{$$build_structure{"APPS_${appname}_LIBS"}})) {
+        $_ =~ s/\//_/g;
+        $libs .= " $_";
+    }
+    my @tmp  = @{$$build_structure{"APPS_${appname}_LFLAGS"}};
+    my @tmp2 = ();
+    foreach (@tmp) {
+        if (/^-LTCG/) {
+        } elsif (/^-L/) {
+            $_ =~ s/^-L/-LIBPATH:$rel_dir\//;
+        }
+        push(@tmp2, $_);
+    }
+    my $lflags = join(" ", sort(@tmp)) . " -LIBPATH:$rel_dir";
+
+    $defines =~ s/-D//g;
+    $defines =~ s/\"/\\&quot;/g;
+    $defines =~ s/</&lt;/g;
+    $defines =~ s/>/&gt;/g;
+    $defines =~ s/\'//g;
+    $defines =~ s/\\\\/\\/g;
+    $includes =~ s/-I//g;
+    mkdir "$target" || die "Could not create the directory $target for lib project!\n";
$target/$target.vcproj"">
+    open F, ">$target/$target.vcproj" || die "Could not open $target/$target.vcproj for writing!\n";
+    binmode F, ":crlf";
+    print F << "EOM";
+<?xml version="1.0" encoding = "Windows-1252"?>
+<VisualStudioProject
+	ProjectType="Visual C++"
+	Version="9,00"
+	Name="$target"
+	ProjectGUID="$uuid">
+	<Platforms>
+		<Platform
+			Name="Win32"/>
+	</Platforms>
+	<ToolFiles>
+	</ToolFiles>
+	<Configurations>
+		<Configuration
+			Name="Debug|Win32"
+			OutputDirectory="$rel_dir"
+			ConfigurationType="1"
+			CharacterSet="0"
+			IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+			>
+			<Tool
+				Name="VCPreBuildEventTool"
+			/>
+			<Tool
+				Name="VCCustomBuildTool"
+			/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"
+			/>
+			<Tool
+				Name="VCMIDLTool"
+			/>
+			<Tool
+				Name="VCCLCompilerTool"
+				AdditionalOptions="$cflags_debug"
+				Optimization="0"
+				InlineFunctionExpansion="1"
+				AdditionalIncludeDirectories="$includes"
+				PreprocessorDefinitions="WIN32,_DEBUG,$defines"
+				MinimalRebuild="true"
+				RuntimeLibrary="1"
+				UsePrecompiledHeader="0"
+				ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+				WarningLevel="3"
+				DebugInformationFormat="3"
+			/>
+			<Tool
+				Name="VCManagedResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCPreLinkEventTool"
+			/>
+			<Tool
+				Name="VCLinkerTool"
+				AdditionalDependencies="$libs"
+				AdditionalOptions="$lflags"
+				LinkIncremental="2"
+				GenerateDebugInformation="true"
+				SubSystem="1"
+				TargetMachine="1"
+			/>
+			<Tool
+				Name="VCALinkTool"
+			/>
+			<Tool
+				Name="VCXDCMakeTool"
+			/>
+			<Tool
+				Name="VCBscMakeTool"
+			/>
+			<Tool
+				Name="VCFxCopTool"
+			/>
+			<Tool
+				Name="VCPostBuildEventTool"
+			/>
+		</Configuration>
+		<Configuration
+			Name="Release|Win32"
+			OutputDirectory="$rel_dir"
+			ConfigurationType="1"
+			CharacterSet="0"
+			WholeProgramOptimization="1"
+			IntermediateDirectory="\$(ProjectDir)\$(ConfigurationName)"
+			>
+			<Tool
+				Name="VCPreBuildEventTool"
+			/>
+			<Tool
+				Name="VCCustomBuildTool"
+			/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"
+			/>
+			<Tool
+				Name="VCMIDLTool"
+			/>
+			<Tool
+				Name="VCCLCompilerTool"
+				AdditionalOptions="$cflags_release"
+				Optimization="2"
+				InlineFunctionExpansion="1"
+				EnableIntrinsicFunctions="true"
+				AdditionalIncludeDirectories="$includes"
+				PreprocessorDefinitions="WIN32,NDEBUG,$defines"
+				RuntimeLibrary="0"
+				EnableFunctionLevelLinking="true"
+				UsePrecompiledHeader="0"
+				ProgramDataBaseFileName="\$(IntDir)\\\$(TargetName).pdb"
+				WarningLevel="3"
+				DebugInformationFormat="3"
+			/>
+			<Tool
+				Name="VCManagedResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCResourceCompilerTool"
+			/>
+			<Tool
+				Name="VCPreLinkEventTool"
+			/>
+			<Tool
+				Name="VCLinkerTool"
+				AdditionalDependencies="$libs"
+				AdditionalOptions="$lflags"
+				LinkIncremental="1"
+				GenerateDebugInformation="true"
+				SubSystem="1"
+				TargetMachine="1"
+				OptimizeReferences="2"
+				EnableCOMDATFolding="2"
+			/>
+			<Tool
+				Name="VCALinkTool"
+			/>
+			<Tool
+				Name="VCXDCMakeTool"
+			/>
+			<Tool
+				Name="VCBscMakeTool"
+			/>
+			<Tool
+				Name="VCFxCopTool"
+			/>
+			<Tool
+				Name="VCPostBuildEventTool"
+			/>
+		</Configuration>
+	</Configurations>
+	<Files>
+		<Filter
+			Name="Source Files"
+			Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx"
+			UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}">
+EOM
+    foreach(@sources) {
+        print F << "EOM";
+			<File
+				RelativePath="$_"/>
+EOM
+    }
+    print F << "EOM";
+		</Filter>
+	</Files>
+	<Globals>
+	</Globals>
+</VisualStudioProject>
+EOM
+    close F;
+}
+
+sub createGlueProject {
+    my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    print "Generate solutions file\n";
+    $rel_dir = "..\\$rel_dir";
+    $rel_dir =~ s/\//\\/g;
+    my $SLN_HEAD = "Microsoft Visual Studio Solution File, Format Version 10.00\n# Visual Studio 2008\n";
+    my $SLN_PRE  = "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = ";
+    my $SLN_POST = "\nEndProject\n";
+
+    my @libs = @{$build_structure{"LIBS"}};
+    my @tmp;
+    foreach (@libs) {
+        $_ =~ s/\//_/g;
+        $_ =~ s/\.a//;
+        push(@tmp, $_);
+    }
+    @libs = @tmp;
+
+    my @apps = @{$build_structure{"APPS"}};
+    @tmp = ();
+    foreach (@apps) {
+        $_ =~ s/\//_/g;
+        $_ =~ s/\.exe//;
+        if ($_ eq "git" ) {
+            unshift(@tmp, $_);
+        } else {
+            push(@tmp, $_);
+        }
+    }
+    @apps = @tmp;
+
+    open F, ">git.sln" || die "Could not open git.sln for writing!\n";
+    binmode F, ":crlf";
+    print F "$SLN_HEAD";
+
+    my $uuid_libgit = $build_structure{"LIBS_libgit_GUID"};
+    my $uuid_xdiff_lib = $build_structure{"LIBS_xdiff_lib_GUID"};
+    foreach (@apps) {
+        my $appname = $_;
+        my $uuid = $build_structure{"APPS_${appname}_GUID"};
+        print F "$SLN_PRE";
+        print F "\"${appname}\", \"${appname}\\${appname}.vcproj\", \"${uuid}\"\n";
+        print F "	ProjectSection(ProjectDependencies) = postProject\n";
+        print F "		${uuid_libgit} = ${uuid_libgit}\n";
+        print F "		${uuid_xdiff_lib} = ${uuid_xdiff_lib}\n";
+        print F "	EndProjectSection";
+        print F "$SLN_POST";
+    }
+    foreach (@libs) {
+        my $libname = $_;
+        my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+        print F "$SLN_PRE";
+        print F "\"${libname}\", \"${libname}\\${libname}.vcproj\", \"${uuid}\"";
+        print F "$SLN_POST";
+    }
+
+    print F << "EOM";
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Win32 = Debug|Win32
+		Release|Win32 = Release|Win32
+	EndGlobalSection
+EOM
+    print F << "EOM";
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+EOM
+    foreach (@apps) {
+        my $appname = $_;
+        my $uuid = $build_structure{"APPS_${appname}_GUID"};
+        print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
+        print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
+        print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
+        print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n";
+    }
+    foreach (@libs) {
+        my $libname = $_;
+        my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+        print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n";
+        print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n";
+        print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n";
+        print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n";
+    }
+
+    print F << "EOM";
+	EndGlobalSection
+EndGlobal
+EOM
+    close F;
+}
+
+1;
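
One property worth noting about generate_guid() above: the project GUIDs are
derived from the project names via SHA-256 rather than generated randomly, so
regenerating the solution always assigns the same GUID to the same target.  A
standalone sketch of the same helper (not part of the diff):

    #!/usr/bin/perl
    # Deterministic GUIDs: same input name, same GUID, every run.
    use strict;
    use warnings;
    use Digest::SHA qw(sha256_hex);

    sub generate_guid {
        my $hex = sha256_hex($_[0]);
        # First 32 hex digits, formatted as {8-4-4-4-12} and uppercased.
        $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/;
        $hex =~ tr/a-z/A-Z/;
        return $hex;
    }

    print generate_guid('libgit'), "\n";   # prints the same GUID on every run
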
diff --git a/third_party/git/contrib/buildsystems/Generators/Vcxproj.pm b/third_party/git/contrib/buildsystems/Generators/Vcxproj.pm
new file mode 100644
index 000000000000..5c666f9ac03b
--- /dev/null
+++ b/third_party/git/contrib/buildsystems/Generators/Vcxproj.pm
@@ -0,0 +1,392 @@
+package Generators::Vcxproj;
+require Exporter;
+
+use strict;
+use vars qw($VERSION);
+use Digest::SHA qw(sha256_hex);
+
+our $VERSION = '1.00';
+our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE);
+@ISA = qw(Exporter);
+
+BEGIN {
+    push @EXPORT_OK, qw(generate);
+}
+
+sub generate_guid ($) {
+	my $hex = sha256_hex($_[0]);
+	$hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/;
+	$hex =~ tr/a-z/A-Z/;
+	return $hex;
+}
+
+sub generate {
+    my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    my @libs = @{$build_structure{"LIBS"}};
+    foreach (@libs) {
+        createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 1);
+    }
+
+    my @apps = @{$build_structure{"APPS"}};
+    foreach (@apps) {
+        createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 0);
+    }
+
+    createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure);
+    return 0;
+}
+
+sub createProject {
+    my ($name, $git_dir, $out_dir, $rel_dir, $build_structure, $static_library) = @_;
+    my $label = $static_library ? "lib" : "app";
+    my $prefix = $static_library ? "LIBS_" : "APPS_";
+    my $config_type = $static_library ? "StaticLibrary" : "Application";
+    print "Generate $name vcxproj $label project\n";
+    my $cdup = $name;
+    $cdup =~ s/[^\/]+/../g;
+    $cdup =~ s/\//\\/g;
+    $rel_dir = $rel_dir eq "." ? $cdup : "$cdup\\$rel_dir";
+    $rel_dir =~ s/\//\\/g;
+
+    my $target = $name;
+    if ($static_library) {
+      $target =~ s/\.a//;
+    } else {
+      $target =~ s/\.exe//;
+    }
+
+    my $uuid = generate_guid($name);
+    $$build_structure{"$prefix${target}_GUID"} = $uuid;
+    my $vcxproj = $target;
+    $vcxproj =~ s/(.*\/)?(.*)/$&\/$2.vcxproj/;
+    $vcxproj =~ s/([^\/]*)(\/lib)\/(lib.vcxproj)/$1$2\/$1_$3/;
+    $$build_structure{"$prefix${target}_VCXPROJ"} = $vcxproj;
+
+    my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"$prefix${name}_SOURCES"}}));
+    my @sources;
+    foreach (@srcs) {
+        $_ =~ s/\//\\/g;
+        push(@sources, $_);
+    }
+    my $defines = join(";", sort(@{$$build_structure{"$prefix${name}_DEFINES"}}));
+    my $includes= join(";", sort(map { s/^-I//; s/\//\\/g; File::Spec->file_name_is_absolute($_) ? $_ : "$rel_dir\\$_" } @{$$build_structure{"$prefix${name}_INCLUDES"}}));
+    my $cflags = join(" ", sort(map { s/^-[GLMOWZ].*//; s/.* .*/"$&"/; $_; } @{$$build_structure{"$prefix${name}_CFLAGS"}}));
+    $cflags =~ s/</&lt;/g;
+    $cflags =~ s/>/&gt;/g;
+
+    my $libs_release = "\n    ";
+    my $libs_debug = "\n    ";
+    if (!$static_library) {
+      $libs_release = join(";", sort(grep /^(?!libgit\.lib|xdiff\/lib\.lib|vcs-svn\/lib\.lib)/, @{$$build_structure{"$prefix${name}_LIBS"}}));
+      $libs_debug = $libs_release;
+      $libs_debug =~ s/zlib\.lib/zlibd\.lib/g;
+      $libs_debug =~ s/libcurl\.lib/libcurl-d\.lib/g;
+    }
+
+    $defines =~ s/-D//g;
+    $defines =~ s/</&lt;/g;
+    $defines =~ s/>/&gt;/g;
+    $defines =~ s/\'//g;
+
+    die "Could not create the directory $target for $label project!\n" unless (-d "$target" || mkdir "$target");
+
+    open F, ">$vcxproj" or die "Could not open $vcxproj for writing!\n";
+    binmode F, ":crlf :utf8";
+    print F chr(0xFEFF);
+    print F << "EOM";
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup Label="ProjectConfigurations">
+    <ProjectConfiguration Include="Debug|Win32">
+      <Configuration>Debug</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|Win32">
+      <Configuration>Release</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Debug|x64">
+      <Configuration>Debug</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|x64">
+      <Configuration>Release</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+  </ItemGroup>
+  <PropertyGroup Label="Globals">
+    <ProjectGuid>$uuid</ProjectGuid>
+    <Keyword>Win32Proj</Keyword>
+    <VCPKGArch Condition="'\$(Platform)'=='Win32'">x86-windows</VCPKGArch>
+    <VCPKGArch Condition="'\$(Platform)'!='Win32'">x64-windows</VCPKGArch>
+    <VCPKGArchDirectory>$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)</VCPKGArchDirectory>
+    <VCPKGBinDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\bin</VCPKGBinDirectory>
+    <VCPKGLibDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\lib</VCPKGLibDirectory>
+    <VCPKGBinDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\bin</VCPKGBinDirectory>
+    <VCPKGLibDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\lib</VCPKGLibDirectory>
+    <VCPKGIncludeDirectory>\$(VCPKGArchDirectory)\\include</VCPKGIncludeDirectory>
+    <VCPKGLibs Condition="'\$(Configuration)'=='Debug'">$libs_debug</VCPKGLibs>
+    <VCPKGLibs Condition="'\$(Configuration)'!='Debug'">$libs_release</VCPKGLibs>
+  </PropertyGroup>
+  <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.Default.props" />
+  <PropertyGroup Condition="'\$(Configuration)'=='Debug'" Label="Configuration">
+    <UseDebugLibraries>true</UseDebugLibraries>
+    <LinkIncremental>true</LinkIncremental>
+  </PropertyGroup>
+  <PropertyGroup Condition="'\$(Configuration)'=='Release'" Label="Configuration">
+    <UseDebugLibraries>false</UseDebugLibraries>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+  </PropertyGroup>
+  <PropertyGroup>
+    <ConfigurationType>$config_type</ConfigurationType>
+    <PlatformToolset>v140</PlatformToolset>
+    <!-- <CharacterSet>UTF-8</CharacterSet> -->
+    <OutDir>..\\</OutDir>
+    <!-- <IntDir>\$(ProjectDir)\$(Configuration)\\</IntDir> -->
+  </PropertyGroup>
+  <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.props" />
+  <ImportGroup Label="ExtensionSettings">
+  </ImportGroup>
+  <ImportGroup Label="Shared">
+  </ImportGroup>
+  <ImportGroup Label="PropertySheets">
+    <Import Project="\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props" Condition="exists('\$(UserRootDir)\\Microsoft.Cpp.\$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <PropertyGroup Label="UserMacros" />
+  <PropertyGroup>
+    <GenerateManifest>false</GenerateManifest>
+    <EnableManagedIncrementalBuild>true</EnableManagedIncrementalBuild>
+  </PropertyGroup>
+  <ItemDefinitionGroup>
+    <ClCompile>
+      <AdditionalOptions>$cflags %(AdditionalOptions)</AdditionalOptions>
+      <AdditionalIncludeDirectories>$cdup;$cdup\\compat;$cdup\\compat\\regex;$cdup\\compat\\win32;$cdup\\compat\\poll;$cdup\\compat\\vcbuild\\include;\$(VCPKGIncludeDirectory);%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <EnableParallelCodeGeneration />
+      <InlineFunctionExpansion>OnlyExplicitInline</InlineFunctionExpansion>
+      <PrecompiledHeader />
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+    </ClCompile>
+    <Lib>
+      <SuppressStartupBanner>true</SuppressStartupBanner>
+    </Lib>
+    <Link>
+      <AdditionalLibraryDirectories>\$(VCPKGLibDirectory);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <AdditionalDependencies>\$(VCPKGLibs);\$(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalOptions>invalidcontinue.obj %(AdditionalOptions)</AdditionalOptions>
+      <EntryPointSymbol>wmainCRTStartup</EntryPointSymbol>
+      <ManifestFile>$cdup\\compat\\win32\\git.manifest</ManifestFile>
+      <SubSystem>Console</SubSystem>
+    </Link>
+EOM
+    if ($target eq 'libgit') {
+        print F << "EOM";
+    <PreBuildEvent Condition="!Exists('$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)\\include\\openssl\\ssl.h')">
+      <Message>Initialize VCPKG</Message>
+      <Command>del "$cdup\\compat\\vcbuild\\vcpkg"</Command>
+      <Command>call "$cdup\\compat\\vcbuild\\vcpkg_install.bat"</Command>
+    </PreBuildEvent>
+EOM
+    }
+    print F << "EOM";
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'\$(Platform)'=='Win32'">
+    <Link>
+      <TargetMachine>MachineX86</TargetMachine>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'\$(Configuration)'=='Debug'">
+    <ClCompile>
+      <Optimization>Disabled</Optimization>
+      <PreprocessorDefinitions>WIN32;_DEBUG;$defines;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+    </ClCompile>
+    <Link>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'\$(Configuration)'=='Release'">
+    <ClCompile>
+      <Optimization>MaxSpeed</Optimization>
+      <IntrinsicFunctions>true</IntrinsicFunctions>
+      <PreprocessorDefinitions>WIN32;NDEBUG;$defines;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+      <FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
+    </ClCompile>
+    <Link>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <OptimizeReferences>true</OptimizeReferences>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemGroup>
+EOM
+    foreach(@sources) {
+        print F << "EOM";
+    <ClCompile Include="$_" />
+EOM
+    }
+    print F << "EOM";
+  </ItemGroup>
+EOM
+    if (!$static_library || $target =~ 'vcs-svn' || $target =~ 'xdiff') {
+      my $uuid_libgit = $$build_structure{"LIBS_libgit_GUID"};
+      my $uuid_xdiff_lib = $$build_structure{"LIBS_xdiff/lib_GUID"};
+
+      print F << "EOM";
+  <ItemGroup>
+    <ProjectReference Include="$cdup\\libgit\\libgit.vcxproj">
+      <Project>$uuid_libgit</Project>
+      <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+    </ProjectReference>
+EOM
+      if (!($name =~ 'xdiff')) {
+        print F << "EOM";
+    <ProjectReference Include="$cdup\\xdiff\\lib\\xdiff_lib.vcxproj">
+      <Project>$uuid_xdiff_lib</Project>
+      <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+    </ProjectReference>
+EOM
+      }
+      if ($name =~ /(test-(line-buffer|svn-fe)|^git-remote-testsvn)\.exe$/) {
+        my $uuid_vcs_svn_lib = $$build_structure{"LIBS_vcs-svn/lib_GUID"};
+        print F << "EOM";
+    <ProjectReference Include="$cdup\\vcs-svn\\lib\\vcs-svn_lib.vcxproj">
+      <Project>$uuid_vcs_svn_lib</Project>
+      <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+    </ProjectReference>
+EOM
+      }
+      print F << "EOM";
+  </ItemGroup>
+EOM
+    }
+    print F << "EOM";
+  <Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.targets" />
+EOM
+    if (!$static_library) {
+      print F << "EOM";
+  <Target Name="${target}_AfterBuild" AfterTargets="AfterBuild">
+    <ItemGroup>
+      <DLLsAndPDBs Include="\$(VCPKGBinDirectory)\\*.dll;\$(VCPKGBinDirectory)\\*.pdb" />
+    </ItemGroup>
+    <Copy SourceFiles="@(DLLsAndPDBs)" DestinationFolder="\$(OutDir)" SkipUnchangedFiles="true" UseHardlinksIfPossible="true" />
+    <MakeDir Directories="..\\templates\\blt\\branches" />
+  </Target>
+EOM
+    }
+    if ($target eq 'git') {
+      print F "  <Import Project=\"LinkOrCopyBuiltins.targets\" />\n";
+    }
+    if ($target eq 'git-remote-http') {
+      print F "  <Import Project=\"LinkOrCopyRemoteHttp.targets\" />\n";
+    }
+    print F << "EOM";
+</Project>
+EOM
+    close F;
+}
+
+sub createGlueProject {
+    my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_;
+    print "Generate solutions file\n";
+    $rel_dir = "..\\$rel_dir";
+    $rel_dir =~ s/\//\\/g;
+    my $SLN_HEAD = "Microsoft Visual Studio Solution File, Format Version 11.00\n# Visual Studio 2010\n";
+    my $SLN_PRE  = "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = ";
+    my $SLN_POST = "\nEndProject\n";
+
+    my @libs = @{$build_structure{"LIBS"}};
+    my @tmp;
+    foreach (@libs) {
+        $_ =~ s/\.a//;
+        push(@tmp, $_);
+    }
+    @libs = @tmp;
+
+    my @apps = @{$build_structure{"APPS"}};
+    @tmp = ();
+    foreach (@apps) {
+        $_ =~ s/\.exe//;
+        if ($_ eq "git" ) {
+            unshift(@tmp, $_);
+        } else {
+            push(@tmp, $_);
+        }
+    }
+    @apps = @tmp;
+
+    open F, ">git.sln" || die "Could not open git.sln for writing!\n";
+    binmode F, ":crlf :utf8";
+    print F chr(0xFEFF);
+    print F "$SLN_HEAD";
+
+    foreach (@apps) {
+        my $appname = $_;
+        my $uuid = $build_structure{"APPS_${appname}_GUID"};
+        print F "$SLN_PRE";
+	my $vcxproj = $build_structure{"APPS_${appname}_VCXPROJ"};
+	$vcxproj =~ s/\//\\/g;
+        $appname =~ s/.*\///;
+        print F "\"${appname}\", \"${vcxproj}\", \"${uuid}\"";
+        print F "$SLN_POST";
+    }
+    foreach (@libs) {
+        my $libname = $_;
+        my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+        print F "$SLN_PRE";
+        my $vcxproj = $build_structure{"LIBS_${libname}_VCXPROJ"};
+	$vcxproj =~ s/\//\\/g;
+        $libname =~ s/\//_/g;
+        print F "\"${libname}\", \"${vcxproj}\", \"${uuid}\"";
+        print F "$SLN_POST";
+    }
+
+    print F << "EOM";
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|x64 = Debug|x64
+		Debug|x86 = Debug|x86
+		Release|x64 = Release|x64
+		Release|x86 = Release|x86
+	EndGlobalSection
+EOM
+    print F << "EOM";
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+EOM
+    foreach (@apps) {
+        my $appname = $_;
+        my $uuid = $build_structure{"APPS_${appname}_GUID"};
+        print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n";
+        print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n";
+        print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n";
+        print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n";
+        print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n";
+        print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n";
+        print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n";
+        print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n";
+    }
+    foreach (@libs) {
+        my $libname = $_;
+        my $uuid = $build_structure{"LIBS_${libname}_GUID"};
+        print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n";
+        print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n";
+        print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n";
+        print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n";
+        print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n";
+        print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n";
+        print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n";
+        print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n";
+    }
+
+    print F << "EOM";
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal
+EOM
+    close F;
+}
+
+1;
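
The two substitutions in createProject() above that derive the .vcxproj path
from a target name are easy to misread, so here is a standalone sketch (not
part of the diff) tracing them on names taken from git's own build: "libgit",
"xdiff/lib" and "git":

    #!/usr/bin/perl
    # Traces the target-name to .vcxproj-path mapping used by Vcxproj.pm.
    use strict;
    use warnings;

    for my $target ('libgit', 'xdiff/lib', 'git') {
        my $vcxproj = $target;
        # "name" -> "name/name.vcxproj", "dir/name" -> "dir/name/name.vcxproj"
        $vcxproj =~ s/(.*\/)?(.*)/$&\/$2.vcxproj/;
        # "<dir>/lib/lib.vcxproj" -> "<dir>/lib/<dir>_lib.vcxproj", which keeps
        # the various */lib targets distinguishable in the solution
        $vcxproj =~ s/([^\/]*)(\/lib)\/(lib.vcxproj)/$1$2\/$1_$3/;
        printf "%-10s -> %s\n", $target, $vcxproj;
    }
    # libgit     -> libgit/libgit.vcxproj
    # xdiff/lib  -> xdiff/lib/xdiff_lib.vcxproj
    # git        -> git/git.vcxproj
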
diff --git a/third_party/git/contrib/buildsystems/engine.pl b/third_party/git/contrib/buildsystems/engine.pl
new file mode 100755
index 000000000000..070978506ad5
--- /dev/null
+++ b/third_party/git/contrib/buildsystems/engine.pl
@@ -0,0 +1,394 @@
+#!/usr/bin/perl -w
+######################################################################
+# Do not call this script directly!
+#
+# The generate script ensures that @INC is correct before the engine
+# is executed.
+#
+# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
+######################################################################
+use strict;
+use File::Basename;
+use File::Spec;
+use Cwd;
+use Generators;
+use Text::ParseWords;
+
+my (%build_structure, %compile_options, @makedry);
+my $out_dir = getcwd();
+my $git_dir = $out_dir;
+$git_dir =~ s=\\=/=g;
+$git_dir = dirname($git_dir) while (!-e "$git_dir/git.c" && "$git_dir" ne "");
+die "Couldn't find Git repo" if ("$git_dir" eq "");
+
+my @gens = Generators::available();
+my $gen = "Vcproj";
+
+sub showUsage
+{
+    my $genlist = join(', ', @gens);
+    print << "EOM";
+generate usage:
+  -g <GENERATOR>  --gen <GENERATOR> Specify the buildsystem generator    (default: $gen)
+                                    Available: $genlist
+  -o <PATH>       --out <PATH>      Specify output directory generation  (default: .)
+                  --make-out <PATH> Write the output of GNU Make into a file
+  -i <FILE>       --in <FILE>       Specify input file, instead of running GNU Make
+  -h,-?           --help            This help
+EOM
+    exit 0;
+}
+
+# Parse command-line options
+my $make_out;
+while (@ARGV) {
+    my $arg = shift @ARGV;
+    if ("$arg" eq "-h" || "$arg" eq "--help" || "$arg" eq "-?") {
+	showUsage();
+	exit(0);
+    } elsif("$arg" eq "--out" || "$arg" eq "-o") {
+	$out_dir = shift @ARGV;
+    } elsif("$arg" eq "--make-out") {
+	$make_out = shift @ARGV;
+    } elsif("$arg" eq "--gen" || "$arg" eq "-g") {
+	$gen = shift @ARGV;
+    } elsif("$arg" eq "--in" || "$arg" eq "-i") {
+	my $infile = shift @ARGV;
+        open(F, "<$infile") || die "Couldn't open file $infile";
+        @makedry = <F>;
+        close(F);
+    } else {
+        die "Unknown option: " . $arg;
+    }
+}
+
+# NOT using File::Spec->rel2abs($path, $base) here, as
+# it fails badly for me in the msysgit environment
+$git_dir = File::Spec->rel2abs($git_dir);
+$out_dir = File::Spec->rel2abs($out_dir);
+my $rel_dir = makeOutRel2Git($git_dir, $out_dir);
+
+# Print some information so the user feels informed
+print << "EOM";
+-----
+Generator: $gen
+Git dir:   $git_dir
+Out dir:   $out_dir
+-----
+Running GNU Make to figure out build structure...
+EOM
+
+# Pipe a make --dry-run into a variable, if not already loaded from file
+# Capture the make dry stderr to file for review (will be empty for a release build).
+
+my $ErrsFile = "msvc-build-makedryerrors.txt";
+@makedry = `make -C $git_dir -n MSVC=1 SKIP_VCPKG=1 V=1 2>$ErrsFile`
+if !@makedry;
+# test for an empty Errors file and remove it
+unlink $ErrsFile if -f -z $ErrsFile;
+
+if (defined $make_out) {
+    open OUT, ">" . $make_out;
+    print OUT @makedry;
+    close OUT;
+}
+
+# Parse the make output into usable info
+parseMakeOutput();
+
+# Finally, ask the generator to start generating..
+Generators::generate($gen, $git_dir, $out_dir, $rel_dir, %build_structure);
+
+# main flow ends here
+# -------------------------------------------------------------------------------------------------
+
+
+# 1) path: /foo/bar/baz        2) path: /foo/bar/baz   3) path: /foo/bar/baz
+#    base: /foo/bar/baz/temp      base: /foo/bar          base: /tmp
+#    rel:  ..                     rel:  baz               rel:  ../foo/bar/baz
+sub makeOutRel2Git
+{
+    my ($path, $base) = @_;
+    my $rel;
+    if ("$path" eq "$base") {
+        return ".";
+    } elsif ($base =~ /^$path/) {
+        # case 1
+        my $tmp = $base;
+        $tmp =~ s/^$path//;
+        foreach (split('/', $tmp)) {
+            $rel .= "../" if ("$_" ne "");
+        }
+    } elsif ($path =~ /^$base/) {
+        # case 2
+        $rel = $path;
+        $rel =~ s/^$base//;
+        $rel = "./$rel";
+    } else {
+        my $tmp = $base;
+        foreach (split('/', $tmp)) {
+            $rel .= "../" if ("$_" ne "");
+        }
+        $rel .= $path;
+    }
+    $rel =~ s/\/\//\//g; # simplify
+    $rel =~ s/\/$//;     # don't end with /
+    return $rel;
+}
+
+sub parseMakeOutput
+{
+    print "Parsing GNU Make output to figure out build structure...\n";
+    my $line = 0;
+    while (my $text = shift @makedry) {
+        my $ate_next;
+        do {
+            $ate_next = 0;
+            $line++;
+            chomp $text;
+            chop $text if ($text =~ /\r$/);
+            if ($text =~ /\\$/) {
+                $text =~ s/\\$//;
+                $text .= shift @makedry;
+                $ate_next = 1;
+            }
+        } while($ate_next);
+
+        if ($text =~ /^test /) {
+            # options to test (eg -o) may be mistaken for linker options
+            next;
+        }
+
+        if ($text =~ /^(mkdir|msgfmt) /) {
+            # options to the Portable Object translations
+            # the line "mkdir ... && msgfmt ..." contains no linker options
+            next;
+        }
+
+        if($text =~ / -c /) {
+            # compilation
+            handleCompileLine($text, $line);
+
+        } elsif ($text =~ / -o /) {
+            # linking executable
+            handleLinkLine($text, $line);
+
+        } elsif ($text =~ /\.o / && $text =~ /\.a /) {
+            # libifying
+            handleLibLine($text, $line);
+#
+#        } elsif ($text =~ /^cp /) {
+#            # copy file around
+#
+#        } elsif ($text =~ /^rm -f /) {
+#            # shell command
+#
+#        } elsif ($text =~ /^make[ \[]/) {
+#            # make output
+#
+#        } elsif ($text =~ /^echo /) {
+#            # echo to file
+#
+#        } elsif ($text =~ /^if /) {
+#            # shell conditional
+#
+#        } elsif ($text =~ /^tclsh /) {
+#            # translation stuff
+#
+#        } elsif ($text =~ /^umask /) {
+#            # handling boilerplates
+#
+#        } elsif ($text =~ /\$\(\:\)/) {
+#            # ignore
+#
+#        } elsif ($text =~ /^FLAGS=/) {
+#            # flags check for dependencies
+#
+#        } elsif ($text =~ /^'\/usr\/bin\/perl' -MError -e/) {
+#            # perl commands for copying files
+#
+#        } elsif ($text =~ /generate-cmdlist\.sh/) {
+#            # command for generating list of commands
+#
+#        } elsif ($text =~ /new locations or Tcl/) {
+#            # command for detecting Tcl/Tk changes
+#
+#        } elsif ($text =~ /mkdir -p/) {
+#            # command creating path
+#
+#        } elsif ($text =~ /: no custom templates yet/) {
+#            # whatever
+#
+#        } else {
+#            print "Unhandled (line: $line): $text\n";
+        }
+    }
+
+#    use Data::Dumper;
+#    print "Parsed build structure:\n";
+#    print Dumper(%build_structure);
+}
+
+# variables for the compilation part of each step
+my (@defines, @incpaths, @cflags, @sources);
+
+sub clearCompileStep
+{
+    @defines = ();
+    @incpaths = ();
+    @cflags = ();
+    @sources = ();
+}
+
+sub removeDuplicates
+{
+    my (%dupHash, $entry);
+    %dupHash = map { $_, 1 } @defines;
+    @defines = keys %dupHash;
+
+    %dupHash = map { $_, 1 } @incpaths;
+    @incpaths = keys %dupHash;
+
+    %dupHash = map { $_, 1 } @cflags;
+    @cflags = keys %dupHash;
+}
+
+sub handleCompileLine
+{
+    my ($line, $lineno) = @_;
+    my @parts = shellwords($line);
+    my $sourcefile;
+    shift(@parts); # ignore cmd
+    while (my $part = shift @parts) {
+        if ("$part" eq "-o") {
+            # ignore object file
+            shift @parts;
+        } elsif ("$part" eq "-c") {
+            # ignore compile flag
+        } elsif ("$part" eq "-c") {
+        } elsif ($part =~ /^.?-I/) {
+            push(@incpaths, $part);
+        } elsif ($part =~ /^.?-D/) {
+            push(@defines, $part);
+        } elsif ($part =~ /^-/) {
+            push(@cflags, $part);
+        } elsif ($part =~ /\.(c|cc|cpp)$/) {
+            $sourcefile = $part;
+        } else {
+            die "Unhandled compiler option @ line $lineno: $part";
+        }
+    }
+    @{$compile_options{"${sourcefile}_CFLAGS"}} = @cflags;
+    @{$compile_options{"${sourcefile}_DEFINES"}} = @defines;
+    @{$compile_options{"${sourcefile}_INCPATHS"}} = @incpaths;
+    clearCompileStep();
+}
+
+sub handleLibLine
+{
+    my ($line, $lineno) = @_;
+    my (@objfiles, @lflags, $libout, $part);
+    # kill cmd and rm 'prefix'
+    $line =~ s/^rm -f .* && .* rcs //;
+    my @parts = shellwords($line);
+    while ($part = shift @parts) {
+        if ($part =~ /^-/) {
+            push(@lflags, $part);
+        } elsif ($part =~ /\.(o|obj)$/) {
+            push(@objfiles, $part);
+        } elsif ($part =~ /\.(a|lib)$/) {
+            $libout = $part;
+            $libout =~ s/\.a$//;
+        } else {
+            die "Unhandled lib option @ line $lineno: $part";
+        }
+    }
+#    print "LibOut: '$libout'\nLFlags: @lflags\nOfiles: @objfiles\n";
+#    exit(1);
+    foreach (@objfiles) {
+        my $sourcefile = $_;
+        $sourcefile =~ s/\.o$/.c/;
+        push(@sources, $sourcefile);
+        push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
+        push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
+        push(@incpaths, @{$compile_options{"${sourcefile}_INCPATHS"}});
+    }
+    removeDuplicates();
+
+    push(@{$build_structure{"LIBS"}}, $libout);
+    @{$build_structure{"LIBS_${libout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_SOURCES",
+                                             "_OBJECTS");
+    @{$build_structure{"LIBS_${libout}_DEFINES"}} = @defines;
+    @{$build_structure{"LIBS_${libout}_INCLUDES"}} = @incpaths;
+    @{$build_structure{"LIBS_${libout}_CFLAGS"}} = @cflags;
+    @{$build_structure{"LIBS_${libout}_LFLAGS"}} = @lflags;
+    @{$build_structure{"LIBS_${libout}_SOURCES"}} = @sources;
+    @{$build_structure{"LIBS_${libout}_OBJECTS"}} = @objfiles;
+    clearCompileStep();
+}
+
+sub handleLinkLine
+{
+    my ($line, $lineno) = @_;
+    my (@objfiles, @lflags, @libs, $appout, $part);
+    my @parts = shellwords($line);
+    shift(@parts); # ignore cmd
+    while ($part = shift @parts) {
+        if ($part =~ /^-IGNORE/) {
+            push(@lflags, $part);
+        } elsif ($part =~ /^-[GRIMDO]/) {
+            # eat compiler flags
+        } elsif ("$part" eq "-o") {
+            $appout = shift @parts;
+        } elsif ("$part" eq "-lz") {
+            push(@libs, "zlib.lib");
+        } elsif ("$part" eq "-lcrypto") {
+            push(@libs, "libcrypto.lib");
+        } elsif ("$part" eq "-lssl") {
+            push(@libs, "libssl.lib");
+        } elsif ("$part" eq "-lcurl") {
+            push(@libs, "libcurl.lib");
+        } elsif ("$part" eq "-lexpat") {
+            push(@libs, "expat.lib");
+        } elsif ("$part" eq "-liconv") {
+            push(@libs, "libiconv.lib");
+        } elsif ($part =~ /^[-\/]/) {
+            push(@lflags, $part);
+        } elsif ($part =~ /\.(a|lib)$/) {
+            $part =~ s/\.a$/.lib/;
+            push(@libs, $part);
+        } elsif ($part eq 'invalidcontinue.obj') {
+            # ignore - known to MSVC
+        } elsif ($part =~ /\.o$/) {
+            push(@objfiles, $part);
+        } elsif ($part =~ /\.obj$/) {
+            # do nothing, 'make' should not be producing .obj, only .o files
+        } else {
+            die "Unhandled link option @ line $lineno: $part";
+        }
+    }
+#    print "AppOut: '$appout'\nLFlags: @lflags\nLibs  : @libs\nOfiles: @objfiles\n";
+#    exit(1);
+    foreach (@objfiles) {
+        my $sourcefile = $_;
+        $sourcefile =~ s/\.o$/.c/;
+        push(@sources, $sourcefile);
+        push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}});
+        push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}});
+        push(@incpaths, @{$compile_options{"${sourcefile}_INCPATHS"}});
+    }
+    removeDuplicates();
+
+    push(@{$build_structure{"APPS"}}, $appout);
+    @{$build_structure{"APPS_${appout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_LFLAGS",
+                                             "_SOURCES", "_OBJECTS", "_LIBS");
+    @{$build_structure{"APPS_${appout}_DEFINES"}} = @defines;
+    @{$build_structure{"APPS_${appout}_INCLUDES"}} = @incpaths;
+    @{$build_structure{"APPS_${appout}_CFLAGS"}} = @cflags;
+    @{$build_structure{"APPS_${appout}_LFLAGS"}} = @lflags;
+    @{$build_structure{"APPS_${appout}_SOURCES"}} = @sources;
+    @{$build_structure{"APPS_${appout}_OBJECTS"}} = @objfiles;
+    @{$build_structure{"APPS_${appout}_LIBS"}} = @libs;
+    clearCompileStep();
+}
diff --git a/third_party/git/contrib/buildsystems/generate b/third_party/git/contrib/buildsystems/generate
new file mode 100755
index 000000000000..bc10f25ff24a
--- /dev/null
+++ b/third_party/git/contrib/buildsystems/generate
@@ -0,0 +1,29 @@
+#!/usr/bin/perl -w
+######################################################################
+# Generate buildsystem files
+#
+# This script generates buildsystem files based on the output of a
+# GNU Make --dry-run, enabling Windows users to develop Git in their
+# trusted IDE with native project files.
+#
+# Note:
+# It is not meant as *the* way of building Git with MSVC, but merely a
+# convenience. The correct way of building Git with MSVC is to use the
+# GNU Make tool to build with the maintained Makefile in the root of
+# the project. If you have the msysgit environment installed and
+# available in your current console, together with the Visual Studio
+# environment you wish to build for, all you have to do is run the
+# command:
+#     make MSVC=1
+#
+# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
+######################################################################
+use strict;
+use File::Basename;
+use Cwd;
+
+my $git_dir = getcwd();
+$git_dir =~ s=\\=/=g;
+$git_dir = dirname($git_dir) while (!-e "$git_dir/git.c" && "$git_dir" ne "");
+die "Couldn't find Git repo" if ("$git_dir" eq "");
+exec join(" ", ("PERL5LIB=${git_dir}/contrib/buildsystems ${git_dir}/contrib/buildsystems/engine.pl", @ARGV));
diff --git a/third_party/git/contrib/buildsystems/parse.pl b/third_party/git/contrib/buildsystems/parse.pl
new file mode 100755
index 000000000000..c9656ece9931
--- /dev/null
+++ b/third_party/git/contrib/buildsystems/parse.pl
@@ -0,0 +1,228 @@
+#!/usr/bin/perl -w
+######################################################################
+# Do not call this script directly!
+#
+# The generate script ensures that @INC is correct before the engine
+# is executed.
+#
+# Copyright (C) 2009 Marius Storm-Olsen <mstormo@gmail.com>
+######################################################################
+use strict;
+use File::Basename;
+use Cwd;
+
+my $file = $ARGV[0];
+die "No file provided!" if !defined $file;
+
+my ($cflags, $target, $type, $line);
+
+open(F, "<$file") || die "Couldn't open file $file";
+my @data = <F>;
+close(F);
+
+while (my $text = shift @data) {
+    my $ate_next;
+    do {
+        $ate_next = 0;
+        $line++;
+        chomp $text;
+        chop $text if ($text =~ /\r$/);
+        if ($text =~ /\\$/) {
+            $text =~ s/\\$//;
+            $text .= shift @data;
+            $ate_next = 1;
+        }
+    } while($ate_next);
+
+    if($text =~ / -c /) {
+        # compilation
+        handleCompileLine($text, $line);
+
+    } elsif ($text =~ / -o /) {
+        # linking executable
+        handleLinkLine($text, $line);
+
+    } elsif ($text =~ /\.o / && $text =~ /\.a /) {
+        # libifying
+        handleLibLine($text, $line);
+
+#    } elsif ($text =~ /^cp /) {
+#        # copy file around
+#
+#    } elsif ($text =~ /^rm -f /) {
+#        # shell command
+#
+#    } elsif ($text =~ /^make[ \[]/) {
+#        # make output
+#
+#    } elsif ($text =~ /^echo /) {
+#        # echo to file
+#
+#    } elsif ($text =~ /^if /) {
+#        # shell conditional
+#
+#    } elsif ($text =~ /^tclsh /) {
+#        # translation stuff
+#
+#    } elsif ($text =~ /^umask /) {
+#        # handling boilerplates
+#
+#    } elsif ($text =~ /\$\(\:\)/) {
+#        # ignore
+#
+#    } elsif ($text =~ /^FLAGS=/) {
+#        # flags check for dependencies
+#
+#    } elsif ($text =~ /^'\/usr\/bin\/perl' -MError -e/) {
+#        # perl commands for copying files
+#
+#    } elsif ($text =~ /generate-cmdlist\.sh/) {
+#        # command for generating list of commands
+#
+#    } elsif ($text =~ /^test / && $text =~ /|| rm -f /) {
+#        # commands removing executables, if they exist
+#
+#    } elsif ($text =~ /new locations or Tcl/) {
+#        # command for detecting Tcl/Tk changes
+#
+#    } elsif ($text =~ /mkdir -p/) {
+#        # command creating path
+#
+#    } elsif ($text =~ /: no custom templates yet/) {
+#        # whatever
+
+    } else {
+#        print "Unhandled (line: $line): $text\n";
+    }
+}
+
+# use Data::Dumper;
+# print "Parsed build structure:\n";
+# print Dumper(%build_structure);
+
+# -------------------------------------------------------------------
+# Functions under here
+# -------------------------------------------------------------------
+my (%build_structure, @defines, @incpaths, @cflags, @sources);
+
+sub clearCompileStep
+{
+    @defines = ();
+    @incpaths = ();
+    @cflags = ();
+    @sources = ();
+}
+
+sub removeDuplicates
+{
+    my (%dupHash, $entry);
+    %dupHash = map { $_, 1 } @defines;
+    @defines = keys %dupHash;
+
+    %dupHash = map { $_, 1 } @incpaths;
+    @incpaths = keys %dupHash;
+
+    %dupHash = map { $_, 1 } @cflags;
+    @cflags = keys %dupHash;
+
+    %dupHash = map { $_, 1 } @sources;
+    @sources = keys %dupHash;
+}
+
+sub handleCompileLine
+{
+    my ($line, $lineno) = @_;
+    my @parts = split(' ', $line);
+    shift(@parts); # ignore cmd
+    while (my $part = shift @parts) {
+        if ("$part" eq "-o") {
+            # ignore object file
+            shift @parts;
+        } elsif ("$part" eq "-c") {
+            # ignore compile flag
+        } elsif ("$part" eq "-c") {
+        } elsif ($part =~ /^.?-I/) {
+            push(@incpaths, $part);
+        } elsif ($part =~ /^.?-D/) {
+            push(@defines, $part);
+        } elsif ($part =~ /^-/) {
+            push(@cflags, $part);
+        } elsif ($part =~ /\.(c|cc|cpp)$/) {
+            push(@sources, $part);
+        } else {
+            die "Unhandled compiler option @ line $lineno: $part";
+        }
+    }
+    #print "Sources: @sources\nCFlags: @cflags\nDefine: @defines\nIncpat: @incpaths\n";
+    #exit(1);
+}
+
+sub handleLibLine
+{
+    my ($line, $lineno) = @_;
+    my (@objfiles, @lflags, $libout, $part);
+    # kill cmd and rm 'prefix'
+    $line =~ s/^rm -f .* && .* rcs //;
+    my @parts = split(' ', $line);
+    while ($part = shift @parts) {
+        if ($part =~ /^-/) {
+            push(@lflags, $part);
+        } elsif ($part =~ /\.(o|obj)$/) {
+            push(@objfiles, $part);
+        } elsif ($part =~ /\.(a|lib)$/) {
+            $libout = $part;
+        } else {
+            die "Unhandled lib option @ line $lineno: $part";
+        }
+    }
+    #print "LibOut: '$libout'\nLFlags: @lflags\nOfiles: @objfiles\n";
+    #exit(1);
+    removeDuplicates();
+    push(@{$build_structure{"LIBS"}}, $libout);
+    @{$build_structure{"LIBS_${libout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_SOURCES",
+                                             "_OBJECTS");
+    @{$build_structure{"LIBS_${libout}_DEFINES"}} = @defines;
+    @{$build_structure{"LIBS_${libout}_INCLUDES"}} = @incpaths;
+    @{$build_structure{"LIBS_${libout}_CFLAGS"}} = @cflags;
+    @{$build_structure{"LIBS_${libout}_SOURCES"}} = @sources;
+    @{$build_structure{"LIBS_${libout}_OBJECTS"}} = @objfiles;
+    clearCompileStep();
+}
+
+sub handleLinkLine
+{
+    my ($line, $lineno) = @_;
+    my (@objfiles, @lflags, @libs, $appout, $part);
+    my @parts = split(' ', $line);
+    shift(@parts); # ignore cmd
+    while ($part = shift @parts) {
+        if ($part =~ /^-[GRIDO]/) {
+            # eat compiler flags
+        } elsif ("$part" eq "-o") {
+            $appout = shift @parts;
+        } elsif ($part =~ /^-/) {
+            push(@lflags, $part);
+        } elsif ($part =~ /\.(a|lib)$/) {
+            push(@libs, $part);
+        } elsif ($part =~ /\.(o|obj)$/) {
+            push(@objfiles, $part);
+        } else {
+            die "Unhandled lib option @ line $lineno: $part";
+        }
+    }
+    #print "AppOut: '$appout'\nLFlags: @lflags\nLibs  : @libs\nOfiles: @objfiles\n";
+    #exit(1);
+    removeDuplicates();
+    push(@{$build_structure{"APPS"}}, $appout);
+    @{$build_structure{"APPS_${appout}"}} = ("_DEFINES", "_INCLUDES", "_CFLAGS", "_LFLAGS",
+                                             "_SOURCES", "_OBJECTS", "_LIBS");
+    @{$build_structure{"APPS_${appout}_DEFINES"}} = @defines;
+    @{$build_structure{"APPS_${appout}_INCLUDES"}} = @incpaths;
+    @{$build_structure{"APPS_${appout}_CFLAGS"}} = @cflags;
+    @{$build_structure{"APPS_${appout}_LFLAGS"}} = @lflags;
+    @{$build_structure{"APPS_${appout}_SOURCES"}} = @sources;
+    @{$build_structure{"APPS_${appout}_OBJECTS"}} = @objfiles;
+    @{$build_structure{"APPS_${appout}_LIBS"}} = @libs;
+    clearCompileStep();
+}
diff --git a/third_party/git/contrib/coccinelle/.gitignore b/third_party/git/contrib/coccinelle/.gitignore
new file mode 100644
index 000000000000..d3f29646dc3a
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/.gitignore
@@ -0,0 +1 @@
+*.patch*
diff --git a/third_party/git/contrib/coccinelle/README b/third_party/git/contrib/coccinelle/README
new file mode 100644
index 000000000000..f0e80bd7f037
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/README
@@ -0,0 +1,43 @@
+This directory provides examples of Coccinelle (http://coccinelle.lip6.fr/)
+semantic patches that might be useful to developers.
+
+There are two types of semantic patches:
+
+ * Using the semantic transformation to check for bad patterns in the code;
+   The target 'make coccicheck' is designed to check for these patterns and
+   it is expected that any resulting patch indicates a regression.
+   The patches resulting from 'make coccicheck' are small and infrequent,
+   so once they are found, they can be sent to the mailing list as per usual.
+
+   Examples of introducing new patterns:
+   67947c34ae (convert "hashcmp() != 0" to "!hasheq()", 2018-08-28)
+   b84c783882 (fsck: s/++i > 1/i++/, 2018-10-24)
+
+   Examples of fixes using this approach:
+   248f66ed8e (run-command: use strbuf_addstr() for adding a string to
+               a strbuf, 2018-03-25)
+   f919ffebed (Use MOVE_ARRAY, 2018-01-22)
+
+   These types of semantic patches are usually part of testing, cf.
+   0860a7641b (travis-ci: fail if Coccinelle static analysis found something
+               to transform, 2018-07-23)
+
+ * Using semantic transformations in large scale refactorings throughout
+   the code base.
+
+   When such a semantic patch is turned into a real patch and sent to the
+   mailing list in the usual way, it can be expected to produce a lot of
+   textual and semantic conflicts, because such large-scale refactorings
+   change function signatures that are used widely in the code base.
+   A textual conflict arises if the code surrounding any call of such a
+   function changes; a semantic conflict arises when other patch series in
+   flight introduce new calls to such functions.
+
+   So to aid these large-scale refactorings, semantic patches can be used.
+   However, we do not want to store them in the same place as the checks for
+   bad patterns, as the automated builds would then fail.
+   That is why the semantic patches 'contrib/coccinelle/*.pending.cocci'
+   are ignored by the checks and can be applied using 'make coccicheck-pending'.
+
+   This makes it possible to publish plans for pending large-scale
+   refactorings without impacting the bad-pattern checks.
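
As a rough illustration of the first category, the hashcmp()/hasheq() conversion
cited above would show up in a generated patch roughly like this (the surrounding
fragment is made up; only hashcmp() and hasheq() come from the actual rules in this
directory):

	/* before, flagged by 'make coccicheck' */
	if (hashcmp(sha1, other) != 0)
		return -1;

	/* after applying the suggested patch */
	if (!hasheq(sha1, other))
		return -1;

The generated patch files are written next to the rules (hence the '*.patch*' entry
in the .gitignore above) and can be reviewed and applied like any other patch.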
diff --git a/third_party/git/contrib/coccinelle/array.cocci b/third_party/git/contrib/coccinelle/array.cocci
new file mode 100644
index 000000000000..46b8d2ee1115
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/array.cocci
@@ -0,0 +1,90 @@
+@@
+expression dst, src, n, E;
+@@
+  memcpy(dst, src, n * sizeof(
+- E[...]
++ *(E)
+  ))
+
+@@
+type T;
+T *ptr;
+T[] arr;
+expression E, n;
+@@
+(
+  memcpy(ptr, E,
+- n * sizeof(*(ptr))
++ n * sizeof(T)
+  )
+|
+  memcpy(arr, E,
+- n * sizeof(*(arr))
++ n * sizeof(T)
+  )
+|
+  memcpy(E, ptr,
+- n * sizeof(*(ptr))
++ n * sizeof(T)
+  )
+|
+  memcpy(E, arr,
+- n * sizeof(*(arr))
++ n * sizeof(T)
+  )
+)
+
+@@
+type T;
+T *dst_ptr;
+T *src_ptr;
+T[] dst_arr;
+T[] src_arr;
+expression n;
+@@
+(
+- memcpy(dst_ptr, src_ptr, (n) * sizeof(T))
++ COPY_ARRAY(dst_ptr, src_ptr, n)
+|
+- memcpy(dst_ptr, src_arr, (n) * sizeof(T))
++ COPY_ARRAY(dst_ptr, src_arr, n)
+|
+- memcpy(dst_arr, src_ptr, (n) * sizeof(T))
++ COPY_ARRAY(dst_arr, src_ptr, n)
+|
+- memcpy(dst_arr, src_arr, (n) * sizeof(T))
++ COPY_ARRAY(dst_arr, src_arr, n)
+)
+
+@@
+type T;
+T *dst;
+T *src;
+expression n;
+@@
+(
+- memmove(dst, src, (n) * sizeof(*dst));
++ MOVE_ARRAY(dst, src, n);
+|
+- memmove(dst, src, (n) * sizeof(*src));
++ MOVE_ARRAY(dst, src, n);
+|
+- memmove(dst, src, (n) * sizeof(T));
++ MOVE_ARRAY(dst, src, n);
+)
+
+@@
+type T;
+T *ptr;
+expression n;
+@@
+- ptr = xmalloc((n) * sizeof(*ptr));
++ ALLOC_ARRAY(ptr, n);
+
+@@
+type T;
+T *ptr;
+expression n;
+@@
+- ptr = xmalloc((n) * sizeof(T));
++ ALLOC_ARRAY(ptr, n);
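
A hedged sketch of what the allocation and copy rules above do to ordinary code;
the struct and variable names are hypothetical, while ALLOC_ARRAY() and COPY_ARRAY()
are the Git helpers the rules rewrite to:

	struct item *dst;

	/* before */
	dst = xmalloc(nr * sizeof(*dst));
	memcpy(dst, src, nr * sizeof(*dst));

	/* after */
	ALLOC_ARRAY(dst, nr);
	COPY_ARRAY(dst, src, nr);

Besides being shorter, COPY_ARRAY() also guards against element-size mismatches
between dst and src.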
diff --git a/third_party/git/contrib/coccinelle/commit.cocci b/third_party/git/contrib/coccinelle/commit.cocci
new file mode 100644
index 000000000000..778e4704f6a2
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/commit.cocci
@@ -0,0 +1,34 @@
+@@
+expression c;
+@@
+- &c->maybe_tree->object.oid
++ get_commit_tree_oid(c)
+
+@@
+expression c;
+@@
+- c->maybe_tree->object.oid.hash
++ get_commit_tree_oid(c)->hash
+
+@@
+identifier f !~ "^set_commit_tree$";
+expression c;
+expression s;
+@@
+  f(...) {<...
+- c->maybe_tree = s
++ set_commit_tree(c, s)
+  ...>}
+
+// These excluded functions must access c->maybe_tree directly.
+// Note that if c->maybe_tree is written somewhere outside of these
+// functions, then the recommended transformation will be bogus with
+// repo_get_commit_tree() on the LHS.
+@@
+identifier f !~ "^(repo_get_commit_tree|get_commit_tree_in_graph_one|load_tree_for_commit|set_commit_tree)$";
+expression c;
+@@
+  f(...) {<...
+- c->maybe_tree
++ repo_get_commit_tree(specify_the_right_repo_here, c)
+  ...>}
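
Sketching the intended effect on a caller outside the excluded functions (the
variables below are hypothetical; set_commit_tree(), get_commit_tree_oid() and
repo_get_commit_tree() are the accessors named in the rules):

	/* before */
	c->maybe_tree = tree;
	oid = &c->maybe_tree->object.oid;

	/* after */
	set_commit_tree(c, tree);
	oid = get_commit_tree_oid(c);

Bare reads of c->maybe_tree are deliberately rewritten to
repo_get_commit_tree(specify_the_right_repo_here, c), which does not compile until
the author fills in the correct repository.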
diff --git a/third_party/git/contrib/coccinelle/flex_alloc.cocci b/third_party/git/contrib/coccinelle/flex_alloc.cocci
new file mode 100644
index 000000000000..e9f7f6d861a4
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/flex_alloc.cocci
@@ -0,0 +1,13 @@
+@@
+expression str;
+identifier x, flexname;
+@@
+- FLEX_ALLOC_MEM(x, flexname, str, strlen(str));
++ FLEX_ALLOC_STR(x, flexname, str);
+
+@@
+expression str;
+identifier x, ptrname;
+@@
+- FLEXPTR_ALLOC_MEM(x, ptrname, str, strlen(str));
++ FLEXPTR_ALLOC_STR(x, ptrname, str);
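
For example, with a hypothetical struct using a flexible array member (FLEX_ARRAY
and the FLEX_ALLOC_* macros come from Git's compatibility headers):

	struct dirent_x {              /* hypothetical */
		int len;
		char name[FLEX_ARRAY];
	};
	struct dirent_x *e;

	/* before */
	FLEX_ALLOC_MEM(e, name, path, strlen(path));

	/* after */
	FLEX_ALLOC_STR(e, name, path);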
diff --git a/third_party/git/contrib/coccinelle/free.cocci b/third_party/git/contrib/coccinelle/free.cocci
new file mode 100644
index 000000000000..4490069df965
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/free.cocci
@@ -0,0 +1,18 @@
+@@
+expression E;
+@@
+- if (E)
+  free(E);
+
+@@
+expression E;
+@@
+- if (!E)
+  free(E);
+
+@@
+expression E;
+@@
+- free(E);
++ FREE_AND_NULL(E);
+- E = NULL;
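
Taken together, these rules collapse the common guarded free-and-clear idiom into
Git's FREE_AND_NULL() helper; e.g., with a hypothetical field:

	/* before */
	if (e->name)
		free(e->name);
	e->name = NULL;

	/* after */
	FREE_AND_NULL(e->name);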
diff --git a/third_party/git/contrib/coccinelle/hashmap.cocci b/third_party/git/contrib/coccinelle/hashmap.cocci
new file mode 100644
index 000000000000..d69e120ccffc
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/hashmap.cocci
@@ -0,0 +1,16 @@
+@ hashmap_entry_init_usage @
+expression E;
+struct hashmap_entry HME;
+@@
+- HME.hash = E;
++ hashmap_entry_init(&HME, E);
+
+@@
+identifier f !~ "^hashmap_entry_init$";
+expression E;
+struct hashmap_entry *HMEP;
+@@
+  f(...) {<...
+- HMEP->hash = E;
++ hashmap_entry_init(HMEP, E);
+  ...>}
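
That is, direct writes to the hash member are routed through the initializer; a
minimal sketch, where item and ent are hypothetical and strhash() is Git's string
hash helper:

	struct hashmap_entry *ent = &item->ent;

	/* before */
	ent->hash = strhash(name);

	/* after */
	hashmap_entry_init(ent, strhash(name));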
diff --git a/third_party/git/contrib/coccinelle/object_id.cocci b/third_party/git/contrib/coccinelle/object_id.cocci
new file mode 100644
index 000000000000..ddf4f22bd722
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/object_id.cocci
@@ -0,0 +1,87 @@
+@@
+struct object_id OID;
+@@
+- is_null_sha1(OID.hash)
++ is_null_oid(&OID)
+
+@@
+struct object_id *OIDPTR;
+@@
+- is_null_sha1(OIDPTR->hash)
++ is_null_oid(OIDPTR)
+
+@@
+struct object_id OID;
+@@
+- hashclr(OID.hash)
++ oidclr(&OID)
+
+@@
+identifier f != oidclr;
+struct object_id *OIDPTR;
+@@
+  f(...) {<...
+- hashclr(OIDPTR->hash)
++ oidclr(OIDPTR)
+  ...>}
+
+@@
+struct object_id OID1, OID2;
+@@
+- hashcmp(OID1.hash, OID2.hash)
++ oidcmp(&OID1, &OID2)
+
+@@
+identifier f != oidcmp;
+struct object_id *OIDPTR1, OIDPTR2;
+@@
+  f(...) {<...
+- hashcmp(OIDPTR1->hash, OIDPTR2->hash)
++ oidcmp(OIDPTR1, OIDPTR2)
+  ...>}
+
+@@
+struct object_id *OIDPTR;
+struct object_id OID;
+@@
+- hashcmp(OIDPTR->hash, OID.hash)
++ oidcmp(OIDPTR, &OID)
+
+@@
+struct object_id *OIDPTR;
+struct object_id OID;
+@@
+- hashcmp(OID.hash, OIDPTR->hash)
++ oidcmp(&OID, OIDPTR)
+
+@@
+struct object_id *OIDPTR1;
+struct object_id *OIDPTR2;
+@@
+- oidcmp(OIDPTR1, OIDPTR2) == 0
++ oideq(OIDPTR1, OIDPTR2)
+
+@@
+identifier f != hasheq;
+expression E1, E2;
+@@
+  f(...) {<...
+- hashcmp(E1, E2) == 0
++ hasheq(E1, E2)
+  ...>}
+
+@@
+struct object_id *OIDPTR1;
+struct object_id *OIDPTR2;
+@@
+- oidcmp(OIDPTR1, OIDPTR2) != 0
++ !oideq(OIDPTR1, OIDPTR2)
+
+@@
+identifier f != hasheq;
+expression E1, E2;
+@@
+  f(...) {<...
+- hashcmp(E1, E2) != 0
++ !hasheq(E1, E2)
+  ...>}
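
The end state these rules aim for, sketched on hypothetical variables (the hashcmp()
call is first rewritten to oidcmp(), and the '== 0' comparison is then folded into
oideq()):

	struct object_id a, b;

	/* before */
	if (hashcmp(a.hash, b.hash) == 0)
		hashclr(a.hash);

	/* after */
	if (oideq(&a, &b))
		oidclr(&a);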
diff --git a/third_party/git/contrib/coccinelle/preincr.cocci b/third_party/git/contrib/coccinelle/preincr.cocci
new file mode 100644
index 000000000000..7fe1e8d2d9a0
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/preincr.cocci
@@ -0,0 +1,5 @@
+@ preincrement @
+identifier i;
+@@
+- ++i > 1
++ i++
diff --git a/third_party/git/contrib/coccinelle/qsort.cocci b/third_party/git/contrib/coccinelle/qsort.cocci
new file mode 100644
index 000000000000..22b93a99664d
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/qsort.cocci
@@ -0,0 +1,37 @@
+@@
+expression base, nmemb, compar;
+@@
+- qsort(base, nmemb, sizeof(*base), compar);
++ QSORT(base, nmemb, compar);
+
+@@
+expression base, nmemb, compar;
+@@
+- qsort(base, nmemb, sizeof(base[0]), compar);
++ QSORT(base, nmemb, compar);
+
+@@
+type T;
+T *base;
+expression nmemb, compar;
+@@
+- qsort(base, nmemb, sizeof(T), compar);
++ QSORT(base, nmemb, compar);
+
+@@
+expression base, nmemb, compar;
+@@
+- if (nmemb)
+    QSORT(base, nmemb, compar);
+
+@@
+expression base, nmemb, compar;
+@@
+- if (nmemb > 0)
+    QSORT(base, nmemb, compar);
+
+@@
+expression base, nmemb, compar;
+@@
+- if (nmemb > 1)
+    QSORT(base, nmemb, compar);
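
For example (items and item_cmp are hypothetical), a guarded raw qsort() call
becomes a single QSORT(), which already tolerates a zero element count:

	/* before */
	if (nr > 0)
		qsort(items, nr, sizeof(*items), item_cmp);

	/* after */
	QSORT(items, nr, item_cmp);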
diff --git a/third_party/git/contrib/coccinelle/strbuf.cocci b/third_party/git/contrib/coccinelle/strbuf.cocci
new file mode 100644
index 000000000000..d9ada69b4323
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/strbuf.cocci
@@ -0,0 +1,62 @@
+@ strbuf_addf_with_format_only @
+expression E;
+constant fmt !~ "%";
+@@
+- strbuf_addf
++ strbuf_addstr
+  (E,
+(
+  fmt
+|
+  _(fmt)
+)
+  );
+
+@@
+expression E;
+struct strbuf SB;
+format F =~ "s";
+@@
+- strbuf_addf(E, "%@F@", SB.buf);
++ strbuf_addbuf(E, &SB);
+
+@@
+expression E;
+struct strbuf *SBP;
+format F =~ "s";
+@@
+- strbuf_addf(E, "%@F@", SBP->buf);
++ strbuf_addbuf(E, SBP);
+
+@@
+expression E;
+struct strbuf SB;
+@@
+- strbuf_addstr(E, SB.buf);
++ strbuf_addbuf(E, &SB);
+
+@@
+expression E;
+struct strbuf *SBP;
+@@
+- strbuf_addstr(E, SBP->buf);
++ strbuf_addbuf(E, SBP);
+
+@@
+expression E1, E2;
+format F =~ "s";
+@@
+- strbuf_addf(E1, "%@F@", E2);
++ strbuf_addstr(E1, E2);
+
+@@
+expression E1, E2, E3;
+@@
+- strbuf_addstr(E1, find_unique_abbrev(E2, E3));
++ strbuf_add_unique_abbrev(E1, E2, E3);
+
+@@
+expression E1, E2;
+@@
+- strbuf_addstr(E1, real_path(E2));
++ strbuf_add_real_path(E1, E2);
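
A hedged example of the calls these rules simplify (sb and out are hypothetical
strbufs):

	/* before */
	strbuf_addf(&out, "%s", sb.buf);
	strbuf_addf(&out, "done");

	/* after */
	strbuf_addbuf(&out, &sb);
	strbuf_addstr(&out, "done");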
diff --git a/third_party/git/contrib/coccinelle/swap.cocci b/third_party/git/contrib/coccinelle/swap.cocci
new file mode 100644
index 000000000000..a0934d1fdaf0
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/swap.cocci
@@ -0,0 +1,28 @@
+@ swap_with_declaration @
+type T;
+identifier tmp;
+T a, b;
+@@
+- T tmp = a;
++ T tmp;
++ tmp = a;
+  a = b;
+  b = tmp;
+
+@ swap @
+type T;
+T tmp, a, b;
+@@
+- tmp = a;
+- a = b;
+- b = tmp;
++ SWAP(a, b);
+
+@ extends swap @
+identifier unused;
+@@
+  {
+  ...
+- T unused;
+  ... when != unused
+  }
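
E.g., the classic three-assignment swap of two values of the same type collapses
into SWAP(); with a hypothetical pair of ints:

	/* before */
	int tmp = a;
	a = b;
	b = tmp;

	/* after (the now-unused tmp declaration is dropped as well) */
	SWAP(a, b);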
diff --git a/third_party/git/contrib/coccinelle/the_repository.pending.cocci b/third_party/git/contrib/coccinelle/the_repository.pending.cocci
new file mode 100644
index 000000000000..2ee702ecf7fe
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/the_repository.pending.cocci
@@ -0,0 +1,144 @@
+// This file is used for the ongoing refactoring of
+// bringing the index or repository struct into all
+// of our code base.
+
+@@
+expression E;
+expression F;
+expression G;
+@@
+- read_object_file(
++ repo_read_object_file(the_repository,
+  E, F, G)
+
+@@
+expression E;
+@@
+- has_sha1_file(
++ repo_has_sha1_file(the_repository,
+  E)
+
+@@
+expression E;
+expression F;
+@@
+- has_sha1_file_with_flags(
++ repo_has_sha1_file_with_flags(the_repository,
+  E, F)
+
+@@
+expression E;
+@@
+- has_object_file(
++ repo_has_object_file(the_repository,
+  E)
+
+@@
+expression E;
+expression F;
+@@
+- has_object_file_with_flags(
++ repo_has_object_file_with_flags(the_repository,
+  E, F)
+
+@@
+expression E;
+expression F;
+expression G;
+@@
+- parse_commit_internal(
++ repo_parse_commit_internal(the_repository,
+  E, F, G)
+
+@@
+expression E;
+expression F;
+@@
+- parse_commit_gently(
++ repo_parse_commit_gently(the_repository,
+  E, F)
+
+@@
+expression E;
+@@
+- parse_commit(
++ repo_parse_commit(the_repository,
+  E)
+
+@@
+expression E;
+expression F;
+@@
+- get_merge_bases(
++ repo_get_merge_bases(the_repository,
+  E, F);
+
+@@
+expression E;
+expression F;
+expression G;
+@@
+- get_merge_bases_many(
++ repo_get_merge_bases_many(the_repository,
+  E, F, G);
+
+@@
+expression E;
+expression F;
+expression G;
+@@
+- get_merge_bases_many_dirty(
++ repo_get_merge_bases_many_dirty(the_repository,
+  E, F, G);
+
+@@
+expression E;
+expression F;
+@@
+- in_merge_bases(
++ repo_in_merge_bases(the_repository,
+  E, F);
+
+@@
+expression E;
+expression F;
+expression G;
+@@
+- in_merge_bases_many(
++ repo_in_merge_bases_many(the_repository,
+  E, F, G);
+
+@@
+expression E;
+expression F;
+@@
+- get_commit_buffer(
++ repo_get_commit_buffer(the_repository,
+  E, F);
+
+@@
+expression E;
+expression F;
+@@
+- unuse_commit_buffer(
++ repo_unuse_commit_buffer(the_repository,
+  E, F);
+
+@@
+expression E;
+expression F;
+expression G;
+@@
+- logmsg_reencode(
++ repo_logmsg_reencode(the_repository,
+  E, F, G);
+
+@@
+expression E;
+expression F;
+expression G;
+expression H;
+@@
+- format_commit_message(
++ repo_format_commit_message(the_repository,
+  E, F, G, H);
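
Each rule simply threads the_repository through as a new first argument; e.g., for
a hypothetical call site:

	/* before */
	if (parse_commit(commit) < 0)
		die("could not parse commit");

	/* after */
	if (repo_parse_commit(the_repository, commit) < 0)
		die("could not parse commit");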
diff --git a/third_party/git/contrib/coccinelle/xstrdup_or_null.cocci b/third_party/git/contrib/coccinelle/xstrdup_or_null.cocci
new file mode 100644
index 000000000000..8e05d1ca4b61
--- /dev/null
+++ b/third_party/git/contrib/coccinelle/xstrdup_or_null.cocci
@@ -0,0 +1,13 @@
+@@
+expression E;
+expression V;
+@@
+- if (E)
+-    V = xstrdup(E);
++ V = xstrdup_or_null(E);
+
+@@
+expression E;
+@@
+- xstrdup(absolute_path(E))
++ absolute_pathdup(E)
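
For instance, with hypothetical variables:

	/* before */
	if (path)
		dst = xstrdup(path);
	abs = xstrdup(absolute_path(path));

	/* after */
	dst = xstrdup_or_null(path);
	abs = absolute_pathdup(path);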
diff --git a/third_party/git/contrib/completion/.gitattributes b/third_party/git/contrib/completion/.gitattributes
new file mode 100644
index 000000000000..19116944c15c
--- /dev/null
+++ b/third_party/git/contrib/completion/.gitattributes
@@ -0,0 +1 @@
+*.bash eol=lf
diff --git a/third_party/git/contrib/completion/git-completion.bash b/third_party/git/contrib/completion/git-completion.bash
new file mode 100644
index 000000000000..c21786f2fd00
--- /dev/null
+++ b/third_party/git/contrib/completion/git-completion.bash
@@ -0,0 +1,3381 @@
+# bash/zsh completion support for core Git.
+#
+# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
+# Conceptually based on gitcompletion (http://gitweb.hawaga.org.uk/).
+# Distributed under the GNU General Public License, version 2.0.
+#
+# The contained completion routines provide support for completing:
+#
+#    *) local and remote branch names
+#    *) local and remote tag names
+#    *) .git/remotes file names
+#    *) git 'subcommands'
+#    *) git email aliases for git-send-email
+#    *) tree paths within 'ref:path/to/file' expressions
+#    *) file paths within current working directory and index
+#    *) common --long-options
+#
+# To use these routines:
+#
+#    1) Copy this file to somewhere (e.g. ~/.git-completion.bash).
+#    2) Add the following line to your .bashrc/.zshrc:
+#        source ~/.git-completion.bash
+#    3) Consider changing your PS1 to also show the current branch,
+#       see git-prompt.sh for details.
+#
+# If you use complex aliases of form '!f() { ... }; f', you can use the null
+# command ':' as the first command in the function body to declare the desired
+# completion style.  For example '!f() { : git commit ; ... }; f' will
+# tell the completion to use commit completion.  This also works with aliases
+# of form "!sh -c '...'".  For example, "!sh -c ': git commit ; ... '".
+#
+# Compatible with bash 3.2.57.
+#
+# You can set the following environment variables to influence the behavior of
+# the completion routines:
+#
+#   GIT_COMPLETION_CHECKOUT_NO_GUESS
+#
+#     When set to "1", do not include "DWIM" suggestions in git-checkout
+#     and git-switch completion (e.g., completing "foo" when "origin/foo"
+#     exists).
+
+case "$COMP_WORDBREAKS" in
+*:*) : great ;;
+*)   COMP_WORDBREAKS="$COMP_WORDBREAKS:"
+esac
+
+# Discovers the path to the git repository taking any '--git-dir=<path>' and
+# '-C <path>' options into account and stores it in the $__git_repo_path
+# variable.
+__git_find_repo_path ()
+{
+	if [ -n "$__git_repo_path" ]; then
+		# we already know where it is
+		return
+	fi
+
+	if [ -n "${__git_C_args-}" ]; then
+		__git_repo_path="$(git "${__git_C_args[@]}" \
+			${__git_dir:+--git-dir="$__git_dir"} \
+			rev-parse --absolute-git-dir 2>/dev/null)"
+	elif [ -n "${__git_dir-}" ]; then
+		test -d "$__git_dir" &&
+		__git_repo_path="$__git_dir"
+	elif [ -n "${GIT_DIR-}" ]; then
+		test -d "${GIT_DIR-}" &&
+		__git_repo_path="$GIT_DIR"
+	elif [ -d .git ]; then
+		__git_repo_path=.git
+	else
+		__git_repo_path="$(git rev-parse --git-dir 2>/dev/null)"
+	fi
+}
+
+# Deprecated: use __git_find_repo_path() and $__git_repo_path instead
+# __gitdir accepts 0 or 1 arguments (i.e., location)
+# returns location of .git repo
+__gitdir ()
+{
+	if [ -z "${1-}" ]; then
+		__git_find_repo_path || return 1
+		echo "$__git_repo_path"
+	elif [ -d "$1/.git" ]; then
+		echo "$1/.git"
+	else
+		echo "$1"
+	fi
+}
+
+# Runs git with all the options given as argument, respecting any
+# '--git-dir=<path>' and '-C <path>' options present on the command line
+__git ()
+{
+	git ${__git_C_args:+"${__git_C_args[@]}"} \
+		${__git_dir:+--git-dir="$__git_dir"} "$@" 2>/dev/null
+}
+
+# Removes backslash escaping, single quotes and double quotes from a word,
+# stores the result in the variable $dequoted_word.
+# 1: The word to dequote.
+__git_dequote ()
+{
+	local rest="$1" len ch
+
+	dequoted_word=""
+
+	while test -n "$rest"; do
+		len=${#dequoted_word}
+		dequoted_word="$dequoted_word${rest%%[\\\'\"]*}"
+		rest="${rest:$((${#dequoted_word}-$len))}"
+
+		case "${rest:0:1}" in
+		\\)
+			ch="${rest:1:1}"
+			case "$ch" in
+			$'\n')
+				;;
+			*)
+				dequoted_word="$dequoted_word$ch"
+				;;
+			esac
+			rest="${rest:2}"
+			;;
+		\')
+			rest="${rest:1}"
+			len=${#dequoted_word}
+			dequoted_word="$dequoted_word${rest%%\'*}"
+			rest="${rest:$((${#dequoted_word}-$len+1))}"
+			;;
+		\")
+			rest="${rest:1}"
+			while test -n "$rest" ; do
+				len=${#dequoted_word}
+				dequoted_word="$dequoted_word${rest%%[\\\"]*}"
+				rest="${rest:$((${#dequoted_word}-$len))}"
+				case "${rest:0:1}" in
+				\\)
+					ch="${rest:1:1}"
+					case "$ch" in
+					\"|\\|\$|\`)
+						dequoted_word="$dequoted_word$ch"
+						;;
+					$'\n')
+						;;
+					*)
+						dequoted_word="$dequoted_word\\$ch"
+						;;
+					esac
+					rest="${rest:2}"
+					;;
+				\")
+					rest="${rest:1}"
+					break
+					;;
+				esac
+			done
+			;;
+		esac
+	done
+}
+
+# The following function is based on code from:
+#
+#   bash_completion - programmable completion functions for bash 3.2+
+#
+#   Copyright © 2006-2008, Ian Macdonald <ian@caliban.org>
+#             © 2009-2010, Bash Completion Maintainers
+#                     <bash-completion-devel@lists.alioth.debian.org>
+#
+#   This program is free software; you can redistribute it and/or modify
+#   it under the terms of the GNU General Public License as published by
+#   the Free Software Foundation; either version 2, or (at your option)
+#   any later version.
+#
+#   This program is distributed in the hope that it will be useful,
+#   but WITHOUT ANY WARRANTY; without even the implied warranty of
+#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#   GNU General Public License for more details.
+#
+#   You should have received a copy of the GNU General Public License
+#   along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+#   The latest version of this software can be obtained here:
+#
+#   http://bash-completion.alioth.debian.org/
+#
+#   RELEASE: 2.x
+
+# This function can be used to access a tokenized list of words
+# on the command line:
+#
+#	__git_reassemble_comp_words_by_ref '=:'
+#	if test "${words_[cword_-1]}" = -w
+#	then
+#		...
+#	fi
+#
+# The argument should be a collection of characters from the list of
+# word completion separators (COMP_WORDBREAKS) to treat as ordinary
+# characters.
+#
+# This is roughly equivalent to going back in time and setting
+# COMP_WORDBREAKS to exclude those characters.  The intent is to
+# make option types like --date=<type> and <rev>:<path> easy to
+# recognize by treating each shell word as a single token.
+#
+# It is best not to set COMP_WORDBREAKS directly because the value is
+# shared with other completion scripts.  By the time the completion
+# function gets called, COMP_WORDS has already been populated so local
+# changes to COMP_WORDBREAKS have no effect.
+#
+# Output: words_, cword_, cur_.
+
+__git_reassemble_comp_words_by_ref()
+{
+	local exclude i j first
+	# Which word separators to exclude?
+	exclude="${1//[^$COMP_WORDBREAKS]}"
+	cword_=$COMP_CWORD
+	if [ -z "$exclude" ]; then
+		words_=("${COMP_WORDS[@]}")
+		return
+	fi
+	# List of word completion separators has shrunk;
+	# re-assemble words to complete.
+	for ((i=0, j=0; i < ${#COMP_WORDS[@]}; i++, j++)); do
+		# Append each nonempty word consisting of just
+		# word separator characters to the current word.
+		first=t
+		while
+			[ $i -gt 0 ] &&
+			[ -n "${COMP_WORDS[$i]}" ] &&
+			# word consists of excluded word separators
+			[ "${COMP_WORDS[$i]//[^$exclude]}" = "${COMP_WORDS[$i]}" ]
+		do
+			# Attach to the previous token,
+			# unless the previous token is the command name.
+			if [ $j -ge 2 ] && [ -n "$first" ]; then
+				((j--))
+			fi
+			first=
+			words_[$j]=${words_[j]}${COMP_WORDS[i]}
+			if [ $i = $COMP_CWORD ]; then
+				cword_=$j
+			fi
+			if (($i < ${#COMP_WORDS[@]} - 1)); then
+				((i++))
+			else
+				# Done.
+				return
+			fi
+		done
+		words_[$j]=${words_[j]}${COMP_WORDS[i]}
+		if [ $i = $COMP_CWORD ]; then
+			cword_=$j
+		fi
+	done
+}
+
+if ! type _get_comp_words_by_ref >/dev/null 2>&1; then
+_get_comp_words_by_ref ()
+{
+	local exclude cur_ words_ cword_
+	if [ "$1" = "-n" ]; then
+		exclude=$2
+		shift 2
+	fi
+	__git_reassemble_comp_words_by_ref "$exclude"
+	cur_=${words_[cword_]}
+	while [ $# -gt 0 ]; do
+		case "$1" in
+		cur)
+			cur=$cur_
+			;;
+		prev)
+			prev=${words_[$cword_-1]}
+			;;
+		words)
+			words=("${words_[@]}")
+			;;
+		cword)
+			cword=$cword_
+			;;
+		esac
+		shift
+	done
+}
+fi
+
+# Fills the COMPREPLY array with prefiltered words without any additional
+# processing.
+# Callers must take care of providing only words that match the current word
+# to be completed and adding any prefix and/or suffix (trailing space!), if
+# necessary.
+# 1: List of newline-separated matching completion words, complete with
+#    prefix and suffix.
+__gitcomp_direct ()
+{
+	local IFS=$'\n'
+
+	COMPREPLY=($1)
+}
+
+__gitcompappend ()
+{
+	local x i=${#COMPREPLY[@]}
+	for x in $1; do
+		if [[ "$x" == "$3"* ]]; then
+			COMPREPLY[i++]="$2$x$4"
+		fi
+	done
+}
+
+__gitcompadd ()
+{
+	COMPREPLY=()
+	__gitcompappend "$@"
+}
+
+# Generates completion reply, appending a space to possible completion words,
+# if necessary.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word (optional).
+__gitcomp ()
+{
+	local cur_="${3-$cur}"
+
+	case "$cur_" in
+	--*=)
+		;;
+	--no-*)
+		local c i=0 IFS=$' \t\n'
+		for c in $1; do
+			if [[ $c == "--" ]]; then
+				continue
+			fi
+			c="$c${4-}"
+			if [[ $c == "$cur_"* ]]; then
+				case $c in
+				--*=|*.) ;;
+				*) c="$c " ;;
+				esac
+				COMPREPLY[i++]="${2-}$c"
+			fi
+		done
+		;;
+	*)
+		local c i=0 IFS=$' \t\n'
+		for c in $1; do
+			if [[ $c == "--" ]]; then
+				c="--no-...${4-}"
+				if [[ $c == "$cur_"* ]]; then
+					COMPREPLY[i++]="${2-}$c "
+				fi
+				break
+			fi
+			c="$c${4-}"
+			if [[ $c == "$cur_"* ]]; then
+				case $c in
+				*=|*.) ;;
+				*) c="$c " ;;
+				esac
+				COMPREPLY[i++]="${2-}$c"
+			fi
+		done
+		;;
+	esac
+}
+
+# Clear the variables caching builtins' options when (re-)sourcing
+# the completion script.
+if [[ -n ${ZSH_VERSION-} ]]; then
+	unset $(set |sed -ne 's/^\(__gitcomp_builtin_[a-zA-Z0-9_][a-zA-Z0-9_]*\)=.*/\1/p') 2>/dev/null
+else
+	unset $(compgen -v __gitcomp_builtin_)
+fi
+
+# This function is equivalent to
+#
+#    __gitcomp "$(git xxx --git-completion-helper) ..."
+#
+# except that the output is cached. Accept 1-3 arguments:
+# 1: the git command to execute, this is also the cache key
+# 2: extra options to be added on top (e.g. negative forms)
+# 3: options to be excluded
+__gitcomp_builtin ()
+{
+	# spaces must be replaced with underscore for multi-word
+	# commands, e.g. "git remote add" becomes remote_add.
+	local cmd="$1"
+	local incl="$2"
+	local excl="$3"
+
+	local var=__gitcomp_builtin_"${cmd/-/_}"
+	local options
+	eval "options=\$$var"
+
+	if [ -z "$options" ]; then
+		# leading and trailing spaces are significant to make
+		# option removal work correctly.
+		options=" $incl $(__git ${cmd/_/ } --git-completion-helper) " || return
+
+		for i in $excl; do
+			options="${options/ $i / }"
+		done
+		eval "$var=\"$options\""
+	fi
+
+	__gitcomp "$options"
+}
+
+# Variation of __gitcomp_nl () that appends to the existing list of
+# completion candidates, COMPREPLY.
+__gitcomp_nl_append ()
+{
+	local IFS=$'\n'
+	__gitcompappend "$1" "${2-}" "${3-$cur}" "${4- }"
+}
+
+# Generates completion reply from newline-separated possible completion words
+# by appending a space to all of them.
+# It accepts 1 to 4 arguments:
+# 1: List of possible completion words, separated by a single newline.
+# 2: A prefix to be added to each possible completion word (optional).
+# 3: Generate possible completion matches for this word (optional).
+# 4: A suffix to be appended to each possible completion word instead of
+#    the default space (optional).  If specified but empty, nothing is
+#    appended.
+__gitcomp_nl ()
+{
+	COMPREPLY=()
+	__gitcomp_nl_append "$@"
+}
+
+# Fills the COMPREPLY array with prefiltered paths without any additional
+# processing.
+# Callers must take care of providing only paths that match the current path
+# to be completed and adding any prefix path components, if necessary.
+# 1: List of newline-separated matching paths, complete with all prefix
+#    path components.
+__gitcomp_file_direct ()
+{
+	local IFS=$'\n'
+
+	COMPREPLY=($1)
+
+	# use a hack to enable file mode in bash < 4
+	compopt -o filenames +o nospace 2>/dev/null ||
+	compgen -f /non-existing-dir/ >/dev/null ||
+	true
+}
+
+# Generates completion reply with compgen from newline-separated possible
+# completion filenames.
+# It accepts 1 to 3 arguments:
+# 1: List of possible completion filenames, separated by a single newline.
+# 2: A directory prefix to be added to each possible completion filename
+#    (optional).
+# 3: Generate possible completion matches for this word (optional).
+__gitcomp_file ()
+{
+	local IFS=$'\n'
+
+	# XXX does not work when the directory prefix contains a tilde,
+	# since tilde expansion is not applied.
+	# This means that COMPREPLY will be empty and Bash default
+	# completion will be used.
+	__gitcompadd "$1" "${2-}" "${3-$cur}" ""
+
+	# use a hack to enable file mode in bash < 4
+	compopt -o filenames +o nospace 2>/dev/null ||
+	compgen -f /non-existing-dir/ >/dev/null ||
+	true
+}
+
+# Runs 'git ls-files', unless the --committable option is specified, in
+# which case it runs 'git diff-index' to find out the files that can be
+# committed.  It returns paths relative to the directory specified in the
+# first argument, using the options specified in the second argument.
+__git_ls_files_helper ()
+{
+	if [ "$2" == "--committable" ]; then
+		__git -C "$1" -c core.quotePath=false diff-index \
+			--name-only --relative HEAD -- "${3//\\/\\\\}*"
+	else
+		# NOTE: $2 is not quoted in order to support multiple options
+		__git -C "$1" -c core.quotePath=false ls-files \
+			--exclude-standard $2 -- "${3//\\/\\\\}*"
+	fi
+}
+
+
+# __git_index_files accepts 1 to 3 arguments:
+# 1: Options to pass to ls-files (required).
+# 2: A directory path (optional).
+#    If provided, only files within the specified directory are listed.
+#    Subdirectories are never recursed into.  The path must have a
+#    trailing slash.
+# 3: List only paths matching this path component (optional).
+__git_index_files ()
+{
+	local root="$2" match="$3"
+
+	__git_ls_files_helper "$root" "$1" "$match" |
+	awk -F / -v pfx="${2//\\/\\\\}" '{
+		paths[$1] = 1
+	}
+	END {
+		for (p in paths) {
+			if (substr(p, 1, 1) != "\"") {
+				# No special characters, easy!
+				print pfx p
+				continue
+			}
+
+			# The path is quoted.
+			p = dequote(p)
+			if (p == "")
+				continue
+
+			# Even when a directory name itself does not contain
+			# any special characters, it will still be quoted if
+			# any of its (stripped) trailing path components do.
+			# Because of this we may have seen the same directory
+			# both quoted and unquoted.
+			if (p in paths)
+				# We have seen the same directory unquoted,
+				# skip it.
+				continue
+			else
+				print pfx p
+		}
+	}
+	function dequote(p,    bs_idx, out, esc, esc_idx, dec) {
+		# Skip opening double quote.
+		p = substr(p, 2)
+
+		# Interpret backslash escape sequences.
+		while ((bs_idx = index(p, "\\")) != 0) {
+			out = out substr(p, 1, bs_idx - 1)
+			esc = substr(p, bs_idx + 1, 1)
+			p = substr(p, bs_idx + 2)
+
+			if ((esc_idx = index("abtvfr\"\\", esc)) != 0) {
+				# C-style one-character escape sequence.
+				out = out substr("\a\b\t\v\f\r\"\\",
+						 esc_idx, 1)
+			} else if (esc == "n") {
+				# Uh-oh, a newline character.
+				# We cannot reliably put a pathname
+				# containing a newline into COMPREPLY,
+				# and the newline would create a mess.
+				# Skip this path.
+				return ""
+			} else {
+				# Must be a \nnn octal value, then.
+				dec = esc             * 64 + \
+				      substr(p, 1, 1) * 8  + \
+				      substr(p, 2, 1)
+				out = out sprintf("%c", dec)
+				p = substr(p, 3)
+			}
+		}
+		# Drop closing double quote, if there is one.
+		# (There is not any if this is a directory, as it was
+		# already stripped with the trailing path components.)
+		if (substr(p, length(p), 1) == "\"")
+			out = out substr(p, 1, length(p) - 1)
+		else
+			out = out p
+
+		return out
+	}'
+}
+
+# __git_complete_index_file requires 1 argument:
+# 1: the options to pass to ls-files
+#
+# The exception is --committable, which finds the files appropriate for
+# committing.
+__git_complete_index_file ()
+{
+	local dequoted_word pfx="" cur_
+
+	__git_dequote "$cur"
+
+	case "$dequoted_word" in
+	?*/*)
+		pfx="${dequoted_word%/*}/"
+		cur_="${dequoted_word##*/}"
+		;;
+	*)
+		cur_="$dequoted_word"
+	esac
+
+	__gitcomp_file_direct "$(__git_index_files "$1" "$pfx" "$cur_")"
+}
+
+# Lists branches from the local repository.
+# 1: A prefix to be added to each listed branch (optional).
+# 2: List only branches matching this word (optional; list all branches if
+#    unset or empty).
+# 3: A suffix to be appended to each listed branch (optional).
+__git_heads ()
+{
+	local pfx="${1-}" cur_="${2-}" sfx="${3-}"
+
+	__git for-each-ref --format="${pfx//\%/%%}%(refname:strip=2)$sfx" \
+			"refs/heads/$cur_*" "refs/heads/$cur_*/**"
+}
+
+# Lists tags from the local repository.
+# Accepts the same positional parameters as __git_heads() above.
+__git_tags ()
+{
+	local pfx="${1-}" cur_="${2-}" sfx="${3-}"
+
+	__git for-each-ref --format="${pfx//\%/%%}%(refname:strip=2)$sfx" \
+			"refs/tags/$cur_*" "refs/tags/$cur_*/**"
+}
+
+# Lists refs from the local (by default) or from a remote repository.
+# It accepts 0, 1 or 2 arguments:
+# 1: The remote to list refs from (optional; ignored, if set but empty).
+#    Can be the name of a configured remote, a path, or a URL.
+# 2: In addition to local refs, list unique branches from refs/remotes/ for
+#    'git checkout's tracking DWIMery (optional; ignored, if set but empty).
+# 3: A prefix to be added to each listed ref (optional).
+# 4: List only refs matching this word (optional; list all refs if unset or
+#    empty).
+# 5: A suffix to be appended to each listed ref (optional; ignored, if set
+#    but empty).
+#
+# Use __git_complete_refs() instead.
+__git_refs ()
+{
+	local i hash dir track="${2-}"
+	local list_refs_from=path remote="${1-}"
+	local format refs
+	local pfx="${3-}" cur_="${4-$cur}" sfx="${5-}"
+	local match="${4-}"
+	local fer_pfx="${pfx//\%/%%}" # "escape" for-each-ref format specifiers
+
+	__git_find_repo_path
+	dir="$__git_repo_path"
+
+	if [ -z "$remote" ]; then
+		if [ -z "$dir" ]; then
+			return
+		fi
+	else
+		if __git_is_configured_remote "$remote"; then
+			# configured remote takes precedence over a
+			# local directory with the same name
+			list_refs_from=remote
+		elif [ -d "$remote/.git" ]; then
+			dir="$remote/.git"
+		elif [ -d "$remote" ]; then
+			dir="$remote"
+		else
+			list_refs_from=url
+		fi
+	fi
+
+	if [ "$list_refs_from" = path ]; then
+		if [[ "$cur_" == ^* ]]; then
+			pfx="$pfx^"
+			fer_pfx="$fer_pfx^"
+			cur_=${cur_#^}
+			match=${match#^}
+		fi
+		case "$cur_" in
+		refs|refs/*)
+			format="refname"
+			refs=("$match*" "$match*/**")
+			track=""
+			;;
+		*)
+			for i in HEAD FETCH_HEAD ORIG_HEAD MERGE_HEAD REBASE_HEAD; do
+				case "$i" in
+				$match*)
+					if [ -e "$dir/$i" ]; then
+						echo "$pfx$i$sfx"
+					fi
+					;;
+				esac
+			done
+			format="refname:strip=2"
+			refs=("refs/tags/$match*" "refs/tags/$match*/**"
+				"refs/heads/$match*" "refs/heads/$match*/**"
+				"refs/remotes/$match*" "refs/remotes/$match*/**")
+			;;
+		esac
+		__git_dir="$dir" __git for-each-ref --format="$fer_pfx%($format)$sfx" \
+			"${refs[@]}"
+		if [ -n "$track" ]; then
+			# employ the heuristic used by git checkout
+			# Try to find a remote branch that matches the completion word
+			# but only output if the branch name is unique
+			__git for-each-ref --format="$fer_pfx%(refname:strip=3)$sfx" \
+				--sort="refname:strip=3" \
+				"refs/remotes/*/$match*" "refs/remotes/*/$match*/**" | \
+			uniq -u
+		fi
+		return
+	fi
+	case "$cur_" in
+	refs|refs/*)
+		__git ls-remote "$remote" "$match*" | \
+		while read -r hash i; do
+			case "$i" in
+			*^{}) ;;
+			*) echo "$pfx$i$sfx" ;;
+			esac
+		done
+		;;
+	*)
+		if [ "$list_refs_from" = remote ]; then
+			case "HEAD" in
+			$match*)	echo "${pfx}HEAD$sfx" ;;
+			esac
+			__git for-each-ref --format="$fer_pfx%(refname:strip=3)$sfx" \
+				"refs/remotes/$remote/$match*" \
+				"refs/remotes/$remote/$match*/**"
+		else
+			local query_symref
+			case "HEAD" in
+			$match*)	query_symref="HEAD" ;;
+			esac
+			__git ls-remote "$remote" $query_symref \
+				"refs/tags/$match*" "refs/heads/$match*" \
+				"refs/remotes/$match*" |
+			while read -r hash i; do
+				case "$i" in
+				*^{})	;;
+				refs/*)	echo "$pfx${i#refs/*/}$sfx" ;;
+				*)	echo "$pfx$i$sfx" ;;  # symbolic refs
+				esac
+			done
+		fi
+		;;
+	esac
+}
+
+# Completes refs, short and long, local and remote, symbolic and pseudo.
+#
+# Usage: __git_complete_refs [<option>]...
+# --remote=<remote>: The remote to list refs from, can be the name of a
+#                    configured remote, a path, or a URL.
+# --track: List unique remote branches for 'git checkout's tracking DWIMery.
+# --pfx=<prefix>: A prefix to be added to each ref.
+# --cur=<word>: The current ref to be completed.  Defaults to the current
+#               word to be completed.
+# --sfx=<suffix>: A suffix to be appended to each ref instead of the default
+#                 space.
+__git_complete_refs ()
+{
+	local remote track pfx cur_="$cur" sfx=" "
+
+	while test $# != 0; do
+		case "$1" in
+		--remote=*)	remote="${1##--remote=}" ;;
+		--track)	track="yes" ;;
+		--pfx=*)	pfx="${1##--pfx=}" ;;
+		--cur=*)	cur_="${1##--cur=}" ;;
+		--sfx=*)	sfx="${1##--sfx=}" ;;
+		*)		return 1 ;;
+		esac
+		shift
+	done
+
+	__gitcomp_direct "$(__git_refs "$remote" "$track" "$pfx" "$cur_" "$sfx")"
+}
+
+# __git_refs2 requires 1 argument (to pass to __git_refs)
+# Deprecated: use __git_complete_fetch_refspecs() instead.
+__git_refs2 ()
+{
+	local i
+	for i in $(__git_refs "$1"); do
+		echo "$i:$i"
+	done
+}
+
+# Completes refspecs for fetching from a remote repository.
+# 1: The remote repository.
+# 2: A prefix to be added to each listed refspec (optional).
+# 3: The ref to be completed as a refspec instead of the current word to be
+#    completed (optional)
+# 4: A suffix to be appended to each listed refspec instead of the default
+#    space (optional).
+__git_complete_fetch_refspecs ()
+{
+	local i remote="$1" pfx="${2-}" cur_="${3-$cur}" sfx="${4- }"
+
+	__gitcomp_direct "$(
+		for i in $(__git_refs "$remote" "" "" "$cur_") ; do
+			echo "$pfx$i:$i$sfx"
+		done
+		)"
+}
+
+# __git_refs_remotes requires 1 argument (to pass to ls-remote)
+__git_refs_remotes ()
+{
+	local i hash
+	__git ls-remote "$1" 'refs/heads/*' | \
+	while read -r hash i; do
+		echo "$i:refs/remotes/$1/${i#refs/heads/}"
+	done
+}
+
+__git_remotes ()
+{
+	__git_find_repo_path
+	test -d "$__git_repo_path/remotes" && ls -1 "$__git_repo_path/remotes"
+	__git remote
+}
+
+# Returns true if $1 matches the name of a configured remote, false otherwise.
+__git_is_configured_remote ()
+{
+	local remote
+	for remote in $(__git_remotes); do
+		if [ "$remote" = "$1" ]; then
+			return 0
+		fi
+	done
+	return 1
+}
+
+__git_list_merge_strategies ()
+{
+	LANG=C LC_ALL=C git merge -s help 2>&1 |
+	sed -n -e '/[Aa]vailable strategies are: /,/^$/{
+		s/\.$//
+		s/.*://
+		s/^[ 	]*//
+		s/[ 	]*$//
+		p
+	}'
+}
+
+__git_merge_strategies=
+# 'git merge -s help' (and thus detection of the merge strategy
+# list) fails, unfortunately, if run outside of any git working
+# tree.  __git_merge_strategies is set to the empty string in
+# that case, and the detection will be repeated the next time it
+# is needed.
+__git_compute_merge_strategies ()
+{
+	test -n "$__git_merge_strategies" ||
+	__git_merge_strategies=$(__git_list_merge_strategies)
+}
+
+__git_merge_strategy_options="ours theirs subtree subtree= patience
+	histogram diff-algorithm= ignore-space-change ignore-all-space
+	ignore-space-at-eol renormalize no-renormalize no-renames
+	find-renames find-renames= rename-threshold="
+
+__git_complete_revlist_file ()
+{
+	local dequoted_word pfx ls ref cur_="$cur"
+	case "$cur_" in
+	*..?*:*)
+		return
+		;;
+	?*:*)
+		ref="${cur_%%:*}"
+		cur_="${cur_#*:}"
+
+		__git_dequote "$cur_"
+
+		case "$dequoted_word" in
+		?*/*)
+			pfx="${dequoted_word%/*}"
+			cur_="${dequoted_word##*/}"
+			ls="$ref:$pfx"
+			pfx="$pfx/"
+			;;
+		*)
+			cur_="$dequoted_word"
+			ls="$ref"
+			;;
+		esac
+
+		case "$COMP_WORDBREAKS" in
+		*:*) : great ;;
+		*)   pfx="$ref:$pfx" ;;
+		esac
+
+		__gitcomp_file "$(__git ls-tree "$ls" \
+				| sed 's/^.*	//
+				       s/$//')" \
+			"$pfx" "$cur_"
+		;;
+	*...*)
+		pfx="${cur_%...*}..."
+		cur_="${cur_#*...}"
+		__git_complete_refs --pfx="$pfx" --cur="$cur_"
+		;;
+	*..*)
+		pfx="${cur_%..*}.."
+		cur_="${cur_#*..}"
+		__git_complete_refs --pfx="$pfx" --cur="$cur_"
+		;;
+	*)
+		__git_complete_refs
+		;;
+	esac
+}
+
+__git_complete_file ()
+{
+	__git_complete_revlist_file
+}
+
+__git_complete_revlist ()
+{
+	__git_complete_revlist_file
+}
+
+__git_complete_remote_or_refspec ()
+{
+	local cur_="$cur" cmd="${words[1]}"
+	local i c=2 remote="" pfx="" lhs=1 no_complete_refspec=0
+	if [ "$cmd" = "remote" ]; then
+		((c++))
+	fi
+	while [ $c -lt $cword ]; do
+		i="${words[c]}"
+		case "$i" in
+		--mirror) [ "$cmd" = "push" ] && no_complete_refspec=1 ;;
+		-d|--delete) [ "$cmd" = "push" ] && lhs=0 ;;
+		--all)
+			case "$cmd" in
+			push) no_complete_refspec=1 ;;
+			fetch)
+				return
+				;;
+			*) ;;
+			esac
+			;;
+		--multiple) no_complete_refspec=1; break ;;
+		-*) ;;
+		*) remote="$i"; break ;;
+		esac
+		((c++))
+	done
+	if [ -z "$remote" ]; then
+		__gitcomp_nl "$(__git_remotes)"
+		return
+	fi
+	if [ $no_complete_refspec = 1 ]; then
+		return
+	fi
+	[ "$remote" = "." ] && remote=
+	case "$cur_" in
+	*:*)
+		case "$COMP_WORDBREAKS" in
+		*:*) : great ;;
+		*)   pfx="${cur_%%:*}:" ;;
+		esac
+		cur_="${cur_#*:}"
+		lhs=0
+		;;
+	+*)
+		pfx="+"
+		cur_="${cur_#+}"
+		;;
+	esac
+	case "$cmd" in
+	fetch)
+		if [ $lhs = 1 ]; then
+			__git_complete_fetch_refspecs "$remote" "$pfx" "$cur_"
+		else
+			__git_complete_refs --pfx="$pfx" --cur="$cur_"
+		fi
+		;;
+	pull|remote)
+		if [ $lhs = 1 ]; then
+			__git_complete_refs --remote="$remote" --pfx="$pfx" --cur="$cur_"
+		else
+			__git_complete_refs --pfx="$pfx" --cur="$cur_"
+		fi
+		;;
+	push)
+		if [ $lhs = 1 ]; then
+			__git_complete_refs --pfx="$pfx" --cur="$cur_"
+		else
+			__git_complete_refs --remote="$remote" --pfx="$pfx" --cur="$cur_"
+		fi
+		;;
+	esac
+}
+
+__git_complete_strategy ()
+{
+	__git_compute_merge_strategies
+	case "$prev" in
+	-s|--strategy)
+		__gitcomp "$__git_merge_strategies"
+		return 0
+		;;
+	-X)
+		__gitcomp "$__git_merge_strategy_options"
+		return 0
+		;;
+	esac
+	case "$cur" in
+	--strategy=*)
+		__gitcomp "$__git_merge_strategies" "" "${cur##--strategy=}"
+		return 0
+		;;
+	--strategy-option=*)
+		__gitcomp "$__git_merge_strategy_options" "" "${cur##--strategy-option=}"
+		return 0
+		;;
+	esac
+	return 1
+}
+
+__git_all_commands=
+__git_compute_all_commands ()
+{
+	test -n "$__git_all_commands" ||
+	__git_all_commands=$(__git --list-cmds=main,others,alias,nohelpers)
+}
+
+# Lists all set config variables starting with the given section prefix,
+# with the prefix removed.
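+# For example, with aliases "co = checkout" and "st = status" configured,
+# __git_get_config_variables "alias" prints "co" and "st".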
+__git_get_config_variables ()
+{
+	local section="$1" i IFS=$'\n'
+	for i in $(__git config --name-only --get-regexp "^$section\..*"); do
+		echo "${i#$section.}"
+	done
+}
+
+__git_pretty_aliases ()
+{
+	__git_get_config_variables "pretty"
+}
+
+# __git_aliased_command requires 1 argument
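+# For example, given an alias "lg = log --oneline", __git_aliased_command lg
+# prints "log"; given "visual = !gitk", it prints "gitk".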
+__git_aliased_command ()
+{
+	local word cmdline=$(__git config --get "alias.$1")
+	for word in $cmdline; do
+		case "$word" in
+		\!gitk|gitk)
+			echo "gitk"
+			return
+			;;
+		\!*)	: shell command alias ;;
+		-*)	: option ;;
+		*=*)	: setting env ;;
+		git)	: git itself ;;
+		\(\))   : skip parens of shell function definition ;;
+		{)	: skip start of shell helper function ;;
+		:)	: skip null command ;;
+		\'*)	: skip opening quote after sh -c ;;
+		*)
+			echo "$word"
+			return
+		esac
+	done
+}
+
+# Check whether one of the given words is present on the command line,
+# and print the first word found.
+#
+# Usage: __git_find_on_cmdline [<option>]... "<wordlist>"
+# --show-idx: Optionally show the index of the found word in the $words array.
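+#
+# For example, while completing "git stash list <TAB>" (so words=(git stash
+# list "") and cword=3), __git_find_on_cmdline "push list show" prints
+# "list"; with --show-idx it prints "2 list".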
+__git_find_on_cmdline ()
+{
+	local word c=1 show_idx
+
+	while test $# -gt 1; do
+		case "$1" in
+		--show-idx)	show_idx=y ;;
+		*)		return 1 ;;
+		esac
+		shift
+	done
+	local wordlist="$1"
+
+	while [ $c -lt $cword ]; do
+		for word in $wordlist; do
+			if [ "$word" = "${words[c]}" ]; then
+				if [ -n "$show_idx" ]; then
+					echo "$c $word"
+				else
+					echo "$word"
+				fi
+				return
+			fi
+		done
+		((c++))
+	done
+}
+
+# Echo the value of an option set on the command line or config
+#
+# $1: short option name
+# $2: long option name including =
+# $3: list of possible values
+# $4: config string (optional)
+#
+# example:
+# result="$(__git_get_option_value "-d" "--do-something=" \
+#     "yes no" "core.doSomething")"
+#
+# result is then either empty (no option set) or "yes" or "no"
+#
+# __git_get_option_value requires 3 arguments
+__git_get_option_value ()
+{
+	local c short_opt long_opt val
+	local result= values config_key word
+
+	short_opt="$1"
+	long_opt="$2"
+	values="$3"
+	config_key="$4"
+
+	((c = $cword - 1))
+	while [ $c -ge 0 ]; do
+		word="${words[c]}"
+		for val in $values; do
+			if [ "$short_opt$val" = "$word" ] ||
+			   [ "$long_opt$val"  = "$word" ]; then
+				result="$val"
+				break 2
+			fi
+		done
+		((c--))
+	done
+
+	if [ -n "$config_key" ] && [ -z "$result" ]; then
+		result="$(__git config "$config_key")"
+	fi
+
+	echo "$result"
+}
+
+__git_has_doubledash ()
+{
+	local c=1
+	while [ $c -lt $cword ]; do
+		if [ "--" = "${words[c]}" ]; then
+			return 0
+		fi
+		((c++))
+	done
+	return 1
+}
+
+# Try to count the non-option arguments passed on the command line for the
+# specified git command.
+# When options are used, it is necessary to use the special -- option to
+# tell the implementation where the non-option arguments begin.
+# XXX this cannot be improved, since options can appear everywhere, as
+# an example:
+#	git mv x -n y
+#
+# __git_count_arguments requires 1 argument: the git command executed.
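+#
+# For example, for "git mv file1 file2 <TAB>" this prints "2", which lets
+# _git_mv know that at least one path has already been given.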
+__git_count_arguments ()
+{
+	local word i c=0
+
+	# Skip "git" (first argument)
+	for ((i=1; i < ${#words[@]}; i++)); do
+		word="${words[i]}"
+
+		case "$word" in
+			--)
+				# Good; we can assume that the following are only
+				# non-option arguments.
+				((c = 0))
+				;;
+			"$1")
+				# Skip the specified git command and discard git
+				# main options
+				((c = 0))
+				;;
+			?*)
+				((c++))
+				;;
+		esac
+	done
+
+	printf "%d" $c
+}
+
+__git_whitespacelist="nowarn warn error error-all fix"
+__git_patchformat="mbox stgit stgit-series hg mboxrd"
+__git_showcurrentpatch="diff raw"
+__git_am_inprogress_options="--skip --continue --resolved --abort --quit --show-current-patch"
+
+_git_am ()
+{
+	__git_find_repo_path
+	if [ -d "$__git_repo_path"/rebase-apply ]; then
+		__gitcomp "$__git_am_inprogress_options"
+		return
+	fi
+	case "$cur" in
+	--whitespace=*)
+		__gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
+		return
+		;;
+	--patch-format=*)
+		__gitcomp "$__git_patchformat" "" "${cur##--patch-format=}"
+		return
+		;;
+	--show-current-patch=*)
+		__gitcomp "$__git_showcurrentpatch" "" "${cur##--show-current-patch=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin am "" \
+			"$__git_am_inprogress_options"
+		return
+	esac
+}
+
+_git_apply ()
+{
+	case "$cur" in
+	--whitespace=*)
+		__gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin apply
+		return
+	esac
+}
+
+_git_add ()
+{
+	case "$cur" in
+	--chmod=*)
+		__gitcomp "+x -x" "" "${cur##--chmod=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin add
+		return
+	esac
+
+	local complete_opt="--others --modified --directory --no-empty-directory"
+	if test -n "$(__git_find_on_cmdline "-u --update")"
+	then
+		complete_opt="--modified"
+	fi
+	__git_complete_index_file "$complete_opt"
+}
+
+_git_archive ()
+{
+	case "$cur" in
+	--format=*)
+		__gitcomp "$(git archive --list)" "" "${cur##--format=}"
+		return
+		;;
+	--remote=*)
+		__gitcomp_nl "$(__git_remotes)" "" "${cur##--remote=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin archive "--format= --list --verbose --prefix= --worktree-attributes"
+		return
+		;;
+	esac
+	__git_complete_file
+}
+
+_git_bisect ()
+{
+	__git_has_doubledash && return
+
+	local subcommands="start bad good skip reset visualize replay log run"
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+	if [ -z "$subcommand" ]; then
+		__git_find_repo_path
+		if [ -f "$__git_repo_path"/BISECT_START ]; then
+			__gitcomp "$subcommands"
+		else
+			__gitcomp "replay start"
+		fi
+		return
+	fi
+
+	case "$subcommand" in
+	bad|good|reset|skip|start)
+		__git_complete_refs
+		;;
+	*)
+		;;
+	esac
+}
+
+__git_ref_fieldlist="refname objecttype objectsize objectname upstream push HEAD symref"
+
+_git_branch ()
+{
+	local i c=1 only_local_ref="n" has_r="n"
+
+	while [ $c -lt $cword ]; do
+		i="${words[c]}"
+		case "$i" in
+		-d|--delete|-m|--move)	only_local_ref="y" ;;
+		-r|--remotes)		has_r="y" ;;
+		esac
+		((c++))
+	done
+
+	case "$cur" in
+	--set-upstream-to=*)
+		__git_complete_refs --cur="${cur##--set-upstream-to=}"
+		;;
+	--*)
+		__gitcomp_builtin branch
+		;;
+	*)
+		if [ $only_local_ref = "y" -a $has_r = "n" ]; then
+			__gitcomp_direct "$(__git_heads "" "$cur" " ")"
+		else
+			__git_complete_refs
+		fi
+		;;
+	esac
+}
+
+_git_bundle ()
+{
+	local cmd="${words[2]}"
+	case "$cword" in
+	2)
+		__gitcomp "create list-heads verify unbundle"
+		;;
+	3)
+		# looking for a file
+		;;
+	*)
+		case "$cmd" in
+			create)
+				__git_complete_revlist
+			;;
+		esac
+		;;
+	esac
+}
+
+_git_checkout ()
+{
+	__git_has_doubledash && return
+
+	case "$cur" in
+	--conflict=*)
+		__gitcomp "diff3 merge" "" "${cur##--conflict=}"
+		;;
+	--*)
+		__gitcomp_builtin checkout
+		;;
+	*)
+		# check if --track, --no-track, or --no-guess was specified
+		# if so, disable DWIM mode
+		local flags="--track --no-track --no-guess" track_opt="--track"
+		if [ "$GIT_COMPLETION_CHECKOUT_NO_GUESS" = "1" ] ||
+		   [ -n "$(__git_find_on_cmdline "$flags")" ]; then
+			track_opt=''
+		fi
+		__git_complete_refs $track_opt
+		;;
+	esac
+}
+
+__git_sequencer_inprogress_options="--continue --quit --abort --skip"
+
+__git_cherry_pick_inprogress_options=$__git_sequencer_inprogress_options
+
+_git_cherry_pick ()
+{
+	__git_find_repo_path
+	if [ -f "$__git_repo_path"/CHERRY_PICK_HEAD ]; then
+		__gitcomp "$__git_cherry_pick_inprogress_options"
+		return
+	fi
+
+	__git_complete_strategy && return
+
+	case "$cur" in
+	--*)
+		__gitcomp_builtin cherry-pick "" \
+			"$__git_cherry_pick_inprogress_options"
+		;;
+	*)
+		__git_complete_refs
+		;;
+	esac
+}
+
+_git_clean ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin clean
+		return
+		;;
+	esac
+
+	# XXX should we check for -x option ?
+	__git_complete_index_file "--others --directory"
+}
+
+_git_clone ()
+{
+	case "$prev" in
+	-c|--config)
+		__git_complete_config_variable_name_and_value
+		return
+		;;
+	esac
+	case "$cur" in
+	--config=*)
+		__git_complete_config_variable_name_and_value \
+			--cur="${cur##--config=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin clone
+		return
+		;;
+	esac
+}
+
+__git_untracked_file_modes="all no normal"
+
+_git_commit ()
+{
+	case "$prev" in
+	-c|-C)
+		__git_complete_refs
+		return
+		;;
+	esac
+
+	case "$cur" in
+	--cleanup=*)
+		__gitcomp "default scissors strip verbatim whitespace
+			" "" "${cur##--cleanup=}"
+		return
+		;;
+	--reuse-message=*|--reedit-message=*|\
+	--fixup=*|--squash=*)
+		__git_complete_refs --cur="${cur#*=}"
+		return
+		;;
+	--untracked-files=*)
+		__gitcomp "$__git_untracked_file_modes" "" "${cur##--untracked-files=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin commit
+		return
+	esac
+
+	if __git rev-parse --verify --quiet HEAD >/dev/null; then
+		__git_complete_index_file "--committable"
+	else
+		# This is the first commit
+		__git_complete_index_file "--cached"
+	fi
+}
+
+_git_describe ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin describe
+		return
+	esac
+	__git_complete_refs
+}
+
+__git_diff_algorithms="myers minimal patience histogram"
+
+__git_diff_submodule_formats="diff log short"
+
+__git_color_moved_opts="no default plain blocks zebra dimmed-zebra"
+
+__git_color_moved_ws_opts="no ignore-space-at-eol ignore-space-change
+			ignore-all-space allow-indentation-change"
+
+__git_diff_common_options="--stat --numstat --shortstat --summary
+			--patch-with-stat --name-only --name-status --color
+			--no-color --color-words --no-renames --check
+			--color-moved --color-moved= --no-color-moved
+			--color-moved-ws= --no-color-moved-ws
+			--full-index --binary --abbrev --diff-filter=
+			--find-copies-harder --ignore-cr-at-eol
+			--text --ignore-space-at-eol --ignore-space-change
+			--ignore-all-space --ignore-blank-lines --exit-code
+			--quiet --ext-diff --no-ext-diff
+			--no-prefix --src-prefix= --dst-prefix=
+			--inter-hunk-context=
+			--patience --histogram --minimal
+			--raw --word-diff --word-diff-regex=
+			--dirstat --dirstat= --dirstat-by-file
+			--dirstat-by-file= --cumulative
+			--diff-algorithm=
+			--submodule --submodule= --ignore-submodules
+			--indent-heuristic --no-indent-heuristic
+			--textconv --no-textconv
+"
+
+_git_diff ()
+{
+	__git_has_doubledash && return
+
+	case "$cur" in
+	--diff-algorithm=*)
+		__gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
+		return
+		;;
+	--submodule=*)
+		__gitcomp "$__git_diff_submodule_formats" "" "${cur##--submodule=}"
+		return
+		;;
+	--color-moved=*)
+		__gitcomp "$__git_color_moved_opts" "" "${cur##--color-moved=}"
+		return
+		;;
+	--color-moved-ws=*)
+		__gitcomp "$__git_color_moved_ws_opts" "" "${cur##--color-moved-ws=}"
+		return
+		;;
+	--*)
+		__gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
+			--base --ours --theirs --no-index
+			$__git_diff_common_options
+			"
+		return
+		;;
+	esac
+	__git_complete_revlist_file
+}
+
+__git_mergetools_common="diffuse diffmerge ecmerge emerge kdiff3 meld opendiff
+			tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc
+			codecompare smerge
+"
+
+_git_difftool ()
+{
+	__git_has_doubledash && return
+
+	case "$cur" in
+	--tool=*)
+		__gitcomp "$__git_mergetools_common kompare" "" "${cur##--tool=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin difftool "$__git_diff_common_options
+					--base --cached --ours --theirs
+					--pickaxe-all --pickaxe-regex
+					--relative --staged
+					"
+		return
+		;;
+	esac
+	__git_complete_revlist_file
+}
+
+__git_fetch_recurse_submodules="yes on-demand no"
+
+_git_fetch ()
+{
+	case "$cur" in
+	--recurse-submodules=*)
+		__gitcomp "$__git_fetch_recurse_submodules" "" "${cur##--recurse-submodules=}"
+		return
+		;;
+	--filter=*)
+		__gitcomp "blob:none blob:limit= sparse:oid=" "" "${cur##--filter=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin fetch
+		return
+		;;
+	esac
+	__git_complete_remote_or_refspec
+}
+
+__git_format_patch_extra_options="
+	--full-index --not --all --no-prefix --src-prefix=
+	--dst-prefix= --notes
+"
+
+_git_format_patch ()
+{
+	case "$cur" in
+	--thread=*)
+		__gitcomp "
+			deep shallow
+			" "" "${cur##--thread=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin format-patch "$__git_format_patch_extra_options"
+		return
+		;;
+	esac
+	__git_complete_revlist
+}
+
+_git_fsck ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin fsck
+		return
+		;;
+	esac
+}
+
+_git_gitk ()
+{
+	_gitk
+}
+
+# Lists matching symbol names from a tag (as in ctags) file.
+# 1: List symbol names matching this word.
+# 2: The tag file to list symbol names from.
+# 3: A prefix to be added to each listed symbol name (optional).
+# 4: A suffix to be appended to each listed symbol name (optional).
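+#
+# For example, if the tag file contains a line whose first field is "main",
+# __git_match_ctag "ma" tags "-S" " " prints "-Smain ".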
+__git_match_ctag () {
+	awk -v pfx="${3-}" -v sfx="${4-}" "
+		/^${1//\//\\/}/ { print pfx \$1 sfx }
+		" "$2"
+}
+
+# Complete symbol names from a tag file.
+# Usage: __git_complete_symbol [<option>]...
+# --tags=<file>: The tag file to list symbol names from instead of the
+#                default "tags".
+# --pfx=<prefix>: A prefix to be added to each symbol name.
+# --cur=<word>: The current symbol name to be completed.  Defaults to
+#               the current word to be completed.
+# --sfx=<suffix>: A suffix to be appended to each symbol name instead
+#                 of the default space.
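+#
+# For example, _git_log uses this so that "git log -Smai<TAB>" offers
+# "-Smain" when a readable "tags" file lists a "main" symbol.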
+__git_complete_symbol () {
+	local tags=tags pfx="" cur_="${cur-}" sfx=" "
+
+	while test $# != 0; do
+		case "$1" in
+		--tags=*)	tags="${1##--tags=}" ;;
+		--pfx=*)	pfx="${1##--pfx=}" ;;
+		--cur=*)	cur_="${1##--cur=}" ;;
+		--sfx=*)	sfx="${1##--sfx=}" ;;
+		*)		return 1 ;;
+		esac
+		shift
+	done
+
+	if test -r "$tags"; then
+		__gitcomp_direct "$(__git_match_ctag "$cur_" "$tags" "$pfx" "$sfx")"
+	fi
+}
+
+_git_grep ()
+{
+	__git_has_doubledash && return
+
+	case "$cur" in
+	--*)
+		__gitcomp_builtin grep
+		return
+		;;
+	esac
+
+	case "$cword,$prev" in
+	2,*|*,-*)
+		__git_complete_symbol && return
+		;;
+	esac
+
+	__git_complete_refs
+}
+
+_git_help ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin help
+		return
+		;;
+	esac
+	if test -n "$GIT_TESTING_ALL_COMMAND_LIST"
+	then
+		__gitcomp "$GIT_TESTING_ALL_COMMAND_LIST $(__git --list-cmds=alias,list-guide) gitk"
+	else
+		__gitcomp "$(__git --list-cmds=main,nohelpers,alias,list-guide) gitk"
+	fi
+}
+
+_git_init ()
+{
+	case "$cur" in
+	--shared=*)
+		__gitcomp "
+			false true umask group all world everybody
+			" "" "${cur##--shared=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin init
+		return
+		;;
+	esac
+}
+
+_git_ls_files ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin ls-files
+		return
+		;;
+	esac
+
+	# XXX ignore options like --modified and always suggest all cached
+	# files.
+	__git_complete_index_file "--cached"
+}
+
+_git_ls_remote ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin ls-remote
+		return
+		;;
+	esac
+	__gitcomp_nl "$(__git_remotes)"
+}
+
+_git_ls_tree ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin ls-tree
+		return
+		;;
+	esac
+
+	__git_complete_file
+}
+
+# Options that go well for log, shortlog and gitk
+__git_log_common_options="
+	--not --all
+	--branches --tags --remotes
+	--first-parent --merges --no-merges
+	--max-count=
+	--max-age= --since= --after=
+	--min-age= --until= --before=
+	--min-parents= --max-parents=
+	--no-min-parents --no-max-parents
+"
+# Options that go well for log and gitk (not shortlog)
+__git_log_gitk_options="
+	--dense --sparse --full-history
+	--simplify-merges --simplify-by-decoration
+	--left-right --notes --no-notes
+"
+# Options that go well for log and shortlog (not gitk)
+__git_log_shortlog_options="
+	--author= --committer= --grep=
+	--all-match --invert-grep
+"
+
+__git_log_pretty_formats="oneline short medium full fuller reference email raw format: tformat: mboxrd"
+__git_log_date_formats="relative iso8601 iso8601-strict rfc2822 short local default raw unix format:"
+
+_git_log ()
+{
+	__git_has_doubledash && return
+	__git_find_repo_path
+
+	local merge=""
+	if [ -f "$__git_repo_path/MERGE_HEAD" ]; then
+		merge="--merge"
+	fi
+	case "$prev,$cur" in
+	-L,:*:*)
+		return	# fall back to Bash filename completion
+		;;
+	-L,:*)
+		__git_complete_symbol --cur="${cur#:}" --sfx=":"
+		return
+		;;
+	-G,*|-S,*)
+		__git_complete_symbol
+		return
+		;;
+	esac
+	case "$cur" in
+	--pretty=*|--format=*)
+		__gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
+			" "" "${cur#*=}"
+		return
+		;;
+	--date=*)
+		__gitcomp "$__git_log_date_formats" "" "${cur##--date=}"
+		return
+		;;
+	--decorate=*)
+		__gitcomp "full short no" "" "${cur##--decorate=}"
+		return
+		;;
+	--diff-algorithm=*)
+		__gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
+		return
+		;;
+	--submodule=*)
+		__gitcomp "$__git_diff_submodule_formats" "" "${cur##--submodule=}"
+		return
+		;;
+	--no-walk=*)
+		__gitcomp "sorted unsorted" "" "${cur##--no-walk=}"
+		return
+		;;
+	--*)
+		__gitcomp "
+			$__git_log_common_options
+			$__git_log_shortlog_options
+			$__git_log_gitk_options
+			--root --topo-order --date-order --reverse
+			--follow --full-diff
+			--abbrev-commit --no-abbrev-commit --abbrev=
+			--relative-date --date=
+			--pretty= --format= --oneline
+			--show-signature
+			--cherry-mark
+			--cherry-pick
+			--graph
+			--decorate --decorate= --no-decorate
+			--walk-reflogs
+			--no-walk --no-walk= --do-walk
+			--parents --children
+			--expand-tabs --expand-tabs= --no-expand-tabs
+			--patch
+			$merge
+			$__git_diff_common_options
+			--pickaxe-all --pickaxe-regex
+			"
+		return
+		;;
+	-L:*:*)
+		return	# fall back to Bash filename completion
+		;;
+	-L:*)
+		__git_complete_symbol --cur="${cur#-L:}" --sfx=":"
+		return
+		;;
+	-G*)
+		__git_complete_symbol --pfx="-G" --cur="${cur#-G}"
+		return
+		;;
+	-S*)
+		__git_complete_symbol --pfx="-S" --cur="${cur#-S}"
+		return
+		;;
+	esac
+	__git_complete_revlist
+}
+
+_git_merge ()
+{
+	__git_complete_strategy && return
+
+	case "$cur" in
+	--*)
+		__gitcomp_builtin merge
+		return
+	esac
+	__git_complete_refs
+}
+
+_git_mergetool ()
+{
+	case "$cur" in
+	--tool=*)
+		__gitcomp "$__git_mergetools_common tortoisemerge" "" "${cur##--tool=}"
+		return
+		;;
+	--*)
+		__gitcomp "--tool= --prompt --no-prompt --gui --no-gui"
+		return
+		;;
+	esac
+}
+
+_git_merge_base ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin merge-base
+		return
+		;;
+	esac
+	__git_complete_refs
+}
+
+_git_mv ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin mv
+		return
+		;;
+	esac
+
+	if [ $(__git_count_arguments "mv") -gt 0 ]; then
+		# We need to show both cached and untracked files (including
+		# empty directories) since this may not be the last argument.
+		__git_complete_index_file "--cached --others --directory"
+	else
+		__git_complete_index_file "--cached"
+	fi
+}
+
+_git_notes ()
+{
+	local subcommands='add append copy edit get-ref list merge prune remove show'
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+
+	case "$subcommand,$cur" in
+	,--*)
+		__gitcomp_builtin notes
+		;;
+	,*)
+		case "$prev" in
+		--ref)
+			__git_complete_refs
+			;;
+		*)
+			__gitcomp "$subcommands --ref"
+			;;
+		esac
+		;;
+	*,--reuse-message=*|*,--reedit-message=*)
+		__git_complete_refs --cur="${cur#*=}"
+		;;
+	*,--*)
+		__gitcomp_builtin notes_$subcommand
+		;;
+	prune,*|get-ref,*)
+		# this command does not take a ref, do not complete it
+		;;
+	*)
+		case "$prev" in
+		-m|-F)
+			;;
+		*)
+			__git_complete_refs
+			;;
+		esac
+		;;
+	esac
+}
+
+_git_pull ()
+{
+	__git_complete_strategy && return
+
+	case "$cur" in
+	--recurse-submodules=*)
+		__gitcomp "$__git_fetch_recurse_submodules" "" "${cur##--recurse-submodules=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin pull
+
+		return
+		;;
+	esac
+	__git_complete_remote_or_refspec
+}
+
+__git_push_recurse_submodules="check on-demand only"
+
+__git_complete_force_with_lease ()
+{
+	local cur_=$1
+
+	case "$cur_" in
+	--*=)
+		;;
+	*:*)
+		__git_complete_refs --cur="${cur_#*:}"
+		;;
+	*)
+		__git_complete_refs --cur="$cur_"
+		;;
+	esac
+}
+
+_git_push ()
+{
+	case "$prev" in
+	--repo)
+		__gitcomp_nl "$(__git_remotes)"
+		return
+		;;
+	--recurse-submodules)
+		__gitcomp "$__git_push_recurse_submodules"
+		return
+		;;
+	esac
+	case "$cur" in
+	--repo=*)
+		__gitcomp_nl "$(__git_remotes)" "" "${cur##--repo=}"
+		return
+		;;
+	--recurse-submodules=*)
+		__gitcomp "$__git_push_recurse_submodules" "" "${cur##--recurse-submodules=}"
+		return
+		;;
+	--force-with-lease=*)
+		__git_complete_force_with_lease "${cur##--force-with-lease=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin push
+		return
+		;;
+	esac
+	__git_complete_remote_or_refspec
+}
+
+_git_range_diff ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp "
+			--creation-factor= --no-dual-color
+			$__git_diff_common_options
+		"
+		return
+		;;
+	esac
+	__git_complete_revlist
+}
+
+__git_rebase_inprogress_options="--continue --skip --abort --quit --show-current-patch"
+__git_rebase_interactive_inprogress_options="$__git_rebase_inprogress_options --edit-todo"
+
+_git_rebase ()
+{
+	__git_find_repo_path
+	if [ -f "$__git_repo_path"/rebase-merge/interactive ]; then
+		__gitcomp "$__git_rebase_interactive_inprogress_options"
+		return
+	elif [ -d "$__git_repo_path"/rebase-apply ] || \
+	     [ -d "$__git_repo_path"/rebase-merge ]; then
+		__gitcomp "$__git_rebase_inprogress_options"
+		return
+	fi
+	__git_complete_strategy && return
+	case "$cur" in
+	--whitespace=*)
+		__gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
+		return
+		;;
+	--onto=*)
+		__git_complete_refs --cur="${cur##--onto=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin rebase "" \
+			"$__git_rebase_interactive_inprogress_options"
+
+		return
+	esac
+	__git_complete_refs
+}
+
+_git_reflog ()
+{
+	local subcommands="show delete expire"
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+
+	if [ -z "$subcommand" ]; then
+		__gitcomp "$subcommands"
+	else
+		__git_complete_refs
+	fi
+}
+
+__git_send_email_confirm_options="always never auto cc compose"
+__git_send_email_suppresscc_options="author self cc bodycc sob cccmd body all"
+
+_git_send_email ()
+{
+	case "$prev" in
+	--to|--cc|--bcc|--from)
+		__gitcomp "$(__git send-email --dump-aliases)"
+		return
+		;;
+	esac
+
+	case "$cur" in
+	--confirm=*)
+		__gitcomp "
+			$__git_send_email_confirm_options
+			" "" "${cur##--confirm=}"
+		return
+		;;
+	--suppress-cc=*)
+		__gitcomp "
+			$__git_send_email_suppresscc_options
+			" "" "${cur##--suppress-cc=}"
+
+		return
+		;;
+	--smtp-encryption=*)
+		__gitcomp "ssl tls" "" "${cur##--smtp-encryption=}"
+		return
+		;;
+	--thread=*)
+		__gitcomp "
+			deep shallow
+			" "" "${cur##--thread=}"
+		return
+		;;
+	--to=*|--cc=*|--bcc=*|--from=*)
+		__gitcomp "$(__git send-email --dump-aliases)" "" "${cur#--*=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin send-email "--annotate --bcc --cc --cc-cmd --chain-reply-to
+			--compose --confirm= --dry-run --envelope-sender
+			--from --identity
+			--in-reply-to --no-chain-reply-to --no-signed-off-by-cc
+			--no-suppress-from --no-thread --quiet --reply-to
+			--signed-off-by-cc --smtp-pass --smtp-server
+			--smtp-server-port --smtp-encryption= --smtp-user
+			--subject --suppress-cc= --suppress-from --thread --to
+			--validate --no-validate
+			$__git_format_patch_extra_options"
+		return
+		;;
+	esac
+	__git_complete_revlist
+}
+
+_git_stage ()
+{
+	_git_add
+}
+
+_git_status ()
+{
+	local complete_opt
+	local untracked_state
+
+	case "$cur" in
+	--ignore-submodules=*)
+		__gitcomp "none untracked dirty all" "" "${cur##--ignore-submodules=}"
+		return
+		;;
+	--untracked-files=*)
+		__gitcomp "$__git_untracked_file_modes" "" "${cur##--untracked-files=}"
+		return
+		;;
+	--column=*)
+		__gitcomp "
+			always never auto column row plain dense nodense
+			" "" "${cur##--column=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin status
+		return
+		;;
+	esac
+
+	untracked_state="$(__git_get_option_value "-u" "--untracked-files=" \
+		"$__git_untracked_file_modes" "status.showUntrackedFiles")"
+
+	case "$untracked_state" in
+	no)
+		# --ignored option does not matter
+		complete_opt=
+		;;
+	all|normal|*)
+		complete_opt="--cached --directory --no-empty-directory --others"
+
+		if [ -n "$(__git_find_on_cmdline "--ignored")" ]; then
+			complete_opt="$complete_opt --ignored --exclude=*"
+		fi
+		;;
+	esac
+
+	__git_complete_index_file "$complete_opt"
+}
+
+_git_switch ()
+{
+	case "$cur" in
+	--conflict=*)
+		__gitcomp "diff3 merge" "" "${cur##--conflict=}"
+		;;
+	--*)
+		__gitcomp_builtin switch
+		;;
+	*)
+		# check if --track, --no-track, or --no-guess was specified
+		# if so, disable DWIM mode
+		local track_opt="--track" only_local_ref=n
+		if [ "$GIT_COMPLETION_CHECKOUT_NO_GUESS" = "1" ] ||
+		   [ -n "$(__git_find_on_cmdline "--track --no-track --no-guess")" ]; then
+			track_opt=''
+		fi
+		# explicit --guess enables DWIM mode regardless of
+		# $GIT_COMPLETION_CHECKOUT_NO_GUESS
+		if [ -n "$(__git_find_on_cmdline "--guess")" ]; then
+			track_opt='--track'
+		fi
+		if [ -z "$(__git_find_on_cmdline "-d --detach")" ]; then
+			only_local_ref=y
+		else
+			# --guess --detach is an invalid combination; no
+			# DWIM will be done when --detach is specified
+			track_opt=
+		fi
+		if [ $only_local_ref = y -a -z "$track_opt" ]; then
+			__gitcomp_direct "$(__git_heads "" "$cur" " ")"
+		else
+			__git_complete_refs $track_opt
+		fi
+		;;
+	esac
+}
+
+__git_config_get_set_variables ()
+{
+	local prevword word config_file= c=$cword
+	while [ $c -gt 1 ]; do
+		word="${words[c]}"
+		case "$word" in
+		--system|--global|--local|--file=*)
+			config_file="$word"
+			break
+			;;
+		-f|--file)
+			config_file="$word $prevword"
+			break
+			;;
+		esac
+		prevword=$word
+		c=$((--c))
+	done
+
+	__git config $config_file --name-only --list
+}
+
+__git_config_vars=
+__git_compute_config_vars ()
+{
+	test -n "$__git_config_vars" ||
+	__git_config_vars="$(git help --config-for-completion | sort -u)"
+}
+
+# Completes possible values of various configuration variables.
+#
+# Usage: __git_complete_config_variable_value [<option>]...
+# --varname=<word>: The name of the configuration variable whose value is
+#                   to be completed.  Defaults to the previous word on the
+#                   command line.
+# --cur=<word>: The current value to be completed.  Defaults to the current
+#               word to be completed.
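+#
+# For example, "git config branch.master.rebase <TAB>" offers "false true
+# merges preserve interactive", and "git config color.ui <TAB>" offers
+# "false true always never auto" ("master" being just an example branch).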
+__git_complete_config_variable_value ()
+{
+	local varname="$prev" cur_="$cur"
+
+	while test $# != 0; do
+		case "$1" in
+		--varname=*)	varname="${1##--varname=}" ;;
+		--cur=*)	cur_="${1##--cur=}" ;;
+		*)		return 1 ;;
+		esac
+		shift
+	done
+
+	if [ "${BASH_VERSINFO[0]:-0}" -ge 4 ]; then
+		varname="${varname,,}"
+	else
+		varname="$(echo "$varname" |tr A-Z a-z)"
+	fi
+
+	case "$varname" in
+	branch.*.remote|branch.*.pushremote)
+		__gitcomp_nl "$(__git_remotes)" "" "$cur_"
+		return
+		;;
+	branch.*.merge)
+		__git_complete_refs --cur="$cur_"
+		return
+		;;
+	branch.*.rebase)
+		__gitcomp "false true merges preserve interactive" "" "$cur_"
+		return
+		;;
+	remote.pushdefault)
+		__gitcomp_nl "$(__git_remotes)" "" "$cur_"
+		return
+		;;
+	remote.*.fetch)
+		local remote="${varname#remote.}"
+		remote="${remote%.fetch}"
+		if [ -z "$cur_" ]; then
+			__gitcomp_nl "refs/heads/" "" "" ""
+			return
+		fi
+		__gitcomp_nl "$(__git_refs_remotes "$remote")" "" "$cur_"
+		return
+		;;
+	remote.*.push)
+		local remote="${varname#remote.}"
+		remote="${remote%.push}"
+		__gitcomp_nl "$(__git for-each-ref \
+			--format='%(refname):%(refname)' refs/heads)" "" "$cur_"
+		return
+		;;
+	pull.twohead|pull.octopus)
+		__git_compute_merge_strategies
+		__gitcomp "$__git_merge_strategies" "" "$cur_"
+		return
+		;;
+	color.pager)
+		__gitcomp "false true" "" "$cur_"
+		return
+		;;
+	color.*.*)
+		__gitcomp "
+			normal black red green yellow blue magenta cyan white
+			bold dim ul blink reverse
+			" "" "$cur_"
+		return
+		;;
+	color.*)
+		__gitcomp "false true always never auto" "" "$cur_"
+		return
+		;;
+	diff.submodule)
+		__gitcomp "$__git_diff_submodule_formats" "" "$cur_"
+		return
+		;;
+	help.format)
+		__gitcomp "man info web html" "" "$cur_"
+		return
+		;;
+	log.date)
+		__gitcomp "$__git_log_date_formats" "" "$cur_"
+		return
+		;;
+	sendemail.aliasfiletype)
+		__gitcomp "mutt mailrc pine elm gnus" "" "$cur_"
+		return
+		;;
+	sendemail.confirm)
+		__gitcomp "$__git_send_email_confirm_options" "" "$cur_"
+		return
+		;;
+	sendemail.suppresscc)
+		__gitcomp "$__git_send_email_suppresscc_options" "" "$cur_"
+		return
+		;;
+	sendemail.transferencoding)
+		__gitcomp "7bit 8bit quoted-printable base64" "" "$cur_"
+		return
+		;;
+	*.*)
+		return
+		;;
+	esac
+}
+
+# Completes configuration sections, subsections, variable names.
+#
+# Usage: __git_complete_config_variable_name [<option>]...
+# --cur=<word>: The current configuration section/variable name to be
+#               completed.  Defaults to the current word to be completed.
+# --sfx=<suffix>: A suffix to be appended to each fully completed
+#                 configuration variable name (but not to sections or
+#                 subsections) instead of the default space.
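+#
+# For example, "git config bran<TAB>" offers the "branch." section,
+# "git config branch.<TAB>" offers the existing branches (plus
+# "autoSetupMerge" and "autoSetupRebase"), and "git config branch.master.<TAB>"
+# offers "remote pushRemote merge mergeOptions rebase".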
+__git_complete_config_variable_name ()
+{
+	local cur_="$cur" sfx
+
+	while test $# != 0; do
+		case "$1" in
+		--cur=*)	cur_="${1##--cur=}" ;;
+		--sfx=*)	sfx="${1##--sfx=}" ;;
+		*)		return 1 ;;
+		esac
+		shift
+	done
+
+	case "$cur_" in
+	branch.*.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_##*.}"
+		__gitcomp "remote pushRemote merge mergeOptions rebase" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	branch.*)
+		local pfx="${cur%.*}."
+		cur_="${cur#*.}"
+		__gitcomp_direct "$(__git_heads "$pfx" "$cur_" ".")"
+		__gitcomp_nl_append $'autoSetupMerge\nautoSetupRebase\n' "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	guitool.*.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_##*.}"
+		__gitcomp "
+			argPrompt cmd confirm needsFile noConsole noRescan
+			prompt revPrompt revUnmerged title
+			" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	difftool.*.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_##*.}"
+		__gitcomp "cmd path" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	man.*.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_##*.}"
+		__gitcomp "cmd path" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	mergetool.*.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_##*.}"
+		__gitcomp "cmd path trustExitCode" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	pager.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_#*.}"
+		__git_compute_all_commands
+		__gitcomp_nl "$__git_all_commands" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	remote.*.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_##*.}"
+		__gitcomp "
+			url proxy fetch push mirror skipDefaultUpdate
+			receivepack uploadpack tagOpt pushurl
+			" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	remote.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_#*.}"
+		__gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "."
+		__gitcomp_nl_append "pushDefault" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	url.*.*)
+		local pfx="${cur_%.*}."
+		cur_="${cur_##*.}"
+		__gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_" "$sfx"
+		return
+		;;
+	*.*)
+		__git_compute_config_vars
+		__gitcomp "$__git_config_vars" "" "$cur_" "$sfx"
+		;;
+	*)
+		__git_compute_config_vars
+		__gitcomp "$(echo "$__git_config_vars" |
+				awk -F . '{
+					sections[$1] = 1
+				}
+				END {
+					for (s in sections)
+						print s "."
+				}
+				')" "" "$cur_"
+		;;
+	esac
+}
+
+# Completes '='-separated configuration sections/variable names and values
+# for 'git -c section.name=value'.
+#
+# Usage: __git_complete_config_variable_name_and_value [<option>]...
+# --cur=<word>: The current configuration section/variable name/value to be
+#               completed. Defaults to the current word to be completed.
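+#
+# For example, "git -c color.u<TAB>" offers "color.ui=", and
+# "git -c color.ui=<TAB>" then offers "false true always never auto".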
+__git_complete_config_variable_name_and_value ()
+{
+	local cur_="$cur"
+
+	while test $# != 0; do
+		case "$1" in
+		--cur=*)	cur_="${1##--cur=}" ;;
+		*)		return 1 ;;
+		esac
+		shift
+	done
+
+	case "$cur_" in
+	*=*)
+		__git_complete_config_variable_value \
+			--varname="${cur_%%=*}" --cur="${cur_#*=}"
+		;;
+	*)
+		__git_complete_config_variable_name --cur="$cur_" --sfx='='
+		;;
+	esac
+}
+
+_git_config ()
+{
+	case "$prev" in
+	--get|--get-all|--unset|--unset-all)
+		__gitcomp_nl "$(__git_config_get_set_variables)"
+		return
+		;;
+	*.*)
+		__git_complete_config_variable_value
+		return
+		;;
+	esac
+	case "$cur" in
+	--*)
+		__gitcomp_builtin config
+		;;
+	*)
+		__git_complete_config_variable_name
+		;;
+	esac
+}
+
+_git_remote ()
+{
+	local subcommands="
+		add rename remove set-head set-branches
+		get-url set-url show prune update
+		"
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+	if [ -z "$subcommand" ]; then
+		case "$cur" in
+		--*)
+			__gitcomp_builtin remote
+			;;
+		*)
+			__gitcomp "$subcommands"
+			;;
+		esac
+		return
+	fi
+
+	case "$subcommand,$cur" in
+	add,--*)
+		__gitcomp_builtin remote_add
+		;;
+	add,*)
+		;;
+	set-head,--*)
+		__gitcomp_builtin remote_set-head
+		;;
+	set-branches,--*)
+		__gitcomp_builtin remote_set-branches
+		;;
+	set-head,*|set-branches,*)
+		__git_complete_remote_or_refspec
+		;;
+	update,--*)
+		__gitcomp_builtin remote_update
+		;;
+	update,*)
+		__gitcomp "$(__git_remotes) $(__git_get_config_variables "remotes")"
+		;;
+	set-url,--*)
+		__gitcomp_builtin remote_set-url
+		;;
+	get-url,--*)
+		__gitcomp_builtin remote_get-url
+		;;
+	prune,--*)
+		__gitcomp_builtin remote_prune
+		;;
+	*)
+		__gitcomp_nl "$(__git_remotes)"
+		;;
+	esac
+}
+
+_git_replace ()
+{
+	case "$cur" in
+	--format=*)
+		__gitcomp "short medium long" "" "${cur##--format=}"
+		return
+		;;
+	--*)
+		__gitcomp_builtin replace
+		return
+		;;
+	esac
+	__git_complete_refs
+}
+
+_git_rerere ()
+{
+	local subcommands="clear forget diff remaining status gc"
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+	if test -z "$subcommand"
+	then
+		__gitcomp "$subcommands"
+		return
+	fi
+}
+
+_git_reset ()
+{
+	__git_has_doubledash && return
+
+	case "$cur" in
+	--*)
+		__gitcomp_builtin reset
+		return
+		;;
+	esac
+	__git_complete_refs
+}
+
+_git_restore ()
+{
+	case "$cur" in
+	--conflict=*)
+		__gitcomp "diff3 merge" "" "${cur##--conflict=}"
+		;;
+	--source=*)
+		__git_complete_refs --cur="${cur##--source=}"
+		;;
+	--*)
+		__gitcomp_builtin restore
+		;;
+	esac
+}
+
+__git_revert_inprogress_options=$__git_sequencer_inprogress_options
+
+_git_revert ()
+{
+	__git_find_repo_path
+	if [ -f "$__git_repo_path"/REVERT_HEAD ]; then
+		__gitcomp "$__git_revert_inprogress_options"
+		return
+	fi
+	__git_complete_strategy && return
+	case "$cur" in
+	--*)
+		__gitcomp_builtin revert "" \
+			"$__git_revert_inprogress_options"
+		return
+		;;
+	esac
+	__git_complete_refs
+}
+
+_git_rm ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin rm
+		return
+		;;
+	esac
+
+	__git_complete_index_file "--cached"
+}
+
+_git_shortlog ()
+{
+	__git_has_doubledash && return
+
+	case "$cur" in
+	--*)
+		__gitcomp "
+			$__git_log_common_options
+			$__git_log_shortlog_options
+			--numbered --summary --email
+			"
+		return
+		;;
+	esac
+	__git_complete_revlist
+}
+
+_git_show ()
+{
+	__git_has_doubledash && return
+
+	case "$cur" in
+	--pretty=*|--format=*)
+		__gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
+			" "" "${cur#*=}"
+		return
+		;;
+	--diff-algorithm=*)
+		__gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}"
+		return
+		;;
+	--submodule=*)
+		__gitcomp "$__git_diff_submodule_formats" "" "${cur##--submodule=}"
+		return
+		;;
+	--*)
+		__gitcomp "--pretty= --format= --abbrev-commit --no-abbrev-commit
+			--oneline --show-signature --patch
+			--expand-tabs --expand-tabs= --no-expand-tabs
+			$__git_diff_common_options
+			"
+		return
+		;;
+	esac
+	__git_complete_revlist_file
+}
+
+_git_show_branch ()
+{
+	case "$cur" in
+	--*)
+		__gitcomp_builtin show-branch
+		return
+		;;
+	esac
+	__git_complete_revlist
+}
+
+_git_sparse_checkout ()
+{
+	local subcommands="list init set disable"
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+	if [ -z "$subcommand" ]; then
+		__gitcomp "$subcommands"
+		return
+	fi
+
+	case "$subcommand,$cur" in
+	init,--*)
+		__gitcomp "--cone"
+		;;
+	set,--*)
+		__gitcomp "--stdin"
+		;;
+	*)
+		;;
+	esac
+}
+
+_git_stash ()
+{
+	local save_opts='--all --keep-index --no-keep-index --quiet --patch --include-untracked'
+	local subcommands='push list show apply clear drop pop create branch'
+	local subcommand="$(__git_find_on_cmdline "$subcommands save")"
+	if [ -n "$(__git_find_on_cmdline "-p")" ]; then
+		subcommand="push"
+	fi
+	if [ -z "$subcommand" ]; then
+		case "$cur" in
+		--*)
+			__gitcomp "$save_opts"
+			;;
+		sa*)
+			if [ -z "$(__git_find_on_cmdline "$save_opts")" ]; then
+				__gitcomp "save"
+			fi
+			;;
+		*)
+			if [ -z "$(__git_find_on_cmdline "$save_opts")" ]; then
+				__gitcomp "$subcommands"
+			fi
+			;;
+		esac
+	else
+		case "$subcommand,$cur" in
+		push,--*)
+			__gitcomp "$save_opts --message"
+			;;
+		save,--*)
+			__gitcomp "$save_opts"
+			;;
+		apply,--*|pop,--*)
+			__gitcomp "--index --quiet"
+			;;
+		drop,--*)
+			__gitcomp "--quiet"
+			;;
+		list,--*)
+			__gitcomp "--name-status --oneline --patch-with-stat"
+			;;
+		show,--*|branch,--*)
+			;;
+		branch,*)
+			if [ $cword -eq 3 ]; then
+				__git_complete_refs
+			else
+				__gitcomp_nl "$(__git stash list \
+						| sed -n -e 's/:.*//p')"
+			fi
+			;;
+		show,*|apply,*|drop,*|pop,*)
+			__gitcomp_nl "$(__git stash list \
+					| sed -n -e 's/:.*//p')"
+			;;
+		*)
+			;;
+		esac
+	fi
+}
+
+_git_submodule ()
+{
+	__git_has_doubledash && return
+
+	local subcommands="add status init deinit update set-branch set-url summary foreach sync absorbgitdirs"
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+	if [ -z "$subcommand" ]; then
+		case "$cur" in
+		--*)
+			__gitcomp "--quiet"
+			;;
+		*)
+			__gitcomp "$subcommands"
+			;;
+		esac
+		return
+	fi
+
+	case "$subcommand,$cur" in
+	add,--*)
+		__gitcomp "--branch --force --name --reference --depth"
+		;;
+	status,--*)
+		__gitcomp "--cached --recursive"
+		;;
+	deinit,--*)
+		__gitcomp "--force --all"
+		;;
+	update,--*)
+		__gitcomp "
+			--init --remote --no-fetch
+			--recommend-shallow --no-recommend-shallow
+			--force --rebase --merge --reference --depth --recursive --jobs
+		"
+		;;
+	set-branch,--*)
+		__gitcomp "--default --branch"
+		;;
+	summary,--*)
+		__gitcomp "--cached --files --summary-limit"
+		;;
+	foreach,--*|sync,--*)
+		__gitcomp "--recursive"
+		;;
+	*)
+		;;
+	esac
+}
+
+_git_svn ()
+{
+	local subcommands="
+		init fetch clone rebase dcommit log find-rev
+		set-tree commit-diff info create-ignore propget
+		proplist show-ignore show-externals branch tag blame
+		migrate mkdirs reset gc
+		"
+	local subcommand="$(__git_find_on_cmdline "$subcommands")"
+	if [ -z "$subcommand" ]; then
+		__gitcomp "$subcommands"
+	else
+		local remote_opts="--username= --config-dir= --no-auth-cache"
+		local fc_opts="
+			--follow-parent --authors-file= --repack=
+			--no-metadata --use-svm-props --use-svnsync-props
+			--log-window-size= --no-checkout --quiet
+			--repack-flags --use-log-author --localtime
+			--add-author-from
+			--recursive
+			--ignore-paths= --include-paths= $remote_opts
+			"
+		local init_opts="
+			--template= --shared= --trunk= --tags=
+			--branches= --stdlayout --minimize-url
+			--no-metadata --use-svm-props --use-svnsync-props
+			--rewrite-root= --prefix= $remote_opts
+			"
+		local cmt_opts="
+			--edit --rmdir --find-copies-harder --copy-similarity=
+			"
+
+		case "$subcommand,$cur" in
+		fetch,--*)
+			__gitcomp "--revision= --fetch-all $fc_opts"
+			;;
+		clone,--*)
+			__gitcomp "--revision= $fc_opts $init_opts"
+			;;
+		init,--*)
+			__gitcomp "$init_opts"
+			;;
+		dcommit,--*)
+			__gitcomp "
+				--merge --strategy= --verbose --dry-run
+				--fetch-all --no-rebase --commit-url
+				--revision --interactive $cmt_opts $fc_opts
+				"
+			;;
+		set-tree,--*)
+			__gitcomp "--stdin $cmt_opts $fc_opts"
+			;;
+		create-ignore,--*|propget,--*|proplist,--*|show-ignore,--*|\
+		show-externals,--*|mkdirs,--*)
+			__gitcomp "--revision="
+			;;
+		log,--*)
+			__gitcomp "
+				--limit= --revision= --verbose --incremental
+				--oneline --show-commit --non-recursive
+				--authors-file= --color
+				"
+			;;
+		rebase,--*)
+			__gitcomp "
+				--merge --verbose --strategy= --local
+				--fetch-all --dry-run $fc_opts
+				"
+			;;
+		commit-diff,--*)
+			__gitcomp "--message= --file= --revision= $cmt_opts"
+			;;
+		info,--*)
+			__gitcomp "--url"
+			;;
+		branch,--*)
+			__gitcomp "--dry-run --message --tag"
+			;;
+		tag,--*)
+			__gitcomp "--dry-run --message"
+			;;
+		blame,--*)
+			__gitcomp "--git-format"
+			;;
+		migrate,--*)
+			__gitcomp "
+				--config-dir= --ignore-paths= --minimize
+				--no-auth-cache --username=
+				"
+			;;
+		reset,--*)
+			__gitcomp "--revision= --parent"
+			;;
+		*)
+			;;
+		esac
+	fi
+}
+
+_git_tag ()
+{
+	local i c=1 f=0
+	while [ $c -lt $cword ]; do
+		i="${words[c]}"
+		case "$i" in
+		-d|--delete|-v|--verify)
+			__gitcomp_direct "$(__git_tags "" "$cur" " ")"
+			return
+			;;
+		-f)
+			f=1
+			;;
+		esac
+		((c++))
+	done
+
+	case "$prev" in
+	-m|-F)
+		;;
+	-*|tag)
+		if [ $f = 1 ]; then
+			__gitcomp_direct "$(__git_tags "" "$cur" " ")"
+		fi
+		;;
+	*)
+		__git_complete_refs
+		;;
+	esac
+
+	case "$cur" in
+	--*)
+		__gitcomp_builtin tag
+		;;
+	esac
+}
+
+_git_whatchanged ()
+{
+	_git_log
+}
+
+__git_complete_worktree_paths ()
+{
+	local IFS=$'\n'
+	__gitcomp_nl "$(git worktree list --porcelain |
+		# Skip the first entry: it's the path of the main worktree,
+		# which can't be moved, removed, locked, etc.
+		sed -n -e '2,$ s/^worktree //p')"
+}
+
+_git_worktree ()
+{
+	local subcommands="add list lock move prune remove unlock"
+	local subcommand subcommand_idx
+
+	subcommand="$(__git_find_on_cmdline --show-idx "$subcommands")"
+	subcommand_idx="${subcommand% *}"
+	subcommand="${subcommand#* }"
+
+	case "$subcommand,$cur" in
+	,*)
+		__gitcomp "$subcommands"
+		;;
+	*,--*)
+		__gitcomp_builtin worktree_$subcommand
+		;;
+	add,*)	# usage: git worktree add [<options>] <path> [<commit-ish>]
+		# Here we are not completing an --option, it's either the
+		# path or a ref.
+		case "$prev" in
+		-b|-B)	# Complete refs for the branch to be created/reset.
+			__git_complete_refs
+			;;
+		-*)	# The previous word is an -o|--option without an
+			# unstuck argument: have to complete the path for
+			# the new worktree, so don't list anything, but let
+			# Bash fall back to filename completion.
+			;;
+		*)	# The previous word is not an --option, so it must
+			# be either the 'add' subcommand, the unstuck
+			# argument of an option (e.g. branch for -b|-B), or
+			# the path for the new worktree.
+			if [ $cword -eq $((subcommand_idx+1)) ]; then
+				# Right after the 'add' subcommand: have to
+				# complete the path, so fall back to Bash
+				# filename completion.
+				:
+			else
+				case "${words[cword-2]}" in
+				-b|-B)	# After '-b <branch>': have to
+					# complete the path, so fall back
+					# to Bash filename completion.
+					;;
+				*)	# After the path: have to complete
+					# the ref to be checked out.
+					__git_complete_refs
+					;;
+				esac
+			fi
+			;;
+		esac
+		;;
+	lock,*|remove,*|unlock,*)
+		__git_complete_worktree_paths
+		;;
+	move,*)
+		if [ $cword -eq $((subcommand_idx+1)) ]; then
+			# The first parameter must be an existing working
+			# tree to be moved.
+			__git_complete_worktree_paths
+		else
+			# The second parameter is the destination: it could
+			# be any path, so don't list anything, but let Bash
+			# fall back to filename completion.
+			:
+		fi
+		;;
+	esac
+}
+
+__git_complete_common () {
+	local command="$1"
+
+	case "$cur" in
+	--*)
+		__gitcomp_builtin "$command"
+		;;
+	esac
+}
+
+__git_cmds_with_parseopt_helper=
+__git_support_parseopt_helper () {
+	test -n "$__git_cmds_with_parseopt_helper" ||
+		__git_cmds_with_parseopt_helper="$(__git --list-cmds=parseopt)"
+
+	case " $__git_cmds_with_parseopt_helper " in
+	*" $1 "*)
+		return 0
+		;;
+	*)
+		return 1
+		;;
+	esac
+}
+
+__git_complete_command () {
+	local command="$1"
+	local completion_func="_git_${command//-/_}"
+	if ! declare -f $completion_func >/dev/null 2>/dev/null &&
+		declare -f _completion_loader >/dev/null 2>/dev/null
+	then
+		_completion_loader "git-$command"
+	fi
+	if declare -f $completion_func >/dev/null 2>/dev/null
+	then
+		$completion_func
+		return 0
+	elif __git_support_parseopt_helper "$command"
+	then
+		__git_complete_common "$command"
+		return 0
+	else
+		return 1
+	fi
+}
+
+__git_main ()
+{
+	local i c=1 command __git_dir __git_repo_path
+	local __git_C_args C_args_count=0
+
+	while [ $c -lt $cword ]; do
+		i="${words[c]}"
+		case "$i" in
+		--git-dir=*) __git_dir="${i#--git-dir=}" ;;
+		--git-dir)   ((c++)) ; __git_dir="${words[c]}" ;;
+		--bare)      __git_dir="." ;;
+		--help) command="help"; break ;;
+		-c|--work-tree|--namespace) ((c++)) ;;
+		-C)	__git_C_args[C_args_count++]=-C
+			((c++))
+			__git_C_args[C_args_count++]="${words[c]}"
+			;;
+		-*) ;;
+		*) command="$i"; break ;;
+		esac
+		((c++))
+	done
+
+	if [ -z "$command" ]; then
+		case "$prev" in
+		--git-dir|-C|--work-tree)
+			# these need a path argument, let's fall back to
+			# Bash filename completion
+			return
+			;;
+		-c)
+			__git_complete_config_variable_name_and_value
+			return
+			;;
+		--namespace)
+			# we don't support completing these options' arguments
+			return
+			;;
+		esac
+		case "$cur" in
+		--*)   __gitcomp "
+			--paginate
+			--no-pager
+			--git-dir=
+			--bare
+			--version
+			--exec-path
+			--exec-path=
+			--html-path
+			--man-path
+			--info-path
+			--work-tree=
+			--namespace=
+			--no-replace-objects
+			--help
+			"
+			;;
+		*)
+			if test -n "$GIT_TESTING_PORCELAIN_COMMAND_LIST"
+			then
+				__gitcomp "$GIT_TESTING_PORCELAIN_COMMAND_LIST"
+			else
+				__gitcomp "$(__git --list-cmds=list-mainporcelain,others,nohelpers,alias,list-complete,config)"
+			fi
+			;;
+		esac
+		return
+	fi
+
+	__git_complete_command "$command" && return
+
+	local expansion=$(__git_aliased_command "$command")
+	if [ -n "$expansion" ]; then
+		words[1]=$expansion
+		__git_complete_command "$expansion"
+	fi
+}
+
+__gitk_main ()
+{
+	__git_has_doubledash && return
+
+	local __git_repo_path
+	__git_find_repo_path
+
+	local merge=""
+	if [ -f "$__git_repo_path/MERGE_HEAD" ]; then
+		merge="--merge"
+	fi
+	case "$cur" in
+	--*)
+		__gitcomp "
+			$__git_log_common_options
+			$__git_log_gitk_options
+			$merge
+			"
+		return
+		;;
+	esac
+	__git_complete_revlist
+}
+
+if [[ -n ${ZSH_VERSION-} ]] &&
+   # Don't define these functions when sourced from 'git-completion.zsh';
+   # it has its own implementations.
+   [[ -z ${GIT_SOURCING_ZSH_COMPLETION-} ]]; then
+	echo "WARNING: this script is deprecated, please see git-completion.zsh" 1>&2
+
+	autoload -U +X compinit && compinit
+
+	__gitcomp ()
+	{
+		emulate -L zsh
+
+		local cur_="${3-$cur}"
+
+		case "$cur_" in
+		--*=)
+			;;
+		*)
+			local c IFS=$' \t\n'
+			local -a array
+			for c in ${=1}; do
+				c="$c${4-}"
+				case $c in
+				--*=*|*.) ;;
+				*) c="$c " ;;
+				esac
+				array[${#array[@]}+1]="$c"
+			done
+			compset -P '*[=:]'
+			compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
+			;;
+		esac
+	}
+
+	__gitcomp_direct ()
+	{
+		emulate -L zsh
+
+		local IFS=$'\n'
+		compset -P '*[=:]'
+		compadd -Q -- ${=1} && _ret=0
+	}
+
+	__gitcomp_nl ()
+	{
+		emulate -L zsh
+
+		local IFS=$'\n'
+		compset -P '*[=:]'
+		compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+	}
+
+	__gitcomp_file_direct ()
+	{
+		emulate -L zsh
+
+		local IFS=$'\n'
+		compset -P '*[=:]'
+		compadd -f -- ${=1} && _ret=0
+	}
+
+	__gitcomp_file ()
+	{
+		emulate -L zsh
+
+		local IFS=$'\n'
+		compset -P '*[=:]'
+		compadd -p "${2-}" -f -- ${=1} && _ret=0
+	}
+
+	_git ()
+	{
+		local _ret=1 cur cword prev
+		cur=${words[CURRENT]}
+		prev=${words[CURRENT-1]}
+		let cword=CURRENT-1
+		emulate ksh -c __${service}_main
+		let _ret && _default && _ret=0
+		return _ret
+	}
+
+	compdef _git git gitk
+	return
+fi
+
+__git_func_wrap ()
+{
+	local cur words cword prev
+	_get_comp_words_by_ref -n =: cur words cword prev
+	$1
+}
+
+# Set up completion for certain functions defined above by setting common
+# variables and workarounds.
+# This is NOT a public function; use at your own risk.
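+#
+# For example, a user with a shell alias "g" for git can enable completion
+# for it by running "__git_complete g __git_main" after sourcing this script.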
+__git_complete ()
+{
+	local wrapper="__git_wrap${2}"
+	eval "$wrapper () { __git_func_wrap $2 ; }"
+	complete -o bashdefault -o default -o nospace -F $wrapper $1 2>/dev/null \
+		|| complete -o default -o nospace -F $wrapper $1
+}
+
+# wrapper for backwards compatibility
+_git ()
+{
+	__git_wrap__git_main
+}
+
+# wrapper for backwards compatibility
+_gitk ()
+{
+	__git_wrap__gitk_main
+}
+
+__git_complete git __git_main
+__git_complete gitk __gitk_main
+
+# The following are necessary only for Cygwin, and are only needed
+# when the user has tab-completed the executable name and consequently
+# included the '.exe' suffix.
+#
+if [ Cygwin = "$(uname -o 2>/dev/null)" ]; then
+__git_complete git.exe __git_main
+fi
diff --git a/third_party/git/contrib/completion/git-completion.tcsh b/third_party/git/contrib/completion/git-completion.tcsh
new file mode 100644
index 000000000000..4a790d8f4ec1
--- /dev/null
+++ b/third_party/git/contrib/completion/git-completion.tcsh
@@ -0,0 +1,126 @@
+# tcsh completion support for core Git.
+#
+# Copyright (C) 2012 Marc Khouzam <marc.khouzam@gmail.com>
+# Distributed under the GNU General Public License, version 2.0.
+#
+# When sourced, this script will generate a new script that uses
+# the git-completion.bash script provided by core Git.  This new
+# script can be used by tcsh to perform git completion.
+# The current script also issues the necessary tcsh 'complete'
+# commands.
+#
+# To use this completion script:
+#
+#    0) You need tcsh 6.16.00 or newer.
+#    1) Copy both this file and the bash completion script to ${HOME}.
+#       You _must_ use the name ${HOME}/.git-completion.bash for the
+#       bash script.
+#       (e.g. ~/.git-completion.tcsh and ~/.git-completion.bash).
+#    2) Add the following line to your .tcshrc/.cshrc:
+#        source ~/.git-completion.tcsh
+#    3) For completion similar to bash, it is recommended to also
+#       add the following line to your .tcshrc/.cshrc:
+#        set autolist=ambiguous
+#       It will tell tcsh to list the possible completion choices.
+
+set __git_tcsh_completion_version = `\echo ${tcsh} | \sed 's/\./ /g'`
+if ( ${__git_tcsh_completion_version[1]} < 6 || \
+     ( ${__git_tcsh_completion_version[1]} == 6 && \
+       ${__git_tcsh_completion_version[2]} < 16 ) ) then
+	echo "git-completion.tcsh: Your version of tcsh is too old, you need version 6.16.00 or newer.  Git completion will not work."
+	exit
+endif
+unset __git_tcsh_completion_version
+
+set __git_tcsh_completion_original_script = ${HOME}/.git-completion.bash
+set __git_tcsh_completion_script = ${HOME}/.git-completion.tcsh.bash
+
+# Check that the user put the script in the right place
+if ( ! -e ${__git_tcsh_completion_original_script} ) then
+	echo "git-completion.tcsh: Cannot find: ${__git_tcsh_completion_original_script}.  Git completion will not work."
+	exit
+endif
+
+cat << EOF >! ${__git_tcsh_completion_script}
+#!bash
+#
+# This script is GENERATED and will be overwritten automatically.
+# Do not modify it directly.  Instead, modify git-completion.tcsh
+# and source it again.
+
+source ${__git_tcsh_completion_original_script}
+
+# Remove the colon as a completion separator because tcsh cannot handle it
+COMP_WORDBREAKS=\${COMP_WORDBREAKS//:}
+
+# For file completion, tcsh needs the '/' to be appended to directories.
+# By default, the bash script does not do that.
+# We can achieve this by using the below compatibility
+# method of the git-completion.bash script.
+__git_index_file_list_filter ()
+{
+	__git_index_file_list_filter_compat
+}
+
+# Set COMP_WORDS in a way that can be handled by the bash script.
+COMP_WORDS=(\$2)
+
+# The cursor is at the end of parameter #2.
+# We must check for a space as the last character which will
+# tell us that the previous word is complete and the cursor
+# is on the next word.
+if [ "\${2: -1}" == " " ]; then
+	# The last character is a space, so our location is at the end
+	# of the command-line array
+	COMP_CWORD=\${#COMP_WORDS[@]}
+else
+	# The last character is not a space, so our location is on the
+	# last word of the command-line array, so we must decrement the
+	# count by 1
+	COMP_CWORD=\$((\${#COMP_WORDS[@]}-1))
+fi
+
+# Call _git() or _gitk() of the bash script, based on the first argument
+_\${1}
+
+IFS=\$'\n'
+if [ \${#COMPREPLY[*]} -eq 0 ]; then
+	# No completions suggested.  In this case, we want tcsh to perform
+	# standard file completion.  However, there does not seem to be way
+	# to tell tcsh to do that.  To help the user, we try to simulate
+	# file completion directly in this script.
+	#
+	# Known issues:
+	#     - Possible completions are shown with their directory prefix.
+	#     - Completions containing shell variables are not handled.
+	#     - Completions with ~ as the first character are not handled.
+
+	# No file completion should be done unless we are completing beyond
+	# the git sub-command.  An improvement on the bash completion :)
+	if [ \${COMP_CWORD} -gt 1 ]; then
+		TO_COMPLETE="\${COMP_WORDS[\${COMP_CWORD}]}"
+
+		# We don't support ~ expansion: too tricky.
+		if [ "\${TO_COMPLETE:0:1}" != "~" ]; then
+			# Use ls so as to add the '/' at the end of directories.
+			COMPREPLY=(\`ls -dp \${TO_COMPLETE}* 2> /dev/null\`)
+		fi
+	fi
+fi
+
+# tcsh does not automatically remove duplicates, so we do it ourselves
+echo "\${COMPREPLY[*]}" | sort | uniq
+
+# If there is a single completion and it is a directory, we output it
+# a second time to trick tcsh into not adding a space after it.
+if [ \${#COMPREPLY[*]} -eq 1 ] && [ "\${COMPREPLY[0]: -1}" == "/" ]; then
+	echo "\${COMPREPLY[*]}"
+fi
+
+EOF
+
+# Don't need this variable anymore, so don't pollute the user's environment
+unset __git_tcsh_completion_original_script
+
+complete git  'p,*,`bash ${__git_tcsh_completion_script} git "${COMMAND_LINE}"`,'
+complete gitk 'p,*,`bash ${__git_tcsh_completion_script} gitk "${COMMAND_LINE}"`,'
diff --git a/third_party/git/contrib/completion/git-completion.zsh b/third_party/git/contrib/completion/git-completion.zsh
new file mode 100644
index 000000000000..eef4eff53dff
--- /dev/null
+++ b/third_party/git/contrib/completion/git-completion.zsh
@@ -0,0 +1,244 @@
+#compdef git gitk
+
+# zsh completion wrapper for git
+#
+# Copyright (c) 2012-2013 Felipe Contreras <felipe.contreras@gmail.com>
+#
+# You need git's bash completion script installed somewhere; by default it
+# would be the location bash-completion uses.
+#
+# If your script is somewhere else, you can configure it on your ~/.zshrc:
+#
+#  zstyle ':completion:*:*:git:*' script ~/.git-completion.zsh
+#
+# The recommended way to install this script is to make a copy of it in
+# ~/.zsh/ directory as ~/.zsh/git-completion.zsh and then add the following
+# to your ~/.zshrc file:
+#
+#  fpath=(~/.zsh $fpath)
+
+complete ()
+{
+	# do nothing
+	return 0
+}
+
+zstyle -T ':completion:*:*:git:*' tag-order && \
+	zstyle ':completion:*:*:git:*' tag-order 'common-commands'
+
+zstyle -s ":completion:*:*:git:*" script script
+if [ -z "$script" ]; then
+	local -a locations
+	local e
+	locations=(
+		$(dirname ${funcsourcetrace[1]%:*})/git-completion.bash
+		'/etc/bash_completion.d/git' # fedora, old debian
+		'/usr/share/bash-completion/completions/git' # arch, ubuntu, new debian
+		'/usr/share/bash-completion/git' # gentoo
+		)
+	for e in $locations; do
+		test -f $e && script="$e" && break
+	done
+fi
+GIT_SOURCING_ZSH_COMPLETION=y . "$script"
+
+__gitcomp ()
+{
+	emulate -L zsh
+
+	local cur_="${3-$cur}"
+
+	case "$cur_" in
+	--*=)
+		;;
+	*)
+		local c IFS=$' \t\n'
+		local -a array
+		for c in ${=1}; do
+			c="$c${4-}"
+			case $c in
+			--*=*|*.) ;;
+			*) c="$c " ;;
+			esac
+			array+=("$c")
+		done
+		compset -P '*[=:]'
+		compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
+		;;
+	esac
+}
+
+__gitcomp_direct ()
+{
+	emulate -L zsh
+
+	local IFS=$'\n'
+	compset -P '*[=:]'
+	compadd -Q -- ${=1} && _ret=0
+}
+
+__gitcomp_nl ()
+{
+	emulate -L zsh
+
+	local IFS=$'\n'
+	compset -P '*[=:]'
+	compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+}
+
+__gitcomp_nl_append ()
+{
+	emulate -L zsh
+
+	local IFS=$'\n'
+	compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
+}
+
+__gitcomp_file_direct ()
+{
+	emulate -L zsh
+
+	local IFS=$'\n'
+	compset -P '*[=:]'
+	compadd -f -- ${=1} && _ret=0
+}
+
+__gitcomp_file ()
+{
+	emulate -L zsh
+
+	local IFS=$'\n'
+	compset -P '*[=:]'
+	compadd -p "${2-}" -f -- ${=1} && _ret=0
+}
+
+__git_zsh_bash_func ()
+{
+	emulate -L ksh
+
+	local command=$1
+
+	local completion_func="_git_${command//-/_}"
+	declare -f $completion_func >/dev/null && $completion_func && return
+
+	local expansion=$(__git_aliased_command "$command")
+	if [ -n "$expansion" ]; then
+		words[1]=$expansion
+		completion_func="_git_${expansion//-/_}"
+		declare -f $completion_func >/dev/null && $completion_func
+	fi
+}
+
+__git_zsh_cmd_common ()
+{
+	local -a list
+	list=(
+	add:'add file contents to the index'
+	bisect:'find by binary search the change that introduced a bug'
+	branch:'list, create, or delete branches'
+	checkout:'checkout a branch or paths to the working tree'
+	clone:'clone a repository into a new directory'
+	commit:'record changes to the repository'
+	diff:'show changes between commits, commit and working tree, etc'
+	fetch:'download objects and refs from another repository'
+	grep:'print lines matching a pattern'
+	init:'create an empty Git repository or reinitialize an existing one'
+	log:'show commit logs'
+	merge:'join two or more development histories together'
+	mv:'move or rename a file, a directory, or a symlink'
+	pull:'fetch from and merge with another repository or a local branch'
+	push:'update remote refs along with associated objects'
+	rebase:'forward-port local commits to the updated upstream head'
+	reset:'reset current HEAD to the specified state'
+	rm:'remove files from the working tree and from the index'
+	show:'show various types of objects'
+	status:'show the working tree status'
+	tag:'create, list, delete or verify a tag object signed with GPG')
+	_describe -t common-commands 'common commands' list && _ret=0
+}
+
+__git_zsh_cmd_alias ()
+{
+	local -a list
+	list=(${${${(0)"$(git config -z --get-regexp '^alias\.')"}#alias.}%$'\n'*})
+	_describe -t alias-commands 'aliases' list $* && _ret=0
+}
+
+__git_zsh_cmd_all ()
+{
+	local -a list
+	emulate ksh -c __git_compute_all_commands
+	list=( ${=__git_all_commands} )
+	_describe -t all-commands 'all commands' list && _ret=0
+}
+
+__git_zsh_main ()
+{
+	local curcontext="$curcontext" state state_descr line
+	typeset -A opt_args
+	local -a orig_words
+
+	orig_words=( ${words[@]} )
+
+	_arguments -C \
+		'(-p --paginate --no-pager)'{-p,--paginate}'[pipe all output into ''less'']' \
+		'(-p --paginate)--no-pager[do not pipe git output into a pager]' \
+		'--git-dir=-[set the path to the repository]: :_directories' \
+		'--bare[treat the repository as a bare repository]' \
+		'(- :)--version[prints the git suite version]' \
+		'--exec-path=-[path to where your core git programs are installed]:: :_directories' \
+		'--html-path[print the path where git''s HTML documentation is installed]' \
+		'--info-path[print the path where the Info files are installed]' \
+		'--man-path[print the manpath (see `man(1)`) for the man pages]' \
+		'--work-tree=-[set the path to the working tree]: :_directories' \
+		'--namespace=-[set the git namespace]' \
+		'--no-replace-objects[do not use replacement refs to replace git objects]' \
+		'(- :)--help[prints the synopsis and a list of the most commonly used commands]: :->arg' \
+		'(-): :->command' \
+		'(-)*:: :->arg' && return
+
+	case $state in
+	(command)
+		_alternative \
+			'alias-commands:alias:__git_zsh_cmd_alias' \
+			'common-commands:common:__git_zsh_cmd_common' \
+			'all-commands:all:__git_zsh_cmd_all' && _ret=0
+		;;
+	(arg)
+		local command="${words[1]}" __git_dir
+
+		if (( $+opt_args[--bare] )); then
+			__git_dir='.'
+		else
+			__git_dir=${opt_args[--git-dir]}
+		fi
+
+		(( $+opt_args[--help] )) && command='help'
+
+		words=( ${orig_words[@]} )
+
+		__git_zsh_bash_func $command
+		;;
+	esac
+}
+
+_git ()
+{
+	local _ret=1
+	local cur cword prev
+
+	cur=${words[CURRENT]}
+	prev=${words[CURRENT-1]}
+	let cword=CURRENT-1
+
+	if (( $+functions[__${service}_zsh_main] )); then
+		__${service}_zsh_main
+	else
+		emulate ksh -c __${service}_main
+	fi
+
+	let _ret && _default && _ret=0
+	return _ret
+}
+
+_git
diff --git a/third_party/git/contrib/completion/git-prompt.sh b/third_party/git/contrib/completion/git-prompt.sh
new file mode 100644
index 000000000000..014cd7c3cfcc
--- /dev/null
+++ b/third_party/git/contrib/completion/git-prompt.sh
@@ -0,0 +1,560 @@
+# bash/zsh git prompt support
+#
+# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
+# Distributed under the GNU General Public License, version 2.0.
+#
+# This script allows you to see repository status in your prompt.
+#
+# To enable:
+#
+#    1) Copy this file to somewhere (e.g. ~/.git-prompt.sh).
+#    2) Add the following line to your .bashrc/.zshrc:
+#        source ~/.git-prompt.sh
+#    3a) Change your PS1 to call __git_ps1 as
+#        command-substitution:
+#        Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
+#        ZSH:  setopt PROMPT_SUBST ; PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
+#        the optional argument will be used as the format string.
+#    3b) Alternatively, for a slightly faster prompt, __git_ps1 can
+#        be used for PROMPT_COMMAND in Bash or for precmd() in Zsh
+#        with two parameters, <pre> and <post>, which are strings
+#        you would put in $PS1 before and after the status string
+#        generated by the git-prompt machinery.  e.g.
+#        Bash: PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "'
+#          will show username, at-sign, host, colon, cwd, then the
+#          status string, followed by dollar and SP, as your prompt.
+#        ZSH:  precmd () { __git_ps1 "%n" ":%~$ " "|%s" }
+#          will show username, pipe, then the status string, followed
+#          by colon, cwd, dollar and SP, as your prompt.
+#        Optionally, you can supply a third argument with a printf
+#        format string to fine-tune the output of the branch status.
+#
+# The repository status will be displayed only if you are currently in a
+# git repository. The %s token is the placeholder for the shown status.
+#
+# The prompt status always includes the current branch name.
+#
+# In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value,
+# unstaged (*) and staged (+) changes will be shown next to the branch
+# name.  You can configure this per-repository with the
+# bash.showDirtyState variable, which defaults to true once
+# GIT_PS1_SHOWDIRTYSTATE is enabled.
+#
+# You can also see whether something is currently stashed by setting
+# GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed,
+# then a '$' will be shown next to the branch name.
+#
+# If you would like to see whether there are untracked files, then you can
+# set GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there are
+# untracked files, then a '%' will be shown next to the branch name.  You can
+# configure this per-repository with the bash.showUntrackedFiles
+# variable, which defaults to true once GIT_PS1_SHOWUNTRACKEDFILES is
+# enabled.
+#
+# If you would like to see the difference between HEAD and its upstream,
+# set GIT_PS1_SHOWUPSTREAM="auto".  A "<" indicates you are behind, ">"
+# indicates you are ahead, "<>" indicates you have diverged and "="
+# indicates that there is no difference. You can further control
+# behaviour by setting GIT_PS1_SHOWUPSTREAM to a space-separated list
+# of values:
+#
+#     verbose       show number of commits ahead/behind (+/-) upstream
+#     name          if verbose, then also show the upstream abbrev name
+#     legacy        don't use the '--count' option available in recent
+#                   versions of git-rev-list
+#     git           always compare HEAD to @{upstream}
+#     svn           always compare HEAD to your SVN upstream
+#
+# You can change the separator between the branch name and the above
+# state symbols by setting GIT_PS1_STATESEPARATOR. The default separator
+# is SP.
+#
+# By default, __git_ps1 will compare HEAD to your SVN upstream if it can
+# find one, or @{upstream} otherwise.  Once you have set
+# GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by
+# setting the bash.showUpstream config variable.
+#
+# If you would like to see more information about the identity of
+# commits checked out as a detached HEAD, set GIT_PS1_DESCRIBE_STYLE
+# to one of these values:
+#
+#     contains      relative to newer annotated tag (v1.6.3.2~35)
+#     branch        relative to newer tag or branch (master~4)
+#     describe      relative to older annotated tag (v1.6.3.1-13-gdd42c2f)
+#     tag           relative to any older tag (v1.6.3.1-13-gdd42c2f)
+#     default       exactly matching tag
+#
+# If you would like a colored hint about the current dirty state, set
+# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on
+# the colored output of "git status -sb" and are available only when
+# using __git_ps1 for PROMPT_COMMAND or precmd.
+#
+# If you would like __git_ps1 to do nothing in the case when the current
+# directory is set up to be ignored by git, then set
+# GIT_PS1_HIDE_IF_PWD_IGNORED to a nonempty value. Override this on the
+# repository level by setting bash.hideIfPwdIgnored to "false".
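+#
+# Putting the pieces above together, a bash setup might look like the
+# following sketch (the chosen options and prompt layout are only examples,
+# not defaults of this script):
+#
+#     source ~/.git-prompt.sh
+#     GIT_PS1_SHOWDIRTYSTATE=1
+#     GIT_PS1_SHOWUPSTREAM="auto verbose"
+#     GIT_PS1_SHOWCOLORHINTS=1
+#     PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ " " (%s)"'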
+
+# check whether printf supports -v
+__git_printf_supports_v=
+printf -v __git_printf_supports_v -- '%s' yes >/dev/null 2>&1
+
+# stores the divergence from upstream in $p
+# used by GIT_PS1_SHOWUPSTREAM
+__git_ps1_show_upstream ()
+{
+	local key value
+	local svn_remote svn_url_pattern count n
+	local upstream=git legacy="" verbose="" name=""
+
+	svn_remote=()
+	# get some config options from git-config
+	local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')"
+	while read -r key value; do
+		case "$key" in
+		bash.showupstream)
+			GIT_PS1_SHOWUPSTREAM="$value"
+			if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then
+				p=""
+				return
+			fi
+			;;
+		svn-remote.*.url)
+			svn_remote[$((${#svn_remote[@]} + 1))]="$value"
+			svn_url_pattern="$svn_url_pattern\\|$value"
+			upstream=svn+git # default upstream is SVN if available, else git
+			;;
+		esac
+	done <<< "$output"
+
+	# parse configuration values
+	for option in ${GIT_PS1_SHOWUPSTREAM}; do
+		case "$option" in
+		git|svn) upstream="$option" ;;
+		verbose) verbose=1 ;;
+		legacy)  legacy=1  ;;
+		name)    name=1 ;;
+		esac
+	done
+
+	# Find our upstream
+	case "$upstream" in
+	git)    upstream="@{upstream}" ;;
+	svn*)
+		# get the upstream from the "git-svn-id: ..." in a commit message
+		# (git-svn uses essentially the same procedure internally)
+		local -a svn_upstream
+		svn_upstream=($(git log --first-parent -1 \
+					--grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
+		if [[ 0 -ne ${#svn_upstream[@]} ]]; then
+			svn_upstream=${svn_upstream[${#svn_upstream[@]} - 2]}
+			svn_upstream=${svn_upstream%@*}
+			local n_stop="${#svn_remote[@]}"
+			for ((n=1; n <= n_stop; n++)); do
+				svn_upstream=${svn_upstream#${svn_remote[$n]}}
+			done
+
+			if [[ -z "$svn_upstream" ]]; then
+				# default branch name for checkouts with no layout:
+				upstream=${GIT_SVN_ID:-git-svn}
+			else
+				upstream=${svn_upstream#/}
+			fi
+		elif [[ "svn+git" = "$upstream" ]]; then
+			upstream="@{upstream}"
+		fi
+		;;
+	esac
+
+	# Find how many commits we are ahead/behind our upstream
+	if [[ -z "$legacy" ]]; then
+		count="$(git rev-list --count --left-right \
+				"$upstream"...HEAD 2>/dev/null)"
+	else
+		# produce equivalent output to --count for older versions of git
+		local commits
+		if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)"
+		then
+			local commit behind=0 ahead=0
+			for commit in $commits
+			do
+				case "$commit" in
+				"<"*) ((behind++)) ;;
+				*)    ((ahead++))  ;;
+				esac
+			done
+			count="$behind	$ahead"
+		else
+			count=""
+		fi
+	fi
+
+	# calculate the result
+	if [[ -z "$verbose" ]]; then
+		case "$count" in
+		"") # no upstream
+			p="" ;;
+		"0	0") # equal to upstream
+			p="=" ;;
+		"0	"*) # ahead of upstream
+			p=">" ;;
+		*"	0") # behind upstream
+			p="<" ;;
+		*)	    # diverged from upstream
+			p="<>" ;;
+		esac
+	else
+		case "$count" in
+		"") # no upstream
+			p="" ;;
+		"0	0") # equal to upstream
+			p=" u=" ;;
+		"0	"*) # ahead of upstream
+			p=" u+${count#0	}" ;;
+		*"	0") # behind upstream
+			p=" u-${count%	0}" ;;
+		*)	    # diverged from upstream
+			p=" u+${count#*	}-${count%	*}" ;;
+		esac
+		if [[ -n "$count" && -n "$name" ]]; then
+			__git_ps1_upstream_name=$(git rev-parse \
+				--abbrev-ref "$upstream" 2>/dev/null)
+			if [ $pcmode = yes ] && [ $ps1_expanded = yes ]; then
+				p="$p \${__git_ps1_upstream_name}"
+			else
+				p="$p ${__git_ps1_upstream_name}"
+				# not needed anymore; keep user's
+				# environment clean
+				unset __git_ps1_upstream_name
+			fi
+		fi
+	fi
+
+}
+
+# Helper function that is meant to be called from __git_ps1.  It
+# injects color codes into the appropriate gitstring variables used
+# to build a gitstring.
+__git_ps1_colorize_gitstring ()
+{
+	if [[ -n ${ZSH_VERSION-} ]]; then
+		local c_red='%F{red}'
+		local c_green='%F{green}'
+		local c_lblue='%F{blue}'
+		local c_clear='%f'
+	else
+		# Using \[ and \] around colors is necessary to prevent
+		# issues with command line editing/browsing/completion!
+		local c_red='\[\e[31m\]'
+		local c_green='\[\e[32m\]'
+		local c_lblue='\[\e[1;34m\]'
+		local c_clear='\[\e[0m\]'
+	fi
+	local bad_color=$c_red
+	local ok_color=$c_green
+	local flags_color="$c_lblue"
+
+	local branch_color=""
+	if [ $detached = no ]; then
+		branch_color="$ok_color"
+	else
+		branch_color="$bad_color"
+	fi
+	c="$branch_color$c"
+
+	z="$c_clear$z"
+	if [ "$w" = "*" ]; then
+		w="$bad_color$w"
+	fi
+	if [ -n "$i" ]; then
+		i="$ok_color$i"
+	fi
+	if [ -n "$s" ]; then
+		s="$flags_color$s"
+	fi
+	if [ -n "$u" ]; then
+		u="$bad_color$u"
+	fi
+	r="$c_clear$r"
+}
+
+# Helper function to read the first line of a file into a variable.
+# __git_eread requires 2 arguments, the file path and the name of the
+# variable, in that order.
+__git_eread ()
+{
+	test -r "$1" && IFS=$'\r\n' read "$2" <"$1"
+}
+
+# See if a cherry-pick or revert is in progress.  If the user has committed a
+# conflict resolution with 'git commit' in the middle of a sequence of picks
+# or reverts, then CHERRY_PICK_HEAD/REVERT_HEAD will not exist, so we have to
+# read the todo file.
+__git_sequencer_status ()
+{
+	local todo
+	if test -f "$g/CHERRY_PICK_HEAD"
+	then
+		r="|CHERRY-PICKING"
+		return 0;
+	elif test -f "$g/REVERT_HEAD"
+	then
+		r="|REVERTING"
+		return 0;
+	elif __git_eread "$g/sequencer/todo" todo
+	then
+		case "$todo" in
+		p[\ \	]|pick[\ \	]*)
+			r="|CHERRY-PICKING"
+			return 0
+		;;
+		revert[\ \	]*)
+			r="|REVERTING"
+			return 0
+		;;
+		esac
+	fi
+	return 1
+}
+
+# __git_ps1 accepts 0 or 1 arguments (i.e., a format string)
+# when called from PS1 using command substitution;
+# in this mode it prints text to add to the bash PS1 prompt (includes the
+# branch name).
+#
+# __git_ps1 requires 2 or 3 arguments when called from PROMPT_COMMAND (pc);
+# in that case it _sets_ PS1.  The arguments are parts of a PS1 string.
+# When two arguments are given, the first is prepended and the second appended
+# to the state string when assigned to PS1.
+# The optional third parameter will be used as a printf format string to
+# further customize the output of the git-status string.
+# In this mode you can request colored hints using GIT_PS1_SHOWCOLORHINTS=true.
+__git_ps1 ()
+{
+	# preserve exit status
+	local exit=$?
+	local pcmode=no
+	local detached=no
+	local ps1pc_start='\u@\h:\w '
+	local ps1pc_end='\$ '
+	local printf_format=' (%s)'
+
+	case "$#" in
+		2|3)	pcmode=yes
+			ps1pc_start="$1"
+			ps1pc_end="$2"
+			printf_format="${3:-$printf_format}"
+			# set PS1 to a plain prompt so that we can
+			# simply return early if the prompt should not
+			# be decorated
+			PS1="$ps1pc_start$ps1pc_end"
+		;;
+		0|1)	printf_format="${1:-$printf_format}"
+		;;
+		*)	return $exit
+		;;
+	esac
+
+	# ps1_expanded:  This variable is set to 'yes' if the shell
+	# subjects the value of PS1 to parameter expansion:
+	#
+	#   * bash does unless the promptvars option is disabled
+	#   * zsh does not unless the PROMPT_SUBST option is set
+	#   * POSIX shells always do
+	#
+	# If the shell would expand the contents of PS1 when drawing
+	# the prompt, a raw ref name must not be included in PS1.
+	# This protects the user from arbitrary code execution via
+	# specially crafted ref names.  For example, a ref named
+	# 'refs/heads/$(IFS=_;cmd=sudo_rm_-rf_/;$cmd)' might cause the
+	# shell to execute 'sudo rm -rf /' when the prompt is drawn.
+	#
+	# Instead, the ref name should be placed in a separate global
+	# variable (in the __git_ps1_* namespace to avoid colliding
+	# with the user's environment) and that variable should be
+	# referenced from PS1.  For example:
+	#
+	#     __git_ps1_foo=$(do_something_to_get_ref_name)
+	#     PS1="...stuff...\${__git_ps1_foo}...stuff..."
+	#
+	# If the shell does not expand the contents of PS1, the raw
+	# ref name must be included in PS1.
+	#
+	# The value of this variable is only relevant when in pcmode.
+	#
+	# Assume that the shell follows the POSIX specification and
+	# expands PS1 unless determined otherwise.  (This is more
+	# likely to be correct if the user has a non-bash, non-zsh
+	# shell and safer than the alternative if the assumption is
+	# incorrect.)
+	#
+	local ps1_expanded=yes
+	[ -z "${ZSH_VERSION-}" ] || [[ -o PROMPT_SUBST ]] || ps1_expanded=no
+	[ -z "${BASH_VERSION-}" ] || shopt -q promptvars || ps1_expanded=no
+
+	local repo_info rev_parse_exit_code
+	repo_info="$(git rev-parse --git-dir --is-inside-git-dir \
+		--is-bare-repository --is-inside-work-tree \
+		--short HEAD 2>/dev/null)"
+	rev_parse_exit_code="$?"
+
+	if [ -z "$repo_info" ]; then
+		return $exit
+	fi
+
+	local short_sha=""
+	if [ "$rev_parse_exit_code" = "0" ]; then
+		short_sha="${repo_info##*$'\n'}"
+		repo_info="${repo_info%$'\n'*}"
+	fi
+	local inside_worktree="${repo_info##*$'\n'}"
+	repo_info="${repo_info%$'\n'*}"
+	local bare_repo="${repo_info##*$'\n'}"
+	repo_info="${repo_info%$'\n'*}"
+	local inside_gitdir="${repo_info##*$'\n'}"
+	local g="${repo_info%$'\n'*}"
+
+	if [ "true" = "$inside_worktree" ] &&
+	   [ -n "${GIT_PS1_HIDE_IF_PWD_IGNORED-}" ] &&
+	   [ "$(git config --bool bash.hideIfPwdIgnored)" != "false" ] &&
+	   git check-ignore -q .
+	then
+		return $exit
+	fi
+
+	local r=""
+	local b=""
+	local step=""
+	local total=""
+	if [ -d "$g/rebase-merge" ]; then
+		__git_eread "$g/rebase-merge/head-name" b
+		__git_eread "$g/rebase-merge/msgnum" step
+		__git_eread "$g/rebase-merge/end" total
+		r="|REBASE"
+	else
+		if [ -d "$g/rebase-apply" ]; then
+			__git_eread "$g/rebase-apply/next" step
+			__git_eread "$g/rebase-apply/last" total
+			if [ -f "$g/rebase-apply/rebasing" ]; then
+				__git_eread "$g/rebase-apply/head-name" b
+				r="|REBASE"
+			elif [ -f "$g/rebase-apply/applying" ]; then
+				r="|AM"
+			else
+				r="|AM/REBASE"
+			fi
+		elif [ -f "$g/MERGE_HEAD" ]; then
+			r="|MERGING"
+		elif __git_sequencer_status; then
+			:
+		elif [ -f "$g/BISECT_LOG" ]; then
+			r="|BISECTING"
+		fi
+
+		if [ -n "$b" ]; then
+			:
+		elif [ -h "$g/HEAD" ]; then
+			# symlink symbolic ref
+			b="$(git symbolic-ref HEAD 2>/dev/null)"
+		else
+			local head=""
+			if ! __git_eread "$g/HEAD" head; then
+				return $exit
+			fi
+			# is it a symbolic ref?
+			b="${head#ref: }"
+			if [ "$head" = "$b" ]; then
+				detached=yes
+				b="$(
+				case "${GIT_PS1_DESCRIBE_STYLE-}" in
+				(contains)
+					git describe --contains HEAD ;;
+				(branch)
+					git describe --contains --all HEAD ;;
+				(tag)
+					git describe --tags HEAD ;;
+				(describe)
+					git describe HEAD ;;
+				(* | default)
+					git describe --tags --exact-match HEAD ;;
+				esac 2>/dev/null)" ||
+
+				b="$short_sha..."
+				b="($b)"
+			fi
+		fi
+	fi
+
+	if [ -n "$step" ] && [ -n "$total" ]; then
+		r="$r $step/$total"
+	fi
+
+	local w=""
+	local i=""
+	local s=""
+	local u=""
+	local c=""
+	local p=""
+
+	if [ "true" = "$inside_gitdir" ]; then
+		if [ "true" = "$bare_repo" ]; then
+			c="BARE:"
+		else
+			b="GIT_DIR!"
+		fi
+	elif [ "true" = "$inside_worktree" ]; then
+		if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] &&
+		   [ "$(git config --bool bash.showDirtyState)" != "false" ]
+		then
+			git diff --no-ext-diff --quiet || w="*"
+			git diff --no-ext-diff --cached --quiet || i="+"
+			if [ -z "$short_sha" ] && [ -z "$i" ]; then
+				i="#"
+			fi
+		fi
+		if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ] &&
+		   git rev-parse --verify --quiet refs/stash >/dev/null
+		then
+			s="$"
+		fi
+
+		if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] &&
+		   [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] &&
+		   git ls-files --others --exclude-standard --directory --no-empty-directory --error-unmatch -- ':/*' >/dev/null 2>/dev/null
+		then
+			u="%${ZSH_VERSION+%}"
+		fi
+
+		if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
+			__git_ps1_show_upstream
+		fi
+	fi
+
+	local z="${GIT_PS1_STATESEPARATOR-" "}"
+
+	# NO color option unless in PROMPT_COMMAND mode
+	if [ $pcmode = yes ] && [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then
+		__git_ps1_colorize_gitstring
+	fi
+
+	b=${b##refs/heads/}
+	if [ $pcmode = yes ] && [ $ps1_expanded = yes ]; then
+		__git_ps1_branch_name=$b
+		b="\${__git_ps1_branch_name}"
+	fi
+
+	local f="$w$i$s$u"
+	local gitstring="$c$b${f:+$z$f}$r$p"
+
+	if [ $pcmode = yes ]; then
+		if [ "${__git_printf_supports_v-}" != yes ]; then
+			gitstring=$(printf -- "$printf_format" "$gitstring")
+		else
+			printf -v gitstring -- "$printf_format" "$gitstring"
+		fi
+		PS1="$ps1pc_start$gitstring$ps1pc_end"
+	else
+		printf -- "$printf_format" "$gitstring"
+	fi
+
+	return $exit
+}
diff --git a/third_party/git/contrib/contacts/.gitignore b/third_party/git/contrib/contacts/.gitignore
new file mode 100644
index 000000000000..f385ee643c74
--- /dev/null
+++ b/third_party/git/contrib/contacts/.gitignore
@@ -0,0 +1,3 @@
+git-contacts.1
+git-contacts.html
+git-contacts.xml
diff --git a/third_party/git/contrib/contacts/Makefile b/third_party/git/contrib/contacts/Makefile
new file mode 100644
index 000000000000..a2990f0dcb53
--- /dev/null
+++ b/third_party/git/contrib/contacts/Makefile
@@ -0,0 +1,71 @@
+# The default target of this Makefile is...
+all::
+
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+prefix ?= /usr/local
+gitexecdir ?= $(prefix)/libexec/git-core
+mandir ?= $(prefix)/share/man
+man1dir ?= $(mandir)/man1
+htmldir ?= $(prefix)/share/doc/git-doc
+
+../../GIT-VERSION-FILE: FORCE
+	$(MAKE) -C ../../ GIT-VERSION-FILE
+
+-include ../../GIT-VERSION-FILE
+
+# this should be set to a 'standard' bsd-type install program
+INSTALL  ?= install
+RM       ?= rm -f
+
+ASCIIDOC = asciidoc
+XMLTO    = xmlto
+
+ifndef SHELL_PATH
+	SHELL_PATH = /bin/sh
+endif
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+
+ASCIIDOC_CONF = ../../Documentation/asciidoc.conf
+MANPAGE_XSL   = ../../Documentation/manpage-normal.xsl
+
+GIT_CONTACTS := git-contacts
+
+GIT_CONTACTS_DOC := git-contacts.1
+GIT_CONTACTS_XML := git-contacts.xml
+GIT_CONTACTS_TXT := git-contacts.txt
+GIT_CONTACTS_HTML := git-contacts.html
+
+doc: $(GIT_CONTACTS_DOC) $(GIT_CONTACTS_HTML)
+
+install: $(GIT_CONTACTS)
+	$(INSTALL) -d -m 755 $(DESTDIR)$(gitexecdir)
+	$(INSTALL) -m 755 $(GIT_CONTACTS) $(DESTDIR)$(gitexecdir)
+
+install-doc: install-man install-html
+
+install-man: $(GIT_CONTACTS_DOC)
+	$(INSTALL) -d -m 755 $(DESTDIR)$(man1dir)
+	$(INSTALL) -m 644 $^ $(DESTDIR)$(man1dir)
+
+install-html: $(GIT_CONTACTS_HTML)
+	$(INSTALL) -d -m 755 $(DESTDIR)$(htmldir)
+	$(INSTALL) -m 644 $^ $(DESTDIR)$(htmldir)
+
+$(GIT_CONTACTS_DOC): $(GIT_CONTACTS_XML)
+	$(XMLTO) -m $(MANPAGE_XSL) man $^
+
+$(GIT_CONTACTS_XML): $(GIT_CONTACTS_TXT)
+	$(ASCIIDOC) -b docbook -d manpage -f $(ASCIIDOC_CONF) \
+		-agit_version=$(GIT_VERSION) $^
+
+$(GIT_CONTACTS_HTML): $(GIT_CONTACTS_TXT)
+	$(ASCIIDOC) -b xhtml11 -d manpage -f $(ASCIIDOC_CONF) \
+		-agit_version=$(GIT_VERSION) $^
+
+clean:
+	$(RM) $(GIT_CONTACTS)
+	$(RM) *.xml *.html *.1
+
+.PHONY: FORCE
diff --git a/third_party/git/contrib/contacts/git-contacts b/third_party/git/contrib/contacts/git-contacts
new file mode 100755
index 000000000000..85ad732fc063
--- /dev/null
+++ b/third_party/git/contrib/contacts/git-contacts
@@ -0,0 +1,203 @@
+#!/usr/bin/perl
+
+# List people who might be interested in a patch.  Useful as the argument to
+# git-send-email --cc-cmd option, and in other situations.
+#
+# Usage: git contacts <file | rev-list option> ...
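+#
+# Examples (file names and revisions are illustrative):
+#
+#     git contacts 0001-some-change.patch
+#     git contacts origin/master..HEAD
+#     git send-email --cc-cmd='git contacts' *.patch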
+
+use strict;
+use warnings;
+use IPC::Open2;
+
+my $since = '5-years-ago';
+my $min_percent = 10;
+my $labels_rx = qr/Signed-off-by|Reviewed-by|Acked-by|Cc|Reported-by/i;
+my %seen;
+
+sub format_contact {
+	my ($name, $email) = @_;
+	return "$name <$email>";
+}
+
+sub parse_commit {
+	my ($commit, $data) = @_;
+	my $contacts = $commit->{contacts};
+	my $inbody = 0;
+	for (split(/^/m, $data)) {
+		if (not $inbody) {
+			if (/^author ([^<>]+) <(\S+)> .+$/) {
+				$contacts->{format_contact($1, $2)} = 1;
+			} elsif (/^$/) {
+				$inbody = 1;
+			}
+		} elsif (/^$labels_rx:\s+([^<>]+)\s+<(\S+?)>$/o) {
+			$contacts->{format_contact($1, $2)} = 1;
+		}
+	}
+}
+
+sub import_commits {
+	my ($commits) = @_;
+	return unless %$commits;
+	my $pid = open2 my $reader, my $writer, qw(git cat-file --batch);
+	for my $id (keys(%$commits)) {
+		print $writer "$id\n";
+		my $line = <$reader>;
+		if ($line =~ /^([0-9a-f]{40}) commit (\d+)/) {
+			my ($cid, $len) = ($1, $2);
+			die "expected $id but got $cid\n" unless $id eq $cid;
+			my $data;
+			# cat-file emits newline after data, so read len+1
+			read $reader, $data, $len + 1;
+			parse_commit($commits->{$id}, $data);
+		}
+	}
+	close $reader;
+	close $writer;
+	waitpid($pid, 0);
+	die "git-cat-file error: $?\n" if $?;
+}
+
+sub get_blame {
+	my ($commits, $source, $from, $ranges) = @_;
+	return unless @$ranges;
+	open my $f, '-|',
+		qw(git blame --porcelain -C),
+		map({"-L$_->[0],+$_->[1]"} @$ranges),
+		'--since', $since, "$from^", '--', $source or die;
+	while (<$f>) {
+		if (/^([0-9a-f]{40}) \d+ \d+ \d+$/) {
+			my $id = $1;
+			$commits->{$id} = { id => $id, contacts => {} }
+				unless $seen{$id};
+			$seen{$id} = 1;
+		}
+	}
+	close $f;
+}
+
+sub blame_sources {
+	my ($sources, $commits) = @_;
+	for my $s (keys %$sources) {
+		for my $id (keys %{$sources->{$s}}) {
+			get_blame($commits, $s, $id, $sources->{$s}{$id});
+		}
+	}
+}
+
+sub scan_patches {
+	my ($sources, $id, $f) = @_;
+	my $source;
+	while (<$f>) {
+		if (/^From ([0-9a-f]{40}) Mon Sep 17 00:00:00 2001$/) {
+			$id = $1;
+			$seen{$id} = 1;
+		}
+		next unless $id;
+		if (m{^--- (?:a/(.+)|/dev/null)$}) {
+			$source = $1;
+		} elsif (/^@@ -(\d+)(?:,(\d+))?/ && $source) {
+			my $len = defined($2) ? $2 : 1;
+			push @{$sources->{$source}{$id}}, [$1, $len] if $len;
+		}
+	}
+}
+
+sub scan_patch_file {
+	my ($commits, $file) = @_;
+	open my $f, '<', $file or die "read failure: $file: $!\n";
+	scan_patches($commits, undef, $f);
+	close $f;
+}
+
+sub parse_rev_args {
+	my @args = @_;
+	open my $f, '-|',
+		qw(git rev-parse --revs-only --default HEAD --symbolic), @args
+		or die;
+	my @revs;
+	while (<$f>) {
+		chomp;
+		push @revs, $_;
+	}
+	close $f;
+	return @revs if scalar(@revs) != 1;
+	return "^$revs[0]", 'HEAD' unless $revs[0] =~ /^-/;
+	return $revs[0], 'HEAD';
+}
+
+sub scan_rev_args {
+	my ($commits, $args) = @_;
+	my @revs = parse_rev_args(@$args);
+	open my $f, '-|', qw(git rev-list --reverse), @revs or die;
+	while (<$f>) {
+		chomp;
+		my $id = $_;
+		$seen{$id} = 1;
+		open my $g, '-|', qw(git show -C --oneline), $id or die;
+		scan_patches($commits, $id, $g);
+		close $g;
+	}
+	close $f;
+}
+
+sub mailmap_contacts {
+	my ($contacts) = @_;
+	my %mapped;
+	my $pid = open2 my $reader, my $writer, qw(git check-mailmap --stdin);
+	for my $contact (keys(%$contacts)) {
+		print $writer "$contact\n";
+		my $canonical = <$reader>;
+		chomp $canonical;
+		$mapped{$canonical} += $contacts->{$contact};
+	}
+	close $reader;
+	close $writer;
+	waitpid($pid, 0);
+	die "git-check-mailmap error: $?\n" if $?;
+	return \%mapped;
+}
+
+if (!@ARGV) {
+	die "No input revisions or patch files\n";
+}
+
+my (@files, @rev_args);
+for (@ARGV) {
+	if (-e) {
+		push @files, $_;
+	} else {
+		push @rev_args, $_;
+	}
+}
+
+my %sources;
+for (@files) {
+	scan_patch_file(\%sources, $_);
+}
+if (@rev_args) {
+	scan_rev_args(\%sources, \@rev_args)
+}
+
+my $toplevel = `git rev-parse --show-toplevel`;
+chomp $toplevel;
+chdir($toplevel) or die "chdir failure: $toplevel: $!\n";
+
+my %commits;
+blame_sources(\%sources, \%commits);
+import_commits(\%commits);
+
+my $contacts = {};
+for my $commit (values %commits) {
+	for my $contact (keys %{$commit->{contacts}}) {
+		$contacts->{$contact}++;
+	}
+}
+$contacts = mailmap_contacts($contacts);
+
+my $ncommits = scalar(keys %commits);
+for my $contact (keys %$contacts) {
+	my $percent = $contacts->{$contact} * 100 / $ncommits;
+	next if $percent < $min_percent;
+	print "$contact\n";
+}
diff --git a/third_party/git/contrib/contacts/git-contacts.txt b/third_party/git/contrib/contacts/git-contacts.txt
new file mode 100644
index 000000000000..dd914d126123
--- /dev/null
+++ b/third_party/git/contrib/contacts/git-contacts.txt
@@ -0,0 +1,94 @@
+git-contacts(1)
+===============
+
+NAME
+----
+git-contacts - List people who might be interested in a set of changes
+
+
+SYNOPSIS
+--------
+[verse]
+'git contacts' (<patch>|<range>|<rev>)...
+
+
+DESCRIPTION
+-----------
+
+Given a set of changes, specified as patch files or revisions, determine people
+who might be interested in those changes.  This is done by consulting the
+history of each patch or revision hunk to find people mentioned by commits
+which touched the lines of files under consideration.
+
+Input consists of one or more patch files or revision arguments.  A revision
+argument can be a range or a single `<rev>` which is interpreted as
+`<rev>..HEAD`, thus the same revision arguments are accepted as for
+linkgit:git-format-patch[1]. Patch files and revision arguments can be combined
+in the same invocation.
+
+This command can be useful for determining the list of people with whom to
+discuss proposed changes, or for finding the list of recipients to Cc: when
+submitting a patch series via `git send-email`. For the latter case, `git
+contacts` can be used as the argument to `git send-email`'s `--cc-cmd` option.
+
+
+DISCUSSION
+----------
+
+`git blame` is invoked for each hunk in a patch file or revision.  For each
+commit mentioned by `git blame`, the commit message is consulted for people who
+authored, reviewed, signed, acknowledged, or were Cc:'d.  Once the list of
+participants is known, each person's relevance is computed by considering how
+many commits mentioned that person compared with the total number of commits
+under consideration.  The final output consists only of participants who exceed
+a minimum threshold of participation.
+
+
+OUTPUT
+------
+
+For each person of interest, a single line is output, terminated by a newline.
+If the person's name is known, ``Name $$<user@host>$$'' is printed; otherwise
+only ``$$<user@host>$$'' is printed.
+
+
+EXAMPLES
+--------
+
+* Consult patch files:
++
+------------
+$ git contacts feature/*.patch
+------------
+
+* Revision range:
++
+------------
+$ git contacts R1..R2
+------------
+
+* From a single revision to `HEAD`:
++
+------------
+$ git contacts origin
+------------
+
+* Helper for `git send-email`:
++
+------------
+$ git send-email --cc-cmd='git contacts' feature/*.patch
+------------
+
+
+LIMITATIONS
+-----------
+
+Several conditions controlling a person's significance are currently
+hard-coded, such as minimum participation level (10%), blame date-limiting (5
+years), and `-C` level for detecting moved and copied lines (a single `-C`). In
+the future, these conditions may become configurable.
+
+
+GIT
+---
+Part of the linkgit:git[1] suite
diff --git a/third_party/git/contrib/coverage-diff.sh b/third_party/git/contrib/coverage-diff.sh
new file mode 100755
index 000000000000..4ec419f90048
--- /dev/null
+++ b/third_party/git/contrib/coverage-diff.sh
@@ -0,0 +1,108 @@
+#!/bin/sh
+
+# Usage: Run 'contrib/coverage-diff.sh <version1> <version2>' from source-root
+# after running
+#
+#     make coverage-test
+#     make coverage-report
+#
+# while checked out at <version2>. This script combines the *.gcov files
+# generated by the 'make' commands above with 'git diff <version1> <version2>'
+# to report new lines that are not covered by the test suite.
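+#
+# Example invocation (version names are hypothetical):
+#
+#     contrib/coverage-diff.sh v2.23.0 HEAD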
+
+V1=$1
+V2=$2
+
+diff_lines () {
+	perl -e '
+		my $line_num;
+		while (<>) {
+			# Hunk header?  Grab the beginning in postimage.
+			if (/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/) {
+				$line_num = $1;
+				next;
+			}
+
+			# Have we seen a hunk?  Ignore "diff --git" etc.
+			next unless defined $line_num;
+
+			# Deleted line? Ignore.
+			if (/^-/) {
+				next;
+			}
+
+			# Show only the line number of added lines.
+			if (/^\+/) {
+				print "$line_num\n";
+			}
+			# Either a common context line or an added line
+			# appears in the postimage.  Count it.
+			$line_num++;
+		}
+	'
+}
+
+files=$(git diff --name-only "$V1" "$V2" -- \*.c)
+
+# create empty file
+>coverage-data.txt
+
+for file in $files
+do
+	git diff "$V1" "$V2" -- "$file" |
+	diff_lines |
+	sort >new_lines.txt
+
+	if ! test -s new_lines.txt
+	then
+		continue
+	fi
+
+	hash_file=$(echo $file | sed "s/\//\#/")
+
+	if ! test -s "$hash_file.gcov"
+	then
+		continue
+	fi
+
+	sed -ne '/#####:/{
+			s/    #####://
+			s/:.*//
+			s/ //g
+			p
+		}' "$hash_file.gcov" |
+	sort >uncovered_lines.txt
+
+	comm -12 uncovered_lines.txt new_lines.txt |
+	sed -e 's/$/\)/' |
+	sed -e 's/^/ /' >uncovered_new_lines.txt
+
+	grep -q '[^[:space:]]' <uncovered_new_lines.txt &&
+	echo $file >>coverage-data.txt &&
+	git blame -s "$V2" -- "$file" |
+	sed 's/\t//g' |
+	grep -f uncovered_new_lines.txt >>coverage-data.txt &&
+	echo >>coverage-data.txt
+
+	rm -f new_lines.txt uncovered_lines.txt uncovered_new_lines.txt
+done
+
+cat coverage-data.txt
+
+echo "Commits introducing uncovered code:"
+
+commit_list=$(cat coverage-data.txt |
+	grep -E '^[0-9a-f]{7,} ' |
+	awk '{print $1;}' |
+	sort |
+	uniq)
+
+(
+	for commit in $commit_list
+	do
+		git log --no-decorate --pretty=format:'%an      %h: %s' -1 $commit
+		echo
+	done
+) | sort
+
+rm coverage-data.txt
diff --git a/third_party/git/contrib/credential/gnome-keyring/.gitignore b/third_party/git/contrib/credential/gnome-keyring/.gitignore
new file mode 100644
index 000000000000..88d8fcdbce7d
--- /dev/null
+++ b/third_party/git/contrib/credential/gnome-keyring/.gitignore
@@ -0,0 +1 @@
+git-credential-gnome-keyring
diff --git a/third_party/git/contrib/credential/gnome-keyring/Makefile b/third_party/git/contrib/credential/gnome-keyring/Makefile
new file mode 100644
index 000000000000..22c19df94b81
--- /dev/null
+++ b/third_party/git/contrib/credential/gnome-keyring/Makefile
@@ -0,0 +1,25 @@
+MAIN:=git-credential-gnome-keyring
+all:: $(MAIN)
+
+CC = gcc
+RM = rm -f
+CFLAGS = -g -O2 -Wall
+PKG_CONFIG = pkg-config
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+INCS:=$(shell $(PKG_CONFIG) --cflags gnome-keyring-1 glib-2.0)
+LIBS:=$(shell $(PKG_CONFIG) --libs gnome-keyring-1 glib-2.0)
+
+SRCS:=$(MAIN).c
+OBJS:=$(SRCS:.c=.o)
+
+%.o: %.c
+	$(CC) $(CFLAGS) $(CPPFLAGS) $(INCS) -o $@ -c $<
+
+$(MAIN): $(OBJS)
+	$(CC) -o $@ $(LDFLAGS) $^ $(LIBS)
+
+clean:
+	@$(RM) $(MAIN) $(OBJS)
diff --git a/third_party/git/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c b/third_party/git/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
new file mode 100644
index 000000000000..d389bfadceeb
--- /dev/null
+++ b/third_party/git/contrib/credential/gnome-keyring/git-credential-gnome-keyring.c
@@ -0,0 +1,470 @@
+/*
+ * Copyright (C) 2011 John Szakmeister <john@szakmeister.net>
+ *               2012 Philipp A. Hartmann <pah@qo.cx>
+ *
+ *  This program is free software; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation; either version 2 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program; if not, see <http://www.gnu.org/licenses/>.
+ */
+
+/*
+ * Credits:
+ * - GNOME Keyring API handling originally written by John Szakmeister
+ * - ported to credential helper API by Philipp A. Hartmann
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <glib.h>
+#include <gnome-keyring.h>
+
+#ifdef GNOME_KEYRING_DEFAULT
+
+   /* Modern gnome-keyring */
+
+#include <gnome-keyring-memory.h>
+
+#else
+
+   /*
+    * Support ancient gnome-keyring, circa RHEL 5.x.
+    * GNOME_KEYRING_DEFAULT seems to have been introduced with Gnome 2.22,
+    * and the other features roughly around Gnome 2.20, 6 months before.
+    * Ubuntu 8.04 used Gnome 2.22 (I think).  Not sure any distro used 2.20.
+    * So the existence/non-existence of GNOME_KEYRING_DEFAULT seems like
+    * a decent thing to use as an indicator.
+    */
+
+#define GNOME_KEYRING_DEFAULT NULL
+
+/*
+ * ancient gnome-keyring returns DENIED when an entry is not found.
+ * Setting NO_MATCH to DENIED will prevent us from reporting DENIED
+ * errors during get and erase operations, but we will still report
+ * DENIED errors during a store.
+ */
+#define GNOME_KEYRING_RESULT_NO_MATCH GNOME_KEYRING_RESULT_DENIED
+
+#define gnome_keyring_memory_alloc g_malloc
+#define gnome_keyring_memory_free gnome_keyring_free_password
+#define gnome_keyring_memory_strdup g_strdup
+
+static const char *gnome_keyring_result_to_message(GnomeKeyringResult result)
+{
+	switch (result) {
+	case GNOME_KEYRING_RESULT_OK:
+		return "OK";
+	case GNOME_KEYRING_RESULT_DENIED:
+		return "Denied";
+	case GNOME_KEYRING_RESULT_NO_KEYRING_DAEMON:
+		return "No Keyring Daemon";
+	case GNOME_KEYRING_RESULT_ALREADY_UNLOCKED:
+		return "Already UnLocked";
+	case GNOME_KEYRING_RESULT_NO_SUCH_KEYRING:
+		return "No Such Keyring";
+	case GNOME_KEYRING_RESULT_BAD_ARGUMENTS:
+		return "Bad Arguments";
+	case GNOME_KEYRING_RESULT_IO_ERROR:
+		return "IO Error";
+	case GNOME_KEYRING_RESULT_CANCELLED:
+		return "Cancelled";
+	case GNOME_KEYRING_RESULT_ALREADY_EXISTS:
+		return "Already Exists";
+	default:
+		return "Unknown Error";
+	}
+}
+
+/*
+ * Support really ancient gnome-keyring, circa RHEL 4.x.
+ * The Glib version is just a guess: Glib 2.8 was roughly Gnome 2.12,
+ * which was released with gnome-keyring 0.4.3 (probably).
+ */
+#if GLIB_MAJOR_VERSION == 2 && GLIB_MINOR_VERSION < 8
+
+static void gnome_keyring_done_cb(GnomeKeyringResult result, gpointer user_data)
+{
+	gpointer *data = (gpointer *)user_data;
+	int *done = (int *)data[0];
+	GnomeKeyringResult *r = (GnomeKeyringResult *)data[1];
+
+	*r = result;
+	*done = 1;
+}
+
+static void wait_for_request_completion(int *done)
+{
+	GMainContext *mc = g_main_context_default();
+	while (!*done)
+		g_main_context_iteration(mc, TRUE);
+}
+
+static GnomeKeyringResult gnome_keyring_item_delete_sync(const char *keyring, guint32 id)
+{
+	int done = 0;
+	GnomeKeyringResult result;
+	gpointer data[] = { &done, &result };
+
+	gnome_keyring_item_delete(keyring, id, gnome_keyring_done_cb, data,
+		NULL);
+
+	wait_for_request_completion(&done);
+
+	return result;
+}
+
+#endif
+#endif
+
+/*
+ * This credential struct and API is simplified from git's credential.{h,c}
+ */
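+
+/*
+ * For reference, a request as parsed by credential_read() below consists of
+ * "key=value" lines on stdin, terminated by a blank line; the values shown
+ * here are purely illustrative:
+ *
+ *     protocol=https
+ *     host=example.com
+ *     username=alice
+ *     password=secret
+ */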
+struct credential {
+	char *protocol;
+	char *host;
+	unsigned short port;
+	char *path;
+	char *username;
+	char *password;
+};
+
+#define CREDENTIAL_INIT { NULL, NULL, 0, NULL, NULL, NULL }
+
+typedef int (*credential_op_cb)(struct credential *);
+
+struct credential_operation {
+	char *name;
+	credential_op_cb op;
+};
+
+#define CREDENTIAL_OP_END { NULL, NULL }
+
+/* ----------------- GNOME Keyring functions ----------------- */
+
+/* create a special keyring option string, if path is given */
+static char *keyring_object(struct credential *c)
+{
+	if (!c->path)
+		return NULL;
+
+	if (c->port)
+		return g_strdup_printf("%s:%hu/%s", c->host, c->port, c->path);
+
+	return g_strdup_printf("%s/%s", c->host, c->path);
+}
+
+static int keyring_get(struct credential *c)
+{
+	char *object = NULL;
+	GList *entries;
+	GnomeKeyringNetworkPasswordData *password_data;
+	GnomeKeyringResult result;
+
+	if (!c->protocol || !(c->host || c->path))
+		return EXIT_FAILURE;
+
+	object = keyring_object(c);
+
+	result = gnome_keyring_find_network_password_sync(
+				c->username,
+				NULL /* domain */,
+				c->host,
+				object,
+				c->protocol,
+				NULL /* authtype */,
+				c->port,
+				&entries);
+
+	g_free(object);
+
+	if (result == GNOME_KEYRING_RESULT_NO_MATCH)
+		return EXIT_SUCCESS;
+
+	if (result == GNOME_KEYRING_RESULT_CANCELLED)
+		return EXIT_SUCCESS;
+
+	if (result != GNOME_KEYRING_RESULT_OK) {
+		g_critical("%s", gnome_keyring_result_to_message(result));
+		return EXIT_FAILURE;
+	}
+
+	/* pick the first one from the list */
+	password_data = (GnomeKeyringNetworkPasswordData *)entries->data;
+
+	gnome_keyring_memory_free(c->password);
+	c->password = gnome_keyring_memory_strdup(password_data->password);
+
+	if (!c->username)
+		c->username = g_strdup(password_data->user);
+
+	gnome_keyring_network_password_list_free(entries);
+
+	return EXIT_SUCCESS;
+}
+
+
+static int keyring_store(struct credential *c)
+{
+	guint32 item_id;
+	char *object = NULL;
+	GnomeKeyringResult result;
+
+	/*
+	 * Sanity check that what we are storing is actually sensible.
+	 * In particular, we can't make a URL without a protocol field.
+	 * Without either a host or pathname (depending on the scheme),
+	 * we have no primary key. And without a username and password,
+	 * we are not actually storing a credential.
+	 */
+	if (!c->protocol || !(c->host || c->path) ||
+	    !c->username || !c->password)
+		return EXIT_FAILURE;
+
+	object = keyring_object(c);
+
+	result = gnome_keyring_set_network_password_sync(
+				GNOME_KEYRING_DEFAULT,
+				c->username,
+				NULL /* domain */,
+				c->host,
+				object,
+				c->protocol,
+				NULL /* authtype */,
+				c->port,
+				c->password,
+				&item_id);
+
+	g_free(object);
+
+	if (result != GNOME_KEYRING_RESULT_OK &&
+	    result != GNOME_KEYRING_RESULT_CANCELLED) {
+		g_critical("%s", gnome_keyring_result_to_message(result));
+		return EXIT_FAILURE;
+	}
+
+	return EXIT_SUCCESS;
+}
+
+static int keyring_erase(struct credential *c)
+{
+	char *object = NULL;
+	GList *entries;
+	GnomeKeyringNetworkPasswordData *password_data;
+	GnomeKeyringResult result;
+
+	/*
+	 * Sanity check that we actually have something to match
+	 * against. The input we get is a restrictive pattern,
+	 * so technically a blank credential means "erase everything".
+	 * But it is too easy to accidentally send this, since it is equivalent
+	 * to empty input. So explicitly disallow it, and require that the
+	 * pattern have some actual content to match.
+	 */
+	if (!c->protocol && !c->host && !c->path && !c->username)
+		return EXIT_FAILURE;
+
+	object = keyring_object(c);
+
+	result = gnome_keyring_find_network_password_sync(
+				c->username,
+				NULL /* domain */,
+				c->host,
+				object,
+				c->protocol,
+				NULL /* authtype */,
+				c->port,
+				&entries);
+
+	g_free(object);
+
+	if (result == GNOME_KEYRING_RESULT_NO_MATCH)
+		return EXIT_SUCCESS;
+
+	if (result == GNOME_KEYRING_RESULT_CANCELLED)
+		return EXIT_SUCCESS;
+
+	if (result != GNOME_KEYRING_RESULT_OK) {
+		g_critical("%s", gnome_keyring_result_to_message(result));
+		return EXIT_FAILURE;
+	}
+
+	/* pick the first one from the list (delete all matches?) */
+	password_data = (GnomeKeyringNetworkPasswordData *)entries->data;
+
+	result = gnome_keyring_item_delete_sync(
+		password_data->keyring, password_data->item_id);
+
+	gnome_keyring_network_password_list_free(entries);
+
+	if (result != GNOME_KEYRING_RESULT_OK) {
+		g_critical("%s", gnome_keyring_result_to_message(result));
+		return EXIT_FAILURE;
+	}
+
+	return EXIT_SUCCESS;
+}
+
+/*
+ * Table with helper operation callbacks, used by generic
+ * credential helper main function.
+ */
+static struct credential_operation const credential_helper_ops[] = {
+	{ "get",   keyring_get },
+	{ "store", keyring_store },
+	{ "erase", keyring_erase },
+	CREDENTIAL_OP_END
+};
+
+/* ------------------ credential functions ------------------ */
+
+static void credential_init(struct credential *c)
+{
+	memset(c, 0, sizeof(*c));
+}
+
+static void credential_clear(struct credential *c)
+{
+	g_free(c->protocol);
+	g_free(c->host);
+	g_free(c->path);
+	g_free(c->username);
+	gnome_keyring_memory_free(c->password);
+
+	credential_init(c);
+}
+
+static int credential_read(struct credential *c)
+{
+	char *buf;
+	size_t line_len;
+	char *key;
+	char *value;
+
+	key = buf = gnome_keyring_memory_alloc(1024);
+
+	while (fgets(buf, 1024, stdin)) {
+		line_len = strlen(buf);
+
+		if (line_len && buf[line_len-1] == '\n')
+			buf[--line_len] = '\0';
+
+		if (!line_len)
+			break;
+
+		value = strchr(buf, '=');
+		if (!value) {
+			g_warning("invalid credential line: %s", key);
+			gnome_keyring_memory_free(buf);
+			return -1;
+		}
+		*value++ = '\0';
+
+		if (!strcmp(key, "protocol")) {
+			g_free(c->protocol);
+			c->protocol = g_strdup(value);
+		} else if (!strcmp(key, "host")) {
+			g_free(c->host);
+			c->host = g_strdup(value);
+			value = strrchr(c->host, ':');
+			if (value) {
+				*value++ = '\0';
+				c->port = atoi(value);
+			}
+		} else if (!strcmp(key, "path")) {
+			g_free(c->path);
+			c->path = g_strdup(value);
+		} else if (!strcmp(key, "username")) {
+			g_free(c->username);
+			c->username = g_strdup(value);
+		} else if (!strcmp(key, "password")) {
+			gnome_keyring_memory_free(c->password);
+			c->password = gnome_keyring_memory_strdup(value);
+			while (*value)
+				*value++ = '\0';
+		}
+		/*
+		 * Ignore other lines; we don't know what they mean, but
+		 * this future-proofs us when later versions of git do
+		 * learn new lines, and the helpers are updated to match.
+		 */
+	}
+
+	gnome_keyring_memory_free(buf);
+
+	return 0;
+}
+
+static void credential_write_item(FILE *fp, const char *key, const char *value)
+{
+	if (!value)
+		return;
+	fprintf(fp, "%s=%s\n", key, value);
+}
+
+static void credential_write(const struct credential *c)
+{
+	/* only write username/password, if set */
+	credential_write_item(stdout, "username", c->username);
+	credential_write_item(stdout, "password", c->password);
+}
+
+static void usage(const char *name)
+{
+	struct credential_operation const *try_op = credential_helper_ops;
+	const char *basename = strrchr(name, '/');
+
+	basename = (basename) ? basename + 1 : name;
+	fprintf(stderr, "usage: %s <", basename);
+	while (try_op->name) {
+		fprintf(stderr, "%s", (try_op++)->name);
+		if (try_op->name)
+			fprintf(stderr, "%s", "|");
+	}
+	fprintf(stderr, "%s", ">\n");
+}
+
+int main(int argc, char *argv[])
+{
+	int ret = EXIT_SUCCESS;
+
+	struct credential_operation const *try_op = credential_helper_ops;
+	struct credential cred = CREDENTIAL_INIT;
+
+	if (!argv[1]) {
+		usage(argv[0]);
+		exit(EXIT_FAILURE);
+	}
+
+	g_set_application_name("Git Credential Helper");
+
+	/* lookup operation callback */
+	while (try_op->name && strcmp(argv[1], try_op->name))
+		try_op++;
+
+	/* unsupported operation given -- ignore silently */
+	if (!try_op->name || !try_op->op)
+		goto out;
+
+	ret = credential_read(&cred);
+	if (ret)
+		goto out;
+
+	/* perform credential operation */
+	ret = (*try_op->op)(&cred);
+
+	credential_write(&cred);
+
+out:
+	credential_clear(&cred);
+	return ret;
+}
diff --git a/third_party/git/contrib/credential/libsecret/Makefile b/third_party/git/contrib/credential/libsecret/Makefile
new file mode 100644
index 000000000000..3e67552cc5b5
--- /dev/null
+++ b/third_party/git/contrib/credential/libsecret/Makefile
@@ -0,0 +1,25 @@
+MAIN:=git-credential-libsecret
+all:: $(MAIN)
+
+CC = gcc
+RM = rm -f
+CFLAGS = -g -O2 -Wall
+PKG_CONFIG = pkg-config
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+INCS:=$(shell $(PKG_CONFIG) --cflags libsecret-1 glib-2.0)
+LIBS:=$(shell $(PKG_CONFIG) --libs libsecret-1 glib-2.0)
+
+SRCS:=$(MAIN).c
+OBJS:=$(SRCS:.c=.o)
+
+%.o: %.c
+	$(CC) $(CFLAGS) $(CPPFLAGS) $(INCS) -o $@ -c $<
+
+$(MAIN): $(OBJS)
+	$(CC) -o $@ $(LDFLAGS) $^ $(LIBS)
+
+clean:
+	@$(RM) $(MAIN) $(OBJS)
diff --git a/third_party/git/contrib/credential/libsecret/git-credential-libsecret.c b/third_party/git/contrib/credential/libsecret/git-credential-libsecret.c
new file mode 100644
index 000000000000..e6598b638339
--- /dev/null
+++ b/third_party/git/contrib/credential/libsecret/git-credential-libsecret.c
@@ -0,0 +1,369 @@
+/*
+ * Copyright (C) 2011 John Szakmeister <john@szakmeister.net>
+ *               2012 Philipp A. Hartmann <pah@qo.cx>
+ *               2016 Mantas Mikulėnas <grawity@gmail.com>
+ *
+ *  This program is free software; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation; either version 2 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program; if not, see <http://www.gnu.org/licenses/>.
+ */
+
+/*
+ * Credits:
+ * - GNOME Keyring API handling originally written by John Szakmeister
+ * - ported to credential helper API by Philipp A. Hartmann
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <glib.h>
+#include <libsecret/secret.h>
+
+/*
+ * This credential struct and API is simplified from git's credential.{h,c}
+ */
+struct credential {
+	char *protocol;
+	char *host;
+	unsigned short port;
+	char *path;
+	char *username;
+	char *password;
+};
+
+#define CREDENTIAL_INIT { NULL, NULL, 0, NULL, NULL, NULL }
+
+typedef int (*credential_op_cb)(struct credential *);
+
+struct credential_operation {
+	char *name;
+	credential_op_cb op;
+};
+
+#define CREDENTIAL_OP_END { NULL, NULL }
+
+/* ----------------- Secret Service functions ----------------- */
+
+static char *make_label(struct credential *c)
+{
+	if (c->port)
+		return g_strdup_printf("Git: %s://%s:%hu/%s",
+					c->protocol, c->host, c->port, c->path ? c->path : "");
+	else
+		return g_strdup_printf("Git: %s://%s/%s",
+					c->protocol, c->host, c->path ? c->path : "");
+}
+
+static GHashTable *make_attr_list(struct credential *c)
+{
+	GHashTable *al = g_hash_table_new_full(g_str_hash, g_str_equal, NULL, g_free);
+
+	if (c->username)
+		g_hash_table_insert(al, "user", g_strdup(c->username));
+	if (c->protocol)
+		g_hash_table_insert(al, "protocol", g_strdup(c->protocol));
+	if (c->host)
+		g_hash_table_insert(al, "server", g_strdup(c->host));
+	if (c->port)
+		g_hash_table_insert(al, "port", g_strdup_printf("%hu", c->port));
+	if (c->path)
+		g_hash_table_insert(al, "object", g_strdup(c->path));
+
+	return al;
+}
+
+static int keyring_get(struct credential *c)
+{
+	SecretService *service = NULL;
+	GHashTable *attributes = NULL;
+	GError *error = NULL;
+	GList *items = NULL;
+
+	if (!c->protocol || !(c->host || c->path))
+		return EXIT_FAILURE;
+
+	service = secret_service_get_sync(0, NULL, &error);
+	if (error != NULL) {
+		g_critical("could not connect to Secret Service: %s", error->message);
+		g_error_free(error);
+		return EXIT_FAILURE;
+	}
+
+	attributes = make_attr_list(c);
+	items = secret_service_search_sync(service,
+					   SECRET_SCHEMA_COMPAT_NETWORK,
+					   attributes,
+					   SECRET_SEARCH_LOAD_SECRETS | SECRET_SEARCH_UNLOCK,
+					   NULL,
+					   &error);
+	g_hash_table_unref(attributes);
+	if (error != NULL) {
+		g_critical("lookup failed: %s", error->message);
+		g_error_free(error);
+		return EXIT_FAILURE;
+	}
+
+	if (items != NULL) {
+		SecretItem *item;
+		SecretValue *secret;
+		const char *s;
+
+		item = items->data;
+		secret = secret_item_get_secret(item);
+		attributes = secret_item_get_attributes(item);
+
+		s = g_hash_table_lookup(attributes, "user");
+		if (s) {
+			g_free(c->username);
+			c->username = g_strdup(s);
+		}
+
+		s = secret_value_get_text(secret);
+		if (s) {
+			g_free(c->password);
+			c->password = g_strdup(s);
+		}
+
+		g_hash_table_unref(attributes);
+		secret_value_unref(secret);
+		g_list_free_full(items, g_object_unref);
+	}
+
+	return EXIT_SUCCESS;
+}
+
+
+static int keyring_store(struct credential *c)
+{
+	char *label = NULL;
+	GHashTable *attributes = NULL;
+	GError *error = NULL;
+
+	/*
+	 * Sanity check that what we are storing is actually sensible.
+	 * In particular, we can't make a URL without a protocol field.
+	 * Without either a host or pathname (depending on the scheme),
+	 * we have no primary key. And without a username and password,
+	 * we are not actually storing a credential.
+	 */
+	if (!c->protocol || !(c->host || c->path) ||
+	    !c->username || !c->password)
+		return EXIT_FAILURE;
+
+	label = make_label(c);
+	attributes = make_attr_list(c);
+	secret_password_storev_sync(SECRET_SCHEMA_COMPAT_NETWORK,
+				    attributes,
+				    NULL,
+				    label,
+				    c->password,
+				    NULL,
+				    &error);
+	g_free(label);
+	g_hash_table_unref(attributes);
+
+	if (error != NULL) {
+		g_critical("store failed: %s", error->message);
+		g_error_free(error);
+		return EXIT_FAILURE;
+	}
+
+	return EXIT_SUCCESS;
+}
+
+static int keyring_erase(struct credential *c)
+{
+	GHashTable *attributes = NULL;
+	GError *error = NULL;
+
+	/*
+	 * Sanity check that we actually have something to match
+	 * against. The input we get is a restrictive pattern,
+	 * so technically a blank credential means "erase everything".
+	 * But it is too easy to accidentally send this, since it is equivalent
+	 * to empty input. So explicitly disallow it, and require that the
+	 * pattern have some actual content to match.
+	 */
+	if (!c->protocol && !c->host && !c->path && !c->username)
+		return EXIT_FAILURE;
+
+	attributes = make_attr_list(c);
+	secret_password_clearv_sync(SECRET_SCHEMA_COMPAT_NETWORK,
+				    attributes,
+				    NULL,
+				    &error);
+	g_hash_table_unref(attributes);
+
+	if (error != NULL) {
+		g_critical("erase failed: %s", error->message);
+		g_error_free(error);
+		return EXIT_FAILURE;
+	}
+
+	return EXIT_SUCCESS;
+}
+
+/*
+ * Table with helper operation callbacks, used by generic
+ * credential helper main function.
+ */
+static struct credential_operation const credential_helper_ops[] = {
+	{ "get",   keyring_get },
+	{ "store", keyring_store },
+	{ "erase", keyring_erase },
+	CREDENTIAL_OP_END
+};
+
+/* ------------------ credential functions ------------------ */
+
+static void credential_init(struct credential *c)
+{
+	memset(c, 0, sizeof(*c));
+}
+
+static void credential_clear(struct credential *c)
+{
+	g_free(c->protocol);
+	g_free(c->host);
+	g_free(c->path);
+	g_free(c->username);
+	g_free(c->password);
+
+	credential_init(c);
+}
+
+static int credential_read(struct credential *c)
+{
+	char *buf;
+	size_t line_len;
+	char *key;
+	char *value;
+
+	key = buf = g_malloc(1024);
+
+	while (fgets(buf, 1024, stdin)) {
+		line_len = strlen(buf);
+
+		if (line_len && buf[line_len-1] == '\n')
+			buf[--line_len] = '\0';
+
+		if (!line_len)
+			break;
+
+		value = strchr(buf, '=');
+		if (!value) {
+			g_warning("invalid credential line: %s", key);
+			g_free(buf);
+			return -1;
+		}
+		*value++ = '\0';
+
+		if (!strcmp(key, "protocol")) {
+			g_free(c->protocol);
+			c->protocol = g_strdup(value);
+		} else if (!strcmp(key, "host")) {
+			g_free(c->host);
+			c->host = g_strdup(value);
+			value = strrchr(c->host, ':');
+			if (value) {
+				*value++ = '\0';
+				c->port = atoi(value);
+			}
+		} else if (!strcmp(key, "path")) {
+			g_free(c->path);
+			c->path = g_strdup(value);
+		} else if (!strcmp(key, "username")) {
+			g_free(c->username);
+			c->username = g_strdup(value);
+		} else if (!strcmp(key, "password")) {
+			g_free(c->password);
+			c->password = g_strdup(value);
+			while (*value)
+				*value++ = '\0';
+		}
+		/*
+		 * Ignore other lines; we don't know what they mean, but
+		 * this future-proofs us when later versions of git do
+		 * learn new lines, and the helpers are updated to match.
+		 */
+	}
+
+	g_free(buf);
+
+	return 0;
+}
+
+static void credential_write_item(FILE *fp, const char *key, const char *value)
+{
+	if (!value)
+		return;
+	fprintf(fp, "%s=%s\n", key, value);
+}
+
+static void credential_write(const struct credential *c)
+{
+	/* only write username/password, if set */
+	credential_write_item(stdout, "username", c->username);
+	credential_write_item(stdout, "password", c->password);
+}
+
+static void usage(const char *name)
+{
+	struct credential_operation const *try_op = credential_helper_ops;
+	const char *basename = strrchr(name, '/');
+
+	basename = (basename) ? basename + 1 : name;
+	fprintf(stderr, "usage: %s <", basename);
+	while (try_op->name) {
+		fprintf(stderr, "%s", (try_op++)->name);
+		if (try_op->name)
+			fprintf(stderr, "%s", "|");
+	}
+	fprintf(stderr, "%s", ">\n");
+}
+
+int main(int argc, char *argv[])
+{
+	int ret = EXIT_SUCCESS;
+
+	struct credential_operation const *try_op = credential_helper_ops;
+	struct credential cred = CREDENTIAL_INIT;
+
+	if (!argv[1]) {
+		usage(argv[0]);
+		exit(EXIT_FAILURE);
+	}
+
+	g_set_application_name("Git Credential Helper");
+
+	/* lookup operation callback */
+	while (try_op->name && strcmp(argv[1], try_op->name))
+		try_op++;
+
+	/* unsupported operation given -- ignore silently */
+	if (!try_op->name || !try_op->op)
+		goto out;
+
+	ret = credential_read(&cred);
+	if (ret)
+		goto out;
+
+	/* perform credential operation */
+	ret = (*try_op->op)(&cred);
+
+	credential_write(&cred);
+
+out:
+	credential_clear(&cred);
+	return ret;
+}
diff --git a/third_party/git/contrib/credential/netrc/.gitignore b/third_party/git/contrib/credential/netrc/.gitignore
new file mode 100644
index 000000000000..d41cdde84b6a
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/.gitignore
@@ -0,0 +1 @@
+git-credential-netrc
diff --git a/third_party/git/contrib/credential/netrc/Makefile b/third_party/git/contrib/credential/netrc/Makefile
new file mode 100644
index 000000000000..c284fb8ac490
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/Makefile
@@ -0,0 +1,30 @@
+# The default target of this Makefile is...
+all::
+
+SCRIPT_PERL = git-credential-netrc.perl
+GIT_ROOT_DIR = ../../..
+HERE = contrib/credential/netrc
+
+SCRIPT_PERL_FULL = $(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
+
+all:: build
+
+build:
+	$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+                build-perl-script
+
+install: build
+	$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+                install-perl-script
+
+clean:
+	$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+                clean-perl-script
+
+test: build
+	./t-git-credential-netrc.sh
+
+testverbose: build
+	./t-git-credential-netrc.sh -d -v
+
+.PHONY: all build install clean test testverbose
diff --git a/third_party/git/contrib/credential/netrc/git-credential-netrc.perl b/third_party/git/contrib/credential/netrc/git-credential-netrc.perl
new file mode 100755
index 000000000000..bc57cc65884b
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/git-credential-netrc.perl
@@ -0,0 +1,440 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use File::Basename;
+use Git;
+
+my $VERSION = "0.2";
+
+my %options = (
+	       help => 0,
+	       debug => 0,
+	       verbose => 0,
+	       insecure => 0,
+	       file => [],
+
+	       # identical token maps, e.g. host -> host, will be inserted later
+	       tmap => {
+			port => 'protocol',
+			machine => 'host',
+			path => 'path',
+			login => 'username',
+			user => 'username',
+			password => 'password',
+		       }
+	      );
+
+# Map each credential protocol token to itself on the netrc side.
+foreach (values %{$options{tmap}}) {
+	$options{tmap}->{$_} = $_;
+}
+
+# Now, $options{tmap} has a mapping from the netrc format to the Git credential
+# helper protocol.
+
+# Next, we build the reverse token map.
+
+# When $rmap{foo} contains 'bar', that means that what the Git credential helper
+# protocol calls 'bar' is found as 'foo' in the netrc/authinfo file.  Keys in
+# %rmap are what we expect to read from the netrc/authinfo file.
+
+my %rmap;
+foreach my $k (keys %{$options{tmap}}) {
+	push @{$rmap{$options{tmap}->{$k}}}, $k;
+}
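+
+# For example, $rmap{username} will end up containing [ 'login', 'user',
+# 'username' ] (in hash-iteration order), since all three netrc tokens map to
+# the credential token 'username'.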
+
+Getopt::Long::Configure("bundling");
+
+# TODO: maybe allow the token map $options{tmap} to be configurable.
+GetOptions(\%options,
+           "help|h",
+           "debug|d",
+           "insecure|k",
+           "verbose|v",
+           "file|f=s@",
+           'gpg|g:s',
+          );
+
+if ($options{help}) {
+	my $shortname = basename($0);
+	$shortname =~ s/git-credential-//;
+
+	print <<EOHIPPUS;
+
+$0 [(-f <authfile>)...] [-g <program>] [-d] [-v] [-k] get
+
+Version $VERSION by tzz\@lifelogs.com.  License: BSD.
+
+Options:
+
+  -f|--file <authfile>: specify netrc-style files.  Files with the .gpg
+                        extension will be decrypted by GPG before parsing.
+                        Multiple -f arguments are OK.  They are processed in
+                        order, and the first matching entry found is returned
+                        via the credential helper protocol (see below).
+
+                        When no -f option is given, .authinfo.gpg, .netrc.gpg,
+                        .authinfo, and .netrc files in your home directory are
+                        used in this order.
+
+  -g|--gpg <program>  : specify the program for GPG. By default, this is the
+                        value of the gpg.program option from the repository or
+                        global git configuration, falling back to gpg.
+
+  -k|--insecure       : ignore bad file ownership or permissions
+
+  -d|--debug          : turn on debugging (developer info)
+
+  -v|--verbose        : be more verbose (show files and information found)
+
+To enable this credential helper:
+
+  git config credential.helper '$shortname -f AUTHFILE1 -f AUTHFILE2'
+
+(Note that Git will prepend "git-credential-" to the helper name and look for it
+in the path.)
+
+...and if you want lots of debugging info:
+
+  git config credential.helper '$shortname -f AUTHFILE -d'
+
+...or to see the files opened and data found:
+
+  git config credential.helper '$shortname -f AUTHFILE -v'
+
+Only "get" mode is supported by this credential helper.  It opens every
+<authfile> and looks for the first entry that matches the requested search
+criteria:
+
+ 'port|protocol':
+   The protocol that will be used (e.g., https). (protocol=X)
+
+ 'machine|host':
+   The remote hostname for a network credential. (host=X)
+
+ 'path':
+   The path with which the credential will be used. (path=X)
+
+ 'login|user|username':
+   The credential’s username, if we already have one. (username=X)
+
+Thus, when we get this query on STDIN:
+
+host=github.com
+protocol=https
+username=tzz
+
+this credential helper will look for the first entry in every <authfile> that
+matches
+
+machine github.com port https login tzz
+
+OR
+
+machine github.com protocol https login tzz
+
+OR... and so on, using any of the acceptable tokens listed above.  Any unknown
+tokens are simply ignored.
+
+Then, the helper will print out whatever tokens it got from the entry, including
+"password" tokens, mapping back to Git's helper protocol; e.g. "port" is mapped
+back to "protocol".  Any redundant entry tokens (part of the original query) are
+skipped.
+
+Again, note that only the first matching entry from all the <authfile>s,
+processed in the sequence given on the command line, is used.
+
+Netrc/authinfo tokens can be quoted as 'STRING' or "STRING".
+
+No caching is performed by this credential helper.
+
+EOHIPPUS
+
+	exit 0;
+}
+
+my $mode = shift @ARGV;
+
+# Credentials must get a parameter, so die if it's missing.
+die "Syntax: $0 [(-f <authfile>)...] [-d] get" unless defined $mode;
+
+# Only support 'get' mode; for any other (unsupported) mode we just exit.
+exit 0 unless $mode eq 'get';
+
+my $files = $options{file};
+
+# if no files were given, use a predefined list.
+# note that .gpg files come first
+unless (scalar @$files) {
+	my @candidates = qw[
+				   ~/.authinfo.gpg
+				   ~/.netrc.gpg
+				   ~/.authinfo
+				   ~/.netrc
+			  ];
+
+	$files = $options{file} = [ map { glob $_ } @candidates ];
+}
+
+load_config(\%options);
+
+my $query = read_credential_data_from_stdin();
+
+FILE:
+foreach my $file (@$files) {
+	my $gpgmode = $file =~ m/\.gpg$/;
+	unless (-r $file) {
+		log_verbose("Unable to read $file; skipping it");
+		next FILE;
+	}
+
+	# the following check is copied from Net::Netrc, for non-GPG files
+	# OS/2 and Win32 do not handle stat in a way compatible with this check :-(
+	unless ($gpgmode || $options{insecure} ||
+		$^O eq 'os2'
+		|| $^O eq 'MSWin32'
+		|| $^O eq 'MacOS'
+		|| $^O =~ /^cygwin/) {
+		my @stat = stat($file);
+
+		if (@stat) {
+			if ($stat[2] & 077) {
+				log_verbose("Insecure $file (mode=%04o); skipping it",
+					    $stat[2] & 07777);
+				next FILE;
+			}
+
+			if ($stat[4] != $<) {
+				log_verbose("Not owner of $file; skipping it");
+				next FILE;
+			}
+		}
+	}
+
+	my @entries = load_netrc($file, $gpgmode);
+
+	unless (scalar @entries) {
+		if ($!) {
+			log_verbose("Unable to open $file: $!");
+		} else {
+			log_verbose("No netrc entries found in $file");
+		}
+
+		next FILE;
+	}
+
+	my $entry = find_netrc_entry($query, @entries);
+	if ($entry) {
+		print_credential_data($entry, $query);
+		# we're done!
+		last FILE;
+	}
+}
+
+exit 0;
+
+sub load_netrc {
+	my $file = shift @_;
+	my $gpgmode = shift @_;
+
+	my $io;
+	if ($gpgmode) {
+		my @cmd = ($options{'gpg'}, qw(--decrypt), $file);
+		log_verbose("Using GPG to open $file: [@cmd]");
+		open $io, "-|", @cmd;
+	} else {
+		log_verbose("Opening $file...");
+		open $io, '<', $file;
+	}
+
+	# nothing to do if the open failed (we log the error later)
+	return unless $io;
+
+	# Net::Netrc does this, but the functionality is merged with the file
+	# detection logic, so we have to extract just the part we need
+	my @netrc_entries = net_netrc_loader($io);
+
+	# these entries will use the credential helper protocol token names
+	my @entries;
+
+	foreach my $nentry (@netrc_entries) {
+		my %entry;
+		my $num_port;
+
+		if (!defined $nentry->{machine}) {
+			next;
+		}
+		if (defined $nentry->{port} && $nentry->{port} =~ m/^\d+$/) {
+			$num_port = $nentry->{port};
+			delete $nentry->{port};
+		}
+
+		# create the new entry for the credential helper protocol
+		$entry{$options{tmap}->{$_}} = $nentry->{$_} foreach keys %$nentry;
+
+		# for "host X port Y" where Y is an integer (captured by
+		# $num_port above), set the host to "X:Y"
+		if (defined $entry{host} && defined $num_port) {
+			$entry{host} = join(':', $entry{host}, $num_port);
+		}
+
+		push @entries, \%entry;
+	}
+
+	return @entries;
+}
+
+sub net_netrc_loader {
+	my $fh = shift @_;
+	my @entries;
+	my ($mach, $macdef, $tok, @tok);
+
+    LINE:
+	while (<$fh>) {
+		undef $macdef if /\A\n\Z/;
+
+		if ($macdef) {
+			next LINE;
+		}
+
+		s/^\s*//;
+		chomp;
+
+		while (length && s/^("((?:[^"]+|\\.)*)"|((?:[^\\\s]+|\\.)*))\s*//) {
+			(my $tok = $+) =~ s/\\(.)/$1/g;
+			push(@tok, $tok);
+		}
+
+	    TOKEN:
+		while (@tok) {
+			if ($tok[0] eq "default") {
+				shift(@tok);
+				$mach = { machine => undef };
+				next TOKEN;
+			}
+
+			$tok = shift(@tok);
+
+			if ($tok eq "machine") {
+				my $host = shift @tok;
+				$mach = { machine => $host };
+				push @entries, $mach;
+			} elsif (exists $options{tmap}->{$tok}) {
+				unless ($mach) {
+					log_debug("Skipping token $tok because no machine was given");
+					next TOKEN;
+				}
+
+				my $value = shift @tok;
+				unless (defined $value) {
+					log_debug("Token $tok had no value, skipping it.");
+					next TOKEN;
+				}
+
+				# Following line added by rmerrell to remove '/' escape char in .netrc
+				$value =~ s/\/\\/\\/g;
+				$mach->{$tok} = $value;
+			} elsif ($tok eq "macdef") { # we ignore macros
+				next TOKEN unless $mach;
+				my $value = shift @tok;
+				$macdef = 1;
+			}
+		}
+	}
+
+	return @entries;
+}
+
+sub read_credential_data_from_stdin {
+	# the query: start with every token with no value
+	my %q = map { $_ => undef } values(%{$options{tmap}});
+
+	while (<STDIN>) {
+		next unless m/^([^=]+)=(.+)/;
+
+		my ($token, $value) = ($1, $2);
+		die "Unknown search token $token" unless exists $q{$token};
+		$q{$token} = $value;
+		log_debug("We were given search token $token and value $value");
+	}
+
+	foreach (sort keys %q) {
+		log_debug("Searching for %s = %s", $_, $q{$_} || '(any value)');
+	}
+
+	return \%q;
+}
+
+# takes the search tokens and then a list of entries
+# each entry is a hash reference
+sub find_netrc_entry {
+	my $query = shift @_;
+
+    ENTRY:
+	foreach my $entry (@_)
+	{
+		my $entry_text = join ', ', map { "$_=$entry->{$_}" } keys %$entry;
+		foreach my $check (sort keys %$query) {
+			if (!defined $entry->{$check}) {
+			        log_debug("OK: entry has no $check token, so any value satisfies check $check");
+			} elsif (defined $query->{$check}) {
+				log_debug("compare %s [%s] to [%s] (entry: %s)",
+					  $check,
+					  $entry->{$check},
+					  $query->{$check},
+					  $entry_text);
+				unless ($query->{$check} eq $entry->{$check}) {
+					next ENTRY;
+				}
+			} else {
+				log_debug("OK: any value satisfies check $check");
+			}
+		}
+
+		return $entry;
+	}
+
+	# nothing was found
+	return;
+}
+
+sub print_credential_data {
+	my $entry = shift @_;
+	my $query = shift @_;
+
+	log_debug("entry has passed all the search checks");
+ TOKEN:
+	foreach my $git_token (sort keys %$entry) {
+		log_debug("looking for useful token $git_token");
+		# don't print unknown (to the credential helper protocol) tokens
+		next TOKEN unless exists $query->{$git_token};
+
+		# don't print things asked in the query (the entry matches them)
+		next TOKEN if defined $query->{$git_token};
+
+		log_debug("FOUND: $git_token=$entry->{$git_token}");
+		printf "%s=%s\n", $git_token, $entry->{$git_token};
+	}
+}
+sub load_config {
+	# load settings from git config
+	my $options = shift;
+	# set from command argument, gpg.program option, or default to gpg
+	$options->{'gpg'} //= Git::config('gpg.program')
+	                  // 'gpg';
+	log_verbose("using $options{'gpg'} for GPG operations");
+}
+sub log_verbose {
+	return unless $options{verbose};
+	printf STDERR @_;
+	printf STDERR "\n";
+}
+
+sub log_debug {
+	return unless $options{debug};
+	printf STDERR @_;
+	printf STDERR "\n";
+}
diff --git a/third_party/git/contrib/credential/netrc/t-git-credential-netrc.sh b/third_party/git/contrib/credential/netrc/t-git-credential-netrc.sh
new file mode 100755
index 000000000000..07227d022876
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/t-git-credential-netrc.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+(
+	cd ../../../t
+	test_description='git-credential-netrc'
+	. ./test-lib.sh
+
+	if ! test_have_prereq PERL; then
+		skip_all='skipping perl interface tests, perl not available'
+		test_done
+	fi
+
+	perl -MTest::More -e 0 2>/dev/null || {
+		skip_all="Perl Test::More unavailable, skipping test"
+		test_done
+	}
+
+	# set up test repository
+
+	test_expect_success \
+		'set up test repository' \
+		'git config --add gpg.program test.git-config-gpg'
+
+	# The external test will output its own plan
+	test_external_has_tap=1
+
+	export PERL5LIB="$GITPERLLIB"
+	test_external \
+		'git-credential-netrc' \
+		perl "$GIT_BUILD_DIR"/contrib/credential/netrc/test.pl
+
+	test_done
+)
diff --git a/third_party/git/contrib/credential/netrc/test.command-option-gpg b/third_party/git/contrib/credential/netrc/test.command-option-gpg
new file mode 100755
index 000000000000..d8f1285d416d
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/test.command-option-gpg
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo machine command-option-gpg login username password password
diff --git a/third_party/git/contrib/credential/netrc/test.git-config-gpg b/third_party/git/contrib/credential/netrc/test.git-config-gpg
new file mode 100755
index 000000000000..65cf594c2001
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/test.git-config-gpg
@@ -0,0 +1,2 @@
+#!/bin/sh
+echo machine git-config-gpg login username password password
diff --git a/third_party/git/contrib/credential/netrc/test.netrc b/third_party/git/contrib/credential/netrc/test.netrc
new file mode 100644
index 000000000000..ba119a937f67
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/test.netrc
@@ -0,0 +1,13 @@
+machine imap login tzz@lifelogs.com port imaps password letmeknow
+machine imap login bob port imaps password bobwillknow
+
+# comment test
+
+machine imap2 login tzz port 1099 password tzzknow
+machine imap2 login bob password bobwillknow
+
+# another comment
+
+machine github.com
+  multilinetoken anothervalue
+  login carol password carolknows
diff --git a/third_party/git/contrib/credential/netrc/test.netrc.gpg b/third_party/git/contrib/credential/netrc/test.netrc.gpg
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/test.netrc.gpg
diff --git a/third_party/git/contrib/credential/netrc/test.pl b/third_party/git/contrib/credential/netrc/test.pl
new file mode 100755
index 000000000000..c0fb3718b280
--- /dev/null
+++ b/third_party/git/contrib/credential/netrc/test.pl
@@ -0,0 +1,139 @@
+#!/usr/bin/perl
+
+use warnings;
+use strict;
+use Test::More qw(no_plan);
+use File::Basename;
+use File::Spec::Functions qw(:DEFAULT rel2abs);
+use IPC::Open2;
+
+BEGIN {
+	# t-git-credential-netrc.sh kicks off our testing, so we have to go
+	# from there.
+	Test::More->builder->current_test(1);
+}
+
+my @global_credential_args = @ARGV;
+my $scriptDir = dirname rel2abs $0;
+my ($netrc, $netrcGpg, $gcNetrc) = map { catfile $scriptDir, $_; }
+                                       qw(test.netrc
+                                          test.netrc.gpg
+                                          git-credential-netrc);
+local $ENV{PATH} = join ':'
+                      , $scriptDir
+                      , $ENV{PATH}
+                      ? $ENV{PATH}
+                      : ();
+
+diag "Testing insecure file, nothing should be found\n";
+chmod 0644, $netrc;
+my $cred = run_credential(['-f', $netrc, 'get'],
+			  { host => 'github.com' });
+
+ok(scalar keys %$cred == 0, "Got 0 keys from insecure file");
+
+diag "Testing missing file, nothing should be found\n";
+chmod 0644, $netrc;
+$cred = run_credential(['-f', '///nosuchfile///', 'get'],
+		       { host => 'github.com' });
+
+ok(scalar keys %$cred == 0, "Got 0 keys from missing file");
+
+chmod 0600, $netrc;
+
+diag "Testing with invalid data\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+		       "bad data");
+ok(scalar keys %$cred == 4, "Got first found keys with bad data");
+
+diag "Testing netrc file for a missing corovamilkbar entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+		       { host => 'corovamilkbar' });
+
+ok(scalar keys %$cred == 0, "Got no corovamilkbar keys");
+
+diag "Testing netrc file for a github.com entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+		       { host => 'github.com' });
+
+ok(scalar keys %$cred == 2, "Got 2 Github keys");
+
+is($cred->{password}, 'carolknows', "Got correct Github password");
+is($cred->{username}, 'carol', "Got correct Github username");
+
+diag "Testing netrc file for a username-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+		       { host => 'imap', username => 'bob' });
+
+ok(scalar keys %$cred == 2, "Got 2 username-specific keys");
+
+is($cred->{password}, 'bobwillknow', "Got correct user-specific password");
+is($cred->{protocol}, 'imaps', "Got correct user-specific protocol");
+
+diag "Testing netrc file for a host:port-specific entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+		       { host => 'imap2:1099' });
+
+ok(scalar keys %$cred == 2, "Got 2 host:port-specific keys");
+
+is($cred->{password}, 'tzzknow', "Got correct host:port-specific password");
+is($cred->{username}, 'tzz', "Got correct host:port-specific username");
+
+diag "Testing netrc file that 'host:port kills host' entry\n";
+$cred = run_credential(['-f', $netrc, 'get'],
+		       { host => 'imap2' });
+
+ok(scalar keys %$cred == 2, "Got 2 'host:port kills host' keys");
+
+is($cred->{password}, 'bobwillknow', "Got correct 'host:port kills host' password");
+is($cred->{username}, 'bob', "Got correct 'host:port kills host' username");
+
+diag "Testing netrc file decryption by git config gpg.program setting\n";
+$cred = run_credential( ['-f', $netrcGpg, 'get']
+                      , { host => 'git-config-gpg' }
+                      );
+
+ok(scalar keys %$cred == 2, 'Got keys decrypted by git config option');
+
+diag "Testing netrc file decryption by gpg option\n";
+$cred = run_credential( ['-f', $netrcGpg, '-g', 'test.command-option-gpg', 'get']
+                      , { host => 'command-option-gpg' }
+                      );
+
+ok(scalar keys %$cred == 2, 'Got keys decrypted by command option');
+
+my $is_passing = eval { Test::More->is_passing };
+exit($is_passing ? 0 : 1) unless $@ =~ /Can't locate object method/;
+
+sub run_credential
+{
+	my $args = shift @_;
+	my $data = shift @_;
+	my $pid = open2(my $chld_out, my $chld_in,
+			$gcNetrc, @global_credential_args,
+			@$args);
+
+	die "Couldn't open pipe to netrc credential helper: $!" unless $pid;
+
+	if (ref $data eq 'HASH')
+	{
+		print $chld_in "$_=$data->{$_}\n" foreach sort keys %$data;
+	}
+	else
+	{
+		print $chld_in "$data\n";
+	}
+
+	close $chld_in;
+	my %ret;
+
+	while (<$chld_out>)
+	{
+		chomp;
+		next unless m/^([^=]+)=(.+)/;
+
+		$ret{$1} = $2;
+	}
+
+	return \%ret;
+}
diff --git a/third_party/git/contrib/credential/osxkeychain/.gitignore b/third_party/git/contrib/credential/osxkeychain/.gitignore
new file mode 100644
index 000000000000..6c5b7026c53d
--- /dev/null
+++ b/third_party/git/contrib/credential/osxkeychain/.gitignore
@@ -0,0 +1 @@
+git-credential-osxkeychain
diff --git a/third_party/git/contrib/credential/osxkeychain/Makefile b/third_party/git/contrib/credential/osxkeychain/Makefile
new file mode 100644
index 000000000000..4b3a08a2bac4
--- /dev/null
+++ b/third_party/git/contrib/credential/osxkeychain/Makefile
@@ -0,0 +1,17 @@
+all:: git-credential-osxkeychain
+
+CC = gcc
+RM = rm -f
+CFLAGS = -g -O2 -Wall
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+git-credential-osxkeychain: git-credential-osxkeychain.o
+	$(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) -Wl,-framework -Wl,Security
+
+git-credential-osxkeychain.o: git-credential-osxkeychain.c
+	$(CC) -c $(CFLAGS) $<
+
+clean:
+	$(RM) git-credential-osxkeychain git-credential-osxkeychain.o
diff --git a/third_party/git/contrib/credential/osxkeychain/git-credential-osxkeychain.c b/third_party/git/contrib/credential/osxkeychain/git-credential-osxkeychain.c
new file mode 100644
index 000000000000..bcd3f575a3e3
--- /dev/null
+++ b/third_party/git/contrib/credential/osxkeychain/git-credential-osxkeychain.c
@@ -0,0 +1,183 @@
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <Security/Security.h>
+
+static SecProtocolType protocol;
+static char *host;
+static char *path;
+static char *username;
+static char *password;
+static UInt16 port;
+
+static void die(const char *err, ...)
+{
+	char msg[4096];
+	va_list params;
+	va_start(params, err);
+	vsnprintf(msg, sizeof(msg), err, params);
+	fprintf(stderr, "%s\n", msg);
+	va_end(params);
+	exit(1);
+}
+
+static void *xstrdup(const char *s1)
+{
+	void *ret = strdup(s1);
+	if (!ret)
+		die("Out of memory");
+	return ret;
+}
+
+#define KEYCHAIN_ITEM(x) (x ? strlen(x) : 0), x
+#define KEYCHAIN_ARGS \
+	NULL, /* default keychain */ \
+	KEYCHAIN_ITEM(host), \
+	0, NULL, /* account domain */ \
+	KEYCHAIN_ITEM(username), \
+	KEYCHAIN_ITEM(path), \
+	port, \
+	protocol, \
+	kSecAuthenticationTypeDefault
+
+static void write_item(const char *what, const char *buf, int len)
+{
+	printf("%s=", what);
+	fwrite(buf, 1, len, stdout);
+	putchar('\n');
+}
+
+static void find_username_in_item(SecKeychainItemRef item)
+{
+	SecKeychainAttributeList list;
+	SecKeychainAttribute attr;
+
+	list.count = 1;
+	list.attr = &attr;
+	attr.tag = kSecAccountItemAttr;
+
+	if (SecKeychainItemCopyContent(item, NULL, &list, NULL, NULL))
+		return;
+
+	write_item("username", attr.data, attr.length);
+	SecKeychainItemFreeContent(&list, NULL);
+}
+
+static void find_internet_password(void)
+{
+	void *buf;
+	UInt32 len;
+	SecKeychainItemRef item;
+
+	if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, &len, &buf, &item))
+		return;
+
+	write_item("password", buf, len);
+	if (!username)
+		find_username_in_item(item);
+
+	SecKeychainItemFreeContent(NULL, buf);
+}
+
+static void delete_internet_password(void)
+{
+	SecKeychainItemRef item;
+
+	/*
+	 * Require at least a protocol and host for removal, which is what git
+	 * will give us; if you want to do something more fancy, use the
+	 * Keychain manager.
+	 */
+	if (!protocol || !host)
+		return;
+
+	if (SecKeychainFindInternetPassword(KEYCHAIN_ARGS, 0, NULL, &item))
+		return;
+
+	SecKeychainItemDelete(item);
+}
+
+static void add_internet_password(void)
+{
+	/* Only store complete credentials */
+	if (!protocol || !host || !username || !password)
+		return;
+
+	if (SecKeychainAddInternetPassword(
+	      KEYCHAIN_ARGS,
+	      KEYCHAIN_ITEM(password),
+	      NULL))
+		return;
+}
+
+static void read_credential(void)
+{
+	char buf[1024];
+
+	while (fgets(buf, sizeof(buf), stdin)) {
+		char *v;
+
+		if (!strcmp(buf, "\n"))
+			break;
+		buf[strlen(buf)-1] = '\0';
+
+		v = strchr(buf, '=');
+		if (!v)
+			die("bad input: %s", buf);
+		*v++ = '\0';
+
+		if (!strcmp(buf, "protocol")) {
+			if (!strcmp(v, "imap"))
+				protocol = kSecProtocolTypeIMAP;
+			else if (!strcmp(v, "imaps"))
+				protocol = kSecProtocolTypeIMAPS;
+			else if (!strcmp(v, "ftp"))
+				protocol = kSecProtocolTypeFTP;
+			else if (!strcmp(v, "ftps"))
+				protocol = kSecProtocolTypeFTPS;
+			else if (!strcmp(v, "https"))
+				protocol = kSecProtocolTypeHTTPS;
+			else if (!strcmp(v, "http"))
+				protocol = kSecProtocolTypeHTTP;
+			else if (!strcmp(v, "smtp"))
+				protocol = kSecProtocolTypeSMTP;
+			else /* we don't yet handle other protocols */
+				exit(0);
+		}
+		else if (!strcmp(buf, "host")) {
+			char *colon = strchr(v, ':');
+			if (colon) {
+				*colon++ = '\0';
+				port = atoi(colon);
+			}
+			host = xstrdup(v);
+		}
+		else if (!strcmp(buf, "path"))
+			path = xstrdup(v);
+		else if (!strcmp(buf, "username"))
+			username = xstrdup(v);
+		else if (!strcmp(buf, "password"))
+			password = xstrdup(v);
+	}
+}
+
+int main(int argc, const char **argv)
+{
+	const char *usage =
+		"usage: git credential-osxkeychain <get|store|erase>";
+
+	if (!argv[1])
+		die(usage);
+
+	read_credential();
+
+	if (!strcmp(argv[1], "get"))
+		find_internet_password();
+	else if (!strcmp(argv[1], "store"))
+		add_internet_password();
+	else if (!strcmp(argv[1], "erase"))
+		delete_internet_password();
+	/* otherwise, ignore unknown action */
+
+	return 0;
+}
diff --git a/third_party/git/contrib/credential/wincred/Makefile b/third_party/git/contrib/credential/wincred/Makefile
new file mode 100644
index 000000000000..6e992c08667d
--- /dev/null
+++ b/third_party/git/contrib/credential/wincred/Makefile
@@ -0,0 +1,22 @@
+all: git-credential-wincred.exe
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+CC ?= gcc
+RM ?= rm -f
+CFLAGS ?= -O2 -Wall
+
+prefix ?= /usr/local
+libexecdir ?= $(prefix)/libexec/git-core
+
+INSTALL ?= install
+
+git-credential-wincred.exe : git-credential-wincred.c
+	$(LINK.c) $^ $(LOADLIBES) $(LDLIBS) -o $@
+
+install: git-credential-wincred.exe
+	$(INSTALL) -m 755 $^ $(libexecdir)
+
+clean:
+	$(RM) git-credential-wincred.exe
diff --git a/third_party/git/contrib/credential/wincred/git-credential-wincred.c b/third_party/git/contrib/credential/wincred/git-credential-wincred.c
new file mode 100644
index 000000000000..5bdad41de1f8
--- /dev/null
+++ b/third_party/git/contrib/credential/wincred/git-credential-wincred.c
@@ -0,0 +1,327 @@
+/*
+ * A git credential helper that interfaces with the Windows Credential Manager.
+ *
+ */
+#include <windows.h>
+#include <stdio.h>
+#include <io.h>
+#include <fcntl.h>
+
+/* common helpers */
+
+#define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0]))
+
+static void die(const char *err, ...)
+{
+	char msg[4096];
+	va_list params;
+	va_start(params, err);
+	vsnprintf(msg, sizeof(msg), err, params);
+	fprintf(stderr, "%s\n", msg);
+	va_end(params);
+	exit(1);
+}
+
+static void *xmalloc(size_t size)
+{
+	void *ret = malloc(size);
+	if (!ret && !size)
+		ret = malloc(1);
+	if (!ret)
+		 die("Out of memory");
+	return ret;
+}
+
+/* MinGW doesn't have wincred.h, so we need to define stuff */
+
+typedef struct _CREDENTIAL_ATTRIBUTEW {
+	LPWSTR Keyword;
+	DWORD  Flags;
+	DWORD  ValueSize;
+	LPBYTE Value;
+} CREDENTIAL_ATTRIBUTEW, *PCREDENTIAL_ATTRIBUTEW;
+
+typedef struct _CREDENTIALW {
+	DWORD                  Flags;
+	DWORD                  Type;
+	LPWSTR                 TargetName;
+	LPWSTR                 Comment;
+	FILETIME               LastWritten;
+	DWORD                  CredentialBlobSize;
+	LPBYTE                 CredentialBlob;
+	DWORD                  Persist;
+	DWORD                  AttributeCount;
+	PCREDENTIAL_ATTRIBUTEW Attributes;
+	LPWSTR                 TargetAlias;
+	LPWSTR                 UserName;
+} CREDENTIALW, *PCREDENTIALW;
+
+#define CRED_TYPE_GENERIC 1
+#define CRED_PERSIST_LOCAL_MACHINE 2
+#define CRED_MAX_ATTRIBUTES 64
+
+typedef BOOL (WINAPI *CredWriteWT)(PCREDENTIALW, DWORD);
+typedef BOOL (WINAPI *CredEnumerateWT)(LPCWSTR, DWORD, DWORD *,
+    PCREDENTIALW **);
+typedef VOID (WINAPI *CredFreeT)(PVOID);
+typedef BOOL (WINAPI *CredDeleteWT)(LPCWSTR, DWORD, DWORD);
+
+static HMODULE advapi;
+static CredWriteWT CredWriteW;
+static CredEnumerateWT CredEnumerateW;
+static CredFreeT CredFree;
+static CredDeleteWT CredDeleteW;
+
+static void load_cred_funcs(void)
+{
+	/* load DLLs */
+	advapi = LoadLibraryExA("advapi32.dll", NULL,
+				LOAD_LIBRARY_SEARCH_SYSTEM32);
+	if (!advapi)
+		die("failed to load advapi32.dll");
+
+	/* get function pointers */
+	CredWriteW = (CredWriteWT)GetProcAddress(advapi, "CredWriteW");
+	CredEnumerateW = (CredEnumerateWT)GetProcAddress(advapi,
+	    "CredEnumerateW");
+	CredFree = (CredFreeT)GetProcAddress(advapi, "CredFree");
+	CredDeleteW = (CredDeleteWT)GetProcAddress(advapi, "CredDeleteW");
+	if (!CredWriteW || !CredEnumerateW || !CredFree || !CredDeleteW)
+		die("failed to load functions");
+}
+
+static WCHAR *wusername, *password, *protocol, *host, *path, target[1024];
+
+static void write_item(const char *what, LPCWSTR wbuf, int wlen)
+{
+	char *buf;
+
+	if (!wbuf || !wlen) {
+		printf("%s=\n", what);
+		return;
+	}
+
+	int len = WideCharToMultiByte(CP_UTF8, 0, wbuf, wlen, NULL, 0, NULL,
+	    FALSE);
+	buf = xmalloc(len);
+
+	if (!WideCharToMultiByte(CP_UTF8, 0, wbuf, wlen, buf, len, NULL, FALSE))
+		die("WideCharToMultiByte failed!");
+
+	printf("%s=", what);
+	fwrite(buf, 1, len, stdout);
+	putchar('\n');
+	free(buf);
+}
+
+/*
+ * Match an (optional) expected string and a delimiter in the target string,
+ * consuming the matched text by updating the target pointer.
+ */
+
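+/* Return a pointer to the last occurrence of "find" within "str", or NULL. */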
+static LPCWSTR wcsstr_last(LPCWSTR str, LPCWSTR find)
+{
+	LPCWSTR res = NULL, pos;
+	for (pos = wcsstr(str, find); pos; pos = wcsstr(pos + 1, find))
+		res = pos;
+	return res;
+}
+
+static int match_part_with_last(LPCWSTR *ptarget, LPCWSTR want, LPCWSTR delim, int last)
+{
+	LPCWSTR delim_pos, start = *ptarget;
+	int len;
+
+	/* find start of delimiter (or end-of-string if delim is empty) */
+	if (*delim)
+		delim_pos = last ? wcsstr_last(start, delim) : wcsstr(start, delim);
+	else
+		delim_pos = start + wcslen(start);
+
+	/*
+	 * match text up to delimiter, or end of string (e.g. the '/' after
+	 * host is optional if not followed by a path)
+	 */
+	if (delim_pos)
+		len = delim_pos - start;
+	else
+		len = wcslen(start);
+
+	/* update ptarget if we either found a delimiter or need a match */
+	if (delim_pos || want)
+		*ptarget = delim_pos ? delim_pos + wcslen(delim) : start + len;
+
+	return !want || (!wcsncmp(want, start, len) && !want[len]);
+}
+
+static int match_part(LPCWSTR *ptarget, LPCWSTR want, LPCWSTR delim)
+{
+	return match_part_with_last(ptarget, want, delim, 0);
+}
+
+static int match_part_last(LPCWSTR *ptarget, LPCWSTR want, LPCWSTR delim)
+{
+	return match_part_with_last(ptarget, want, delim, 1);
+}
+
+static int match_cred(const CREDENTIALW *cred)
+{
+	LPCWSTR target = cred->TargetName;
+	if (wusername && wcscmp(wusername, cred->UserName ? cred->UserName : L""))
+		return 0;
+
+	return match_part(&target, L"git", L":") &&
+		match_part(&target, protocol, L"://") &&
+		match_part_last(&target, wusername, L"@") &&
+		match_part(&target, host, L"/") &&
+		match_part(&target, path, L"");
+}
+
+static void get_credential(void)
+{
+	CREDENTIALW **creds;
+	DWORD num_creds;
+	int i;
+
+	if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds))
+		return;
+
+	/* search for the first credential that matches username */
+	for (i = 0; i < num_creds; ++i)
+		if (match_cred(creds[i])) {
+			write_item("username", creds[i]->UserName,
+				creds[i]->UserName ? wcslen(creds[i]->UserName) : 0);
+			write_item("password",
+				(LPCWSTR)creds[i]->CredentialBlob,
+				creds[i]->CredentialBlobSize / sizeof(WCHAR));
+			break;
+		}
+
+	CredFree(creds);
+}
+
+static void store_credential(void)
+{
+	CREDENTIALW cred;
+
+	if (!wusername || !password)
+		return;
+
+	cred.Flags = 0;
+	cred.Type = CRED_TYPE_GENERIC;
+	cred.TargetName = target;
+	cred.Comment = L"saved by git-credential-wincred";
+	cred.CredentialBlobSize = (wcslen(password)) * sizeof(WCHAR);
+	cred.CredentialBlob = (LPVOID)password;
+	cred.Persist = CRED_PERSIST_LOCAL_MACHINE;
+	cred.AttributeCount = 0;
+	cred.Attributes = NULL;
+	cred.TargetAlias = NULL;
+	cred.UserName = wusername;
+
+	if (!CredWriteW(&cred, 0))
+		die("CredWrite failed");
+}
+
+static void erase_credential(void)
+{
+	CREDENTIALW **creds;
+	DWORD num_creds;
+	int i;
+
+	if (!CredEnumerateW(L"git:*", 0, &num_creds, &creds))
+		return;
+
+	for (i = 0; i < num_creds; ++i) {
+		if (match_cred(creds[i]))
+			CredDeleteW(creds[i]->TargetName, creds[i]->Type, 0);
+	}
+
+	CredFree(creds);
+}
+
+static WCHAR *utf8_to_utf16_dup(const char *str)
+{
+	int wlen = MultiByteToWideChar(CP_UTF8, 0, str, -1, NULL, 0);
+	WCHAR *wstr = xmalloc(sizeof(WCHAR) * wlen);
+	MultiByteToWideChar(CP_UTF8, 0, str, -1, wstr, wlen);
+	return wstr;
+}
+
+static void read_credential(void)
+{
+	char buf[1024];
+
+	while (fgets(buf, sizeof(buf), stdin)) {
+		char *v;
+		int len = strlen(buf);
+		/* strip trailing CR / LF */
+		while (len && strchr("\r\n", buf[len - 1]))
+			buf[--len] = 0;
+
+		if (!*buf)
+			break;
+
+		v = strchr(buf, '=');
+		if (!v)
+			die("bad input: %s", buf);
+		*v++ = '\0';
+
+		if (!strcmp(buf, "protocol"))
+			protocol = utf8_to_utf16_dup(v);
+		else if (!strcmp(buf, "host"))
+			host = utf8_to_utf16_dup(v);
+		else if (!strcmp(buf, "path"))
+			path = utf8_to_utf16_dup(v);
+		else if (!strcmp(buf, "username")) {
+			wusername = utf8_to_utf16_dup(v);
+		} else if (!strcmp(buf, "password"))
+			password = utf8_to_utf16_dup(v);
+		else
+			die("unrecognized input");
+	}
+}
+
+int main(int argc, char *argv[])
+{
+	const char *usage =
+	    "usage: git credential-wincred <get|store|erase>\n";
+
+	if (!argv[1])
+		die(usage);
+
+	/* git uses binary pipes to avoid CRLF issues */
+	_setmode(_fileno(stdin), _O_BINARY);
+	_setmode(_fileno(stdout), _O_BINARY);
+
+	read_credential();
+
+	load_cred_funcs();
+
+	if (!protocol || !(host || path))
+		return 0;
+
+	/* prepare 'target', the unique key for the credential */
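+	/* e.g. "git:https://alice@example.com/repo.git" (illustrative) */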
+	wcscpy(target, L"git:");
+	wcsncat(target, protocol, ARRAY_SIZE(target));
+	wcsncat(target, L"://", ARRAY_SIZE(target));
+	if (wusername) {
+		wcsncat(target, wusername, ARRAY_SIZE(target));
+		wcsncat(target, L"@", ARRAY_SIZE(target));
+	}
+	if (host)
+		wcsncat(target, host, ARRAY_SIZE(target));
+	if (path) {
+		wcsncat(target, L"/", ARRAY_SIZE(target));
+		wcsncat(target, path, ARRAY_SIZE(target));
+	}
+
+	if (!strcmp(argv[1], "get"))
+		get_credential();
+	else if (!strcmp(argv[1], "store"))
+		store_credential();
+	else if (!strcmp(argv[1], "erase"))
+		erase_credential();
+	/* otherwise, ignore unknown action */
+	return 0;
+}
diff --git a/third_party/git/contrib/diff-highlight/.gitignore b/third_party/git/contrib/diff-highlight/.gitignore
new file mode 100644
index 000000000000..c07454824e3a
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/.gitignore
@@ -0,0 +1,2 @@
+shebang.perl
+diff-highlight
diff --git a/third_party/git/contrib/diff-highlight/DiffHighlight.pm b/third_party/git/contrib/diff-highlight/DiffHighlight.pm
new file mode 100644
index 000000000000..e2589922a659
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/DiffHighlight.pm
@@ -0,0 +1,285 @@
+package DiffHighlight;
+
+use 5.008;
+use warnings FATAL => 'all';
+use strict;
+
+# Use the correct value for both UNIX and Windows (/dev/null vs nul)
+use File::Spec;
+
+my $NULL = File::Spec->devnull();
+
+# Highlight by reversing foreground and background. You could do
+# other things like bold or underline if you prefer.
+my @OLD_HIGHLIGHT = (
+	color_config('color.diff-highlight.oldnormal'),
+	color_config('color.diff-highlight.oldhighlight', "\x1b[7m"),
+	color_config('color.diff-highlight.oldreset', "\x1b[27m")
+);
+my @NEW_HIGHLIGHT = (
+	color_config('color.diff-highlight.newnormal', $OLD_HIGHLIGHT[0]),
+	color_config('color.diff-highlight.newhighlight', $OLD_HIGHLIGHT[1]),
+	color_config('color.diff-highlight.newreset', $OLD_HIGHLIGHT[2])
+);
+
+my $RESET = "\x1b[m";
+my $COLOR = qr/\x1b\[[0-9;]*m/;
+my $BORING = qr/$COLOR|\s/;
+
+my @removed;
+my @added;
+my $in_hunk;
+my $graph_indent = 0;
+
+our $line_cb = sub { print @_ };
+our $flush_cb = sub { local $| = 1 };
+
+# Count the visible width of a string, excluding any terminal color sequences.
+sub visible_width {
+	local $_ = shift;
+	my $ret = 0;
+	while (length) {
+		if (s/^$COLOR//) {
+			# skip colors
+		} elsif (s/^.//) {
+			$ret++;
+		}
+	}
+	return $ret;
+}
+
+# Return a substring of $str, omitting $len visible characters from the
+# beginning, where terminal color sequences do not count as visible.
+sub visible_substr {
+	my ($str, $len) = @_;
+	while ($len > 0) {
+		if ($str =~ s/^$COLOR//) {
+			next
+		}
+		$str =~ s/^.//;
+		$len--;
+	}
+	return $str;
+}
+
+sub handle_line {
+	my $orig = shift;
+	local $_ = $orig;
+
+	# match a graph line that begins a commit
+	if (/^(?:$COLOR?\|$COLOR?[ ])* # zero or more leading "|" with space
+	         $COLOR?\*$COLOR?[ ]   # a "*" with its trailing space
+	      (?:$COLOR?\|$COLOR?[ ])* # zero or more trailing "|"
+	                         [ ]*  # trailing whitespace for merges
+	    /x) {
+		my $graph_prefix = $&;
+
+		# We must flush before setting graph indent, since the
+		# new commit may be indented differently from what we
+		# queued.
+		flush();
+		$graph_indent = visible_width($graph_prefix);
+
+	} elsif ($graph_indent) {
+		if (length($_) < $graph_indent) {
+			$graph_indent = 0;
+		} else {
+			$_ = visible_substr($_, $graph_indent);
+		}
+	}
+
+	if (!$in_hunk) {
+		$line_cb->($orig);
+		$in_hunk = /^$COLOR*\@\@ /;
+	}
+	elsif (/^$COLOR*-/) {
+		push @removed, $orig;
+	}
+	elsif (/^$COLOR*\+/) {
+		push @added, $orig;
+	}
+	else {
+		flush();
+		$line_cb->($orig);
+		$in_hunk = /^$COLOR*[\@ ]/;
+	}
+
+	# Most of the time there is enough output to keep things streaming,
+	# but for something like "git log -Sfoo", you can get one early
+	# commit and then many seconds of nothing. We want to show
+	# that one commit as soon as possible.
+	#
+	# Since we can receive arbitrary input, there's no optimal
+	# place to flush. Flushing on a blank line is a heuristic that
+	# happens to match git-log output.
+	if (!length) {
+		$flush_cb->();
+	}
+}
+
+sub flush {
+	# Flush any queued hunk (this can happen when there is no trailing
+	# context in the final diff of the input).
+	show_hunk(\@removed, \@added);
+	@removed = ();
+	@added = ();
+}
+
+sub highlight_stdin {
+	while (<STDIN>) {
+		handle_line($_);
+	}
+	flush();
+}
+
+# Ideally we would feed the default as a human-readable color to
+# git-config as the fallback value. But diff-highlight does
+# not otherwise depend on git at all, and there are reports
+# of it being used in other settings. Let's handle our own
+# fallback, which means we will work even if git can't be run.
+sub color_config {
+	my ($key, $default) = @_;
+	my $s = `git config --get-color $key 2>$NULL`;
+	return length($s) ? $s : $default;
+}
+
+sub show_hunk {
+	my ($a, $b) = @_;
+
+	# If one side is empty, then there is nothing to compare or highlight.
+	if (!@$a || !@$b) {
+		$line_cb->(@$a, @$b);
+		return;
+	}
+
+	# If we have mismatched numbers of lines on each side, we could try to
+	# be clever and match up similar lines. But for now we are simple and
+	# stupid, and only handle multi-line hunks that remove and add the same
+	# number of lines.
+	if (@$a != @$b) {
+		$line_cb->(@$a, @$b);
+		return;
+	}
+
+	my @queue;
+	for (my $i = 0; $i < @$a; $i++) {
+		my ($rm, $add) = highlight_pair($a->[$i], $b->[$i]);
+		$line_cb->($rm);
+		push @queue, $add;
+	}
+	$line_cb->(@queue);
+}
+
+sub highlight_pair {
+	my @a = split_line(shift);
+	my @b = split_line(shift);
+
+	# Find common prefix, taking care to skip any ansi
+	# color codes.
+	my $seen_plusminus;
+	my ($pa, $pb) = (0, 0);
+	while ($pa < @a && $pb < @b) {
+		if ($a[$pa] =~ /$COLOR/) {
+			$pa++;
+		}
+		elsif ($b[$pb] =~ /$COLOR/) {
+			$pb++;
+		}
+		elsif ($a[$pa] eq $b[$pb]) {
+			$pa++;
+			$pb++;
+		}
+		elsif (!$seen_plusminus && $a[$pa] eq '-' && $b[$pb] eq '+') {
+			$seen_plusminus = 1;
+			$pa++;
+			$pb++;
+		}
+		else {
+			last;
+		}
+	}
+
+	# Find common suffix, ignoring colors.
+	my ($sa, $sb) = ($#a, $#b);
+	while ($sa >= $pa && $sb >= $pb) {
+		if ($a[$sa] =~ /$COLOR/) {
+			$sa--;
+		}
+		elsif ($b[$sb] =~ /$COLOR/) {
+			$sb--;
+		}
+		elsif ($a[$sa] eq $b[$sb]) {
+			$sa--;
+			$sb--;
+		}
+		else {
+			last;
+		}
+	}
+
+	if (is_pair_interesting(\@a, $pa, $sa, \@b, $pb, $sb)) {
+		return highlight_line(\@a, $pa, $sa, \@OLD_HIGHLIGHT),
+		       highlight_line(\@b, $pb, $sb, \@NEW_HIGHLIGHT);
+	}
+	else {
+		return join('', @a),
+		       join('', @b);
+	}
+}
+
+# We split either by $COLOR or by character. This has the side effect of
+# leaving in graph cruft. It works because the graph cruft does not contain "-"
+# or "+".
+sub split_line {
+	local $_ = shift;
+	return utf8::decode($_) ?
+		map { utf8::encode($_); $_ }
+			map { /$COLOR/ ? $_ : (split //) }
+			split /($COLOR+)/ :
+		map { /$COLOR/ ? $_ : (split //) }
+		split /($COLOR+)/;
+}
+
+sub highlight_line {
+	my ($line, $prefix, $suffix, $theme) = @_;
+
+	my $start = join('', @{$line}[0..($prefix-1)]);
+	my $mid = join('', @{$line}[$prefix..$suffix]);
+	my $end = join('', @{$line}[($suffix+1)..$#$line]);
+
+	# If we have a "normal" color specified, then take over the whole line.
+	# Otherwise, we try to just manipulate the highlighted bits.
+	if (defined $theme->[0]) {
+		s/$COLOR//g for ($start, $mid, $end);
+		chomp $end;
+		return join('',
+			$theme->[0], $start, $RESET,
+			$theme->[1], $mid, $RESET,
+			$theme->[0], $end, $RESET,
+			"\n"
+		);
+	} else {
+		return join('',
+			$start,
+			$theme->[1], $mid, $theme->[2],
+			$end
+		);
+	}
+}
+
+# Pairs are interesting to highlight only if we are going to end up
+# highlighting a subset (i.e., not the whole line). Otherwise, the highlighting
+# is just useless noise. We can detect this by finding either a matching prefix
+# or suffix (disregarding boring bits like whitespace and colorization).
+sub is_pair_interesting {
+	my ($a, $pa, $sa, $b, $pb, $sb) = @_;
+	my $prefix_a = join('', @$a[0..($pa-1)]);
+	my $prefix_b = join('', @$b[0..($pb-1)]);
+	my $suffix_a = join('', @$a[($sa+1)..$#$a]);
+	my $suffix_b = join('', @$b[($sb+1)..$#$b]);
+
+	return visible_substr($prefix_a, $graph_indent) !~ /^$COLOR*-$BORING*$/ ||
+	       visible_substr($prefix_b, $graph_indent) !~ /^$COLOR*\+$BORING*$/ ||
+	       $suffix_a !~ /^$BORING*$/ ||
+	       $suffix_b !~ /^$BORING*$/;
+}
diff --git a/third_party/git/contrib/diff-highlight/Makefile b/third_party/git/contrib/diff-highlight/Makefile
new file mode 100644
index 000000000000..f2be7cc92437
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/Makefile
@@ -0,0 +1,23 @@
+all: diff-highlight
+
+PERL_PATH = /usr/bin/perl
+-include ../../config.mak
+
+PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
+
+diff-highlight: shebang.perl DiffHighlight.pm diff-highlight.perl
+	cat $^ >$@+
+	chmod +x $@+
+	mv $@+ $@
+
+shebang.perl: FORCE
+	@echo '#!$(PERL_PATH_SQ)' >$@+
+	@cmp $@+ $@ >/dev/null 2>/dev/null || mv $@+ $@
+
+test: all
+	$(MAKE) -C t
+
+clean:
+	$(RM) diff-highlight
+
+.PHONY: FORCE
diff --git a/third_party/git/contrib/diff-highlight/README b/third_party/git/contrib/diff-highlight/README
new file mode 100644
index 000000000000..d4c234317520
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/README
@@ -0,0 +1,223 @@
+diff-highlight
+==============
+
+Line oriented diffs are great for reviewing code, because for most
+hunks, you want to see the old and the new segments of code next to each
+other. Sometimes, though, when an old line and a new line are very
+similar, it's hard to immediately see the difference.
+
+You can use "--color-words" to highlight only the changed portions of
+lines. However, this can often be hard to read for code, as it loses
+the line structure, and you end up with oddly formatted bits.
+
+Instead, this script post-processes the line-oriented diff, finds pairs
+of lines, and highlights the differing segments.  It's currently very
+simple and stupid about doing these tasks. In particular:
+
+  1. It will only highlight hunks in which the number of removed and
+     added lines is the same, and it will pair lines within the hunk by
+     position (so the first removed line is compared to the first added
+     line, and so forth). This is simple and tends to work well in
+     practice. More complex changes don't highlight well, so we tend to
+     exclude them due to the "same number of removed and added lines"
+     restriction. Or even if we do try to highlight them, they end up
+     not highlighting because of our "don't highlight if the whole line
+     would be highlighted" rule.
+
+  2. It will find the common prefix and suffix of two lines, and
+     consider everything in the middle to be "different". It could
+     instead do a real diff of the characters between the two lines and
+     find common subsequences. However, the point of the highlight is to
+     call attention to a certain area. Even if some small subset of the
+     highlighted area actually didn't change, that's OK. In practice it
+     ends up being more readable to just have a single blob on the line
+     showing the interesting bit.
+
+The goal of the script is therefore not to be exact about highlighting
+changes, but to call attention to areas of interest without being
+visually distracting.  Non-diff lines and existing diff coloration are
+preserved; the intent is that the output should look exactly the same as
+the input, except for the occasional highlight.
+
+Use
+---
+
+You can try out the diff-highlight program with:
+
+---------------------------------------------
+git log -p --color | /path/to/diff-highlight
+---------------------------------------------
+
+If you want to use it all the time, drop it in your $PATH and put the
+following in your git configuration:
+
+---------------------------------------------
+[pager]
+	log = diff-highlight | less
+	show = diff-highlight | less
+	diff = diff-highlight | less
+---------------------------------------------
+
+
+Color Config
+------------
+
+You can configure the highlight colors and attributes using git's
+config. The colors for "old" and "new" lines can be specified
+independently. There are two "modes" of configuration:
+
+  1. You can specify a "highlight" color and a matching "reset" color.
+     This will retain any existing colors in the diff, and apply the
+     "highlight" and "reset" colors before and after the highlighted
+     portion.
+
+  2. You can specify a "normal" color and a "highlight" color. In this
+     case, existing colors are dropped from that line. The non-highlighted
+     bits of the line get the "normal" color, and the highlights get the
+     "highlight" color.
+
+If no "new" colors are specified, they default to the "old" colors. If
+no "old" colors are specified, the default is to reverse the foreground
+and background for highlighted portions.
+
+Examples:
+
+---------------------------------------------
+# Underline highlighted portions
+[color "diff-highlight"]
+oldHighlight = ul
+oldReset = noul
+---------------------------------------------
+
+---------------------------------------------
+# Varying background intensities
+[color "diff-highlight"]
+oldNormal = "black #f8cbcb"
+oldHighlight = "black #ffaaaa"
+newNormal = "black #cbeecb"
+newHighlight = "black #aaffaa"
+---------------------------------------------
+
+
+Using diff-highlight as a module
+--------------------------------
+
+If you want to pre- or post- process the highlighted lines as part of
+another perl script, you can use the DiffHighlight module. You can
+either "require" it or just cat the module together with your script (to
+avoid run-time dependencies).
+
+Your script may set up one or more of the following variables:
+
+  - $DiffHighlight::line_cb - this should point to a function which is
+    called whenever DiffHighlight has lines (which may contain
+    highlights) to output. The default function prints each line to
+    stdout. Note that the function may be called with multiple lines.
+
+  - $DiffHighlight::flush_cb - this should point to a function which
+    flushes the output (because DiffHighlight believes it has completed
+    processing a logical chunk of input). The default function flushes
+    stdout.
+
+The script may then feed lines, one at a time, to DiffHighlight::handle_line().
+When lines are done processing, they will be fed to $line_cb. Note that
+DiffHighlight may queue up many input lines (to analyze a whole hunk)
+before calling $line_cb. After providing all lines, call
+DiffHighlight::flush() to flush any unprocessed lines.
+
+If you just want to process stdin, DiffHighlight::highlight_stdin()
+is a convenience helper which will loop and flush for you.
+
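+For instance, a minimal wrapper that highlights stdin and also copies the
+highlighted output to a log file could look like this (just a sketch: it
+assumes DiffHighlight.pm can be found via @INC or has been concatenated into
+the script, and the log file name is purely illustrative):
+
+---------------------------------------------
+use strict;
+use warnings;
+use IO::Handle;
+
+require DiffHighlight;
+
+open(my $log, '>', 'highlight.log') or die "cannot open log: $!";
+
+# print each highlighted line to stdout and to the log file
+$DiffHighlight::line_cb  = sub { print @_; print {$log} @_ };
+
+# flush both streams whenever DiffHighlight finishes a logical chunk
+$DiffHighlight::flush_cb = sub { STDOUT->flush(); $log->flush() };
+
+# read stdin, queue and highlight hunks, flush the remainder at EOF
+DiffHighlight::highlight_stdin();
+---------------------------------------------
+
+Fed "git log -p --color" on stdin, such a wrapper behaves like the stock
+diff-highlight script while also keeping a copy of everything it printed.
+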
+
+Bugs
+----
+
+Because diff-highlight relies on heuristics to guess which parts of
+changes are important, there are some cases where the highlighting is
+more distracting than useful. Fortunately, these cases are rare in
+practice, and when they do occur, the worst case is simply a little
+extra highlighting. This section documents some cases known to be
+sub-optimal, in case somebody feels like working on improving the
+heuristics.
+
+1. Two changes on the same line get highlighted in a blob. For example,
+   highlighting:
+
+----------------------------------------------
+-foo(buf, size);
++foo(obj->buf, obj->size);
+----------------------------------------------
+
+   yields (where the inside of "+{}" would be highlighted):
+
+----------------------------------------------
+-foo(buf, size);
++foo(+{obj->buf, obj->}size);
+----------------------------------------------
+
+   whereas a more semantically meaningful output would be:
+
+----------------------------------------------
+-foo(buf, size);
++foo(+{obj->}buf, +{obj->}size);
+----------------------------------------------
+
+   Note that doing this right would probably involve a set of
+   content-specific boundary patterns, similar to word-diff. Otherwise
+   you get junk like:
+
+-----------------------------------------------------
+-this line has some -{i}nt-{ere}sti-{ng} text on it
++this line has some +{fa}nt+{a}sti+{c} text on it
+-----------------------------------------------------
+
+   which is less readable than the current output.
+
+2. The multi-line matching assumes that lines in the pre- and post-image
+   match by position. This is often the case, but can be fooled when a
+   line is removed from the top and a new one added at the bottom (or
+   vice versa). Unless the lines in the middle are also changed, diffs
+   will show this as two hunks, and it will not get highlighted at all
+   (which is good). But if the lines in the middle are changed, the
+   highlighting can be misleading. Here's a pathological case:
+
+-----------------------------------------------------
+-one
+-two
+-three
+-four
++two 2
++three 3
++four 4
++five 5
+-----------------------------------------------------
+
+   which gets highlighted as:
+
+-----------------------------------------------------
+-one
+-t-{wo}
+-three
+-f-{our}
++two 2
++t+{hree 3}
++four 4
++f+{ive 5}
+-----------------------------------------------------
+
+   because it matches "two" to "three 3", and so forth. It would be
+   nicer as:
+
+-----------------------------------------------------
+-one
+-two
+-three
+-four
++two +{2}
++three +{3}
++four +{4}
++five 5
+-----------------------------------------------------
+
+   which would probably involve pre-matching the lines into pairs
+   according to some heuristic.
diff --git a/third_party/git/contrib/diff-highlight/diff-highlight.perl b/third_party/git/contrib/diff-highlight/diff-highlight.perl
new file mode 100644
index 000000000000..9b3e9c1f4d7b
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/diff-highlight.perl
@@ -0,0 +1,8 @@
+package main;
+
+# Some scripts may not realize that SIGPIPE is being ignored when launching the
+# pager--for instance scripts written in Python.
+$SIG{PIPE} = 'DEFAULT';
+
+DiffHighlight::highlight_stdin();
+exit 0;
diff --git a/third_party/git/contrib/diff-highlight/t/.gitignore b/third_party/git/contrib/diff-highlight/t/.gitignore
new file mode 100644
index 000000000000..7dcbb232cd87
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/t/.gitignore
@@ -0,0 +1,2 @@
+/trash directory*
+/test-results
diff --git a/third_party/git/contrib/diff-highlight/t/Makefile b/third_party/git/contrib/diff-highlight/t/Makefile
new file mode 100644
index 000000000000..5ff5275496c5
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/t/Makefile
@@ -0,0 +1,22 @@
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+# copied from ../../t/Makefile
+SHELL_PATH ?= $(SHELL)
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+all: test
+test: $(T)
+
+.PHONY: help clean all test $(T)
+
+help:
+	@echo 'Run "$(MAKE) test" to launch test scripts'
+	@echo 'Run "$(MAKE) clean" to remove trash folders'
+
+$(T):
+	@echo "*** $@ ***"; '$(SHELL_PATH_SQ)' $@ $(GIT_TEST_OPTS)
+
+clean:
+	$(RM) -r 'trash directory'.*
diff --git a/third_party/git/contrib/diff-highlight/t/t9400-diff-highlight.sh b/third_party/git/contrib/diff-highlight/t/t9400-diff-highlight.sh
new file mode 100755
index 000000000000..f6f5195d00f6
--- /dev/null
+++ b/third_party/git/contrib/diff-highlight/t/t9400-diff-highlight.sh
@@ -0,0 +1,341 @@
+#!/bin/sh
+
+test_description='Test diff-highlight'
+
+CURR_DIR=$(pwd)
+TEST_OUTPUT_DIRECTORY=$(pwd)
+TEST_DIRECTORY="$CURR_DIR"/../../../t
+DIFF_HIGHLIGHT="$CURR_DIR"/../diff-highlight
+
+CW="$(printf "\033[7m")"	# white
+CR="$(printf "\033[27m")"	# reset
+
+. "$TEST_DIRECTORY"/test-lib.sh
+
+if ! test_have_prereq PERL
+then
+	skip_all='skipping diff-highlight tests; perl not available'
+	test_done
+fi
+
+# dh_test is a test helper function which takes 2 file names as parameters and
+# reads the expected output of diff-highlight from its stdin. The 2 files are
+# used to generate diff and commit output, which is then piped through
+# diff-highlight and compared against the expected output (minus the
+# diff/commit header, i.e. everything after and including the first @@ line).
+dh_test () {
+	a="$1" b="$2" &&
+
+	cat >patch.exp &&
+
+	{
+		cat "$a" >file &&
+		git add file &&
+		git commit -m "Add a file" &&
+
+		cat "$b" >file &&
+		git diff file >diff.raw &&
+		git commit -a -m "Update a file" &&
+		git show >commit.raw
+	} >/dev/null &&
+
+	"$DIFF_HIGHLIGHT" <diff.raw | test_strip_patch_header >diff.act &&
+	"$DIFF_HIGHLIGHT" <commit.raw | test_strip_patch_header >commit.act &&
+	test_cmp patch.exp diff.act &&
+	test_cmp patch.exp commit.act
+}
+
+test_strip_patch_header () {
+	sed -n '/^@@/,$p' $*
+}
+
+# dh_test_setup_history generates a contrived graph such that we have at least
+# 1 nesting (E) and 2 nestings (F).
+#
+#	  A---B master
+#	 /
+#	D---E---F branch
+#
+#	git log --all --graph
+#	* commit
+#	|    B
+#	| * commit
+#	| |    F
+#	* | commit
+#	| |    A
+#	| * commit
+#	|/
+#	|    E
+#	* commit
+#	     D
+#
+dh_test_setup_history () {
+	echo file1 >file &&
+	git add file &&
+	test_tick &&
+	git commit -m "D" &&
+
+	git checkout -b branch &&
+	echo file2 >file &&
+	test_tick &&
+	git commit -a -m "E" &&
+
+	git checkout master &&
+	echo file2 >file &&
+	test_tick &&
+	git commit -a -m "A" &&
+
+	git checkout branch &&
+	echo file3 >file &&
+	test_tick &&
+	git commit -a -m "F" &&
+
+	git checkout master &&
+	echo file3 >file &&
+	test_tick &&
+	git commit -a -m "B"
+}
+
+left_trim () {
+	"$PERL_PATH" -pe 's/^\s+//'
+}
+
+trim_graph () {
+	# graphs start with * or |
+	# followed by a space or / or \
+	"$PERL_PATH" -pe 's@^((\*|\|)( |/|\\))+@@'
+}
+
+test_expect_success 'diff-highlight highlights the beginning of a line' '
+	cat >a <<-\EOF &&
+		aaa
+		bbb
+		ccc
+	EOF
+
+	cat >b <<-\EOF &&
+		aaa
+		0bb
+		ccc
+	EOF
+
+	dh_test a b <<-EOF
+		@@ -1,3 +1,3 @@
+		 aaa
+		-${CW}b${CR}bb
+		+${CW}0${CR}bb
+		 ccc
+	EOF
+'
+
+test_expect_success 'diff-highlight highlights the end of a line' '
+	cat >a <<-\EOF &&
+		aaa
+		bbb
+		ccc
+	EOF
+
+	cat >b <<-\EOF &&
+		aaa
+		bb0
+		ccc
+	EOF
+
+	dh_test a b <<-EOF
+		@@ -1,3 +1,3 @@
+		 aaa
+		-bb${CW}b${CR}
+		+bb${CW}0${CR}
+		 ccc
+	EOF
+'
+
+test_expect_success 'diff-highlight highlights the middle of a line' '
+	cat >a <<-\EOF &&
+		aaa
+		bbb
+		ccc
+	EOF
+
+	cat >b <<-\EOF &&
+		aaa
+		b0b
+		ccc
+	EOF
+
+	dh_test a b <<-EOF
+		@@ -1,3 +1,3 @@
+		 aaa
+		-b${CW}b${CR}b
+		+b${CW}0${CR}b
+		 ccc
+	EOF
+'
+
+test_expect_success 'diff-highlight does not highlight whole line' '
+	cat >a <<-\EOF &&
+		aaa
+		bbb
+		ccc
+	EOF
+
+	cat >b <<-\EOF &&
+		aaa
+		000
+		ccc
+	EOF
+
+	dh_test a b <<-EOF
+		@@ -1,3 +1,3 @@
+		 aaa
+		-bbb
+		+000
+		 ccc
+	EOF
+'
+
+test_expect_failure 'diff-highlight highlights mismatched hunk size' '
+	cat >a <<-\EOF &&
+		aaa
+		bbb
+	EOF
+
+	cat >b <<-\EOF &&
+		aaa
+		b0b
+		ccc
+	EOF
+
+	dh_test a b <<-EOF
+		@@ -1,3 +1,3 @@
+		 aaa
+		-b${CW}b${CR}b
+		+b${CW}0${CR}b
+		+ccc
+	EOF
+'
+
+# These two code points share the same leading byte in UTF-8 representation;
+# a naive byte-wise diff would highlight only the second byte.
+#
+#   - U+00f3 ("o" with acute)
+o_accent=$(printf '\303\263')
+#   - U+00f8 ("o" with stroke)
+o_stroke=$(printf '\303\270')
+
+test_expect_success 'diff-highlight treats multibyte utf-8 as a unit' '
+	echo "unic${o_accent}de" >a &&
+	echo "unic${o_stroke}de" >b &&
+	dh_test a b <<-EOF
+		@@ -1 +1 @@
+		-unic${CW}${o_accent}${CR}de
+		+unic${CW}${o_stroke}${CR}de
+	EOF
+'
+
+# Unlike the UTF-8 above, these are combining code points which are meant
+# to modify the character preceding them:
+#
+#   - U+0301 (combining acute accent)
+combine_accent=$(printf '\314\201')
+#   - U+0302 (combining circumflex)
+combine_circum=$(printf '\314\202')
+
+test_expect_failure 'diff-highlight treats combining code points as a unit' '
+	echo "unico${combine_accent}de" >a &&
+	echo "unico${combine_circum}de" >b &&
+	dh_test a b <<-EOF
+		@@ -1 +1 @@
+		-unic${CW}o${combine_accent}${CR}de
+		+unic${CW}o${combine_circum}${CR}de
+	EOF
+'
+
+test_expect_success 'diff-highlight works with the --graph option' '
+	dh_test_setup_history &&
+
+	# date-order so that the commits are interleaved for both
+	# trim graph elements so we can do a diff
+	# trim leading space because our trim_graph is not perfect
+	git log --branches -p --date-order |
+		"$DIFF_HIGHLIGHT" | left_trim >graph.exp &&
+	git log --branches -p --date-order --graph |
+		"$DIFF_HIGHLIGHT" | trim_graph | left_trim >graph.act &&
+	test_cmp graph.exp graph.act
+'
+
+# Just reuse the previous graph test, but with --color.  Our trimming
+# doesn't know about color, so just sanity check that something got
+# highlighted.
+test_expect_success 'diff-highlight works with color graph' '
+	git log --branches -p --date-order --graph --color |
+		"$DIFF_HIGHLIGHT" | trim_graph | left_trim >graph &&
+	grep "\[7m" graph
+'
+
+# Most combined diffs won't meet diff-highlight's line-number filter. So we
+# create one here where one side drops a line and the other modifies it. That
+# should result in a diff like:
+#
+#    - modified content
+#    ++resolved content
+#
+# which naively looks like one side added "+resolved".
+test_expect_success 'diff-highlight ignores combined diffs' '
+	echo "content" >file &&
+	git add file &&
+	git commit -m base &&
+
+	>file &&
+	git commit -am master &&
+
+	git checkout -b other HEAD^ &&
+	echo "modified content" >file &&
+	git commit -am other &&
+
+	test_must_fail git merge master &&
+	echo "resolved content" >file &&
+	git commit -am resolved &&
+
+	cat >expect <<-\EOF &&
+	--- a/file
+	+++ b/file
+	@@@ -1,1 -1,0 +1,1 @@@
+	- modified content
+	++resolved content
+	EOF
+
+	git show -c | "$DIFF_HIGHLIGHT" >actual.raw &&
+	sed -n "/^---/,\$p" <actual.raw >actual &&
+	test_cmp expect actual
+'
+
+test_expect_success 'diff-highlight handles --graph with leading dash' '
+	cat >file <<-\EOF &&
+	before
+	the old line
+	-leading dash
+	EOF
+	git add file &&
+	git commit -m before &&
+
+	sed s/old/new/ <file >file.tmp &&
+	mv file.tmp file &&
+	git add file &&
+	git commit -m after &&
+
+	cat >expect <<-EOF &&
+	--- a/file
+	+++ b/file
+	@@ -1,3 +1,3 @@
+	 before
+	-the ${CW}old${CR} line
+	+the ${CW}new${CR} line
+	 -leading dash
+	EOF
+	git log --graph -p -1 | "$DIFF_HIGHLIGHT" >actual.raw &&
+	trim_graph <actual.raw | sed -n "/^---/,\$p" >actual &&
+	test_cmp expect actual
+'
+
+test_done
diff --git a/third_party/git/contrib/emacs/README b/third_party/git/contrib/emacs/README
new file mode 100644
index 000000000000..977a16f1e339
--- /dev/null
+++ b/third_party/git/contrib/emacs/README
@@ -0,0 +1,33 @@
+This directory used to contain various modules for Emacs support.
+
+These were added shortly after Git was first released. Since then
+Emacs's own support for Git got better than what was offered by these
+modes. There are also popular 3rd-party Git modes such as Magit which
+offer replacements for these.
+
+The following modules were available, and can be dug up from the Git
+history:
+
+* git.el:
+
+  Wrapper for "git status" that provided access to other git commands.
+
+  Modern alternatives to this include Magit, and VC mode that ships
+  with Emacs.
+
+* git-blame.el:
+
+  A wrapper for "git blame" written before Emacs's own vc-annotate
+  mode learned to invoke git-blame, which can be done via C-x v g.
+
+* vc-git.el:
+
+  This file used to contain the VC-mode backend for git, but it is no
+  longer distributed with git. It is now maintained as part of Emacs
+  and included in standard Emacs distributions starting from version
+  22.2.
+
+  If you have an earlier Emacs version, upgrading to Emacs 22 is
+  recommended, since the VC mode in older Emacs is not generic enough
+  to be able to support git in a reasonable manner, and no attempt has
+  been made to backport vc-git.el.
diff --git a/third_party/git/contrib/emacs/git-blame.el b/third_party/git/contrib/emacs/git-blame.el
new file mode 100644
index 000000000000..6a8a2b8ff190
--- /dev/null
+++ b/third_party/git/contrib/emacs/git-blame.el
@@ -0,0 +1,6 @@
+(error "git-blame.el no longer ships with git. It's recommended
+to replace its use with Emacs's own vc-annotate. See
+contrib/emacs/README in git's
+sources (https://github.com/git/git/blob/master/contrib/emacs/README)
+for more info on suggested alternatives and for why this
+happened.")
diff --git a/third_party/git/contrib/emacs/git.el b/third_party/git/contrib/emacs/git.el
new file mode 100644
index 000000000000..03f926281fb1
--- /dev/null
+++ b/third_party/git/contrib/emacs/git.el
@@ -0,0 +1,6 @@
+(error "git.el no longer ships with git. It's recommended to
+replace its use with Magit, or simply delete references to git.el
+in your initialization file(s). See contrib/emacs/README in git's
+sources (https://github.com/git/git/blob/master/contrib/emacs/README)
+for suggested alternatives and for why this happened. Emacs's own
+VC mode and Magit are viable alternatives.")
diff --git a/third_party/git/contrib/examples/README b/third_party/git/contrib/examples/README
new file mode 100644
index 000000000000..18bc60b021be
--- /dev/null
+++ b/third_party/git/contrib/examples/README
@@ -0,0 +1,20 @@
+This directory used to contain scripted implementations of builtins
+that have since been rewritten in C.
+
+They have now been removed, but can be retrieved from an older commit
+that removed them from this directory.
+
+They're interesting for their reference value to any aspiring plumbing
+users who want to learn how pieces can be fit together, but in many
+cases have drifted enough from the actual implementations Git uses to
+be instructive.
+
+Other things that can be useful:
+
+ * Some commands such as git-gc wrap other commands, and what they're
+   doing behind the scenes can be seen by running them under
+   GIT_TRACE=1
+
+ * Doing `git log` on paths matching '*--helper.c' will show
+   incremental effort in the direction of moving existing shell
+   scripts to C.
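+
+For example (a brief sketch; output will vary by repository), the two
+tips above can be tried like this:
+
+    # See the plumbing commands that "git gc" runs behind the scenes.
+    GIT_TRACE=1 git gc
+
+    # Follow the incremental move of scripted helpers into C.
+    git log --oneline -- '*--helper.c'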
diff --git a/third_party/git/contrib/fast-import/git-import.perl b/third_party/git/contrib/fast-import/git-import.perl
new file mode 100755
index 000000000000..0891b9e36672
--- /dev/null
+++ b/third_party/git/contrib/fast-import/git-import.perl
@@ -0,0 +1,64 @@
+#!/usr/bin/perl
+#
+# Performs an initial import of a directory. This is the equivalent
+# of doing 'git init; git add .; git commit'. It's a little slower,
+# but is meant to be a simple fast-import example.
+
+use strict;
+use File::Find;
+
+my $USAGE = 'usage: git-import branch import-message';
+my $branch = shift or die "$USAGE\n";
+my $message = shift or die "$USAGE\n";
+
+chomp(my $username = `git config user.name`);
+chomp(my $email = `git config user.email`);
+die 'You need to set user name and email'
+  unless $username && $email;
+
+system('git init');
+open(my $fi, '|-', qw(git fast-import --date-format=now))
+  or die "unable to spawn fast-import: $!";
+
+print $fi <<EOF;
+commit refs/heads/$branch
+committer $username <$email> now
+data <<MSGEOF
+$message
+MSGEOF
+
+EOF
+
+find(
+  sub {
+    if($File::Find::name eq './.git') {
+      $File::Find::prune = 1;
+      return;
+    }
+    return unless -f $_;
+
+    my $fn = $File::Find::name;
+    $fn =~ s#^.\/##;
+
+    open(my $in, '<', $_)
+      or die "unable to open $fn: $!";
+    my @st = stat($in)
+      or die "unable to stat $fn: $!";
+    my $len = $st[7];
+
+    print $fi "M 644 inline $fn\n";
+    print $fi "data $len\n";
+    while($len > 0) {
+      my $r = read($in, my $buf, $len < 4096 ? $len : 4096);
+      defined($r) or die "read error from $fn: $!";
+      $r > 0 or die "premature EOF from $fn: $!";
+      print $fi $buf;
+      $len -= $r;
+    }
+    print $fi "\n";
+
+  }, '.'
+);
+
+close($fi);
+exit $?;
diff --git a/third_party/git/contrib/fast-import/git-import.sh b/third_party/git/contrib/fast-import/git-import.sh
new file mode 100755
index 000000000000..f8d803c5e2be
--- /dev/null
+++ b/third_party/git/contrib/fast-import/git-import.sh
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# Performs an initial import of a directory. This is the equivalent
+# of doing 'git init; git add .; git commit'. It's a lot slower,
+# but is meant to be a simple fast-import example.
+
+if [ -z "$1" -o -z "$2" ]; then
+	echo "usage: git-import branch import-message"
+	exit 1
+fi
+
+USERNAME="$(git config user.name)"
+EMAIL="$(git config user.email)"
+
+if [ -z "$USERNAME" -o -z "$EMAIL" ]; then
+	echo "You need to set user name and email"
+	exit 1
+fi
+
+git init
+
+(
+	cat <<EOF
+commit refs/heads/$1
+committer $USERNAME <$EMAIL> now
+data <<MSGEOF
+$2
+MSGEOF
+
+EOF
+	find * -type f|while read i;do
+		echo "M 100644 inline $i"
+		echo data $(stat -c '%s' "$i")
+		cat "$i"
+		echo
+	done
+	echo
+) | git fast-import --date-format=now
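+
+# Example (a sketch; the branch name, message and path are placeholders):
+# run from inside the directory to be imported, e.g.
+#
+#   sh /path/to/git-import.sh master "Initial import"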
diff --git a/third_party/git/contrib/fast-import/git-p4.README b/third_party/git/contrib/fast-import/git-p4.README
new file mode 100644
index 000000000000..cec5ecfa7c78
--- /dev/null
+++ b/third_party/git/contrib/fast-import/git-p4.README
@@ -0,0 +1,12 @@
+The git-p4 script moved to the top-level of the git source directory.
+
+Invoke it as any other git command, like "git p4 clone", for instance.
+
+Note that the top-level git-p4.py script is now the source.  It is
+built by make into git-p4, which is what gets installed.
+
+Windows users can copy the git-p4.py source script directly, possibly
+invoking it through a batch file called "git-p4.bat" in the same folder.
+It should contain just one line:
+
+    @python "%~d0%~p0git-p4.py" %*
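+
+For example (a sketch only; the depot path is a placeholder), a typical
+first use after installation might be:
+
+    git p4 clone //depot/project@all
+    cd project
+    git p4 rebase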
diff --git a/third_party/git/contrib/fast-import/import-directories.perl b/third_party/git/contrib/fast-import/import-directories.perl
new file mode 100755
index 000000000000..a16f79cfdc46
--- /dev/null
+++ b/third_party/git/contrib/fast-import/import-directories.perl
@@ -0,0 +1,416 @@
+#!/usr/bin/perl
+#
+# Copyright 2008-2009 Peter Krefting <peter@softwolves.pp.se>
+#
+# ------------------------------------------------------------------------
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <http://www.gnu.org/licenses/>.
+#
+# ------------------------------------------------------------------------
+
+=pod
+
+=head1 NAME
+
+import-directories - Import bits and pieces to Git.
+
+=head1 SYNOPSIS
+
+B<import-directories.perl> F<configfile> F<outputfile>
+
+=head1 DESCRIPTION
+
+Script to import projects that were "version controlled" by copying
+the source directory to a new location and editing it there, into
+proper version control. Handles projects with arbitrary branching
+and version trees, taking a file describing the inputs and generating a
+file compatible with the L<git-fast-import(1)> format.
+
+=head1 CONFIGURATION FILE
+
+=head2 Format
+
+The configuration file is based on the standard I<.ini> format.
+
+ ; Comments start with semi-colons
+ [section]
+ key=value
+
+Please see below for information on how to escape special characters.
+
+=head2 Global configuration
+
+Global configuration is done in the B<[config]> section, which should be
+the first section in the file. Configuration can be changed by
+repeating configuration sections later on.
+
+ [config]
+ ; configure conversion of CRLFs. "convert" means that all CRLFs
+ ; should be converted into LFs (suitable for the core.autocrlf
+ ; setting set to true in Git). "none" means that all data is
+ ; treated as binary.
+ crlf=convert
+
+=head2 Revision configuration
+
+Each revision that is to be imported is described in three
+sections. Revisions should be defined in topological order, so
+that a revision's parent has always been defined when a new revision
+is introduced. All the sections for one revision must be defined
+before defining the next revision.
+
+Each revision is assigned a unique numerical identifier. The
+numbers do not need to be consecutive, nor monotonically
+increasing.
+
+For instance, if your configuration file contains only the two
+revisions 4711 and 42, where 4711 is the initial commit, the
+only requirement is that 4711 is completely defined before 42.
+
+=pod
+
+=head3 Revision description section
+
+A section whose section name is just an integer gives meta-data
+about the revision.
+
+ [3]
+ ; author sets the author of the revisions
+ author=Peter Krefting <peter@softwolves.pp.se>
+ ; branch sets the branch that the revision should be committed to
+ branch=master
+ ; parent describes the revision that is the parent of this commit
+ ; (optional)
+ parent=1
+ ; merges describes a revision that is merged into this commit
+ ; (optional; can be repeated)
+ merges=2
+ ; selects one file to take the timestamp from
+ ; (optional; if unspecified, the most recent file from the .files
+ ;  section is used)
+ timestamp=3/source.c
+
+=head3 Revision contents section
+
+A section whose section name is an integer followed by B<.files>
+describe all the files included in this revision. If a file that
+was available previously is not included in this revision, it will
+be removed.
+
+If an on-disk revision is incomplete, you can point to files from
+a previous revision. There are no restrictions on where the source
+files are located, nor on their names.
+
+ [3.files]
+ ; the key is the path inside the repository, the value is the path
+ ; as seen from the importer script.
+ source.c=ver-3.00/source.c
+ source.h=ver-2.99/source.h
+ readme.txt=ver-3.00/introduction to the project.txt
+
+File names are treated as byte strings (but please see below on
+quoting rules), and should be stored in the configuration file in
+the encoding that should be used in the generated repository.
+
+=head3 Revision commit message section
+
+A section whose section name is an integer followed by B<.message>
+gives the commit message. This section is read verbatim, up until
+the beginning of the next section. As such, a commit message may not
+contain a line that begins with an opening square bracket ("[") and
+ends with a closing square bracket ("]"), unless they are surrounded
+by whitespace or other characters.
+
+ [3.message]
+ Implement foobar.
+ ; trailing blank lines are ignored.
+
+=cut
+
+# Globals
+use strict;
+use warnings;
+use integer;
+my $crlfmode = 0;
+my @revs;
+my (%revmap, %message, %files, %author, %branch, %parent, %merges, %time, %timesource);
+my $sectiontype = 0;
+my $rev = 0;
+my $mark = 1;
+
+# Check command line
+if ($#ARGV < 1 || $ARGV[0] =~ /^--?h/)
+{
+    exec('perldoc', $0);
+    exit 1;
+}
+
+# Open configuration
+my $config = $ARGV[0];
+open CFG, '<', $config or die "Cannot open configuration file \"$config\": ";
+
+# Open output
+my $output = $ARGV[1];
+open OUT, '>', $output or die "Cannot create output file \"$output\": ";
+binmode OUT;
+
+LINE: while (my $line = <CFG>)
+{
+	$line =~ s/\r?\n$//;
+	next LINE if $sectiontype != 4 && $line eq '';
+	next LINE if $line =~ /^;/;
+	my $oldsectiontype = $sectiontype;
+	my $oldrev = $rev;
+
+	# Sections
+	if ($line =~ m"^\[(config|(\d+)(|\.files|\.message))\]$")
+	{
+		if ($1 eq 'config')
+		{
+			$sectiontype = 1;
+		}
+		elsif ($3 eq '')
+		{
+			$sectiontype = 2;
+			$rev = $2;
+			# Create a new revision
+			die "Duplicate rev: $line\n " if defined $revmap{$rev};
+			print "Reading revision $rev\n";
+			push @revs, $rev;
+			$revmap{$rev} = $mark ++;
+			$time{$revmap{$rev}} = 0;
+		}
+		elsif ($3 eq '.files')
+		{
+			$sectiontype = 3;
+			$rev = $2;
+			die "Revision mismatch: $line\n " unless $rev == $oldrev;
+		}
+		elsif ($3 eq '.message')
+		{
+			$sectiontype = 4;
+			$rev = $2;
+			die "Revision mismatch: $line\n " unless $rev == $oldrev;
+		}
+		else
+		{
+			die "Internal parse error: $line\n ";
+		}
+		next LINE;
+	}
+
+	# Parse data
+	if ($sectiontype != 4)
+	{
+		# Key and value
+		if ($line =~ m"^\s*([^\s].*=.*[^\s])\s*$")
+		{
+			my ($key, $value) = &parsekeyvaluepair($1);
+			# Global configuration
+			if (1 == $sectiontype)
+			{
+				if ($key eq 'crlf')
+				{
+					$crlfmode = 1, next LINE if $value eq 'convert';
+					$crlfmode = 0, next LINE if $value eq 'none';
+				}
+				die "Unknown configuration option: $line\n ";
+			}
+			# Revision specification
+			if (2 == $sectiontype)
+			{
+				my $current = $revmap{$rev};
+				$author{$current} = $value, next LINE if $key eq 'author';
+				$branch{$current} = $value, next LINE if $key eq 'branch';
+				$parent{$current} = $value, next LINE if $key eq 'parent';
+				$timesource{$current} = $value, next LINE if $key eq 'timestamp';
+				push(@{$merges{$current}}, $value), next LINE if $key eq 'merges';
+				die "Unknown revision option: $line\n ";
+			}
+			# Filespecs
+			if (3 == $sectiontype)
+			{
+				# Add the file and create a marker
+				die "File not found: $line\n " unless -f $value;
+				my $current = $revmap{$rev};
+				${$files{$current}}{$key} = $mark;
+				my $time = &fileblob($value, $crlfmode, $mark ++);
+
+				# Update revision timestamp if more recent than other
+				# files seen, or if this is the file we have selected
+				# to take the time stamp from using the "timestamp"
+				# directive.
+				if ((defined $timesource{$current} && $timesource{$current} eq $value)
+				    || $time > $time{$current})
+				{
+					$time{$current} = $time;
+				}
+			}
+		}
+		else
+		{
+			die "Parse error: $line\n ";
+		}
+	}
+	else
+	{
+		# Commit message
+		my $current = $revmap{$rev};
+		if (defined $message{$current})
+		{
+			$message{$current} .= "\n";
+		}
+		$message{$current} .= $line;
+	}
+}
+close CFG;
+
+# Start spewing out data for git-fast-import
+foreach my $commit (@revs)
+{
+	# Progress
+	print OUT "progress Creating revision $commit\n";
+
+	# Create commit header
+	my $mark = $revmap{$commit};
+
+	# Branch and commit id
+	print OUT "commit refs/heads/", $branch{$mark}, "\nmark :", $mark, "\n";
+
+	# Author and timestamp
+	die "No timestamp defined for $commit (no files?)\n" unless defined $time{$mark};
+	print OUT "committer ", $author{$mark}, " ", $time{$mark}, " +0100\n";
+
+	# Commit message
+	die "No message defined for $commit\n" unless defined $message{$mark};
+	my $message = $message{$mark};
+	$message =~ s/\n$//; # Kill trailing empty line
+	print OUT "data ", length($message), "\n", $message, "\n";
+
+	# Parent and any merges
+	print OUT "from :", $revmap{$parent{$mark}}, "\n" if defined $parent{$mark};
+	if (defined $merges{$mark})
+	{
+		foreach my $merge (@{$merges{$mark}})
+		{
+			print OUT "merge :", $revmap{$merge}, "\n";
+		}
+	}
+
+	# Output file marks
+	print OUT "deleteall\n"; # start from scratch
+	foreach my $file (sort keys %{$files{$mark}})
+	{
+		print OUT "M 644 :", ${$files{$mark}}{$file}, " $file\n";
+	}
+	print OUT "\n";
+}
+
+# Create one file blob
+sub fileblob
+{
+	my ($filename, $crlfmode, $mark) = @_;
+
+	# Import the file
+	print OUT "progress Importing $filename\nblob\nmark :$mark\n";
+	open FILE, '<', $filename or die "Cannot read $filename\n ";
+	binmode FILE;
+	my ($size, $mtime) = (stat(FILE))[7,9];
+	my $file;
+	read FILE, $file, $size;
+	close FILE;
+	$file =~ s/\r\n/\n/g if $crlfmode;
+	print OUT "data ", length($file), "\n", $file, "\n";
+
+	return $mtime;
+}
+
+# Parse a key=value pair
+sub parsekeyvaluepair
+{
+=pod
+
+=head2 Escaping special characters
+
+Key and value strings may be enclosed in quotes, in which case
+whitespace inside the quotes is preserved. Additionally, an equal
+sign may be included in the key by preceding it with a backslash.
+For example:
+
+ "key1 "=value1
+ key2=" value2"
+ key\=3=value3
+ key4=value=4
+ "key5""=value5
+
+Here the first key is "key1 " (note the trailing white-space) and the
+second value is " value2" (note the leading white-space). The third
+key contains an equal sign "key=3" and so does the fourth value, which
+does not need to be escaped. The fifth key contains a trailing quote,
+which does not need to be escaped since it is inside a surrounding
+quote.
+
+=cut
+	my $pair = shift;
+
+	# Separate key and value by the first non-quoted equal sign
+	my ($key, $value);
+	if ($pair =~ /^(.*[^\\])=(.*)$/)
+	{
+		($key, $value) = ($1, $2)
+	}
+	else
+	{
+		die "Parse error: $pair\n ";
+	}
+
+	# Unquote and unescape the key and value separately
+	return (&unescape($key), &unescape($value));
+}
+
+# Unquote and unescape
+sub unescape
+{
+	my $string = shift;
+
+	# First remove enclosing quotes. Backslash before the trailing
+	# quote leaves both.
+	if ($string =~ /^"(.*[^\\])"$/)
+	{
+		$string = $1;
+	}
+
+	# Second remove any backslashes inside the unquoted string.
+	# For later: Handle special sequences like \t ?
+	$string =~ s/\\(.)/$1/g;
+
+	return $string;
+}
+
+__END__
+
+=pod
+
+=head1 EXAMPLES
+
+B<import-directories.perl> F<project.import> F<project.fi>
+
+=head1 AUTHOR
+
+Copyright 2008-2009 Peter Krefting E<lt>peter@softwolves.pp.se>
+
+This program is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation.
+
+=cut
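+
+# A minimal end-to-end sketch (file and branch names are only examples):
+# with a configuration file "project.import" containing
+#
+#   [config]
+#   crlf=none
+#
+#   [1]
+#   author=A U Thor <author@example.com>
+#   branch=master
+#
+#   [1.files]
+#   readme.txt=snapshot-1.0/readme.txt
+#
+#   [1.message]
+#   Initial import.
+#
+# one might generate and import the stream with:
+#
+#   perl import-directories.perl project.import project.fi
+#   git init && git fast-import <project.fi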
diff --git a/third_party/git/contrib/fast-import/import-tars.perl b/third_party/git/contrib/fast-import/import-tars.perl
new file mode 100755
index 000000000000..e800d9f5c9cf
--- /dev/null
+++ b/third_party/git/contrib/fast-import/import-tars.perl
@@ -0,0 +1,225 @@
+#!/usr/bin/perl
+
+## tar archive frontend for git-fast-import
+##
+## For example:
+##
+##  mkdir project; cd project; git init
+##  perl import-tars.perl *.tar.bz2
+##  git whatchanged import-tars
+##
+## Use --metainfo to specify the extension for a meta data file, where
+## import-tars can read the commit message and optionally author and
+## committer information.
+##
+##  echo 'This is the commit message' > myfile.tar.bz2.msg
+##  perl import-tars.perl --metainfo=msg myfile.tar.bz2
+
+use strict;
+use Getopt::Long;
+
+my $metaext = '';
+
+die "usage: import-tars [--metainfo=extension] *.tar.{gz,bz2,lzma,xz,Z}\n"
+	unless GetOptions('metainfo=s' => \$metaext) && @ARGV;
+
+my $branch_name = 'import-tars';
+my $branch_ref = "refs/heads/$branch_name";
+my $author_name = $ENV{'GIT_AUTHOR_NAME'} || 'T Ar Creator';
+my $author_email = $ENV{'GIT_AUTHOR_EMAIL'} || 'tar@example.com';
+my $committer_name = $ENV{'GIT_COMMITTER_NAME'} || `git config --get user.name`;
+my $committer_email = $ENV{'GIT_COMMITTER_EMAIL'} || `git config --get user.email`;
+
+chomp($committer_name, $committer_email);
+
+open(FI, '|-', 'git', 'fast-import', '--quiet')
+	or die "Unable to start git fast-import: $!\n";
+foreach my $tar_file (@ARGV)
+{
+	my $commit_time = time;
+	$tar_file =~ m,([^/]+)$,;
+	my $tar_name = $1;
+
+	if ($tar_name =~ s/\.(tar\.gz|tgz)$//) {
+		open(I, '-|', 'gunzip', '-c', $tar_file)
+			or die "Unable to gunzip -c $tar_file: $!\n";
+	} elsif ($tar_name =~ s/\.(tar\.bz2|tbz2)$//) {
+		open(I, '-|', 'bunzip2', '-c', $tar_file)
+			or die "Unable to bunzip2 -c $tar_file: $!\n";
+	} elsif ($tar_name =~ s/\.tar\.Z$//) {
+		open(I, '-|', 'uncompress', '-c', $tar_file)
+			or die "Unable to uncompress -c $tar_file: $!\n";
+	} elsif ($tar_name =~ s/\.(tar\.(lzma|xz)|(tlz|txz))$//) {
+		open(I, '-|', 'xz', '-dc', $tar_file)
+			or die "Unable to xz -dc $tar_file: $!\n";
+	} elsif ($tar_name =~ s/\.tar$//) {
+		open(I, $tar_file) or die "Unable to open $tar_file: $!\n";
+	} else {
+		die "Unrecognized compression format: $tar_file\n";
+	}
+
+	my $author_time = 0;
+	my $next_mark = 1;
+	my $have_top_dir = 1;
+	my ($top_dir, %files);
+
+	my $next_path = '';
+
+	while (read(I, $_, 512) == 512) {
+		my ($name, $mode, $uid, $gid, $size, $mtime,
+			$chksum, $typeflag, $linkname, $magic,
+			$version, $uname, $gname, $devmajor, $devminor,
+			$prefix) = unpack 'Z100 Z8 Z8 Z8 Z12 Z12
+			Z8 Z1 Z100 Z6
+			Z2 Z32 Z32 Z8 Z8 Z*', $_;
+
+		unless ($next_path eq '') {
+			# Recover name from previous extended header
+			$name = $next_path;
+			$next_path = '';
+		}
+
+		last unless length($name);
+		if ($name eq '././@LongLink') {
+			# GNU tar extension
+			if (read(I, $_, 512) != 512) {
+				die ('Short archive');
+			}
+			$name = unpack 'Z257', $_;
+			next unless $name;
+
+			my $dummy;
+			if (read(I, $_, 512) != 512) {
+				die ('Short archive');
+			}
+			($dummy, $mode, $uid, $gid, $size, $mtime,
+			$chksum, $typeflag, $linkname, $magic,
+			$version, $uname, $gname, $devmajor, $devminor,
+			$prefix) = unpack 'Z100 Z8 Z8 Z8 Z12 Z12
+			Z8 Z1 Z100 Z6
+			Z2 Z32 Z32 Z8 Z8 Z*', $_;
+		}
+		$mode = oct $mode;
+		$size = oct $size;
+		$mtime = oct $mtime;
+		next if $typeflag == 5; # directory
+
+		if ($typeflag eq 'x') { # extended header
+			# If extended header, check for path
+			my $pax_header = '';
+			while ($size > 0 && read(I, $_, 512) == 512) {
+				$pax_header = $pax_header . substr($_, 0, $size);
+				$size -= 512;
+			}
+
+			my @lines = split /\n/, $pax_header;
+			foreach my $line (@lines) {
+				my ($len, $entry) = split / /, $line;
+				my ($key, $value) = split /=/, $entry;
+				if ($key eq 'path') {
+					$next_path = $value;
+				}
+			}
+			next;
+		} elsif ($name =~ m{/\z}) { # directory
+			next;
+		} elsif ($typeflag != 1) { # handle hard links later
+			print FI "blob\n", "mark :$next_mark\n";
+			if ($typeflag == 2) { # symbolic link
+				print FI "data ", length($linkname), "\n",
+					$linkname;
+				$mode = 0120000;
+			} else {
+				print FI "data $size\n";
+				while ($size > 0 && read(I, $_, 512) == 512) {
+					print FI substr($_, 0, $size);
+					$size -= 512;
+				}
+			}
+			print FI "\n";
+		}
+
+		my $path;
+		if ($prefix) {
+			$path = "$prefix/$name";
+		} else {
+			$path = "$name";
+		}
+
+		if ($typeflag == 1) { # hard link
+			$linkname = "$prefix/$linkname" if $prefix;
+			$files{$path} = [ $files{$linkname}->[0], $mode ];
+		} else {
+			$files{$path} = [$next_mark++, $mode];
+		}
+
+		$author_time = $mtime if $mtime > $author_time;
+		$path =~ m,^([^/]+)/,;
+		$top_dir = $1 unless $top_dir;
+		$have_top_dir = 0 if $top_dir ne $1;
+	}
+
+	my $commit_msg = "Imported from $tar_file.";
+	my $this_committer_name = $committer_name;
+	my $this_committer_email = $committer_email;
+	my $this_author_name = $author_name;
+	my $this_author_email = $author_email;
+	if ($metaext ne '') {
+		# Optionally read a commit message from <filename.tar>.msg
+	# Add a line of the form "Committer: name <e-mail>" to override
+		# the committer and "Author: name <e-mail>" to override the
+		# author for this tar ball.
+		if (open MSG, '<', "${tar_file}.${metaext}") {
+			my $header_done = 0;
+			$commit_msg = '';
+			while (<MSG>) {
+				if (!$header_done && /^Committer:\s+([^<>]*)\s+<(.*)>\s*$/i) {
+					$this_committer_name = $1;
+					$this_committer_email = $2;
+				} elsif (!$header_done && /^Author:\s+([^<>]*)\s+<(.*)>\s*$/i) {
+					$this_author_name = $1;
+					$this_author_email = $2;
+				} elsif (!$header_done && /^$/) { # empty line ends header.
+					$header_done = 1;
+				} else {
+					$commit_msg .= $_;
+					$header_done = 1;
+				}
+			}
+			close MSG;
+		}
+	}
+
+	print FI <<EOF;
+commit $branch_ref
+author $this_author_name <$this_author_email> $author_time +0000
+committer $this_committer_name <$this_committer_email> $commit_time +0000
+data <<END_OF_COMMIT_MESSAGE
+$commit_msg
+END_OF_COMMIT_MESSAGE
+
+deleteall
+EOF
+
+	foreach my $path (keys %files)
+	{
+		my ($mark, $mode) = @{$files{$path}};
+		$path =~ s,^([^/]+)/,, if $have_top_dir;
+		$mode = $mode & 0111 ? 0755 : 0644 unless $mode == 0120000;
+		printf FI "M %o :%i %s\n", $mode, $mark, $path;
+	}
+	print FI "\n";
+
+	print FI <<EOF;
+tag $tar_name
+from $branch_ref
+tagger $author_name <$author_email> $author_time +0000
+data <<END_OF_TAG_MESSAGE
+Package $tar_name
+END_OF_TAG_MESSAGE
+
+EOF
+
+	close I;
+}
+close FI;
diff --git a/third_party/git/contrib/fast-import/import-zips.py b/third_party/git/contrib/fast-import/import-zips.py
new file mode 100755
index 000000000000..d12c2962230b
--- /dev/null
+++ b/third_party/git/contrib/fast-import/import-zips.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+## zip archive frontend for git-fast-import
+##
+## For example:
+##
+##  mkdir project; cd project; git init
+##  python import-zips.py *.zip
+##  git log --stat import-zips
+
+from os import popen, path
+from sys import argv, exit, hexversion, stderr
+from time import mktime
+from zipfile import ZipFile
+
+if hexversion < 0x01060000:
+    # The limiter is the zipfile module
+    stderr.write("import-zips.py: requires Python 1.6.0 or later.\n")
+    exit(1)
+
+if len(argv) < 2:
+    print 'usage:', argv[0], '<zipfile>...'
+    exit(1)
+
+branch_ref = 'refs/heads/import-zips'
+committer_name = 'Z Ip Creator'
+committer_email = 'zip@example.com'
+
+fast_import = popen('git fast-import --quiet', 'w')
+def printlines(list):
+    for str in list:
+        fast_import.write(str + "\n")
+
+for zipfile in argv[1:]:
+    commit_time = 0
+    next_mark = 1
+    common_prefix = None
+    mark = dict()
+
+    zip = ZipFile(zipfile, 'r')
+    for name in zip.namelist():
+        if name.endswith('/'):
+            continue
+        info = zip.getinfo(name)
+
+        if commit_time < info.date_time:
+            commit_time = info.date_time
+        if common_prefix == None:
+            common_prefix = name[:name.rfind('/') + 1]
+        else:
+            while not name.startswith(common_prefix):
+                last_slash = common_prefix[:-1].rfind('/') + 1
+                common_prefix = common_prefix[:last_slash]
+
+        mark[name] = ':' + str(next_mark)
+        next_mark += 1
+
+        printlines(('blob', 'mark ' + mark[name], \
+                    'data ' + str(info.file_size)))
+        fast_import.write(zip.read(name) + "\n")
+
+    committer = committer_name + ' <' + committer_email + '> %d +0000' % \
+        mktime(commit_time + (0, 0, 0))
+
+    printlines(('commit ' + branch_ref, 'committer ' + committer, \
+        'data <<EOM', 'Imported from ' + zipfile + '.', 'EOM', \
+        '', 'deleteall'))
+
+    for name in mark.keys():
+        fast_import.write('M 100644 ' + mark[name] + ' ' +
+            name[len(common_prefix):] + "\n")
+
+    printlines(('',  'tag ' + path.basename(zipfile), \
+        'from ' + branch_ref, 'tagger ' + committer, \
+        'data <<EOM', 'Package ' + zipfile, 'EOM', ''))
+
+if fast_import.close():
+    exit(1)
diff --git a/third_party/git/contrib/git-jump/README b/third_party/git/contrib/git-jump/README
new file mode 100644
index 000000000000..2f618a7f9788
--- /dev/null
+++ b/third_party/git/contrib/git-jump/README
@@ -0,0 +1,112 @@
+git-jump
+========
+
+Git-jump is a script for helping you jump to "interesting" parts of your
+project in your editor. It works by outputting a set of interesting
+spots in the "quickfix" format, which editors like vim can use as a
+queue of places to visit (this feature is usually used to jump to errors
+produced by a compiler). For example, given a diff like this:
+
+------------------------------------
+diff --git a/foo.c b/foo.c
+index a655540..5a59044 100644
+--- a/foo.c
++++ b/foo.c
+@@ -1,3 +1,3 @@
+ int main(void) {
+-  printf("hello word!\n");
++  printf("hello world!\n");
+ }
+-----------------------------------
+
+git-jump will feed this to the editor:
+
+-----------------------------------
+foo.c:2: printf("hello word!\n");
+-----------------------------------
+
+Or, when running 'git jump grep', column numbers will also be emitted,
+e.g. `git jump grep "hello"` would return:
+
+-----------------------------------
+foo.c:2:9: printf("hello word!\n");
+-----------------------------------
+
+Obviously this trivial case isn't that interesting; you could just open
+`foo.c` yourself. But when you have many changes scattered across a
+project, you can use the editor's support to "jump" from point to point.
+
+Git-jump can generate four types of interesting lists:
+
+  1. The beginning of any diff hunks.
+
+  2. The beginning of any merge conflict markers.
+
+  3. Any grep matches, including the column of the first match on a
+     line.
+
+  4. Any whitespace errors detected by `git diff --check`.
+
+
+Using git-jump
+--------------
+
+To use it, just drop git-jump in your PATH, and then invoke it like
+this:
+
+--------------------------------------------------
+# jump to changes not yet staged for commit
+git jump diff
+
+# jump to changes that are staged for commit; you can give
+# arbitrary diff options
+git jump diff --cached
+
+# jump to merge conflicts
+git jump merge
+
+# jump to all instances of foo_bar
+git jump grep foo_bar
+
+# same as above, but case-insensitive; you can give
+# arbitrary grep options
+git jump grep -i foo_bar
+
+# use the silver searcher for git jump grep
+git config jump.grepCmd "ag --column"
+--------------------------------------------------
+
+
+Related Programs
+----------------
+
+You can accomplish some of the same things with individual tools. For
+example, you can use `git mergetool` to start vimdiff on each unmerged
+file. `git jump merge` is for the vim-wielding luddite who just wants to
+jump straight to the conflict text with no fanfare.
+
+As of git v1.7.2, `git grep` knows the `--open-files-in-pager` option,
+which does something similar to `git jump grep`. However, it is limited
+to positioning the cursor to the correct line in only the first file,
+leaving you to locate subsequent hits in that file or other files using
+the editor or pager. By contrast, git-jump provides the editor with a
+complete list of files, lines, and a column number for each match.
+
+
+Limitations
+-----------
+
+This script was written and tested with vim. Given that the quickfix
+format is the same as what gcc produces, I expect emacs users have a
+similar feature for iterating through the list, but I know nothing about
+how to activate it.
+
+The shell snippets to generate the quickfix lines will almost certainly
+choke on filenames with exotic characters (like newlines).
+
+Contributing
+------------
+
+Bug fixes, bug reports, and feature requests should be discussed on the
+Git mailing list <git@vger.kernel.org>, and cc'd to the git-jump
+maintainer, Jeff King <peff@peff.net>.
diff --git a/third_party/git/contrib/git-jump/git-jump b/third_party/git/contrib/git-jump/git-jump
new file mode 100755
index 000000000000..931b0fe3a948
--- /dev/null
+++ b/third_party/git/contrib/git-jump/git-jump
@@ -0,0 +1,78 @@
+#!/bin/sh
+
+usage() {
+	cat <<\EOF
+usage: git jump <mode> [<args>]
+
+Jump to interesting elements in an editor.
+The <mode> parameter is one of:
+
+diff: elements are diff hunks. Arguments are given to diff.
+
+merge: elements are merge conflicts. Arguments are ignored.
+
+grep: elements are grep hits. Arguments are given to git grep or, if
+      configured, to the command in `jump.grepCmd`.
+
+ws: elements are whitespace errors. Arguments are given to diff --check.
+EOF
+}
+
+open_editor() {
+	editor=`git var GIT_EDITOR`
+	eval "$editor -q \$1"
+}
+
+mode_diff() {
+	git diff --no-prefix --relative "$@" |
+	perl -ne '
+	if (m{^\+\+\+ (.*)}) { $file = $1; next }
+	defined($file) or next;
+	if (m/^@@ .*?\+(\d+)/) { $line = $1; next }
+	defined($line) or next;
+	if (/^ /) { $line++; next }
+	if (/^[-+]\s*(.*)/) {
+		print "$file:$line: $1\n";
+		$line = undef;
+	}
+	'
+}
+
+mode_merge() {
+	git ls-files -u |
+	perl -pe 's/^.*?\t//' |
+	sort -u |
+	while IFS= read fn; do
+		grep -Hn '^<<<<<<<' "$fn"
+	done
+}
+
+# Grep -n generates nice quickfix-looking lines by itself,
+# but let's clean up extra whitespace, so they look better if the
+# editor shows them to us in the status bar.
+mode_grep() {
+	cmd=$(git config jump.grepCmd)
+	test -n "$cmd" || cmd="git grep -n --column"
+	$cmd "$@" |
+	perl -pe '
+	s/[ \t]+/ /g;
+	s/^ *//;
+	'
+}
+
+mode_ws() {
+	git diff --check "$@"
+}
+
+if test $# -lt 1; then
+	usage >&2
+	exit 1
+fi
+mode=$1; shift
+
+trap 'rm -f "$tmp"' 0 1 2 3 15
+tmp=`mktemp -t git-jump.XXXXXX` || exit 1
+type "mode_$mode" >/dev/null 2>&1 || { usage >&2; exit 1; }
+"mode_$mode" "$@" >"$tmp"
+test -s "$tmp" || exit 0
+open_editor "$tmp"
diff --git a/third_party/git/contrib/git-resurrect.sh b/third_party/git/contrib/git-resurrect.sh
new file mode 100755
index 000000000000..8c171dd959f6
--- /dev/null
+++ b/third_party/git/contrib/git-resurrect.sh
@@ -0,0 +1,182 @@
+#!/bin/sh
+
+USAGE="[-a] [-r] [-m] [-t] [-n] [-b <newname>] <name>"
+LONG_USAGE="git-resurrect attempts to find traces of a branch tip
+called <name>, and tries to resurrect it.  Currently, the reflog is
+searched for checkout messages, and with -r also merge messages.  With
+-m and -t, the history of all refs is scanned for Merge <name> into
+other/Merge <other> into <name> (respectively) commit subjects, which
+is rather slow but allows you to resurrect other people's topic
+branches."
+
+OPTIONS_KEEPDASHDASH=
+OPTIONS_STUCKLONG=
+OPTIONS_SPEC="\
+git resurrect $USAGE
+--
+b,branch=            save branch as <newname> instead of <name>
+a,all                same as -l -r -m -t
+k,keep-going         full rev-list scan (instead of first match)
+l,reflog             scan reflog for checkouts (enabled by default)
+r,reflog-merges      scan for merges recorded in reflog
+m,merges             scan for merges into other branches (slow)
+t,merge-targets      scan for merges of other branches into <name>
+n,dry-run            don't recreate the branch"
+
+. git-sh-setup
+
+search_reflog () {
+	sed -ne 's~^\([^ ]*\) .*	checkout: moving from '"$1"' .*~\1~p' \
+                < "$GIT_DIR"/logs/HEAD
+}
+
+search_reflog_merges () {
+	git rev-parse $(
+		sed -ne 's~^[^ ]* \([^ ]*\) .*	merge '"$1"':.*~\1^2~p' \
+			< "$GIT_DIR"/logs/HEAD
+	)
+}
+
+_x40="[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]"
+_x40="$_x40$_x40$_x40$_x40$_x40$_x40$_x40$_x40"
+
+search_merges () {
+        git rev-list --all --grep="Merge branch '$1'" \
+                --pretty=tformat:"%P %s" |
+        sed -ne "/^$_x40 \($_x40\) Merge .*/ {s//\1/p;$early_exit}"
+}
+
+search_merge_targets () {
+	git rev-list --all --grep="Merge branch '[^']*' into $branch\$" \
+		--pretty=tformat:"%H %s" --all |
+	sed -ne "/^\($_x40\) Merge .*/ {s//\1/p;$early_exit} "
+}
+
+dry_run=
+early_exit=q
+scan_reflog=t
+scan_reflog_merges=
+scan_merges=
+scan_merge_targets=
+new_name=
+
+while test "$#" != 0; do
+	case "$1" in
+	    -b|--branch)
+		shift
+		new_name="$1"
+		;;
+	    -n|--dry-run)
+		dry_run=t
+		;;
+	    --no-dry-run)
+		dry_run=
+		;;
+	    -k|--keep-going)
+		early_exit=
+		;;
+	    --no-keep-going)
+		early_exit=q
+		;;
+	    -m|--merges)
+		scan_merges=t
+		;;
+	    --no-merges)
+		scan_merges=
+		;;
+	    -l|--reflog)
+		scan_reflog=t
+		;;
+	    --no-reflog)
+		scan_reflog=
+		;;
+	    -r|--reflog_merges)
+		scan_reflog_merges=t
+		;;
+	    --no-reflog_merges)
+		scan_reflog_merges=
+		;;
+	    -t|--merge-targets)
+		scan_merge_targets=t
+		;;
+	    --no-merge-targets)
+		scan_merge_targets=
+		;;
+	    -a|--all)
+		scan_reflog=t
+		scan_reflog_merges=t
+		scan_merges=t
+		scan_merge_targets=t
+		;;
+	    --)
+		shift
+		break
+		;;
+	    *)
+		usage
+		;;
+	esac
+	shift
+done
+
+test "$#" = 1 || usage
+
+all_strategies="$scan_reflog$scan_reflog_merges$scan_merges$scan_merge_targets"
+if test -z "$all_strategies"; then
+	die "must enable at least one of -lrmt"
+fi
+
+branch="$1"
+test -z "$new_name" && new_name="$branch"
+
+if test ! -z "$scan_reflog"; then
+	if test -r "$GIT_DIR"/logs/HEAD; then
+		candidates="$(search_reflog $branch)"
+	else
+		die 'reflog scanning requested, but' \
+			'$GIT_DIR/logs/HEAD not readable'
+	fi
+fi
+if test ! -z "$scan_reflog_merges"; then
+	if test -r "$GIT_DIR"/logs/HEAD; then
+		candidates="$candidates $(search_reflog_merges $branch)"
+	else
+		die 'reflog scanning requested, but' \
+			'$GIT_DIR/logs/HEAD not readable'
+	fi
+fi
+if test ! -z "$scan_merges"; then
+	candidates="$candidates $(search_merges $branch)"
+fi
+if test ! -z "$scan_merge_targets"; then
+	candidates="$candidates $(search_merge_targets $branch)"
+fi
+
+candidates="$(git rev-parse $candidates | sort -u)"
+
+if test -z "$candidates"; then
+	hint=
+	test "z$all_strategies" != "ztttt" \
+		&& hint=" (maybe try again with -a)"
+	die "no candidates for $branch found$hint"
+fi
+
+echo "** Candidates for $branch **"
+for cmt in $candidates; do
+	git --no-pager log --pretty=tformat:"%ct:%h [%cr] %s" --abbrev-commit -1 $cmt
+done \
+| sort -n | cut -d: -f2-
+
+newest="$(git rev-list -1 $candidates)"
+if test ! -z "$dry_run"; then
+	printf "** Most recent: "
+	git --no-pager log -1 --pretty=tformat:"%h %s" $newest
+elif ! git rev-parse --verify --quiet $new_name >/dev/null; then
+	printf "** Restoring $new_name to "
+	git --no-pager log -1 --pretty=tformat:"%h %s" $newest
+	git branch $new_name $newest
+else
+	printf "Most recent: "
+	git --no-pager log -1 --pretty=tformat:"%h %s" $newest
+	echo "** $new_name already exists, doing nothing"
+fi
diff --git a/third_party/git/contrib/git-shell-commands/README b/third_party/git/contrib/git-shell-commands/README
new file mode 100644
index 000000000000..438463b16099
--- /dev/null
+++ b/third_party/git/contrib/git-shell-commands/README
@@ -0,0 +1,18 @@
+Sample programs callable through git-shell.  Place a directory named
+'git-shell-commands' in the home directory of a user whose shell is
+git-shell.  Then anyone logging in as that user will be able to run
+executables in the 'git-shell-commands' directory.
+
+Provided commands:
+
+help: Prints out the names of available commands.  When run
+interactively, git-shell will automatically run 'help' on startup,
+provided it exists.
+
+list: Displays any bare repository whose name ends with ".git" under
+user's home directory.  No other git repositories are visible,
+although they might be clonable through git-shell.  'list' is designed
+to minimize the number of calls to git that must be made in finding
+available repositories; if your setup has additional repositories that
+should be user-discoverable, you may wish to modify 'list'
+accordingly.
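+
+A minimal setup sketch (the account name "gituser" and the paths are
+only examples; adjust to your installation):
+
+    # Make git-shell the login shell of the account.
+    chsh -s "$(command -v git-shell)" gituser
+
+    # Install the provided commands (run from this directory).
+    mkdir -p ~gituser/git-shell-commands
+    cp help list ~gituser/git-shell-commands/
+    chmod +x ~gituser/git-shell-commands/help ~gituser/git-shell-commands/list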
diff --git a/third_party/git/contrib/git-shell-commands/help b/third_party/git/contrib/git-shell-commands/help
new file mode 100755
index 000000000000..535770c6ec19
--- /dev/null
+++ b/third_party/git/contrib/git-shell-commands/help
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+if tty -s
+then
+	echo "Run 'help' for help, or 'exit' to leave.  Available commands:"
+else
+	echo "Run 'help' for help.  Available commands:"
+fi
+
+cd "$(dirname "$0")"
+
+for cmd in *
+do
+	case "$cmd" in
+	help) ;;
+	*) [ -f "$cmd" ] && [ -x "$cmd" ] && echo "$cmd" ;;
+	esac
+done
diff --git a/third_party/git/contrib/git-shell-commands/list b/third_party/git/contrib/git-shell-commands/list
new file mode 100755
index 000000000000..6f8993882114
--- /dev/null
+++ b/third_party/git/contrib/git-shell-commands/list
@@ -0,0 +1,10 @@
+#!/bin/sh
+
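+# Note: the "if" below has no "test"; it runs the literal output of
+# rev-parse ("true" or "false") as a command, and that command's exit
+# status decides whether the repository path is printed.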
+print_if_bare_repo='
+	if "$(git --git-dir="$1" rev-parse --is-bare-repository)" = true
+	then
+		printf "%s\n" "${1#./}"
+	fi
+'
+
+find -type d -name "*.git" -exec sh -c "$print_if_bare_repo" -- \{} \; -prune 2>/dev/null
diff --git a/third_party/git/contrib/hg-to-git/hg-to-git.py b/third_party/git/contrib/hg-to-git/hg-to-git.py
new file mode 100755
index 000000000000..7eb1b24cc7a1
--- /dev/null
+++ b/third_party/git/contrib/hg-to-git/hg-to-git.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python
+
+""" hg-to-git.py - A Mercurial to GIT converter
+
+    Copyright (C)2007 Stelian Pop <stelian@popies.net>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2, or (at your option)
+    any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, see <http://www.gnu.org/licenses/>.
+"""
+
+import os, os.path, sys
+import tempfile, pickle, getopt
+import re
+
+if sys.hexversion < 0x02030000:
+   # The behavior of the pickle module changed significantly in 2.3
+   sys.stderr.write("hg-to-git.py: requires Python 2.3 or later.\n")
+   sys.exit(1)
+
+# Maps hg version -> git version
+hgvers = {}
+# List of children for each hg revision
+hgchildren = {}
+# List of parents for each hg revision
+hgparents = {}
+# Current branch for each hg revision
+hgbranch = {}
+# Number of new changesets converted from hg
+hgnewcsets = 0
+
+#------------------------------------------------------------------------------
+
+def usage():
+
+        print("""\
+%s: [OPTIONS] <hgprj>
+
+options:
+    -s, --gitstate=FILE: name of the state to be saved/read
+                         for incrementals
+    -n, --nrepack=INT:   number of changesets that will trigger
+                         a repack (default=0, -1 to deactivate)
+    -v, --verbose:       be verbose
+
+required:
+    hgprj:  name of the HG project to import (directory)
+""" % sys.argv[0])
+
+#------------------------------------------------------------------------------
+
+def getgitenv(user, date):
+    env = ''
+    elems = re.compile('(.*?)\s+<(.*)>').match(user)
+    if elems:
+        env += 'export GIT_AUTHOR_NAME="%s" ;' % elems.group(1)
+        env += 'export GIT_COMMITTER_NAME="%s" ;' % elems.group(1)
+        env += 'export GIT_AUTHOR_EMAIL="%s" ;' % elems.group(2)
+        env += 'export GIT_COMMITTER_EMAIL="%s" ;' % elems.group(2)
+    else:
+        env += 'export GIT_AUTHOR_NAME="%s" ;' % user
+        env += 'export GIT_COMMITTER_NAME="%s" ;' % user
+        env += 'export GIT_AUTHOR_EMAIL= ;'
+        env += 'export GIT_COMMITTER_EMAIL= ;'
+
+    env += 'export GIT_AUTHOR_DATE="%s" ;' % date
+    env += 'export GIT_COMMITTER_DATE="%s" ;' % date
+    return env
+
+#------------------------------------------------------------------------------
+
+state = ''
+opt_nrepack = 0
+verbose = False
+
+try:
+    opts, args = getopt.getopt(sys.argv[1:], 's:t:n:v', ['gitstate=', 'tempdir=', 'nrepack=', 'verbose'])
+    for o, a in opts:
+        if o in ('-s', '--gitstate'):
+            state = a
+            state = os.path.abspath(state)
+        if o in ('-n', '--nrepack'):
+            opt_nrepack = int(a)
+        if o in ('-v', '--verbose'):
+            verbose = True
+    if len(args) != 1:
+        raise Exception('params')
+except:
+    usage()
+    sys.exit(1)
+
+hgprj = args[0]
+os.chdir(hgprj)
+
+if state:
+    if os.path.exists(state):
+        if verbose:
+            print('State does exist, reading')
+        f = open(state, 'r')
+        hgvers = pickle.load(f)
+    else:
+        print('State does not exist, first run')
+
+sock = os.popen('hg tip --template "{rev}"')
+tip = sock.read()
+if sock.close():
+    sys.exit(1)
+if verbose:
+    print('tip is', tip)
+
+# Calculate the branches
+if verbose:
+    print('analysing the branches...')
+hgchildren["0"] = ()
+hgparents["0"] = (None, None)
+hgbranch["0"] = "master"
+for cset in range(1, int(tip) + 1):
+    hgchildren[str(cset)] = ()
+    prnts = os.popen('hg log -r %d --template "{parents}"' % cset).read().strip().split(' ')
+    prnts = map(lambda x: x[:x.find(':')], prnts)
+    if prnts[0] != '':
+        parent = prnts[0].strip()
+    else:
+        parent = str(cset - 1)
+    hgchildren[parent] += ( str(cset), )
+    if len(prnts) > 1:
+        mparent = prnts[1].strip()
+        hgchildren[mparent] += ( str(cset), )
+    else:
+        mparent = None
+
+    hgparents[str(cset)] = (parent, mparent)
+
+    if mparent:
+        # For merge changesets, take either one, preferably the 'master' branch
+        if hgbranch[mparent] == 'master':
+            hgbranch[str(cset)] = 'master'
+        else:
+            hgbranch[str(cset)] = hgbranch[parent]
+    else:
+        # Normal changesets
+        # For first children, take the parent branch, for the others create a new branch
+        if hgchildren[parent][0] == str(cset):
+            hgbranch[str(cset)] = hgbranch[parent]
+        else:
+            hgbranch[str(cset)] = "branch-" + str(cset)
+
+if "0" not in hgvers:
+    print('creating repository')
+    os.system('git init')
+
+# loop through every hg changeset
+for cset in range(int(tip) + 1):
+
+    # incremental, already seen
+    if str(cset) in hgvers:
+        continue
+    hgnewcsets += 1
+
+    # get info
+    log_data = os.popen('hg log -r %d --template "{tags}\n{date|date}\n{author}\n"' % cset).readlines()
+    tag = log_data[0].strip()
+    date = log_data[1].strip()
+    user = log_data[2].strip()
+    parent = hgparents[str(cset)][0]
+    mparent = hgparents[str(cset)][1]
+
+    #get comment
+    (fdcomment, filecomment) = tempfile.mkstemp()
+    csetcomment = os.popen('hg log -r %d --template "{desc}"' % cset).read().strip()
+    os.write(fdcomment, csetcomment)
+    os.close(fdcomment)
+
+    print('-----------------------------------------')
+    print('cset:', cset)
+    print('branch:', hgbranch[str(cset)])
+    print('user:', user)
+    print('date:', date)
+    print('comment:', csetcomment)
+    if parent:
+        print('parent:', parent)
+    if mparent:
+        print('mparent:', mparent)
+    if tag:
+        print('tag:', tag)
+    print('-----------------------------------------')
+
+    # checkout the parent if necessary
+    if cset != 0:
+        if hgbranch[str(cset)] == "branch-" + str(cset):
+            print('creating new branch', hgbranch[str(cset)])
+            os.system('git checkout -b %s %s' % (hgbranch[str(cset)], hgvers[parent]))
+        else:
+            print('checking out branch', hgbranch[str(cset)])
+            os.system('git checkout %s' % hgbranch[str(cset)])
+
+    # merge
+    if mparent:
+        if hgbranch[parent] == hgbranch[str(cset)]:
+            otherbranch = hgbranch[mparent]
+        else:
+            otherbranch = hgbranch[parent]
+        print('merging', otherbranch, 'into', hgbranch[str(cset)])
+        os.system(getgitenv(user, date) + 'git merge --no-commit -s ours "" %s %s' % (hgbranch[str(cset)], otherbranch))
+
+    # remove everything except .git and .hg directories
+    os.system('find . \( -path "./.hg" -o -path "./.git" \) -prune -o ! -name "." -print | xargs rm -rf')
+
+    # repopulate with the checked-out files
+    os.system('hg update -C %d' % cset)
+
+    # add new files
+    os.system('git ls-files -x .hg --others | git update-index --add --stdin')
+    # delete removed files
+    os.system('git ls-files -x .hg --deleted | git update-index --remove --stdin')
+
+    # commit
+    os.system(getgitenv(user, date) + 'git commit --allow-empty --allow-empty-message -a -F %s' % filecomment)
+    os.unlink(filecomment)
+
+    # tag
+    if tag and tag != 'tip':
+        os.system(getgitenv(user, date) + 'git tag %s' % tag)
+
+    # delete branch if not used anymore...
+    if mparent and len(hgchildren[str(cset)]):
+        print("Deleting unused branch:", otherbranch)
+        os.system('git branch -d %s' % otherbranch)
+
+    # retrieve and record the version
+    vvv = os.popen('git show --quiet --pretty=format:%H').read()
+    print('record', cset, '->', vvv)
+    hgvers[str(cset)] = vvv
+
+if hgnewcsets >= opt_nrepack and opt_nrepack != -1:
+    os.system('git repack -a -d')
+
+# write the state for incrementals
+if state:
+    if verbose:
+        print('Writing state')
+    f = open(state, 'w')
+    pickle.dump(hgvers, f)
+
+# vim: et ts=8 sw=4 sts=4
diff --git a/third_party/git/contrib/hg-to-git/hg-to-git.txt b/third_party/git/contrib/hg-to-git/hg-to-git.txt
new file mode 100644
index 000000000000..91f8fe6410c0
--- /dev/null
+++ b/third_party/git/contrib/hg-to-git/hg-to-git.txt
@@ -0,0 +1,21 @@
+hg-to-git.py is able to convert a Mercurial repository into a git one,
+and preserves the branches in the process (unlike tailor)
+
+hg-to-git.py can probably be greatly improved (it's a rather crude
+combination of shell and python) but it does already work quite well for
+me. Features:
+	- supports incremental conversion
+	  (for keeping a git repo in sync with a hg one)
+        - supports hg branches
+        - converts hg tags
+
+Note that the git repository will be created 'in place' (at the same
+location as the source hg repo). You will have to manually remove the
+'.hg' directory after the conversion.
+
+Also note that the incremental conversion uses 'simple' hg changesets
+identifiers (ordinals, as opposed to SHA-1 ids), and since these ids
+are not stable across different repositories the hg-to-git.py state file
+is forever tied to one hg repository.
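+
+For example (a sketch; paths are placeholders), an initial conversion
+and a later incremental update use the same state file:
+
+    python hg-to-git.py -s /tmp/project.state /path/to/hg/project
+    # ... after new changesets appear in the hg repository:
+    python hg-to-git.py -s /tmp/project.state /path/to/hg/project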
+
+Stelian Pop <stelian@popies.net>
diff --git a/third_party/git/contrib/hooks/multimail/CHANGES b/third_party/git/contrib/hooks/multimail/CHANGES
new file mode 100644
index 000000000000..35791fd02c21
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/CHANGES
@@ -0,0 +1,285 @@
+Release 1.5.0
+=============
+
+Backward-incompatible change
+----------------------------
+
+The environment classes were misnamed `*Environement`.
+They are now named `*Environment`.
+
+New features
+------------
+
+* A Thread-Index header is now added to each email sent (except for
+  combined emails where it would not make sense), so that MS Outlook
+  properly groups messages by threads even though they have a
+  different subject line. Unfortunately, even adding this header the
+  threading still seems to be unreliable, but it is unclear whether
+  this is an issue on our side or on MS Outlook's side (see discussion
+  here: https://github.com/git-multimail/git-multimail/pull/194).
+
+* A new variable multimailhook.ExcludeMergeRevisions was added to send
+  notification emails only for non-merge commits.
+
+* For the gitolite environment, it is now possible to specify the mail map
+  in a separate file in addition to gitolite.conf, using the variable
+  multimailhook.MailaddressMap.
+
+Internal changes
+----------------
+
+* The testsuite now uses GIT_PRINT_SHA1_ELLIPSIS where needed for
+  compatibility with recent Git versions. Only tests are affected.
+
+* We don't try to install pyflakes in the continuous integration job
+  for old Python versions where it's no longer available.
+
+* Stop using the deprecated cgi.escape in Python 3.
+
+* New flake8 warnings have been fixed.
+
+* Python 3.6 is now tested on Travis-CI.
+
+* A bunch of lgtm.com warnings have been fixed.
+
+Bug fixes
+---------
+
+* SMTPMailer logs in only once now. It used to re-login for each email
+  sent, which triggered errors for some SMTP servers.
+
+* migrate-mailhook-config was broken by internal refactoring; it
+  should now work again.
+
+This version was tested with Python 2.6 to 3.7. It was tested with Git
+1.7.10.406.gdc801, 2.15.1 and 2.20.1.98.gecbdaf0.
+
+Release 1.4.0
+=============
+
+New features to troubleshoot a git-multimail installation
+---------------------------------------------------------
+
+* One can now perform a basic check of git-multimail's setup by
+  running the hook with the environment variable
+  GIT_MULTIMAIL_CHECK_SETUP set to a non-empty string. See
+  doc/troubleshooting.rst for details.
+
+* A new logging system was added. See the multimailhook.logFile,
+  multimailhook.errorLogFile and multimailhook.debugLogFile variables.
+
+* git_multimail.py can now be made more verbose using
+  multimailhook.verbose.
+
+* A new option --check-ref-filter is now available to help debug
+  the refFilter* options.
+
+Formatting emails
+-----------------
+
+* Formatting of emails was made slightly more compact, to reduce the
+  odds of having long subject lines truncated or wrapped in short lists
+  of commits.
+
+* multimailhook.emailPrefix may now use the '%(repo_shortname)s'
+  placeholder for the repository's short name.
+
+* A new option multimailhook.subjectMaxLength is available to truncate
+  overly long subject lines.
+
+Bug fixes and minor changes
+---------------------------
+
+* Options refFilterDoSendRegex and refFilterDontSendRegex were
+  essentially broken. They should work now.
+
+* The behavior when both refFilter{Do,Dont}SendRegex and
+  refFilter{Exclusion,Inclusion}Regex are set has been slightly
+  changed. Exclusion/Inclusion is now strictly stronger than
+  DoSend/DontSend.
+
+* The management of precedence when a setting can be computed in
+  multiple ways has been considerably refactored and modified.
+  multimailhook.from and multimailhook.reponame now have precedence
+  over the environment-specific settings ($GL_REPO/$GL_USER for
+  gitolite, --stash-user/repo for Stash, --submitter/--project for
+  Gerrit).
+
+* The coverage of the testsuite has been considerably improved. All
+  configuration variables now appear at least once in the testsuite.
+
+This version was tested with Python 2.6 to 3.5. It also mostly works
+with Python 2.4, but there is one known breakage in the testsuite
+related to non-ascii characters. It was tested with Git
+1.7.10.406.gdc801, 1.8.5.6, 2.1.4, and 2.10.0.rc0.1.g07c9292.
+
+Release 1.3.1 (bugfix-only release)
+===================================
+
+* Generate links to commits in combined emails (it was done only for
+  commit emails in 1.3.0).
+
+* Fix broken links on PyPi.
+
+Release 1.3.0
+=============
+
+* New options multimailhook.htmlInIntro and multimailhook.htmlInFooter
+  now allow using HTML in the introduction and footer of emails (e.g.
+  for a more pleasant formatting or to insert a link to the commit on
+  a web interface).
+
+* A new option multimailhook.commitBrowseURL gives a simpler (and less
+  flexible) way to add a link to a web interface for commit emails
+  than multimailhook.htmlInIntro and multimailhook.htmlInFooter.
+
+* A new public function config.add_config_parameters was added to
+  allow custom hooks to set specific Git configuration variables
+  without modifying the configuration files. See an example in
+  post-receive.example.
+
+* Error handling for SMTP has been improved (we used to print Python
+  backtraces for legitimate errors).
+
+* The SMTP mailer can now check TLS certificates when the newly added
+  configuration variable multimailhook.smtpCACerts is set.
+
+* Python 3 portability has been improved.
+
+* The documentation's formatting has been improved.
+
+* The testsuite has been improved (we now use pyflakes to check for
+  errors in the code).
+
+This version has been tested with Python 2.4 and 2.6 to 3.5, and Git
+v1.7.10-406-gdc801e7, 2.1.4 and 2.8.1.339.g3ad15fd.
+
+No change since 1.3 RC1.
+
+Release 1.2.0
+=============
+
+* It is now possible to exclude some refs (e.g. exclude some branches
+  or tags). See refFilterDoSendRegex, refFilterDontSendRegex,
+  refFilterInclusionRegex and refFilterExclusionRegex.
+
+* New commitEmailFormat option which can be set to "html" to generate
+  simple colorized diffs using HTML for the commit emails.
+
+* git-multimail can now be run as a Gerrit ref-updated hook, or from
+  Atlassian BitBucket Server (formerly known as Atlassian Stash).
+
+* The From: field is now more customizable. It can be set
+  independently for refchange emails and commit emails (see
+  fromCommit, fromRefChange). The special values pusher and author can
+  be used in these configuration variables.
+
+* A new command-line option, --version, was added. The version is also
+  available in the X-Git-Multimail-Version header of sent emails.
+
+* Set X-Git-NotificationType header to differentiate the various types
+  of notifications. Current values are: diff, ref_changed_plus_diff,
+  ref_changed.
+
+* Preliminary support for Python 3. The testsuite passes with Python 3,
+  but it has not received as much testing as the Python 2 version yet.
+
+* Several encoding-related fixes. UTF-8 characters work in more
+  situations (but non-ascii characters in email addresses are still not
+  supported).
+
+* The testsuite and its documentation have been greatly improved.
+
+Plus all the bugfixes from version 1.1.1.
+
+This version has been tested with Python 2.4 and 2.6 to 3.5, and Git
+v1.7.10-406-gdc801e7, git-1.8.2.3 and 2.6.0. Git versions prior to
+v1.7.10-406-gdc801e7 probably work, but cannot run the testsuite
+properly.
+
+Release 1.1.1 (bugfix-only release)
+===================================
+
+* The SMTP mailer was not working with Python 2.4.
+
+Release 1.1.0
+=============
+
+* When a single commit is pushed, omit the reference changed email.
+  Set multimailhook.combineWhenSingleCommit to false to disable this
+  new feature.
+
+* In gitolite environments, the pusher's email address can be used as
+  the From address by creating a specially formatted comment block in
+  gitolite.conf (see multimailhook.from in README).
+
+* Support for SMTP authentication and SSL/TLS encryption was added,
+  see smtpUser, smtpPass, smtpEncryption in README.
+
+* A new option scanCommitForCc was added to allow git-multimail to
+  search the commit message for 'Cc: ...' lines, and add the
+  corresponding emails in Cc.
+
+* If $USER is not set, use the variable $USERNAME. This is needed on
+  the Windows platform to recognize the pusher.
+
+* The emailPrefix variable can now be set to an empty string to remove
+  the prefix.
+
+* A short tutorial was added in doc/gitolite.rst to set up
+  git-multimail with gitolite.
+
+* The post-receive file was renamed to post-receive.example. It has
+  always been an example (the standard way to call git-multimail is to
+  call git_multimail.py), but it was unclear to many users.
+
+* A new refchangeShowGraph option was added to make it possible to
+  include both a graph and a log in the summary emails.  The options
+  to control the graph formatting can be set via the new graphOpts
+  option.
+
+* New option --force-send was added to disable new commit detection
+  for update hook. One use-case is to run git_multimail.py after
+  running "git fetch" to send emails about commits that have just been
+  fetched (the detection of new commits was unreliable in this mode).
+
+* The testing infrastructure was considerably improved (continuous
+  integration with travis-ci, automatic check of PEP8 and RST syntax,
+  many improvements to the test scripts).
+
+This version has been tested with Python 2.4 to 2.7, and Git 1.7.1 to
+2.4.
+
+Release 1.0.0
+=============
+
+* Fix encoding of non-ASCII email addresses in email headers.
+
+* Fix backwards-compatibility bugs for older Python 2.x versions.
+
+* Fix a backwards-compatibility bug for Git 1.7.1.
+
+* Add an option commitDiffOpts to customize logs for revisions.
+
+* Pass "-oi" to sendmail by default to prevent premature termination
+  on a line containing only ".".
+
+* Stagger email "Date:" values in an attempt to help mail clients
+  thread the emails in the right order.
+
+* If a mailing list setting is missing, just skip sending the
+  corresponding email (with a warning) instead of failing.
+
+* Add a X-Git-Host header that can be used for email filtering.
+
+* Allow the sender's fully-qualified domain name to be configured.
+
+* Minor documentation improvements.
+
+* Add this CHANGES file.
+
+
+Release 0.9.0
+=============
+
+* Initial release.
diff --git a/third_party/git/contrib/hooks/multimail/CONTRIBUTING.rst b/third_party/git/contrib/hooks/multimail/CONTRIBUTING.rst
new file mode 100644
index 000000000000..de20a5428730
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/CONTRIBUTING.rst
@@ -0,0 +1,60 @@
+Contributing
+============
+
+git-multimail is an open-source project, built by volunteers. We would
+welcome your help!
+
+The current maintainers are `Matthieu Moy <http://matthieu-moy.fr>`__ and
+`Michael Haggerty <https://github.com/mhagger>`__.
+
+Please note that although a copy of git-multimail is distributed in
+the "contrib" section of the main Git project, development takes place
+in a separate `git-multimail repository on GitHub`_.
+
+Whenever enough changes to git-multimail have accumulated, a new
+code-drop of git-multimail will be submitted for inclusion in the Git
+project.
+
+We use the GitHub issue tracker to keep track of bugs and feature
+requests, and we use GitHub pull requests to exchange patches (though,
+if you prefer, you can send patches via the Git mailing list with CC
+to the maintainers). Please sign off your patches as per the `Git
+project practice
+<https://github.com/git/git/blob/master/Documentation/SubmittingPatches#L234>`__.
+
+Please vote for issues you would like to see addressed as a priority
+(click "add your reaction" and then the "+1" thumbs-up button on the
+GitHub issue).
+
+General discussion of git-multimail can take place on the main `Git
+mailing list`_.
+
+Please CC emails regarding git-multimail to the maintainers so that we
+don't overlook them.
+
+Help needed: testers/maintainer for specific environments/OS
+------------------------------------------------------------
+
+The current maintainer uses and tests git-multimail on Linux with the
+Generic environment. More testers, or better yet contributors, are needed
+to test git-multimail on other real-life setups:
+
+* Mac OS X, Windows: git-multimail is currently not supported on these
+  platforms. But since we have no external dependencies and try to
+  write code that is as portable as possible, it is possible that
+  git-multimail already runs there; if not, it could likely be ported
+  easily.
+
+  Patches to improve support for Windows and OS X are welcome.
+  Ideally, there would be a sub-maintainer for each OS who would test
+  at least once before each release (around twice a year).
+
+* Gerrit, Stash, Gitolite environments: although the testsuite
+  contains tests for these environments, a tester/maintainer for each
+  environment would be welcome to test and report failure (or success)
+  on real-life environments periodically (here also, feedback before
+  each release would be highly appreciated).
+
+
+.. _`git-multimail repository on GitHub`: https://github.com/git-multimail/git-multimail
+.. _`Git mailing list`: git@vger.kernel.org
diff --git a/third_party/git/contrib/hooks/multimail/README.Git b/third_party/git/contrib/hooks/multimail/README.Git
new file mode 100644
index 000000000000..044444245d09
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/README.Git
@@ -0,0 +1,15 @@
+This copy of git-multimail is distributed as part of the "contrib"
+section of the Git project as a convenience to Git users.
+git-multimail is developed as an independent project at the following
+website:
+
+    https://github.com/git-multimail/git-multimail
+
+The version in this directory was obtained from the upstream project
+on January 07 2019 and consists of the "git-multimail" subdirectory from
+revision
+
+    04e80e6c40be465cc62b6c246f0fcb8fd2cfd454 refs/tags/1.5.0
+
+Please see the README file in this directory for information about how
+to report bugs or contribute to git-multimail.
diff --git a/third_party/git/contrib/hooks/multimail/README.migrate-from-post-receive-email b/third_party/git/contrib/hooks/multimail/README.migrate-from-post-receive-email
new file mode 100644
index 000000000000..1e6a976699aa
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/README.migrate-from-post-receive-email
@@ -0,0 +1,145 @@
+git-multimail is close to, but not exactly, a plug-in replacement for
+the old Git project script contrib/hooks/post-receive-email.  This
+document describes the differences and explains how to configure
+git-multimail to get behavior closest to that of post-receive-email.
+
+If you are in a hurry
+=====================
+
+A script called migrate-mailhook-config is included with
+git-multimail.  If you run this script within a Git repository that is
+configured to use post-receive-email, it will convert the
+configuration settings into the approximate equivalent settings for
+git-multimail.  For more information, run
+
+    migrate-mailhook-config --help
+
+
+Configuration differences
+=========================
+
+* The names of the config options for git-multimail are in namespace
+  "multimailhook.*" instead of "hooks.*".  (Editorial comment:
+  post-receive-email should never have used such a generic top-level
+  namespace.)
+
+* In emails about new annotated tags, post-receive-email includes a
+  shortlog of all changes since the previous annotated tag.  To get
+  this behavior with git-multimail, you need to set
+  multimailhook.announceshortlog to true:
+
+      git config multimailhook.announceshortlog true
+
+* multimailhook.commitlist -- This is a new configuration variable.
+  Recipients listed here will receive a separate email for each new
+  commit.  However, if this variable is *not* set, it defaults to the
+  value of multimailhook.mailinglist.  Therefore, if you *don't* want
+  the members of multimailhook.mailinglist to receive one email per
+  commit, then set this value to the empty string:
+
+      git config multimailhook.commitlist ''
+
+* multimailhook.emailprefix -- If this value is not set, then the
+  subjects of generated emails are prefixed with the short name of the
+  repository enclosed in square brackets; e.g., "[myrepo]".
+  post-receive-email defaults to prefix "[SCM]" if this option is not
+  set.  So if you were using the old default and want to retain it
+  (for example, to avoid having to change your email filters), set
+  this variable explicitly to the old value:
+
+      git config multimailhook.emailprefix "[SCM]"
+
+* The "multimailhook.showrev" configuration option is not supported.
+  Its main use is obsoleted by the one-email-per-commit feature of
+  git-multimail.
+
+
+Other differences
+=================
+
+This section describes other differences in the behavior of
+git-multimail vs. post-receive-email.  For full details, please refer
+to the main README file:
+
+* One email per commit.  For each reference change, the script first
+  outputs one email summarizing the reference change (including
+  one-line summaries of the new commits), then it outputs a separate
+  email for each new commit that was introduced, including patches.
+  These one-email-per-commit emails go to the addresses listed in
+  multimailhook.commitlist.  post-receive-email sends only one email
+  for each *reference* that is changed, no matter how many commits
+  were added to the reference.
+
+* Better algorithm for detecting new commits.  post-receive-email
+  processes one reference change at a time, which causes it to fail to
+  describe new commits that were included in multiple branches.  For
+  example, if a single push adds the "*" commits in the diagram below,
+  then post-receive-email would never include the details of the two
+  commits that are common to "master" and "branch" in its
+  notifications.
+
+      o---o---o---*---*---*    <-- master
+                       \
+                        *---*  <-- branch
+
+  git-multimail analyzes all reference modifications to determine
+  which commits were not present before the change, therefore avoiding
+  that error.
+
+* In reference change emails, git-multimail tells which commits have
+  been added to the reference vs. are entirely new to the repository,
+  and which commits have been omitted from the reference
+  vs. entirely discarded from the repository.
+
+* The environment in which Git is running can be configured via an
+  "Environment" abstraction.
+
+* Built-in support for Gitolite-managed repositories.
+
+* Instead of using full SHA1 object names in emails, git-multimail
+  mostly uses abbreviated SHA1s, plus one-line log message summaries
+  where appropriate.
+
+* In the schematic diagrams that explain non-fast-forward commits,
+  git-multimail shows the names of the branches involved.
+
+* The emails generated by git-multimail include the name of the Git
+  repository that was modified; this is convenient for recipients who
+  are monitoring multiple repositories.
+
+* git-multimail allows the email "From" addresses to be configured.
+
+* The recipients lists (multimailhook.mailinglist,
+  multimailhook.refchangelist, multimailhook.announcelist, and
+  multimailhook.commitlist) can be comma-separated values and/or
+  multivalued settings in the config file; e.g.,
+
+      [multimailhook]
+              mailinglist = mr.brown@example.com, mr.black@example.com
+              announcelist = Him <him@example.com>
+              announcelist = Jim <jim@example.com>
+              announcelist = pop@example.com
+
+  This might make it easier to maintain short recipient lists without
+  requiring full-fledged mailing list software.
+
+* By default, git-multimail sets email "Reply-To" headers to reply to
+  the pusher (for reference updates) and to the author (for commit
+  notifications).  By default, the pusher's email address is
+  constructed by appending "multimailhook.emaildomain" to the pusher's
+  username.
+
+* The generated emails contain a configurable footer.  By default, it
+  lists the name of the administrator who should be contacted to
+  unsubscribe from notification emails.
+
+* New option multimailhook.emailmaxlinelength to limit the length of
+  lines in the main part of the email body.  The default limit is 500
+  characters.
+
+* New option multimailhook.emailstrictutf8 to ensure that the main
+  part of the email body is valid UTF-8.  Invalid characters are
+  turned into the Unicode replacement character, U+FFFD.  By default
+  this option is turned on.
+
+* Written in Python.  Easier to add new features.
diff --git a/third_party/git/contrib/hooks/multimail/README.rst b/third_party/git/contrib/hooks/multimail/README.rst
new file mode 100644
index 000000000000..7c0fc4a6ef00
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/README.rst
@@ -0,0 +1,774 @@
+git-multimail version 1.5.0
+===========================
+
+.. image:: https://travis-ci.org/git-multimail/git-multimail.svg?branch=master
+    :target: https://travis-ci.org/git-multimail/git-multimail
+
+git-multimail is a tool for sending notification emails on pushes to a
+Git repository.  It includes a Python module called ``git_multimail.py``,
+which can either be used as a hook script directly or can be imported
+as a Python module into another script.
+
+git-multimail is derived from the Git project's old
+contrib/hooks/post-receive-email, and is mostly compatible with that
+script.  See README.migrate-from-post-receive-email for details about
+the differences and for how to migrate from post-receive-email to
+git-multimail.
+
+git-multimail, like the rest of the Git project, is licensed under
+GPLv2 (see the COPYING file for details).
+
+Please note: although, as a convenience, git-multimail may be
+distributed along with the main Git project, development of
+git-multimail takes place in its own, separate project.  Please read
+`<CONTRIBUTING.rst>`__ for more information.
+
+
+By default, for each push received by the repository, git-multimail:
+
+1. Outputs one email summarizing each reference that was changed.
+   These "reference change" (called "refchange" below) emails describe
+   the nature of the change (e.g., was the reference created, deleted,
+   fast-forwarded, etc.) and include a one-line summary of each commit
+   that was added to the reference.
+
+2. Outputs one email for each new commit that was introduced by the
+   reference change.  These "commit" emails include a list of the
+   files changed by the commit, followed by the diffs of files
+   modified by the commit.  The commit emails are threaded to the
+   corresponding reference change email via "In-Reply-To".  This style
+   (similar to the "git format-patch" style used on the Git mailing
+   list) makes it easy to scan through the emails, jump to patches
+   that need further attention, and write comments about specific
+   commits.  Commits are handled in reverse topological order (i.e.,
+   parents shown before children).  For example::
+
+     [git] branch master updated
+     + [git] 01/08: doc: fix xref link from api docs to manual pages
+     + [git] 02/08: api-credentials.txt: show the big picture first
+     + [git] 03/08: api-credentials.txt: mention credential.helper explicitly
+     + [git] 04/08: api-credentials.txt: add "see also" section
+     + [git] 05/08: t3510 (cherry-pick-sequence): add missing '&&'
+     + [git] 06/08: Merge branch 'rr/maint-t3510-cascade-fix'
+     + [git] 07/08: Merge branch 'mm/api-credentials-doc'
+     + [git] 08/08: Git 1.7.11-rc2
+
+   By default, each commit appears in exactly one commit email, the
+   first time that it is pushed to the repository.  If a commit is later
+   merged into another branch, then a one-line summary of the commit
+   is included in the reference change email (as usual), but no
+   additional commit email is generated. See
+   `multimailhook.refFilter(Inclusion|Exclusion|DoSend|DontSend)Regex`
+   below to configure which branches and tags are watched by the hook.
+
+   By default, reference change emails have their "Reply-To" field set
+   to the person who pushed the change, and commit emails have their
+   "Reply-To" field set to the author of the commit.
+
+3. Outputs one "announce" mail for each new annotated tag, including
+   information about the tag and optionally a shortlog describing the
+   changes since the previous tag.  Such emails might be useful if you
+   use annotated tags to mark releases of your project.
+
+
+Requirements
+------------
+
+* Python 2.x, version 2.4 or later.  No non-standard Python modules
+  are required.  git-multimail has preliminary support for Python 3
+  (but it has been better tested with Python 2).
+
+* The ``git`` command must be in your PATH.  git-multimail is known to
+  work with Git versions back to 1.7.1.  (Earlier versions have not
+  been tested; if you do so, please report your results.)
+
+* To send emails using the default configuration, a standard sendmail
+  program must be located at '/usr/sbin/sendmail' or
+  '/usr/lib/sendmail' and must be configured correctly to send emails.
+  If this is not the case, set multimailhook.sendmailCommand, or see
+  the multimailhook.mailer configuration variable below for how to
+  configure git-multimail to send emails via an SMTP server.
+
+* git-multimail is currently tested only on Linux. It may or may not
+  work on other platforms such as Windows and Mac OS. See
+  `<CONTRIBUTING.rst>`__ to improve the situation.
+
+
+Invocation
+----------
+
+``git_multimail.py`` is designed to be used as a ``post-receive`` hook in a
+Git repository (see githooks(5)).  Link or copy it to
+$GIT_DIR/hooks/post-receive within the repository for which email
+notifications are desired.  Usually it should be installed on the
+central repository for a project, to which all commits are eventually
+pushed.
+
+For use on pre-v1.5.1 Git servers, ``git_multimail.py`` can also work as
+an ``update`` hook, taking its arguments on the command line.  To use
+this script in this manner, link or copy it to $GIT_DIR/hooks/update.
+Please note that the script is not completely reliable in this mode
+[1]_.
+
+Alternatively, ``git_multimail.py`` can be imported as a Python module
+into your own Python post-receive script.  This method is a bit more
+work, but allows the behavior of the hook to be customized using
+arbitrary Python code.  For example, you can use a custom environment
+(perhaps inheriting from GenericEnvironment or GitoliteEnvironment) to
+
+* change how the user who did the push is determined
+
+* read users' email addresses from an LDAP server or from a database
+
+* decide which users should be notified about which commits based on
+  the contents of the commits (e.g., for users who want to be notified
+  only about changes affecting particular files or subdirectories)
+
+Or you can change how emails are sent by writing your own Mailer
+class.  The ``post-receive`` script in this directory demonstrates how
+to use ``git_multimail.py`` as a Python module.  (If you make interesting
+changes of this type, please consider sharing them with the
+community.)
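+
+As a minimal illustration, such a wrapper is an ordinary Python script
+that imports the module, applies whatever customizations are needed, and
+then hands control back to git-multimail.  The sketch below only uses
+``git_multimail.main`` (the entry point also used in
+`<doc/customizing-emails.rst>`__); the placeholder comment marks where
+customizations such as template overrides or a custom Environment or
+Mailer class would go::
+
+  #!/usr/bin/env python
+  import sys
+
+  import git_multimail
+
+  # ... customize git_multimail here (override template strings, define
+  # a custom Environment or Mailer class, ...) ...
+
+  git_multimail.main(sys.argv[1:])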
+
+
+Troubleshooting/FAQ
+-------------------
+
+Please read `<doc/troubleshooting.rst>`__ for frequently asked
+questions and common issues with git-multimail.
+
+
+Configuration
+-------------
+
+By default, git-multimail mostly takes its configuration from the
+following ``git config`` settings:
+
+multimailhook.environment
+    This describes the general environment of the repository. In most
+    cases, you do not need to specify a value for this variable:
+    `git-multimail` will autodetect which environment to use.
+    Currently supported values:
+
+    generic
+      the username of the pusher is read from $USER or $USERNAME and
+      the repository name is derived from the repository's path.
+
+    gitolite
+      Environment to use when ``git-multimail`` is run as a gitolite_
+      hook.
+
+      The username of the pusher is read from $GL_USER, the repository
+      name is read from $GL_REPO, and the From: header value is
+      optionally read from gitolite.conf (see multimailhook.from).
+
+      For more information about gitolite and git-multimail, read
+      `<doc/gitolite.rst>`__
+
+    stash
+      Environment to use when ``git-multimail`` is run as an Atlassian
+      BitBucket Server (formerly known as Atlassian Stash) hook.
+
+      **Warning:** this mode was provided by a third-party contributor
+      and never tested by the git-multimail maintainers. It is
+      provided as-is and may or may not work for you.
+
+      This value is automatically assumed when the stash-specific
+      flags (``--stash-user`` and ``--stash-repo``) are specified on
+      the command line. When this environment is active, the username
+      and repo come from these two command line flags, which must be
+      specified.
+
+    gerrit
+      Environment to use when ``git-multimail`` is run as a
+      ``ref-updated`` Gerrit hook.
+
+      This value is used when the gerrit-specific command line flags
+      (``--oldrev``, ``--newrev``, ``--refname``, ``--project``) for
+      gerrit's ref-updated hook are present. When this environment is
+      active, the username of the pusher is taken from the
+      ``--submitter`` argument if that command line option is passed,
+      otherwise 'Gerrit' is used. The repository name is taken from
+      the ``--project`` option on the command line, which must be passed.
+
+      For more information about gerrit and git-multimail, read
+      `<doc/gerrit.rst>`__
+
+    If none of these environments is suitable for your setup, then you
+    can implement a Python class that inherits from Environment and
+    instantiate it via a script that looks like the example
+    post-receive script.
+
+    The environment value can be specified on the command line using
+    the ``--environment`` option. If it is not specified on the
+    command line or by ``multimailhook.environment``, the value is
+    guessed as follows:
+
+    * If stash-specific (respectively gerrit-specific) command flags
+      are present on the command-line, then ``stash`` (respectively
+      ``gerrit``) is used.
+
+    * If the environment variables $GL_USER and $GL_REPO are set, then
+      ``gitolite`` is used.
+
+    * If none of the above apply, then ``generic`` is used.
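+
+    For example, to select the gitolite environment explicitly::
+
+      git config multimailhook.environment gitolite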
+
+multimailhook.repoName
+    A short name of this Git repository, to be used in various places
+    in the notification email text.  The default is to use $GL_REPO
+    for gitolite repositories, or otherwise to derive this value from
+    the repository path name.
+
+multimailhook.mailingList
+    The list of email addresses to which notification emails should be
+    sent, as RFC 2822 email addresses separated by commas.  This
+    configuration option can be multivalued.  Leave it unset or set it
+    to the empty string to not send emails by default.  The next few
+    settings can be used to configure specific address lists for
+    specific types of notification email.
+
+multimailhook.refchangeList
+    The list of email addresses to which summary emails about
+    reference changes should be sent, as RFC 2822 email addresses
+    separated by commas.  This configuration option can be
+    multivalued.  The default is the value in
+    multimailhook.mailingList.  Set this value to "none" (or the empty
+    string) to prevent reference change emails from being sent even if
+    multimailhook.mailingList is set.
+
+multimailhook.announceList
+    The list of email addresses to which emails about new annotated
+    tags should be sent, as RFC 2822 email addresses separated by
+    commas.  This configuration option can be multivalued.  The
+    default is the value in multimailhook.refchangeList or
+    multimailhook.mailingList.  Set this value to "none" (or the empty
+    string) to prevent annotated tag announcement emails from being sent
+    even if one of the other values is set.
+
+multimailhook.commitList
+    The list of email addresses to which emails about individual new
+    commits should be sent, as RFC 2822 email addresses separated by
+    commas.  This configuration option can be multivalued.  The
+    default is the value in multimailhook.mailingList.  Set this value
+    to "none" (or the empty string) to prevent notification emails about
+    individual commits from being sent even if
+    multimailhook.mailingList is set.
+
+multimailhook.announceShortlog
+    If this option is set to true, then emails about changes to
+    annotated tags include a shortlog of changes since the previous
+    tag.  This can be useful if the annotated tags represent releases;
+    then the shortlog will be a kind of rough summary of what has
+    happened since the last release.  But if your tagging policy is
+    not so straightforward, then the shortlog might be confusing
+    rather than useful.  Default is false.
+
+multimailhook.commitEmailFormat
+    The format of email messages for the individual commits; can be "text" or
+    "html". In the latter case, the emails will include diffs using colorized
+    HTML instead of the plain text used by default. Note that currently the
+    ref change emails are always sent in plain text.
+
+    Note that when using "html", the formatting is done by parsing the
+    output of ``git log`` with ``-p``. When using
+    ``multimailhook.commitLogOpts`` to specify a ``--format`` for
+    ``git log``, one may get false positives (e.g. lines in the body of
+    the message starting with ``+++`` or ``---`` colored in red or
+    green).
+
+    By default, the entire message is HTML-escaped. See
+    ``multimailhook.htmlInIntro`` to change this behavior.
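+
+    For example, to enable colorized HTML diffs in commit emails::
+
+      git config multimailhook.commitEmailFormat html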
+
+multimailhook.commitBrowseURL
+    Used to generate a link to an online repository browser in commit
+    emails. This variable must be a string. Format directives like
+    ``%(<variable>)s`` will be expanded the same way as template
+    strings. In particular, ``%(id)s`` will be replaced by the full
+    Git commit identifier (40-chars hexadecimal).
+
+    If the string does not contain any format directive, then
+    ``%(id)s`` will be automatically added to the string. If you don't
+    want ``%(id)s`` to be automatically added, use the empty format
+    directive ``%()s`` anywhere in the string.
+
+    For example, a suitable value for the git-multimail project itself
+    would be
+    ``https://github.com/git-multimail/git-multimail/commit/%(id)s``.
+
+multimailhook.htmlInIntro, multimailhook.htmlInFooter
+    When generating an HTML message, git-multimail escapes any HTML
+    sequence by default. This means that if a template contains HTML
+    like ``<a href="foo">link</a>``, the reader will see the HTML
+    source code and not a proper link.
+
+    Set ``multimailhook.htmlInIntro`` to true to allow writing HTML
+    formatting in introduction templates. Similarly, set
+    ``multimailhook.htmlInFooter`` for HTML in the footer.
+
+    Variables expanded in the template are still escaped. For example,
+    if a repository's path contains a ``<``, it will be rendered as
+    such in the message.
+
+    Read `<doc/customizing-emails.rst>`__ for more details and
+    examples.
+
+multimailhook.refchangeShowGraph
+    If this option is set to true, then summary emails about reference
+    changes will additionally include:
+
+    * a graph of the added commits (if any)
+
+    * a graph of the discarded commits (if any)
+
+    The log is generated by running ``git log --graph`` with the options
+    specified in graphOpts.  The default is false.
+
+multimailhook.refchangeShowLog
+    If this option is set to true, then summary emails about reference
+    changes will include a detailed log of the added commits in
+    addition to the one line summary.  The log is generated by running
+    ``git log`` with the options specified in multimailhook.logOpts.
+    Default is false.
+
+multimailhook.mailer
+    This option changes the way emails are sent.  Accepted values are:
+
+    * **sendmail (the default)**: use the command ``/usr/sbin/sendmail`` or
+      ``/usr/lib/sendmail`` (or sendmailCommand, if configured).  This
+      mode can be further customized via the following options:
+
+      multimailhook.sendmailCommand
+          The command used by mailer ``sendmail`` to send emails.  Shell
+          quoting is allowed in the value of this setting, but remember that
+          Git requires double-quotes to be escaped; e.g.::
+
+              git config multimailhook.sendmailcommand '/usr/sbin/sendmail -oi -t -F \"Git Repo\"'
+
+          Default is '/usr/sbin/sendmail -oi -t' or
+          '/usr/lib/sendmail -oi -t' (depending on which file is
+          present and executable).
+
+      multimailhook.envelopeSender
+          If set then pass this value to sendmail via the -f option to set
+          the envelope sender address.
+
+    * **smtp**: use Python's smtplib.  This is useful when the sendmail
+      command is not available on the system.  This mode can be
+      further customized via the following options:
+
+      multimailhook.smtpServer
+          The name of the SMTP server to connect to.  The value can
+          also include a colon and a port number; e.g.,
+          ``mail.example.com:25``.  Default is 'localhost' using port 25.
+
+      multimailhook.smtpUser, multimailhook.smtpPass
+          Server username and password. Required if smtpEncryption is 'ssl'.
+          Note that the username and password currently need to be
+          stored in cleartext in the configuration file, which is not
+          recommended. If you need to use this option, be sure your
+          configuration file is read-only.
+
+      multimailhook.envelopeSender
+        The sender address to be passed to the SMTP server.  If
+        unset, then the value of multimailhook.from is used.
+
+      multimailhook.smtpServerTimeout
+        Timeout in seconds. Default is 10.
+
+      multimailhook.smtpEncryption
+        Set the security type. Allowed values: ``none``, ``ssl``, ``tls`` (starttls).
+        Default is ``none``.
+
+      multimailhook.smtpCACerts
+        Set the path to a list of trusted CA certificates used to verify
+        the server certificate; this is only supported when
+        ``smtpEncryption`` is ``tls``. If unset or empty, the server
+        certificate is not verified. If it points to a file containing a
+        list of trusted CA certificates (PEM format), these CAs will be
+        used to verify the server certificate. On Debian, you can set it
+        to ``/etc/ssl/certs/ca-certificates.crt`` to use the system
+        trusted CAs. For a self-signed server certificate, you can add
+        your server certificate to the system store::
+
+            cd /usr/local/share/ca-certificates/
+            openssl s_client -starttls smtp \
+                   -connect mail.example.net:587 -showcerts \
+                   </dev/null 2>/dev/null \
+                 | openssl x509 -outform PEM >mail.example.net.crt
+            update-ca-certificates
+
+        and then use the updated ``/etc/ssl/certs/ca-certificates.crt``. Or
+        directly use your ``/path/to/mail.example.net.crt``. Default is
+        unset.
+
+      multimailhook.smtpServerDebugLevel
+        Integer number. Set to greater than 0 to activate debugging.
+
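+    For reference, a complete ``smtp`` setup might look like the
+    following (the server name and credentials are placeholders; as noted
+    above, storing a cleartext password means the configuration file
+    should be kept read-only)::
+
+      [multimailhook]
+              mailer = smtp
+              smtpServer = mail.example.com:587
+              smtpEncryption = tls
+              smtpUser = git-notifier
+              smtpPass = not-a-real-password
+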
+multimailhook.from, multimailhook.fromCommit, multimailhook.fromRefchange
+    If set, use this value in the From: field of generated emails.
+    ``fromCommit`` is used for commit emails, ``fromRefchange`` is
+    used for refchange emails, and ``from`` is used as fall-back in
+    all cases.
+
+    The value for these variables can be either:
+
+    - An email address, which will be used directly.
+
+    - The value ``pusher``, in which case the pusher's address (if
+      available) will be used.
+
+    - The value ``author`` (meaningful only for ``fromCommit``), in which
+      case the commit author's address will be used.
+
+    If config values are unset, the value of the From: header is
+    determined as follows:
+
+    1. (gitolite environment only)
+       1.a) If ``multimailhook.MailaddressMap`` is set, and is a path
+       to an existing file (if relative, it is considered relative to
+       the place where ``gitolite.conf`` is located), then this file
+       should contain lines like::
+
+           username Firstname Lastname <email@example.com>
+
+       git-multimail will then look for a line where ``$GL_USER``
+       matches the ``username`` part, and use the rest of the line for
+       the ``From:`` header.
+
+       1.b) Parse gitolite.conf, looking for a block of comments that
+       looks like this::
+
+           # BEGIN USER EMAILS
+           # username Firstname Lastname <email@example.com>
+           # END USER EMAILS
+
+       If that block exists, and there is a line between the BEGIN
+       USER EMAILS and END USER EMAILS lines where the first field
+       matches the gitolite username ($GL_USER), use the rest of the
+       line for the From: header.
+
+    2. If the user.email configuration setting is set, use its value
+       (and the value of user.name, if set).
+
+    3. Use the value of multimailhook.envelopeSender.
+
+multimailhook.MailaddressMap
+    (gitolite environment only)
+    File to look for a ``From:`` address based on the user doing the
+    push. Defaults to unset. See ``multimailhook.from`` for details.
+
+multimailhook.administrator
+    The name and/or email address of the administrator of the Git
+    repository; used in FOOTER_TEMPLATE.  Default is
+    multimailhook.envelopesender if it is set; otherwise a generic
+    string is used.
+
+multimailhook.emailPrefix
+    All emails have this string prepended to their subjects, to aid
+    email filtering (though filtering based on the X-Git-* email
+    headers is probably more robust).  Default is the short name of
+    the repository in square brackets; e.g., ``[myrepo]``.  Set this
+    value to the empty string to suppress the email prefix. You may
+    use the placeholder ``%(repo_shortname)s`` for the short name of
+    the repository.
+
+multimailhook.emailMaxLines
+    The maximum number of lines that should be included in the body of
+    a generated email.  If not specified, there is no limit.  Lines
+    beyond the limit are suppressed and counted, and a final line is
+    added indicating the number of suppressed lines.
+
+multimailhook.emailMaxLineLength
+    The maximum length of a line in the email body.  Lines longer than
+    this limit are truncated to this length with a trailing ``[...]``
+    added to indicate the missing text.  The default is 500, because
+    (a) diffs with longer lines are probably from binary files, for
+    which a diff is useless, and (b) even if a text file has such long
+    lines, the diffs are probably unreadable anyway.  To disable line
+    truncation, set this option to 0.
+
+multimailhook.subjectMaxLength
+    The maximum length of the subject line (i.e. the ``oneline`` field
+    in templates, not including the prefix). Lines longer than this
+    limit are truncated to this length with a trailing ``[...]`` added
+    to indicate the missing text. The default is to use
+    ``multimailhook.emailMaxLineLength``. This option avoids sending
+    emails with overly long subject lines, but should not be needed if
+    the commit messages follow the Git convention (one short subject
+    line, then a blank line, then the message body). To disable line
+    truncation, set this option to 0.
+
+multimailhook.maxCommitEmails
+    The maximum number of commit emails to send for a given change.
+    When the number of patches is larger than this value, only the
+    summary refchange email is sent.  This can avoid accidental
+    mailbombing, for example on an initial push.  To disable the commit
+    email limit, set this option to 0.  The default is 500.
+
+multimailhook.excludeMergeRevisions
+    When sending out revision emails, do not consider merge commits (the
+    functional equivalent of `rev-list --no-merges`).
+    The default is `false` (send merge commit emails).
+
+multimailhook.emailStrictUTF8
+    If this boolean option is set to `true`, then the main part of the
+    email body is forced to be valid UTF-8.  Any characters that are
+    not valid UTF-8 are converted to the Unicode replacement
+    character, U+FFFD.  The default is `true`.
+
+    This option is ineffective with Python 3, where non-UTF-8
+    characters are unconditionally replaced.
+
+multimailhook.diffOpts
+    Options passed to ``git diff-tree`` when generating the summary
+    information for ReferenceChange emails.  Default is ``--stat
+    --summary --find-copies-harder``.  Add -p to those options to
+    include a unified diff of changes in addition to the usual summary
+    output.  Shell quoting is allowed; see ``multimailhook.logOpts`` for
+    details.
+
+multimailhook.graphOpts
+    Options passed to ``git log --graph`` when generating graphs for the
+    reference change summary emails (used only if refchangeShowGraph
+    is true).  The default is '--oneline --decorate'.
+
+    Shell quoting is allowed; see logOpts for details.
+
+multimailhook.logOpts
+    Options passed to ``git log`` to generate additional info for
+    reference change emails (used only if refchangeShowLog is set).
+    For example, adding -p will show each commit's complete diff.  The
+    default is empty.
+
+    Shell quoting is allowed; for example, a log format that contains
+    spaces can be specified using something like::
+
+      git config multimailhook.logopts '--pretty=format:"%h %aN <%aE>%n%s%n%n%b%n"'
+
+    If you want to set this by editing your configuration file
+    directly, remember that Git requires double-quotes to be escaped
+    (see git-config(1) for more information)::
+
+      [multimailhook]
+              logopts = --pretty=format:\"%h %aN <%aE>%n%s%n%n%b%n\"
+
+multimailhook.commitLogOpts
+    Options passed to ``git log`` to generate additional info for
+    revision change emails.  For example, adding --ignore-all-space
+    will suppress whitespace changes.  The default options are ``-C
+    --stat -p --cc``.  Shell quoting is allowed; see
+    multimailhook.logOpts for details.
+
+multimailhook.dateSubstitute
+    String to use as a substitute for ``Date:`` in the output of ``git
+    log`` while formatting commit messages. This is useful to avoid
+    emitting a line that can be interpreted by mailers as the start of
+    a cited message (Zimbra webmail in particular). Defaults to
+    ``CommitDate:``. Set to an empty string or ``none`` to deactivate
+    the behavior.
+
+multimailhook.emailDomain
+    Domain name appended to the username of the person doing the push
+    to convert it into an email address
+    (via ``"%s@%s" % (username, emaildomain)``). More complicated
+    schemes can be implemented by overriding Environment and
+    overriding its get_pusher_email() method.
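+
+    As a sketch (the class name and the address table below are purely
+    illustrative; only ``GenericEnvironment`` and ``get_pusher_email()``
+    are git-multimail names), such an override could look like::
+
+      import os
+
+      import git_multimail
+
+      class AddressTableEnvironment(git_multimail.GenericEnvironment):
+          """Hypothetical environment: look pushers up in a local table."""
+
+          ADDRESSES = {'alice': 'alice@example.com'}
+
+          def get_pusher_email(self):
+              pusher = os.environ.get('USER') or os.environ.get('USERNAME')
+              return (self.ADDRESSES.get(pusher)
+                      or super(AddressTableEnvironment, self).get_pusher_email())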
+
+multimailhook.replyTo, multimailhook.replyToCommit, multimailhook.replyToRefchange
+    Addresses to use in the Reply-To: field for commit emails
+    (replyToCommit) and refchange emails (replyToRefchange).
+    multimailhook.replyTo is used as default when replyToCommit or
+    replyToRefchange is not set. The shortcuts ``pusher`` and
+    ``author`` are allowed with the same semantics as for
+    ``multimailhook.from``. In addition, the value ``none`` can be
+    used to omit the ``Reply-To:`` field.
+
+    The default is ``pusher`` for refchange emails, and ``author`` for
+    commit emails.
+
+multimailhook.quiet
+    Do not output the list of email recipients from the hook
+
+multimailhook.stdout
+    For debugging, send emails to stdout rather than to the
+    mailer.  Equivalent to the --stdout command line option
+
+multimailhook.scanCommitForCc
+    If this option is set to true, then recipients from lines in the commit
+    body that start with ``CC:`` will be added to the CC list.
+    Default: false
+
+multimailhook.combineWhenSingleCommit
+    If this option is set to true and a single new commit is pushed to
+    a branch, combine the summary and commit email messages into a
+    single email.
+    Default: true
+
+multimailhook.refFilterInclusionRegex, multimailhook.refFilterExclusionRegex, multimailhook.refFilterDoSendRegex, multimailhook.refFilterDontSendRegex
+    **Warning:** these options are experimental. They should work, but
+    the user-interface is not stable yet (in particular, the option
+    names may change). If you want to participate in stabilizing the
+    feature, please contact the maintainers and/or send pull-requests.
+    If you are happy with the current shape of the feature, please
+    report it too.
+
+    Regular expressions that can be used to limit refs for which email
+    updates will be sent.  It is an error to specify both an inclusion
+    and an exclusion regex.  If a ``refFilterInclusionRegex`` is
+    specified, emails will only be sent for refs which match this
+    regex.  If a ``refFilterExclusionRegex`` regex is specified,
+    emails will be sent for all refs except those that match this
+    regex (or that match a predefined regex specific to the
+    environment, such as "^refs/notes" for most environments and
+    "^refs/notes|^refs/changes" for the gerrit environment).
+
+    The expressions are matched against the complete refname, and are
+    considered to match if any substring matches. For example, to
+    filter-out all tags, set ``refFilterExclusionRegex`` to
+    ``^refs/tags/`` (note the leading ``^`` but no trailing ``$``). If
+    you set ``refFilterExclusionRegex`` to ``master``, then any ref
+    containing ``master`` will be excluded (the ``master`` branch, but
+    also ``refs/tags/master`` or ``refs/heads/foo-master-bar``).
+
+    ``refFilterDoSendRegex`` and ``refFilterDontSendRegex`` are
+    analogous to ``refFilterInclusionRegex`` and
+    ``refFilterExclusionRegex`` with one difference: with
+    ``refFilterDoSendRegex`` and ``refFilterDontSendRegex``, commits
+    introduced by one excluded ref will not be considered as new when
+    they reach an included ref. Typically, if you add a branch ``foo``
+    to  ``refFilterDontSendRegex``, push commits to this branch, and
+    later merge branch ``foo`` into ``master``, then the notification
+    email for ``master`` will contain a commit email only for the
+    merge commit. If you include ``foo`` in
+    ``refFilterExclusionRegex``, then at the time of merge, you will
+    receive one commit email per commit in the branch.
+
+    These variables can be multi-valued, like::
+
+      [multimailhook]
+              refFilterExclusionRegex = ^refs/tags/
+              refFilterExclusionRegex = ^refs/heads/master$
+
+    You can also provide a whitespace-separated list like::
+
+      [multimailhook]
+              refFilterExclusionRegex = ^refs/tags/ ^refs/heads/master$
+
+    Both examples exclude tags and the master branch, and are
+    equivalent to::
+
+      [multimailhook]
+              refFilterExclusionRegex = ^refs/tags/|^refs/heads/master$
+
+    ``refFilterInclusionRegex`` and ``refFilterExclusionRegex`` are
+    strictly stronger than ``refFilterDoSendRegex`` and
+    ``refFilterDontSendRegex``. In other words, adding a ref to a
+    DoSend/DontSend regex has no effect if it is already excluded by a
+    Exclusion/Inclusion regex.
+
+multimailhook.logFile, multimailhook.errorLogFile, multimailhook.debugLogFile
+
+    When set, these variables designate paths to files where
+    git-multimail will log some messages. Normal messages and error
+    messages are sent to ``logFile``, and error messages are also sent
+    to ``errorLogFile``. Debug messages and all other messages are
+    sent to ``debugLogFile``. The recommended way is to set only one
+    of these variables, but it is also possible to set several of them
+    (part of the information is then duplicated in several log files,
+    for example errors are duplicated to all log files).
+
+    Relative paths are relative to the Git repository where the push is
+    done.
+
+multimailhook.verbose
+
+    Verbosity level of git-multimail on its standard output. By
+    default, show only error and info messages. If set to true, also
+    show debug messages.
+
+Email filtering aids
+--------------------
+
+All emails include extra headers to enable fine tuned filtering and
+give information for debugging.  All emails include the headers
+``X-Git-Host``, ``X-Git-Repo``, ``X-Git-Refname``, and ``X-Git-Reftype``.
+ReferenceChange emails also include headers ``X-Git-Oldrev`` and ``X-Git-Newrev``;
+Revision emails also include header ``X-Git-Rev``.
+
+
+Customizing email contents
+--------------------------
+
+git-multimail mostly generates emails by expanding templates.  The
+templates can be customized.  To avoid the need to edit
+``git_multimail.py`` directly, the preferred way to change the templates
+is to write a separate Python script that imports ``git_multimail.py`` as
+a module, then replaces the templates in place.  See the provided
+post-receive script for an example of how this is done.
+
+
+Customizing git-multimail for your environment
+----------------------------------------------
+
+git-multimail is mostly customized via an "environment" that describes
+the local environment in which Git is running.  Two types of
+environment are built in:
+
+GenericEnvironment
+    a stand-alone Git repository.
+
+GitoliteEnvironment
+    a Git repository that is managed by gitolite_.  For such
+    repositories, the identity of the pusher is read from
+    environment variable $GL_USER, the name of the repository is read
+    from $GL_REPO (if it is not overridden by multimailhook.reponame),
+    and the From: header value is optionally read from gitolite.conf
+    (see multimailhook.from).
+
+By default, git-multimail assumes GitoliteEnvironment if $GL_USER and
+$GL_REPO are set, and otherwise assumes GenericEnvironment.
+Alternatively, you can choose one of these two environments explicitly
+by setting a ``multimailhook.environment`` config setting (which can
+have the value `generic` or `gitolite`) or by passing an --environment
+option to the script.
+
+If you need to customize the script in ways that are not supported by
+the existing environments, you can define your own environment class
+using arbitrary Python code.  To do so, you need to import
+``git_multimail.py`` as a Python module, as demonstrated by the example
+post-receive script.  Then implement your environment class; it should
+usually inherit from one of the existing Environment classes and
+possibly one or more of the EnvironmentMixin classes.  Then set the
+``environment`` variable to an instance of your own environment class
+and pass it to ``run_as_post_receive_hook()``.
+
+The standard environment classes, GenericEnvironment and
+GitoliteEnvironment, are in fact themselves put together out of a
+number of mixin classes, each of which handles one aspect of the
+customization.  For the finest control over your configuration, you
+can specify exactly which mixin classes your own environment class
+should inherit from, and override individual methods (or even add your
+own mixin classes) to implement entirely new behaviors.  If you
+implement any mixins that might be useful to other people, please
+consider sharing them with the community!
+
+
+Getting involved
+----------------
+
+Please read `<CONTRIBUTING.rst>`__ for instructions on how to
+contribute to git-multimail.
+
+
+Footnotes
+---------
+
+.. [1] Because of the way information is passed to update hooks, the
+       script's method of determining whether a commit has already
+       been seen does not work when it is used as an ``update`` script.
+       In particular, no notification email will be generated for a
+       new commit that is added to multiple references in the same
+       push. A workaround is to use --force-send to force sending the
+       emails.
+
+.. _gitolite: https://github.com/sitaramc/gitolite
diff --git a/third_party/git/contrib/hooks/multimail/doc/customizing-emails.rst b/third_party/git/contrib/hooks/multimail/doc/customizing-emails.rst
new file mode 100644
index 000000000000..3f5b67f768db
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/doc/customizing-emails.rst
@@ -0,0 +1,56 @@
+Customizing the content and formatting of emails
+================================================
+
+Overloading template strings
+----------------------------
+
+The content of emails is generated based on template strings defined
+in ``git_multimail.py``. You can customize these template strings
+without changing the script itself, by defining a Python wrapper
+around it. The Python wrapper should ``import git_multimail`` and then
+override the ``git_multimail.*`` strings like this::
+
+  import sys  # needed for sys.argv
+
+  # Import and customize git_multimail:
+  import git_multimail
+  git_multimail.REVISION_INTRO_TEMPLATE = """..."""
+  git_multimail.COMBINED_INTRO_TEMPLATE = git_multimail.REVISION_INTRO_TEMPLATE
+
+  # start git_multimail itself:
+  git_multimail.main(sys.argv[1:])
+
+The template strings can use any value already used in the existing
+templates (read the source code).
+
+Using HTML in template strings
+------------------------------
+
+If ``multimailhook.commitEmailFormat`` is set to HTML, then
+git-multimail will generate HTML emails for commit notifications. The
+log and diff will be formatted automatically by git-multimail. By
+default, any HTML special character in the templates will be escaped.
+
+To use HTML formatting in the introduction of the email, set
+``multimailhook.htmlInIntro`` to ``true``. Then, the template can
+contain arbitrary HTML tags, which will be sent as-is in the email. For
+example, to add some formatting and a link to the online commit, use
+a format like::
+
+  git_multimail.REVISION_INTRO_TEMPLATE = """\
+  <span style="color:#808080">This is an automated email from the git hooks/post-receive script.</span><br /><br />
+
+  <strong>%(pusher)s</strong> pushed a commit to %(refname_type)s %(short_refname)s
+  in repository %(repo_shortname)s.<br />
+
+  <a href="https://github.com/git-multimail/git-multimail/commit/%(newrev)s">View on GitHub</a>.
+  """
+
+Note that the values expanded from ``%(variable)s`` in the format
+strings will still be escaped.
+
+For a less flexible but easier-to-set-up way to add a link to commit
+emails, see ``multimailhook.commitBrowseURL``.
+
+Similarly, one can set ``multimailhook.htmlInFooter`` and override any
+of the ``*_FOOTER*`` template strings.
diff --git a/third_party/git/contrib/hooks/multimail/doc/gerrit.rst b/third_party/git/contrib/hooks/multimail/doc/gerrit.rst
new file mode 100644
index 000000000000..8011d05dec03
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/doc/gerrit.rst
@@ -0,0 +1,56 @@
+Setting up git-multimail on Gerrit
+==================================
+
+Gerrit has its own email-sending system, but you may prefer using
+``git-multimail`` instead. It supports Gerrit natively as a Gerrit
+``ref-updated`` hook (Warning: `Gerrit hooks
+<https://gerrit-review.googlesource.com/Documentation/config-hooks.html>`__
+are distinct from Git hooks). To set up ``git-multimail`` on a Gerrit
+installation, follow the instructions below.
+
+The explanations below show an easy way to set up ``git-multimail``,
+but they leave ``git-multimail`` installed and unconfigured for a
+while. If you run Gerrit on a production server, it is advisable to
+perform the "Set up the hook" step last, to avoid confusing your users
+in the meantime.
+
+Set up the hook
+---------------
+
+Create a directory ``$site_path/hooks/`` if it does not exist (if you
+don't know what ``$site_path`` is, run ``gerrit.sh status`` and look
+for a ``GERRIT_SITE`` line). Either copy ``git_multimail.py`` to
+``$site_path/hooks/ref-updated`` or create a wrapper script like
+this::
+
+  #! /bin/sh
+  exec /path/to/git_multimail.py "$@"
+
+In both cases, make sure the file is named exactly
+``$site_path/hooks/ref-updated`` and is executable.
+
+(Alternatively, you may configure the ``[hooks]`` section of
+gerrit.config.)
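+
+For example, a hypothetical snippet in ``gerrit.config`` (the key
+names follow Gerrit's config-gerrit documentation; verify them against
+your Gerrit version)::
+
+  [hooks]
+    path = /path/to/site/hooks
+    refUpdatedHook = ref-updated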
+
+Configuration
+-------------
+
+Log in to the Gerrit server and edit ``$site_path/git/$project/config``
+to configure ``git-multimail``.
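+
+For example, a minimal configuration might look like this (addresses
+are placeholders)::
+
+  [multimailhook]
+        mailingList = commits@example.com
+        from = gitserver@example.com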
+
+Troubleshooting
+---------------
+
+Warning: this disables ``git-multimail`` while you debug, and could
+confuse your users. Don't do this on a production server.
+
+To debug configuration issues with ``git-multimail``, you can add the
+``--stdout`` option when calling ``git_multimail.py`` like this::
+
+  #!/bin/sh
+  exec /path/to/git-multimail/git-multimail/git_multimail.py \
+    --stdout "$@" >> /tmp/log.txt
+
+and try pushing from a test repository. The source of the email that
+would have been sent will be appended to the file ``/tmp/log.txt``
+rather than shown in the output of ``git push``.
diff --git a/third_party/git/contrib/hooks/multimail/doc/gitolite.rst b/third_party/git/contrib/hooks/multimail/doc/gitolite.rst
new file mode 100644
index 000000000000..505483310552
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/doc/gitolite.rst
@@ -0,0 +1,118 @@
+Setting up git-multimail on gitolite
+====================================
+
+``git-multimail`` supports gitolite 3 natively.
+The explanations below show an easy way to set up ``git-multimail``,
+but they leave ``git-multimail`` installed and unconfigured for a
+while. If you run gitolite on a production server, it is advisable to
+perform the "Set up the hook" step last, to avoid confusing your users
+in the meantime.
+
+Set up the hook
+---------------
+
+Log in as your gitolite user.
+
+Create a file ``.gitolite/hooks/common/post-receive`` on your gitolite
+account containing (adapt the path, obviously)::
+
+  #!/bin/sh
+  exec /path/to/git-multimail/git-multimail/git_multimail.py "$@"
+
+Make sure it's executable (``chmod +x``). Record the hook in
+gitolite::
+
+  gitolite setup
+
+Configuration
+-------------
+
+First, you have to allow the admin to set Git configuration variables.
+
+As gitolite user, edit the line containing ``GIT_CONFIG_KEYS`` in file
+``.gitolite.rc``, to make it look like::
+
+  GIT_CONFIG_KEYS                 =>  'multimailhook\..*',
+
+You can now log out and return to your normal user.
+
+In the ``gitolite-admin`` clone, edit the file ``conf/gitolite.conf``
+and add::
+
+  repo @all
+      # Not strictly needed as git_multimail.py will choose gitolite if
+      # $GL_USER is set.
+      config multimailhook.environment = gitolite
+      config multimailhook.mailingList = # Where emails should be sent
+      config multimailhook.from = # From address to use
+
+Note that by default, gitolite forbids ``<`` and ``>`` in variable
+values (for security/paranoia reasons, see
+`compensating for UNSAFE_PATT
+<http://gitolite.com/gitolite/git-config/index.html#compensating-for-unsafe95patt>`__
+in gitolite's documentation for explanations and a way to disable
+this). As a consequence, you will not be able to use ``First Last
+<First.Last@example.com>`` as a recipient email address, but specifying
+``First.Last@example.com`` alone works.
+
+Obviously, you can customize all parameters on a per-repository basis by
+adding these ``config multimailhook.*`` lines in the section
+corresponding to a repository or set of repositories.
+
+To activate ``git-multimail`` on a per-repository basis, do not set
+``multimailhook.mailingList`` in the ``@all`` section and set it only
+for repositories for which you want ``git-multimail``.
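+
+For example, the following sketch enables notifications only for
+``myproject`` (names and addresses are placeholders)::
+
+  repo @all
+      config multimailhook.environment = gitolite
+      config multimailhook.from = gitserver@example.com
+
+  repo myproject
+      config multimailhook.mailingList = myproject-commits@example.com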
+
+Alternatively, you can set up the ``From:`` field on a per-user basis
+by adding a ``BEGIN USER EMAILS``/``END USER EMAILS`` section (see
+``../README``).
+
+Gitolite-specific configuration notes
+-------------------------------------------
+
+Empty configuration variables
+.............................
+
+With gitolite, the syntax ``config multimailhook.commitList = ""``
+unsets the variable instead of setting it to an empty string (see
+`here
+<http://gitolite.com/gitolite/git-config.html#an-important-warning-about-deleting-a-config-line>`__).
+As a result, there is no way to set a variable to the empty string.
+In almost all places where an empty value is required, git-multimail
+now allows you to specify the special value ``"none"`` (case-sensitive)
+to mean the same thing.
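+
+For example, to give ``multimailhook.commitList`` an effectively empty
+value for one repository, a sketch could be::
+
+  repo myproject
+      config multimailhook.commitList = none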
+
+Alternatively, one can use ``" "`` (a single space) instead of ``""``.
+In most cases (in particular ``multimailhook.*List`` variables), this
+will be equivalent to an empty string.
+
+If you have a use-case where ``"none"`` is not an acceptable value and
+you need ``" "`` or  ``""`` instead, please report it as a bug to
+git-multimail.
+
+Allowing Regular Expressions in Configuration
+.............................................
+
+gitolite has a mechanism to prevent unsafe configuration variable
+values; it rejects characters like ``|`` that are commonly used in
+regular expressions. If you do not need this safety feature and
+need to use regular expressions in your configuration (e.g. for
+``multimailhook.refFilter*`` variables), set
+`UNSAFE_PATT
+<http://gitolite.com/gitolite/git-config.html#unsafe-patt>`__ to a
+less restrictive value.
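+
+For example, a sketch of a relaxed pattern in ``.gitolite.rc`` (this
+removes ``|`` from the forbidden characters; check the gitolite
+documentation linked above for the exact syntax and the security
+implications before copying it)::
+
+  $UNSAFE_PATT = qr([`~#\$\&();<>]);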
+
+Troubleshooting
+---------------
+
+Warning: this disables ``git-multimail`` while you debug, and could
+confuse your users. Don't do this on a production server.
+
+To debug configuration issues with ``git-multimail``, you can add the
+``--stdout`` option when calling ``git_multimail.py`` like this::
+
+  #!/bin/sh
+  exec /path/to/git-multimail/git-multimail/git_multimail.py --stdout "$@"
+
+and try pushing from a test repository. You should see the source of
+the email that would have been sent in the output of ``git push``.
diff --git a/third_party/git/contrib/hooks/multimail/doc/troubleshooting.rst b/third_party/git/contrib/hooks/multimail/doc/troubleshooting.rst
new file mode 100644
index 000000000000..651b509ee66c
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/doc/troubleshooting.rst
@@ -0,0 +1,78 @@
+Troubleshooting issues with git-multimail: a FAQ
+================================================
+
+How to check that git-multimail is properly set up?
+---------------------------------------------------
+
+Since version 1.4.0, git-multimail provides a simple self-check of
+its configuration: run it with the environment variable
+``GIT_MULTIMAIL_CHECK_SETUP`` set to a non-empty string. You should
+get something like this::
+
+  $ GIT_MULTIMAIL_CHECK_SETUP=true /home/moy/dev/git-multimail/git-multimail/git_multimail.py
+  Environment values:
+      administrator : 'the administrator of this repository'
+      charset : 'utf-8'
+      emailprefix : '[git-multimail] '
+      fqdn : 'anie'
+      projectdesc : 'UNNAMED PROJECT'
+      pusher : 'moy'
+      repo_path : '/home/moy/dev/git-multimail'
+      repo_shortname : 'git-multimail'
+
+  Now, checking that git-multimail's standard input is properly set ...
+  Please type some text and then press Return
+  foo
+  You have just entered:
+  foo
+  git-multimail seems properly set up.
+
+If you forgot to set an important variable, you may get instead::
+
+  $ GIT_MULTIMAIL_CHECK_SETUP=true /home/moy/dev/git-multimail/git-multimail/git_multimail.py
+  No email recipients configured!
+
+Do not set ``$GIT_MULTIMAIL_CHECK_SETUP`` except when testing your
+configuration: it disables the hook completely.
+
+Git is not using the right address in the From/To/Reply-To field
+----------------------------------------------------------------
+
+First, check which addresses git-multimail actually uses. A lot can
+happen to your email (especially when posting to a mailing list)
+between the time `git_multimail.py` sends it and the time it reaches
+your inbox.
+
+A simple test (run it on a test repository; do not use it in production,
+as it disables email sending): change your post-receive hook to call
+`git_multimail.py` with the `--stdout` option, and try to push to the
+repository. You should see something like::
+
+  Counting objects: 3, done.
+  Writing objects: 100% (3/3), 263 bytes | 0 bytes/s, done.
+  Total 3 (delta 0), reused 0 (delta 0)
+  remote: Sending notification emails to: foo.bar@example.com
+  remote: ===========================================================================
+  remote: Date: Mon, 25 Apr 2016 18:39:59 +0200
+  remote: To: foo.bar@example.com
+  remote: Subject: [git] branch master updated: foo
+  remote: MIME-Version: 1.0
+  remote: Content-Type: text/plain; charset=utf-8
+  remote: Content-Transfer-Encoding: 8bit
+  remote: Message-ID: <20160425163959.2311.20498@anie>
+  remote: From: Auth Or <Foo.Bar@example.com>
+  remote: Reply-To: Auth Or <Foo.Bar@example.com>
+  remote: X-Git-Host: example
+  ...
+  remote: --
+  remote: To stop receiving notification emails like this one, please contact
+  remote: the administrator of this repository.
+  remote: ===========================================================================
+  To /path/to/repo
+     6278f04..e173f20  master -> master
+
+Note: this does not include the sender (the ``Return-Path:`` header),
+as it is not part of the message content but is passed separately to the
+mailer. Some mailers show the ``Sender:`` field instead of the ``From:`` field (for
+example, Zimbra Webmail shows ``From: <sender-field> on behalf of
+<from-field>``).
diff --git a/third_party/git/contrib/hooks/multimail/git_multimail.py b/third_party/git/contrib/hooks/multimail/git_multimail.py
new file mode 100755
index 000000000000..f563be82fc7e
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/git_multimail.py
@@ -0,0 +1,4346 @@
+#! /usr/bin/env python
+
+__version__ = '1.5.0'
+
+# Copyright (c) 2015-2016 Matthieu Moy and others
+# Copyright (c) 2012-2014 Michael Haggerty and others
+# Derived from contrib/hooks/post-receive-email, which is
+# Copyright (c) 2007 Andy Parkins
+# and also includes contributions by other authors.
+#
+# This file is part of git-multimail.
+#
+# git-multimail is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License version
+# 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see
+# <http://www.gnu.org/licenses/>.
+
+"""Generate notification emails for pushes to a git repository.
+
+This hook sends emails describing changes introduced by pushes to a
+git repository.  For each reference that was changed, it emits one
+ReferenceChange email summarizing how the reference was changed,
+followed by one Revision email for each new commit that was introduced
+by the reference change.
+
+Each commit is announced in exactly one Revision email.  If the same
+commit is merged into another branch in the same or a later push, then
+the ReferenceChange email will list the commit's SHA1 and its one-line
+summary, but no new Revision email will be generated.
+
+This script is designed to be used as a "post-receive" hook in a git
+repository (see githooks(5)).  It can also be used as an "update"
+script, but this usage is not completely reliable and is deprecated.
+
+To help with debugging, this script accepts a --stdout option, which
+causes the emails to be written to standard output rather than sent
+using sendmail.
+
+See the accompanying README file for the complete documentation.
+
+"""
+
+import sys
+import os
+import re
+import bisect
+import socket
+import subprocess
+import shlex
+import optparse
+import logging
+import smtplib
+try:
+    import ssl
+except ImportError:
+    # Python < 2.6 does not have ssl, but that's OK if we don't use it.
+    pass
+import time
+
+import uuid
+import base64
+
+PYTHON3 = sys.version_info >= (3, 0)
+
+if sys.version_info <= (2, 5):
+    def all(iterable):
+        for element in iterable:
+            if not element:
+                return False
+        return True
+
+
+def is_ascii(s):
+    return all(ord(c) < 128 and ord(c) > 0 for c in s)
+
+
+if PYTHON3:
+    def is_string(s):
+        return isinstance(s, str)
+
+    def str_to_bytes(s):
+        return s.encode(ENCODING)
+
+    def bytes_to_str(s, errors='strict'):
+        return s.decode(ENCODING, errors)
+
+    unicode = str
+
+    def write_str(f, msg):
+        # Try outputting with the default encoding. If it fails,
+        # try UTF-8.
+        try:
+            f.buffer.write(msg.encode(sys.getdefaultencoding()))
+        except UnicodeEncodeError:
+            f.buffer.write(msg.encode(ENCODING))
+
+    def read_line(f):
+        # Try reading with the default encoding. If it fails,
+        # try UTF-8.
+        out = f.buffer.readline()
+        try:
+            return out.decode(sys.getdefaultencoding())
+        except UnicodeEncodeError:
+            return out.decode(ENCODING)
+
+    import html
+
+    def html_escape(s):
+        return html.escape(s)
+
+else:
+    def is_string(s):
+        try:
+            return isinstance(s, basestring)
+        except NameError:  # Silence Pyflakes warning
+            raise
+
+    def str_to_bytes(s):
+        return s
+
+    def bytes_to_str(s, errors='strict'):
+        return s
+
+    def write_str(f, msg):
+        f.write(msg)
+
+    def read_line(f):
+        return f.readline()
+
+    def next(it):
+        return it.next()
+
+    import cgi
+
+    def html_escape(s):
+        return cgi.escape(s, True)
+
+try:
+    from email.charset import Charset
+    from email.utils import make_msgid
+    from email.utils import getaddresses
+    from email.utils import formataddr
+    from email.utils import formatdate
+    from email.header import Header
+except ImportError:
+    # Prior to Python 2.5, the email module used different names:
+    from email.Charset import Charset
+    from email.Utils import make_msgid
+    from email.Utils import getaddresses
+    from email.Utils import formataddr
+    from email.Utils import formatdate
+    from email.Header import Header
+
+
+DEBUG = False
+
+ZEROS = '0' * 40
+LOGBEGIN = '- Log -----------------------------------------------------------------\n'
+LOGEND = '-----------------------------------------------------------------------\n'
+
+ADDR_HEADERS = set(['from', 'to', 'cc', 'bcc', 'reply-to', 'sender'])
+
+# It is assumed in many places that the encoding is uniformly UTF-8,
+# so changing these constants is unsupported.  But define them here
+# anyway, to make it easier to find (at least most of) the places
+# where the encoding is important.
+(ENCODING, CHARSET) = ('UTF-8', 'utf-8')
+
+
+REF_CREATED_SUBJECT_TEMPLATE = (
+    '%(emailprefix)s%(refname_type)s %(short_refname)s created'
+    ' (now %(newrev_short)s)'
+    )
+REF_UPDATED_SUBJECT_TEMPLATE = (
+    '%(emailprefix)s%(refname_type)s %(short_refname)s updated'
+    ' (%(oldrev_short)s -> %(newrev_short)s)'
+    )
+REF_DELETED_SUBJECT_TEMPLATE = (
+    '%(emailprefix)s%(refname_type)s %(short_refname)s deleted'
+    ' (was %(oldrev_short)s)'
+    )
+
+COMBINED_REFCHANGE_REVISION_SUBJECT_TEMPLATE = (
+    '%(emailprefix)s%(refname_type)s %(short_refname)s updated: %(oneline)s'
+    )
+
+REFCHANGE_HEADER_TEMPLATE = """\
+Date: %(send_date)s
+To: %(recipients)s
+Subject: %(subject)s
+MIME-Version: 1.0
+Content-Type: text/%(contenttype)s; charset=%(charset)s
+Content-Transfer-Encoding: 8bit
+Message-ID: %(msgid)s
+From: %(fromaddr)s
+Reply-To: %(reply_to)s
+Thread-Index: %(thread_index)s
+X-Git-Host: %(fqdn)s
+X-Git-Repo: %(repo_shortname)s
+X-Git-Refname: %(refname)s
+X-Git-Reftype: %(refname_type)s
+X-Git-Oldrev: %(oldrev)s
+X-Git-Newrev: %(newrev)s
+X-Git-NotificationType: ref_changed
+X-Git-Multimail-Version: %(multimail_version)s
+Auto-Submitted: auto-generated
+"""
+
+REFCHANGE_INTRO_TEMPLATE = """\
+This is an automated email from the git hooks/post-receive script.
+
+%(pusher)s pushed a change to %(refname_type)s %(short_refname)s
+in repository %(repo_shortname)s.
+
+"""
+
+
+FOOTER_TEMPLATE = """\
+
+-- \n\
+To stop receiving notification emails like this one, please contact
+%(administrator)s.
+"""
+
+
+REWIND_ONLY_TEMPLATE = """\
+This update removed existing revisions from the reference, leaving the
+reference pointing at a previous point in the repository history.
+
+ * -- * -- N   %(refname)s (%(newrev_short)s)
+            \\
+             O -- O -- O   (%(oldrev_short)s)
+
+Any revisions marked "omit" are not gone; other references still
+refer to them.  Any revisions marked "discard" are gone forever.
+"""
+
+
+NON_FF_TEMPLATE = """\
+This update added new revisions after undoing existing revisions.
+That is to say, some revisions that were in the old version of the
+%(refname_type)s are not in the new version.  This situation occurs
+when a user --force pushes a change and generates a repository
+containing something like this:
+
+ * -- * -- B -- O -- O -- O   (%(oldrev_short)s)
+            \\
+             N -- N -- N   %(refname)s (%(newrev_short)s)
+
+You should already have received notification emails for all of the O
+revisions, and so the following emails describe only the N revisions
+from the common base, B.
+
+Any revisions marked "omit" are not gone; other references still
+refer to them.  Any revisions marked "discard" are gone forever.
+"""
+
+
+NO_NEW_REVISIONS_TEMPLATE = """\
+No new revisions were added by this update.
+"""
+
+
+DISCARDED_REVISIONS_TEMPLATE = """\
+This change permanently discards the following revisions:
+"""
+
+
+NO_DISCARDED_REVISIONS_TEMPLATE = """\
+The revisions that were on this %(refname_type)s are still contained in
+other references; therefore, this change does not discard any commits
+from the repository.
+"""
+
+
+NEW_REVISIONS_TEMPLATE = """\
+The %(tot)s revisions listed above as "new" are entirely new to this
+repository and will be described in separate emails.  The revisions
+listed as "add" were already present in the repository and have only
+been added to this reference.
+
+"""
+
+
+TAG_CREATED_TEMPLATE = """\
+      at %(newrev_short)-8s (%(newrev_type)s)
+"""
+
+
+TAG_UPDATED_TEMPLATE = """\
+*** WARNING: tag %(short_refname)s was modified! ***
+
+    from %(oldrev_short)-8s (%(oldrev_type)s)
+      to %(newrev_short)-8s (%(newrev_type)s)
+"""
+
+
+TAG_DELETED_TEMPLATE = """\
+*** WARNING: tag %(short_refname)s was deleted! ***
+
+"""
+
+
+# The template used in summary tables.  It looks best if this uses the
+# same alignment as TAG_CREATED_TEMPLATE and TAG_UPDATED_TEMPLATE.
+BRIEF_SUMMARY_TEMPLATE = """\
+%(action)8s %(rev_short)-8s %(text)s
+"""
+
+
+NON_COMMIT_UPDATE_TEMPLATE = """\
+This is an unusual reference change because the reference did not
+refer to a commit either before or after the change.  We do not know
+how to provide full information about this reference change.
+"""
+
+
+REVISION_HEADER_TEMPLATE = """\
+Date: %(send_date)s
+To: %(recipients)s
+Cc: %(cc_recipients)s
+Subject: %(emailprefix)s%(num)02d/%(tot)02d: %(oneline)s
+MIME-Version: 1.0
+Content-Type: text/%(contenttype)s; charset=%(charset)s
+Content-Transfer-Encoding: 8bit
+From: %(fromaddr)s
+Reply-To: %(reply_to)s
+In-Reply-To: %(reply_to_msgid)s
+References: %(reply_to_msgid)s
+Thread-Index: %(thread_index)s
+X-Git-Host: %(fqdn)s
+X-Git-Repo: %(repo_shortname)s
+X-Git-Refname: %(refname)s
+X-Git-Reftype: %(refname_type)s
+X-Git-Rev: %(rev)s
+X-Git-NotificationType: diff
+X-Git-Multimail-Version: %(multimail_version)s
+Auto-Submitted: auto-generated
+"""
+
+REVISION_INTRO_TEMPLATE = """\
+This is an automated email from the git hooks/post-receive script.
+
+%(pusher)s pushed a commit to %(refname_type)s %(short_refname)s
+in repository %(repo_shortname)s.
+
+"""
+
+LINK_TEXT_TEMPLATE = """\
+View the commit online:
+%(browse_url)s
+
+"""
+
+LINK_HTML_TEMPLATE = """\
+<p><a href="%(browse_url)s">View the commit online</a>.</p>
+"""
+
+
+REVISION_FOOTER_TEMPLATE = FOOTER_TEMPLATE
+
+
+# Combined, meaning refchange+revision email (for single-commit additions)
+COMBINED_HEADER_TEMPLATE = """\
+Date: %(send_date)s
+To: %(recipients)s
+Subject: %(subject)s
+MIME-Version: 1.0
+Content-Type: text/%(contenttype)s; charset=%(charset)s
+Content-Transfer-Encoding: 8bit
+Message-ID: %(msgid)s
+From: %(fromaddr)s
+Reply-To: %(reply_to)s
+X-Git-Host: %(fqdn)s
+X-Git-Repo: %(repo_shortname)s
+X-Git-Refname: %(refname)s
+X-Git-Reftype: %(refname_type)s
+X-Git-Oldrev: %(oldrev)s
+X-Git-Newrev: %(newrev)s
+X-Git-Rev: %(rev)s
+X-Git-NotificationType: ref_changed_plus_diff
+X-Git-Multimail-Version: %(multimail_version)s
+Auto-Submitted: auto-generated
+"""
+
+COMBINED_INTRO_TEMPLATE = """\
+This is an automated email from the git hooks/post-receive script.
+
+%(pusher)s pushed a commit to %(refname_type)s %(short_refname)s
+in repository %(repo_shortname)s.
+
+"""
+
+COMBINED_FOOTER_TEMPLATE = FOOTER_TEMPLATE
+
+
+class CommandError(Exception):
+    def __init__(self, cmd, retcode):
+        self.cmd = cmd
+        self.retcode = retcode
+        Exception.__init__(
+            self,
+            'Command "%s" failed with retcode %s' % (' '.join(cmd), retcode,)
+            )
+
+
+class ConfigurationException(Exception):
+    pass
+
+
+# The "git" program (this could be changed to include a full path):
+GIT_EXECUTABLE = 'git'
+
+
+# How "git" should be invoked (including global arguments), as a list
+# of words.  This variable is usually initialized automatically by
+# read_git_output() via choose_git_command(), but if a value is set
+# here then it will be used unconditionally.
+GIT_CMD = None
+
+
+def choose_git_command():
+    """Decide how to invoke git, and record the choice in GIT_CMD."""
+
+    global GIT_CMD
+
+    if GIT_CMD is None:
+        try:
+            # Check to see whether the "-c" option is accepted (it was
+            # only added in Git 1.7.2).  We don't actually use the
+            # output of "git --version", though if we needed more
+            # specific version information this would be the place to
+            # do it.
+            cmd = [GIT_EXECUTABLE, '-c', 'foo.bar=baz', '--version']
+            read_output(cmd)
+            GIT_CMD = [GIT_EXECUTABLE, '-c', 'i18n.logoutputencoding=%s' % (ENCODING,)]
+        except CommandError:
+            GIT_CMD = [GIT_EXECUTABLE]
+
+
+def read_git_output(args, input=None, keepends=False, **kw):
+    """Read the output of a Git command."""
+
+    if GIT_CMD is None:
+        choose_git_command()
+
+    return read_output(GIT_CMD + args, input=input, keepends=keepends, **kw)
+
+
+def read_output(cmd, input=None, keepends=False, **kw):
+    if input:
+        stdin = subprocess.PIPE
+        input = str_to_bytes(input)
+    else:
+        stdin = None
+    errors = 'strict'
+    if 'errors' in kw:
+        errors = kw['errors']
+        del kw['errors']
+    p = subprocess.Popen(
+        tuple(str_to_bytes(w) for w in cmd),
+        stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kw
+        )
+    (out, err) = p.communicate(input)
+    out = bytes_to_str(out, errors=errors)
+    retcode = p.wait()
+    if retcode:
+        raise CommandError(cmd, retcode)
+    if not keepends:
+        out = out.rstrip('\n\r')
+    return out
+
+
+def read_git_lines(args, keepends=False, **kw):
+    """Return the lines output by Git command.
+
+    Return as single lines, with newlines stripped off."""
+
+    return read_git_output(args, keepends=True, **kw).splitlines(keepends)
+
+
+def git_rev_list_ish(cmd, spec, args=None, **kw):
+    """Common functionality for invoking a 'git rev-list'-like command.
+
+    Parameters:
+      * cmd is the Git command to run, e.g., 'rev-list' or 'log'.
+      * spec is a list of revision arguments to pass to the named
+        command.  If None, this function returns an empty list.
+      * args is a list of extra arguments passed to the named command.
+      * All other keyword arguments (if any) are passed to the
+        underlying read_git_lines() function.
+
+    Return the output of the Git command in the form of a list, one
+    entry per output line.
+    """
+    if spec is None:
+        return []
+    if args is None:
+        args = []
+    args = [cmd, '--stdin'] + args
+    spec_stdin = ''.join(s + '\n' for s in spec)
+    return read_git_lines(args, input=spec_stdin, **kw)
+
+
+def git_rev_list(spec, **kw):
+    """Run 'git rev-list' with the given list of revision arguments.
+
+    See git_rev_list_ish() for parameter and return value
+    documentation.
+    """
+    return git_rev_list_ish('rev-list', spec, **kw)
+
+
+def git_log(spec, **kw):
+    """Run 'git log' with the given list of revision arguments.
+
+    See git_rev_list_ish() for parameter and return value
+    documentation.
+    """
+    return git_rev_list_ish('log', spec, **kw)
+
+
+def header_encode(text, header_name=None):
+    """Encode and line-wrap the value of an email header field."""
+
+    # Convert to unicode, if required.
+    if not isinstance(text, unicode):
+        text = unicode(text, 'utf-8')
+
+    if is_ascii(text):
+        charset = 'ascii'
+    else:
+        charset = 'utf-8'
+
+    return Header(text, header_name=header_name, charset=Charset(charset)).encode()
+
+
+def addr_header_encode(text, header_name=None):
+    """Encode and line-wrap the value of an email header field containing
+    email addresses."""
+
+    # Convert to unicode, if required.
+    if not isinstance(text, unicode):
+        text = unicode(text, 'utf-8')
+
+    text = ', '.join(
+        formataddr((header_encode(name), emailaddr))
+        for name, emailaddr in getaddresses([text])
+        )
+
+    if is_ascii(text):
+        charset = 'ascii'
+    else:
+        charset = 'utf-8'
+
+    return Header(text, header_name=header_name, charset=Charset(charset)).encode()
+
+
+class Config(object):
+    def __init__(self, section, git_config=None):
+        """Represent a section of the git configuration.
+
+        If git_config is specified, it is passed to "git config" in
+        the GIT_CONFIG environment variable, meaning that "git config"
+        will read the specified path rather than the Git default
+        config paths."""
+
+        self.section = section
+        if git_config:
+            self.env = os.environ.copy()
+            self.env['GIT_CONFIG'] = git_config
+        else:
+            self.env = None
+
+    @staticmethod
+    def _split(s):
+        """Split NUL-terminated values."""
+
+        words = s.split('\0')
+        assert words[-1] == ''
+        return words[:-1]
+
+    @staticmethod
+    def add_config_parameters(c):
+        """Add configuration parameters to Git.
+
+        c is either a str or a list of str, each element being of the
+        form 'var=val' or 'var', with the same syntax and meaning as
+        the argument of 'git -c var=val'.
+        """
+        if isinstance(c, str):
+            c = (c,)
+        parameters = os.environ.get('GIT_CONFIG_PARAMETERS', '')
+        if parameters:
+            parameters += ' '
+        # git expects GIT_CONFIG_PARAMETERS to be of the form
+        #    "'name1=value1' 'name2=value2' 'name3=value3'"
+        # including everything inside the double quotes (but not the double
+        # quotes themselves).  Spacing is critical.  Also, if a value contains
+        # a literal single quote that quote must be represented using the
+        # four character sequence: '\''
+        parameters += ' '.join("'" + x.replace("'", "'\\''") + "'" for x in c)
+        os.environ['GIT_CONFIG_PARAMETERS'] = parameters
+
+    def get(self, name, default=None):
+        try:
+            values = self._split(read_git_output(
+                ['config', '--get', '--null', '%s.%s' % (self.section, name)],
+                env=self.env, keepends=True,
+                ))
+            assert len(values) == 1
+            return values[0]
+        except CommandError:
+            return default
+
+    def get_bool(self, name, default=None):
+        try:
+            value = read_git_output(
+                ['config', '--get', '--bool', '%s.%s' % (self.section, name)],
+                env=self.env,
+                )
+        except CommandError:
+            return default
+        return value == 'true'
+
+    def get_all(self, name, default=None):
+        """Read a (possibly multivalued) setting from the configuration.
+
+        Return the result as a list of values, or default if the name
+        is unset."""
+
+        try:
+            return self._split(read_git_output(
+                ['config', '--get-all', '--null', '%s.%s' % (self.section, name)],
+                env=self.env, keepends=True,
+                ))
+        except CommandError:
+            t, e, traceback = sys.exc_info()
+            if e.retcode == 1:
+                # "the section or key is invalid"; i.e., there is no
+                # value for the specified key.
+                return default
+            else:
+                raise
+
+    def set(self, name, value):
+        read_git_output(
+            ['config', '%s.%s' % (self.section, name), value],
+            env=self.env,
+            )
+
+    def add(self, name, value):
+        read_git_output(
+            ['config', '--add', '%s.%s' % (self.section, name), value],
+            env=self.env,
+            )
+
+    def __contains__(self, name):
+        return self.get_all(name, default=None) is not None
+
+    # We don't use this method anymore internally, but keep it here in
+    # case somebody is calling it from their own code:
+    def has_key(self, name):
+        return name in self
+
+    def unset_all(self, name):
+        try:
+            read_git_output(
+                ['config', '--unset-all', '%s.%s' % (self.section, name)],
+                env=self.env,
+                )
+        except CommandError:
+            t, e, traceback = sys.exc_info()
+            if e.retcode == 5:
+                # The name doesn't exist, which is what we wanted anyway...
+                pass
+            else:
+                raise
+
+    def set_recipients(self, name, value):
+        self.unset_all(name)
+        for pair in getaddresses([value]):
+            self.add(name, formataddr(pair))
+
+
+def generate_summaries(*log_args):
+    """Generate a brief summary for each revision requested.
+
+    log_args are strings that will be passed directly to "git log" as
+    revision selectors.  Iterate over (sha1_short, subject) for each
+    commit specified by log_args (subject is the first line of the
+    commit message as a string without EOLs)."""
+
+    cmd = [
+        'log', '--abbrev', '--format=%h %s',
+        ] + list(log_args) + ['--']
+    for line in read_git_lines(cmd):
+        yield tuple(line.split(' ', 1))
+
+
+def limit_lines(lines, max_lines):
+    for (index, line) in enumerate(lines):
+        if index < max_lines:
+            yield line
+
+    if index >= max_lines:
+        yield '... %d lines suppressed ...\n' % (index + 1 - max_lines,)
+
+
+def limit_linelength(lines, max_linelength):
+    for line in lines:
+        # Don't forget that lines always include a trailing newline.
+        if len(line) > max_linelength + 1:
+            line = line[:max_linelength - 7] + ' [...]\n'
+        yield line
+
+
+class CommitSet(object):
+    """A (constant) set of object names.
+
+    The set should be initialized with full SHA1 object names.  The
+    __contains__() method returns True iff its argument is an
+    abbreviation of any the names in the set."""
+
+    def __init__(self, names):
+        self._names = sorted(names)
+
+    def __len__(self):
+        return len(self._names)
+
+    def __contains__(self, sha1_abbrev):
+        """Return True iff this set contains sha1_abbrev (which might be abbreviated)."""
+
+        i = bisect.bisect_left(self._names, sha1_abbrev)
+        return i < len(self) and self._names[i].startswith(sha1_abbrev)
+
+
+class GitObject(object):
+    def __init__(self, sha1, type=None):
+        if sha1 == ZEROS:
+            self.sha1 = self.type = self.commit_sha1 = None
+        else:
+            self.sha1 = sha1
+            self.type = type or read_git_output(['cat-file', '-t', self.sha1])
+
+            if self.type == 'commit':
+                self.commit_sha1 = self.sha1
+            elif self.type == 'tag':
+                try:
+                    self.commit_sha1 = read_git_output(
+                        ['rev-parse', '--verify', '%s^0' % (self.sha1,)]
+                        )
+                except CommandError:
+                    # Cannot deref tag to determine commit_sha1
+                    self.commit_sha1 = None
+            else:
+                self.commit_sha1 = None
+
+        self.short = read_git_output(['rev-parse', '--short', sha1])
+
+    def get_summary(self):
+        """Return (sha1_short, subject) for this commit."""
+
+        if not self.sha1:
+            raise ValueError('Empty commit has no summary')
+
+        return next(iter(generate_summaries('--no-walk', self.sha1)))
+
+    def __eq__(self, other):
+        return isinstance(other, GitObject) and self.sha1 == other.sha1
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __hash__(self):
+        return hash(self.sha1)
+
+    def __nonzero__(self):
+        return bool(self.sha1)
+
+    def __bool__(self):
+        """Python 3 equivalent of __nonzero__ (the Python 2 name)."""
+        return self.__nonzero__()
+
+    def __str__(self):
+        return self.sha1 or ZEROS
+
+
+class Change(object):
+    """A Change that has been made to the Git repository.
+
+    Abstract class from which both Revisions and ReferenceChanges are
+    derived.  A Change knows how to generate a notification email
+    describing itself."""
+
+    def __init__(self, environment):
+        self.environment = environment
+        self._values = None
+        self._contains_html_diff = False
+
+    def _contains_diff(self):
+        # We do contain a diff, should it be rendered in HTML?
+        if self.environment.commit_email_format == "html":
+            self._contains_html_diff = True
+
+    def _compute_values(self):
+        """Return a dictionary {keyword: expansion} for this Change.
+
+        Derived classes overload this method to add more entries to
+        the return value.  This method is used internally by
+        get_values().  The return value should always be a new
+        dictionary."""
+
+        values = self.environment.get_values()
+        fromaddr = self.environment.get_fromaddr(change=self)
+        if fromaddr is not None:
+            values['fromaddr'] = fromaddr
+        values['multimail_version'] = get_version()
+        return values
+
+    # Aliases usable in template strings. Tuple of pairs (destination,
+    # source).
+    VALUES_ALIAS = (
+        ("id", "newrev"),
+        )
+
+    def get_values(self, **extra_values):
+        """Return a dictionary {keyword: expansion} for this Change.
+
+        Return a dictionary mapping keywords to the values that they
+        should be expanded to for this Change (used when interpolating
+        template strings).  If any keyword arguments are supplied, add
+        those to the return value as well.  The return value is always
+        a new dictionary."""
+
+        if self._values is None:
+            self._values = self._compute_values()
+
+        values = self._values.copy()
+        if extra_values:
+            values.update(extra_values)
+
+        for alias, val in self.VALUES_ALIAS:
+            values[alias] = values[val]
+        return values
+
+    def expand(self, template, **extra_values):
+        """Expand template.
+
+        Expand the template (which should be a string) using string
+        interpolation of the values for this Change.  If any keyword
+        arguments are provided, also include those in the keywords
+        available for interpolation."""
+
+        return template % self.get_values(**extra_values)
+
+    def expand_lines(self, template, html_escape_val=False, **extra_values):
+        """Break template into lines and expand each line."""
+
+        values = self.get_values(**extra_values)
+        if html_escape_val:
+            for k in values:
+                if is_string(values[k]):
+                    values[k] = html_escape(values[k])
+        for line in template.splitlines(True):
+            yield line % values
+
+    def expand_header_lines(self, template, **extra_values):
+        """Break template into lines and expand each line as an RFC 2822 header.
+
+        Encode values and split up lines that are too long.  Silently
+        skip lines that contain references to unknown variables."""
+
+        values = self.get_values(**extra_values)
+        if self._contains_html_diff:
+            self._content_type = 'html'
+        else:
+            self._content_type = 'plain'
+        values['contenttype'] = self._content_type
+
+        for line in template.splitlines():
+            (name, value) = line.split(': ', 1)
+
+            try:
+                value = value % values
+            except KeyError:
+                t, e, traceback = sys.exc_info()
+                if DEBUG:
+                    self.environment.log_warning(
+                        'Warning: unknown variable %r in the following line; line skipped:\n'
+                        '    %s\n'
+                        % (e.args[0], line,)
+                        )
+            else:
+                if name.lower() in ADDR_HEADERS:
+                    value = addr_header_encode(value, name)
+                else:
+                    value = header_encode(value, name)
+                for splitline in ('%s: %s\n' % (name, value)).splitlines(True):
+                    yield splitline
+
+    def generate_email_header(self):
+        """Generate the RFC 2822 email headers for this Change, a line at a time.
+
+        The output should not include the trailing blank line."""
+
+        raise NotImplementedError()
+
+    def generate_browse_link(self, base_url):
+        """Generate a link to an online repository browser."""
+        return iter(())
+
+    def generate_email_intro(self, html_escape_val=False):
+        """Generate the email intro for this Change, a line at a time.
+
+        The output will be used as the standard boilerplate at the top
+        of the email body."""
+
+        raise NotImplementedError()
+
+    def generate_email_body(self, push):
+        """Generate the main part of the email body, a line at a time.
+
+        The text in the body might be truncated after a specified
+        number of lines (see multimailhook.emailmaxlines)."""
+
+        raise NotImplementedError()
+
+    def generate_email_footer(self, html_escape_val):
+        """Generate the footer of the email, a line at a time.
+
+        The footer is always included, irrespective of
+        multimailhook.emailmaxlines."""
+
+        raise NotImplementedError()
+
+    def _wrap_for_html(self, lines):
+        """Wrap the lines in HTML <pre> tag when using HTML format.
+
+        Escape special HTML characters and add <pre> and </pre> tags around
+        the given lines if we should be generating HTML as indicated by
+        self._contains_html_diff being set to true.
+        """
+        if self._contains_html_diff:
+            yield "<pre style='margin:0'>\n"
+
+            for line in lines:
+                yield html_escape(line)
+
+            yield '</pre>\n'
+        else:
+            for line in lines:
+                yield line
+
+    def generate_email(self, push, body_filter=None, extra_header_values={}):
+        """Generate an email describing this change.
+
+        Iterate over the lines (including the header lines) of an
+        email describing this change.  If body_filter is not None,
+        then use it to filter the lines that are intended for the
+        email body.
+
+        The extra_header_values field is received as a dict and not as
+        **kwargs, to allow passing other keyword arguments in the
+        future (e.g. passing extra values to generate_email_intro())."""
+
+        for line in self.generate_email_header(**extra_header_values):
+            yield line
+        yield '\n'
+        html_escape_val = (self.environment.html_in_intro and
+                           self._contains_html_diff)
+        intro = self.generate_email_intro(html_escape_val)
+        if not self.environment.html_in_intro:
+            intro = self._wrap_for_html(intro)
+        for line in intro:
+            yield line
+
+        if self.environment.commitBrowseURL:
+            for line in self.generate_browse_link(self.environment.commitBrowseURL):
+                yield line
+
+        body = self.generate_email_body(push)
+        if body_filter is not None:
+            body = body_filter(body)
+
+        diff_started = False
+        if self._contains_html_diff:
+            # "white-space: pre" is the default, but we need to
+            # specify it again in case the message is viewed in a
+            # webmail which wraps it in an element setting white-space
+            # to something else (Zimbra does this and sets
+            # white-space: pre-line).
+            yield '<pre style="white-space: pre; background: #F8F8F8">'
+        for line in body:
+            if self._contains_html_diff:
+                # This is very, very naive. It would be much better to really
+                # parse the diff, i.e. look at how many lines we have in
+                # the hunk headers instead of blindly highlighting everything
+                # that looks like it might be part of a diff.
+                bgcolor = ''
+                fgcolor = ''
+                if line.startswith('--- a/'):
+                    diff_started = True
+                    bgcolor = 'e0e0ff'
+                elif line.startswith('diff ') or line.startswith('index '):
+                    diff_started = True
+                    fgcolor = '808080'
+                elif diff_started:
+                    if line.startswith('+++ '):
+                        bgcolor = 'e0e0ff'
+                    elif line.startswith('@@'):
+                        bgcolor = 'e0e0e0'
+                    elif line.startswith('+'):
+                        bgcolor = 'e0ffe0'
+                    elif line.startswith('-'):
+                        bgcolor = 'ffe0e0'
+                elif line.startswith('commit '):
+                    fgcolor = '808000'
+                elif line.startswith('    '):
+                    fgcolor = '404040'
+
+                # Chop the trailing LF, we don't want it inside <pre>.
+                line = html_escape(line[:-1])
+
+                if bgcolor or fgcolor:
+                    style = 'display:block; white-space:pre;'
+                    if bgcolor:
+                        style += 'background:#' + bgcolor + ';'
+                    if fgcolor:
+                        style += 'color:#' + fgcolor + ';'
+                    # Use a <span style='display:block'> to color the
+                    # whole line. The newline must be inside the span
+                    # to display properly both in Firefox and in
+                    # text-based browsers.
+                    line = "<span style='%s'>%s\n</span>" % (style, line)
+                else:
+                    line = line + '\n'
+
+            yield line
+        if self._contains_html_diff:
+            yield '</pre>'
+        html_escape_val = (self.environment.html_in_footer and
+                           self._contains_html_diff)
+        footer = self.generate_email_footer(html_escape_val)
+        if not self.environment.html_in_footer:
+            footer = self._wrap_for_html(footer)
+        for line in footer:
+            yield line
+
+    def get_specific_fromaddr(self):
+        """For kinds of Changes which specify it, return the kind-specific
+        From address to use."""
+        return None
+
+
+class Revision(Change):
+    """A Change consisting of a single git commit."""
+
+    CC_RE = re.compile(r'^\s*C[Cc]:\s*(?P<to>[^#]+@[^\s#]*)\s*(#.*)?$')
+
+    def __init__(self, reference_change, rev, num, tot):
+        Change.__init__(self, reference_change.environment)
+        self.reference_change = reference_change
+        self.rev = rev
+        self.change_type = self.reference_change.change_type
+        self.refname = self.reference_change.refname
+        self.num = num
+        self.tot = tot
+        self.author = read_git_output(['log', '--no-walk', '--format=%aN <%aE>', self.rev.sha1])
+        self.recipients = self.environment.get_revision_recipients(self)
+
+        # -s is short for --no-patch, but -s works on older versions of git (e.g. 1.7)
+        self.parents = read_git_lines(['show', '-s', '--format=%P',
+                                      self.rev.sha1])[0].split()
+
+        self.cc_recipients = ''
+        if self.environment.get_scancommitforcc():
+            self.cc_recipients = ', '.join(to.strip() for to in self._cc_recipients())
+            if self.cc_recipients:
+                self.environment.log_msg(
+                    'Add %s to CC for %s' % (self.cc_recipients, self.rev.sha1))
+
+    def _cc_recipients(self):
+        cc_recipients = []
+        message = read_git_output(['log', '--no-walk', '--format=%b', self.rev.sha1])
+        lines = message.strip().split('\n')
+        for line in lines:
+            m = re.match(self.CC_RE, line)
+            if m:
+                cc_recipients.append(m.group('to'))
+
+        return cc_recipients
+
+    def _compute_values(self):
+        values = Change._compute_values(self)
+
+        oneline = read_git_output(
+            ['log', '--format=%s', '--no-walk', self.rev.sha1]
+            )
+
+        max_subject_length = self.environment.get_max_subject_length()
+        if max_subject_length > 0 and len(oneline) > max_subject_length:
+            oneline = oneline[:max_subject_length - 6] + ' [...]'
+
+        values['rev'] = self.rev.sha1
+        values['parents'] = ' '.join(self.parents)
+        values['rev_short'] = self.rev.short
+        values['change_type'] = self.change_type
+        values['refname'] = self.refname
+        values['newrev'] = self.rev.sha1
+        values['short_refname'] = self.reference_change.short_refname
+        values['refname_type'] = self.reference_change.refname_type
+        values['reply_to_msgid'] = self.reference_change.msgid
+        values['thread_index'] = self.reference_change.thread_index
+        values['num'] = self.num
+        values['tot'] = self.tot
+        values['recipients'] = self.recipients
+        if self.cc_recipients:
+            values['cc_recipients'] = self.cc_recipients
+        values['oneline'] = oneline
+        values['author'] = self.author
+
+        reply_to = self.environment.get_reply_to_commit(self)
+        if reply_to:
+            values['reply_to'] = reply_to
+
+        return values
+
+    def generate_email_header(self, **extra_values):
+        for line in self.expand_header_lines(
+                REVISION_HEADER_TEMPLATE, **extra_values
+                ):
+            yield line
+
+    def generate_browse_link(self, base_url):
+        if '%(' not in base_url:
+            base_url += '%(id)s'
+        url = "".join(self.expand_lines(base_url))
+        if self._content_type == 'html':
+            for line in self.expand_lines(LINK_HTML_TEMPLATE,
+                                          html_escape_val=True,
+                                          browse_url=url):
+                yield line
+        elif self._content_type == 'plain':
+            for line in self.expand_lines(LINK_TEXT_TEMPLATE,
+                                          html_escape_val=False,
+                                          browse_url=url):
+                yield line
+        else:
+            raise NotImplementedError(
+                "Content-type %s unsupported. Please report it as a bug."
+                % self._content_type)
+
+    def generate_email_intro(self, html_escape_val=False):
+        for line in self.expand_lines(REVISION_INTRO_TEMPLATE,
+                                      html_escape_val=html_escape_val):
+            yield line
+
+    def generate_email_body(self, push):
+        """Show this revision."""
+
+        for line in read_git_lines(
+                ['log'] + self.environment.commitlogopts + ['-1', self.rev.sha1],
+                keepends=True,
+                errors='replace'):
+            if line.startswith('Date:   ') and self.environment.date_substitute:
+                yield self.environment.date_substitute + line[len('Date:   '):]
+            else:
+                yield line
+
+    def generate_email_footer(self, html_escape_val):
+        return self.expand_lines(REVISION_FOOTER_TEMPLATE,
+                                 html_escape_val=html_escape_val)
+
+    def generate_email(self, push, body_filter=None, extra_header_values={}):
+        self._contains_diff()
+        return Change.generate_email(self, push, body_filter, extra_header_values)
+
+    def get_specific_fromaddr(self):
+        return self.environment.from_commit
+
+
+class ReferenceChange(Change):
+    """A Change to a Git reference.
+
+    An abstract class representing a create, update, or delete of a
+    Git reference.  Derived classes handle specific types of reference
+    (e.g., tags vs. branches).  These classes generate the main
+    reference change email summarizing the reference change and
+    whether it caused any commits to be added or removed.
+
+    ReferenceChange objects are usually created using the static
+    create() method, which has the logic to decide which derived class
+    to instantiate."""
+
+    REF_RE = re.compile(r'^refs\/(?P<area>[^\/]+)\/(?P<shortname>.*)$')
+
+    @staticmethod
+    def create(environment, oldrev, newrev, refname):
+        """Return a ReferenceChange object representing the change.
+
+        Return an object that represents the type of change that is being
+        made. oldrev and newrev should be SHA1s or ZEROS."""
+
+        old = GitObject(oldrev)
+        new = GitObject(newrev)
+        rev = new or old
+
+        # The revision type tells us what type the commit is; combined
+        # with the location of the ref, we can decide between:
+        #  - working branch
+        #  - tracking branch
+        #  - unannotated tag
+        #  - annotated tag
+        m = ReferenceChange.REF_RE.match(refname)
+        if m:
+            area = m.group('area')
+            short_refname = m.group('shortname')
+        else:
+            area = ''
+            short_refname = refname
+
+        if rev.type == 'tag':
+            # Annotated tag:
+            klass = AnnotatedTagChange
+        elif rev.type == 'commit':
+            if area == 'tags':
+                # Non-annotated tag:
+                klass = NonAnnotatedTagChange
+            elif area == 'heads':
+                # Branch:
+                klass = BranchChange
+            elif area == 'remotes':
+                # Tracking branch:
+                environment.log_warning(
+                    '*** Push-update of tracking branch %r\n'
+                    '***  - incomplete email generated.'
+                    % (refname,)
+                    )
+                klass = OtherReferenceChange
+            else:
+                # Some other reference namespace:
+                environment.log_warning(
+                    '*** Push-update of strange reference %r\n'
+                    '***  - incomplete email generated.'
+                    % (refname,)
+                    )
+                klass = OtherReferenceChange
+        else:
+            # Anything else (is there anything else?)
+            environment.log_warning(
+                '*** Unknown type of update to %r (%s)\n'
+                '***  - incomplete email generated.'
+                % (refname, rev.type,)
+                )
+            klass = OtherReferenceChange
+
+        return klass(
+            environment,
+            refname=refname, short_refname=short_refname,
+            old=old, new=new, rev=rev,
+            )
+
+    @staticmethod
+    def make_thread_index():
+        """Return a string appropriate for the Thread-Index header,
+        needed by MS Outlook to get threading right.
+
+        The format is (base64-encoded):
+        - 1 byte must be 1
+        - 5 bytes encode a date (hardcoded here)
+        - 16 bytes for a globally unique identifier
+
+        FIXME: Unfortunately, even with the Thread-Index field, MS
+        Outlook doesn't seem to do the threading reliably (see
+        https://github.com/git-multimail/git-multimail/pull/194).
+        """
+        thread_index = b'\x01\x00\x00\x12\x34\x56' + uuid.uuid4().bytes
+        return base64.standard_b64encode(thread_index).decode('ascii')
+
+    def __init__(self, environment, refname, short_refname, old, new, rev):
+        Change.__init__(self, environment)
+        self.change_type = {
+            (False, True): 'create',
+            (True, True): 'update',
+            (True, False): 'delete',
+            }[bool(old), bool(new)]
+        self.refname = refname
+        self.short_refname = short_refname
+        self.old = old
+        self.new = new
+        self.rev = rev
+        self.msgid = make_msgid()
+        self.thread_index = self.make_thread_index()
+        self.diffopts = environment.diffopts
+        self.graphopts = environment.graphopts
+        self.logopts = environment.logopts
+        self.commitlogopts = environment.commitlogopts
+        self.showgraph = environment.refchange_showgraph
+        self.showlog = environment.refchange_showlog
+
+        self.header_template = REFCHANGE_HEADER_TEMPLATE
+        self.intro_template = REFCHANGE_INTRO_TEMPLATE
+        self.footer_template = FOOTER_TEMPLATE
+
+    def _compute_values(self):
+        values = Change._compute_values(self)
+
+        values['change_type'] = self.change_type
+        values['refname_type'] = self.refname_type
+        values['refname'] = self.refname
+        values['short_refname'] = self.short_refname
+        values['msgid'] = self.msgid
+        values['thread_index'] = self.thread_index
+        values['recipients'] = self.recipients
+        values['oldrev'] = str(self.old)
+        values['oldrev_short'] = self.old.short
+        values['newrev'] = str(self.new)
+        values['newrev_short'] = self.new.short
+
+        if self.old:
+            values['oldrev_type'] = self.old.type
+        if self.new:
+            values['newrev_type'] = self.new.type
+
+        reply_to = self.environment.get_reply_to_refchange(self)
+        if reply_to:
+            values['reply_to'] = reply_to
+
+        return values
+
+    def send_single_combined_email(self, known_added_sha1s):
+        """Determine if a combined refchange/revision email should be sent
+
+        If there is only a single new (non-merge) commit added by a
+        change, it is useful to combine the ReferenceChange and
+        Revision emails into one.  In such a case, return the single
+        revision; otherwise, return None.
+
+        This method is overridden in BranchChange."""
+
+        return None
+
+    def generate_combined_email(self, push, revision, body_filter=None, extra_header_values={}):
+        """Generate an email describing this change AND specified revision.
+
+        Iterate over the lines (including the header lines) of an
+        email describing this change.  If body_filter is not None,
+        then use it to filter the lines that are intended for the
+        email body.
+
+        The extra_header_values parameter is received as a dict and not
+        as **kwargs, to allow passing other keyword arguments in the
+        future (e.g. passing extra values to generate_email_intro()).
+
+        This method is overridden in BranchChange."""
+
+        raise NotImplementedError
+
+    def get_subject(self):
+        template = {
+            'create': REF_CREATED_SUBJECT_TEMPLATE,
+            'update': REF_UPDATED_SUBJECT_TEMPLATE,
+            'delete': REF_DELETED_SUBJECT_TEMPLATE,
+            }[self.change_type]
+        return self.expand(template)
+
+    def generate_email_header(self, **extra_values):
+        if 'subject' not in extra_values:
+            extra_values['subject'] = self.get_subject()
+
+        for line in self.expand_header_lines(
+                self.header_template, **extra_values
+                ):
+            yield line
+
+    def generate_email_intro(self, html_escape_val=False):
+        for line in self.expand_lines(self.intro_template,
+                                      html_escape_val=html_escape_val):
+            yield line
+
+    def generate_email_body(self, push):
+        """Call the appropriate body-generation routine.
+
+        Call one of generate_create_summary() /
+        generate_update_summary() / generate_delete_summary()."""
+
+        change_summary = {
+            'create': self.generate_create_summary,
+            'delete': self.generate_delete_summary,
+            'update': self.generate_update_summary,
+            }[self.change_type](push)
+        for line in change_summary:
+            yield line
+
+        for line in self.generate_revision_change_summary(push):
+            yield line
+
+    def generate_email_footer(self, html_escape_val):
+        return self.expand_lines(self.footer_template,
+                                 html_escape_val=html_escape_val)
+
+    def generate_revision_change_graph(self, push):
+        if self.showgraph:
+            args = ['--graph'] + self.graphopts
+            for newold in ('new', 'old'):
+                has_newold = False
+                spec = push.get_commits_spec(newold, self)
+                for line in git_log(spec, args=args, keepends=True):
+                    if not has_newold:
+                        has_newold = True
+                        yield '\n'
+                        yield 'Graph of %s commits:\n\n' % (
+                            {'new': 'new', 'old': 'discarded'}[newold],)
+                    yield '  ' + line
+                if has_newold:
+                    yield '\n'
+
+    def generate_revision_change_log(self, new_commits_list):
+        if self.showlog:
+            yield '\n'
+            yield 'Detailed log of new commits:\n\n'
+            for line in read_git_lines(
+                    ['log', '--no-walk'] +
+                    self.logopts +
+                    new_commits_list +
+                    ['--'],
+                    keepends=True,
+                    ):
+                yield line
+
+    def generate_new_revision_summary(self, tot, new_commits_list, push):
+        for line in self.expand_lines(NEW_REVISIONS_TEMPLATE, tot=tot):
+            yield line
+        for line in self.generate_revision_change_graph(push):
+            yield line
+        for line in self.generate_revision_change_log(new_commits_list):
+            yield line
+
+    def generate_revision_change_summary(self, push):
+        """Generate a summary of the revisions added/removed by this change."""
+
+        if self.new.commit_sha1 and not self.old.commit_sha1:
+            # A new reference was created.  List the new revisions
+            # brought by the new reference (i.e., those revisions that
+            # were not in the repository before this reference
+            # change).
+            sha1s = list(push.get_new_commits(self))
+            sha1s.reverse()
+            tot = len(sha1s)
+            new_revisions = [
+                Revision(self, GitObject(sha1), num=i + 1, tot=tot)
+                for (i, sha1) in enumerate(sha1s)
+                ]
+
+            if new_revisions:
+                yield self.expand('This %(refname_type)s includes the following new commits:\n')
+                yield '\n'
+                for r in new_revisions:
+                    (sha1, subject) = r.rev.get_summary()
+                    yield r.expand(
+                        BRIEF_SUMMARY_TEMPLATE, action='new', text=subject,
+                        )
+                yield '\n'
+                for line in self.generate_new_revision_summary(
+                        tot, [r.rev.sha1 for r in new_revisions], push):
+                    yield line
+            else:
+                for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE):
+                    yield line
+
+        elif self.new.commit_sha1 and self.old.commit_sha1:
+            # A reference was changed to point at a different commit.
+            # List the revisions that were removed and/or added *from
+            # that reference* by this reference change, along with a
+            # diff between the trees for its old and new values.
+
+            # List of the revisions that were added to the branch by
+            # this update.  Note this list can include revisions that
+            # have already had notification emails; we want such
+            # revisions in the summary even though we will not send
+            # new notification emails for them.
+            adds = list(generate_summaries(
+                '--topo-order', '--reverse', '%s..%s'
+                % (self.old.commit_sha1, self.new.commit_sha1,)
+                ))
+
+            # List of the revisions that were removed from the branch
+            # by this update.  This will be empty except for
+            # non-fast-forward updates.
+            discards = list(generate_summaries(
+                '%s..%s' % (self.new.commit_sha1, self.old.commit_sha1,)
+                ))
+
+            if adds:
+                new_commits_list = push.get_new_commits(self)
+            else:
+                new_commits_list = []
+            new_commits = CommitSet(new_commits_list)
+
+            if discards:
+                discarded_commits = CommitSet(push.get_discarded_commits(self))
+            else:
+                discarded_commits = CommitSet([])
+
+            if discards and adds:
+                for (sha1, subject) in discards:
+                    if sha1 in discarded_commits:
+                        action = 'discard'
+                    else:
+                        action = 'omit'
+                    yield self.expand(
+                        BRIEF_SUMMARY_TEMPLATE, action=action,
+                        rev_short=sha1, text=subject,
+                        )
+                for (sha1, subject) in adds:
+                    if sha1 in new_commits:
+                        action = 'new'
+                    else:
+                        action = 'add'
+                    yield self.expand(
+                        BRIEF_SUMMARY_TEMPLATE, action=action,
+                        rev_short=sha1, text=subject,
+                        )
+                yield '\n'
+                for line in self.expand_lines(NON_FF_TEMPLATE):
+                    yield line
+
+            elif discards:
+                for (sha1, subject) in discards:
+                    if sha1 in discarded_commits:
+                        action = 'discard'
+                    else:
+                        action = 'omit'
+                    yield self.expand(
+                        BRIEF_SUMMARY_TEMPLATE, action=action,
+                        rev_short=sha1, text=subject,
+                        )
+                yield '\n'
+                for line in self.expand_lines(REWIND_ONLY_TEMPLATE):
+                    yield line
+
+            elif adds:
+                (sha1, subject) = self.old.get_summary()
+                yield self.expand(
+                    BRIEF_SUMMARY_TEMPLATE, action='from',
+                    rev_short=sha1, text=subject,
+                    )
+                for (sha1, subject) in adds:
+                    if sha1 in new_commits:
+                        action = 'new'
+                    else:
+                        action = 'add'
+                    yield self.expand(
+                        BRIEF_SUMMARY_TEMPLATE, action=action,
+                        rev_short=sha1, text=subject,
+                        )
+
+            yield '\n'
+
+            if new_commits:
+                for line in self.generate_new_revision_summary(
+                        len(new_commits), new_commits_list, push):
+                    yield line
+            else:
+                for line in self.expand_lines(NO_NEW_REVISIONS_TEMPLATE):
+                    yield line
+                for line in self.generate_revision_change_graph(push):
+                    yield line
+
+            # The diffstat is shown from the old revision to the new
+            # revision.  This is to show the truth of what happened in
+            # this change.  There's no point showing the stat from the
+            # base to the new revision because the base is effectively a
+            # random revision at this point - the user will be interested
+            # in what this revision changed - including the undoing of
+            # previous revisions in the case of non-fast-forward updates.
+            yield '\n'
+            yield 'Summary of changes:\n'
+            for line in read_git_lines(
+                    ['diff-tree'] +
+                    self.diffopts +
+                    ['%s..%s' % (self.old.commit_sha1, self.new.commit_sha1,)],
+                    keepends=True,
+                    ):
+                yield line
+
+        elif self.old.commit_sha1 and not self.new.commit_sha1:
+            # A reference was deleted.  List the revisions that were
+            # removed from the repository by this reference change.
+
+            sha1s = list(push.get_discarded_commits(self))
+            tot = len(sha1s)
+            discarded_revisions = [
+                Revision(self, GitObject(sha1), num=i + 1, tot=tot)
+                for (i, sha1) in enumerate(sha1s)
+                ]
+
+            if discarded_revisions:
+                for line in self.expand_lines(DISCARDED_REVISIONS_TEMPLATE):
+                    yield line
+                yield '\n'
+                for r in discarded_revisions:
+                    (sha1, subject) = r.rev.get_summary()
+                    yield r.expand(
+                        BRIEF_SUMMARY_TEMPLATE, action='discard', text=subject,
+                        )
+                for line in self.generate_revision_change_graph(push):
+                    yield line
+            else:
+                for line in self.expand_lines(NO_DISCARDED_REVISIONS_TEMPLATE):
+                    yield line
+
+        elif not self.old.commit_sha1 and not self.new.commit_sha1:
+            for line in self.expand_lines(NON_COMMIT_UPDATE_TEMPLATE):
+                yield line
+
+    def generate_create_summary(self, push):
+        """Called for the creation of a reference."""
+
+        # This is a new reference and so oldrev is not valid
+        (sha1, subject) = self.new.get_summary()
+        yield self.expand(
+            BRIEF_SUMMARY_TEMPLATE, action='at',
+            rev_short=sha1, text=subject,
+            )
+        yield '\n'
+
+    def generate_update_summary(self, push):
+        """Called for the change of a pre-existing branch."""
+
+        return iter([])
+
+    def generate_delete_summary(self, push):
+        """Called for the deletion of any type of reference."""
+
+        (sha1, subject) = self.old.get_summary()
+        yield self.expand(
+            BRIEF_SUMMARY_TEMPLATE, action='was',
+            rev_short=sha1, text=subject,
+            )
+        yield '\n'
+
+    def get_specific_fromaddr(self):
+        return self.environment.from_refchange
+
+
+class BranchChange(ReferenceChange):
+    refname_type = 'branch'
+
+    def __init__(self, environment, refname, short_refname, old, new, rev):
+        ReferenceChange.__init__(
+            self, environment,
+            refname=refname, short_refname=short_refname,
+            old=old, new=new, rev=rev,
+            )
+        self.recipients = environment.get_refchange_recipients(self)
+        self._single_revision = None
+
+    def send_single_combined_email(self, known_added_sha1s):
+        if not self.environment.combine_when_single_commit:
+            return None
+
+        # In the sadly-all-too-frequent usecase of people pushing only
+        # one of their commits at a time to a repository, users feel
+        # the reference change summary emails are noise rather than
+        # important signal.  This is because, in this particular
+        # usecase, there is a reference change summary email for each
+        # new commit, and all these summaries do is point out that
+        # there is one new commit (which can readily be inferred by
+        # the existence of the individual revision email that is also
+        # sent).  In such cases, our users prefer there to be a combined
+        # reference change summary/new revision email.
+        #
+        # So, if the change is an update and it doesn't discard any
+        # commits, and it adds exactly one non-merge commit (gerrit
+        # forces a workflow where every commit is individually merged
+        # and the git-multimail hook fired off for just this one
+        # change), then we send a combined refchange/revision email.
+        try:
+            # If this change is a reference update that doesn't discard
+            # any commits...
+            if self.change_type != 'update':
+                return None
+
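+            # Fast-forward check: for a fast-forward update the old tip
+            # is an ancestor of the new tip, so 'git merge-base old new'
+            # prints exactly the old sha1.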
+            if read_git_lines(
+                    ['merge-base', self.old.sha1, self.new.sha1]
+                    ) != [self.old.sha1]:
+                return None
+
+            # Check if this update introduced exactly one non-merge
+            # commit:
+
+            def split_line(line):
+                """Split line into (sha1, [parent,...])."""
+
+                words = line.split()
+                return (words[0], words[1:])
+
+            # Get the new commits introduced by the push as a list of
+            # (sha1, [parent,...])
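+            # With '--format=%H %P' each line is the commit sha1
+            # followed by the sha1s of its parents, so a merge commit
+            # yields ('<sha1>', ['<parent1>', '<parent2>']).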
+            new_commits = [
+                split_line(line)
+                for line in read_git_lines(
+                    [
+                        'log', '-3', '--format=%H %P',
+                        '%s..%s' % (self.old.sha1, self.new.sha1),
+                        ]
+                    )
+                ]
+
+            if not new_commits:
+                return None
+
+            # If the newest commit is a merge, save it for a later check
+            # but otherwise ignore it
+            merge = None
+            tot = len(new_commits)
+            if len(new_commits[0][1]) > 1:
+                merge = new_commits[0][0]
+                del new_commits[0]
+
+            # Our primary check: we can't combine if more than one commit
+            # is introduced.  We also currently only combine if the new
+            # commit is a non-merge commit, though it may make sense to
+            # combine if it is a merge as well.
+            if not (
+                    len(new_commits) == 1 and
+                    len(new_commits[0][1]) == 1 and
+                    new_commits[0][0] in known_added_sha1s
+                    ):
+                return None
+
+            # We do not want to combine revision and refchange emails if
+            # those go to separate locations.
+            rev = Revision(self, GitObject(new_commits[0][0]), 1, tot)
+            if rev.recipients != self.recipients:
+                return None
+
+            # We ignored the newest commit if it was just a merge of the one
+            # commit being introduced.  But we don't want to ignore that
+            # merge commit if it involved conflict resolutions.  Check that.
+            if merge and merge != read_git_output(['diff-tree', '--cc', merge]):
+                return None
+
+            # We can combine the refchange and one new revision emails
+            # into one.  Return the Revision that a combined email should
+            # be sent about.
+            return rev
+        except CommandError:
+            # Cannot determine number of commits in old..new or new..old;
+            # don't combine reference/revision emails:
+            return None
+
+    def generate_combined_email(self, push, revision, body_filter=None, extra_header_values={}):
+        values = revision.get_values()
+        if extra_header_values:
+            values.update(extra_header_values)
+        if 'subject' not in extra_header_values:
+            values['subject'] = self.expand(COMBINED_REFCHANGE_REVISION_SUBJECT_TEMPLATE, **values)
+
+        self._single_revision = revision
+        self._contains_diff()
+        self.header_template = COMBINED_HEADER_TEMPLATE
+        self.intro_template = COMBINED_INTRO_TEMPLATE
+        self.footer_template = COMBINED_FOOTER_TEMPLATE
+
+        def revision_gen_link(base_url):
+            # revision is used only to generate the body, and
+            # _content_type is set while generating headers. Get it
+            # from the BranchChange object.
+            revision._content_type = self._content_type
+            return revision.generate_browse_link(base_url)
+        self.generate_browse_link = revision_gen_link
+        for line in self.generate_email(push, body_filter, values):
+            yield line
+
+    def generate_email_body(self, push):
+        '''Call the appropriate body generation routine.
+
+        If this is a combined refchange/revision email, the special logic
+        for handling this combined email comes from this function.  For
+        other cases, we just use the normal handling.'''
+
+        # If self._single_revision isn't set; don't override
+        if not self._single_revision:
+            for line in super(BranchChange, self).generate_email_body(push):
+                yield line
+            return
+
+        # This is a combined refchange/revision email; we first provide
+        # some info from the refchange portion, and then call the revision
+        # generate_email_body function to handle the revision portion.
+        adds = list(generate_summaries(
+            '--topo-order', '--reverse', '%s..%s'
+            % (self.old.commit_sha1, self.new.commit_sha1,)
+            ))
+
+        yield self.expand("The following commit(s) were added to %(refname)s by this push:\n")
+        for (sha1, subject) in adds:
+            yield self.expand(
+                BRIEF_SUMMARY_TEMPLATE, action='new',
+                rev_short=sha1, text=subject,
+                )
+
+        yield self._single_revision.rev.short + " is described below\n"
+        yield '\n'
+
+        for line in self._single_revision.generate_email_body(push):
+            yield line
+
+
+class AnnotatedTagChange(ReferenceChange):
+    refname_type = 'annotated tag'
+
+    def __init__(self, environment, refname, short_refname, old, new, rev):
+        ReferenceChange.__init__(
+            self, environment,
+            refname=refname, short_refname=short_refname,
+            old=old, new=new, rev=rev,
+            )
+        self.recipients = environment.get_announce_recipients(self)
+        self.show_shortlog = environment.announce_show_shortlog
+
+    ANNOTATED_TAG_FORMAT = (
+        '%(*objectname)\n'
+        '%(*objecttype)\n'
+        '%(taggername)\n'
+        '%(taggerdate)'
+        )
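+    # The '*' prefix makes for-each-ref dereference the tag, so the
+    # format above yields the tagged object's name and type together
+    # with the tagger's name and date.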
+
+    def describe_tag(self, push):
+        """Describe the new value of an annotated tag."""
+
+        # Use git for-each-ref to pull out the individual fields from
+        # the tag
+        [tagobject, tagtype, tagger, tagged] = read_git_lines(
+            ['for-each-ref', '--format=%s' % (self.ANNOTATED_TAG_FORMAT,), self.refname],
+            )
+
+        yield self.expand(
+            BRIEF_SUMMARY_TEMPLATE, action='tagging',
+            rev_short=tagobject, text='(%s)' % (tagtype,),
+            )
+        if tagtype == 'commit':
+            # If the tagged object is a commit, then we assume this is a
+            # release, and so we calculate which tag this tag is
+            # replacing
+            try:
+                prevtag = read_git_output(['describe', '--abbrev=0', '%s^' % (self.new,)])
+            except CommandError:
+                prevtag = None
+            if prevtag:
+                yield ' replaces %s\n' % (prevtag,)
+        else:
+            prevtag = None
+            yield '  length %s bytes\n' % (read_git_output(['cat-file', '-s', tagobject]),)
+
+        yield '      by %s\n' % (tagger,)
+        yield '      on %s\n' % (tagged,)
+        yield '\n'
+
+        # Show the content of the tag message; this might contain a
+        # change log or release notes so it is worth displaying.
+        yield LOGBEGIN
+        contents = list(read_git_lines(['cat-file', 'tag', self.new.sha1], keepends=True))
+        contents = contents[contents.index('\n') + 1:]
+        if contents and contents[-1][-1:] != '\n':
+            contents.append('\n')
+        for line in contents:
+            yield line
+
+        if self.show_shortlog and tagtype == 'commit':
+            # Only commit tags make sense to have rev-list operations
+            # performed on them
+            yield '\n'
+            if prevtag:
+                # Show changes since the previous release
+                revlist = read_git_output(
+                    ['rev-list', '--pretty=short', '%s..%s' % (prevtag, self.new,)],
+                    keepends=True,
+                    )
+            else:
+                # No previous tag, show all the changes since time
+                # began
+                revlist = read_git_output(
+                    ['rev-list', '--pretty=short', '%s' % (self.new,)],
+                    keepends=True,
+                    )
+            for line in read_git_lines(['shortlog'], input=revlist, keepends=True):
+                yield line
+
+        yield LOGEND
+        yield '\n'
+
+    def generate_create_summary(self, push):
+        """Called for the creation of an annotated tag."""
+
+        for line in self.expand_lines(TAG_CREATED_TEMPLATE):
+            yield line
+
+        for line in self.describe_tag(push):
+            yield line
+
+    def generate_update_summary(self, push):
+        """Called for the update of an annotated tag.
+
+        This is probably a rare event and may not even be allowed."""
+
+        for line in self.expand_lines(TAG_UPDATED_TEMPLATE):
+            yield line
+
+        for line in self.describe_tag(push):
+            yield line
+
+    def generate_delete_summary(self, push):
+        """Called when a non-annotated reference is updated."""
+
+        for line in self.expand_lines(TAG_DELETED_TEMPLATE):
+            yield line
+
+        yield self.expand('   tag was  %(oldrev_short)s\n')
+        yield '\n'
+
+
+class NonAnnotatedTagChange(ReferenceChange):
+    refname_type = 'tag'
+
+    def __init__(self, environment, refname, short_refname, old, new, rev):
+        ReferenceChange.__init__(
+            self, environment,
+            refname=refname, short_refname=short_refname,
+            old=old, new=new, rev=rev,
+            )
+        self.recipients = environment.get_refchange_recipients(self)
+
+    def generate_create_summary(self, push):
+        """Called for the creation of an annotated tag."""
+
+        for line in self.expand_lines(TAG_CREATED_TEMPLATE):
+            yield line
+
+    def generate_update_summary(self, push):
+        """Called when a non-annotated reference is updated."""
+
+        for line in self.expand_lines(TAG_UPDATED_TEMPLATE):
+            yield line
+
+    def generate_delete_summary(self, push):
+        """Called when a non-annotated reference is updated."""
+
+        for line in self.expand_lines(TAG_DELETED_TEMPLATE):
+            yield line
+
+        for line in ReferenceChange.generate_delete_summary(self, push):
+            yield line
+
+
+class OtherReferenceChange(ReferenceChange):
+    refname_type = 'reference'
+
+    def __init__(self, environment, refname, short_refname, old, new, rev):
+        # We use the full refname as short_refname, because otherwise
+        # the full name of the reference would not be obvious from the
+        # text of the email.
+        ReferenceChange.__init__(
+            self, environment,
+            refname=refname, short_refname=refname,
+            old=old, new=new, rev=rev,
+            )
+        self.recipients = environment.get_refchange_recipients(self)
+
+
+class Mailer(object):
+    """An object that can send emails."""
+
+    def __init__(self, environment):
+        self.environment = environment
+
+    def close(self):
+        pass
+
+    def send(self, lines, to_addrs):
+        """Send an email consisting of lines.
+
+        lines must be an iterable over the lines constituting the
+        header and body of the email.  to_addrs is a list of recipient
+        addresses (may be needed even if lines already contains a
+        "To:" field).  It can be either a string (comma-separated list
+        of email addresses) or a Python list of individual email
+        addresses.
+
+        """
+
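+        # Illustrative use (a sketch, not a requirement of this class):
+        #     OutputMailer(sys.stdout).send(
+        #         ['Subject: test\n', '\n', 'body\n'], ['someone@example.com'])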
+        raise NotImplementedError()
+
+
+class SendMailer(Mailer):
+    """Send emails using 'sendmail -oi -t'."""
+
+    SENDMAIL_CANDIDATES = [
+        '/usr/sbin/sendmail',
+        '/usr/lib/sendmail',
+        ]
+
+    @staticmethod
+    def find_sendmail():
+        for path in SendMailer.SENDMAIL_CANDIDATES:
+            if os.access(path, os.X_OK):
+                return path
+        else:
+            raise ConfigurationException(
+                'No sendmail executable found.  '
+                'Try setting multimailhook.sendmailCommand.'
+                )
+
+    def __init__(self, environment, command=None, envelopesender=None):
+        """Construct a SendMailer instance.
+
+        command should be the command and arguments used to invoke
+        sendmail, as a list of strings.  If an envelopesender is
+        provided, it will also be passed to the command, via '-f
+        envelopesender'."""
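+        # For example (illustration only): with command=None and
+        # envelopesender='git@example.com', self.command typically
+        # becomes ['/usr/sbin/sendmail', '-oi', '-t', '-f',
+        # 'git@example.com'], depending on which sendmail binary
+        # find_sendmail() locates.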
+        super(SendMailer, self).__init__(environment)
+        if command:
+            self.command = command[:]
+        else:
+            self.command = [self.find_sendmail(), '-oi', '-t']
+
+        if envelopesender:
+            self.command.extend(['-f', envelopesender])
+
+    def send(self, lines, to_addrs):
+        try:
+            p = subprocess.Popen(self.command, stdin=subprocess.PIPE)
+        except OSError:
+            self.environment.get_logger().error(
+                '*** Cannot execute command: %s\n' % ' '.join(self.command) +
+                '*** %s\n' % sys.exc_info()[1] +
+                '*** Try setting multimailhook.mailer to "smtp"\n' +
+                '*** to send emails without using the sendmail command.\n'
+                )
+            sys.exit(1)
+        try:
+            lines = (str_to_bytes(line) for line in lines)
+            p.stdin.writelines(lines)
+        except Exception:
+            self.environment.get_logger().error(
+                '*** Error while generating commit email\n'
+                '***  - mail sending aborted.\n'
+                )
+            if hasattr(p, 'terminate'):
+                # subprocess.terminate() is not available in Python 2.4
+                p.terminate()
+            else:
+                import signal
+                os.kill(p.pid, signal.SIGTERM)
+            raise
+        else:
+            p.stdin.close()
+            retcode = p.wait()
+            if retcode:
+                raise CommandError(self.command, retcode)
+
+
+class SMTPMailer(Mailer):
+    """Send emails using Python's smtplib."""
+
+    def __init__(self, environment,
+                 envelopesender, smtpserver,
+                 smtpservertimeout=10.0, smtpserverdebuglevel=0,
+                 smtpencryption='none',
+                 smtpuser='', smtppass='',
+                 smtpcacerts=''
+                 ):
+        super(SMTPMailer, self).__init__(environment)
+        if not envelopesender:
+            self.environment.get_logger().error(
+                'fatal: git_multimail: cannot use SMTPMailer without a sender address.\n'
+                'please set either multimailhook.envelopeSender or user.email\n'
+                )
+            sys.exit(1)
+        if smtpencryption == 'ssl' and not (smtpuser and smtppass):
+            raise ConfigurationException(
+                'Cannot use SMTPMailer with security option ssl '
+                'without options username and password.'
+                )
+        self.envelopesender = envelopesender
+        self.smtpserver = smtpserver
+        self.smtpservertimeout = smtpservertimeout
+        self.smtpserverdebuglevel = smtpserverdebuglevel
+        self.security = smtpencryption
+        self.username = smtpuser
+        self.password = smtppass
+        self.smtpcacerts = smtpcacerts
+        self.loggedin = False
+        try:
+            def call(klass, server, timeout):
+                try:
+                    return klass(server, timeout=timeout)
+                except TypeError:
+                    # Old Python versions do not have timeout= argument.
+                    return klass(server)
+            if self.security == 'none':
+                self.smtp = call(smtplib.SMTP, self.smtpserver, timeout=self.smtpservertimeout)
+            elif self.security == 'ssl':
+                if self.smtpcacerts:
+                    raise smtplib.SMTPException(
+                        "Checking certificate is not supported for ssl, prefer starttls"
+                        )
+                self.smtp = call(smtplib.SMTP_SSL, self.smtpserver, timeout=self.smtpservertimeout)
+            elif self.security == 'tls':
+                if 'ssl' not in sys.modules:
+                    self.environment.get_logger().error(
+                        '*** Your Python version does not have the ssl library installed\n'
+                        '*** smtpEncryption=tls is not available.\n'
+                        '*** Either upgrade Python to 2.6 or later\n'
+                        '    or use git_multimail.py version 1.2.\n')
+                if ':' not in self.smtpserver:
+                    self.smtpserver += ':587'  # default port for TLS
+                self.smtp = call(smtplib.SMTP, self.smtpserver, timeout=self.smtpservertimeout)
+                # start: ehlo + starttls
+                # equivalent to
+                #     self.smtp.ehlo()
+                #     self.smtp.starttls()
+                # with access to the ssl layer
+                self.smtp.ehlo()
+                if not self.smtp.has_extn("starttls"):
+                    raise smtplib.SMTPException("STARTTLS extension not supported by server")
+                resp, reply = self.smtp.docmd("STARTTLS")
+                if resp != 220:
+                    raise smtplib.SMTPException("Wrong answer to the STARTTLS command")
+                if self.smtpcacerts:
+                    self.smtp.sock = ssl.wrap_socket(
+                        self.smtp.sock,
+                        ca_certs=self.smtpcacerts,
+                        cert_reqs=ssl.CERT_REQUIRED
+                        )
+                else:
+                    self.smtp.sock = ssl.wrap_socket(
+                        self.smtp.sock,
+                        cert_reqs=ssl.CERT_NONE
+                        )
+                    self.environment.get_logger().error(
+                        '*** Warning, the server certificate is not verified (smtp) ***\n'
+                        '***          set the option smtpCACerts                   ***\n'
+                        )
+                if not hasattr(self.smtp.sock, "read"):
+                    # using httplib.FakeSocket with Python 2.5.x or earlier
+                    self.smtp.sock.read = self.smtp.sock.recv
+                self.smtp.file = smtplib.SSLFakeFile(self.smtp.sock)
+                self.smtp.helo_resp = None
+                self.smtp.ehlo_resp = None
+                self.smtp.esmtp_features = {}
+                self.smtp.does_esmtp = 0
+                # end:   ehlo + starttls
+                self.smtp.ehlo()
+            else:
+                sys.stdout.write('*** Error: Control reached an invalid option. ***')
+                sys.exit(1)
+            if self.smtpserverdebuglevel > 0:
+                sys.stdout.write(
+                    "*** Setting debug on for SMTP server connection (%s) ***\n"
+                    % self.smtpserverdebuglevel)
+                self.smtp.set_debuglevel(self.smtpserverdebuglevel)
+        except Exception:
+            self.environment.get_logger().error(
+                '*** Error establishing SMTP connection to %s ***\n'
+                '*** %s\n'
+                % (self.smtpserver, sys.exc_info()[1]))
+            sys.exit(1)
+
+    def close(self):
+        if hasattr(self, 'smtp'):
+            self.smtp.quit()
+            del self.smtp
+
+    def __del__(self):
+        self.close()
+
+    def send(self, lines, to_addrs):
+        try:
+            if self.username or self.password:
+                if not self.loggedin:
+                    self.smtp.login(self.username, self.password)
+                    self.loggedin = True
+            msg = ''.join(lines)
+            # turn comma-separated list into Python list if needed.
+            if is_string(to_addrs):
+                to_addrs = [email for (name, email) in getaddresses([to_addrs])]
+            self.smtp.sendmail(self.envelopesender, to_addrs, msg)
+        except socket.timeout:
+            self.environment.get_logger().error(
+                '*** Error sending email ***\n'
+                '*** SMTP server timed out (timeout is %s)\n'
+                % self.smtpservertimeout)
+        except smtplib.SMTPResponseException:
+            err = sys.exc_info()[1]
+            self.environment.get_logger().error(
+                '*** Error sending email ***\n'
+                '*** Error %d: %s\n'
+                % (err.smtp_code, bytes_to_str(err.smtp_error)))
+            try:
+                smtp = self.smtp
+                # delete the field before quit() so that in case of
+                # error, self.smtp is deleted anyway.
+                del self.smtp
+                smtp.quit()
+            except:
+                self.environment.get_logger().error(
+                    '*** Error closing the SMTP connection ***\n'
+                    '*** Exiting anyway ... ***\n'
+                    '*** %s\n' % sys.exc_info()[1])
+            sys.exit(1)
+
+
+class OutputMailer(Mailer):
+    """Write emails to an output stream, bracketed by lines of '=' characters.
+
+    This is intended for debugging purposes."""
+
+    SEPARATOR = '=' * 75 + '\n'
+
+    def __init__(self, f, environment=None):
+        super(OutputMailer, self).__init__(environment=environment)
+        self.f = f
+
+    def send(self, lines, to_addrs):
+        write_str(self.f, self.SEPARATOR)
+        for line in lines:
+            write_str(self.f, line)
+        write_str(self.f, self.SEPARATOR)
+
+
+def get_git_dir():
+    """Determine GIT_DIR.
+
+    Determine GIT_DIR either from the GIT_DIR environment variable or
+    from the working directory, using Git's usual rules."""
+
+    try:
+        return read_git_output(['rev-parse', '--git-dir'])
+    except CommandError:
+        sys.stderr.write('fatal: git_multimail: not in a git directory\n')
+        sys.exit(1)
+
+
+class Environment(object):
+    """Describes the environment in which the push is occurring.
+
+    An Environment object encapsulates information about the local
+    environment.  For example, it knows how to determine:
+
+    * the name of the repository to which the push occurred
+
+    * what user did the push
+
+    * what users want to be informed about various types of changes.
+
+    An Environment object is expected to have the following methods:
+
+        get_repo_shortname()
+
+            Return a short name for the repository, for display
+            purposes.
+
+        get_repo_path()
+
+            Return the absolute path to the Git repository.
+
+        get_emailprefix()
+
+            Return a string that will be prefixed to every email's
+            subject.
+
+        get_pusher()
+
+            Return the username of the person who pushed the changes.
+            This value is used in the email body to indicate who
+            pushed the change.
+
+        get_pusher_email() (may return None)
+
+            Return the email address of the person who pushed the
+            changes.  The value should be a single RFC 2822 email
+            address as a string; e.g., "Joe User <user@example.com>"
+            if available, otherwise "user@example.com".  If set, the
+            value is used as the Reply-To address for refchange
+            emails.  If it is impossible to determine the pusher's
+            email, this attribute should be set to None (in which case
+            no Reply-To header will be output).
+
+        get_sender()
+
+            Return the address to be used as the 'From' email address
+            in the email envelope.
+
+        get_fromaddr(change=None)
+
+            Return the 'From' email address used in the email 'From:'
+            headers.  If the change is known when this function is
+            called, it is passed in as the 'change' parameter.  (May
+            be a full RFC 2822 email address like 'Joe User
+            <user@example.com>'.)
+
+        get_administrator()
+
+            Return the name and/or email of the repository
+            administrator.  This value is used in the footer as the
+            person to whom requests to be removed from the
+            notification list should be sent.  Ideally, it should
+            include a valid email address.
+
+        get_reply_to_refchange()
+        get_reply_to_commit()
+
+            Return the address to use in the email "Reply-To" header,
+            as a string.  These can be an RFC 2822 email address, or
+            None to omit the "Reply-To" header.
+            get_reply_to_refchange() is used for refchange emails;
+            get_reply_to_commit() is used for individual commit
+            emails.
+
+        get_ref_filter_regex()
+
+            Return a tuple -- a compiled regex, and a boolean indicating
+            whether the regex picks refs to include (if False, the regex
+            matches on refs to exclude).
+
+        get_default_ref_ignore_regex()
+
+            Return a regex matching references that should be ignored,
+            both when deciding what emails to send and when computing
+            which commits are considered new to the repository.
+            Default is "^refs/notes/".
+
+        get_max_subject_length()
+
+            Return an int giving the maximal length for the subject
+            (git log --oneline).
+
+    They should also define the following attributes:
+
+        announce_show_shortlog (bool)
+
+            True iff announce emails should include a shortlog.
+
+        commit_email_format (string)
+
+            If "html", generate commit emails in HTML instead of plain text
+            used by default.
+
+        html_in_intro (bool)
+        html_in_footer (bool)
+
+            When generating HTML emails, the introduction (respectively,
+            the footer) will be HTML-escaped iff html_in_intro
+            (respectively, html_in_footer) is true.  When false, only
+            the values used to expand the template are escaped.
+
+        refchange_showgraph (bool)
+
+            True iff refchanges emails should include a detailed graph.
+
+        refchange_showlog (bool)
+
+            True iff refchanges emails should include a detailed log.
+
+        diffopts (list of strings)
+
+            The options that should be passed to 'git diff' for the
+            summary email.  The value should be a list of strings
+            representing words to be passed to the command.
+
+        graphopts (list of strings)
+
+            Analogous to diffopts, but contains options passed to
+            'git log --graph' when generating the detailed graph for
+            a set of commits (see refchange_showgraph)
+
+        logopts (list of strings)
+
+            Analogous to diffopts, but contains options passed to
+            'git log' when generating the detailed log for a set of
+            commits (see refchange_showlog)
+
+        commitlogopts (list of strings)
+
+            The options that should be passed to 'git log' for each
+            commit mail.  The value should be a list of strings
+            representing words to be passed to the command.
+
+        date_substitute (string)
+
+            String to be used in substitution for 'Date:' at start of
+            line in the output of 'git log'.
+
+        quiet (bool)
+
+            On success, do not write to stderr.
+
+        stdout (bool)
+
+            Write email to stdout rather than emailing.  Useful for
+            debugging.
+
+        combine_when_single_commit (bool)
+
+            True if a combined email should be produced when a single
+            new commit is pushed to a branch, False otherwise.
+
+        from_refchange, from_commit (strings)
+
+            Addresses to use for the From: field for refchange emails
+            and commit emails respectively.  Set from
+            multimailhook.fromRefchange and multimailhook.fromCommit
+            by ConfigEnvironmentMixin.
+
+        log_file, error_log_file, debug_log_file (string)
+
+            Name of a file to which logs should be sent.
+
+        verbose (int)
+
+            How verbose the system should be.
+            - 0 (default): show info, errors, ...
+            - 1 : show basic debug info
+    """
+
+    REPO_NAME_RE = re.compile(r'^(?P<name>.+?)(?:\.git)$')
+
+    def __init__(self, osenv=None):
+        self.osenv = osenv or os.environ
+        self.announce_show_shortlog = False
+        self.commit_email_format = "text"
+        self.html_in_intro = False
+        self.html_in_footer = False
+        self.commitBrowseURL = None
+        self.maxcommitemails = 500
+        self.excludemergerevisions = False
+        self.diffopts = ['--stat', '--summary', '--find-copies-harder']
+        self.graphopts = ['--oneline', '--decorate']
+        self.logopts = []
+        self.refchange_showgraph = False
+        self.refchange_showlog = False
+        self.commitlogopts = ['-C', '--stat', '-p', '--cc']
+        self.date_substitute = 'AuthorDate: '
+        self.quiet = False
+        self.stdout = False
+        self.combine_when_single_commit = True
+        self.logger = None
+
+        self.COMPUTED_KEYS = [
+            'administrator',
+            'charset',
+            'emailprefix',
+            'pusher',
+            'pusher_email',
+            'repo_path',
+            'repo_shortname',
+            'sender',
+            ]
+
+        self._values = None
+
+    def get_logger(self):
+        """Get (possibly creates) the logger associated to this environment."""
+        if self.logger is None:
+            self.logger = Logger(self)
+        return self.logger
+
+    def get_repo_shortname(self):
+        """Use the last part of the repo path, with ".git" stripped off if present."""
+
+        basename = os.path.basename(os.path.abspath(self.get_repo_path()))
+        m = self.REPO_NAME_RE.match(basename)
+        if m:
+            return m.group('name')
+        else:
+            return basename
+
+    def get_pusher(self):
+        raise NotImplementedError()
+
+    def get_pusher_email(self):
+        return None
+
+    def get_fromaddr(self, change=None):
+        config = Config('user')
+        fromname = config.get('name', default='')
+        fromemail = config.get('email', default='')
+        if fromemail:
+            return formataddr([fromname, fromemail])
+        return self.get_sender()
+
+    def get_administrator(self):
+        return 'the administrator of this repository'
+
+    def get_emailprefix(self):
+        return ''
+
+    def get_repo_path(self):
+        if read_git_output(['rev-parse', '--is-bare-repository']) == 'true':
+            path = get_git_dir()
+        else:
+            path = read_git_output(['rev-parse', '--show-toplevel'])
+        return os.path.abspath(path)
+
+    def get_charset(self):
+        return CHARSET
+
+    def get_values(self):
+        """Return a dictionary {keyword: expansion} for this Environment.
+
+        This method is called by Change._compute_values().  The keys
+        in the returned dictionary are available to be used in any of
+        the templates.  The dictionary is created by calling
+        self.get_NAME() for each of the attributes named in
+        COMPUTED_KEYS and recording those that do not return None.
+        The return value is always a new dictionary."""
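+        # For example, the resulting dictionary typically contains keys
+        # such as 'repo_shortname', 'pusher' and 'administrator' (one
+        # per COMPUTED_KEYS entry whose getter returned a value), plus
+        # the empty key so that %()s expands to the empty string.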
+
+        if self._values is None:
+            values = {'': ''}  # %()s expands to the empty string.
+
+            for key in self.COMPUTED_KEYS:
+                value = getattr(self, 'get_%s' % (key,))()
+                if value is not None:
+                    values[key] = value
+
+            self._values = values
+
+        return self._values.copy()
+
+    def get_refchange_recipients(self, refchange):
+        """Return the recipients for notifications about refchange.
+
+        Return the list of email addresses to which notifications
+        about the specified ReferenceChange should be sent."""
+
+        raise NotImplementedError()
+
+    def get_announce_recipients(self, annotated_tag_change):
+        """Return the recipients for notifications about annotated_tag_change.
+
+        Return the list of email addresses to which notifications
+        about the specified AnnotatedTagChange should be sent."""
+
+        raise NotImplementedError()
+
+    def get_reply_to_refchange(self, refchange):
+        return self.get_pusher_email()
+
+    def get_revision_recipients(self, revision):
+        """Return the recipients for messages about revision.
+
+        Return the list of email addresses to which notifications
+        about the specified Revision should be sent.  This method
+        could be overridden, for example, to take into account the
+        contents of the revision when deciding whom to notify about
+        it.  For example, there could be a scheme for users to express
+        interest in particular files or subdirectories, and only
+        receive notification emails for revisions that affect those
+        files."""
+
+        raise NotImplementedError()
+
+    def get_reply_to_commit(self, revision):
+        return revision.author
+
+    def get_default_ref_ignore_regex(self):
+        # The commit messages of git notes are essentially meaningless
+        # and "filenames" in git notes commits are an implementational
+        # detail that might surprise users at first.  As such, we
+        # would need a completely different method for handling emails
+        # of git notes in order for them to be of benefit for users,
+        # which we simply do not have right now.
+        return "^refs/notes/"
+
+    def get_max_subject_length(self):
+        """Return the maximal subject line (git log --oneline) length.
+        Longer subject lines will be truncated."""
+        raise NotImplementedError()
+
+    def filter_body(self, lines):
+        """Filter the lines intended for an email body.
+
+        lines is an iterable over the lines that would go into the
+        email body.  Filter it (e.g., limit the number of lines, the
+        line length, character set, etc.), returning another iterable.
+        See FilterLinesEnvironmentMixin and MaxlinesEnvironmentMixin
+        for classes implementing this functionality."""
+
+        return lines
+
+    def log_msg(self, msg):
+        """Write the string msg on a log file or on stderr.
+
+        Sends the text to stderr by default, override to change the behavior."""
+        self.get_logger().info(msg)
+
+    def log_warning(self, msg):
+        """Write the string msg on a log file or on stderr.
+
+        Sends the text to stderr by default, override to change the behavior."""
+        self.get_logger().warning(msg)
+
+    def log_error(self, msg):
+        """Write the string msg on a log file or on stderr.
+
+        Sends the text to stderr by default, override to change the behavior."""
+        self.get_logger().error(msg)
+
+    def check(self):
+        pass
+
+
+class ConfigEnvironmentMixin(Environment):
+    """A mixin that sets self.config to its constructor's config argument.
+
+    This class's constructor consumes the "config" argument.
+
+    Mixins that need to inspect the config should inherit from this
+    class (1) to make sure that "config" is still in the constructor
+    arguments when its own constructor runs and/or (2) to be sure that
+    self.config is set after construction."""
+
+    def __init__(self, config, **kw):
+        super(ConfigEnvironmentMixin, self).__init__(**kw)
+        self.config = config
+
+
+class ConfigOptionsEnvironmentMixin(ConfigEnvironmentMixin):
+    """An Environment that reads most of its information from "git config"."""
+
+    @staticmethod
+    def forbid_field_values(name, value, forbidden):
+        for forbidden_val in forbidden:
+            if value is not None and value.lower() == forbidden_val:
+                raise ConfigurationException(
+                    '"%s" is not an allowed setting for %s' % (value, name)
+                    )
+
+    def __init__(self, config, **kw):
+        super(ConfigOptionsEnvironmentMixin, self).__init__(
+            config=config, **kw
+            )
+
+        for var, cfg in (
+                ('announce_show_shortlog', 'announceshortlog'),
+                ('refchange_showgraph', 'refchangeShowGraph'),
+                ('refchange_showlog', 'refchangeshowlog'),
+                ('quiet', 'quiet'),
+                ('stdout', 'stdout'),
+                ):
+            val = config.get_bool(cfg)
+            if val is not None:
+                setattr(self, var, val)
+
+        commit_email_format = config.get('commitEmailFormat')
+        if commit_email_format is not None:
+            if commit_email_format != "html" and commit_email_format != "text":
+                self.log_warning(
+                    '*** Unknown value for multimailhook.commitEmailFormat: %s\n' %
+                    commit_email_format +
+                    '*** Expected either "text" or "html".  Ignoring.\n'
+                    )
+            else:
+                self.commit_email_format = commit_email_format
+
+        html_in_intro = config.get_bool('htmlInIntro')
+        if html_in_intro is not None:
+            self.html_in_intro = html_in_intro
+
+        html_in_footer = config.get_bool('htmlInFooter')
+        if html_in_footer is not None:
+            self.html_in_footer = html_in_footer
+
+        self.commitBrowseURL = config.get('commitBrowseURL')
+
+        self.excludemergerevisions = config.get('excludeMergeRevisions')
+
+        maxcommitemails = config.get('maxcommitemails')
+        if maxcommitemails is not None:
+            try:
+                self.maxcommitemails = int(maxcommitemails)
+            except ValueError:
+                self.log_warning(
+                    '*** Malformed value for multimailhook.maxCommitEmails: %s\n'
+                    % maxcommitemails +
+                    '*** Expected a number.  Ignoring.\n'
+                    )
+
+        diffopts = config.get('diffopts')
+        if diffopts is not None:
+            self.diffopts = shlex.split(diffopts)
+
+        graphopts = config.get('graphOpts')
+        if graphopts is not None:
+            self.graphopts = shlex.split(graphopts)
+
+        logopts = config.get('logopts')
+        if logopts is not None:
+            self.logopts = shlex.split(logopts)
+
+        commitlogopts = config.get('commitlogopts')
+        if commitlogopts is not None:
+            self.commitlogopts = shlex.split(commitlogopts)
+
+        date_substitute = config.get('dateSubstitute')
+        if date_substitute == 'none':
+            self.date_substitute = None
+        elif date_substitute is not None:
+            self.date_substitute = date_substitute
+
+        reply_to = config.get('replyTo')
+        self.__reply_to_refchange = config.get('replyToRefchange', default=reply_to)
+        self.forbid_field_values('replyToRefchange',
+                                 self.__reply_to_refchange,
+                                 ['author'])
+        self.__reply_to_commit = config.get('replyToCommit', default=reply_to)
+
+        self.from_refchange = config.get('fromRefchange')
+        self.forbid_field_values('fromRefchange',
+                                 self.from_refchange,
+                                 ['author', 'none'])
+        self.from_commit = config.get('fromCommit')
+        self.forbid_field_values('fromCommit',
+                                 self.from_commit,
+                                 ['none'])
+
+        combine = config.get_bool('combineWhenSingleCommit')
+        if combine is not None:
+            self.combine_when_single_commit = combine
+
+        self.log_file = config.get('logFile', default=None)
+        self.error_log_file = config.get('errorLogFile', default=None)
+        self.debug_log_file = config.get('debugLogFile', default=None)
+        if config.get_bool('Verbose', default=False):
+            self.verbose = 1
+        else:
+            self.verbose = 0
+
+    def get_administrator(self):
+        return (
+            self.config.get('administrator') or
+            self.get_sender() or
+            super(ConfigOptionsEnvironmentMixin, self).get_administrator()
+            )
+
+    def get_repo_shortname(self):
+        return (
+            self.config.get('reponame') or
+            super(ConfigOptionsEnvironmentMixin, self).get_repo_shortname()
+            )
+
+    def get_emailprefix(self):
+        emailprefix = self.config.get('emailprefix')
+        if emailprefix is not None:
+            emailprefix = emailprefix.strip()
+            if emailprefix:
+                emailprefix += ' '
+        else:
+            emailprefix = '[%(repo_shortname)s] '
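+        # For example, the default prefix expands to '[myrepo] ' for a
+        # repository whose short name is 'myrepo' ('myrepo' being just
+        # an illustration).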
+        short_name = self.get_repo_shortname()
+        try:
+            return emailprefix % {'repo_shortname': short_name}
+        except:
+            self.get_logger().error(
+                '*** Invalid multimailhook.emailPrefix: %s\n' % emailprefix +
+                '*** %s\n' % sys.exc_info()[1] +
+                "*** Only the '%(repo_shortname)s' placeholder is allowed\n"
+                )
+            raise ConfigurationException(
+                '"%s" is not an allowed setting for emailPrefix' % emailprefix
+                )
+
+    def get_sender(self):
+        return self.config.get('envelopesender')
+
+    def process_addr(self, addr, change):
+        if addr.lower() == 'author':
+            if hasattr(change, 'author'):
+                return change.author
+            else:
+                return None
+        elif addr.lower() == 'pusher':
+            return self.get_pusher_email()
+        elif addr.lower() == 'none':
+            return None
+        else:
+            return addr
+
+    def get_fromaddr(self, change=None):
+        fromaddr = self.config.get('from')
+        if change:
+            specific_fromaddr = change.get_specific_fromaddr()
+            if specific_fromaddr:
+                fromaddr = specific_fromaddr
+        if fromaddr:
+            fromaddr = self.process_addr(fromaddr, change)
+        if fromaddr:
+            return fromaddr
+        return super(ConfigOptionsEnvironmentMixin, self).get_fromaddr(change)
+
+    def get_reply_to_refchange(self, refchange):
+        if self.__reply_to_refchange is None:
+            return super(ConfigOptionsEnvironmentMixin, self).get_reply_to_refchange(refchange)
+        else:
+            return self.process_addr(self.__reply_to_refchange, refchange)
+
+    def get_reply_to_commit(self, revision):
+        if self.__reply_to_commit is None:
+            return super(ConfigOptionsEnvironmentMixin, self).get_reply_to_commit(revision)
+        else:
+            return self.process_addr(self.__reply_to_commit, revision)
+
+    def get_scancommitforcc(self):
+        return self.config.get('scancommitforcc')
+
+
+class FilterLinesEnvironmentMixin(Environment):
+    """Handle encoding and maximum line length of body lines.
+
+        email_max_line_length (int or None)
+
+            The maximum length of any single line in the email body.
+            Longer lines are truncated at that length with ' [...]'
+            appended.
+
+        strict_utf8 (bool)
+
+            If this field is set to True, then the email body text is
+            expected to be UTF-8.  Any invalid characters are
+            converted to U+FFFD, the Unicode replacement character
+            (encoded as UTF-8, of course).
+
+    """
+
+    def __init__(self, strict_utf8=True,
+                 email_max_line_length=500, max_subject_length=500,
+                 **kw):
+        super(FilterLinesEnvironmentMixin, self).__init__(**kw)
+        self.__strict_utf8 = strict_utf8
+        self.__email_max_line_length = email_max_line_length
+        self.__max_subject_length = max_subject_length
+
+    def filter_body(self, lines):
+        lines = super(FilterLinesEnvironmentMixin, self).filter_body(lines)
+        if self.__strict_utf8:
+            if not PYTHON3:
+                lines = (line.decode(ENCODING, 'replace') for line in lines)
+            # Limit the line length in Unicode-space to avoid
+            # splitting characters:
+            if self.__email_max_line_length > 0:
+                lines = limit_linelength(lines, self.__email_max_line_length)
+            if not PYTHON3:
+                lines = (line.encode(ENCODING, 'replace') for line in lines)
+        elif self.__email_max_line_length:
+            lines = limit_linelength(lines, self.__email_max_line_length)
+
+        return lines
+
+    def get_max_subject_length(self):
+        return self.__max_subject_length
+
+
+class ConfigFilterLinesEnvironmentMixin(
+        ConfigEnvironmentMixin,
+        FilterLinesEnvironmentMixin,
+        ):
+    """Handle encoding and maximum line length based on config."""
+
+    def __init__(self, config, **kw):
+        strict_utf8 = config.get_bool('emailstrictutf8', default=None)
+        if strict_utf8 is not None:
+            kw['strict_utf8'] = strict_utf8
+
+        email_max_line_length = config.get('emailmaxlinelength')
+        if email_max_line_length is not None:
+            kw['email_max_line_length'] = int(email_max_line_length)
+
+        max_subject_length = config.get('subjectMaxLength', default=email_max_line_length)
+        if max_subject_length is not None:
+            kw['max_subject_length'] = int(max_subject_length)
+
+        super(ConfigFilterLinesEnvironmentMixin, self).__init__(
+            config=config, **kw
+            )
+
+
+class MaxlinesEnvironmentMixin(Environment):
+    """Limit the email body to a specified number of lines."""
+
+    def __init__(self, emailmaxlines, **kw):
+        super(MaxlinesEnvironmentMixin, self).__init__(**kw)
+        self.__emailmaxlines = emailmaxlines
+
+    def filter_body(self, lines):
+        lines = super(MaxlinesEnvironmentMixin, self).filter_body(lines)
+        if self.__emailmaxlines > 0:
+            lines = limit_lines(lines, self.__emailmaxlines)
+        return lines
+
+
+class ConfigMaxlinesEnvironmentMixin(
+        ConfigEnvironmentMixin,
+        MaxlinesEnvironmentMixin,
+        ):
+    """Limit the email body to the number of lines specified in config."""
+
+    def __init__(self, config, **kw):
+        emailmaxlines = int(config.get('emailmaxlines', default='0'))
+        super(ConfigMaxlinesEnvironmentMixin, self).__init__(
+            config=config,
+            emailmaxlines=emailmaxlines,
+            **kw
+            )
+
+
+class FQDNEnvironmentMixin(Environment):
+    """A mixin that sets the host's FQDN to its constructor argument."""
+
+    def __init__(self, fqdn, **kw):
+        super(FQDNEnvironmentMixin, self).__init__(**kw)
+        self.COMPUTED_KEYS += ['fqdn']
+        self.__fqdn = fqdn
+
+    def get_fqdn(self):
+        """Return the fully-qualified domain name for this host.
+
+        Return None if it is unavailable or unwanted."""
+
+        return self.__fqdn
+
+
+class ConfigFQDNEnvironmentMixin(
+        ConfigEnvironmentMixin,
+        FQDNEnvironmentMixin,
+        ):
+    """Read the FQDN from the config."""
+
+    def __init__(self, config, **kw):
+        fqdn = config.get('fqdn')
+        super(ConfigFQDNEnvironmentMixin, self).__init__(
+            config=config,
+            fqdn=fqdn,
+            **kw
+            )
+
+
+class ComputeFQDNEnvironmentMixin(FQDNEnvironmentMixin):
+    """Get the FQDN by calling socket.getfqdn()."""
+
+    def __init__(self, **kw):
+        super(ComputeFQDNEnvironmentMixin, self).__init__(
+            fqdn=socket.getfqdn(),
+            **kw
+            )
+
+
+class PusherDomainEnvironmentMixin(ConfigEnvironmentMixin):
+    """Deduce pusher_email from pusher by appending an emaildomain."""
+
+    def __init__(self, **kw):
+        super(PusherDomainEnvironmentMixin, self).__init__(**kw)
+        self.__emaildomain = self.config.get('emaildomain')
+
+    def get_pusher_email(self):
+        if self.__emaildomain:
+            # Derive the pusher's full email address in the default way:
+            return '%s@%s' % (self.get_pusher(), self.__emaildomain)
+        else:
+            return super(PusherDomainEnvironmentMixin, self).get_pusher_email()
+
+
+class StaticRecipientsEnvironmentMixin(Environment):
+    """Set recipients statically based on constructor parameters."""
+
+    def __init__(
+            self,
+            refchange_recipients, announce_recipients, revision_recipients, scancommitforcc,
+            **kw
+            ):
+        super(StaticRecipientsEnvironmentMixin, self).__init__(**kw)
+
+        # The recipients for various types of notification emails, as
+        # RFC 2822 email addresses separated by commas (or the empty
+        # string if no recipients are configured).  Although there is
+        # a mechanism to choose the recipient lists based on the
+        # actual *contents* of the change being reported, we only
+        # choose based on the *type* of the change.  Therefore we can
+        # compute them once and for all:
+        self.__refchange_recipients = refchange_recipients
+        self.__announce_recipients = announce_recipients
+        self.__revision_recipients = revision_recipients
+
+    def check(self):
+        if not (self.get_refchange_recipients(None) or
+                self.get_announce_recipients(None) or
+                self.get_revision_recipients(None) or
+                self.get_scancommitforcc()):
+            raise ConfigurationException('No email recipients configured!')
+        super(StaticRecipientsEnvironmentMixin, self).check()
+
+    def get_refchange_recipients(self, refchange):
+        if self.__refchange_recipients is None:
+            return super(StaticRecipientsEnvironmentMixin,
+                         self).get_refchange_recipients(refchange)
+        return self.__refchange_recipients
+
+    def get_announce_recipients(self, annotated_tag_change):
+        if self.__announce_recipients is None:
+            return super(StaticRecipientsEnvironmentMixin,
+                         self).get_announce_recipients(annotated_tag_change)
+        return self.__announce_recipients
+
+    def get_revision_recipients(self, revision):
+        if self.__revision_recipients is None:
+            return super(StaticRecipientsEnvironmentMixin,
+                         self).get_revision_recipients(revision)
+        return self.__revision_recipients
+
+
+class CLIRecipientsEnvironmentMixin(Environment):
+    """Mixin storing recipients information coming from the
+    command-line."""
+
+    def __init__(self, cli_recipients=None, **kw):
+        super(CLIRecipientsEnvironmentMixin, self).__init__(**kw)
+        self.__cli_recipients = cli_recipients
+
+    def get_refchange_recipients(self, refchange):
+        if self.__cli_recipients is None:
+            return super(CLIRecipientsEnvironmentMixin,
+                         self).get_refchange_recipients(refchange)
+        return self.__cli_recipients
+
+    def get_announce_recipients(self, annotated_tag_change):
+        if self.__cli_recipients is None:
+            return super(CLIRecipientsEnvironmentMixin,
+                         self).get_announce_recipients(annotated_tag_change)
+        return self.__cli_recipients
+
+    def get_revision_recipients(self, revision):
+        if self.__cli_recipients is None:
+            return super(CLIRecipientsEnvironmentMixin,
+                         self).get_revision_recipients(revision)
+        return self.__cli_recipients
+
+
+class ConfigRecipientsEnvironmentMixin(
+        ConfigEnvironmentMixin,
+        StaticRecipientsEnvironmentMixin
+        ):
+    """Determine recipients statically based on config."""
+
+    def __init__(self, config, **kw):
+        super(ConfigRecipientsEnvironmentMixin, self).__init__(
+            config=config,
+            refchange_recipients=self._get_recipients(
+                config, 'refchangelist', 'mailinglist',
+                ),
+            announce_recipients=self._get_recipients(
+                config, 'announcelist', 'refchangelist', 'mailinglist',
+                ),
+            revision_recipients=self._get_recipients(
+                config, 'commitlist', 'mailinglist',
+                ),
+            scancommitforcc=config.get('scancommitforcc'),
+            **kw
+            )
+
+    def _get_recipients(self, config, *names):
+        """Return the recipients for a particular type of message.
+
+        Return the list of email addresses to which a particular type
+        of notification email should be sent, by looking at the config
+        value for "multimailhook.$name" for each of names.  Use the
+        value from the first name that is configured.  The return
+        value is a (possibly empty) string containing RFC 2822 email
+        addresses separated by commas.  If no configuration could be
+        found, return the empty string."""
+
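+        # Note: the "else" clause below is attached to the "for" loop, not
+        # to an "if"; it runs only when none of the given names was
+        # configured at all.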
+        for name in names:
+            lines = config.get_all(name)
+            if lines is not None:
+                lines = [line.strip() for line in lines]
+                # A single "none" is a special value equivalent to the empty string.
+                if lines == ['none']:
+                    lines = ['']
+                return ', '.join(lines)
+        else:
+            return ''
+
+
+class StaticRefFilterEnvironmentMixin(Environment):
+    """Set branch filter statically based on constructor parameters."""
+
+    def __init__(self, ref_filter_incl_regex, ref_filter_excl_regex,
+                 ref_filter_do_send_regex, ref_filter_dont_send_regex,
+                 **kw):
+        super(StaticRefFilterEnvironmentMixin, self).__init__(**kw)
+
+        if ref_filter_incl_regex and ref_filter_excl_regex:
+            raise ConfigurationException(
+                "Cannot specify both a ref inclusion and exclusion regex.")
+        self.__is_inclusion_filter = bool(ref_filter_incl_regex)
+        default_exclude = self.get_default_ref_ignore_regex()
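+        # Note that an explicit exclusion regex is OR-ed with the default
+        # ignore regex below, so refs that are ignored by default remain
+        # ignored.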
+        if ref_filter_incl_regex:
+            ref_filter_regex = ref_filter_incl_regex
+        elif ref_filter_excl_regex:
+            ref_filter_regex = ref_filter_excl_regex + '|' + default_exclude
+        else:
+            ref_filter_regex = default_exclude
+        try:
+            self.__compiled_regex = re.compile(ref_filter_regex)
+        except Exception:
+            raise ConfigurationException(
+                'Invalid Ref Filter Regex "%s": %s' % (ref_filter_regex, sys.exc_info()[1]))
+
+        if ref_filter_do_send_regex and ref_filter_dont_send_regex:
+            raise ConfigurationException(
+                "Cannot specify both a ref doSend and dontSend regex.")
+        self.__is_do_send_filter = bool(ref_filter_do_send_regex)
+        if ref_filter_do_send_regex:
+            ref_filter_send_regex = ref_filter_do_send_regex
+        elif ref_filter_dont_send_regex:
+            ref_filter_send_regex = ref_filter_dont_send_regex
+        else:
+            ref_filter_send_regex = '.*'
+            self.__is_do_send_filter = True
+        try:
+            self.__send_compiled_regex = re.compile(ref_filter_send_regex)
+        except Exception:
+            raise ConfigurationException(
+                'Invalid Ref Filter Regex "%s": %s' %
+                (ref_filter_send_regex, sys.exc_info()[1]))
+
+    def get_ref_filter_regex(self, send_filter=False):
+        if send_filter:
+            return self.__send_compiled_regex, self.__is_do_send_filter
+        else:
+            return self.__compiled_regex, self.__is_inclusion_filter
+
+
+class ConfigRefFilterEnvironmentMixin(
+        ConfigEnvironmentMixin,
+        StaticRefFilterEnvironmentMixin
+        ):
+    """Determine branch filtering statically based on config."""
+
+    def _get_regex(self, config, key):
+        """Build one regex from whitespace-separated regexes. The refFilter* config
+        variables are multivalued (hence the use of get_all), and we
+        allow each entry to be a whitespace-separated list (hence the
+        split on each line). The whole thing is glued into a single regex."""
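+        # For example, the two config entries "foo bar" and "baz" are
+        # combined into the single regex 'foo|bar|baz'.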
+        values = config.get_all(key)
+        if values is None:
+            return values
+        items = []
+        for line in values:
+            for i in line.split():
+                items.append(i)
+        if items == []:
+            return None
+        return '|'.join(items)
+
+    def __init__(self, config, **kw):
+        super(ConfigRefFilterEnvironmentMixin, self).__init__(
+            config=config,
+            ref_filter_incl_regex=self._get_regex(config, 'refFilterInclusionRegex'),
+            ref_filter_excl_regex=self._get_regex(config, 'refFilterExclusionRegex'),
+            ref_filter_do_send_regex=self._get_regex(config, 'refFilterDoSendRegex'),
+            ref_filter_dont_send_regex=self._get_regex(config, 'refFilterDontSendRegex'),
+            **kw
+            )
+
+
+class ProjectdescEnvironmentMixin(Environment):
+    """Make a "projectdesc" value available for templates.
+
+    By default, it is set to the first line of $GIT_DIR/description
+    (if that file is present and appears to be set meaningfully)."""
+
+    def __init__(self, **kw):
+        super(ProjectdescEnvironmentMixin, self).__init__(**kw)
+        self.COMPUTED_KEYS += ['projectdesc']
+
+    def get_projectdesc(self):
+        """Return a one-line description of the project."""
+
+        git_dir = get_git_dir()
+        try:
+            projectdesc = open(os.path.join(git_dir, 'description')).readline().strip()
+            if projectdesc and not projectdesc.startswith('Unnamed repository'):
+                return projectdesc
+        except IOError:
+            pass
+
+        return 'UNNAMED PROJECT'
+
+
+class GenericEnvironmentMixin(Environment):
+    def get_pusher(self):
+        return self.osenv.get('USER', self.osenv.get('USERNAME', 'unknown user'))
+
+
+class GitoliteEnvironmentHighPrecMixin(Environment):
+    def get_pusher(self):
+        return self.osenv.get('GL_USER', 'unknown user')
+
+
+class GitoliteEnvironmentLowPrecMixin(
+        ConfigEnvironmentMixin,
+        Environment):
+
+    def get_repo_shortname(self):
+        # The gitolite environment variable $GL_REPO is a pretty good
+        # repo_shortname (though it's probably not as good as a value
+        # the user might have explicitly put in his config).
+        return (
+            self.osenv.get('GL_REPO', None) or
+            super(GitoliteEnvironmentLowPrecMixin, self).get_repo_shortname()
+            )
+
+    @staticmethod
+    def _compile_regex(re_template):
+        return (
+            re.compile(re_template % x)
+            for x in (
+                r'BEGIN\s+USER\s+EMAILS',
+                r'([^\s]+)\s+(.*)',
+                r'END\s+USER\s+EMAILS',
+                ))
+
+    def get_fromaddr(self, change=None):
+        GL_USER = self.osenv.get('GL_USER')
+        if GL_USER is not None:
+            # Find the path to gitolite.conf.  Note that gitolite v3
+            # did away with the GL_ADMINDIR and GL_CONF environment
+            # variables (they are now hard-coded).
+            GL_ADMINDIR = self.osenv.get(
+                'GL_ADMINDIR',
+                os.path.expanduser(os.path.join('~', '.gitolite')))
+            GL_CONF = self.osenv.get(
+                'GL_CONF',
+                os.path.join(GL_ADMINDIR, 'conf', 'gitolite.conf'))
+
+            mailaddress_map = self.config.get('MailaddressMap')
+            # If relative, consider relative to GL_CONF:
+            if mailaddress_map:
+                mailaddress_map = os.path.join(os.path.dirname(GL_CONF),
+                                               mailaddress_map)
+                if os.path.isfile(mailaddress_map):
+                    f = open(mailaddress_map, 'rU')
+                    try:
+                        # Leading '#' is optional
+                        re_begin, re_user, re_end = self._compile_regex(
+                            r'^(?:\s*#)?\s*%s\s*$')
+                        for l in f:
+                            l = l.rstrip('\n')
+                            if re_begin.match(l) or re_end.match(l):
+                                continue  # Ignore these lines
+                            m = re_user.match(l)
+                            if m:
+                                if m.group(1) == GL_USER:
+                                    return m.group(2)
+                                else:
+                                    continue  # Not this user, but not an error
+                            raise ConfigurationException(
+                                "Syntax error in mail address map.\n"
+                                "Check file {}.\n"
+                                "Line: {}".format(mailaddress_map, l))
+
+                    finally:
+                        f.close()
+
+            if os.path.isfile(GL_CONF):
+                f = open(GL_CONF, 'rU')
+                try:
+                    in_user_emails_section = False
+                    re_begin, re_user, re_end = self._compile_regex(
+                        r'^\s*#\s*%s\s*$')
+                    for l in f:
+                        l = l.rstrip('\n')
+                        if not in_user_emails_section:
+                            if re_begin.match(l):
+                                in_user_emails_section = True
+                            continue
+                        if re_end.match(l):
+                            break
+                        m = re_user.match(l)
+                        if m and m.group(1) == GL_USER:
+                            return m.group(2)
+                finally:
+                    f.close()
+        return super(GitoliteEnvironmentLowPrecMixin, self).get_fromaddr(change)
+
+
+class IncrementalDateTime(object):
+    """Simple wrapper to give incremental date/times.
+
+    Each call will result in a date/time a second later than the
+    previous call.  This can be used to falsify email headers, to
+    increase the likelihood that email clients sort the emails
+    correctly."""
+
+    def __init__(self):
+        self.time = time.time()
+        self.next = self.__next__  # Python 2 backward compatibility
+
+    def __next__(self):
+        formatted = formatdate(self.time, True)
+        self.time += 1
+        return formatted
+
+
+class StashEnvironmentHighPrecMixin(Environment):
+    def __init__(self, user=None, repo=None, **kw):
+        super(StashEnvironmentHighPrecMixin,
+              self).__init__(user=user, repo=repo, **kw)
+        self.__user = user
+        self.__repo = repo
+
+    def get_pusher(self):
+        return re.match(r'(.*?)\s*<', self.__user).group(1)
+
+    def get_pusher_email(self):
+        return self.__user
+
+
+class StashEnvironmentLowPrecMixin(Environment):
+    def __init__(self, user=None, repo=None, **kw):
+        super(StashEnvironmentLowPrecMixin, self).__init__(**kw)
+        self.__repo = repo
+        self.__user = user
+
+    def get_repo_shortname(self):
+        return self.__repo
+
+    def get_fromaddr(self, change=None):
+        return self.__user
+
+
+class GerritEnvironmentHighPrecMixin(Environment):
+    def __init__(self, project=None, submitter=None, update_method=None, **kw):
+        super(GerritEnvironmentHighPrecMixin,
+              self).__init__(submitter=submitter, project=project, **kw)
+        self.__project = project
+        self.__submitter = submitter
+        self.__update_method = update_method
+        # Make an 'update_method' value available for templates.
+        self.COMPUTED_KEYS += ['update_method']
+
+    def get_pusher(self):
+        if self.__submitter:
+            if self.__submitter.find('<') != -1:
+                # Submitter has a configured email, we transformed
+                # __submitter into an RFC 2822 string already.
+                return re.match(r'(.*?)\s*<', self.__submitter).group(1)
+            else:
+                # Submitter has no configured email, it's just his name.
+                return self.__submitter
+        else:
+            # If we arrive here, this means someone pushed "Submit" from
+            # the gerrit web UI for the CR (or used one of the programmatic
+            # APIs to do the same, such as gerrit review) and the
+            # merge/push was done by the Gerrit user.  It was technically
+            # triggered by someone else, but sadly we have no way of
+            # determining who that someone else is at this point.
+            return 'Gerrit'  # 'unknown user'?
+
+    def get_pusher_email(self):
+        if self.__submitter:
+            return self.__submitter
+        else:
+            return super(GerritEnvironmentHighPrecMixin, self).get_pusher_email()
+
+    def get_default_ref_ignore_regex(self):
+        default = super(GerritEnvironmentHighPrecMixin, self).get_default_ref_ignore_regex()
+        return default + '|^refs/changes/|^refs/cache-automerge/|^refs/meta/'
+
+    def get_revision_recipients(self, revision):
+        # Merge commits created by Gerrit when users hit "Submit this patchset"
+        # in the Web UI (or do equivalently with REST APIs or the gerrit review
+        # command) are not something users want to see an individual email for.
+        # Filter them out.
+        committer = read_git_output(['log', '--no-walk', '--format=%cN',
+                                     revision.rev.sha1])
+        if committer == 'Gerrit Code Review':
+            return []
+        else:
+            return super(GerritEnvironmentHighPrecMixin, self).get_revision_recipients(revision)
+
+    def get_update_method(self):
+        return self.__update_method
+
+
+class GerritEnvironmentLowPrecMixin(Environment):
+    def __init__(self, project=None, submitter=None, **kw):
+        super(GerritEnvironmentLowPrecMixin, self).__init__(**kw)
+        self.__project = project
+        self.__submitter = submitter
+
+    def get_repo_shortname(self):
+        return self.__project
+
+    def get_fromaddr(self, change=None):
+        if self.__submitter and self.__submitter.find('<') != -1:
+            return self.__submitter
+        else:
+            return super(GerritEnvironmentLowPrecMixin, self).get_fromaddr(change)
+
+
+class Push(object):
+    """Represent an entire push (i.e., a group of ReferenceChanges).
+
+    It is easy to figure out what commits were added to a *branch* by
+    a Reference change:
+
+        git rev-list change.old..change.new
+
+    or removed from a *branch*:
+
+        git rev-list change.new..change.old
+
+    But it is not quite so trivial to determine which entirely new
+    commits were added to the *repository* by a push and which old
+    commits were discarded by a push.  A big part of the job of this
+    class is to figure out these things, and to make sure that new
+    commits are only detailed once even if they were added to multiple
+    references.
+
+    The first step is to determine the "other" references--those
+    unaffected by the current push.  They are computed by listing all
+    references then removing any affected by this push.  The results
+    are stored in Push._other_ref_sha1s.
+
+    The commits contained in the repository before this push were
+
+        git rev-list other1 other2 other3 ... change1.old change2.old ...
+
+    Where "changeN.old" is the old value of one of the references
+    affected by this push.
+
+    The commits contained in the repository after this push are
+
+        git rev-list other1 other2 other3 ... change1.new change2.new ...
+
+    The commits added by this push are the difference between these
+    two sets, which can be written
+
+        git rev-list \
+            ^other1 ^other2 ... \
+            ^change1.old ^change2.old ... \
+            change1.new change2.new ...
+
+    The commits removed by this push can be computed by
+
+        git rev-list \
+            ^other1 ^other2 ... \
+            ^change1.new ^change2.new ... \
+            change1.old change2.old ...
+
+    The last point is that it is possible that other pushes are
+    occurring simultaneously with this one, so reference values can
+    change at any time.  It is impossible to eliminate all race
+    conditions, but we reduce the window of time during which problems
+    can occur by translating reference names to SHA1s as soon as
+    possible and working with SHA1s thereafter (because SHA1s are
+    immutable)."""
+
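+    # A sketch with made-up names: if a push updates only 'master' from
+    # commit A to commit B while 'topic' (pointing at T) is untouched,
+    # the commits new to the repository are those selected by
+    #
+    #     git rev-list ^T ^A B
+    #
+    # i.e. reachable from the new value but from neither the old value
+    # nor any unaffected reference.
+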
+    # A map {(changeclass, changetype): integer} specifying the order
+    # that reference changes will be processed if multiple reference
+    # changes are included in a single push.  The order is significant
+    # mostly because new commit notifications are threaded together
+    # with the first reference change that includes the commit.  The
+    # following order thus causes commits to be grouped with branch
+    # changes (as opposed to tag changes) if possible.
+    SORT_ORDER = dict(
+        (value, i) for (i, value) in enumerate([
+            (BranchChange, 'update'),
+            (BranchChange, 'create'),
+            (AnnotatedTagChange, 'update'),
+            (AnnotatedTagChange, 'create'),
+            (NonAnnotatedTagChange, 'update'),
+            (NonAnnotatedTagChange, 'create'),
+            (BranchChange, 'delete'),
+            (AnnotatedTagChange, 'delete'),
+            (NonAnnotatedTagChange, 'delete'),
+            (OtherReferenceChange, 'update'),
+            (OtherReferenceChange, 'create'),
+            (OtherReferenceChange, 'delete'),
+            ])
+        )
+
+    def __init__(self, environment, changes, ignore_other_refs=False):
+        self.changes = sorted(changes, key=self._sort_key)
+        self.__other_ref_sha1s = None
+        self.__cached_commits_spec = {}
+        self.environment = environment
+
+        if ignore_other_refs:
+            self.__other_ref_sha1s = set()
+
+    @classmethod
+    def _sort_key(klass, change):
+        return (klass.SORT_ORDER[change.__class__, change.change_type], change.refname,)
+
+    @property
+    def _other_ref_sha1s(self):
+        """The GitObjects referred to by references unaffected by this push.
+        """
+        if self.__other_ref_sha1s is None:
+            # The refnames being changed by this push:
+            updated_refs = set(
+                change.refname
+                for change in self.changes
+                )
+
+            # The SHA-1s of commits referred to by all references in this
+            # repository *except* updated_refs:
+            sha1s = set()
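+            # The second format line dereferences annotated tags (the "*"
+            # fields), so a tag that points at a commit contributes the
+            # commit it refers to.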
+            fmt = (
+                '%(objectname) %(objecttype) %(refname)\n'
+                '%(*objectname) %(*objecttype) %(refname)'
+                )
+            ref_filter_regex, is_inclusion_filter = \
+                self.environment.get_ref_filter_regex()
+            for line in read_git_lines(
+                    ['for-each-ref', '--format=%s' % (fmt,)]):
+                (sha1, type, name) = line.split(' ', 2)
+                if (sha1 and type == 'commit' and
+                        name not in updated_refs and
+                        include_ref(name, ref_filter_regex, is_inclusion_filter)):
+                    sha1s.add(sha1)
+
+            self.__other_ref_sha1s = sha1s
+
+        return self.__other_ref_sha1s
+
+    def _get_commits_spec_incl(self, new_or_old, reference_change=None):
+        """Get new or old SHA-1 from one or each of the changed refs.
+
+        Return a list of SHA-1 commit identifier strings suitable as
+        arguments to 'git rev-list' (or 'git log' or ...).  The
+        returned identifiers are either the old or new values from one
+        or all of the changed references, depending on the values of
+        new_or_old and reference_change.
+
+        new_or_old is either the string 'new' or the string 'old'.  If
+        'new', the returned SHA-1 identifiers are the new values from
+        each changed reference.  If 'old', the SHA-1 identifiers are
+        the old values from each changed reference.
+
+        If reference_change is specified and not None, only the new or
+        old reference from the specified reference is included in the
+        return value.
+
+        This function returns None if there are no matching revisions
+        (e.g., because a branch was deleted and new_or_old is 'new').
+        """
+
+        if not reference_change:
+            incl_spec = sorted(
+                getattr(change, new_or_old).sha1
+                for change in self.changes
+                if getattr(change, new_or_old)
+                )
+            if not incl_spec:
+                incl_spec = None
+        elif not getattr(reference_change, new_or_old).commit_sha1:
+            incl_spec = None
+        else:
+            incl_spec = [getattr(reference_change, new_or_old).commit_sha1]
+        return incl_spec
+
+    def _get_commits_spec_excl(self, new_or_old):
+        """Get exclusion revisions for determining new or discarded commits.
+
+        Return a list of strings suitable as arguments to 'git
+        rev-list' (or 'git log' or ...) that will exclude all
+        commits that, depending on the value of new_or_old, were
+        either previously in the repository (useful for determining
+        which commits are new to the repository) or currently in the
+        repository (useful for determining which commits were
+        discarded from the repository).
+
+        new_or_old is either the string 'new' or the string 'old'.  If
+        'new', the commits to be excluded are those that were in the
+        repository before the push.  If 'old', the commits to be
+        excluded are those that are currently in the repository.  """
+
+        old_or_new = {'old': 'new', 'new': 'old'}[new_or_old]
+        excl_revs = self._other_ref_sha1s.union(
+            getattr(change, old_or_new).sha1
+            for change in self.changes
+            if getattr(change, old_or_new).type in ['commit', 'tag']
+            )
+        return ['^' + sha1 for sha1 in sorted(excl_revs)]
+
+    def get_commits_spec(self, new_or_old, reference_change=None):
+        """Get rev-list arguments for added or discarded commits.
+
+        Return a list of strings suitable as arguments to 'git
+        rev-list' (or 'git log' or ...) that select those commits
+        that, depending on the value of new_or_old, are either new to
+        the repository or were discarded from the repository.
+
+        new_or_old is either the string 'new' or the string 'old'.  If
+        'new', the returned list is used to select commits that are
+        new to the repository.  If 'old', the returned value is used
+        to select the commits that have been discarded from the
+        repository.
+
+        If reference_change is specified and not None, the new or
+        discarded commits are limited to those that are reachable from
+        the new or old value of the specified reference.
+
+        This function returns None if there are no added (or discarded)
+        revisions.
+        """
+        key = (new_or_old, reference_change)
+        if key not in self.__cached_commits_spec:
+            ret = self._get_commits_spec_incl(new_or_old, reference_change)
+            if ret is not None:
+                ret.extend(self._get_commits_spec_excl(new_or_old))
+            self.__cached_commits_spec[key] = ret
+        return self.__cached_commits_spec[key]
+
+    def get_new_commits(self, reference_change=None):
+        """Return a list of commits added by this push.
+
+        Return a list of the object names of commits that were added
+        by the part of this push represented by reference_change.  If
+        reference_change is None, then return a list of *all* commits
+        added by this push."""
+
+        spec = self.get_commits_spec('new', reference_change)
+        return git_rev_list(spec)
+
+    def get_discarded_commits(self, reference_change):
+        """Return a list of commits discarded by this push.
+
+        Return a list of the object names of commits that were
+        entirely discarded from the repository by the part of this
+        push represented by reference_change."""
+
+        spec = self.get_commits_spec('old', reference_change)
+        return git_rev_list(spec)
+
+    def send_emails(self, mailer, body_filter=None):
+        """Send all of the notification emails needed for this push.
+
+        Send all of the notification emails (including reference
+        change emails and commit emails) needed for this push.  Send
+        the emails using mailer.  If body_filter is not None, then use
+        it to filter the lines that are intended for the email
+        body."""
+
+        # The sha1s of commits that were introduced by this push.
+        # They will be removed from this set as they are processed, to
+        # guarantee that one (and only one) email is generated for
+        # each new commit.
+        unhandled_sha1s = set(self.get_new_commits())
+        send_date = IncrementalDateTime()
+        for change in self.changes:
+            sha1s = []
+            for sha1 in reversed(list(self.get_new_commits(change))):
+                if sha1 in unhandled_sha1s:
+                    sha1s.append(sha1)
+                    unhandled_sha1s.remove(sha1)
+
+            # Check if we've got anyone to send to
+            if not change.recipients:
+                change.environment.log_warning(
+                    '*** no recipients configured so no email will be sent\n'
+                    '*** for %r update %s->%s'
+                    % (change.refname, change.old.sha1, change.new.sha1,)
+                    )
+            else:
+                if not change.environment.quiet:
+                    change.environment.log_msg(
+                        'Sending notification emails to: %s' % (change.recipients,))
+                extra_values = {'send_date': next(send_date)}
+
+                rev = change.send_single_combined_email(sha1s)
+                if rev:
+                    mailer.send(
+                        change.generate_combined_email(self, rev, body_filter, extra_values),
+                        rev.recipients,
+                        )
+                    # This change is now fully handled; no need to handle
+                    # individual revisions any further.
+                    continue
+                else:
+                    mailer.send(
+                        change.generate_email(self, body_filter, extra_values),
+                        change.recipients,
+                        )
+
+            max_emails = change.environment.maxcommitemails
+            if max_emails and len(sha1s) > max_emails:
+                change.environment.log_warning(
+                    '*** Too many new commits (%d), not sending commit emails.\n' % len(sha1s) +
+                    '*** Try setting multimailhook.maxCommitEmails to a greater value\n' +
+                    '*** Currently, multimailhook.maxCommitEmails=%d' % max_emails
+                    )
+                return
+
+            for (num, sha1) in enumerate(sha1s):
+                rev = Revision(change, GitObject(sha1), num=num + 1, tot=len(sha1s))
+                if len(rev.parents) > 1 and change.environment.excludemergerevisions:
+                    # skipping a merge commit
+                    continue
+                if not rev.recipients and rev.cc_recipients:
+                    change.environment.log_msg('*** Replacing Cc: with To:')
+                    rev.recipients = rev.cc_recipients
+                    rev.cc_recipients = None
+                if rev.recipients:
+                    extra_values = {'send_date': next(send_date)}
+                    mailer.send(
+                        rev.generate_email(self, body_filter, extra_values),
+                        rev.recipients,
+                        )
+
+        # Consistency check:
+        if unhandled_sha1s:
+            change.environment.log_error(
+                'ERROR: No emails were sent for the following new commits:\n'
+                '    %s'
+                % ('\n    '.join(sorted(unhandled_sha1s)),)
+                )
+
+
+def include_ref(refname, ref_filter_regex, is_inclusion_filter):
+    does_match = bool(ref_filter_regex.search(refname))
+    if is_inclusion_filter:
+        return does_match
+    else:  # exclusion filter -- we include the ref if the regex doesn't match
+        return not does_match
+
+
+def run_as_post_receive_hook(environment, mailer):
+    environment.check()
+    send_filter_regex, send_is_inclusion_filter = environment.get_ref_filter_regex(True)
+    ref_filter_regex, is_inclusion_filter = environment.get_ref_filter_regex(False)
+    changes = []
+    while True:
+        line = read_line(sys.stdin)
+        if line == '':
+            break
+        (oldrev, newrev, refname) = line.strip().split(' ', 2)
+        environment.get_logger().debug(
+            "run_as_post_receive_hook: oldrev=%s, newrev=%s, refname=%s" %
+            (oldrev, newrev, refname))
+
+        if not include_ref(refname, ref_filter_regex, is_inclusion_filter):
+            continue
+        if not include_ref(refname, send_filter_regex, send_is_inclusion_filter):
+            continue
+        changes.append(
+            ReferenceChange.create(environment, oldrev, newrev, refname)
+            )
+    if not changes:
+        mailer.close()
+        return
+    push = Push(environment, changes)
+    try:
+        push.send_emails(mailer, body_filter=environment.filter_body)
+    finally:
+        mailer.close()
+
+
+def run_as_update_hook(environment, mailer, refname, oldrev, newrev, force_send=False):
+    environment.check()
+    send_filter_regex, send_is_inclusion_filter = environment.get_ref_filter_regex(True)
+    ref_filter_regex, is_inclusion_filter = environment.get_ref_filter_regex(False)
+    if not include_ref(refname, ref_filter_regex, is_inclusion_filter):
+        return
+    if not include_ref(refname, send_filter_regex, send_is_inclusion_filter):
+        return
+    changes = [
+        ReferenceChange.create(
+            environment,
+            read_git_output(['rev-parse', '--verify', oldrev]),
+            read_git_output(['rev-parse', '--verify', newrev]),
+            refname,
+            ),
+        ]
+    if not changes:
+        mailer.close()
+        return
+    push = Push(environment, changes, force_send)
+    try:
+        push.send_emails(mailer, body_filter=environment.filter_body)
+    finally:
+        mailer.close()
+
+
+def check_ref_filter(environment):
+    send_filter_regex, send_is_inclusion = environment.get_ref_filter_regex(True)
+    ref_filter_regex, ref_is_inclusion = environment.get_ref_filter_regex(False)
+
+    def inc_exc_lusion(b):
+        if b:
+            return 'inclusion'
+        else:
+            return 'exclusion'
+
+    if send_filter_regex:
+        sys.stdout.write("DoSend/DontSend filter regex (" +
+                         (inc_exc_lusion(send_is_inclusion)) +
+                         '): ' + send_filter_regex.pattern +
+                         '\n')
+    if ref_filter_regex:
+        sys.stdout.write("Include/Exclude filter regex (" +
+                         (inc_exc_lusion(ref_is_inclusion)) +
+                         '): ' + ref_filter_regex.pattern +
+                         '\n')
+    sys.stdout.write(os.linesep)
+
+    sys.stdout.write(
+        "Refs marked as EXCLUDE are excluded by either refFilterInclusionRegex\n"
+        "or refFilterExclusionRegex. No emails will be sent for commits included\n"
+        "in these refs.\n"
+        "Refs marked as DONT-SEND are excluded by either refFilterDoSendRegex or\n"
+        "refFilterDontSendRegex, but not by either refFilterInclusionRegex or\n"
+        "refFilterExclusionRegex. Emails will be sent for commits included in these\n"
+        "refs only when the commit reaches a ref which isn't excluded.\n"
+        "Refs marked as DO-SEND are not excluded by any filter. Emails will\n"
+        "be sent normally for commits included in these refs.\n")
+
+    sys.stdout.write(os.linesep)
+
+    for refname in read_git_lines(['for-each-ref', '--format', '%(refname)']):
+        sys.stdout.write(refname)
+        if not include_ref(refname, ref_filter_regex, ref_is_inclusion):
+            sys.stdout.write(' EXCLUDE')
+        elif not include_ref(refname, send_filter_regex, send_is_inclusion):
+            sys.stdout.write(' DONT-SEND')
+        else:
+            sys.stdout.write(' DO-SEND')
+
+        sys.stdout.write(os.linesep)
+
+
+def show_env(environment, out):
+    out.write('Environment values:\n')
+    for (k, v) in sorted(environment.get_values().items()):
+        if k:  # Don't show the {'' : ''} pair.
+            out.write('    %s : %r\n' % (k, v))
+    out.write('\n')
+    # Flush to avoid interleaving with further log output
+    out.flush()
+
+
+def check_setup(environment):
+    environment.check()
+    show_env(environment, sys.stdout)
+    sys.stdout.write("Now, checking that git-multimail's standard input "
+                     "is properly set ..." + os.linesep)
+    sys.stdout.write("Please type some text and then press Return" + os.linesep)
+    stdin = sys.stdin.readline()
+    sys.stdout.write("You have just entered:" + os.linesep)
+    sys.stdout.write(stdin)
+    sys.stdout.write("git-multimail seems properly set up." + os.linesep)
+
+
+def choose_mailer(config, environment):
+    mailer = config.get('mailer', default='sendmail')
+
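+    # multimailhook.mailer selects the delivery method: 'smtp' talks to an
+    # SMTP server using the smtp* settings below, while 'sendmail' (the
+    # default) hands each message to a sendmail-compatible command.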
+    if mailer == 'smtp':
+        smtpserver = config.get('smtpserver', default='localhost')
+        smtpservertimeout = float(config.get('smtpservertimeout', default=10.0))
+        smtpserverdebuglevel = int(config.get('smtpserverdebuglevel', default=0))
+        smtpencryption = config.get('smtpencryption', default='none')
+        smtpuser = config.get('smtpuser', default='')
+        smtppass = config.get('smtppass', default='')
+        smtpcacerts = config.get('smtpcacerts', default='')
+        mailer = SMTPMailer(
+            environment,
+            envelopesender=(environment.get_sender() or environment.get_fromaddr()),
+            smtpserver=smtpserver, smtpservertimeout=smtpservertimeout,
+            smtpserverdebuglevel=smtpserverdebuglevel,
+            smtpencryption=smtpencryption,
+            smtpuser=smtpuser,
+            smtppass=smtppass,
+            smtpcacerts=smtpcacerts
+            )
+    elif mailer == 'sendmail':
+        command = config.get('sendmailcommand')
+        if command:
+            command = shlex.split(command)
+        mailer = SendMailer(environment,
+                            command=command, envelopesender=environment.get_sender())
+    else:
+        environment.log_error(
+            'fatal: multimailhook.mailer is set to an incorrect value: "%s"\n' % mailer +
+            'please use one of "smtp" or "sendmail".'
+            )
+        sys.exit(1)
+    return mailer
+
+
+KNOWN_ENVIRONMENTS = {
+    'generic': {'highprec': GenericEnvironmentMixin},
+    'gitolite': {'highprec': GitoliteEnvironmentHighPrecMixin,
+                 'lowprec': GitoliteEnvironmentLowPrecMixin},
+    'stash': {'highprec': StashEnvironmentHighPrecMixin,
+              'lowprec': StashEnvironmentLowPrecMixin},
+    'gerrit': {'highprec': GerritEnvironmentHighPrecMixin,
+               'lowprec': GerritEnvironmentLowPrecMixin},
+    }
+
+
+def choose_environment(config, osenv=None, env=None, recipients=None,
+                       hook_info=None):
+    env_name = choose_environment_name(config, env, osenv)
+    environment_klass = build_environment_klass(env_name)
+    env = build_environment(environment_klass, env_name, config,
+                            osenv, recipients, hook_info)
+    return env
+
+
+def choose_environment_name(config, env, osenv):
+    if not osenv:
+        osenv = os.environ
+
+    if not env:
+        env = config.get('environment')
+
+    if not env:
+        if 'GL_USER' in osenv and 'GL_REPO' in osenv:
+            env = 'gitolite'
+        else:
+            env = 'generic'
+    return env
+
+
+COMMON_ENVIRONMENT_MIXINS = [
+    ConfigRecipientsEnvironmentMixin,
+    CLIRecipientsEnvironmentMixin,
+    ConfigRefFilterEnvironmentMixin,
+    ProjectdescEnvironmentMixin,
+    ConfigMaxlinesEnvironmentMixin,
+    ComputeFQDNEnvironmentMixin,
+    ConfigFilterLinesEnvironmentMixin,
+    PusherDomainEnvironmentMixin,
+    ConfigOptionsEnvironmentMixin,
+    ]
+
+
+def build_environment_klass(env_name):
+    if 'class' in KNOWN_ENVIRONMENTS[env_name]:
+        return KNOWN_ENVIRONMENTS[env_name]['class']
+
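+    # Assemble the class from mixins.  Order matters for Python's MRO:
+    # the environment's high-precedence mixin comes first (so it can
+    # override everything), then the common config-driven mixins, then
+    # the low-precedence mixin, and finally the Environment base class.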
+    environment_mixins = []
+    known_env = KNOWN_ENVIRONMENTS[env_name]
+    if 'highprec' in known_env:
+        high_prec_mixin = known_env['highprec']
+        environment_mixins.append(high_prec_mixin)
+    environment_mixins = environment_mixins + COMMON_ENVIRONMENT_MIXINS
+    if 'lowprec' in known_env:
+        low_prec_mixin = known_env['lowprec']
+        environment_mixins.append(low_prec_mixin)
+    environment_mixins.append(Environment)
+    klass_name = env_name.capitalize() + 'Environment'
+    environment_klass = type(
+        klass_name,
+        tuple(environment_mixins),
+        {},
+        )
+    KNOWN_ENVIRONMENTS[env_name]['class'] = environment_klass
+    return environment_klass
+
+
+GerritEnvironment = build_environment_klass('gerrit')
+StashEnvironment = build_environment_klass('stash')
+GitoliteEnvironment = build_environment_klass('gitolite')
+GenericEnvironment = build_environment_klass('generic')
+
+
+def build_environment(environment_klass, env, config,
+                      osenv, recipients, hook_info):
+    environment_kw = {
+        'osenv': osenv,
+        'config': config,
+        }
+
+    if env == 'stash':
+        environment_kw['user'] = hook_info['stash_user']
+        environment_kw['repo'] = hook_info['stash_repo']
+    elif env == 'gerrit':
+        environment_kw['project'] = hook_info['project']
+        environment_kw['submitter'] = hook_info['submitter']
+        environment_kw['update_method'] = hook_info['update_method']
+
+    environment_kw['cli_recipients'] = recipients
+
+    return environment_klass(**environment_kw)
+
+
+def get_version():
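+    """Return the git-multimail version string.
+
+    If `git describe --tags HEAD`, run from this script's directory,
+    differs from __version__, it is appended in parentheses; on any
+    error the plain __version__ is returned."""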
+    oldcwd = os.getcwd()
+    try:
+        try:
+            os.chdir(os.path.dirname(os.path.realpath(__file__)))
+            git_version = read_git_output(['describe', '--tags', 'HEAD'])
+            if git_version == __version__:
+                return git_version
+            else:
+                return '%s (%s)' % (__version__, git_version)
+        except:
+            pass
+    finally:
+        os.chdir(oldcwd)
+    return __version__
+
+
+def compute_gerrit_options(options, args, required_gerrit_options,
+                           raw_refname):
+    if None in required_gerrit_options:
+        raise SystemExit("Error: Specify all of --oldrev, --newrev, --refname, "
+                         "and --project; or none of them.")
+
+    if options.environment not in (None, 'gerrit'):
+        raise SystemExit("Non-gerrit environments incompatible with --oldrev, "
+                         "--newrev, --refname, and --project")
+    options.environment = 'gerrit'
+
+    if args:
+        raise SystemExit("Error: Positional parameters not allowed with "
+                         "--oldrev, --newrev, and --refname.")
+
+    # Gerrit oddly omits 'refs/heads/' in the refname when calling
+    # ref-updated hook; put it back.
+    git_dir = get_git_dir()
+    if (not os.path.exists(os.path.join(git_dir, raw_refname)) and
+        os.path.exists(os.path.join(git_dir, 'refs', 'heads',
+                                    raw_refname))):
+        options.refname = 'refs/heads/' + options.refname
+
+    # New revisions can appear in a gerrit repository either due to someone
+    # pushing directly (in which case options.submitter will be set), or they
+    # can press "Submit this patchset" in the web UI for some CR (in which
+    # case options.submitter will not be set and gerrit will not have provided
+    # us the information about who pressed the button).
+    #
+    # Note for the nit-picky: I'm lumping in REST API calls and the ssh
+    # gerrit review command in with "Submit this patchset" button, since they
+    # have the same effect.
+    if options.submitter:
+        update_method = 'pushed'
+        # The submitter argument is almost an RFC 2822 email address; change it
+        # from 'User Name (email@domain)' to 'User Name <email@domain>' so it is one.
+        options.submitter = options.submitter.replace('(', '<').replace(')', '>')
+    else:
+        update_method = 'submitted'
+        # Gerrit knew who submitted this patchset, but threw that information
+        # away when it invoked this hook.  However, *IF* Gerrit created a
+        # merge to bring the patchset in (project 'Submit Type' is either
+        # "Always Merge", or is "Merge if Necessary" and happens to be
+        # necessary for this particular CR), then it will have the committer
+        # of that merge be 'Gerrit Code Review' and the author will be the
+        # person who requested the submission of the CR.  Since this is fairly
+        # likely for most gerrit installations (of a reasonable size), it's
+        # worth the extra effort to try to determine the actual submitter.
+        rev_info = read_git_lines(['log', '--no-walk', '--merges',
+                                   '--format=%cN%n%aN <%aE>', options.newrev])
+        if rev_info and rev_info[0] == 'Gerrit Code Review':
+            options.submitter = rev_info[1]
+
+    # We pass back refname, oldrev, newrev as args because then the
+    # gerrit ref-updated hook is much like the git update hook
+    return (options,
+            [options.refname, options.oldrev, options.newrev],
+            {'project': options.project, 'submitter': options.submitter,
+             'update_method': update_method})
+
+
+def check_hook_specific_args(options, args):
+    raw_refname = options.refname
+    # Convert each string option to unicode for Python 3.
+    if PYTHON3:
+        opts = ['environment', 'recipients', 'oldrev', 'newrev', 'refname',
+                'project', 'submitter', 'stash_user', 'stash_repo']
+        for opt in opts:
+            if not hasattr(options, opt):
+                continue
+            obj = getattr(options, opt)
+            if obj:
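+                # Re-encode with surrogateescape (undoing Python 3's
+                # command-line decoding) and decode with 'replace' so any
+                # undecodable bytes become U+FFFD instead of surrogates.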
+                enc = obj.encode('utf-8', 'surrogateescape')
+                dec = enc.decode('utf-8', 'replace')
+                setattr(options, opt, dec)
+
+    # First check for stash arguments
+    if (options.stash_user is None) != (options.stash_repo is None):
+        raise SystemExit("Error: Specify both of --stash-user and "
+                         "--stash-repo or neither.")
+    if options.stash_user:
+        options.environment = 'stash'
+        return options, args, {'stash_user': options.stash_user,
+                               'stash_repo': options.stash_repo}
+
+    # Finally, check for gerrit specific arguments
+    required_gerrit_options = (options.oldrev, options.newrev, options.refname,
+                               options.project)
+    if required_gerrit_options != (None,) * 4:
+        return compute_gerrit_options(options, args, required_gerrit_options,
+                                      raw_refname)
+
+    # No special options in use, just return what we started with
+    return options, args, {}
+
+
+class Logger(object):
+    def parse_verbose(self, verbose):
+        if verbose > 0:
+            return logging.DEBUG
+        else:
+            return logging.INFO
+
+    def create_log_file(self, environment, name, path, verbosity):
+        log_file = logging.getLogger(name)
+        file_handler = logging.FileHandler(path)
+        log_fmt = logging.Formatter("%(asctime)s [%(levelname)-5.5s]  %(message)s")
+        file_handler.setFormatter(log_fmt)
+        log_file.addHandler(file_handler)
+        log_file.setLevel(verbosity)
+        return log_file
+
+    def __init__(self, environment):
+        self.environment = environment
+        self.loggers = []
+        stderr_log = logging.getLogger('git_multimail.stderr')
+
+        class EncodedStderr(object):
+            def write(self, x):
+                write_str(sys.stderr, x)
+
+            def flush(self):
+                sys.stderr.flush()
+
+        stderr_handler = logging.StreamHandler(EncodedStderr())
+        stderr_log.addHandler(stderr_handler)
+        stderr_log.setLevel(self.parse_verbose(environment.verbose))
+        self.loggers.append(stderr_log)
+
+        if environment.debug_log_file is not None:
+            debug_log_file = self.create_log_file(
+                environment, 'git_multimail.debug', environment.debug_log_file, logging.DEBUG)
+            self.loggers.append(debug_log_file)
+
+        if environment.log_file is not None:
+            log_file = self.create_log_file(
+                environment, 'git_multimail.file', environment.log_file, logging.INFO)
+            self.loggers.append(log_file)
+
+        if environment.error_log_file is not None:
+            error_log_file = self.create_log_file(
+                environment, 'git_multimail.error', environment.error_log_file, logging.ERROR)
+            self.loggers.append(error_log_file)
+
+    def info(self, msg, *args, **kwargs):
+        for l in self.loggers:
+            l.info(msg, *args, **kwargs)
+
+    def debug(self, msg, *args, **kwargs):
+        for l in self.loggers:
+            l.debug(msg, *args, **kwargs)
+
+    def warning(self, msg, *args, **kwargs):
+        for l in self.loggers:
+            l.warning(msg, *args, **kwargs)
+
+    def error(self, msg, *args, **kwargs):
+        for l in self.loggers:
+            l.error(msg, *args, **kwargs)
+
+
+def main(args):
+    parser = optparse.OptionParser(
+        description=__doc__,
+        usage='%prog [OPTIONS]\n   or: %prog [OPTIONS] REFNAME OLDREV NEWREV',
+        )
+
+    parser.add_option(
+        '--environment', '--env', action='store', type='choice',
+        choices=list(KNOWN_ENVIRONMENTS.keys()), default=None,
+        help=(
+            'Choose the type of environment in use.  Default is taken from '
+            'multimailhook.environment if set; otherwise "generic".'
+            ),
+        )
+    parser.add_option(
+        '--stdout', action='store_true', default=False,
+        help='Output emails to stdout rather than sending them.',
+        )
+    parser.add_option(
+        '--recipients', action='store', default=None,
+        help='Set list of email recipients for all types of emails.',
+        )
+    parser.add_option(
+        '--show-env', action='store_true', default=False,
+        help=(
+            'Write to stderr the values determined for the environment '
+            '(intended for debugging purposes), then proceed normally.'
+            ),
+        )
+    parser.add_option(
+        '--force-send', action='store_true', default=False,
+        help=(
+            'Force sending refchange email when using as an update hook. '
+            'This is useful to work around the unreliable detection of new '
+            'commits in this mode.'
+            ),
+        )
+    parser.add_option(
+        '-c', metavar="<name>=<value>", action='append',
+        help=(
+            'Pass a configuration parameter through to git.  The value given '
+            'will override values from configuration files.  See the -c option '
+            'of git(1) for more details.  (Only works with git >= 1.7.3)'
+            ),
+        )
+    parser.add_option(
+        '--version', '-v', action='store_true', default=False,
+        help=(
+            "Display git-multimail's version"
+            ),
+        )
+
+    parser.add_option(
+        '--python-version', action='store_true', default=False,
+        help=(
+            "Display the version of Python used by git-multimail"
+            ),
+        )
+
+    parser.add_option(
+        '--check-ref-filter', action='store_true', default=False,
+        help=(
+            'List refs and show information on how git-multimail '
+            'will process them.'
+            )
+        )
+
+    # The following options permit this script to be run as a gerrit
+    # ref-updated hook.  See e.g.
+    # code.google.com/p/gerrit/source/browse/Documentation/config-hooks.txt
+    # We suppress help for these options, since they are specific to gerrit
+    # and we don't want users invoking them in any way other than how the
+    # gerrit ref-updated hook calls them.
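+    # A gerrit invocation of this script looks roughly like the following
+    # (values are placeholders; the exact flags depend on the gerrit version):
+    #
+    #   git_multimail.py --oldrev <oldrev> --newrev <newrev> \
+    #       --refname <refname> --project <project> --submitter <submitter>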
+    parser.add_option('--oldrev', action='store', help=optparse.SUPPRESS_HELP)
+    parser.add_option('--newrev', action='store', help=optparse.SUPPRESS_HELP)
+    parser.add_option('--refname', action='store', help=optparse.SUPPRESS_HELP)
+    parser.add_option('--project', action='store', help=optparse.SUPPRESS_HELP)
+    parser.add_option('--submitter', action='store', help=optparse.SUPPRESS_HELP)
+
+    # The following options allow this script to be run as a stash asynchronous
+    # post-receive hook (almost identical to a git post-receive hook, but also
+    # triggered for merges of pull requests from the UI).  We suppress help for
+    # these options, since they are specific to stash.
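+    # A hypothetical stash invocation looks roughly like the following (values
+    # are placeholders); the ref updates themselves are still read from stdin:
+    #
+    #   git_multimail.py --stash-user <user> --stash-repo <repo>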
+    parser.add_option('--stash-user', action='store', help=optparse.SUPPRESS_HELP)
+    parser.add_option('--stash-repo', action='store', help=optparse.SUPPRESS_HELP)
+
+    (options, args) = parser.parse_args(args)
+    (options, args, hook_info) = check_hook_specific_args(options, args)
+
+    if options.version:
+        sys.stdout.write('git-multimail version ' + get_version() + '\n')
+        return
+
+    if options.python_version:
+        sys.stdout.write('Python version ' + sys.version + '\n')
+        return
+
+    if options.c:
+        Config.add_config_parameters(options.c)
+
+    config = Config('multimailhook')
+
+    environment = None
+    try:
+        environment = choose_environment(
+            config, osenv=os.environ,
+            env=options.environment,
+            recipients=options.recipients,
+            hook_info=hook_info,
+            )
+
+        if options.show_env:
+            show_env(environment, sys.stderr)
+
+        if options.stdout or environment.stdout:
+            mailer = OutputMailer(sys.stdout, environment)
+        else:
+            mailer = choose_mailer(config, environment)
+
+        must_check_setup = os.environ.get('GIT_MULTIMAIL_CHECK_SETUP')
+        if must_check_setup == '':
+            must_check_setup = False
+        if options.check_ref_filter:
+            check_ref_filter(environment)
+        elif must_check_setup:
+            check_setup(environment)
+        # Dual mode: if arguments were specified on the command line, run
+        # like an update hook; otherwise, run as a post-receive hook.
+        elif args:
+            if len(args) != 3:
+                parser.error('Need zero or three non-option arguments')
+            (refname, oldrev, newrev) = args
+            environment.get_logger().debug(
+                "run_as_update_hook: refname=%s, oldrev=%s, newrev=%s, force_send=%s" %
+                (refname, oldrev, newrev, options.force_send))
+            run_as_update_hook(environment, mailer, refname, oldrev, newrev, options.force_send)
+        else:
+            run_as_post_receive_hook(environment, mailer)
+    except ConfigurationException:
+        sys.exit(sys.exc_info()[1])
+    except SystemExit:
+        raise
+    except Exception:
+        t, e, tb = sys.exc_info()
+        import traceback
+        sys.stderr.write('\n')  # Avoid mixing message with previous output
+        msg = (
+            'Exception \'' + t.__name__ +
+            '\' raised. Please report this as a bug to\n'
+            'https://github.com/git-multimail/git-multimail/issues\n'
+            'with the information below:\n\n'
+            'git-multimail version ' + get_version() + '\n'
+            'Python version ' + sys.version + '\n' +
+            traceback.format_exc())
+        try:
+            environment.get_logger().error(msg)
+        except:
+            sys.stderr.write(msg)
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/third_party/git/contrib/hooks/multimail/migrate-mailhook-config b/third_party/git/contrib/hooks/multimail/migrate-mailhook-config
new file mode 100755
index 000000000000..241ba22fa3c8
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/migrate-mailhook-config
@@ -0,0 +1,274 @@
+#! /usr/bin/env python
+
+"""Migrate a post-receive-email configuration to be usable with git_multimail.py.
+
+See README.migrate-from-post-receive-email for more information.
+
+"""
+
+import sys
+import optparse
+
+from git_multimail import CommandError
+from git_multimail import Config
+from git_multimail import read_output
+
+
+OLD_NAMES = [
+    'mailinglist',
+    'announcelist',
+    'envelopesender',
+    'emailprefix',
+    'showrev',
+    'emailmaxlines',
+    'diffopts',
+    'scancommitforcc',
+    ]
+
+NEW_NAMES = [
+    'environment',
+    'reponame',
+    'mailinglist',
+    'refchangelist',
+    'commitlist',
+    'announcelist',
+    'announceshortlog',
+    'envelopesender',
+    'administrator',
+    'emailprefix',
+    'emailmaxlines',
+    'diffopts',
+    'emaildomain',
+    'scancommitforcc',
+    ]
+
+
+INFO = """\
+
+SUCCESS!
+
+Your post-receive-email configuration has been converted to
+git-multimail format.  Please see README and
+README.migrate-from-post-receive-email to learn about other
+git-multimail configuration possibilities.
+
+For example, git-multimail has the following new options with no
+equivalent in post-receive-email.  You might want to read about them
+to see if they would be useful in your situation:
+
+"""
+
+
+def _check_old_config_exists(old):
+    """Check that at least one old configuration value is set."""
+
+    for name in OLD_NAMES:
+        if name in old:
+            return True
+
+    return False
+
+
+def _check_new_config_clear(new):
+    """Check that none of the new configuration names are set."""
+
+    retval = True
+    for name in NEW_NAMES:
+        if name in new:
+            if retval:
+                sys.stderr.write('INFO: The following configuration values already exist:\n\n')
+            sys.stderr.write('    "%s.%s"\n' % (new.section, name))
+            retval = False
+
+    return retval
+
+
+def erase_values(config, names):
+    for name in names:
+        if name in config:
+            try:
+                sys.stderr.write('...unsetting "%s.%s"\n' % (config.section, name))
+                config.unset_all(name)
+            except CommandError:
+                sys.stderr.write(
+                    '\nWARNING: could not unset "%s.%s".  '
+                    'Perhaps it is not set at the --local level?\n\n'
+                    % (config.section, name)
+                    )
+
+
+def is_section_empty(section, local):
+    """Return True iff the specified configuration section is empty.
+
+    If local is True, use the --local option when invoking 'git
+    config'."""
+
+    if local:
+        local_option = ['--local']
+    else:
+        local_option = []
+
+    try:
+        read_output(
+            ['git', 'config'] +
+            local_option +
+            ['--get-regexp', '^%s\.' % (section,)]
+            )
+    except CommandError:
+        t, e, traceback = sys.exc_info()
+        if e.retcode == 1:
+            # This means that no settings were found.
+            return True
+        else:
+            raise
+    else:
+        return False
+
+
+def remove_section_if_empty(section):
+    """If the specified configuration section is empty, delete it."""
+
+    try:
+        empty = is_section_empty(section, local=True)
+    except CommandError:
+        # Older versions of git do not support the --local option, so
+        # if the first attempt fails, try without --local.
+        try:
+            empty = is_section_empty(section, local=False)
+        except CommandError:
+            sys.stderr.write(
+                '\nINFO: If configuration section "%s.*" is empty, you might want '
+                'to delete it.\n\n'
+                % (section,)
+                )
+            return
+
+    if empty:
+        sys.stderr.write('...removing section "%s.*"\n' % (section,))
+        read_output(['git', 'config', '--remove-section', section])
+    else:
+        sys.stderr.write(
+            '\nINFO: Configuration section "%s.*" still has contents.  '
+            'It will not be deleted.\n\n'
+            % (section,)
+            )
+
+
+def migrate_config(strict=False, retain=False, overwrite=False):
+    old = Config('hooks')
+    new = Config('multimailhook')
+    if not _check_old_config_exists(old):
+        sys.exit(
+            'Your repository has no post-receive-email configuration.  '
+            'Nothing to do.'
+            )
+    if not _check_new_config_clear(new):
+        if overwrite:
+            sys.stderr.write('\nWARNING: Erasing the above values...\n\n')
+            erase_values(new, NEW_NAMES)
+        else:
+            sys.exit(
+                '\nERROR: Refusing to overwrite existing values.  Use the --overwrite\n'
+                'option to continue anyway.'
+                )
+
+    name = 'showrev'
+    if name in old:
+        msg = 'git-multimail does not support "%s.%s"' % (old.section, name,)
+        if strict:
+            sys.exit(
+                'ERROR: %s.\n'
+                'Please unset that value then try again, or run without --strict.'
+                % (msg,)
+                )
+        else:
+            sys.stderr.write('\nWARNING: %s (ignoring).\n\n' % (msg,))
+
+    for name in ['mailinglist', 'announcelist']:
+        if name in old:
+            sys.stderr.write(
+                '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+                )
+            old_recipients = old.get_all(name, default=None)
+            old_recipients = ', '.join(o.strip() for o in old_recipients)
+            new.set_recipients(name, old_recipients)
+
+    if strict:
+        sys.stderr.write(
+            '...setting "%s.commitlist" to the empty string\n' % (new.section,)
+            )
+        new.set_recipients('commitlist', '')
+        sys.stderr.write(
+            '...setting "%s.announceshortlog" to "true"\n' % (new.section,)
+            )
+        new.set('announceshortlog', 'true')
+
+    for name in ['envelopesender', 'emailmaxlines', 'diffopts', 'scancommitforcc']:
+        if name in old:
+            sys.stderr.write(
+                '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+                )
+            new.set(name, old.get(name))
+
+    name = 'emailprefix'
+    if name in old:
+        sys.stderr.write(
+            '...copying "%s.%s" to "%s.%s"\n' % (old.section, name, new.section, name)
+            )
+        new.set(name, old.get(name))
+    elif strict:
+        sys.stderr.write(
+            '...setting "%s.%s" to "[SCM]" to preserve old subject lines\n'
+            % (new.section, name)
+            )
+        new.set(name, '[SCM]')
+
+    if not retain:
+        erase_values(old, OLD_NAMES)
+        remove_section_if_empty(old.section)
+
+    sys.stderr.write(INFO)
+    for name in NEW_NAMES:
+        if name not in OLD_NAMES:
+            sys.stderr.write('    "%s.%s"\n' % (new.section, name,))
+    sys.stderr.write('\n')
+
+
+def main(args):
+    parser = optparse.OptionParser(
+        description=__doc__,
+        usage='%prog [OPTIONS]',
+        )
+
+    parser.add_option(
+        '--strict', action='store_true', default=False,
+        help=(
+            'Slavishly configure git-multimail as closely as possible to '
+            'the post-receive-email configuration.  Default is to turn '
+            'on some new features that have no equivalent in post-receive-email.'
+            ),
+        )
+    parser.add_option(
+        '--retain', action='store_true', default=False,
+        help=(
+            'Retain the post-receive-email configuration values.  '
+            'Default is to delete them after the new values are set.'
+            ),
+        )
+    parser.add_option(
+        '--overwrite', action='store_true', default=False,
+        help=(
+            'Overwrite any existing git-multimail configuration settings.  '
+            'Default is to abort if such settings already exist.'
+            ),
+        )
+
+    (options, args) = parser.parse_args(args)
+
+    if args:
+        parser.error('Unexpected arguments: %s' % (' '.join(args),))
+
+    migrate_config(strict=options.strict, retain=options.retain, overwrite=options.overwrite)
+
+
+main(sys.argv[1:])
diff --git a/third_party/git/contrib/hooks/multimail/post-receive.example b/third_party/git/contrib/hooks/multimail/post-receive.example
new file mode 100755
index 000000000000..0f98c5a23db1
--- /dev/null
+++ b/third_party/git/contrib/hooks/multimail/post-receive.example
@@ -0,0 +1,101 @@
+#! /usr/bin/env python
+
+"""Example post-receive hook based on git-multimail.
+
+The simplest way to use git-multimail is to use the script
+git_multimail.py directly as a post-receive hook, and to configure it
+using Git's configuration files and command-line parameters.  You can
+also write your own Python wrapper for more advanced configurability,
+using git_multimail.py as a Python module.
+
+This script is a simple example of such a post-receive hook.  It is
+intended to be customized before use; see the comments in the script
+to help you get started.
+
+Using git-multimail as a Python module as done here provides more
+flexibility.  It has the following advantages:
+
+* The tool's behavior can be customized using arbitrary Python code,
+  without having to edit git_multimail.py.
+
+* Configuration settings can be read from other sources; for example,
+  user names and email addresses could be read from LDAP or from a
+  database.  Or the settings can even be hardcoded in the importing
+  Python script, if this is preferred.
+
+This script is a very basic example of how to use git_multimail.py as
+a module.  The comments below explain some of the points at which the
+script's behavior could be changed or customized.
+
+"""
+
+import sys
+
+# If necessary, add the path to the directory containing
+# git_multimail.py to the Python path as follows.  (This is not
+# necessary if git_multimail.py is in the same directory as this
+# script):
+
+#LIBDIR = 'path/to/directory/containing/module'
+#sys.path.insert(0, LIBDIR)
+
+import git_multimail
+
+# It is possible to modify the output templates here; e.g.:
+
+#git_multimail.FOOTER_TEMPLATE = """\
+#
+#-- \n\
+#This email was generated by the wonderful git-multimail tool.
+#"""
+
+
+# Specify which "git config" section contains the configuration for
+# git-multimail:
+config = git_multimail.Config('multimailhook')
+
+# Set some Git configuration variables. Equivalent to passing var=val
+# to "git -c var=val" each time git is called, or to adding the
+# configuration in .git/config (must come before instantiating the
+# environment):
+#git_multimail.Config.add_config_parameters('multimailhook.commitEmailFormat=html')
+#git_multimail.Config.add_config_parameters(('user.name=foo', 'user.email=foo@example.com'))
+
+# Select the type of environment:
+try:
+    environment = git_multimail.GenericEnvironment(config=config)
+    #environment = git_multimail.GitoliteEnvironment(config=config)
+except git_multimail.ConfigurationException:
+    sys.stderr.write('*** %s\n' % sys.exc_info()[1])
+    sys.exit(1)
+
+
+# Choose the method of sending emails based on the git config:
+mailer = git_multimail.choose_mailer(config, environment)
+
+# Alternatively, you may hardcode the mailer using code like one of
+# the following:
+
+# Use "/usr/sbin/sendmail -oi -t" to send emails.  The envelopesender
+# argument is optional:
+#mailer = git_multimail.SendMailer(
+#    command=['/usr/sbin/sendmail', '-oi', '-t'],
+#    envelopesender='git-repo@example.com',
+#    )
+
+# Use Python's smtplib to send emails.  Both arguments are required.
+#mailer = git_multimail.SMTPMailer(
+#    environment=environment,
+#    envelopesender='git-repo@example.com',
+#    # The smtpserver argument can also include a port number; e.g.,
+#    #     smtpserver='mail.example.com:25'
+#    smtpserver='mail.example.com',
+#    )
+
+# OutputMailer is intended only for testing; it writes the emails to
+# the specified file stream.
+#mailer = git_multimail.OutputMailer(sys.stdout)
+
+
+# Read changes from stdin and send notification emails:
+git_multimail.run_as_post_receive_hook(environment, mailer)
diff --git a/third_party/git/contrib/hooks/post-receive-email b/third_party/git/contrib/hooks/post-receive-email
new file mode 100755
index 000000000000..ff565eb3d881
--- /dev/null
+++ b/third_party/git/contrib/hooks/post-receive-email
@@ -0,0 +1,759 @@
+#!/bin/sh
+#
+# Copyright (c) 2007 Andy Parkins
+#
+# An example hook script to mail out commit update information.
+#
+# NOTE: This script is no longer under active development.  There
+# is another script, git-multimail, which is more capable and
+# configurable and is largely backwards-compatible with this script;
+# please see "contrib/hooks/multimail/".  For instructions on how to
+# migrate from post-receive-email to git-multimail, please see
+# "README.migrate-from-post-receive-email" in that directory.
+#
+# This hook sends emails listing new revisions to the repository
+# introduced by the change being reported.  The rule is that (for
+# branch updates) each commit will appear on one email and one email
+# only.
+#
+# This hook is stored in the contrib/hooks directory.  Your distribution
+# will have put this somewhere standard.  You should make this script
+# executable then link to it in the repository you would like to use it in.
+# For example, on debian the hook is stored in
+# /usr/share/git-core/contrib/hooks/post-receive-email:
+#
+#  cd /path/to/your/repository.git
+#  ln -sf /usr/share/git-core/contrib/hooks/post-receive-email hooks/post-receive
+#
+# This hook script assumes it is enabled on the central repository of a
+# project, with all users pushing only to it and not between each other.  It
+# will still work if you don't operate in that style, but it would become
+# possible for the email to be from someone other than the person doing the
+# push.
+#
+# To help with debugging and use on pre-v1.5.1 git servers, this script will
+# also obey the interface of hooks/update, taking its arguments on the
+# command line.  Unfortunately, hooks/update is called once for each ref.
+# To avoid firing one email per ref, this script just prints its output to
+# the screen when used in this mode.  The output can then be redirected if
+# wanted.
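+#
+# For example, to regenerate the email for a single ref update by hand
+# (the refname and revisions below are placeholders):
+#
+#  ./post-receive-email refs/heads/master <oldrev> <newrev>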
+#
+# Config
+# ------
+# hooks.mailinglist
+#   This is the list that all pushes will go to; leave it blank to not send
+#   emails for every ref update.
+# hooks.announcelist
+#   This is the list that all pushes of annotated tags will go to.  Leave it
+#   blank to default to the mailinglist field.  The announce emails list
+#   the short log summary of the changes since the last annotated tag.
+# hooks.envelopesender
+#   If set then the -f option is passed to sendmail to allow the envelope
+#   sender address to be set
+# hooks.emailprefix
+#   All emails have their subjects prefixed with this prefix, or "[SCM]"
+#   if emailprefix is unset, to aid filtering
+# hooks.showrev
+#   The shell command used to format each revision in the email, with
+#   "%s" replaced with the commit id.  Defaults to "git rev-list -1
+#   --pretty %s", displaying the commit id, author, date and log
+#   message.  To list full patches separated by a blank line, you
+#   could set this to "git show -C %s; echo".
+#   To list a gitweb/cgit URL *and* a full patch for each change set, use this:
+#     "t=%s; printf 'http://.../?id=%%s' \$t; echo;echo; git show -C \$t; echo"
+#   Be careful if "..." contains things that will be expanded by shell "eval"
+#   or printf.
+# hooks.emailmaxlines
+#   The maximum number of lines that should be included in the generated
+#   email body. If not specified, there is no limit.
+#   Lines beyond the limit are suppressed and counted, and a final
+#   line is added indicating the number of suppressed lines.
+# hooks.diffopts
+#   Alternate options for the git diff-tree invocation that shows changes.
+#   Default is "--stat --summary --find-copies-harder". Add -p to those
+#   options to include a unified diff of changes in addition to the usual
+#   summary output.
+#
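+# For instance, a minimal setup could look like this (addresses and prefix
+# are illustrative):
+#
+#   git config hooks.mailinglist "commits@example.com"
+#   git config hooks.emailprefix "[myproject] "
+#   git config hooks.diffopts "--stat --summary --find-copies-harder -p"
+#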
+# Notes
+# -----
+# All emails include the headers "X-Git-Refname", "X-Git-Oldrev",
+# "X-Git-Newrev", and "X-Git-Reftype" to enable fine tuned filtering and
+# give information for debugging.
+#
+
+# ---------------------------- Functions
+
+#
+# Function to prepare for email generation. This decides what type
+# of update this is and whether an email should even be generated.
+#
+prep_for_email()
+{
+	# --- Arguments
+	oldrev=$(git rev-parse $1)
+	newrev=$(git rev-parse $2)
+	refname="$3"
+
+	# --- Interpret
+	# 0000->1234 (create)
+	# 1234->2345 (update)
+	# 2345->0000 (delete)
+	if expr "$oldrev" : '0*$' >/dev/null
+	then
+		change_type="create"
+	else
+		if expr "$newrev" : '0*$' >/dev/null
+		then
+			change_type="delete"
+		else
+			change_type="update"
+		fi
+	fi
+
+	# --- Get the revision types
+	newrev_type=$(git cat-file -t $newrev 2> /dev/null)
+	oldrev_type=$(git cat-file -t "$oldrev" 2> /dev/null)
+	case "$change_type" in
+	create|update)
+		rev="$newrev"
+		rev_type="$newrev_type"
+		;;
+	delete)
+		rev="$oldrev"
+		rev_type="$oldrev_type"
+		;;
+	esac
+
+	# The revision type tells us what type of object the new rev is;
+	# combined with the location of the ref we can decide between
+	#  - working branch
+	#  - tracking branch
+	#  - unannotated tag
+	#  - annotated tag
+	case "$refname","$rev_type" in
+		refs/tags/*,commit)
+			# un-annotated tag
+			refname_type="tag"
+			short_refname=${refname##refs/tags/}
+			;;
+		refs/tags/*,tag)
+			# annotated tag
+			refname_type="annotated tag"
+			short_refname=${refname##refs/tags/}
+			# change recipients
+			if [ -n "$announcerecipients" ]; then
+				recipients="$announcerecipients"
+			fi
+			;;
+		refs/heads/*,commit)
+			# branch
+			refname_type="branch"
+			short_refname=${refname##refs/heads/}
+			;;
+		refs/remotes/*,commit)
+			# tracking branch
+			refname_type="tracking branch"
+			short_refname=${refname##refs/remotes/}
+			echo >&2 "*** Push-update of tracking branch, $refname"
+			echo >&2 "***  - no email generated."
+			return 1
+			;;
+		*)
+			# Anything else (is there anything else?)
+			echo >&2 "*** Unknown type of update to $refname ($rev_type)"
+			echo >&2 "***  - no email generated"
+			return 1
+			;;
+	esac
+
+	# Check if we've got anyone to send to
+	if [ -z "$recipients" ]; then
+		case "$refname_type" in
+			"annotated tag")
+				config_name="hooks.announcelist"
+				;;
+			*)
+				config_name="hooks.mailinglist"
+				;;
+		esac
+		echo >&2 "*** $config_name is not set so no email will be sent"
+		echo >&2 "*** for $refname update $oldrev->$newrev"
+		return 1
+	fi
+
+	return 0
+}
+
+#
+# Top level email generation function.  This calls the appropriate
+# body-generation routine after outputting the common header.
+#
+# Note this function doesn't actually generate any email output; that is
+# taken care of by the functions it calls:
+#  - generate_email_header
+#  - generate_create_XXXX_email
+#  - generate_update_XXXX_email
+#  - generate_delete_XXXX_email
+#  - generate_email_footer
+#
+# Note also that this function cannot 'exit' from the script; when this
+# function is running (in hook script mode), the send_mail() function
+# is already executing in another process, connected via a pipe, and
+# if this function exits early, whatever has been generated to that
+# point will be sent as an email... even if nothing has been generated.
+#
+generate_email()
+{
+	# Email parameters
+	# The email subject will contain the best description of the ref
+	# that we can build from the parameters
+	describe=$(git describe $rev 2>/dev/null)
+	if [ -z "$describe" ]; then
+		describe=$rev
+	fi
+
+	generate_email_header
+
+	# Call the correct body generation function
+	fn_name=general
+	case "$refname_type" in
+	"tracking branch"|branch)
+		fn_name=branch
+		;;
+	"annotated tag")
+		fn_name=atag
+		;;
+	esac
+
+	if [ -z "$maxlines" ]; then
+		generate_${change_type}_${fn_name}_email
+	else
+		generate_${change_type}_${fn_name}_email | limit_lines $maxlines
+	fi
+
+	generate_email_footer
+}
+
+generate_email_header()
+{
+	# --- Email (all stdout will be the email)
+	# Generate header
+	cat <<-EOF
+	To: $recipients
+	Subject: ${emailprefix}$projectdesc $refname_type $short_refname ${change_type}d. $describe
+	MIME-Version: 1.0
+	Content-Type: text/plain; charset=utf-8
+	Content-Transfer-Encoding: 8bit
+	X-Git-Refname: $refname
+	X-Git-Reftype: $refname_type
+	X-Git-Oldrev: $oldrev
+	X-Git-Newrev: $newrev
+	Auto-Submitted: auto-generated
+
+	This is an automated email from the git hooks/post-receive script. It was
+	generated because a ref change was pushed to the repository containing
+	the project "$projectdesc".
+
+	The $refname_type, $short_refname has been ${change_type}d
+	EOF
+}
+
+generate_email_footer()
+{
+	SPACE=" "
+	cat <<-EOF
+
+
+	hooks/post-receive
+	--${SPACE}
+	$projectdesc
+	EOF
+}
+
+# --------------- Branches
+
+#
+# Called for the creation of a branch
+#
+generate_create_branch_email()
+{
+	# This is a new branch and so oldrev is not valid
+	echo "        at  $newrev ($newrev_type)"
+	echo ""
+
+	echo $LOGBEGIN
+	show_new_revisions
+	echo $LOGEND
+}
+
+#
+# Called for the change of a pre-existing branch
+#
+generate_update_branch_email()
+{
+	# Consider this:
+	#   1 --- 2 --- O --- X --- 3 --- 4 --- N
+	#
+	# O is $oldrev for $refname
+	# N is $newrev for $refname
+	# X is a revision pointed to by some other ref, for which we may
+	#   assume that an email has already been generated.
+	# In this case we want to issue an email containing only revisions
+	# 3, 4, and N.  Given (almost) by
+	#
+	#  git rev-list N ^O --not --all
+	#
+	# The reason for the "almost" is that the "--not --all" will take
+	# precedence over the "N", and effectively will translate to
+	#
+	#  git rev-list N ^O ^X ^N
+	#
+	# So, we need to build up the list more carefully.  git rev-parse
+	# will generate a list of revs that may be fed into git rev-list.
+	# We can get it to make the "--not --all" part and then filter out
+	# the "^N" with:
+	#
+	#  git rev-parse --not --all | grep -v N
+	#
+	# Then, using the --stdin switch to git rev-list we have effectively
+	# manufactured
+	#
+	#  git rev-list N ^O ^X
+	#
+	# This leaves a problem when someone else updates the repository
+	# while this script is running.  Their new value of the ref we're
+	# working on would be included in the "--not --all" output; and as
+	# our $newrev would be an ancestor of that commit, it would exclude
+	# all of our commits.  What we really want is to exclude the current
+	# value of $refname from the --not list, rather than N itself.  So:
+	#
+	#  git rev-parse --not --all | grep -v $(git rev-parse $refname)
+	#
+	# Gets us to something pretty safe (apart from the small time
+	# between refname being read, and git rev-parse running - for that,
+	# I give up)
+	#
+	#
+	# Next problem, consider this:
+	#   * --- B --- * --- O ($oldrev)
+	#          \
+	#           * --- X --- * --- N ($newrev)
+	#
+	# That is to say, there is no guarantee that oldrev is a strict
+	# subset of newrev (it would have required a --force, but that's
+	# allowed).  So, we can't simply say rev-list $oldrev..$newrev.
+	# Instead we find the common base of the two revs and list from
+	# there.
+	#
+	# As above, we need to take into account the presence of X; if
+	# another branch is already in the repository and points at some of
+	# the revisions that we are about to output - we don't want them.
+	# The solution is as before: git rev-parse output filtered.
+	#
+	# Finally, tags: 1 --- 2 --- O --- T --- 3 --- 4 --- N
+	#
+	# Tags pushed into the repository generate nice shortlog emails that
+	# summarise the commits between them and the previous tag.  However,
+	# those emails don't include the full commit messages that we output
+	# for a branch update.  Therefore we still want to output revisions
+	# that have been output on a tag email.
+	#
+	# Luckily, git rev-parse includes just the tool.  Instead of using
+	# "--all" we use "--branches"; this has the added benefit that
+	# "remotes/" will be ignored as well.
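+	#
+	# Putting all of the above together, the effective command (see
+	# show_new_revisions below) is roughly:
+	#
+	#  git rev-parse --not $(git for-each-ref --format='%(refname)' \
+	#      refs/heads/ | grep -F -v $refname) |
+	#  git rev-list --pretty --stdin $oldrev..$newrev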
+
+	# List all of the revisions that were removed by this update.  In a
+	# fast-forward update this list will be empty, because rev-list O
+	# ^N is empty.  For a non-fast-forward, O ^N is the list of removed
+	# revisions
+	fast_forward=""
+	rev=""
+	for rev in $(git rev-list $newrev..$oldrev)
+	do
+		revtype=$(git cat-file -t "$rev")
+		echo "  discards  $rev ($revtype)"
+	done
+	if [ -z "$rev" ]; then
+		fast_forward=1
+	fi
+
+	# List all the revisions from baserev to newrev in a kind of
+	# "table-of-contents"; note this list can include revisions that
+	# have already had notification emails and is present to show the
+	# full detail of the change from rolling back the old revision to
+	# the base revision and then forward to the new revision
+	for rev in $(git rev-list $oldrev..$newrev)
+	do
+		revtype=$(git cat-file -t "$rev")
+		echo "       via  $rev ($revtype)"
+	done
+
+	if [ "$fast_forward" ]; then
+		echo "      from  $oldrev ($oldrev_type)"
+	else
+		#  1. Existing revisions were removed.  In this case newrev
+		#     is a subset of oldrev - this is the reverse of a
+		#     fast-forward, a rewind
+		#  2. New revisions were added on top of an old revision,
+		#     this is a rewind and addition.
+
+		# (1) certainly happened, (2) possibly.  When (2) hasn't
+		# happened, we set a flag to indicate that no log printout
+		# is required.
+
+		echo ""
+
+		# Find the common ancestor of the old and new revisions and
+		# compare it with newrev
+		baserev=$(git merge-base $oldrev $newrev)
+		rewind_only=""
+		if [ "$baserev" = "$newrev" ]; then
+			echo "This update discarded existing revisions and left the branch pointing at"
+			echo "a previous point in the repository history."
+			echo ""
+			echo " * -- * -- N ($newrev)"
+			echo "            \\"
+			echo "             O -- O -- O ($oldrev)"
+			echo ""
+			echo "The removed revisions are not necessarily gone - if another reference"
+			echo "still refers to them they will stay in the repository."
+			rewind_only=1
+		else
+			echo "This update added new revisions after undoing existing revisions.  That is"
+			echo "to say, the old revision is not a strict subset of the new revision.  This"
+			echo "situation occurs when you --force push a change and generate a repository"
+			echo "containing something like this:"
+			echo ""
+			echo " * -- * -- B -- O -- O -- O ($oldrev)"
+			echo "            \\"
+			echo "             N -- N -- N ($newrev)"
+			echo ""
+			echo "When this happens we assume that you've already had alert emails for all"
+			echo "of the O revisions, and so we here report only the revisions in the N"
+			echo "branch from the common base, B."
+		fi
+	fi
+
+	echo ""
+	if [ -z "$rewind_only" ]; then
+		echo "Those revisions listed above that are new to this repository have"
+		echo "not appeared on any other notification email; so we list those"
+		echo "revisions in full, below."
+
+		echo ""
+		echo $LOGBEGIN
+		show_new_revisions
+
+		# XXX: Need a way of detecting whether git rev-list actually
+		# outputted anything, so that we can issue a "no new
+		# revisions added by this update" message
+
+		echo $LOGEND
+	else
+		echo "No new revisions were added by this update."
+	fi
+
+	# The diffstat is shown from the old revision to the new revision.
+	# This is to show the truth of what happened in this change.
+	# There's no point showing the stat from the base to the new
+	# revision because the base is effectively a random revision at this
+	# point - the user will be interested in what this revision changed
+	# - including the undoing of previous revisions in the case of
+	# non-fast-forward updates.
+	echo ""
+	echo "Summary of changes:"
+	git diff-tree $diffopts $oldrev..$newrev
+}
+
+#
+# Called for the deletion of a branch
+#
+generate_delete_branch_email()
+{
+	echo "       was  $oldrev"
+	echo ""
+	echo $LOGBEGIN
+	git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
+	echo $LOGEND
+}
+
+# --------------- Annotated tags
+
+#
+# Called for the creation of an annotated tag
+#
+generate_create_atag_email()
+{
+	echo "        at  $newrev ($newrev_type)"
+
+	generate_atag_email
+}
+
+#
+# Called for the update of an annotated tag (this is probably a rare event
+# and may not even be allowed)
+#
+generate_update_atag_email()
+{
+	echo "        to  $newrev ($newrev_type)"
+	echo "      from  $oldrev (which is now obsolete)"
+
+	generate_atag_email
+}
+
+#
+# Called when an annotated tag is created or changed
+#
+generate_atag_email()
+{
+	# Use git for-each-ref to pull out the individual fields from the
+	# tag
+	eval $(git for-each-ref --shell --format='
+	tagobject=%(*objectname)
+	tagtype=%(*objecttype)
+	tagger=%(taggername)
+	tagged=%(taggerdate)' $refname
+	)
+
+	echo "   tagging  $tagobject ($tagtype)"
+	case "$tagtype" in
+	commit)
+
+		# If the tagged object is a commit, then we assume this is a
+		# release, and so we calculate which tag this tag is
+		# replacing
+		prevtag=$(git describe --abbrev=0 $newrev^ 2>/dev/null)
+
+		if [ -n "$prevtag" ]; then
+			echo "  replaces  $prevtag"
+		fi
+		;;
+	*)
+		echo "    length  $(git cat-file -s $tagobject) bytes"
+		;;
+	esac
+	echo " tagged by  $tagger"
+	echo "        on  $tagged"
+
+	echo ""
+	echo $LOGBEGIN
+
+	# Show the content of the tag message; this might contain a change
+	# log or release notes so is worth displaying.
+	git cat-file tag $newrev | sed -e '1,/^$/d'
+
+	echo ""
+	case "$tagtype" in
+	commit)
+		# Only commit tags make sense to have rev-list operations
+		# performed on them
+		if [ -n "$prevtag" ]; then
+			# Show changes since the previous release
+			git shortlog "$prevtag..$newrev"
+		else
+			# No previous tag, show all the changes since time
+			# began
+			git shortlog $newrev
+		fi
+		;;
+	*)
+		# XXX: Is there anything useful we can do for non-commit
+		# objects?
+		;;
+	esac
+
+	echo $LOGEND
+}
+
+#
+# Called for the deletion of an annotated tag
+#
+generate_delete_atag_email()
+{
+	echo "       was  $oldrev"
+	echo ""
+	echo $LOGBEGIN
+	git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
+	echo $LOGEND
+}
+
+# --------------- General references
+
+#
+# Called when any other type of reference is created (most likely a
+# non-annotated tag)
+#
+generate_create_general_email()
+{
+	echo "        at  $newrev ($newrev_type)"
+
+	generate_general_email
+}
+
+#
+# Called when any other type of reference is updated (most likely a
+# non-annotated tag)
+#
+generate_update_general_email()
+{
+	echo "        to  $newrev ($newrev_type)"
+	echo "      from  $oldrev"
+
+	generate_general_email
+}
+
+#
+# Called for creation or update of any other type of reference
+#
+generate_general_email()
+{
+	# Unannotated tags are more about marking a point than releasing a
+	# version; therefore we don't do the shortlog summary that we do for
+	# annotated tags above - we simply show that the point has been
+	# marked, and print the log message for the marked point for
+	# reference purposes
+	#
+	# Note this section also catches any other reference type (although
+	# there aren't any) and deals with them in the same way.
+
+	echo ""
+	if [ "$newrev_type" = "commit" ]; then
+		echo $LOGBEGIN
+		git diff-tree -s --always --encoding=UTF-8 --pretty=medium $newrev
+		echo $LOGEND
+	else
+		# What can we do here?  The tag marks an object that is not
+		# a commit, so there is no log for us to display.  It's
+		# probably not wise to output git cat-file as it could be a
+		# binary blob.  We'll just say how big it is
+		echo "$newrev is a $newrev_type, and is $(git cat-file -s $newrev) bytes long."
+	fi
+}
+
+#
+# Called for the deletion of any other type of reference
+#
+generate_delete_general_email()
+{
+	echo "       was  $oldrev"
+	echo ""
+	echo $LOGBEGIN
+	git diff-tree -s --always --encoding=UTF-8 --pretty=oneline $oldrev
+	echo $LOGEND
+}
+
+
+# --------------- Miscellaneous utilities
+
+#
+# Show new revisions as the user would like to see them in the email.
+#
+show_new_revisions()
+{
+	# This shows all log entries that are not already covered by
+	# another ref - i.e. commits that are now accessible from this
+	# ref that were previously not accessible
+	# (see generate_update_branch_email for the explanation of this
+	# command)
+
+	# Revision range passed to rev-list differs for new vs. updated
+	# branches.
+	if [ "$change_type" = create ]
+	then
+		# Show all revisions exclusive to this (new) branch.
+		revspec=$newrev
+	else
+		# Branch update; show revisions not part of $oldrev.
+		revspec=$oldrev..$newrev
+	fi
+
+	other_branches=$(git for-each-ref --format='%(refname)' refs/heads/ |
+	    grep -F -v $refname)
+	git rev-parse --not $other_branches |
+	if [ -z "$custom_showrev" ]
+	then
+		git rev-list --pretty --stdin $revspec
+	else
+		git rev-list --stdin $revspec |
+		while read onerev
+		do
+			eval $(printf "$custom_showrev" $onerev)
+		done
+	fi
+}
+
+
+limit_lines()
+{
+	lines=0
+	skipped=0
+	while IFS="" read -r line; do
+		lines=$((lines + 1))
+		if [ $lines -gt $1 ]; then
+			skipped=$((skipped + 1))
+		else
+			printf "%s\n" "$line"
+		fi
+	done
+	if [ $skipped -ne 0 ]; then
+		echo "... $skipped lines suppressed ..."
+	fi
+}
+
+
+send_mail()
+{
+	if [ -n "$envelopesender" ]; then
+		/usr/sbin/sendmail -t -f "$envelopesender"
+	else
+		/usr/sbin/sendmail -t
+	fi
+}
+
+# ---------------------------- main()
+
+# --- Constants
+LOGBEGIN="- Log -----------------------------------------------------------------"
+LOGEND="-----------------------------------------------------------------------"
+
+# --- Config
+# Set GIT_DIR either from the working directory, or from the environment
+# variable.
+GIT_DIR=$(git rev-parse --git-dir 2>/dev/null)
+if [ -z "$GIT_DIR" ]; then
+	echo >&2 "fatal: post-receive: GIT_DIR not set"
+	exit 1
+fi
+
+projectdesc=$(sed -ne '1p' "$GIT_DIR/description" 2>/dev/null)
+# Check if the description is unchanged from its default, and shorten it to
+# a more manageable length if it is
+if expr "$projectdesc" : "Unnamed repository.*$" >/dev/null
+then
+	projectdesc="UNNAMED PROJECT"
+fi
+
+recipients=$(git config hooks.mailinglist)
+announcerecipients=$(git config hooks.announcelist)
+envelopesender=$(git config hooks.envelopesender)
+emailprefix=$(git config hooks.emailprefix || echo '[SCM] ')
+custom_showrev=$(git config hooks.showrev)
+maxlines=$(git config hooks.emailmaxlines)
+diffopts=$(git config hooks.diffopts)
+: ${diffopts:="--stat --summary --find-copies-harder"}
+
+# --- Main loop
+# Allow dual mode: run from the command line just like the update hook, or
+# if no arguments are given then run as a hook script
+if [ -n "$1" -a -n "$2" -a -n "$3" ]; then
+	# Output to the terminal in command line mode - if someone wanted to
+	# resend an email, they could redirect the output to sendmail
+	# themselves
+	prep_for_email $2 $3 $1 && PAGER= generate_email
+else
+	while read oldrev newrev refname
+	do
+		prep_for_email $oldrev $newrev $refname || continue
+		generate_email $maxlines | send_mail
+	done
+fi
diff --git a/third_party/git/contrib/hooks/pre-auto-gc-battery b/third_party/git/contrib/hooks/pre-auto-gc-battery
new file mode 100755
index 000000000000..7ba78c4dff68
--- /dev/null
+++ b/third_party/git/contrib/hooks/pre-auto-gc-battery
@@ -0,0 +1,42 @@
+#!/bin/sh
+#
+# An example hook script to verify if you are on battery, in case you
+# are running Linux or OS X. Called by git-gc --auto with no arguments.
+# The hook should exit with non-zero status after issuing an appropriate
+# message if it wants to stop the auto repacking.
+#
+# This hook is stored in the contrib/hooks directory. Your distribution
+# may have put this somewhere else. If you want to use this hook, you
+# should make this script executable then link to it in the repository
+# you would like to use it in.
+#
+# For example, if the hook is stored in
+# /usr/share/git-core/contrib/hooks/pre-auto-gc-battery:
+#
+# cd /path/to/your/repository.git
+# ln -sf /usr/share/git-core/contrib/hooks/pre-auto-gc-battery \
+#	hooks/pre-auto-gc
+
+if test -x /sbin/on_ac_power && (/sbin/on_ac_power;test $? -ne 1)
+then
+	exit 0
+elif test "$(cat /sys/class/power_supply/AC/online 2>/dev/null)" = 1
+then
+	exit 0
+elif grep -q 'on-line' /proc/acpi/ac_adapter/AC/state 2>/dev/null
+then
+	exit 0
+elif grep -q '0x01$' /proc/apm 2>/dev/null
+then
+	exit 0
+elif grep -q "AC Power \+: 1" /proc/pmu/info 2>/dev/null
+then
+	exit 0
+elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
+	grep -q "drawing from 'AC Power'"
+then
+	exit 0
+fi
+
+echo "Auto packing deferred; not on AC"
+exit 1
diff --git a/third_party/git/contrib/hooks/setgitperms.perl b/third_party/git/contrib/hooks/setgitperms.perl
new file mode 100755
index 000000000000..2770a1b1d205
--- /dev/null
+++ b/third_party/git/contrib/hooks/setgitperms.perl
@@ -0,0 +1,214 @@
+#!/usr/bin/perl
+#
+# Copyright (c) 2006 Josh England
+#
+# This script can be used to save/restore full permissions and ownership data
+# within a git working tree.
+#
+# To save permissions/ownership data, place this script in your .git/hooks
+# directory and enable a `pre-commit` hook with the following lines:
+#      #!/bin/sh
+#     SUBDIRECTORY_OK=1 . git-sh-setup
+#     $GIT_DIR/hooks/setgitperms.perl -r
+#
+# To restore permissions/ownership data, place this script in your .git/hooks
+# directory and enable a `post-merge` and `post-checkout` hook with the
+# following lines:
+#      #!/bin/sh
+#     SUBDIRECTORY_OK=1 . git-sh-setup
+#     $GIT_DIR/hooks/setgitperms.perl -w
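+#
+# Each line of the `.gitmeta` file records one path followed by its mode and
+# ownership, for example (illustrative values):
+#
+#     Makefile  mode=0644  uid=1000  gid=1000
+#     some/script.sh  mode=0755  uid=1000  gid=1000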
+#
+use strict;
+use Getopt::Long;
+use File::Find;
+use File::Basename;
+
+my $usage =
+"usage: setgitperms.perl [OPTION]... <--read|--write>
+This program uses a file `.gitmeta` to store/restore permissions and uid/gid
+info for all files/dirs tracked by git in the repository.
+
+---------------------------------Read Mode-------------------------------------
+-r,  --read         Reads perms/etc from working dir into a .gitmeta file
+-s,  --stdout       Output to stdout instead of .gitmeta
+-d,  --diff         Show unified diff of perms file (XOR with --stdout)
+
+---------------------------------Write Mode------------------------------------
+-w,  --write        Modify perms/etc in working dir to match the .gitmeta file
+-v,  --verbose      Be verbose
+
+\n";
+
+my ($stdout, $showdiff, $verbose, $read_mode, $write_mode);
+
+if ((@ARGV < 0) || !GetOptions(
+			       "stdout",         \$stdout,
+			       "diff",           \$showdiff,
+			       "read",           \$read_mode,
+			       "write",          \$write_mode,
+			       "verbose",        \$verbose,
+			      )) { die $usage; }
+die $usage unless ($read_mode xor $write_mode);
+
+my $topdir = `git rev-parse --show-cdup` or die "\n"; chomp $topdir;
+my $gitdir = $topdir . '.git';
+my $gitmeta = $topdir . '.gitmeta';
+
+if ($write_mode) {
+    # Update the working dir permissions/ownership based on data from .gitmeta
+    open (IN, "<$gitmeta") or die "Could not open $gitmeta for reading: $!\n";
+    while (defined ($_ = <IN>)) {
+	chomp;
+	if (/^(.*)  mode=(\S+)\s+uid=(\d+)\s+gid=(\d+)/) {
+	    # Compare recorded perms to actual perms in the working dir
+	    my ($path, $mode, $uid, $gid) = ($1, $2, $3, $4);
+	    my $fullpath = $topdir . $path;
+	    my (undef,undef,$wmode,undef,$wuid,$wgid) = lstat($fullpath);
+	    $wmode = sprintf "%04o", $wmode & 07777;
+	    if ($mode ne $wmode) {
+		$verbose && print "Updating permissions on $path: old=$wmode, new=$mode\n";
+		chmod oct($mode), $fullpath;
+	    }
+	    if ($uid != $wuid || $gid != $wgid) {
+		if ($verbose) {
+		    # Print out user/group names instead of uid/gid
+		    my $pwname  = getpwuid($uid);
+		    my $grpname  = getgrgid($gid);
+		    my $wpwname  = getpwuid($wuid);
+		    my $wgrpname  = getgrgid($wgid);
+		    $pwname = $uid if !defined $pwname;
+		    $grpname = $gid if !defined $grpname;
+		    $wpwname = $wuid if !defined $wpwname;
+		    $wgrpname = $wgid if !defined $wgrpname;
+
+		    print "Updating uid/gid on $path: old=$wpwname/$wgrpname, new=$pwname/$grpname\n";
+		}
+		chown $uid, $gid, $fullpath;
+	    }
+	}
+	else {
+	    warn "Invalid input format in $gitmeta:\n\t$_\n";
+	}
+    }
+    close IN;
+}
+elsif ($read_mode) {
+    # Handle merge conflicts in the .gitmeta file
+    if (-e "$gitdir/MERGE_MSG") {
+	if (`grep ====== $gitmeta`) {
+	    # Conflict not resolved -- abort the commit
+	    print "PERMISSIONS/OWNERSHIP CONFLICT\n";
+	    print "    Resolve the conflict in the $gitmeta file and then run\n";
+	    print "    `.git/hooks/setgitperms.perl --write` to reconcile.\n";
+	    exit 1;
+	}
+	elsif (`grep $gitmeta $gitdir/MERGE_MSG`) {
+	    # A conflict in .gitmeta has been manually resolved. Verify that
+	    # the working dir perms matches the current .gitmeta perms for
+	    # each file/dir that conflicted.
+	    # This is here because a `setgitperms.perl --write` was not
+	    # performed due to a merge conflict, so permissions/ownership
+	    # may not be consistent with the manually merged .gitmeta file.
+	    my @conflict_diff = `git show \$(cat $gitdir/MERGE_HEAD)`;
+	    my @conflict_files;
+	    my $metadiff = 0;
+
+	    # Build a list of files that conflicted from the .gitmeta diff
+	    foreach my $line (@conflict_diff) {
+		if ($line =~ m|^diff --git a/$gitmeta b/$gitmeta|) {
+		    $metadiff = 1;
+		}
+		elsif ($line =~ /^diff --git/) {
+		    $metadiff = 0;
+		}
+		elsif ($metadiff && $line =~ /^\+(.*)  mode=/) {
+		    push @conflict_files, $1;
+		}
+	    }
+
+	    # Verify that each conflict file now has permissions consistent
+	    # with the .gitmeta file
+	    foreach my $file (@conflict_files) {
+		my $absfile = $topdir . $file;
+		my $gm_entry = `grep "^$file  mode=" $gitmeta`;
+		if ($gm_entry =~ /mode=(\d+)  uid=(\d+)  gid=(\d+)/) {
+		    my ($gm_mode, $gm_uid, $gm_gid) = ($1, $2, $3);
+		    my (undef,undef,$mode,undef,$uid,$gid) = lstat("$absfile");
+		    $mode = sprintf("%04o", $mode & 07777);
+		    if (($gm_mode ne $mode) || ($gm_uid != $uid)
+			|| ($gm_gid != $gid)) {
+			print "PERMISSIONS/OWNERSHIP CONFLICT\n";
+			print "    Mismatch found for file: $file\n";
+			print "    Run `.git/hooks/setgitperms.perl --write` to reconcile.\n";
+			exit 1;
+		    }
+		}
+		else {
+		    print "Warning! Permissions/ownership no longer being tracked for file: $file\n";
+		}
+	    }
+	}
+    }
+
+    # No merge conflicts -- write out perms/ownership data to .gitmeta file
+    unless ($stdout) {
+	open (OUT, ">$gitmeta.tmp") or die "Could not open $gitmeta.tmp for writing: $!\n";
+    }
+
+    my @files = `git ls-files`;
+    my %dirs;
+
+    foreach my $path (@files) {
+	chomp $path;
+	# We have to manually add stats for parent directories
+	my $parent = dirname($path);
+	while (!exists $dirs{$parent}) {
+	    $dirs{$parent} = 1;
+	    next if $parent eq '.';
+	    printstats($parent);
+	    $parent = dirname($parent);
+	}
+	# Now the git-tracked file
+	printstats($path);
+    }
+
+    # diff the temporary metadata file to see if anything has changed
+    # If no metadata has changed, don't overwrite the real file
+    # This is just so `git commit -a` doesn't try to commit a bogus update
+    unless ($stdout) {
+	if (! -e $gitmeta) {
+	    rename "$gitmeta.tmp", $gitmeta;
+	}
+	else {
+	    my $diff = `diff -U 0 $gitmeta $gitmeta.tmp`;
+	    if ($diff ne '') {
+		rename "$gitmeta.tmp", $gitmeta;
+	    }
+	    else {
+		unlink "$gitmeta.tmp";
+	    }
+	    if ($showdiff) {
+		print $diff;
+	    }
+	}
+	close OUT;
+    }
+    # Make sure the .gitmeta file is tracked
+    system("git add $gitmeta");
+}
+
+
+sub printstats {
+    my $path = $_[0];
+    $path =~ s/@/\@/g;
+    my (undef,undef,$mode,undef,$uid,$gid) = lstat($path);
+    $path =~ s/%/\%/g;
+    if ($stdout) {
+	print $path;
+	printf "  mode=%04o  uid=$uid  gid=$gid\n", $mode & 07777;
+    }
+    else {
+	print OUT $path;
+	printf OUT "  mode=%04o  uid=$uid  gid=$gid\n", $mode & 07777;
+    }
+}
diff --git a/third_party/git/contrib/hooks/update-paranoid b/third_party/git/contrib/hooks/update-paranoid
new file mode 100755
index 000000000000..0092d67b8a47
--- /dev/null
+++ b/third_party/git/contrib/hooks/update-paranoid
@@ -0,0 +1,421 @@
+#!/usr/bin/perl
+
+use strict;
+use File::Spec;
+
+$ENV{PATH}     = '/opt/git/bin';
+my $acl_git    = '/vcs/acls.git';
+my $acl_branch = 'refs/heads/master';
+my $debug      = 0;
+
+=doc
+Invoked as: update refname old-sha1 new-sha1
+
+This script is run by git-receive-pack once for each ref that the
+client is trying to modify.  If we exit with a non-zero exit value
+then the update for that particular ref is denied, but updates for
+other refs in the same run of receive-pack may still be allowed.
+
+We are run after the objects have been uploaded, but before the
+ref is actually modified.  We take advantage of that fact when we
+look for "new" commits and tags (the new objects won't show up in
+`rev-list --all`).
+
+This script loads and parses the content of the config file
+"users/$this_user.acl" from the $acl_branch commit of $acl_git ODB.
+The acl file is a git-config style file, but uses a slightly more
+restricted syntax as the Perl parser contained within this script
+is not nearly as permissive as git-config.
+
+Example:
+
+  [user]
+    committer = John Doe <john.doe@example.com>
+    committer = John R. Doe <john.doe@example.com>
+
+  [repository "acls"]
+    allow = heads/master
+    allow = CDUR for heads/jd/
+    allow = C    for ^tags/v\\d+$
+
+For all new commit or tag objects the committer (or tagger) line
+within the object must exactly match one of the user.committer
+values listed in the acl file ("HEAD:users/$this_user.acl").
+
+For a branch to be modified an allow line within the matching
+repository section must be matched for both the refname and the
+opcode.
+
+Repository sections are matched on the basename of the repository
+(after removing the .git suffix).
+
+The opcode abbreviations are:
+
+  C: create new ref
+  D: delete existing ref
+  U: fast-forward existing ref (no commit loss)
+  R: rewind/rebase existing ref (commit loss)
+
+If no opcodes are listed before the "for" keyword then "U" (for
+fast-forward update only) is assumed as this is the most common
+usage.
+
+Refnames are matched by always assuming a prefix of "refs/".
+This hook forbids pushing or deleting anything not under "refs/".
+
+Refnames that start with ^ are Perl regular expressions, and the ^
+is kept as part of the regexp.  \\ is needed to get just one \, so
+\\d expands to \d in Perl.  The 3rd allow line above is an example.
+
+Refnames that don't start with ^ but that end with / are prefix
+matches (2nd allow line above); all other refnames are strict
+equality matches (1st allow line).
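+
+For example (hypothetical refs), "heads/master" above matches only
+refs/heads/master; "heads/jd/" matches refs/heads/jd/topic or anything
+else under refs/heads/jd/; and "^tags/v\\d+$" matches refs/tags/v1 or
+refs/tags/v42, but not refs/tags/v1.0.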
+
+Anything pushed to "heads/" (ok, really "refs/heads/") must be
+a commit.  Tags are not permitted here.
+
+Anything pushed to "tags/" (err, really "refs/tags/") must be an
+annotated tag.  Commits, blobs, trees, etc. are not permitted here.
+Annotated tag signatures aren't checked, nor are they required.
+
+The special subrepository of 'info/new-commit-check' can
+be created and used to allow users to push new commits and
+tags from another local repository to this one, even if they
+aren't the committer/tagger of those objects.  In a nutshell
+the info/new-commit-check directory is a Git repository whose
+objects/info/alternates file lists this repository and all other
+possible sources, and whose refs subdirectory contains symlinks
+to this repository's refs subdirectory, and to all other possible
+sources' refs subdirectories.  Yes, this means that you cannot
+use packed-refs in those repositories as they won't be resolved
+correctly.
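+
+A minimal sketch of such a layout (paths and names are illustrative):
+
+  info/new-commit-check/objects/info/alternates
+      lists the objects directory of this repository and of every other
+      possible source repository, one absolute path per line
+  info/new-commit-check/refs/
+      contains symlinks pointing at this repository's refs directory and
+      at the refs directories of the other source repositories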
+
+=cut
+
+my $git_dir = $ENV{GIT_DIR};
+my $new_commit_check = "$git_dir/info/new-commit-check";
+my $ref = $ARGV[0];
+my $old = $ARGV[1];
+my $new = $ARGV[2];
+my $new_type;
+my ($this_user) = getpwuid $<; # REAL_USER_ID
+my $repository_name;
+my %user_committer;
+my @allow_rules;
+my @path_rules;
+my %diff_cache;
+
+sub deny ($) {
+	print STDERR "-Deny-    $_[0]\n" if $debug;
+	print STDERR "\ndenied: $_[0]\n\n";
+	exit 1;
+}
+
+sub grant ($) {
+	print STDERR "-Grant-   $_[0]\n" if $debug;
+	exit 0;
+}
+
+sub info ($) {
+	print STDERR "-Info-    $_[0]\n" if $debug;
+}
+
+sub git_value (@) {
+	open(T,'-|','git',@_); local $_ = <T>; chop; close T; $_;
+}
+
+sub match_string ($$) {
+	my ($acl_n, $ref) = @_;
+	   ($acl_n eq $ref)
+	|| ($acl_n =~ m,/$, && substr($ref,0,length $acl_n) eq $acl_n)
+	|| ($acl_n =~ m,^\^, && $ref =~ m:$acl_n:);
+}
+
+sub parse_config ($$$$) {
+	my $data = shift;
+	local $ENV{GIT_DIR} = shift;
+	my $br = shift;
+	my $fn = shift;
+	return unless git_value('rev-list','--max-count=1',$br,'--',$fn);
+	info "Loading $br:$fn";
+	open(I,'-|','git','cat-file','blob',"$br:$fn");
+	my $section = '';
+	while (<I>) {
+		chomp;
+		if (/^\s*$/ || /^\s*#/) {
+		} elsif (/^\[([a-z]+)\]$/i) {
+			$section = lc $1;
+		} elsif (/^\[([a-z]+)\s+"(.*)"\]$/i) {
+			$section = join('.',lc $1,$2);
+		} elsif (/^\s*([a-z][a-z0-9]+)\s*=\s*(.*?)\s*$/i) {
+			push @{$data->{join('.',$section,lc $1)}}, $2;
+		} else {
+			deny "bad config file line $. in $br:$fn";
+		}
+	}
+	close I;
+}
+
+sub all_new_committers () {
+	local $ENV{GIT_DIR} = $git_dir;
+	$ENV{GIT_DIR} = $new_commit_check if -d $new_commit_check;
+
+	info "Getting committers of new commits.";
+	my %used;
+	open(T,'-|','git','rev-list','--pretty=raw',$new,'--not','--all');
+	while (<T>) {
+		next unless s/^committer //;
+		chop;
+		s/>.*$/>/;
+		info "Found $_." unless $used{$_}++;
+	}
+	close T;
+	info "No new commits." unless %used;
+	keys %used;
+}
+
+sub all_new_taggers () {
+	my %exists;
+	open(T,'-|','git','for-each-ref','--format=%(objectname)','refs/tags');
+	while (<T>) {
+		chop;
+		$exists{$_} = 1;
+	}
+	close T;
+
+	info "Getting taggers of new tags.";
+	my %used;
+	my $obj = $new;
+	my $obj_type = $new_type;
+	while ($obj_type eq 'tag') {
+		last if $exists{$obj};
+		$obj_type = '';
+		open(T,'-|','git','cat-file','tag',$obj);
+		while (<T>) {
+			chop;
+			if (/^object ([a-z0-9]{40})$/) {
+				$obj = $1;
+			} elsif (/^type (.+)$/) {
+				$obj_type = $1;
+			} elsif (s/^tagger //) {
+				s/>.*$/>/;
+				info "Found $_." unless $used{$_}++;
+				last;
+			}
+		}
+		close T;
+	}
+	info "No new tags." unless %used;
+	keys %used;
+}
+
+sub check_committers (@) {
+	my @bad;
+	foreach (@_) { push @bad, $_ unless $user_committer{$_}; }
+	if (@bad) {
+		print STDERR "\n";
+		print STDERR "You are not $_.\n" foreach (sort @bad);
+		deny "You cannot push changes not committed by you.";
+	}
+}
+
+sub load_diff ($) {
+	my $base = shift;
+	my $d = $diff_cache{$base};
+	unless ($d) {
+		local $/ = "\0";
+		my %this_diff;
+		if ($base =~ /^0{40}$/) {
+			# Don't load the diff at all; we are making the
+			# branch and have no base to compare to in this
+			# case.  A file level ACL makes no sense in this
+			# context.  Having an empty diff will allow the
+			# branch creation.
+			#
+		} else {
+			open(T,'-|','git','diff-tree',
+				'-r','--name-status','-z',
+				$base,$new) or return undef;
+			while (<T>) {
+				my $op = $_;
+				chop $op;
+
+				my $path = <T>;
+				chop $path;
+
+				$this_diff{$path} = $op;
+			}
+			close T or return undef;
+		}
+		$d = \%this_diff;
+		$diff_cache{$base} = $d;
+	}
+	return $d;
+}
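+
+# Note (added for clarity): the hash reference returned by load_diff maps
+# each changed path to its one-letter 'git diff-tree --name-status' code
+# (e.g. A, M or D), which the path rules below are matched against.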
+
+deny "No GIT_DIR inherited from caller" unless $git_dir;
+deny "Need a ref name" unless $ref;
+deny "Refusing funny ref $ref" unless $ref =~ s,^refs/,,;
+deny "Bad old value $old" unless $old =~ /^[a-z0-9]{40}$/;
+deny "Bad new value $new" unless $new =~ /^[a-z0-9]{40}$/;
+deny "Cannot determine who you are." unless $this_user;
+grant "No change requested." if $old eq $new;
+
+$repository_name = File::Spec->rel2abs($git_dir);
+$repository_name =~ m,/([^/]+)(?:\.git|/\.git)$,;
+$repository_name = $1;
+info "Updating in '$repository_name'.";
+
+my $op;
+if    ($old =~ /^0{40}$/) { $op = 'C'; }
+elsif ($new =~ /^0{40}$/) { $op = 'D'; }
+else                      { $op = 'R'; }
+
+# This is really an update (fast-forward) if the
+# merge base of $old and $new is $old.
+#
+$op = 'U' if ($op eq 'R'
+	&& $ref =~ m,^heads/,
+	&& $old eq git_value('merge-base',$old,$new));
+
+# Load the user's ACL file. Expand groups (user.memberof) one level.
+{
+	my %data = ('user.committer' => []);
+	parse_config(\%data,$acl_git,$acl_branch,"external/$repository_name.acl");
+
+	%data = (
+		'user.committer' => $data{'user.committer'},
+		'user.memberof' => [],
+	);
+	parse_config(\%data,$acl_git,$acl_branch,"users/$this_user.acl");
+
+	%user_committer = map {$_ => $_} @{$data{'user.committer'}};
+	my $rule_key = "repository.$repository_name.allow";
+	my $rules = $data{$rule_key} || [];
+
+	foreach my $group (@{$data{'user.memberof'}}) {
+		my %g;
+		parse_config(\%g,$acl_git,$acl_branch,"groups/$group.acl");
+		my $group_rules = $g{$rule_key};
+		push @$rules, @$group_rules if $group_rules;
+	}
+
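+	# Illustration (added; the refnames and paths below are hypothetical):
+	# rule lines the parser below accepts, e.g. as values of
+	# repository.<name>.allow:
+	#
+	#   U for heads/master                allow fast-forward updates of one branch
+	#   C D R U for heads/sandbox/        allow any ref operation under a prefix
+	#   A M D of src/ for heads/master    path rule: restrict which files may change
+	#   heads/master                      bare refname, shorthand for "U for heads/master"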
+RULE:
+	foreach (@$rules) {
+		while (/\${user\.([a-z][a-zA-Z0-9]+)}/) {
+			my $k = lc $1;
+			my $v = $data{"user.$k"};
+			next RULE unless defined $v;
+			next RULE if @$v != 1;
+			next RULE unless defined $v->[0];
+			s/\${user\.$k}/$v->[0]/g;
+		}
+
+		if (/^([AMD ]+)\s+of\s+([^\s]+)\s+for\s+([^\s]+)\s+diff\s+([^\s]+)$/) {
+			my ($ops, $pth, $ref, $bst) = ($1, $2, $3, $4);
+			$ops =~ s/ //g;
+			$pth =~ s/\\\\/\\/g;
+			$ref =~ s/\\\\/\\/g;
+			push @path_rules, [$ops, $pth, $ref, $bst];
+		} elsif (/^([AMD ]+)\s+of\s+([^\s]+)\s+for\s+([^\s]+)$/) {
+			my ($ops, $pth, $ref) = ($1, $2, $3);
+			$ops =~ s/ //g;
+			$pth =~ s/\\\\/\\/g;
+			$ref =~ s/\\\\/\\/g;
+			push @path_rules, [$ops, $pth, $ref, $old];
+		} elsif (/^([CDRU ]+)\s+for\s+([^\s]+)$/) {
+			my $ops = $1;
+			my $ref = $2;
+			$ops =~ s/ //g;
+			$ref =~ s/\\\\/\\/g;
+			push @allow_rules, [$ops, $ref];
+		} elsif (/^for\s+([^\s]+)$/) {
+			# Mentioned, but nothing granted?
+		} elsif (/^[^\s]+$/) {
+			s/\\\\/\\/g;
+			push @allow_rules, ['U', $_];
+		}
+	}
+}
+
+if ($op ne 'D') {
+	$new_type = git_value('cat-file','-t',$new);
+
+	if ($ref =~ m,^heads/,) {
+		deny "$ref must be a commit." unless $new_type eq 'commit';
+	} elsif ($ref =~ m,^tags/,) {
+		deny "$ref must be an annotated tag." unless $new_type eq 'tag';
+	}
+
+	check_committers (all_new_committers);
+	check_committers (all_new_taggers) if $new_type eq 'tag';
+}
+
+info "$this_user wants $op for $ref";
+foreach my $acl_entry (@allow_rules) {
+	my ($acl_ops, $acl_n) = @$acl_entry;
+	next unless $acl_ops =~ /^[CDRU]+$/; # Uhh.... shouldn't happen.
+	next unless $acl_n;
+	next unless $op =~ /^[$acl_ops]$/;
+	next unless match_string $acl_n, $ref;
+
+	# Don't test path rules on branch deletes.
+	#
+	grant "Allowed by: $acl_ops for $acl_n" if $op eq 'D';
+
+	# Aggregate matching path rules; allow if there aren't
+	# any matching this ref.
+	#
+	my %pr;
+	foreach my $p_entry (@path_rules) {
+		my ($p_ops, $p_n, $p_ref, $p_bst) = @$p_entry;
+		next unless $p_ref;
+		push @{$pr{$p_bst}}, $p_entry if match_string $p_ref, $ref;
+	}
+	grant "Allowed by: $acl_ops for $acl_n" unless %pr;
+
+	# Allow only if all changes against a single base are
+	# allowed by file path rules.
+	#
+	my @bad;
+	foreach my $p_bst (keys %pr) {
+		my $diff_ref = load_diff $p_bst;
+		deny "Cannot difference trees." unless ref $diff_ref;
+
+		my %fd = %$diff_ref;
+		foreach my $p_entry (@{$pr{$p_bst}}) {
+			my ($p_ops, $p_n, $p_ref, $p_bst) = @$p_entry;
+			next unless $p_ops =~ /^[AMD]+$/;
+			next unless $p_n;
+
+			foreach my $f_n (keys %fd) {
+				my $f_op = $fd{$f_n};
+				next unless $f_op;
+				next unless $f_op =~ /^[$p_ops]$/;
+				delete $fd{$f_n} if match_string $p_n, $f_n;
+			}
+			last unless %fd;
+		}
+
+		if (%fd) {
+			push @bad, [$p_bst, \%fd];
+		} else {
+			# All changes relative to $p_bst were allowed.
+			#
+			grant "Allowed by: $acl_ops for $acl_n diff $p_bst";
+		}
+	}
+
+	foreach my $bad_ref (@bad) {
+		my ($p_bst, $fd) = @$bad_ref;
+		print STDERR "\n";
+		print STDERR "Not allowed to make the following changes:\n";
+		print STDERR "(base: $p_bst)\n";
+		foreach my $f_n (sort keys %$fd) {
+			print STDERR "  $fd->{$f_n} $f_n\n";
+		}
+	}
+	deny "You are not permitted to $op $ref";
+}
+close A;
+deny "You are not permitted to $op $ref";
diff --git a/third_party/git/contrib/long-running-filter/example.pl b/third_party/git/contrib/long-running-filter/example.pl
new file mode 100755
index 000000000000..a677569ddd95
--- /dev/null
+++ b/third_party/git/contrib/long-running-filter/example.pl
@@ -0,0 +1,132 @@
+#!/usr/bin/perl
+#
+# Example implementation for the Git filter protocol version 2
+# See Documentation/gitattributes.txt, section "Filter Protocol"
+#
+# Please note, this pass-thru filter is a minimal skeleton. No proper
+# error handling was implemented.
+#
+
+use strict;
+use warnings;
+
+my $MAX_PACKET_CONTENT_SIZE = 65516;
+
+sub packet_bin_read {
+	my $buffer;
+	my $bytes_read = read STDIN, $buffer, 4;
+	if ( $bytes_read == 0 ) {
+
+		# EOF - Git stopped talking to us!
+		exit();
+	}
+	elsif ( $bytes_read != 4 ) {
+		die "invalid packet: '$buffer'";
+	}
+	my $pkt_size = hex($buffer);
+	if ( $pkt_size == 0 ) {
+		return ( 1, "" );
+	}
+	elsif ( $pkt_size > 4 ) {
+		my $content_size = $pkt_size - 4;
+		$bytes_read = read STDIN, $buffer, $content_size;
+		if ( $bytes_read != $content_size ) {
+			die "invalid packet ($content_size bytes expected; $bytes_read bytes read)";
+		}
+		return ( 0, $buffer );
+	}
+	else {
+		die "invalid packet size: $pkt_size";
+	}
+}
+
+sub packet_txt_read {
+	my ( $res, $buf ) = packet_bin_read();
+	unless ( $buf =~ s/\n$// ) {
+		die "A non-binary line MUST be terminated by an LF.";
+	}
+	return ( $res, $buf );
+}
+
+sub packet_bin_write {
+	my $buf = shift;
+	print STDOUT sprintf( "%04x", length($buf) + 4 );
+	print STDOUT $buf;
+	STDOUT->flush();
+}
+
+sub packet_txt_write {
+	packet_bin_write( $_[0] . "\n" );
+}
+
+sub packet_flush {
+	print STDOUT sprintf( "%04x", 0 );
+	STDOUT->flush();
+}
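+
+# Worked example (added for illustration): a text packet carrying
+# "version=2" is sent as the 4-byte hex length "000e" followed by
+# "version=2\n" (4 length bytes + 10 payload bytes = 14 = 0x0e), and a
+# flush packet is the literal "0000".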
+
+( packet_txt_read() eq ( 0, "git-filter-client" ) ) || die "bad initialize";
+( packet_txt_read() eq ( 0, "version=2" ) )         || die "bad version";
+( packet_bin_read() eq ( 1, "" ) )                  || die "bad version end";
+
+packet_txt_write("git-filter-server");
+packet_txt_write("version=2");
+packet_flush();
+
+( packet_txt_read() eq ( 0, "capability=clean" ) )  || die "bad capability";
+( packet_txt_read() eq ( 0, "capability=smudge" ) ) || die "bad capability";
+( packet_bin_read() eq ( 1, "" ) )                  || die "bad capability end";
+
+packet_txt_write("capability=clean");
+packet_txt_write("capability=smudge");
+packet_flush();
+
+while (1) {
+	my ($command)  = packet_txt_read() =~ /^command=(.+)$/;
+	my ($pathname) = packet_txt_read() =~ /^pathname=(.+)$/;
+
+	if ( $pathname eq "" ) {
+		die "bad pathname '$pathname'";
+	}
+
+	packet_bin_read();
+
+	my $input = "";
+	{
+		binmode(STDIN);
+		my $buffer;
+		my $done = 0;
+		while ( !$done ) {
+			( $done, $buffer ) = packet_bin_read();
+			$input .= $buffer;
+		}
+	}
+
+	my $output;
+	if ( $command eq "clean" ) {
+		### Perform clean here ###
+		$output = $input;
+	}
+	elsif ( $command eq "smudge" ) {
+		### Perform smudge here ###
+		$output = $input;
+	}
+	else {
+		die "bad command '$command'";
+	}
+
+	packet_txt_write("status=success");
+	packet_flush();
+	while ( length($output) > 0 ) {
+		my $packet = substr( $output, 0, $MAX_PACKET_CONTENT_SIZE );
+		packet_bin_write($packet);
+		if ( length($output) > $MAX_PACKET_CONTENT_SIZE ) {
+			$output = substr( $output, $MAX_PACKET_CONTENT_SIZE );
+		}
+		else {
+			$output = "";
+		}
+	}
+	packet_flush();    # flush content!
+	packet_flush();    # empty list, keep "status=success" unchanged!
+
+}
diff --git a/third_party/git/contrib/mw-to-git/.gitignore b/third_party/git/contrib/mw-to-git/.gitignore
new file mode 100644
index 000000000000..ae545b013dc8
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/.gitignore
@@ -0,0 +1,2 @@
+git-remote-mediawiki
+git-mw
diff --git a/third_party/git/contrib/mw-to-git/.perlcriticrc b/third_party/git/contrib/mw-to-git/.perlcriticrc
new file mode 100644
index 000000000000..b7333267adad
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/.perlcriticrc
@@ -0,0 +1,28 @@
+# These 3 rules demand to add the s, m and x flag to *every* regexp. This is
+# overkill and would be harmful for readability.
+[-RegularExpressions::RequireExtendedFormatting]
+[-RegularExpressions::RequireDotMatchAnything]
+[-RegularExpressions::RequireLineBoundaryMatching]
+
+# This rule says that builtin functions should not be called with parentheses
+# e.g.: (taken from CPAN's documentation)
+# open($handle, '>', $filename); #not ok
+# open $handle, '>', $filename;  #ok
+# Applying such a rule would mean modifying a huge number of lines for a
+# question of style.
+[-CodeLayout::ProhibitParensWithBuiltins]
+
+# This rule states that each system call should have its return value checked
+# The problem is that it includes the print call. Checking every print call's
+# return value would be harmful to the code readability.
+# This configuration keeps all default function but print.
+[InputOutput::RequireCheckedSyscalls]
+functions = open say close
+
+# This rule demands to add a dependency for the Readonly module. This is not
+# wished.
+[-ValuesAndExpressions::ProhibitConstantPragma]
+
+# This rule is not really useful (rather a question of style) and produces many
+# warnings among the code.
+[-ValuesAndExpressions::ProhibitNoisyQuotes]
diff --git a/third_party/git/contrib/mw-to-git/Git/Mediawiki.pm b/third_party/git/contrib/mw-to-git/Git/Mediawiki.pm
new file mode 100644
index 000000000000..917d9e2d3222
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/Git/Mediawiki.pm
@@ -0,0 +1,101 @@
+package Git::Mediawiki;
+
+use 5.008;
+use strict;
+use POSIX;
+use Git;
+
+BEGIN {
+
+our ($VERSION, @ISA, @EXPORT, @EXPORT_OK);
+
+# Totally unstable API.
+$VERSION = '0.01';
+
+require Exporter;
+
+@ISA = qw(Exporter);
+
+@EXPORT = ();
+
+# Methods which can be called as standalone functions as well:
+@EXPORT_OK = qw(clean_filename smudge_filename connect_maybe
+				EMPTY HTTP_CODE_OK HTTP_CODE_PAGE_NOT_FOUND);
+}
+
+# Mediawiki filenames can contain forward slashes. This constant defines the pattern used to replace them.
+use constant SLASH_REPLACEMENT => '%2F';
+
+# Used to test for empty strings
+use constant EMPTY => q{};
+
+# HTTP codes
+use constant HTTP_CODE_OK => 200;
+use constant HTTP_CODE_PAGE_NOT_FOUND => 404;
+
+sub clean_filename {
+	my $filename = shift;
+	$filename =~ s{@{[SLASH_REPLACEMENT]}}{/}g;
+	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
+	# Do a variant of URL-encoding, i.e. looks like URL-encoding,
+	# but with _ added to prevent MediaWiki from thinking this is
+	# an actual special character.
+	$filename =~ s/[\[\]\{\}\|]/sprintf("_%%_%x", ord($&))/ge;
+	# If URI escaping were applied before this point,
+	# it would have to be undone here first.
+
+	return $filename;
+}
+
+sub smudge_filename {
+	my $filename = shift;
+	$filename =~ s{/}{@{[SLASH_REPLACEMENT]}}g;
+	$filename =~ s/ /_/g;
+	# Decode forbidden characters encoded in clean_filename
+	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf('%c', hex($1))/ge;
+	return substr($filename, 0, NAME_MAX-length('.mw'));
+}
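+
+# Illustration (added, not part of the upstream module): a wiki title such
+# as "Foo/Bar" is smudged to the Git path "Foo%2FBar" and cleaned back to
+# "Foo/Bar"; forbidden characters such as '|' are encoded as "_%_7c" by
+# clean_filename and decoded again by smudge_filename.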
+
+sub connect_maybe {
+	my $wiki = shift;
+	if ($wiki) {
+		return $wiki;
+	}
+
+	my $remote_name = shift;
+	my $remote_url = shift;
+	my ($wiki_login, $wiki_password, $wiki_domain);
+
+	$wiki_login = Git::config("remote.${remote_name}.mwLogin");
+	$wiki_password = Git::config("remote.${remote_name}.mwPassword");
+	$wiki_domain = Git::config("remote.${remote_name}.mwDomain");
+
+	$wiki = MediaWiki::API->new;
+	$wiki->{config}->{api_url} = "${remote_url}/api.php";
+	if ($wiki_login) {
+		my %credential = (
+			'url' => $remote_url,
+			'username' => $wiki_login,
+			'password' => $wiki_password
+		);
+		Git::credential(\%credential);
+		my $request = {lgname => $credential{username},
+			       lgpassword => $credential{password},
+			       lgdomain => $wiki_domain};
+		if ($wiki->login($request)) {
+			Git::credential(\%credential, 'approve');
+			print {*STDERR} qq(Logged in mediawiki user "$credential{username}".\n);
+		} else {
+			print {*STDERR} qq(Failed to log in mediawiki user "$credential{username}" on ${remote_url}\n);
+			print {*STDERR} '  (error ' .
+				$wiki->{error}->{code} . ': ' .
+				$wiki->{error}->{details} . ")\n";
+			Git::credential(\%credential, 'reject');
+			exit 1;
+		}
+	}
+
+	return $wiki;
+}
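+
+# Example call (added; the remote name and URL are hypothetical):
+#   my $wiki = connect_maybe(undef, 'origin', 'http://example.com/wiki');
+# returns a MediaWiki::API handle, logging in first when
+# remote.origin.mwLogin is configured.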
+
+1; # Famous last words
diff --git a/third_party/git/contrib/mw-to-git/Makefile b/third_party/git/contrib/mw-to-git/Makefile
new file mode 100644
index 000000000000..4e603512a39f
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/Makefile
@@ -0,0 +1,58 @@
+#
+# Copyright (C) 2013
+#     Matthieu Moy <Matthieu.Moy@imag.fr>
+#
+# To build and test:
+#
+#   make
+#   bin-wrapper/git mw preview Some_page.mw
+#   bin-wrapper/git clone mediawiki::http://example.com/wiki/
+#
+# To install, run Git's toplevel 'make install' then run:
+#
+#   make install
+
+GIT_MEDIAWIKI_PM=Git/Mediawiki.pm
+SCRIPT_PERL=git-remote-mediawiki.perl
+SCRIPT_PERL+=git-mw.perl
+GIT_ROOT_DIR=../..
+HERE=contrib/mw-to-git/
+
+INSTALL = install
+
+SCRIPT_PERL_FULL=$(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
+INSTLIBDIR=$(shell $(MAKE) -C $(GIT_ROOT_DIR)/ \
+                -s --no-print-directory prefix=$(prefix) \
+                perllibdir=$(perllibdir) perllibdir)
+DESTDIR_SQ = $(subst ','\'',$(DESTDIR))
+INSTLIBDIR_SQ = $(subst ','\'',$(INSTLIBDIR))
+
+all: build
+
+test: all
+	$(MAKE) -C t
+
+check: perlcritic test
+
+install_pm:
+	$(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(INSTLIBDIR_SQ)/Git'
+	$(INSTALL) -m 644 $(GIT_MEDIAWIKI_PM) \
+		'$(DESTDIR_SQ)$(INSTLIBDIR_SQ)/$(GIT_MEDIAWIKI_PM)'
+
+build:
+	$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+                build-perl-script
+
+install: install_pm
+	$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+                install-perl-script
+
+clean:
+	$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
+                clean-perl-script
+
+perlcritic:
+	perlcritic -5 $(SCRIPT_PERL)
+	-perlcritic -2 $(SCRIPT_PERL)
+
+.PHONY: all test check install_pm install clean perlcritic
diff --git a/third_party/git/contrib/mw-to-git/bin-wrapper/git b/third_party/git/contrib/mw-to-git/bin-wrapper/git
new file mode 100755
index 000000000000..6663ae57e869
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/bin-wrapper/git
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# git executable wrapper script for Git-Mediawiki to run tests without
+# installing all the scripts and perl packages.
+
+GIT_ROOT_DIR=../../..
+GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd ${GIT_ROOT_DIR} && pwd)
+
+GITPERLLIB="$GIT_EXEC_PATH"'/contrib/mw-to-git'"${GITPERLLIB:+:$GITPERLLIB}"
+PATH="$GIT_EXEC_PATH"'/contrib/mw-to-git:'"$PATH"
+
+export GITPERLLIB PATH
+
+exec "${GIT_EXEC_PATH}/bin-wrappers/git" "$@"
diff --git a/third_party/git/contrib/mw-to-git/git-mw.perl b/third_party/git/contrib/mw-to-git/git-mw.perl
new file mode 100755
index 000000000000..28df3ee321ec
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/git-mw.perl
@@ -0,0 +1,368 @@
+#!/usr/bin/perl
+
+# Copyright (C) 2013
+#     Benoit Person <benoit.person@ensimag.imag.fr>
+#     Celestin Matte <celestin.matte@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Set of tools for git repo with a mediawiki remote.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+
+use strict;
+use warnings;
+
+use Getopt::Long;
+use URI::URL qw(url);
+use LWP::UserAgent;
+use HTML::TreeBuilder;
+
+use Git;
+use MediaWiki::API;
+use Git::Mediawiki qw(clean_filename connect_maybe
+					EMPTY HTTP_CODE_PAGE_NOT_FOUND);
+
+# By default, use UTF-8 to communicate with Git and the user
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
+
+# Global parameters
+my $verbose = 0;
+sub v_print {
+	if ($verbose) {
+		return print {*STDERR} @_;
+	}
+	return;
+}
+
+# Preview parameters
+my $file_name = EMPTY;
+my $remote_name = EMPTY;
+my $preview_file_name = EMPTY;
+my $autoload = 0;
+sub file {
+	$file_name = shift;
+	return $file_name;
+}
+
+my %commands = (
+	'help' =>
+		[\&help, {}, \&help],
+	'preview' =>
+		[\&preview, {
+			'<>' => \&file,
+			'output|o=s' => \$preview_file_name,
+			'remote|r=s' => \$remote_name,
+			'autoload|a' => \$autoload
+		}, \&preview_help]
+);
+
+# Search for sub-command
+my $cmd = $commands{'help'};
+for (0..@ARGV-1) {
+	if (defined $commands{$ARGV[$_]}) {
+		$cmd = $commands{$ARGV[$_]};
+		splice @ARGV, $_, 1;
+		last;
+	}
+};
+GetOptions( %{$cmd->[1]},
+	'help|h' => \&{$cmd->[2]},
+	'verbose|v'  => \$verbose);
+
+# Launch command
+&{$cmd->[0]};
+
+############################# Preview Functions ################################
+
+sub preview_help {
+	print {*STDOUT} <<'END';
+USAGE: git mw preview [--remote|-r <remote name>] [--autoload|-a]
+                      [--output|-o <output filename>] [--verbose|-v]
+                      <blob> | <filename>
+
+DESCRIPTION:
+Preview is a utility to preview local content of a mediawiki repo as if it were
+pushed on the remote.
+
+For that, preview searches for the remote name of the current branch's
+upstream if --remote is not set. If that remote is not found or if it
+is not a mediawiki, it lists all mediawiki remotes configured and asks
+you to replay your command with the --remote option set properly.
+
+Then, it searches for a file named 'filename'. If it's not found in
+the current dir, it will assume it's a blob.
+
+The content retrieved in the file (or in the blob) will then be parsed
+by the remote mediawiki and combined with a template retrieved from
+the mediawiki.
+
+Finally, preview will save the HTML result in a file and autoload it
+in your default web browser if the option --autoload is present.
+
+OPTIONS:
+    -r <remote name>, --remote <remote name>
+        If the remote is a mediawiki, the template and the parse engine
+        used for the preview will be those of that remote.
+        If not, a list of valid remotes will be shown.
+
+    -a, --autoload
+        Try to load the HTML output in a new tab (or new window) of your
+        default web browser.
+
+    -o <output filename>, --output <output filename>
+        Change the HTML output filename. Default filename is based on the
+        input filename with its extension replaced by '.html'.
+
+    -v, --verbose
+        Show more information on what's going on under the hood.
+END
+	exit;
+}
+
+sub preview {
+	my $wiki;
+	my ($remote_url, $wiki_page_name);
+	my ($new_content, $template);
+	my $file_content;
+
+	if ($file_name eq EMPTY) {
+		die "Missing file argument, see `git mw help`\n";
+	}
+
+	v_print("### Selecting remote\n");
+	if ($remote_name eq EMPTY) {
+		$remote_name = find_upstream_remote_name();
+		if ($remote_name) {
+			$remote_url = mediawiki_remote_url_maybe($remote_name);
+		}
+
+		if (! $remote_url) {
+			my @valid_remotes = find_mediawiki_remotes();
+
+			if ($#valid_remotes == 0) {
+				print {*STDERR} "No mediawiki remote in this repo. \n";
+				exit 1;
+			} else {
+				my $remotes_list = join("\n\t", @valid_remotes);
+				print {*STDERR} <<"MESSAGE";
+There are multiple mediawiki remotes, which of:
+	${remotes_list}
+do you want? Use the -r option to specify the remote.
+MESSAGE
+			}
+
+			exit 1;
+		}
+	} else {
+		if (!is_valid_remote($remote_name)) {
+			die "${remote_name} is not a remote\n";
+		}
+
+		$remote_url = mediawiki_remote_url_maybe($remote_name);
+		if (! $remote_url) {
+			die "${remote_name} is not a mediawiki remote\n";
+		}
+	}
+	v_print("selected remote:\n\tname: ${remote_name}\n\turl: ${remote_url}\n");
+
+	$wiki = connect_maybe($wiki, $remote_name, $remote_url);
+
+	# Read file content
+	if (! -e $file_name) {
+		$file_content = git_cmd_try {
+			Git::command('cat-file', 'blob', $file_name); }
+			"%s failed w/ code %d";
+
+		if ($file_name =~ /(.+):(.+)/) {
+			$file_name = $2;
+		}
+	} else {
+		open my $read_fh, "<", $file_name
+			or die "could not open ${file_name}: $!\n";
+		$file_content = do { local $/ = undef; <$read_fh> };
+		close $read_fh
+			or die "unable to close: $!\n";
+	}
+
+	v_print("### Retrieving template\n");
+	($wiki_page_name = clean_filename($file_name)) =~ s/\.[^.]+$//;
+	$template = get_template($remote_url, $wiki_page_name);
+
+	v_print("### Parsing local content\n");
+	$new_content = $wiki->api({
+		action => 'parse',
+		text => $file_content,
+		title => $wiki_page_name
+	}, {
+		skip_encoding => 1
+	}) or die "No response from remote mediawiki\n";
+	$new_content = $new_content->{'parse'}->{'text'}->{'*'};
+
+	v_print("### Merging contents\n");
+	if ($preview_file_name eq EMPTY) {
+		($preview_file_name = $file_name) =~ s/\.[^.]+$/.html/;
+	}
+	open(my $save_fh, '>:encoding(UTF-8)', $preview_file_name)
+		or die "Could not open: $!\n";
+	print {$save_fh} merge_contents($template, $new_content, $remote_url);
+	close($save_fh)
+		or die "Could not close: $!\n";
+
+	v_print("### Results\n");
+	if ($autoload) {
+		v_print("Launching browser w/ file: ${preview_file_name}");
+		system('git', 'web--browse', $preview_file_name);
+	} else {
+		print {*STDERR} "Preview file saved as: ${preview_file_name}\n";
+	}
+
+	exit;
+}
+
+# uses global scope variable: $remote_name
+sub merge_contents {
+	my $template = shift;
+	my $content = shift;
+	my $remote_url = shift;
+	my ($content_tree, $html_tree, $mw_content_text);
+	my $template_content_id = 'bodyContent';
+
+	$html_tree = HTML::TreeBuilder->new;
+	$html_tree->parse($template);
+
+	$content_tree = HTML::TreeBuilder->new;
+	$content_tree->parse($content);
+
+	$template_content_id = Git::config("remote.${remote_name}.mwIDcontent")
+		|| $template_content_id;
+	v_print("Using '${template_content_id}' as the content ID\n");
+
+	$mw_content_text = $html_tree->look_down('id', $template_content_id);
+	if (!defined $mw_content_text) {
+		print {*STDERR} <<"CONFIG";
+Could not combine the new content with the template. You might want to
+configure `mediawiki.IDContent` in your config:
+	git config --add remote.${remote_name}.mwIDcontent <id>
+and re-run the command afterward.
+CONFIG
+		exit 1;
+	}
+	$mw_content_text->delete_content();
+	$mw_content_text->push_content($content_tree);
+
+	make_links_absolute($html_tree, $remote_url);
+
+	return $html_tree->as_HTML;
+}
+
+sub make_links_absolute {
+	my $html_tree = shift;
+	my $remote_url = shift;
+	for (@{ $html_tree->extract_links() }) {
+		my ($link, $element, $attr) = @{ $_ };
+		my $url = url($link)->canonical;
+		if ($url !~ /#/) {
+			$element->attr($attr, URI->new_abs($url, $remote_url));
+		}
+	}
+	return $html_tree;
+}
+
+sub is_valid_remote {
+	my $remote = shift;
+	my @remotes = git_cmd_try {
+		Git::command('remote') }
+		"%s failed w/ code %d";
+	my $found_remote = 0;
+	foreach my $existing_remote (@remotes) {
+		if ($existing_remote eq $remote) {
+			$found_remote = 1;
+			last;
+		}
+	}
+	return $found_remote;
+}
+
+sub find_mediawiki_remotes {
+	my @remotes = git_cmd_try {
+		Git::command('remote'); }
+		"%s failed w/ code %d";
+	my $remote_url;
+	my @valid_remotes = ();
+	foreach my $remote (@remotes) {
+		$remote_url = mediawiki_remote_url_maybe($remote);
+		if ($remote_url) {
+			push(@valid_remotes, $remote);
+		}
+	}
+	return @valid_remotes;
+}
+
+sub find_upstream_remote_name {
+	my $current_branch = git_cmd_try {
+		Git::command_oneline('symbolic-ref', '--short', 'HEAD') }
+		"%s failed w/ code %d";
+	return Git::config("branch.${current_branch}.remote");
+}
+
+sub mediawiki_remote_url_maybe {
+	my $remote = shift;
+
+	# Find remote url
+	my $remote_url = Git::config("remote.${remote}.url");
+	if ($remote_url =~ s/mediawiki::(.*)/$1/) {
+		return url($remote_url)->canonical;
+	}
+
+	return;
+}
+
+sub get_template {
+	my $url = shift;
+	my $page_name = shift;
+	my ($req, $res, $code, $url_after);
+
+	$req = LWP::UserAgent->new;
+	if ($verbose) {
+		$req->show_progress(1);
+	}
+
+	$res = $req->get("${url}/index.php?title=${page_name}");
+	if (!$res->is_success) {
+		$code = $res->code;
+		$url_after = $res->request()->uri(); # resolve all redirections
+		if ($code == HTTP_CODE_PAGE_NOT_FOUND) {
+			if ($verbose) {
+				print {*STDERR} <<"WARNING";
+Warning: Failed to retrieve '$page_name'. Create it on the mediawiki if you want
+all the links to work properly.
+Trying to use the mediawiki homepage as a fallback template ...
+WARNING
+			}
+
+			# LWP automatically redirects GET request
+			$res = $req->get("${url}/index.php");
+			if (!$res->is_success) {
+				$url_after = $res->request()->uri(); # resolve all redirections
+				die "Failed to get homepage @ ${url_after} w/ code ${code}\n";
+			}
+		} else {
+			die "Failed to get '${page_name}' @ ${url_after} w/ code ${code}\n";
+		}
+	}
+
+	return $res->decoded_content;
+}
+
+############################## Help Functions ##################################
+
+sub help {
+	print {*STDOUT} <<'END';
+usage: git mw <command> <args>
+
+git mw commands are:
+    help        Display help information about git mw
+    preview     Parse and render local file into HTML
+END
+	exit;
+}
diff --git a/third_party/git/contrib/mw-to-git/git-remote-mediawiki.perl b/third_party/git/contrib/mw-to-git/git-remote-mediawiki.perl
new file mode 100755
index 000000000000..d8ff2e69c498
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -0,0 +1,1374 @@
+#! /usr/bin/perl
+
+# Copyright (C) 2011
+#     Jérémie Nikaes <jeremie.nikaes@ensimag.imag.fr>
+#     Arnaud Lacurie <arnaud.lacurie@ensimag.imag.fr>
+#     Claire Fousse <claire.fousse@ensimag.imag.fr>
+#     David Amouyal <david.amouyal@ensimag.imag.fr>
+#     Matthieu Moy <matthieu.moy@grenoble-inp.fr>
+# License: GPL v2 or later
+
+# Gateway between Git and MediaWiki.
+# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
+
+use strict;
+use MediaWiki::API;
+use Git;
+use Git::Mediawiki qw(clean_filename smudge_filename connect_maybe
+					EMPTY HTTP_CODE_OK);
+use DateTime::Format::ISO8601;
+use warnings;
+
+# By default, use UTF-8 to communicate with Git and the user
+binmode STDERR, ':encoding(UTF-8)';
+binmode STDOUT, ':encoding(UTF-8)';
+
+use URI::Escape;
+
+# It's not always possible to delete pages (may require some
+# privileges). Deleted pages are replaced with this content.
+use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
+
+# It's not possible to create empty pages. New empty files in Git are
+# sent with this content instead.
+use constant EMPTY_CONTENT => "<!-- empty page -->\n";
+
+# used to reflect file creation or deletion in diff.
+use constant NULL_SHA1 => '0000000000000000000000000000000000000000';
+
+# Used on Git's side to reflect empty edit messages on the wiki
+use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+
+# Number of pages taken into account at once in the subroutine get_mw_page_list
+use constant SLICE_SIZE => 50;
+
+# Number of linked mediafiles to get at once in get_linked_mediafiles.
+# The query is split into small batches because of the MW API limit on
+# the number of links to be returned (500 links max).
+use constant BATCH_SIZE => 10;
+
+if (@ARGV != 2) {
+	exit_error_usage();
+}
+
+my $remotename = $ARGV[0];
+my $url = $ARGV[1];
+
+# Accept both space-separated and multiple keys in config file.
+# Spaces should be written as _ anyway because we'll use chomp.
+my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
+chomp(@tracked_pages);
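+# For example (hypothetical remote name and page titles):
+#   git config --add remote.origin.pages "Main_Page Another_Page"
+#   git config --add remote.origin.pages "Third_Page"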
+
+# Just like @tracked_pages, but for MediaWiki categories.
+my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
+chomp(@tracked_categories);
+
+# Just like @tracked_categories, but for MediaWiki namespaces.
+my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces"));
+for (@tracked_namespaces) { s/_/ /g; }
+chomp(@tracked_namespaces);
+
+# Import media files on pull
+my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
+chomp($import_media);
+$import_media = ($import_media eq 'true');
+
+# Export media files on push
+my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
+chomp($export_media);
+$export_media = !($export_media eq 'false');
+
+my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
+# Note: mwPassword is discouraged. Use the credential system instead.
+my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
+my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
+chomp($wiki_login);
+chomp($wiki_passwd);
+chomp($wiki_domain);
+
+# Import only last revisions (both for clone and fetch)
+my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
+chomp($shallow_import);
+$shallow_import = ($shallow_import eq 'true');
+
+# Fetch (clone and pull) by revisions instead of by pages. This behavior
+# is more efficient when we have a wiki with lots of pages and we fetch
+# the revisions quite often so that they concern only a few pages.
+# Possible values:
+# - by_rev: perform one query per new revision on the remote wiki
+# - by_page: query each tracked page for new revision
+my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
+if (!$fetch_strategy) {
+	$fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
+}
+chomp($fetch_strategy);
+if (!$fetch_strategy) {
+	$fetch_strategy = 'by_page';
+}
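+# For example (hypothetical remote name), the strategy can be chosen per
+# remote or globally:
+#   git config remote.origin.fetchStrategy by_rev
+#   git config mediawiki.fetchStrategy by_page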
+
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
+# Dumb push: don't update notes and mediawiki ref to reflect the last push.
+#
+# Configurable with mediawiki.dumbPush, or per-remote with
+# remote.<remotename>.dumbPush.
+#
+# This means the user will have to re-import the just-pushed
+# revisions. On the other hand, this means that the Git revisions
+# corresponding to MediaWiki revisions are all imported from the wiki,
+# regardless of whether they were initially created in Git or from the
+# web interface, hence all users will get the same history (i.e. if
+# the push from Git to MediaWiki loses some information, everybody
+# will get the history with information lost). If the import is
+# deterministic, this means everybody gets the same sha1 for each
+# MediaWiki revision.
+my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
+if (!$dumb_push) {
+	$dumb_push = run_git('config --get --bool mediawiki.dumbPush');
+}
+chomp($dumb_push);
+$dumb_push = ($dumb_push eq 'true');
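+# For example (hypothetical remote name):
+#   git config remote.origin.dumbPush true
+# or, for every mediawiki remote:
+#   git config mediawiki.dumbPush true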
+
+my $wiki_name = $url;
+$wiki_name =~ s{[^/]*://}{};
+# If URL is like http://user:password@example.com/, we clearly don't
+# want the password in $wiki_name. While we're there, also remove user
+# and '@' sign, to avoid author like MWUser@HTTPUser@host.com
+$wiki_name =~ s/^.*@//;
+
+# Commands parser
+while (<STDIN>) {
+	chomp;
+
+	if (!parse_command($_)) {
+		last;
+	}
+
+	BEGIN { $| = 1 } # flush STDOUT, to make sure the previous
+			 # command is fully processed.
+}
+
+########################## Functions ##############################
+
+## error handling
+sub exit_error_usage {
+	die "ERROR: git-remote-mediawiki module was not called with a correct number of\n" .
+	    "parameters\n" .
+	    "You may obtain this error because you attempted to run the git-remote-mediawiki\n" .
+            "module directly.\n" .
+	    "This module can be used the following way:\n" .
+	    "\tgit clone mediawiki://<address of a mediawiki>\n" .
+	    "Then, use git commit, push and pull as with every normal git repository.\n";
+}
+
+sub parse_command {
+	my ($line) = @_;
+	my @cmd = split(/ /, $line);
+	if (!defined $cmd[0]) {
+		return 0;
+	}
+	if ($cmd[0] eq 'capabilities') {
+		die("Too many arguments for capabilities\n")
+		    if (defined($cmd[1]));
+		mw_capabilities();
+	} elsif ($cmd[0] eq 'list') {
+		die("Too many arguments for list\n") if (defined($cmd[2]));
+		mw_list($cmd[1]);
+	} elsif ($cmd[0] eq 'import') {
+		die("Invalid argument for import\n")
+		    if ($cmd[1] eq EMPTY);
+		die("Too many arguments for import\n")
+		    if (defined($cmd[2]));
+		mw_import($cmd[1]);
+	} elsif ($cmd[0] eq 'option') {
+		die("Invalid arguments for option\n")
+		    if ($cmd[1] eq EMPTY || $cmd[2] eq EMPTY);
+		die("Too many arguments for option\n")
+		    if (defined($cmd[3]));
+		mw_option($cmd[1],$cmd[2]);
+	} elsif ($cmd[0] eq 'push') {
+		mw_push($cmd[1]);
+	} else {
+		print {*STDERR} "Unknown command. Aborting...\n";
+		return 0;
+	}
+	return 1;
+}
+
+# MediaWiki API instance, created lazily.
+my $mediawiki;
+
+sub fatal_mw_error {
+	my $action = shift;
+	print STDERR "fatal: could not $action.\n";
+	print STDERR "fatal: '$url' does not appear to be a mediawiki\n";
+	if ($url =~ /^https/) {
+		print STDERR "fatal: make sure '$url/api.php' is a valid page\n";
+		print STDERR "fatal: and the SSL certificate is correct.\n";
+	} else {
+		print STDERR "fatal: make sure '$url/api.php' is a valid page.\n";
+	}
+	print STDERR "fatal: (error " .
+	    $mediawiki->{error}->{code} . ': ' .
+	    $mediawiki->{error}->{details} . ")\n";
+	exit 1;
+}
+
+## Functions for listing pages on the remote wiki
+sub get_mw_tracked_pages {
+	my $pages = shift;
+	get_mw_page_list(\@tracked_pages, $pages);
+	return;
+}
+
+sub get_mw_page_list {
+	my $page_list = shift;
+	my $pages = shift;
+	my @some_pages = @{$page_list};
+	while (@some_pages) {
+		my $last_page = SLICE_SIZE;
+		if ($#some_pages < $last_page) {
+			$last_page = $#some_pages;
+		}
+		my @slice = @some_pages[0..$last_page];
+		get_mw_first_pages(\@slice, $pages);
+		@some_pages = @some_pages[(SLICE_SIZE + 1)..$#some_pages];
+	}
+	return;
+}
+
+sub get_mw_tracked_categories {
+	my $pages = shift;
+	foreach my $category (@tracked_categories) {
+		if (index($category, ':') < 0) {
+			# Mediawiki requires the Category
+			# prefix, but let's not force the user
+			# to specify it.
+			$category = "Category:${category}";
+		}
+		my $mw_pages = $mediawiki->list( {
+			action => 'query',
+			list => 'categorymembers',
+			cmtitle => $category,
+			cmlimit => 'max' } )
+			|| die $mediawiki->{error}->{code} . ': '
+				. $mediawiki->{error}->{details} . "\n";
+		foreach my $page (@{$mw_pages}) {
+			$pages->{$page->{title}} = $page;
+		}
+	}
+	return;
+}
+
+sub get_mw_tracked_namespaces {
+    my $pages = shift;
+    foreach my $local_namespace (sort @tracked_namespaces) {
+        my $namespace_id;
+        if ($local_namespace eq "(Main)") {
+            $namespace_id = 0;
+        } else {
+            $namespace_id = get_mw_namespace_id($local_namespace);
+        }
+        # virtual namespaces don't support allpages
+        next if !defined($namespace_id) || $namespace_id < 0;
+        my $mw_pages = $mediawiki->list( {
+            action => 'query',
+            list => 'allpages',
+            apnamespace => $namespace_id,
+            aplimit => 'max' } )
+            || die $mediawiki->{error}->{code} . ': '
+                . $mediawiki->{error}->{details} . "\n";
+        print {*STDERR} "$#{$mw_pages} found in namespace $local_namespace ($namespace_id)\n";
+        foreach my $page (@{$mw_pages}) {
+            $pages->{$page->{title}} = $page;
+        }
+    }
+    return;
+}
+
+sub get_mw_all_pages {
+	my $pages = shift;
+	# No user-provided list, get the list of pages from the API.
+	my $mw_pages = $mediawiki->list({
+		action => 'query',
+		list => 'allpages',
+		aplimit => 'max'
+	});
+	if (!defined($mw_pages)) {
+		fatal_mw_error("get the list of wiki pages");
+	}
+	foreach my $page (@{$mw_pages}) {
+		$pages->{$page->{title}} = $page;
+	}
+	return;
+}
+
+# queries the wiki for a set of pages. Meant to be used within a loop
+# querying the wiki for slices of page list.
+sub get_mw_first_pages {
+	my $some_pages = shift;
+	my @some_pages = @{$some_pages};
+
+	my $pages = shift;
+
+	# pattern 'page1|page2|...' required by the API
+	my $titles = join('|', @some_pages);
+
+	my $mw_pages = $mediawiki->api({
+		action => 'query',
+		titles => $titles,
+	});
+	if (!defined($mw_pages)) {
+		fatal_mw_error("query the list of wiki pages");
+	}
+	while (my ($id, $page) = each(%{$mw_pages->{query}->{pages}})) {
+		if ($id < 0) {
+			print {*STDERR} "Warning: page $page->{title} not found on wiki\n";
+		} else {
+			$pages->{$page->{title}} = $page;
+		}
+	}
+	return;
+}
+
+# Get the list of pages to be fetched according to configuration.
+sub get_mw_pages {
+	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+	print {*STDERR} "Listing pages on remote wiki...\n";
+
+	my %pages; # hash on page titles to avoid duplicates
+	my $user_defined;
+	if (@tracked_pages) {
+		$user_defined = 1;
+		# The user provided a list of pages titles, but we
+		# still need to query the API to get the page IDs.
+		get_mw_tracked_pages(\%pages);
+	}
+	if (@tracked_categories) {
+		$user_defined = 1;
+		get_mw_tracked_categories(\%pages);
+	}
+	if (@tracked_namespaces) {
+		$user_defined = 1;
+		get_mw_tracked_namespaces(\%pages);
+	}
+	if (!$user_defined) {
+		get_mw_all_pages(\%pages);
+	}
+	if ($import_media) {
+		print {*STDERR} "Getting media files for selected pages...\n";
+		if ($user_defined) {
+			get_linked_mediafiles(\%pages);
+		} else {
+			get_all_mediafiles(\%pages);
+		}
+	}
+	print {*STDERR} (scalar keys %pages) . " pages found.\n";
+	return %pages;
+}
+
+# usage: $out = run_git("command args");
+#        $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
+sub run_git {
+	my $args = shift;
+	my $encoding = (shift || 'encoding(UTF-8)');
+	open(my $git, "-|:${encoding}", "git ${args}")
+	    or die "Unable to fork: $!\n";
+	my $res = do {
+		local $/ = undef;
+		<$git>
+	};
+	close($git);
+
+	return $res;
+}
+
+
+sub get_all_mediafiles {
+	my $pages = shift;
+	# Attach the list of all pages for media files from the API;
+	# they are in a different namespace, and only one namespace
+	# can be queried at a time.
+	my $mw_pages = $mediawiki->list({
+		action => 'query',
+		list => 'allpages',
+		apnamespace => get_mw_namespace_id('File'),
+		aplimit => 'max'
+	});
+	if (!defined($mw_pages)) {
+		print {*STDERR} "fatal: could not get the list of pages for media files.\n";
+		print {*STDERR} "fatal: '$url' does not appear to be a mediawiki\n";
+		print {*STDERR} "fatal: make sure '$url/api.php' is a valid page.\n";
+		exit 1;
+	}
+	foreach my $page (@{$mw_pages}) {
+		$pages->{$page->{title}} = $page;
+	}
+	return;
+}
+
+sub get_linked_mediafiles {
+	my $pages = shift;
+	my @titles = map { $_->{title} } values(%{$pages});
+
+	my $batch = BATCH_SIZE;
+	while (@titles) {
+		if ($#titles < $batch) {
+			$batch = $#titles;
+		}
+		my @slice = @titles[0..$batch];
+
+		# pattern 'page1|page2|...' required by the API
+		my $mw_titles = join('|', @slice);
+
+		# Media files could be included or linked from
+		# a page, get all related
+		my $query = {
+			action => 'query',
+			prop => 'links|images',
+			titles => $mw_titles,
+			plnamespace => get_mw_namespace_id('File'),
+			pllimit => 'max'
+		};
+		my $result = $mediawiki->api($query);
+
+		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
+			my @media_titles;
+			if (defined($page->{links})) {
+				my @link_titles
+				    = map { $_->{title} } @{$page->{links}};
+				push(@media_titles, @link_titles);
+			}
+			if (defined($page->{images})) {
+				my @image_titles
+				    = map { $_->{title} } @{$page->{images}};
+				push(@media_titles, @image_titles);
+			}
+			if (@media_titles) {
+				get_mw_page_list(\@media_titles, $pages);
+			}
+		}
+
+		@titles = @titles[($batch+1)..$#titles];
+	}
+	return;
+}
+
+sub get_mw_mediafile_for_page_revision {
+	# Name of the file on Wiki, with the prefix.
+	my $filename = shift;
+	my $timestamp = shift;
+	my %mediafile;
+
+	# Check whether a media file with the given timestamp exists on
+	# MediaWiki. In that case download the file.
+	my $query = {
+		action => 'query',
+		prop => 'imageinfo',
+		titles => "File:${filename}",
+		iistart => $timestamp,
+		iiend => $timestamp,
+		iiprop => 'timestamp|archivename|url',
+		iilimit => 1
+	};
+	my $result = $mediawiki->api($query);
+
+	my ($fileid, $file) = each( %{$result->{query}->{pages}} );
+	# If not defined it means there is no revision of the file for
+	# given timestamp.
+	if (defined($file->{imageinfo})) {
+		$mediafile{title} = $filename;
+
+		my $fileinfo = pop(@{$file->{imageinfo}});
+		$mediafile{timestamp} = $fileinfo->{timestamp};
+		# Mediawiki::API's download function doesn't support https URLs
+		# and can't download old versions of files.
+		print {*STDERR} "\tDownloading file $mediafile{title}, version $mediafile{timestamp}\n";
+		$mediafile{content} = download_mw_mediafile($fileinfo->{url});
+	}
+	return %mediafile;
+}
+
+sub download_mw_mediafile {
+	my $download_url = shift;
+
+	my $response = $mediawiki->{ua}->get($download_url);
+	if ($response->code == HTTP_CODE_OK) {
+		# It is tempting to return
+		# $response->decoded_content({charset => "none"}), but
+		# when doing so, utf8::downgrade($content) fails with
+		# "Wide character in subroutine entry".
+		$response->decode();
+		return $response->content();
+	} else {
+		print {*STDERR} "Error downloading mediafile from :\n";
+		print {*STDERR} "URL: ${download_url}\n";
+		print {*STDERR} 'Server response: ' . $response->code . q{ } . $response->message . "\n";
+		exit 1;
+	}
+}
+
+sub get_last_local_revision {
+	# Get note regarding last mediawiki revision
+	my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
+	my @note_info = split(/ /, $note);
+
+	my $lastrevision_number;
+	if (!(defined($note_info[0]) && $note_info[0] eq 'mediawiki_revision:')) {
+		print {*STDERR} 'No previous mediawiki revision found';
+		$lastrevision_number = 0;
+	} else {
+		# Notes are formatted : mediawiki_revision: #number
+		$lastrevision_number = $note_info[1];
+		chomp($lastrevision_number);
+		print {*STDERR} "Last local mediawiki revision found is ${lastrevision_number}";
+	}
+	return $lastrevision_number;
+}
+
+# Get the last remote revision without taking into account which pages are
+# tracked or not. This function makes a single request to the wiki, thus
+# avoiding a loop over all tracked pages. This is useful for the fetch-by-rev
+# option.
+sub get_last_global_remote_rev {
+	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+	my $query = {
+		action => 'query',
+		list => 'recentchanges',
+		prop => 'revisions',
+		rclimit => '1',
+		rcdir => 'older',
+	};
+	my $result = $mediawiki->api($query);
+	return $result->{query}->{recentchanges}[0]->{revid};
+}
+
+# Get the last remote revision concerning the tracked pages and the tracked
+# categories.
+sub get_last_remote_revision {
+	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+	my %pages_hash = get_mw_pages();
+	my @pages = values(%pages_hash);
+
+	my $max_rev_num = 0;
+
+	print {*STDERR} "Getting last revision id on tracked pages...\n";
+
+	foreach my $page (@pages) {
+		my $id = $page->{pageid};
+
+		my $query = {
+			action => 'query',
+			prop => 'revisions',
+			rvprop => 'ids|timestamp',
+			pageids => $id,
+		};
+
+		my $result = $mediawiki->api($query);
+
+		my $lastrev = pop(@{$result->{query}->{pages}->{$id}->{revisions}});
+
+		$basetimestamps{$lastrev->{revid}} = $lastrev->{timestamp};
+
+		$max_rev_num = ($lastrev->{revid} > $max_rev_num ? $lastrev->{revid} : $max_rev_num);
+	}
+
+	print {*STDERR} "Last remote revision found is $max_rev_num.\n";
+	return $max_rev_num;
+}
+
+# Clean content before sending it to MediaWiki
+sub mediawiki_clean {
+	my $string = shift;
+	my $page_created = shift;
+	# MediaWiki does not allow trailing whitespace at the end of a page, and a page ends with a single \n.
+	# This function right-trims the string and appends a \n to follow this rule.
+	$string =~ s/\s+$//;
+	if ($string eq EMPTY && $page_created) {
+		# Creating empty pages is forbidden.
+		$string = EMPTY_CONTENT;
+	}
+	return $string."\n";
+}
+
+# Filter applied on MediaWiki data before adding them to Git
+sub mediawiki_smudge {
+	my $string = shift;
+	if ($string eq EMPTY_CONTENT) {
+		$string = EMPTY;
+	}
+	# This \n is important. This is due to MediaWiki's way of handling the end of files.
+	return "${string}\n";
+}
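+
+# Illustration (added): mediawiki_clean("Hello  \n\n", 0) returns "Hello\n",
+# and mediawiki_smudge(EMPTY_CONTENT) returns "\n", i.e. an empty page body
+# plus the trailing newline MediaWiki implies.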
+
+sub literal_data {
+	my ($content) = @_;
+	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+	return;
+}
+
+sub literal_data_raw {
+	# Output possibly binary content.
+	my ($content) = @_;
+	# Avoid confusion between size in bytes and in characters
+	utf8::downgrade($content);
+	binmode STDOUT, ':raw';
+	print {*STDOUT} 'data ', bytes::length($content), "\n", $content;
+	binmode STDOUT, ':encoding(UTF-8)';
+	return;
+}
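+
+# Illustration (added): literal_data("Hi") emits the fast-import command
+#   data 2
+#   Hi
+# where 2 is the payload length in bytes.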
+
+sub mw_capabilities {
+	# Revisions are imported to the private namespace
+	# refs/mediawiki/$remotename/ by the helper and fetched into
+	# refs/remotes/$remotename later by fetch.
+	print {*STDOUT} "refspec refs/heads/*:refs/mediawiki/${remotename}/*\n";
+	print {*STDOUT} "import\n";
+	print {*STDOUT} "list\n";
+	print {*STDOUT} "push\n";
+	if ($dumb_push) {
+		print {*STDOUT} "no-private-update\n";
+	}
+	print {*STDOUT} "\n";
+	return;
+}
+
+sub mw_list {
+	# MediaWiki does not have branches; we consider one branch arbitrarily
+	# called master, with HEAD pointing to it.
+	print {*STDOUT} "? refs/heads/master\n";
+	print {*STDOUT} "\@refs/heads/master HEAD\n";
+	print {*STDOUT} "\n";
+	return;
+}
+
+sub mw_option {
+	print {*STDERR} "remote-helper command 'option $_[0]' not yet implemented\n";
+	print {*STDOUT} "unsupported\n";
+	return;
+}
+
+sub fetch_mw_revisions_for_page {
+	my $page = shift;
+	my $id = shift;
+	my $fetch_from = shift;
+	my @page_revs = ();
+	my $query = {
+		action => 'query',
+		prop => 'revisions',
+		rvprop => 'ids',
+		rvdir => 'newer',
+		rvstartid => $fetch_from,
+		rvlimit => 500,
+		pageids => $id,
+
+		# Let MediaWiki know that we support the latest API.
+		continue => '',
+	};
+
+	my $revnum = 0;
+	# Get 500 revisions at a time due to the mediawiki api limit
+	while (1) {
+		my $result = $mediawiki->api($query);
+
+		# Parse each of those 500 revisions
+		foreach my $revision (@{$result->{query}->{pages}->{$id}->{revisions}}) {
+			my $page_rev_ids;
+			$page_rev_ids->{pageid} = $page->{pageid};
+			$page_rev_ids->{revid} = $revision->{revid};
+			push(@page_revs, $page_rev_ids);
+			$revnum++;
+		}
+
+		if ($result->{'query-continue'}) { # For legacy APIs
+			$query->{rvstartid} = $result->{'query-continue'}->{revisions}->{rvstartid};
+		} elsif ($result->{continue}) { # For newer APIs
+			$query->{rvstartid} = $result->{continue}->{rvcontinue};
+			$query->{continue} = $result->{continue}->{continue};
+		} else {
+			last;
+		}
+	}
+	if ($shallow_import && @page_revs) {
+		print {*STDERR} "  Found 1 revision (shallow import).\n";
+		@page_revs = sort {$b->{revid} <=> $a->{revid}} (@page_revs);
+		return $page_revs[0];
+	}
+	print {*STDERR} "  Found ${revnum} revision(s).\n";
+	return @page_revs;
+}
+
+sub fetch_mw_revisions {
+	my $pages = shift; my @pages = @{$pages};
+	my $fetch_from = shift;
+
+	my @revisions = ();
+	my $n = 1;
+	foreach my $page (@pages) {
+		my $id = $page->{pageid};
+		print {*STDERR} "page ${n}/", scalar(@pages), ': ', $page->{title}, "\n";
+		$n++;
+		my @page_revs = fetch_mw_revisions_for_page($page, $id, $fetch_from);
+		@revisions = (@page_revs, @revisions);
+	}
+
+	return ($n, @revisions);
+}
+
+sub fe_escape_path {
+	my $path = shift;
+	$path =~ s/\\/\\\\/g;
+	$path =~ s/"/\\"/g;
+	$path =~ s/\n/\\n/g;
+	return qq("${path}");
+}
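+
+# Illustration (added): fe_escape_path('dir/a"b') returns '"dir/a\"b"';
+# the path is double-quoted and embedded backslashes, quotes and newlines
+# are escaped as git fast-import expects.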
+
+sub import_file_revision {
+	my $commit = shift;
+	my %commit = %{$commit};
+	my $full_import = shift;
+	my $n = shift;
+	my $mediafile = shift;
+	my %mediafile;
+	if ($mediafile) {
+		%mediafile = %{$mediafile};
+	}
+
+	my $title = $commit{title};
+	my $comment = $commit{comment};
+	my $content = $commit{content};
+	my $author = $commit{author};
+	my $date = $commit{date};
+
+	print {*STDOUT} "commit refs/mediawiki/${remotename}/master\n";
+	print {*STDOUT} "mark :${n}\n";
+	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+	literal_data($comment);
+
+	# If it's not a clone, we need to know where to start from
+	if (!$full_import && $n == 1) {
+		print {*STDOUT} "from refs/mediawiki/${remotename}/master^0\n";
+	}
+	if ($content ne DELETED_CONTENT) {
+		print {*STDOUT} 'M 644 inline ' .
+		    fe_escape_path("${title}.mw") . "\n";
+		literal_data($content);
+		if (%mediafile) {
+			print {*STDOUT} 'M 644 inline '
+			    . fe_escape_path($mediafile{title}) . "\n";
+			literal_data_raw($mediafile{content});
+		}
+		print {*STDOUT} "\n\n";
+	} else {
+		print {*STDOUT} 'D ' . fe_escape_path("${title}.mw") . "\n";
+	}
+
+	# mediawiki revision number in the git note
+	if ($full_import && $n == 1) {
+		print {*STDOUT} "reset refs/notes/${remotename}/mediawiki\n";
+	}
+	print {*STDOUT} "commit refs/notes/${remotename}/mediawiki\n";
+	print {*STDOUT} "committer ${author} <${author}\@${wiki_name}> " . $date->epoch . " +0000\n";
+	literal_data('Note added by git-mediawiki during import');
+	if (!$full_import && $n == 1) {
+		print {*STDOUT} "from refs/notes/${remotename}/mediawiki^0\n";
+	}
+	print {*STDOUT} "N inline :${n}\n";
+	literal_data("mediawiki_revision: $commit{mw_revision}");
+	print {*STDOUT} "\n\n";
+	return;
+}
+
+# parse a sequence of
+# <cmd> <arg1>
+# <cmd> <arg2>
+# \n
+# (like batch sequence of import and sequence of push statements)
+sub get_more_refs {
+	my $cmd = shift;
+	my @refs;
+	while (1) {
+		my $line = <STDIN>;
+		if ($line =~ /^$cmd (.*)$/) {
+			push(@refs, $1);
+		} elsif ($line eq "\n") {
+			return @refs;
+		} else {
+			die("Invalid command in a '$cmd' batch: $_\n");
+		}
+	}
+	return;
+}
+
+sub mw_import {
+	# multiple import commands can follow each other.
+	my @refs = (shift, get_more_refs('import'));
+	foreach my $ref (@refs) {
+		mw_import_ref($ref);
+	}
+	print {*STDOUT} "done\n";
+	return;
+}
+
+sub mw_import_ref {
+	my $ref = shift;
+	# The remote helper will call "import HEAD" and
+	# "import refs/heads/master".
+	# Since HEAD is a symbolic ref to master (by convention,
+	# followed by the output of the command "list" that we gave),
+	# we don't need to do anything in this case.
+	if ($ref eq 'HEAD') {
+		return;
+	}
+
+	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+	print {*STDERR} "Searching revisions...\n";
+	my $last_local = get_last_local_revision();
+	my $fetch_from = $last_local + 1;
+	if ($fetch_from == 1) {
+		print {*STDERR} ", fetching from beginning.\n";
+	} else {
+		print {*STDERR} ", fetching from here.\n";
+	}
+
+	my $n = 0;
+	if ($fetch_strategy eq 'by_rev') {
+		print {*STDERR} "Fetching & writing export data by revs...\n";
+		$n = mw_import_ref_by_revs($fetch_from);
+	} elsif ($fetch_strategy eq 'by_page') {
+		print {*STDERR} "Fetching & writing export data by pages...\n";
+		$n = mw_import_ref_by_pages($fetch_from);
+	} else {
+		print {*STDERR} qq(fatal: invalid fetch strategy "${fetch_strategy}".\n);
+		print {*STDERR} "Check your configuration variables remote.${remotename}.fetchStrategy and mediawiki.fetchStrategy\n";
+		exit 1;
+	}
+
+	if ($fetch_from == 1 && $n == 0) {
+		print {*STDERR} "You appear to have cloned an empty MediaWiki.\n";
+		# Something has to be done remote-helper side. If nothing is done, an error is
+		# thrown saying that HEAD is referring to unknown object 0000000000000000000
+		# and the clone fails.
+	}
+	return;
+}
+
+sub mw_import_ref_by_pages {
+
+	my $fetch_from = shift;
+	my %pages_hash = get_mw_pages();
+	my @pages = values(%pages_hash);
+
+	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
+
+	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
+	my @revision_ids = map { $_->{revid} } @revisions;
+
+	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+sub mw_import_ref_by_revs {
+
+	my $fetch_from = shift;
+	my %pages_hash = get_mw_pages();
+
+	my $last_remote = get_last_global_remote_rev();
+	my @revision_ids = $fetch_from..$last_remote;
+	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
+}
+
+# Import revisions given in second argument (array of integers).
+# Only pages appearing in the third argument (hash indexed by page titles)
+# will be imported.
+sub mw_import_revids {
+	my $fetch_from = shift;
+	my $revision_ids = shift;
+	my $pages = shift;
+
+	my $n = 0;
+	my $n_actual = 0;
+	my $last_timestamp = 0; # Placeholder in case $rev->timestamp is undefined
+
+	foreach my $pagerevid (@{$revision_ids}) {
+	        # Count page even if we skip it, since we display
+		# $n/$total and $total includes skipped pages.
+		$n++;
+
+		# fetch the content of the pages
+		my $query = {
+			action => 'query',
+			prop => 'revisions',
+			rvprop => 'content|timestamp|comment|user|ids',
+			revids => $pagerevid,
+		};
+
+		my $result = $mediawiki->api($query);
+
+		if (!$result) {
+			die "Failed to retrieve modified page for revision $pagerevid\n";
+		}
+
+		if (defined($result->{query}->{badrevids}->{$pagerevid})) {
+			# The revision id does not exist on the remote wiki.
+			next;
+		}
+
+		if (!defined($result->{query}->{pages})) {
+			die "Invalid revision ${pagerevid}.\n";
+		}
+
+		my @result_pages = values(%{$result->{query}->{pages}});
+		my $result_page = $result_pages[0];
+		my $rev = $result_pages[0]->{revisions}->[0];
+
+		my $page_title = $result_page->{title};
+
+		if (!exists($pages->{$page_title})) {
+			print {*STDERR} "${n}/", scalar(@{$revision_ids}),
+				": Skipping revision #$rev->{revid} of ${page_title}\n";
+			next;
+		}
+
+		$n_actual++;
+
+		my %commit;
+		$commit{author} = $rev->{user} || 'Anonymous';
+		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
+		$commit{title} = smudge_filename($page_title);
+		$commit{mw_revision} = $rev->{revid};
+		$commit{content} = mediawiki_smudge($rev->{'*'});
+
+		if (!defined($rev->{timestamp})) {
+			$last_timestamp++;
+		} else {
+			$last_timestamp = $rev->{timestamp};
+		}
+		$commit{date} = DateTime::Format::ISO8601->parse_datetime($last_timestamp);
+
+		# Differentiates classic pages and media files.
+		my ($namespace, $filename) = $page_title =~ /^([^:]*):(.*)$/;
+		my %mediafile;
+		if ($namespace) {
+			my $id = get_mw_namespace_id($namespace);
+			if ($id && $id == get_mw_namespace_id('File')) {
+				%mediafile = get_mw_mediafile_for_page_revision($filename, $rev->{timestamp});
+			}
+		}
+		# If this is a revision of the media page for a new version
+		# of a file, do one common commit for both the file and the media page.
+		# Otherwise, commit only that page.
+		print {*STDERR} "${n}/", scalar(@{$revision_ids}), ": Revision #$rev->{revid} of $commit{title}\n";
+		import_file_revision(\%commit, ($fetch_from == 1), $n_actual, \%mediafile);
+	}
+
+	return $n_actual;
+}
+
+sub error_non_fast_forward {
+	my $advice = run_git('config --bool advice.pushNonFastForward');
+	chomp($advice);
+	if ($advice ne 'false') {
+		# Native git-push would show this after the summary.
+		# We can't ask it to display it cleanly, so print it
+		# ourselves beforehand.
+		print {*STDERR} "To prevent you from losing history, non-fast-forward updates were rejected\n";
+		print {*STDERR} "Merge the remote changes (e.g. 'git pull') before pushing again. See the\n";
+		print {*STDERR} "'Note about fast-forwards' section of 'git push --help' for details.\n";
+	}
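+	# The line below follows the remote-helper push protocol, e.g.
+	# (for the master ref this helper pushes to):
+	#   error refs/heads/master "non-fast-forward"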
+	print {*STDOUT} qq(error $_[0] "non-fast-forward"\n);
+	return 0;
+}
+
+sub mw_upload_file {
+	my $complete_file_name = shift;
+	my $new_sha1 = shift;
+	my $extension = shift;
+	my $file_deleted = shift;
+	my $summary = shift;
+	my $newrevid;
+	my $path = "File:${complete_file_name}";
+	my %hashFiles = get_allowed_file_extensions();
+	if (!exists($hashFiles{$extension})) {
+		print {*STDERR} "${complete_file_name} is not a permitted file on this wiki.\n";
+		print {*STDERR} "Check the configuration of file uploads in your mediawiki.\n";
+		return $newrevid;
+	}
+	# Deleting or uploading a file requires a privileged user
+	if ($file_deleted) {
+		$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+		my $query = {
+			action => 'delete',
+			title => $path,
+			reason => $summary
+		};
+		if (!$mediawiki->edit($query)) {
+			print {*STDERR} "Failed to delete file on remote wiki\n";
+			print {*STDERR} "Check your permissions on the remote site. Error code:\n";
+			print {*STDERR} $mediawiki->{error}->{code} . ':' . $mediawiki->{error}->{details};
+			exit 1;
+		}
+	} else {
+		# Don't let Perl try to interpret the file content as UTF-8 => use "raw"
+		my $content = run_git("cat-file blob ${new_sha1}", 'raw');
+		if ($content ne EMPTY) {
+			$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+			$mediawiki->{config}->{upload_url} =
+				"${url}/index.php/Special:Upload";
+			$mediawiki->edit({
+				action => 'upload',
+				filename => $complete_file_name,
+				comment => $summary,
+				file => [undef,
+					 $complete_file_name,
+					 Content => $content],
+				ignorewarnings => 1,
+			}, {
+				skip_encoding => 1
+			} ) || die $mediawiki->{error}->{code} . ':'
+				 . $mediawiki->{error}->{details} . "\n";
+			my $last_file_page = $mediawiki->get_page({title => $path});
+			$newrevid = $last_file_page->{revid};
+			print {*STDERR} "Pushed file: ${new_sha1} - ${complete_file_name}.\n";
+		} else {
+			print {*STDERR} "Empty file ${complete_file_name} not pushed.\n";
+		}
+	}
+	return $newrevid;
+}
+
+sub mw_push_file {
+	my $diff_info = shift;
+	# $diff_info contains a string in this format:
+	# 100644 100644 <sha1_of_blob_before_commit> <sha1_of_blob_now> <status>
+	my @diff_info_split = split(/[ \t]/, $diff_info);
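+	# After the split, indices [2] and [3] hold the blob sha1s before and
+	# after the commit (used below), and [4] holds the one-letter status
+	# (e.g. "M" for a modified file).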
+
+	# Filename, including .mw extension
+	my $complete_file_name = shift;
+	# Commit message
+	my $summary = shift;
+	# MediaWiki revision number. Keep the previous one by default,
+	# in case there's no edit to perform.
+	my $oldrevid = shift;
+	my $newrevid;
+
+	if ($summary eq EMPTY_MESSAGE) {
+		$summary = EMPTY;
+	}
+
+	my $new_sha1 = $diff_info_split[3];
+	my $old_sha1 = $diff_info_split[2];
+	my $page_created = ($old_sha1 eq NULL_SHA1);
+	my $page_deleted = ($new_sha1 eq NULL_SHA1);
+	$complete_file_name = clean_filename($complete_file_name);
+
+	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
+	if (!defined($extension)) {
+		$extension = EMPTY;
+	}
+	if ($extension eq 'mw') {
+		my $ns = get_mw_namespace_id_for_page($complete_file_name);
+		if ($ns && $ns == get_mw_namespace_id('File') && (!$export_media)) {
+			print {*STDERR} "Ignoring media file related page: ${complete_file_name}\n";
+			return ($oldrevid, 'ok');
+		}
+		my $file_content;
+		if ($page_deleted) {
+			# Deleting a page usually requires
+			# special privileges. A common
+			# convention is to replace the page
+			# with this content instead:
+			$file_content = DELETED_CONTENT;
+		} else {
+			$file_content = run_git("cat-file blob ${new_sha1}");
+		}
+
+		$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+		my $result = $mediawiki->edit( {
+			action => 'edit',
+			summary => $summary,
+			title => $title,
+			basetimestamp => $basetimestamps{$oldrevid},
+			text => mediawiki_clean($file_content, $page_created),
+				  }, {
+					  skip_encoding => 1 # Helps with names containing accented characters
+				  });
+		if (!$result) {
+			if ($mediawiki->{error}->{code} == 3) {
+				# Edit conflict, considered a non-fast-forward
+				print {*STDERR} 'Warning: Error ' .
+				    $mediawiki->{error}->{code} .
+				    ' from mediawiki: ' . $mediawiki->{error}->{details} .
+				    ".\n";
+				return ($oldrevid, 'non-fast-forward');
+			} else {
+				# Other errors. Shouldn't happen => just die()
+				die 'Fatal: Error ' .
+				    $mediawiki->{error}->{code} .
+				    ' from mediawiki: ' . $mediawiki->{error}->{details} . "\n";
+			}
+		}
+		$newrevid = $result->{edit}->{newrevid};
+		print {*STDERR} "Pushed file: ${new_sha1} - ${title}\n";
+	} elsif ($export_media) {
+		$newrevid = mw_upload_file($complete_file_name, $new_sha1,
+					   $extension, $page_deleted,
+					   $summary);
+	} else {
+		print {*STDERR} "Ignoring media file ${title}\n";
+	}
+	$newrevid = ($newrevid or $oldrevid);
+	return ($newrevid, 'ok');
+}
+
+sub mw_push {
+	# multiple push statements can follow each other
+	my @refsspecs = (shift, get_more_refs('push'));
+	my $pushed;
+	for my $refspec (@refsspecs) {
+		my ($force, $local, $remote) = $refspec =~ /^(\+)?([^:]*):([^:]*)$/
+		    or die("Invalid refspec for push. Expected <src>:<dst> or +<src>:<dst>\n");
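+		# A refspec here typically looks like
+		# "refs/heads/master:refs/heads/master", optionally with a
+		# leading "+" for a forced push (which only triggers the warning below).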
+		if ($force) {
+			print {*STDERR} "Warning: forced push not allowed on a MediaWiki.\n";
+		}
+		if ($local eq EMPTY) {
+			print {*STDERR} "Cannot delete remote branch on a MediaWiki\n";
+			print {*STDOUT} "error ${remote} cannot delete\n";
+			next;
+		}
+		if ($remote ne 'refs/heads/master') {
+			print {*STDERR} "Only push to the branch 'master' is supported on a MediaWiki\n";
+			print {*STDOUT} "error ${remote} only master allowed\n";
+			next;
+		}
+		if (mw_push_revision($local, $remote)) {
+			$pushed = 1;
+		}
+	}
+
+	# Notify Git that the push is done
+	print {*STDOUT} "\n";
+
+	if ($pushed && $dumb_push) {
+		print {*STDERR} "Just pushed some revisions to MediaWiki.\n";
+		print {*STDERR} "The pushed revisions now have to be re-imported, and your current branch\n";
+		print {*STDERR} "needs to be updated with these re-imported commits. You can do this with\n";
+		print {*STDERR} "\n";
+		print {*STDERR} "  git pull --rebase\n";
+		print {*STDERR} "\n";
+	}
+	return;
+}
+
+sub mw_push_revision {
+	my $local = shift;
+	my $remote = shift; # actually, this has to be "refs/heads/master" at this point.
+	my $last_local_revid = get_last_local_revision();
+	print {*STDERR} ".\n"; # Finish sentence started by get_last_local_revision()
+	my $last_remote_revid = get_last_remote_revision();
+	my $mw_revision = $last_remote_revid;
+
+	# Get sha1 of commit pointed by local HEAD
+	my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
+	chomp($HEAD_sha1);
+	# Get sha1 of commit pointed by remotes/$remotename/master
+	my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
+	chomp($remoteorigin_sha1);
+
+	if ($last_local_revid > 0 &&
+	    $last_local_revid < $last_remote_revid) {
+		return error_non_fast_forward($remote);
+	}
+
+	if ($HEAD_sha1 eq $remoteorigin_sha1) {
+		# nothing to push
+		return 0;
+	}
+
+	# Get every commit in between HEAD and refs/remotes/origin/master,
+	# including HEAD and refs/remotes/origin/master
+	my @commit_pairs = ();
+	if ($last_local_revid > 0) {
+		my $parsed_sha1 = $remoteorigin_sha1;
+		# Find a path from last MediaWiki commit to pushed commit
+		print {*STDERR} "Computing path from local to remote ...\n";
+		my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
+		my %local_ancestry;
+		foreach my $line (@local_ancestry) {
+			if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
+				foreach my $parent (split(/ /, $parents)) {
+					$local_ancestry{$parent} = $child;
+				}
+			} elsif ($line !~ /^([a-f0-9]+)/) {
+				die "Unexpected output from git rev-list: ${line}\n";
+			}
+		}
+		while ($parsed_sha1 ne $HEAD_sha1) {
+			my $child = $local_ancestry{$parsed_sha1};
+			if (!$child) {
+				print {*STDERR} "Cannot find a path in history from remote commit to last commit\n";
+				return error_non_fast_forward($remote);
+			}
+			push(@commit_pairs, [$parsed_sha1, $child]);
+			$parsed_sha1 = $child;
+		}
+	} else {
+		# No remote mediawiki revision. Export the whole
+		# history (linearized with --first-parent)
+		print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
+		my $history = run_git("rev-list --first-parent --children ${local}");
+		my @history = split(/\n/, $history);
+		@history = @history[1..$#history];
+		foreach my $line (reverse @history) {
+			my @commit_info_split = split(/[ \n]/, $line);
+			push(@commit_pairs, \@commit_info_split);
+		}
+	}
+
+	foreach my $commit_info_split (@commit_pairs) {
+		my $sha1_child = @{$commit_info_split}[0];
+		my $sha1_commit = @{$commit_info_split}[1];
+		my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
+		# TODO: we could detect renames and encode them with a #redirect on the wiki.
+		# For now, a rename is just a delete+add.
+		my @diff_info_list = split(/\0/, $diff_infos);
+		# Keep the subject line of the commit message as the MediaWiki comment for the revision
+		my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
+		chomp($commit_msg);
+		# Push every blob
+		while (@diff_info_list) {
+			my $status;
+			# git diff-tree -z gives an output like
+			# <metadata>\0<filename1>\0
+			# <metadata>\0<filename2>\0
+			# and we've split on \0.
+			my $info = shift(@diff_info_list);
+			my $file = shift(@diff_info_list);
+			($mw_revision, $status) = mw_push_file($info, $file, $commit_msg, $mw_revision);
+			if ($status eq 'non-fast-forward') {
+				# we may already have sent part of the
+				# commit to MediaWiki, but it's too
+				# late to cancel it. Stop the push in
+				# the middle, but still give an
+				# accurate error message.
+				return error_non_fast_forward($remote);
+			}
+			if ($status ne 'ok') {
+				die("Unknown error from mw_push_file()\n");
+			}
+		}
+		if (!$dumb_push) {
+			run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
+		}
+	}
+
+	print {*STDOUT} "ok ${remote}\n";
+	return 1;
+}
+
+sub get_allowed_file_extensions {
+	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+
+	my $query = {
+		action => 'query',
+		meta => 'siteinfo',
+		siprop => 'fileextensions'
+		};
+	my $result = $mediawiki->api($query);
+	my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
+	my %hashFile = map { $_ => 1 } @file_extensions;
+
+	return %hashFile;
+}
+
+# In-memory cache for MediaWiki namespace ids.
+my %namespace_id;
+
+# Namespaces whose id is cached in the configuration file
+# (to avoid duplicates)
+my %cached_mw_namespace_id;
+
+# Return MediaWiki id for a canonical namespace name.
+# Ex.: "File", "Project".
+sub get_mw_namespace_id {
+	$mediawiki = connect_maybe($mediawiki, $remotename, $url);
+	my $name = shift;
+
+	if (!exists $namespace_id{$name}) {
+		# Check the configuration file to see whether the record for that
+		# namespace is already cached. Namespaces are stored in the form
+		# "Name_of_namespace:Id_namespace", e.g. "File:6".
+		my @temp = split(/\n/,
+				 run_git("config --get-all remote.${remotename}.namespaceCache"));
+		chomp(@temp);
+		foreach my $ns (@temp) {
+			my ($n, $id) = split(/:/, $ns);
+			if ($id eq 'notANameSpace') {
+				$namespace_id{$n} = {is_namespace => 0};
+			} else {
+				$namespace_id{$n} = {is_namespace => 1, id => $id};
+			}
+			$cached_mw_namespace_id{$n} = 1;
+		}
+	}
+
+	if (!exists $namespace_id{$name}) {
+		print {*STDERR} "Namespace ${name} not found in cache, querying the wiki ...\n";
+		# Namespace not found => get the namespace id from MediaWiki and
+		# store it in the configuration file.
+		my $query = {
+			action => 'query',
+			meta => 'siteinfo',
+			siprop => 'namespaces'
+		};
+		my $result = $mediawiki->api($query);
+
+		while (my ($id, $ns) = each(%{$result->{query}->{namespaces}})) {
+			if (defined($ns->{id}) && defined($ns->{canonical})) {
+				$namespace_id{$ns->{canonical}} = {is_namespace => 1, id => $ns->{id}};
+				if ($ns->{'*'}) {
+					# alias (e.g. French Fichier: as an alias for the canonical File:)
+					$namespace_id{$ns->{'*'}} = {is_namespace => 1, id => $ns->{id}};
+				}
+			}
+		}
+	}
+
+	my $ns = $namespace_id{$name};
+	my $id;
+
+	if (!defined $ns) {
+		my @namespaces = map { s/ /_/g; $_; } sort keys %namespace_id;
+		print {*STDERR} "No such namespace ${name} on MediaWiki, known namespaces: @namespaces\n";
+		$ns = {is_namespace => 0};
+		$namespace_id{$name} = $ns;
+	}
+
+	if ($ns->{is_namespace}) {
+		$id = $ns->{id};
+	}
+
+	# Store "notANameSpace" as special value for inexisting namespaces
+	my $store_id = ($id || 'notANameSpace');
+
+	# Store explicitly requested namespaces on disk
+	if (!exists $cached_mw_namespace_id{$name}) {
+		run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
+		$cached_mw_namespace_id{$name} = 1;
+	}
+	return $id;
+}
+
+sub get_mw_namespace_id_for_page {
+	my $namespace = shift;
+	if ($namespace =~ /^([^:]*):/) {
+		return get_mw_namespace_id($namespace);
+	} else {
+		return;
+	}
+}
diff --git a/third_party/git/contrib/mw-to-git/git-remote-mediawiki.txt b/third_party/git/contrib/mw-to-git/git-remote-mediawiki.txt
new file mode 100644
index 000000000000..23b7ef9f6208
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/git-remote-mediawiki.txt
@@ -0,0 +1,7 @@
+Git-Mediawiki is a project that aims to create a gateway between Git
+and MediaWiki, allowing Git users to push to and pull from a MediaWiki
+just as they would with a regular Git repository, thanks to Git's
+remote helpers.
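+
+As a quick illustration (assuming the helper is installed; the URL is
+only a placeholder), a wiki can be cloned, edited and pushed back like
+any other repository:
+
+  git clone mediawiki::http://example.com/wiki mywiki
+  cd mywiki
+  # edit the *.mw page files, commit, then
+  git push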
+
+For more information, visit the wiki at
+https://github.com/moy/Git-Mediawiki/wiki
diff --git a/third_party/git/contrib/mw-to-git/t/.gitignore b/third_party/git/contrib/mw-to-git/t/.gitignore
new file mode 100644
index 000000000000..a7a40b49644f
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/.gitignore
@@ -0,0 +1,4 @@
+WEB/
+wiki/
+trash directory.t*/
+test-results/
diff --git a/third_party/git/contrib/mw-to-git/t/Makefile b/third_party/git/contrib/mw-to-git/t/Makefile
new file mode 100644
index 000000000000..f422203fa069
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/Makefile
@@ -0,0 +1,31 @@
+#
+# Copyright (C) 2012
+#     Charles Roussel <charles.roussel@ensimag.imag.fr>
+#     Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#     Julien Khayat <julien.khayat@ensimag.imag.fr>
+#     Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#     Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+## Test git-remote-mediawiki
+
+all: test
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+T = $(wildcard t[0-9][0-9][0-9][0-9]-*.sh)
+
+.PHONY: help test clean all
+
+help:
+	@echo 'Run "$(MAKE) test" to launch test scripts'
+	@echo 'Run "$(MAKE) clean" to remove trash folders'
+
+test:
+	@for t in $(T); do \
+		echo "$$t"; \
+		"./$$t" || exit 1; \
+	done
+
+clean:
+	$(RM) -r 'trash directory'.*
diff --git a/third_party/git/contrib/mw-to-git/t/README b/third_party/git/contrib/mw-to-git/t/README
new file mode 100644
index 000000000000..2ee34be7e491
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/README
@@ -0,0 +1,124 @@
+Tests for Mediawiki-to-Git
+==========================
+
+Introduction
+------------
+This manual describes how to install the git-remote-mediawiki test
+environment on a machine with Git installed on it.
+
+Prerequisites
+-------------
+
+In order to run this test environment correctly, you will need to
+install the following packages (Debian/Ubuntu names, may need to be
+adapted for another distribution):
+
+* lighttpd
+* php5
+* php5-cgi
+* php5-cli
+* php5-curl
+* php5-sqlite
+
+Principles and Technical Choices
+--------------------------------
+
+The test environment makes it easy to install and manipulate one or
+several MediaWiki instances. To allow developers to run the test suite
+easily, the environment does not require root privileges (except to
+install the required packages, if needed). It starts a web server
+instance under the user's account (using lighttpd helps greatly here),
+and does not need a separate database daemon (thanks to the use of
+SQLite).
+
+Run the test environment
+------------------------
+
+Install a new wiki
+~~~~~~~~~~~~~~~~~~
+
+Once you have all the prerequisites, you need to install a MediaWiki
+instance on your machine. Even if you already have one, it is still
+strongly recommended to install one with the provided script. Here is
+how to do it:
+
+a. change directory to contrib/mw-to-git/t/
+b. if needed, edit test.config to choose your installation parameters
+c. run `./install-wiki.sh install`
+d. check in your favourite web browser that your wiki is correctly
+   installed.
+
+Remove an existing wiki
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Edit the file test.config to fit the wiki you want to delete, and then
+execute the command `./install-wiki.sh delete` from the
+contrib/mw-to-git/t directory.
+
+Run the existing tests
+~~~~~~~~~~~~~~~~~~~~~~
+
+The provided tests are currently in the `contrib/mw-to-git/t` directory.
+The files are all the t936[0-9]-*.sh shell scripts.
+
+a. Run all tests:
+To do so, run "make test" from the contrib/mw-to-git/ directory.
+
+b. Run a specific test:
+To run a given test <test_name>, run ./<test_name> from the
+contrib/mw-to-git/t directory.
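+For example: `./t9360-mw-to-git-clone.sh`.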
+
+How to create new tests
+-----------------------
+
+Available functions
+~~~~~~~~~~~~~~~~~~~
+
+The test environment of git-remote-mediawiki provides some functions
+useful for testing its behaviour. For more details about the functions'
+parameters, please refer to the `test-gitmw-lib.sh` and
+`test-gitmw.pl` files.
+
+** `test_check_wiki_precond`:
+Check whether the tests must be skipped. Please use this function
+at the beginning of each new test file.
+
+** `wiki_getpage`:
+Fetch a given page from the wiki and put its content in the
+directory given as a parameter.
+
+** `wiki_delete_page`:
+Delete a given page from the wiki.
+
+** `wiki_edit_page`:
+Create or modify a given page in the wiki. You can specify several
+parameters, such as a summary for the edit, or add the page to a
+given category.
+See test-gitmw.pl for more details.
+
+** `wiki_getallpage`:
+Fetch all pages from the wiki into a given directory. The directory
+is created if it does not exist.
+
+** `test_diff_directories`:
+Compare the content of two directories. The content must be the same.
+Use this function to compare the content of a Git directory with that
+of a wiki directory created by wiki_getallpage.
+
+** `test_contains_N_files`:
+Check that the given directory contains a given number of files.
+
+** `wiki_page_exists`:
+Test whether a given page exists on the wiki.
+
+** `wiki_reset`:
+Reset the wiki, i.e. flush the database. Use this function at the
+beginning of each new test, unless the test reuses the same wiki
+(and history) as the previous test.
+
+How to write a new test
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Please follow the standards set by Git; see git/t/README.
+New files should be named t936[0-9]-*.sh.
+Be sure to reset your wiki regularly with the `wiki_reset` function.
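+
+As a minimal sketch (the file name t9369-my-test.sh, the page name Foo
+and its content are only placeholders), a new test could look like:
+
+	#!/bin/sh
+	test_description='my new git-remote-mediawiki test'
+
+	. ./test-gitmw-lib.sh
+	. $TEST_DIRECTORY/test-lib.sh
+
+	test_check_precond   # as in the existing t936*.sh scripts
+
+	test_expect_success 'clone sees a page created on the wiki' '
+		wiki_reset &&
+		wiki_editpage Foo "some content" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+		wiki_getallpage ref_page &&
+		test_diff_directories mw_dir ref_page
+	'
+
+	test_done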
diff --git a/third_party/git/contrib/mw-to-git/t/install-wiki.sh b/third_party/git/contrib/mw-to-git/t/install-wiki.sh
new file mode 100755
index 000000000000..c215213c4bfd
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/install-wiki.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+# This script installs or deletes a MediaWiki on your computer.
+# It requires a web server with PHP and SQLite running. In addition, if you
+# do not have MediaWiki sources on your computer, the option 'install'
+# downloads them for you.
+# Please set the CONFIGURATION VARIABLES in ./test-gitmw-lib.sh
+
+WIKI_TEST_DIR=$(cd "$(dirname "$0")" && pwd)
+
+if test -z "$WIKI_TEST_DIR"
+then
+	WIKI_TEST_DIR=.
+fi
+
+. "$WIKI_TEST_DIR"/test-gitmw-lib.sh
+usage () {
+	echo "usage: "
+	echo "	./install-wiki.sh <install | delete | --help>"
+	echo "		install | -i :	Install a wiki on your computer."
+	echo "		delete | -d : Delete the wiki and all its pages and "
+	echo "			content."
+	echo "		start  | -s : Start the previously configured lighttpd daemon"
+	echo "		stop        : Stop lighttpd daemon."
+}
+
+
+# Argument: install, delete, start, stop, --help | -h
+case "$1" in
+	"install" | "-i")
+		wiki_install
+		exit 0
+		;;
+	"delete" | "-d")
+		wiki_delete
+		exit 0
+		;;
+	"start" | "-s")
+		start_lighttpd
+		exit
+		;;
+	"stop")
+		stop_lighttpd
+		exit
+		;;
+	"--help" | "-h")
+		usage
+		exit 0
+		;;
+	*)
+		echo "Invalid argument: $1"
+		usage
+		exit 1
+		;;
+esac
diff --git a/third_party/git/contrib/mw-to-git/t/install-wiki/.gitignore b/third_party/git/contrib/mw-to-git/t/install-wiki/.gitignore
new file mode 100644
index 000000000000..b5a2a4408c59
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/install-wiki/.gitignore
@@ -0,0 +1 @@
+wikidb.sqlite
diff --git a/third_party/git/contrib/mw-to-git/t/install-wiki/LocalSettings.php b/third_party/git/contrib/mw-to-git/t/install-wiki/LocalSettings.php
new file mode 100644
index 000000000000..745e47e88173
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/install-wiki/LocalSettings.php
@@ -0,0 +1,129 @@
+<?php
+# This file was automatically generated by the MediaWiki 1.19.0
+# installer. If you make manual changes, please keep track in case you
+# need to recreate them later.
+#
+# See includes/DefaultSettings.php for all configurable settings
+# and their default values, but don't forget to make changes in _this_
+# file, not there.
+#
+# Further documentation for configuration settings may be found at:
+# http://www.mediawiki.org/wiki/Manual:Configuration_settings
+
+# Protect against web entry
+if ( !defined( 'MEDIAWIKI' ) ) {
+	exit;
+}
+
+## Uncomment this to disable output compression
+# $wgDisableOutputCompression = true;
+
+$wgSitename      = "Git-MediaWiki-Test";
+$wgMetaNamespace = "Git-MediaWiki-Test";
+
+## The URL base path to the directory containing the wiki;
+## defaults for all runtime URL paths are based off of this.
+## For more information on customizing the URLs please see:
+## http://www.mediawiki.org/wiki/Manual:Short_URL
+$wgScriptPath       = "@WG_SCRIPT_PATH@";
+$wgScriptExtension  = ".php";
+
+## The protocol and server name to use in fully-qualified URLs
+$wgServer           = "@WG_SERVER@";
+
+## The relative URL path to the skins directory
+$wgStylePath        = "$wgScriptPath/skins";
+
+## The relative URL path to the logo.  Make sure you change this from the default,
+## or else you'll overwrite your logo when you upgrade!
+$wgLogo             = "$wgStylePath/common/images/wiki.png";
+
+## UPO means: this is also a user preference option
+
+$wgEnableEmail      = true;
+$wgEnableUserEmail  = true; # UPO
+
+$wgEmergencyContact = "apache@localhost";
+$wgPasswordSender   = "apache@localhost";
+
+$wgEnotifUserTalk      = false; # UPO
+$wgEnotifWatchlist     = false; # UPO
+$wgEmailAuthentication = true;
+
+## Database settings
+$wgDBtype           = "sqlite";
+$wgDBserver         = "";
+$wgDBname           = "@WG_SQLITE_DATAFILE@";
+$wgDBuser           = "";
+$wgDBpassword       = "";
+
+# SQLite-specific settings
+$wgSQLiteDataDir    = "@WG_SQLITE_DATADIR@";
+
+
+## Shared memory settings
+$wgMainCacheType    = CACHE_NONE;
+$wgMemCachedServers = array();
+
+## To enable image uploads, make sure the 'images' directory
+## is writable, then set this to true:
+$wgEnableUploads  = true;
+$wgUseImageMagick = true;
+$wgImageMagickConvertCommand ="@CONVERT@";
+$wgFileExtensions[] = 'txt';
+
+# InstantCommons allows wiki to use images from http://commons.wikimedia.org
+$wgUseInstantCommons  = false;
+
+## If you use ImageMagick (or any other shell command) on a
+## Linux server, this will need to be set to the name of an
+## available UTF-8 locale
+$wgShellLocale = "en_US.utf8";
+
+## If you want to use image uploads under safe mode,
+## create the directories images/archive, images/thumb and
+## images/temp, and make them all writable. Then uncomment
+## this, if it's not already uncommented:
+#$wgHashedUploadDirectory = false;
+
+## Set $wgCacheDirectory to a writable directory on the web server
+## to make your wiki go slightly faster. The directory should not
+## be publicly accessible from the web.
+#$wgCacheDirectory = "$IP/cache";
+
+# Site language code, should be one of the list in ./languages/Names.php
+$wgLanguageCode = "en";
+
+$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
+#$wgSecretKey = "@SECRETKEY@";
+
+
+# Site upgrade key. Must be set to a string (default provided) to turn on the
+# web installer while LocalSettings.php is in place
+$wgUpgradeKey = "ddae7dc87cd0a645";
+
+## Default skin: you can change the default skin. Use the internal symbolic
+## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
+$wgDefaultSkin = "vector";
+
+## For attaching licensing metadata to pages, and displaying an
+## appropriate copyright notice / icon. GNU Free Documentation
+## License and Creative Commons licenses are supported so far.
+$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
+$wgRightsUrl  = "";
+$wgRightsText = "";
+$wgRightsIcon = "";
+
+# Path to the GNU diff3 utility. Used for conflict resolution.
+$wgDiff3 = "/usr/bin/diff3";
+
+# Query string length limit for ResourceLoader. You should only set this if
+# your web server has a query string length limit (then set it to that limit),
+# or if you have suhosin.get.max_value_length set in php.ini (then set it to
+# that value)
+$wgResourceLoaderMaxQueryLength = -1;
+
+
+
+# End of automatically generated settings.
+# Add more configuration options below.
diff --git a/third_party/git/contrib/mw-to-git/t/install-wiki/db_install.php b/third_party/git/contrib/mw-to-git/t/install-wiki/db_install.php
new file mode 100644
index 000000000000..b033849800bc
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/install-wiki/db_install.php
@@ -0,0 +1,120 @@
+<?php
+/**
+ * This script generates a SQLite database for MediaWiki version 1.19.0.
+ * You must specify the database name (argument 1), the admin login
+ * (argument 2), its password (argument 3), the folder where the
+ * database file is located (absolute path in argument 4) and the web
+ * server port (argument 5); these match the $argv indices used below.
+ * It is used by the install-wiki.sh script to ease the installation
+ * of a MediaWiki.
+ *
+ * In order to generate a SQLite database file, MediaWiki asks the user
+ * to submit some forms in its web browser. This script simulates this
+ * behavior through the functions <get> and <submit>.
+ *
+ */
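+
+// A hypothetical invocation (values are placeholders; the argument
+// order follows the docblock above):
+//   php db_install.php wikidb WikiAdmin AdminPass /path/to/wiki/tmp 8080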
+$argc = $_SERVER['argc'];
+$argv = $_SERVER['argv'];
+
+$login = $argv[2];
+$pass = $argv[3];
+$tmp = $argv[4];
+$port = $argv[5];
+
+$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
+$db_dir = urlencode($tmp);
+$tmp_cookie = tempnam($tmp, "COOKIE_");
+/*
+ * Fetches a page with cURL.
+ */
+function get($page_name = "") {
+	$curl = curl_init();
+	$page_name_add = "";
+	if ($page_name != "") {
+		$page_name_add = '?page='.$page_name;
+	}
+	$url = $GLOBALS['url'].$page_name_add;
+	$tmp_cookie = $GLOBALS['tmp_cookie'];
+	curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+	curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+	curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+	curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+	curl_setopt($curl, CURLOPT_HEADER, true);
+	curl_setopt($curl, CURLOPT_URL, $url);
+
+	$page = curl_exec($curl);
+	if (!$page) {
+		die("Could not get page: $url\n");
+	}
+	curl_close($curl);
+	return $page;
+}
+
+/*
+ * Submits a form with cURL.
+ */
+function submit($page_name, $option = "") {
+	$curl = curl_init();
+	$datapost = 'submit-continue=Continue+%E2%86%92';
+	if ($option != "") {
+		$datapost = $option.'&'.$datapost;
+	}
+	$url = $GLOBALS['url'].'?page='.$page_name;
+	$tmp_cookie = $GLOBALS['tmp_cookie'];
+	curl_setopt($curl, CURLOPT_URL, $url);
+	curl_setopt($curl, CURLOPT_POST, true);
+	curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
+	curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
+	curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
+	curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
+	curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
+
+	$page = curl_exec($curl);
+	if (!$page) {
+		die("Could not get page: $url\n");
+	}
+	curl_close($curl);
+	return "$page";
+}
+
+/*
+ * The script proper starts here: it simulates the behavior of a user
+ * submitting forms to generate the database file.
+ * Note that this simulation was made for MediaWiki version 1.19.0;
+ * we can't assume it works with other versions.
+ *
+ */
+
+$page = get();
+if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
+		$page, $matches)) {
+	echo "Unexpected content for page downloaded:\n";
+	echo "$page";
+	die;
+};
+$timestamp = $matches[1];
+$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
+$page = submit('Language', $language);
+
+submit('Welcome');
+
+$db_config = 'DBType=sqlite';
+$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
+$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
+submit('DBConnect', $db_config);
+
+$wiki_config = 'config_wgSitename=TEST';
+$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
+$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
+$wiki_config = $wiki_config.'&config__AdminName='.$login;
+
+$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
+$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
+
+$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
+$wiki_config = $wiki_config.'&config__SkipOptional=skip';
+submit('Name', $wiki_config);
+submit('Install');
+submit('Install');
+
+unlink($tmp_cookie);
+?>
diff --git a/third_party/git/contrib/mw-to-git/t/push-pull-tests.sh b/third_party/git/contrib/mw-to-git/t/push-pull-tests.sh
new file mode 100644
index 000000000000..9da2dc5ff036
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/push-pull-tests.sh
@@ -0,0 +1,144 @@
+test_push_pull () {
+
+	test_expect_success 'Git pull works after adding a new wiki page' '
+		wiki_reset &&
+
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+		wiki_editpage Foo "page created after the git clone" false &&
+
+		(
+			cd mw_dir_1 &&
+			git pull
+		) &&
+
+		wiki_getallpage ref_page_1 &&
+		test_diff_directories mw_dir_1 ref_page_1
+	'
+
+	test_expect_success 'Git pull works after editing a wiki page' '
+		wiki_reset &&
+
+		wiki_editpage Foo "page created before the git clone" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+		wiki_editpage Foo "new line added on the wiki" true &&
+
+		(
+			cd mw_dir_2 &&
+			git pull
+		) &&
+
+		wiki_getallpage ref_page_2 &&
+		test_diff_directories mw_dir_2 ref_page_2
+	'
+
+	test_expect_success 'git pull works on conflict handled by auto-merge' '
+		wiki_reset &&
+
+		wiki_editpage Foo "1 init
+3
+5
+	" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+
+		wiki_editpage Foo "1 init
+2 content added on wiki after clone
+3
+5
+	" false &&
+
+		(
+			cd mw_dir_3 &&
+		echo "1 init
+3
+4 content added on git after clone
+5
+" >Foo.mw &&
+			git commit -am "conflicting change on foo" &&
+			git pull &&
+			git push
+		)
+	'
+
+	test_expect_success 'Git push works after adding a file .mw' '
+		wiki_reset &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+		wiki_getallpage ref_page_4 &&
+		(
+			cd mw_dir_4 &&
+			test_path_is_missing Foo.mw &&
+			touch Foo.mw &&
+			echo "hello world" >>Foo.mw &&
+			git add Foo.mw &&
+			git commit -m "Foo" &&
+			git push
+		) &&
+		wiki_getallpage ref_page_4 &&
+		test_diff_directories mw_dir_4 ref_page_4
+	'
+
+	test_expect_success 'Git push works after editing a file .mw' '
+		wiki_reset &&
+		wiki_editpage "Foo" "page created before the git clone" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+
+		(
+			cd mw_dir_5 &&
+			echo "new line added in the file Foo.mw" >>Foo.mw &&
+			git commit -am "edit file Foo.mw" &&
+			git push
+		) &&
+
+		wiki_getallpage ref_page_5 &&
+		test_diff_directories mw_dir_5 ref_page_5
+	'
+
+	test_expect_failure 'Git push works after deleting a file' '
+		wiki_reset &&
+		wiki_editpage Foo "wiki page added before git clone" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+
+		(
+			cd mw_dir_6 &&
+			git rm Foo.mw &&
+			git commit -am "page Foo.mw deleted" &&
+			git push
+		) &&
+
+		test_must_fail wiki_page_exist Foo
+	'
+
+	test_expect_success 'Merge conflict expected and solving it' '
+		wiki_reset &&
+
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+		wiki_editpage Foo "1 conflict
+3 wiki
+4" false &&
+
+		(
+			cd mw_dir_7 &&
+		echo "1 conflict
+2 git
+4" >Foo.mw &&
+			git add Foo.mw &&
+			git commit -m "conflict created" &&
+			test_must_fail git pull &&
+			"$PERL_PATH" -pi -e "s/[<=>].*//g" Foo.mw &&
+			git commit -am "merge conflict solved" &&
+			git push
+		)
+	'
+
+	test_expect_failure 'git pull works after deleting a wiki page' '
+		wiki_reset &&
+		wiki_editpage Foo "wiki page added before the git clone" false &&
+		git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+
+		wiki_delete_page Foo &&
+		(
+			cd mw_dir_8 &&
+			git pull &&
+			test_path_is_missing Foo.mw
+		)
+	'
+}
diff --git a/third_party/git/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh b/third_party/git/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
new file mode 100755
index 000000000000..9106833578e0
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/t9360-mw-to-git-clone.sh
@@ -0,0 +1,257 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+#     Charles Roussel <charles.roussel@ensimag.imag.fr>
+#     Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#     Julien Khayat <julien.khayat@ensimag.imag.fr>
+#     Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#     Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+
+test_description='Test the Git Mediawiki remote helper: git clone'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone creates the expected git log with one file' '
+	wiki_reset &&
+	wiki_editpage foo "this is not important" false -c cat -s "this must be the same" &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+	(
+		cd mw_dir_1 &&
+		git log --format=%s HEAD^..HEAD >log.tmp
+	) &&
+	echo "this must be the same" >msg.tmp &&
+	diff -b mw_dir_1/log.tmp msg.tmp
+'
+
+
+test_expect_success 'Git clone creates the expected git log with multiple files' '
+	wiki_reset &&
+	wiki_editpage daddy "this is not important" false -s="this must be the same" &&
+	wiki_editpage daddy "neither is this" true -s="this must also be the same" &&
+	wiki_editpage daddy "neither is this" true -s="same same same" &&
+	wiki_editpage dj "dont care" false -s="identical" &&
+	wiki_editpage dj "dont care either" true -s="identical too" &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+	(
+		cd mw_dir_2 &&
+		git log --format=%s Daddy.mw  >logDaddy.tmp &&
+		git log --format=%s Dj.mw >logDj.tmp
+	) &&
+	echo "same same same" >msgDaddy.tmp &&
+	echo "this must also be the same" >>msgDaddy.tmp &&
+	echo "this must be the same" >>msgDaddy.tmp &&
+	echo "identical too" >msgDj.tmp &&
+	echo "identical" >>msgDj.tmp &&
+	diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
+	diff -b mw_dir_2/logDj.tmp msgDj.tmp
+'
+
+
+test_expect_success 'Git clone creates only Main_Page.mw with an empty wiki' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+	test_contains_N_files mw_dir_3 1 &&
+	test_path_is_file mw_dir_3/Main_Page.mw
+'
+
+test_expect_success 'Git clone does not fetch a deleted page' '
+	wiki_reset &&
+	wiki_editpage foo "this page must be deleted before the clone" false &&
+	wiki_delete_page foo &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+	test_contains_N_files mw_dir_4 1 &&
+	test_path_is_file mw_dir_4/Main_Page.mw &&
+	test_path_is_missing mw_dir_4/Foo.mw
+'
+
+test_expect_success 'Git clone works with page added' '
+	wiki_reset &&
+	wiki_editpage foo " I will be cloned" false &&
+	wiki_editpage bar "I will be cloned" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+	wiki_getallpage ref_page_5 &&
+	test_diff_directories mw_dir_5 ref_page_5 &&
+	wiki_delete_page foo &&
+	wiki_delete_page bar
+'
+
+test_expect_success 'Git clone works with an edited page ' '
+	wiki_reset &&
+	wiki_editpage foo "this page will be edited" \
+		false -s "first edition of page foo"&&
+	wiki_editpage foo "this page has been edited and must be on the clone " true &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+	test_path_is_file mw_dir_6/Foo.mw &&
+	test_path_is_file mw_dir_6/Main_Page.mw &&
+	wiki_getallpage mw_dir_6/page_ref_6 &&
+	test_diff_directories mw_dir_6 mw_dir_6/page_ref_6 &&
+	(
+		cd mw_dir_6 &&
+		git log --format=%s HEAD^ Foo.mw > ../Foo.log
+	) &&
+	echo "first edition of page foo" > FooExpect.log &&
+	diff FooExpect.log Foo.log
+'
+
+
+test_expect_success 'Git clone works with several pages and some deleted ' '
+	wiki_reset &&
+	wiki_editpage foo "this page will not be deleted" false &&
+	wiki_editpage bar "I must not be erased" false &&
+	wiki_editpage namnam "I will not be there at the end" false &&
+	wiki_editpage nyancat "nyan nyan nyan delete me" false &&
+	wiki_delete_page namnam &&
+	wiki_delete_page nyancat &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+	test_path_is_file mw_dir_7/Foo.mw &&
+	test_path_is_file mw_dir_7/Bar.mw &&
+	test_path_is_missing mw_dir_7/Namnam.mw &&
+	test_path_is_missing mw_dir_7/Nyancat.mw &&
+	wiki_getallpage mw_dir_7/page_ref_7 &&
+	test_diff_directories mw_dir_7 mw_dir_7/page_ref_7
+'
+
+
+test_expect_success 'Git clone works with one specific page cloned ' '
+	wiki_reset &&
+	wiki_editpage foo "I will not be cloned" false &&
+	wiki_editpage bar "Do not clone me" false &&
+	wiki_editpage namnam "I will be cloned :)" false  -s="this log must stay" &&
+	wiki_editpage nyancat "nyan nyan nyan you cant clone me" false &&
+	git clone -c remote.origin.pages=namnam \
+		mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+	test_contains_N_files mw_dir_8 1 &&
+	test_path_is_file mw_dir_8/Namnam.mw &&
+	test_path_is_missing mw_dir_8/Main_Page.mw &&
+	(
+		cd mw_dir_8 &&
+		echo "this log must stay" >msg.tmp &&
+		git log --format=%s >log.tmp &&
+		diff -b msg.tmp log.tmp
+	) &&
+	wiki_check_content mw_dir_8/Namnam.mw Namnam
+'
+
+test_expect_success 'Git clone works with multiple specific pages cloned' '
+	wiki_reset &&
+	wiki_editpage foo "I will be there" false &&
+	wiki_editpage bar "I will not disappear" false &&
+	wiki_editpage namnam "I be erased" false &&
+	wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
+	wiki_delete_page namnam &&
+	git clone -c remote.origin.pages="foo bar nyancat namnam" \
+		mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+	test_contains_N_files mw_dir_9 3 &&
+	test_path_is_missing mw_dir_9/Namnam.mw &&
+	test_path_is_file mw_dir_9/Foo.mw &&
+	test_path_is_file mw_dir_9/Nyancat.mw &&
+	test_path_is_file mw_dir_9/Bar.mw &&
+	wiki_check_content mw_dir_9/Foo.mw Foo &&
+	wiki_check_content mw_dir_9/Bar.mw Bar &&
+	wiki_check_content mw_dir_9/Nyancat.mw Nyancat
+'
+
+test_expect_success 'Mediawiki-clone of several specific pages on wiki' '
+	wiki_reset &&
+	wiki_editpage foo "foo 1" false &&
+	wiki_editpage bar "bar 1" false &&
+	wiki_editpage dummy "dummy 1" false &&
+	wiki_editpage cloned_1 "cloned_1 1" false &&
+	wiki_editpage cloned_2 "cloned_2 2" false &&
+	wiki_editpage cloned_3 "cloned_3 3" false &&
+	mkdir -p ref_page_10 &&
+	wiki_getpage cloned_1 ref_page_10 &&
+	wiki_getpage cloned_2 ref_page_10 &&
+	wiki_getpage cloned_3 ref_page_10 &&
+	git clone -c remote.origin.pages="cloned_1 cloned_2 cloned_3" \
+		mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+	test_diff_directories mw_dir_10 ref_page_10
+'
+
+test_expect_success 'Git clone works with the shallow option' '
+	wiki_reset &&
+	wiki_editpage foo "1st revision, should be cloned" false &&
+	wiki_editpage bar "1st revision, should be cloned" false &&
+	wiki_editpage nyan "1st revision, should not be cloned" false &&
+	wiki_editpage nyan "2nd revision, should be cloned" false &&
+	git -c remote.origin.shallow=true clone \
+		mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+	test_contains_N_files mw_dir_11 4 &&
+	test_path_is_file mw_dir_11/Nyan.mw &&
+	test_path_is_file mw_dir_11/Foo.mw &&
+	test_path_is_file mw_dir_11/Bar.mw &&
+	test_path_is_file mw_dir_11/Main_Page.mw &&
+	(
+		cd mw_dir_11 &&
+		test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
+		test $(git log --oneline Foo.mw | wc -l) -eq 1 &&
+		test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
+		test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
+	) &&
+	wiki_check_content mw_dir_11/Nyan.mw Nyan &&
+	wiki_check_content mw_dir_11/Foo.mw Foo &&
+	wiki_check_content mw_dir_11/Bar.mw Bar &&
+	wiki_check_content mw_dir_11/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Git clone works with the shallow option with a deleted page' '
+	wiki_reset &&
+	wiki_editpage foo "1st revision, will be deleted" false &&
+	wiki_editpage bar "1st revision, should be cloned" false &&
+	wiki_editpage nyan "1st revision, should not be cloned" false &&
+	wiki_editpage nyan "2nd revision, should be cloned" false &&
+	wiki_delete_page foo &&
+	git -c remote.origin.shallow=true clone \
+		mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+	test_contains_N_files mw_dir_12 3 &&
+	test_path_is_file mw_dir_12/Nyan.mw &&
+	test_path_is_missing mw_dir_12/Foo.mw &&
+	test_path_is_file mw_dir_12/Bar.mw &&
+	test_path_is_file mw_dir_12/Main_Page.mw &&
+	(
+		cd mw_dir_12 &&
+		test $(git log --oneline Nyan.mw | wc -l) -eq 1 &&
+		test $(git log --oneline Bar.mw | wc -l) -eq 1 &&
+		test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
+	) &&
+	wiki_check_content mw_dir_12/Nyan.mw Nyan &&
+	wiki_check_content mw_dir_12/Bar.mw Bar &&
+	wiki_check_content mw_dir_12/Main_Page.mw Main_Page
+'
+
+test_expect_success 'Test of fetching a category' '
+	wiki_reset &&
+	wiki_editpage Foo "I will be cloned" false -c=Category &&
+	wiki_editpage Bar "Meet me on the repository" false -c=Category &&
+	wiki_editpage Dummy "I will not come" false &&
+	wiki_editpage BarWrong "I will stay online only" false -c=NotCategory &&
+	git clone -c remote.origin.categories="Category" \
+		mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+	wiki_getallpage ref_page_13 Category &&
+	test_diff_directories mw_dir_13 ref_page_13
+'
+
+test_expect_success 'Test of resistance to modification of category on wiki for clone' '
+	wiki_reset &&
+	wiki_editpage Tobedeleted "this page will be deleted" false -c=Catone &&
+	wiki_editpage Tobeedited "this page will be modified" false -c=Catone &&
+	wiki_editpage Normalone "this page wont be modified and will be on git" false -c=Catone &&
+	wiki_editpage Notconsidered "this page will not appear on local" false &&
+	wiki_editpage Othercategory "this page will not appear on local" false -c=Cattwo &&
+	wiki_editpage Tobeedited "this page have been modified" true -c=Catone &&
+	wiki_delete_page Tobedeleted &&
+	git clone -c remote.origin.categories="Catone" \
+		mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+	wiki_getallpage ref_page_14 Catone &&
+	test_diff_directories mw_dir_14 ref_page_14
+'
+
+test_done
diff --git a/third_party/git/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh b/third_party/git/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
new file mode 100755
index 000000000000..9ea201459b53
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/t9361-mw-to-git-push-pull.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+#     Charles Roussel <charles.roussel@ensimag.imag.fr>
+#     Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#     Julien Khayat <julien.khayat@ensimag.imag.fr>
+#     Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#     Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_push_pull
+
+test_done
diff --git a/third_party/git/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh b/third_party/git/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
new file mode 100755
index 000000000000..6b0dbdac4d35
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/t9362-mw-to-git-utf8.sh
@@ -0,0 +1,347 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+#     Charles Roussel <charles.roussel@ensimag.imag.fr>
+#     Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#     Julien Khayat <julien.khayat@ensimag.imag.fr>
+#     Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#     Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test git-mediawiki with special characters in filenames'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_expect_success 'Git clone works for a wiki with accents in the page names' '
+	wiki_reset &&
+	wiki_editpage féé "This page must be délétéd before clone" false &&
+	wiki_editpage kèè "This page must be deleted before clone" false &&
+	wiki_editpage hàà "This page must be deleted before clone" false &&
+	wiki_editpage kîî "This page must be deleted before clone" false &&
+	wiki_editpage foo "This page must be deleted before clone" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_1 &&
+	wiki_getallpage ref_page_1 &&
+	test_diff_directories mw_dir_1 ref_page_1
+'
+
+
+test_expect_success 'Git pull works with a wiki with accents in the page names' '
+	wiki_reset &&
+	wiki_editpage kîî "this page must be cloned" false &&
+	wiki_editpage foo "this page must be cloned" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_2 &&
+	wiki_editpage éàîôû "This page must be pulled" false &&
+	(
+		cd mw_dir_2 &&
+		git pull
+	) &&
+	wiki_getallpage ref_page_2 &&
+	test_diff_directories mw_dir_2 ref_page_2
+'
+
+
+test_expect_success 'Cloning a chosen page works with accents' '
+	wiki_reset &&
+	wiki_editpage kîî "this page must be cloned" false &&
+	git clone -c remote.origin.pages=kîî \
+		mediawiki::'"$WIKI_URL"' mw_dir_3 &&
+	wiki_check_content mw_dir_3/Kîî.mw Kîî &&
+	test_path_is_file mw_dir_3/Kîî.mw &&
+	rm -rf mw_dir_3
+'
+
+
+test_expect_success 'The shallow option works with accents' '
+	wiki_reset &&
+	wiki_editpage néoà "1st revision, should not be cloned" false &&
+	wiki_editpage néoà "2nd revision, should be cloned" false &&
+	git -c remote.origin.shallow=true clone \
+		mediawiki::'"$WIKI_URL"' mw_dir_4 &&
+	test_contains_N_files mw_dir_4 2 &&
+	test_path_is_file mw_dir_4/Néoà.mw &&
+	test_path_is_file mw_dir_4/Main_Page.mw &&
+	(
+		cd mw_dir_4 &&
+		test $(git log --oneline Néoà.mw | wc -l) -eq 1 &&
+		test $(git log --oneline Main_Page.mw | wc -l ) -eq 1
+	) &&
+	wiki_check_content mw_dir_4/Néoà.mw Néoà &&
+	wiki_check_content mw_dir_4/Main_Page.mw Main_Page
+'
+
+
+test_expect_success 'Cloning works when page name first letter has an accent' '
+	wiki_reset &&
+	wiki_editpage îî "this page must be cloned" false &&
+	git clone -c remote.origin.pages=îî \
+		mediawiki::'"$WIKI_URL"' mw_dir_5 &&
+	test_path_is_file mw_dir_5/Îî.mw &&
+	wiki_check_content mw_dir_5/Îî.mw Îî
+'
+
+
+test_expect_success 'Git push works with a wiki with accents' '
+	wiki_reset &&
+	wiki_editpage féé "lots of accents : éèàÖ" false &&
+	wiki_editpage foo "this page must be cloned" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_6 &&
+	(
+		cd mw_dir_6 &&
+		echo "A wild Pîkächû appears on the wiki" >Pîkächû.mw &&
+		git add Pîkächû.mw &&
+		git commit -m "A new page appears" &&
+		git push
+	) &&
+	wiki_getallpage ref_page_6 &&
+	test_diff_directories mw_dir_6 ref_page_6
+'
+
+test_expect_success 'Git clone works with accents and spaces' '
+	wiki_reset &&
+	wiki_editpage "é à î" "this page must be délété before the clone" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_7 &&
+	wiki_getallpage ref_page_7 &&
+	test_diff_directories mw_dir_7 ref_page_7
+'
+
+test_expect_success 'character $ in page name (mw -> git)' '
+	wiki_reset &&
+	wiki_editpage file_\$_foo "expect to be called file_$_foo" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_8 &&
+	test_path_is_file mw_dir_8/File_\$_foo.mw &&
+	wiki_getallpage ref_page_8 &&
+	test_diff_directories mw_dir_8 ref_page_8
+'
+
+
+
+test_expect_success 'character $ in file name (git -> mw) ' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_9 &&
+	(
+		cd mw_dir_9 &&
+		echo "this file is called File_\$_foo.mw" >File_\$_foo.mw &&
+		git add . &&
+		git commit -am "file File_\$_foo.mw" &&
+		git pull &&
+		git push
+	) &&
+	wiki_getallpage ref_page_9 &&
+	test_diff_directories mw_dir_9 ref_page_9
+'
+
+
+test_expect_failure 'capital at the beginning of file names' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_10 &&
+	(
+		cd mw_dir_10 &&
+		echo "my new file foo" >foo.mw &&
+		echo "my new file Foo... Finger crossed" >Foo.mw &&
+		git add . &&
+		git commit -am "file foo.mw" &&
+		git pull &&
+		git push
+	) &&
+	wiki_getallpage ref_page_10 &&
+	test_diff_directories mw_dir_10 ref_page_10
+'
+
+
+test_expect_failure 'special character at the beginning of file name from mw to git' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_11 &&
+	wiki_editpage {char_1 "expect to be renamed {char_1" false &&
+	wiki_editpage [char_2 "expect to be renamed [char_2" false &&
+	(
+		cd mw_dir_11 &&
+		git pull
+	) &&
+	test_path_is_file mw_dir_11/{char_1 &&
+	test_path_is_file mw_dir_11/[char_2
+'
+
+test_expect_success 'Pull page with title containing ":" other than namespace separator' '
+	wiki_editpage Foo:Bar content false &&
+	(
+		cd mw_dir_11 &&
+		git pull
+	) &&
+	test_path_is_file mw_dir_11/Foo:Bar.mw
+'
+
+test_expect_success 'Push page with title containing ":" other than namespace separator' '
+	(
+		cd mw_dir_11 &&
+		echo content >NotANameSpace:Page.mw &&
+		git add NotANameSpace:Page.mw &&
+		git commit -m "add page with colon" &&
+		git push
+	) &&
+	wiki_page_exist NotANameSpace:Page
+'
+
+test_expect_success 'test of correct formatting for file name from mw to git' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_12 &&
+	wiki_editpage char_%_7b_1 "expect to be renamed char{_1" false &&
+	wiki_editpage char_%_5b_2 "expect to be renamed char{_2" false &&
+	(
+		cd mw_dir_12 &&
+		git pull
+	) &&
+	test_path_is_file mw_dir_12/Char\{_1.mw &&
+	test_path_is_file mw_dir_12/Char\[_2.mw &&
+	wiki_getallpage ref_page_12 &&
+	mv ref_page_12/Char_%_7b_1.mw ref_page_12/Char\{_1.mw &&
+	mv ref_page_12/Char_%_5b_2.mw ref_page_12/Char\[_2.mw &&
+	test_diff_directories mw_dir_12 ref_page_12
+'
+
+
+test_expect_failure 'test of correct formatting for file name beginning with special character' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_13 &&
+	(
+		cd mw_dir_13 &&
+		echo "my new file {char_1" >\{char_1.mw &&
+		echo "my new file [char_2" >\[char_2.mw &&
+		git add . &&
+		git commit -am "committing some exotic file name..." &&
+		git push &&
+		git pull
+	) &&
+	wiki_getallpage ref_page_13 &&
+	test_path_is_file ref_page_13/{char_1.mw &&
+	test_path_is_file ref_page_13/[char_2.mw &&
+	test_diff_directories mw_dir_13 ref_page_13
+'
+
+
+test_expect_success 'test of correct formatting for file name from git to mw' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_14 &&
+	(
+		cd mw_dir_14 &&
+		echo "my new file char{_1" >Char\{_1.mw &&
+		echo "my new file char[_2" >Char\[_2.mw &&
+		git add . &&
+		git commit -m "committing some exotic file name..." &&
+		git push
+	) &&
+	wiki_getallpage ref_page_14 &&
+	mv mw_dir_14/Char\{_1.mw mw_dir_14/Char_%_7b_1.mw &&
+	mv mw_dir_14/Char\[_2.mw mw_dir_14/Char_%_5b_2.mw &&
+	test_diff_directories mw_dir_14 ref_page_14
+'
+
+
+test_expect_success 'git clone with /' '
+	wiki_reset &&
+	wiki_editpage \/fo\/o "this is not important" false -c=Deleted &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+	test_path_is_file mw_dir_15/%2Ffo%2Fo.mw &&
+	wiki_check_content mw_dir_15/%2Ffo%2Fo.mw \/fo\/o
+'
+
+
+test_expect_success 'git push with /' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+	echo "I will be on the wiki" >mw_dir_16/%2Ffo%2Fo.mw &&
+	(
+		cd mw_dir_16 &&
+		git add %2Ffo%2Fo.mw &&
+		git commit -m " %2Ffo%2Fo added" &&
+		git push
+	) &&
+	wiki_page_exist \/fo\/o &&
+	wiki_check_content mw_dir_16/%2Ffo%2Fo.mw \/fo\/o
+
+'
+
+
+test_expect_success 'git clone with \' '
+	wiki_reset &&
+	wiki_editpage \\ko\\o "this is not important" false -c=Deleted &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+	test_path_is_file mw_dir_17/\\ko\\o.mw &&
+	wiki_check_content mw_dir_17/\\ko\\o.mw \\ko\\o
+'
+
+
+test_expect_success 'git push with \' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_18 &&
+	echo "I will be on the wiki" >mw_dir_18/\\ko\\o.mw &&
+	(
+		cd mw_dir_18 &&
+		git add \\ko\\o.mw &&
+		git commit -m " \\ko\\o added" &&
+		git push
+	)&&
+	wiki_page_exist \\ko\\o &&
+	wiki_check_content mw_dir_18/\\ko\\o.mw \\ko\\o
+
+'
+
+test_expect_success 'git clone with \ in format control' '
+	wiki_reset &&
+	wiki_editpage \\no\\o "this is not important" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_19 &&
+	test_path_is_file mw_dir_19/\\no\\o.mw &&
+	wiki_check_content mw_dir_19/\\no\\o.mw \\no\\o
+'
+
+
+test_expect_success 'git push with \ in format control' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_20 &&
+	echo "I will be on the wiki" >mw_dir_20/\\fo\\o.mw &&
+	(
+		cd mw_dir_20 &&
+		git add \\fo\\o.mw &&
+		git commit -m " \\fo\\o added" &&
+		git push
+	)&&
+	wiki_page_exist \\fo\\o &&
+	wiki_check_content mw_dir_20/\\fo\\o.mw \\fo\\o
+
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (mw -> git)' '
+	wiki_reset &&
+	wiki_editpage \"file\"_\\_foo "expect to be called \"file\"_\\_foo" false &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_21 &&
+	test_path_is_file mw_dir_21/\"file\"_\\_foo.mw &&
+	wiki_getallpage ref_page_21 &&
+	test_diff_directories mw_dir_21 ref_page_21
+'
+
+
+test_expect_success 'fast-import meta-characters in page name (git -> mw) ' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir_22 &&
+	(
+		cd mw_dir_22 &&
+		echo "this file is called \"file\"_\\_foo.mw" >\"file\"_\\_foo &&
+		git add . &&
+		git commit -am "file \"file\"_\\_foo" &&
+		git pull &&
+		git push
+	) &&
+	wiki_getallpage ref_page_22 &&
+	test_diff_directories mw_dir_22 ref_page_22
+'
+
+
+test_done
diff --git a/third_party/git/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh b/third_party/git/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
new file mode 100755
index 000000000000..3ff3a095670e
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/t9363-mw-to-git-export-import.sh
@@ -0,0 +1,217 @@
+#!/bin/sh
+#
+# Copyright (C) 2012
+#     Charles Roussel <charles.roussel@ensimag.imag.fr>
+#     Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#     Julien Khayat <julien.khayat@ensimag.imag.fr>
+#     Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#     Simon Perrat <simon.perrat@ensimag.imag.fr>
+#
+# License: GPL v2 or later
+
+# tests for git-remote-mediawiki
+
+test_description='Test the Git Mediawiki remote helper: git push and git pull simple test cases'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+
+test_check_precond
+
+
+test_git_reimport () {
+	git -c remote.origin.dumbPush=true push &&
+	git -c remote.origin.mediaImport=true pull --rebase
+}
+
+# Don't bother with permissions, be administrator by default
+test_expect_success 'setup config' '
+	git config --global remote.origin.mwLogin WikiAdmin &&
+	git config --global remote.origin.mwPassword AdminPass &&
+	test_might_fail git config --global --unset remote.origin.mediaImport
+'
+
+test_expect_success 'git push can upload media (File:) files' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	(
+		cd mw_dir &&
+		echo "hello world" >Foo.txt &&
+		git add Foo.txt &&
+		git commit -m "add a text file" &&
+		git push &&
+		"$PERL_PATH" -e "print STDOUT \"binary content: \".chr(255);" >Foo.txt &&
+		git add Foo.txt &&
+		git commit -m "add a text file with binary content" &&
+		git push
+	)
+'
+
+test_expect_success 'git clone works on previously created wiki with media files' '
+	test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+	git clone -c remote.origin.mediaimport=true \
+		mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+	test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
+	(cd mw_dir_clone && git checkout HEAD^) &&
+	(cd mw_dir && git checkout HEAD^) &&
+	test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
+'
+
+test_expect_success 'git push can upload media (File:) files containing valid UTF-8' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	(
+		cd mw_dir &&
+		"$PERL_PATH" -e "print STDOUT \"UTF-8 content: éèàéê€.\";" >Bar.txt &&
+		git add Bar.txt &&
+		git commit -m "add a text file with UTF-8 content" &&
+		git push
+	)
+'
+
+test_expect_success 'git clone works on previously created wiki with media files containing valid UTF-8' '
+	test_when_finished "rm -rf mw_dir mw_dir_clone" &&
+	git clone -c remote.origin.mediaimport=true \
+		mediawiki::'"$WIKI_URL"' mw_dir_clone &&
+	test_cmp mw_dir_clone/Bar.txt mw_dir/Bar.txt
+'
+
+test_expect_success 'git push & pull work with locally renamed media files' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	test_when_finished "rm -fr mw_dir" &&
+	(
+		cd mw_dir &&
+		echo "A File" >Foo.txt &&
+		git add Foo.txt &&
+		git commit -m "add a file" &&
+		git mv Foo.txt Bar.txt &&
+		git commit -m "Rename a file" &&
+		test_git_reimport &&
+		echo "A File" >expect &&
+		test_cmp expect Bar.txt &&
+		test_path_is_missing Foo.txt
+	)
+'
+
+test_expect_success 'git push can propagate local page deletion' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	test_when_finished "rm -fr mw_dir" &&
+	(
+		cd mw_dir &&
+		test_path_is_missing Foo.mw &&
+		echo "hello world" >Foo.mw &&
+		git add Foo.mw &&
+		git commit -m "Add the page Foo" &&
+		git push &&
+		rm -f Foo.mw &&
+		git commit -am "Delete the page Foo" &&
+		test_git_reimport &&
+		test_path_is_missing Foo.mw
+	)
+'
+
+test_expect_success 'git push can propagate local media file deletion' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	test_when_finished "rm -fr mw_dir" &&
+	(
+		cd mw_dir &&
+		echo "hello world" >Foo.txt &&
+		git add Foo.txt &&
+		git commit -m "Add the text file Foo" &&
+		git rm Foo.txt &&
+		git commit -m "Delete the file Foo" &&
+		test_git_reimport &&
+		test_path_is_missing Foo.txt
+	)
+'
+
+# Known failure: the file is correctly uploaded and then deleted, but
+# since no page links to it, the import (which looks at page revisions)
+# does not notice the file deletion on the wiki. We fetch the list of
+# files from the wiki, but as the file is deleted, it does not appear.
+test_expect_failure 'git pull correctly imports media file deletion when no page links to it' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	test_when_finished "rm -fr mw_dir" &&
+	(
+		cd mw_dir &&
+		echo "hello world" >Foo.txt &&
+		git add Foo.txt &&
+		git commit -m "Add the text file Foo" &&
+		git push &&
+		git rm Foo.txt &&
+		git commit -m "Delete the file Foo" &&
+		test_git_reimport &&
+		test_path_is_missing Foo.txt
+	)
+'
+
+test_expect_success 'git push properly warns about insufficient permissions' '
+	wiki_reset &&
+	git clone mediawiki::'"$WIKI_URL"' mw_dir &&
+	test_when_finished "rm -fr mw_dir" &&
+	(
+		cd mw_dir &&
+		echo "A File" >foo.forbidden &&
+		git add foo.forbidden &&
+		git commit -m "add a file" &&
+		git push 2>actual &&
+		test_i18ngrep "foo.forbidden is not a permitted file" actual
+	)
+'
+
+test_expect_success 'setup a repository with media files' '
+	wiki_reset &&
+	wiki_editpage testpage "I am linking a file [[File:File.txt]]" false &&
+	echo "File content" >File.txt &&
+	wiki_upload_file File.txt &&
+	echo "Another file content" >AnotherFile.txt &&
+	wiki_upload_file AnotherFile.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=true' '
+	git clone -c remote.origin.pages=testpage \
+		  -c remote.origin.mediaimport=true \
+			mediawiki::'"$WIKI_URL"' mw_dir_15 &&
+	test_when_finished "rm -rf mw_dir_15" &&
+	test_contains_N_files mw_dir_15 3 &&
+	test_path_is_file mw_dir_15/Testpage.mw &&
+	test_path_is_file mw_dir_15/File:File.txt.mw &&
+	test_path_is_file mw_dir_15/File.txt &&
+	test_path_is_missing mw_dir_15/Main_Page.mw &&
+	test_path_is_missing mw_dir_15/File:AnotherFile.txt.mw &&
+	test_path_is_missing mw_dir_15/AnotherFile.txt &&
+	wiki_check_content mw_dir_15/Testpage.mw Testpage &&
+	test_cmp mw_dir_15/File.txt File.txt
+'
+
+test_expect_success 'git clone works with one specific page cloned and mediaimport=false' '
+	test_when_finished "rm -rf mw_dir_16" &&
+	git clone -c remote.origin.pages=testpage \
+			mediawiki::'"$WIKI_URL"' mw_dir_16 &&
+	test_contains_N_files mw_dir_16 1 &&
+	test_path_is_file mw_dir_16/Testpage.mw &&
+	test_path_is_missing mw_dir_16/File:File.txt.mw &&
+	test_path_is_missing mw_dir_16/File.txt &&
+	test_path_is_missing mw_dir_16/Main_Page.mw &&
+	wiki_check_content mw_dir_16/Testpage.mw Testpage
+'
+
+# should behave like mediaimport=false
+test_expect_success 'git clone works with one specific page cloned and mediaimport unset' '
+	test_when_finished "rm -fr mw_dir_17" &&
+	git clone -c remote.origin.pages=testpage \
+		mediawiki::'"$WIKI_URL"' mw_dir_17 &&
+	test_contains_N_files mw_dir_17 1 &&
+	test_path_is_file mw_dir_17/Testpage.mw &&
+	test_path_is_missing mw_dir_17/File:File.txt.mw &&
+	test_path_is_missing mw_dir_17/File.txt &&
+	test_path_is_missing mw_dir_17/Main_Page.mw &&
+	wiki_check_content mw_dir_17/Testpage.mw Testpage
+'
+
+test_done
diff --git a/third_party/git/contrib/mw-to-git/t/t9364-pull-by-rev.sh b/third_party/git/contrib/mw-to-git/t/t9364-pull-by-rev.sh
new file mode 100755
index 000000000000..5c22457a0b6a
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/t9364-pull-by-rev.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: git pull by revision'
+
+. ./test-gitmw-lib.sh
+. ./push-pull-tests.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'configuration' '
+	git config --global mediawiki.fetchStrategy by_rev
+'
+
+test_push_pull
+
+test_done
diff --git a/third_party/git/contrib/mw-to-git/t/t9365-continuing-queries.sh b/third_party/git/contrib/mw-to-git/t/t9365-continuing-queries.sh
new file mode 100755
index 000000000000..016454749f8d
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/t9365-continuing-queries.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+test_description='Test the Git Mediawiki remote helper: queries w/ more than 500 results'
+
+. ./test-gitmw-lib.sh
+. $TEST_DIRECTORY/test-lib.sh
+
+test_check_precond
+
+test_expect_success 'creating page w/ >500 revisions' '
+	wiki_reset &&
+	for i in $(test_seq 501)
+	do
+		echo "creating revision $i" &&
+		wiki_editpage foo "revision $i<br/>" true
+	done
+'
+
+test_expect_success 'cloning page w/ >500 revisions' '
+	git clone mediawiki::'"$WIKI_URL"' mw_dir
+'
+
+test_done
diff --git a/third_party/git/contrib/mw-to-git/t/test-gitmw-lib.sh b/third_party/git/contrib/mw-to-git/t/test-gitmw-lib.sh
new file mode 100755
index 000000000000..3948a0028283
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/test-gitmw-lib.sh
@@ -0,0 +1,432 @@
+# Copyright (C) 2012
+#     Charles Roussel <charles.roussel@ensimag.imag.fr>
+#     Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#     Julien Khayat <julien.khayat@ensimag.imag.fr>
+#     Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#     Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+#
+# CONFIGURATION VARIABLES
+# You might want to change these ones
+#
+
+. ./test.config
+
+WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
+CURR_DIR=$(pwd)
+TEST_OUTPUT_DIRECTORY=$(pwd)
+TEST_DIRECTORY="$CURR_DIR"/../../../t
+
+export TEST_OUTPUT_DIRECTORY TEST_DIRECTORY CURR_DIR
+
+if test "$LIGHTTPD" = "false" ; then
+	PORT=80
+else
+	WIKI_DIR_INST="$CURR_DIR/$WEB_WWW"
+fi
+
+wiki_upload_file () {
+	"$CURR_DIR"/test-gitmw.pl upload_file "$@"
+}
+
+wiki_getpage () {
+	"$CURR_DIR"/test-gitmw.pl get_page "$@"
+}
+
+wiki_delete_page () {
+	"$CURR_DIR"/test-gitmw.pl delete_page "$@"
+}
+
+wiki_editpage () {
+	"$CURR_DIR"/test-gitmw.pl edit_page "$@"
+}
+
+die () {
+	die_with_status 1 "$@"
+}
+
+die_with_status () {
+	status=$1
+	shift
+	echo >&2 "$*"
+	exit "$status"
+}
+
+
+# Check the preconditions to run git-remote-mediawiki's tests
+test_check_precond () {
+	if ! test_have_prereq PERL
+	then
+		skip_all='skipping gateway git-mw tests, perl not available'
+		test_done
+	fi
+
+	GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd)
+	PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH"
+
+	if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
+	then
+		skip_all='skipping gateway git-mw tests, no mediawiki found'
+		test_done
+	fi
+}
+
+# test_diff_directories <dir_git> <dir_wiki>
+#
+# Compare the contents of directories <dir_git> and <dir_wiki> with diff
+# and fail if they do not match. The comparison does
+# not look into .git in the process.
+# Warning: the first argument MUST be the directory containing the git data
+test_diff_directories () {
+	rm -rf "$1_tmp"
+	mkdir -p "$1_tmp"
+	cp "$1"/*.mw "$1_tmp"
+	diff -r -b "$1_tmp" "$2"
+}
+
+# $1=<dir>
+# $2=<N>
+#
+# Check that <dir> contains exactly <N> files
+test_contains_N_files () {
+	if test $(ls -- "$1" | wc -l) -ne "$2"; then
+		echo "directory $1 should contain $2 files"
+		echo "it contains these files:"
+		ls "$1"
+		false
+	fi
+}
+
+
+# wiki_check_content <file_name> <page_name>
+#
+# Compares the contents of the file <file_name> and the wiki page
+# <page_name> and exits with error 1 if they do not match.
+wiki_check_content () {
+	mkdir -p wiki_tmp
+	wiki_getpage "$2" wiki_tmp
+	# Replace the forbidden character '/' in the file name with %2F
+	page_name=$(printf "%s\n" "$2" | sed -e "s/\//%2F/g")
+
+	diff -b "$1" wiki_tmp/"$page_name".mw
+	if test $? -ne 0
+	then
+		rm -rf wiki_tmp
+		error "ERROR: file $2 not found on wiki"
+	fi
+	rm -rf wiki_tmp
+}
+
+# wiki_page_exist <page_name>
+#
+# Check that the page <page_name> exists on the wiki, and exit
+# with an error if it does not.
+wiki_page_exist () {
+	mkdir -p wiki_tmp
+	wiki_getpage "$1" wiki_tmp
+	page_name=$(printf "%s\n" "$1" | sed "s/\//%2F/g")
+	if test -f wiki_tmp/"$page_name".mw ; then
+		rm -rf wiki_tmp
+	else
+		rm -rf wiki_tmp
+		error "test failed: file $1 not found on wiki"
+	fi
+}
+
+# wiki_getallpagename
+#
+# Fetch the name of each page on the wiki.
+wiki_getallpagename () {
+	"$CURR_DIR"/test-gitmw.pl getallpagename
+}
+
+# wiki_getallpagecategory <category>
+#
+# Fetch the name of each page belonging to <category> on the wiki.
+wiki_getallpagecategory () {
+	"$CURR_DIR"/test-gitmw.pl getallpagename "$@"
+}
+
+# wiki_getallpage <dest_dir> [<category>]
+#
+# Fetch all the pages from the wiki and place them in the directory
+# <dest_dir>.
+# If <category> is defined, then wiki_getallpage fetches only the pages
+# included in <category>.
+wiki_getallpage () {
+	if test -z "$2";
+	then
+		wiki_getallpagename
+	else
+		wiki_getallpagecategory "$2"
+	fi
+	mkdir -p "$1"
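+	# wiki_getallpagename/wiki_getallpagecategory above wrote one page
+	# name per line to all.txt; fetch each of those pages into "$1".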
+	while read -r line; do
+		wiki_getpage "$line" "$1"
+	done < all.txt
+}
+
+# ================= Install part =================
+
+error () {
+	echo "$@" >&2
+	exit 1
+}
+
+# config_lighttpd
+#
+# Create the configuration files and the folders necessary to start lighttpd.
+# Overwrite any existing file.
+config_lighttpd () {
+	mkdir -p $WEB
+	mkdir -p $WEB_TMP
+	mkdir -p $WEB_WWW
+	cat > $WEB/lighttpd.conf <<EOF
+	server.document-root = "$CURR_DIR/$WEB_WWW"
+	server.port = $PORT
+	server.pid-file = "$CURR_DIR/$WEB_TMP/pid"
+
+	server.modules = (
+	"mod_rewrite",
+	"mod_redirect",
+	"mod_access",
+	"mod_accesslog",
+	"mod_fastcgi"
+	)
+
+	index-file.names = ("index.php" , "index.html")
+
+	mimetype.assign		    = (
+	".pdf"		=>	"application/pdf",
+	".sig"		=>	"application/pgp-signature",
+	".spl"		=>	"application/futuresplash",
+	".class"	=>	"application/octet-stream",
+	".ps"		=>	"application/postscript",
+	".torrent"	=>	"application/x-bittorrent",
+	".dvi"		=>	"application/x-dvi",
+	".gz"		=>	"application/x-gzip",
+	".pac"		=>	"application/x-ns-proxy-autoconfig",
+	".swf"		=>	"application/x-shockwave-flash",
+	".tar.gz"	=>	"application/x-tgz",
+	".tgz"		=>	"application/x-tgz",
+	".tar"		=>	"application/x-tar",
+	".zip"		=>	"application/zip",
+	".mp3"		=>	"audio/mpeg",
+	".m3u"		=>	"audio/x-mpegurl",
+	".wma"		=>	"audio/x-ms-wma",
+	".wax"		=>	"audio/x-ms-wax",
+	".ogg"		=>	"application/ogg",
+	".wav"		=>	"audio/x-wav",
+	".gif"		=>	"image/gif",
+	".jpg"		=>	"image/jpeg",
+	".jpeg"		=>	"image/jpeg",
+	".png"		=>	"image/png",
+	".xbm"		=>	"image/x-xbitmap",
+	".xpm"		=>	"image/x-xpixmap",
+	".xwd"		=>	"image/x-xwindowdump",
+	".css"		=>	"text/css",
+	".html"		=>	"text/html",
+	".htm"		=>	"text/html",
+	".js"		=>	"text/javascript",
+	".asc"		=>	"text/plain",
+	".c"		=>	"text/plain",
+	".cpp"		=>	"text/plain",
+	".log"		=>	"text/plain",
+	".conf"		=>	"text/plain",
+	".text"		=>	"text/plain",
+	".txt"		=>	"text/plain",
+	".dtd"		=>	"text/xml",
+	".xml"		=>	"text/xml",
+	".mpeg"		=>	"video/mpeg",
+	".mpg"		=>	"video/mpeg",
+	".mov"		=>	"video/quicktime",
+	".qt"		=>	"video/quicktime",
+	".avi"		=>	"video/x-msvideo",
+	".asf"		=>	"video/x-ms-asf",
+	".asx"		=>	"video/x-ms-asf",
+	".wmv"		=>	"video/x-ms-wmv",
+	".bz2"		=>	"application/x-bzip",
+	".tbz"		=>	"application/x-bzip-compressed-tar",
+	".tar.bz2"	=>	"application/x-bzip-compressed-tar",
+	""		=>	"text/plain"
+	)
+
+	fastcgi.server = ( ".php" =>
+	("localhost" =>
+	( "socket" => "$CURR_DIR/$WEB_TMP/php.socket",
+	"bin-path" => "$PHP_DIR/php-cgi -c $CURR_DIR/$WEB/php.ini"
+
+	)
+	)
+	)
+EOF
+
+	cat > $WEB/php.ini <<EOF
+	session.save_path ='$CURR_DIR/$WEB_TMP'
+EOF
+}
+
+# start_lighttpd
+#
+# Start or restart the lighttpd daemon. On restart, the configuration files
+# are rewritten.
+start_lighttpd () {
+	if test -f "$WEB_TMP/pid"; then
+		echo "Instance already running. Restarting..."
+		stop_lighttpd
+	fi
+	config_lighttpd
+	"$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
+
+	if test $? -ne 0 ; then
+		echo "Could not execute http daemon lighttpd"
+		exit 1
+	fi
+}
+
+# stop_lighttpd
+#
+# Kill the lighttpd daemon; its associated files and folders are removed by wiki_delete.
+stop_lighttpd () {
+	test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
+}
+
+# Create the SQLite database of the MediaWiki. If the database file already
+# exists, it will be deleted.
+# This script should be run from the directory where $FILES_FOLDER is
+# located.
+create_db () {
+	rm -f "$TMP/$DB_FILE"
+
+	echo "Generating the SQLite database file. It can take some time ..."
+	# Run the php script to generate the SQLite database file
+	# with cURL calls.
+	php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
+		"$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
+
+	if [ ! -f "$TMP/$DB_FILE" ] ; then
+		error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
+	fi
+
+	# Copy the generated database file into the directory the
+	# user indicated.
+	cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
+		error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
+}
+
+# Install a wiki in your web server directory.
+wiki_install () {
+	if test $LIGHTTPD = "true" ; then
+		start_lighttpd
+	fi
+
+	SERVER_ADDR=$SERVER_ADDR:$PORT
+	# In this part, we change directory to $TMP in order to download,
+	# unpack and copy the files of MediaWiki
+	(
+	mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
+	if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
+		error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
+		Please create it and launch the script again."
+	fi
+
+	# Fetch MediaWiki's archive if not already present in the TMP directory
+	MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
+	cd "$TMP"
+	if [ ! -f $MW_FILENAME ] ; then
+		echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
+		wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
+			error "Unable to download "\
+			"http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
+			"$MW_FILENAME. "\
+			"Please fix your connection and launch the script again."
+		echo "$MW_FILENAME downloaded in $(pwd). "\
+			"You can delete it later if you want."
+	else
+		echo "Reusing existing $MW_FILENAME downloaded in $(pwd)."
+	fi
+	archive_abs_path=$(pwd)/$MW_FILENAME
+	cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
+		error "can't cd to $WIKI_DIR_INST/$WIKI_DIR_NAME/"
+	tar xzf "$archive_abs_path" --strip-components=1 ||
+		error "Unable to extract MediaWiki's files from $archive_abs_path to "\
+			"$WIKI_DIR_INST/$WIKI_DIR_NAME"
+	) || exit 1
+
+	create_db
+
+	# Copy the generic LocalSettings.php into the web server's directory
+	# and adjust its parameters according to the ones set at the top
+	# of this script.
+	# Note that the original LocalSettings.php is never modified.
+	if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
+		error "Can't find $FILES_FOLDER/LocalSettings.php " \
+			"in the current folder. "\
+		"Please run the script inside its folder."
+	fi
+	cp "$FILES_FOLDER/LocalSettings.php" \
+		"$FILES_FOLDER/LocalSettings-tmp.php" ||
+		error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
+		"to $FILES_FOLDER/LocalSettings-tmp.php"
+
+	# Parse and set the LocalSettings file of the user according to the
+	# CONFIGURATION VARIABLES section at the beginning of this script
+	file_swap="$FILES_FOLDER/LocalSettings-swap.php"
+	sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
+		"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+	mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+	sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
+		"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+	mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+	sed "s,@WG_SQLITE_DATADIR@,$TMP," \
+		"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+	mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+	sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
+		"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
+	mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
+
+	mv "$FILES_FOLDER/LocalSettings-tmp.php" \
+		"$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
+		error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
+		"in $WIKI_DIR_INST/$WIKI_DIR_NAME"
+	echo "File $FILES_FOLDER/LocalSettings.php is set in" \
+		" $WIKI_DIR_INST/$WIKI_DIR_NAME"
+
+	echo "Your wiki has been installed. You can check it at
+		http://$SERVER_ADDR/$WIKI_DIR_NAME"
+}
+
+# Reset the database of the wiki and the password of the admin
+#
+# Warning: This function must be called only from a subdirectory of the t/ directory
+wiki_reset () {
+	# Copy initial database of the wiki
+	if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
+		error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
+	fi
+	cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
+		error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
+	echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
+}
+
+# Delete the wiki created in the web server's directory and all its content
+# saved in the database.
+wiki_delete () {
+	if test $LIGHTTPD = "true"; then
+		stop_lighttpd
+		rm -fr "$WEB"
+	else
+		# Delete the wiki's directory.
+		rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
+			error "Wiki's directory $WIKI_DIR_INST/" \
+			"$WIKI_DIR_NAME could not be deleted"
+		# Delete the wiki's SQLite database.
+		rm -f "$TMP/$DB_FILE" ||
+			error "Database $TMP/$DB_FILE could not be deleted."
+	fi
+
+	# Delete the wiki's SQLite database
+	rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
+	rm -f "$FILES_FOLDER/$DB_FILE"
+	rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
+}
diff --git a/third_party/git/contrib/mw-to-git/t/test-gitmw.pl b/third_party/git/contrib/mw-to-git/t/test-gitmw.pl
new file mode 100755
index 000000000000..0ff76259faa6
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/test-gitmw.pl
@@ -0,0 +1,225 @@
+#!/usr/bin/perl -w -s
+# Copyright (C) 2012
+#     Charles Roussel <charles.roussel@ensimag.imag.fr>
+#     Simon Cathebras <simon.cathebras@ensimag.imag.fr>
+#     Julien Khayat <julien.khayat@ensimag.imag.fr>
+#     Guillaume Sasdy <guillaume.sasdy@ensimag.imag.fr>
+#     Simon Perrat <simon.perrat@ensimag.imag.fr>
+# License: GPL v2 or later
+
+# Usage:
+#       ./test-gitmw.pl <command> [argument]*
+# Run it from a terminal with the name of the function to call as the
+# first argument and that function's arguments as the remaining arguments.
+#
+# Example:
+#     ./test-gitmw.pl "get_page" foo .
+# will call <wiki_getpage> with arguments <foo> and <.>
+#
+# Available functions are:
+#     "get_page"
+#     "delete_page"
+#     "edit_page"
+#     "getallpagename"
+
+use MediaWiki::API;
+use Getopt::Long;
+use encoding 'utf8';
+use DateTime::Format::ISO8601;
+use open ':encoding(utf8)';
+use constant SLASH_REPLACEMENT => "%2F";
+
+# Parse the config file
+
+my $configfile = "$ENV{'CURR_DIR'}/test.config";
+my %config;
+open my $CONFIG, "<",  $configfile or die "can't open $configfile: $!";
+while (<$CONFIG>)
+{
+	chomp;
+	s/#.*//;
+	s/^\s+//;
+	s/\s+$//;
+	next unless length;
+	my ($key, $value) = split (/\s*=\s*/,$_, 2);
+	$config{$key} = $value;
+	last if ($key eq 'LIGHTTPD' and $value eq 'false');
+	last if ($key eq 'PORT');
+}
+close $CONFIG or die "can't close $configfile: $!";
+
+my $wiki_address = "http://$config{'SERVER_ADDR'}".":"."$config{'PORT'}";
+my $wiki_url = "$wiki_address/$config{'WIKI_DIR_NAME'}/api.php";
+my $wiki_admin = "$config{'WIKI_ADMIN'}";
+my $wiki_admin_pass = "$config{'WIKI_PASSW'}";
+my $mw = MediaWiki::API->new;
+$mw->{config}->{api_url} = $wiki_url;
+
+
+# wiki_login <name> <password>
+#
+# Log in with user <name> and password <password> to the wiki referenced
+# by the global variable $mw
+sub wiki_login {
+	$mw->login( { lgname => "$_[0]",lgpassword => "$_[1]" } )
+	|| die "getpage: login failed";
+}
+
+# wiki_getpage <wiki_page> <dest_path>
+#
+# Fetch the page <wiki_page> from the wiki referenced by the global variable
+# $mw and copy its content into the directory <dest_path>
+sub wiki_getpage {
+	my $pagename = $_[0];
+	my $destdir = $_[1];
+
+	my $page = $mw->get_page( { title => $pagename } );
+	if (!defined($page)) {
+		die "getpage: wiki does not exist";
+	}
+
+	my $content = $page->{'*'};
+	if (!defined($content)) {
+		die "getpage: page does not exist";
+	}
+
+	$pagename=$page->{'title'};
+	# Replace spaces with underscores and '/' with %2F in the page name
+	$pagename =~ s/ /_/g;
+	$pagename =~ s/\//%2F/g;
+	open(my $file, ">$destdir/$pagename.mw");
+	print $file "$content";
+	close ($file);
+
+}
+
+# wiki_delete_page <page_name>
+#
+# Delete the page named <page_name> from the wiki referenced
+# by the global variable $mw
+sub wiki_delete_page {
+	my $pagename = $_[0];
+
+	my $exist=$mw->get_page({title => $pagename});
+
+	if (defined($exist->{'*'})){
+		$mw->edit({ action => 'delete',
+				title => $pagename})
+		|| die $mw->{error}->{code} . ": " . $mw->{error}->{details};
+	} else {
+		die "no page with such name found: $pagename\n";
+	}
+}
+
+# wiki_editpage <wiki_page> <wiki_content> <wiki_append> [-c=<category>] [-s=<summary>]
+#
+# Edit a page named <wiki_page> with content <wiki_content> on the wiki
+# referenced by the global variable $mw.
+# If <wiki_append> is "true", append <wiki_content> to the current
+# content of the page <wiki_page>.
+# If <wiki_page> doesn't exist, the page is created with <wiki_content>.
+sub wiki_editpage {
+	my $wiki_page = $_[0];
+	my $wiki_content = $_[1];
+	my $wiki_append = $_[2];
+	my $summary = "";
+	my ($summ, $cat) = ();
+	GetOptions('s=s' => \$summ, 'c=s' => \$cat);
+
+	my $append = 0;
+	if (defined($wiki_append) && $wiki_append eq 'true') {
+		$append=1;
+	}
+
+	my $previous_text ="";
+
+	if ($append) {
+		my $ref = $mw->get_page( { title => $wiki_page } );
+		$previous_text = $ref->{'*'};
+	}
+
+	my $text = $wiki_content;
+	if (defined($previous_text)) {
+		$text="$previous_text$text";
+	}
+
+	# Optionally, add this page to a category.
+	if (defined($cat)) {
+		my $category_name="[[Category:$cat]]";
+		$text="$text\n $category_name";
+	}
+	if(defined($summ)){
+		$summary=$summ;
+	}
+
+	$mw->edit( { action => 'edit', title => $wiki_page, summary => $summary, text => "$text"} );
+}
+
+# wiki_getallpagename [<category>]
+#
+# Fetch all pages of the wiki referenced by the global variable $mw
+# and write the name of each one to the file all.txt, one name per
+# line.
+# If the argument <category> is defined, then this function gets only the
+# pages belonging to <category>.
+sub wiki_getallpagename {
+	# fetch the pages of the wiki
+	if (defined($_[0])) {
+		my $mw_pages = $mw->list ( { action => 'query',
+				list => 'categorymembers',
+				cmtitle => "Category:$_[0]",
+				cmnamespace => 0,
+				cmlimit => 500 },
+		)
+		|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
+		open(my $file, ">all.txt");
+		foreach my $page (@{$mw_pages}) {
+			print $file "$page->{title}\n";
+		}
+		close ($file);
+
+	} else {
+		my $mw_pages = $mw->list({
+				action => 'query',
+				list => 'allpages',
+				aplimit => 500,
+			})
+		|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
+		open(my $file, ">all.txt");
+		foreach my $page (@{$mw_pages}) {
+			print $file "$page->{title}\n";
+		}
+		close ($file);
+	}
+}
+
+sub wiki_upload_file {
+	my $file_name = $_[0];
+	my $resultat = $mw->edit ( {
+		action => 'upload',
+		filename => $file_name,
+		comment => 'upload a file',
+		file => [ $file_name ],
+		ignorewarnings=>1,
+	}, {
+		skip_encoding => 1
+	} ) || die $mw->{error}->{code} . ' : ' . $mw->{error}->{details};
+}
+
+
+
+# Main part of this script: parse the command line arguments
+# and select which function to execute
+my $fct_to_call = shift;
+
+wiki_login($wiki_admin, $wiki_admin_pass);
+
+my %functions_to_call = qw(
+	upload_file    wiki_upload_file
+	get_page       wiki_getpage
+	delete_page    wiki_delete_page
+	edit_page      wiki_editpage
+	getallpagename wiki_getallpagename
+);
+die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
+&{$functions_to_call{$fct_to_call}}(@ARGV);
diff --git a/third_party/git/contrib/mw-to-git/t/test.config b/third_party/git/contrib/mw-to-git/t/test.config
new file mode 100644
index 000000000000..5ba068416247
--- /dev/null
+++ b/third_party/git/contrib/mw-to-git/t/test.config
@@ -0,0 +1,37 @@
+# Name of the web server's directory dedicated to the wiki is WIKI_DIR_NAME
+WIKI_DIR_NAME=wiki
+
+# Login and password of the wiki's admin
+WIKI_ADMIN=WikiAdmin
+WIKI_PASSW=AdminPass
+
+# Address of the web server
+SERVER_ADDR=localhost
+
+# SQLite database of the wiki, named DB_FILE, is located in TMP
+TMP=/tmp
+DB_FILE=wikidb.sqlite
+
+# If LIGHTTPD is not set to true, the script will use the default
+# web server running in WIKI_DIR_INST.
+WIKI_DIR_INST=/var/www
+
+# If LIGHTTPD is set to true, the script will use Lighttpd to run
+# the wiki.
+LIGHTTPD=true
+
+# The variables below are useful only if LIGHTTPD is set to true.
+PORT=1234
+PHP_DIR=/usr/bin
+LIGHTTPD_DIR=/usr/sbin
+WEB=WEB
+WEB_TMP=$WEB/tmp
+WEB_WWW=$WEB/www
+
+# The variables below are used by the script to install a wiki.
+# You should not modify these unless you are modifying the script itself.
+# tested versions: 1.19.X -> 1.21.1
+MW_VERSION_MAJOR=1.21
+MW_VERSION_MINOR=1
+FILES_FOLDER=install-wiki
+DB_INSTALL_SCRIPT=db_install.php
diff --git a/third_party/git/contrib/persistent-https/LICENSE b/third_party/git/contrib/persistent-https/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/third_party/git/contrib/persistent-https/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/third_party/git/contrib/persistent-https/Makefile b/third_party/git/contrib/persistent-https/Makefile
new file mode 100644
index 000000000000..52b84ba3d439
--- /dev/null
+++ b/third_party/git/contrib/persistent-https/Makefile
@@ -0,0 +1,40 @@
+# Copyright 2012 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
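+# GIT-VERSION-FILE contains a line of the form "GIT_VERSION = <version>";
+# the third whitespace-separated field is the version string embedded below.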
+BUILD_LABEL=$(shell cut -d" " -f3 ../../GIT-VERSION-FILE)
+TAR_OUT=$(shell go env GOOS)_$(shell go env GOARCH).tar.gz
+
+all: git-remote-persistent-https git-remote-persistent-https--proxy \
+	git-remote-persistent-http
+
+git-remote-persistent-https--proxy: git-remote-persistent-https
+	ln -f -s git-remote-persistent-https git-remote-persistent-https--proxy
+
+git-remote-persistent-http: git-remote-persistent-https
+	ln -f -s git-remote-persistent-https git-remote-persistent-http
+
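+# Older Go toolchains (the go1.[0-5] releases matched below) expect the
+# space-separated "-X name value" form of the linker flag; newer ones
+# expect "-X name=value". Pick the right separator before building.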
+git-remote-persistent-https:
+	case $$(go version) in \
+	"go version go"1.[0-5].*) EQ=" " ;; *) EQ="=" ;; esac && \
+	go build -o git-remote-persistent-https \
+		-ldflags "-X main._BUILD_EMBED_LABEL$${EQ}$(BUILD_LABEL)"
+
+clean:
+	rm -f git-remote-persistent-http* *.tar.gz
+
+tar: clean all
+	@chmod 555 git-remote-persistent-https
+	@tar -czf $(TAR_OUT) git-remote-persistent-http* README LICENSE
+	@echo
+	@echo "Created $(TAR_OUT)"
diff --git a/third_party/git/contrib/persistent-https/README b/third_party/git/contrib/persistent-https/README
new file mode 100644
index 000000000000..7c4cd8d257da
--- /dev/null
+++ b/third_party/git/contrib/persistent-https/README
@@ -0,0 +1,72 @@
+git-remote-persistent-https
+
+The git-remote-persistent-https binary speeds up SSL operations
+by running a daemon job (git-remote-persistent-https--proxy) that
+keeps a connection open to a server.
+
+
+PRE-BUILT BINARIES
+
+Darwin amd64:
+https://commondatastorage.googleapis.com/git-remote-persistent-https/darwin_amd64.tar.gz
+
+Linux amd64:
+https://commondatastorage.googleapis.com/git-remote-persistent-https/linux_amd64.tar.gz
+
+
+INSTALLING
+
+Move all of the git-remote-persistent-http* binaries to a directory
+in PATH.
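+
+For example, assuming ~/bin is already on PATH (any directory on PATH
+works) and the Linux tarball from above was downloaded:
+
+	tar xzf linux_amd64.tar.gz
+	cp git-remote-persistent-http* ~/bin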
+
+
+USAGE
+
+HTTPS requests can be delegated to the proxy by using the
+"persistent-https" scheme, e.g.
+
+git clone persistent-https://kernel.googlesource.com/pub/scm/git/git
+
+Likewise, .gitconfig can be updated as follows to rewrite https urls
+to use persistent-https:
+
+[url "persistent-https"]
+	insteadof = https
+[url "persistent-http"]
+	insteadof = http
+
+You may also want to allow the use of the persistent-https helper for
+submodule URLs (since any https URLs pointing to submodules will be
+rewritten, and Git's out-of-the-box defaults forbid submodules from
+using unknown remote helpers):
+
+[protocol "persistent-https"]
+	allow = always
+[protocol "persistent-http"]
+	allow = always
+
+
+#####################################################################
+# BUILDING FROM SOURCE
+#####################################################################
+
+LOCATION
+
+The source is available in the contrib/persistent-https directory of
+the Git source repository. The Git source repository is available at
+git://git.kernel.org/pub/scm/git/git.git/
+https://kernel.googlesource.com/pub/scm/git/git
+
+
+PREREQUISITES
+
+The code is written in Go (http://golang.org/) and the Go compiler is
+required. Currently, the compiler must be built and installed from tip
+of source, in order to include a fix in the reverse http proxy:
+http://code.google.com/p/go/source/detail?r=a615b796570a2cd8591884767a7d67ede74f6648
+
+
+BUILDING
+
+Run "make" to build the binaries. See the section on
+INSTALLING above.
diff --git a/third_party/git/contrib/persistent-https/client.go b/third_party/git/contrib/persistent-https/client.go
new file mode 100644
index 000000000000..71125b5832d3
--- /dev/null
+++ b/third_party/git/contrib/persistent-https/client.go
@@ -0,0 +1,189 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"errors"
+	"fmt"
+	"net"
+	"net/url"
+	"os"
+	"os/exec"
+	"strings"
+	"syscall"
+	"time"
+)
+
+type Client struct {
+	ProxyBin string
+	Args     []string
+
+	insecure bool
+}
+
+func (c *Client) Run() error {
+	if err := c.resolveArgs(); err != nil {
+		return fmt.Errorf("resolveArgs() got error: %v", err)
+	}
+
+	// Connect to the proxy.
+	uconn, hconn, addr, err := c.connect()
+	if err != nil {
+		return fmt.Errorf("connect() got error: %v", err)
+	}
+	// Keep the unix socket connection open for the duration of the request.
+	defer uconn.Close()
+	// Keep a connection to the HTTP server open, so no other user can
+	// bind on the same address so long as the process is running.
+	defer hconn.Close()
+
+	// Start the git-remote-http subprocess.
+	cargs := []string{"-c", fmt.Sprintf("http.proxy=%v", addr), "remote-http"}
+	cargs = append(cargs, c.Args...)
+	cmd := exec.Command("git", cargs...)
+
+	for _, v := range os.Environ() {
+		if !strings.HasPrefix(v, "GIT_PERSISTENT_HTTPS_SECURE=") {
+			cmd.Env = append(cmd.Env, v)
+		}
+	}
+	// Set the GIT_PERSISTENT_HTTPS_SECURE environment variable when
+	// the proxy is using a SSL connection.  This allows credential helpers
+	// to identify secure proxy connections, despite being passed an HTTP
+	// scheme.
+	if !c.insecure {
+		cmd.Env = append(cmd.Env, "GIT_PERSISTENT_HTTPS_SECURE=1")
+	}
+
+	cmd.Stdin = os.Stdin
+	cmd.Stdout = os.Stdout
+	cmd.Stderr = os.Stderr
+	if err := cmd.Run(); err != nil {
+		if eerr, ok := err.(*exec.ExitError); ok {
+			if stat, ok := eerr.ProcessState.Sys().(syscall.WaitStatus); ok && stat.ExitStatus() != 0 {
+				os.Exit(stat.ExitStatus())
+			}
+		}
+		return fmt.Errorf("git-remote-http subprocess got error: %v", err)
+	}
+	return nil
+}
+
+func (c *Client) connect() (uconn net.Conn, hconn net.Conn, addr string, err error) {
+	uconn, err = DefaultSocket.Dial()
+	if err != nil {
+		if e, ok := err.(*net.OpError); ok && (os.IsNotExist(e.Err) || e.Err == syscall.ECONNREFUSED) {
+			if err = c.startProxy(); err == nil {
+				uconn, err = DefaultSocket.Dial()
+			}
+		}
+		if err != nil {
+			return
+		}
+	}
+
+	if addr, err = c.readAddr(uconn); err != nil {
+		return
+	}
+
+	// Open a tcp connection to the proxy.
+	if hconn, err = net.Dial("tcp", addr); err != nil {
+		return
+	}
+
+	// Verify the address hasn't changed ownership.
+	var addr2 string
+	if addr2, err = c.readAddr(uconn); err != nil {
+		return
+	} else if addr != addr2 {
+		err = fmt.Errorf("address changed after connect. got %q, want %q", addr2, addr)
+		return
+	}
+	return
+}
+
+func (c *Client) readAddr(conn net.Conn) (string, error) {
+	conn.SetDeadline(time.Now().Add(5 * time.Second))
+	data := make([]byte, 100)
+	n, err := conn.Read(data)
+	if err != nil {
+		return "", fmt.Errorf("error reading unix socket: %v", err)
+	} else if n == 0 {
+		return "", errors.New("empty data response")
+	}
+	conn.Write([]byte{1}) // Ack
+
+	var addr string
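+	// The proxy replies with two newline-separated listener addresses:
+	// the HTTPS-forwarding one first, then the plain-HTTP one (see
+	// Proxy.handleUnixConn). persistent-http URLs use the latter.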
+	if addrs := strings.Split(string(data[:n]), "\n"); len(addrs) != 2 {
+		return "", fmt.Errorf("got %q, wanted 2 addresses", data[:n])
+	} else if c.insecure {
+		addr = addrs[1]
+	} else {
+		addr = addrs[0]
+	}
+	return addr, nil
+}
+
+func (c *Client) startProxy() error {
+	cmd := exec.Command(c.ProxyBin)
+	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
+	stdout, err := cmd.StdoutPipe()
+	if err != nil {
+		return err
+	}
+	defer stdout.Close()
+	if err := cmd.Start(); err != nil {
+		return err
+	}
+	result := make(chan error)
+	go func() {
+		bytes, _, err := bufio.NewReader(stdout).ReadLine()
+		if line := string(bytes); err == nil && line != "OK" {
+			err = fmt.Errorf("proxy returned %q, want \"OK\"", line)
+		}
+		result <- err
+	}()
+	select {
+	case err := <-result:
+		return err
+	case <-time.After(5 * time.Second):
+		return errors.New("timeout waiting for proxy to start")
+	}
+	panic("not reachable")
+}
+
+func (c *Client) resolveArgs() error {
+	if nargs := len(c.Args); nargs == 0 {
+		return errors.New("remote needed")
+	} else if nargs > 2 {
+		return fmt.Errorf("want at most 2 args, got %v", c.Args)
+	}
+
+	// Rewrite the url scheme to be http.
+	idx := len(c.Args) - 1
+	rawurl := c.Args[idx]
+	rurl, err := url.Parse(rawurl)
+	if err != nil {
+		return fmt.Errorf("invalid remote: %v", err)
+	}
+	c.insecure = rurl.Scheme == "persistent-http"
+	rurl.Scheme = "http"
+	c.Args[idx] = rurl.String()
+	if idx != 0 && c.Args[0] == rawurl {
+		c.Args[0] = c.Args[idx]
+	}
+	return nil
+}
diff --git a/third_party/git/contrib/persistent-https/main.go b/third_party/git/contrib/persistent-https/main.go
new file mode 100644
index 000000000000..fd1b1077439b
--- /dev/null
+++ b/third_party/git/contrib/persistent-https/main.go
@@ -0,0 +1,82 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// The git-remote-persistent-https binary speeds up SSL operations by running
+// a daemon job that keeps a connection open to a Git server. This ensures the
+// git-remote-persistent-https--proxy is running and delegating execution
+// to the git-remote-http binary with the http_proxy set to the daemon job.
+// A unix socket is used to authenticate the proxy and discover the
+// HTTP address. Note, both the client and proxy are included in the same
+// binary.
+package main
+
+import (
+	"flag"
+	"fmt"
+	"log"
+	"os"
+	"strings"
+	"time"
+)
+
+var (
+	forceProxy = flag.Bool("proxy", false, "Whether to start the binary in proxy mode")
+	proxyBin   = flag.String("proxy_bin", "git-remote-persistent-https--proxy", "Path to the proxy binary")
+	printLabel = flag.Bool("print_label", false, "Prints the build label for the binary")
+
+	// Variable that should be defined through the -X linker flag.
+	_BUILD_EMBED_LABEL string
+)
+
+const (
+	defaultMaxIdleDuration    = 24 * time.Hour
+	defaultPollUpdateInterval = 15 * time.Minute
+)
+
+func main() {
+	flag.Parse()
+	if *printLabel {
+		// Short circuit execution to print the build label
+		fmt.Println(buildLabel())
+		return
+	}
+
+	var err error
+	if *forceProxy || strings.HasSuffix(os.Args[0], "--proxy") {
+		log.SetPrefix("git-remote-persistent-https--proxy: ")
+		proxy := &Proxy{
+			BuildLabel:         buildLabel(),
+			MaxIdleDuration:    defaultMaxIdleDuration,
+			PollUpdateInterval: defaultPollUpdateInterval,
+		}
+		err = proxy.Run()
+	} else {
+		log.SetPrefix("git-remote-persistent-https: ")
+		client := &Client{
+			ProxyBin: *proxyBin,
+			Args:     flag.Args(),
+		}
+		err = client.Run()
+	}
+	if err != nil {
+		log.Fatalln(err)
+	}
+}
+
+func buildLabel() string {
+	if _BUILD_EMBED_LABEL == "" {
+		log.Println(`unlabeled build; build with "make" to label`)
+	}
+	return _BUILD_EMBED_LABEL
+}
diff --git a/third_party/git/contrib/persistent-https/proxy.go b/third_party/git/contrib/persistent-https/proxy.go
new file mode 100644
index 000000000000..bb0cdba3864c
--- /dev/null
+++ b/third_party/git/contrib/persistent-https/proxy.go
@@ -0,0 +1,190 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"fmt"
+	"log"
+	"net"
+	"net/http"
+	"net/http/httputil"
+	"os"
+	"os/exec"
+	"os/signal"
+	"sync"
+	"syscall"
+	"time"
+)
+
+type Proxy struct {
+	BuildLabel         string
+	MaxIdleDuration    time.Duration
+	PollUpdateInterval time.Duration
+
+	ul        net.Listener
+	httpAddr  string
+	httpsAddr string
+}
+
+func (p *Proxy) Run() error {
+	hl, err := net.Listen("tcp", "127.0.0.1:0")
+	if err != nil {
+		return fmt.Errorf("http listen failed: %v", err)
+	}
+	defer hl.Close()
+
+	hsl, err := net.Listen("tcp", "127.0.0.1:0")
+	if err != nil {
+		return fmt.Errorf("https listen failed: %v", err)
+	}
+	defer hsl.Close()
+
+	p.ul, err = DefaultSocket.Listen()
+	if err != nil {
+		c, derr := DefaultSocket.Dial()
+		if derr == nil {
+			c.Close()
+			fmt.Println("OK\nA proxy is already running... exiting")
+			return nil
+		} else if e, ok := derr.(*net.OpError); ok && e.Err == syscall.ECONNREFUSED {
+			// Nothing is listening on the socket, unlink it and try again.
+			syscall.Unlink(DefaultSocket.Path())
+			p.ul, err = DefaultSocket.Listen()
+		}
+		if err != nil {
+			return fmt.Errorf("unix listen failed on %v: %v", DefaultSocket.Path(), err)
+		}
+	}
+	defer p.ul.Close()
+	go p.closeOnSignal()
+	go p.closeOnUpdate()
+
+	p.httpAddr = hl.Addr().String()
+	p.httpsAddr = hsl.Addr().String()
+	fmt.Printf("OK\nListening on unix socket=%v http=%v https=%v\n",
+		p.ul.Addr(), p.httpAddr, p.httpsAddr)
+
+	result := make(chan error, 2)
+	go p.serveUnix(result)
+	go func() {
+		result <- http.Serve(hl, &httputil.ReverseProxy{
+			FlushInterval: 500 * time.Millisecond,
+			Director:      func(r *http.Request) {},
+		})
+	}()
+	go func() {
+		result <- http.Serve(hsl, &httputil.ReverseProxy{
+			FlushInterval: 500 * time.Millisecond,
+			Director: func(r *http.Request) {
+				r.URL.Scheme = "https"
+			},
+		})
+	}()
+	return <-result
+}
+
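+// socketContext counts in-flight unix-socket connections via the embedded
+// WaitGroup and records when the last one finished, so closeOnIdle can shut
+// the listener down after MaxIdleDuration of inactivity.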
+type socketContext struct {
+	sync.WaitGroup
+	mutex sync.Mutex
+	last  time.Time
+}
+
+func (sc *socketContext) Done() {
+	sc.mutex.Lock()
+	defer sc.mutex.Unlock()
+	sc.last = time.Now()
+	sc.WaitGroup.Done()
+}
+
+func (p *Proxy) serveUnix(result chan<- error) {
+	sockCtx := &socketContext{}
+	go p.closeOnIdle(sockCtx)
+
+	var err error
+	for {
+		var uconn net.Conn
+		uconn, err = p.ul.Accept()
+		if err != nil {
+			err = fmt.Errorf("accept failed: %v", err)
+			break
+		}
+		sockCtx.Add(1)
+		go p.handleUnixConn(sockCtx, uconn)
+	}
+	sockCtx.Wait()
+	result <- err
+}
+
+func (p *Proxy) handleUnixConn(sockCtx *socketContext, uconn net.Conn) {
+	defer sockCtx.Done()
+	defer uconn.Close()
+	data := []byte(fmt.Sprintf("%v\n%v", p.httpsAddr, p.httpAddr))
+	uconn.SetDeadline(time.Now().Add(5 * time.Second))
+	for i := 0; i < 2; i++ {
+		if n, err := uconn.Write(data); err != nil {
+			log.Printf("error sending http addresses: %+v\n", err)
+			return
+		} else if n != len(data) {
+			log.Printf("sent %d data bytes, wanted %d\n", n, len(data))
+			return
+		}
+		if _, err := uconn.Read([]byte{0, 0, 0, 0}); err != nil {
+			log.Printf("error waiting for Ack: %+v\n", err)
+			return
+		}
+	}
+	// Wait without a deadline for the client to finish via EOF
+	uconn.SetDeadline(time.Time{})
+	uconn.Read([]byte{0, 0, 0, 0})
+}
+
+func (p *Proxy) closeOnIdle(sockCtx *socketContext) {
+	for d := p.MaxIdleDuration; d > 0; {
+		time.Sleep(d)
+		sockCtx.Wait()
+		sockCtx.mutex.Lock()
+		if d = sockCtx.last.Add(p.MaxIdleDuration).Sub(time.Now()); d <= 0 {
+			log.Println("graceful shutdown from idle timeout")
+			p.ul.Close()
+		}
+		sockCtx.mutex.Unlock()
+	}
+}
+
+func (p *Proxy) closeOnUpdate() {
+	for {
+		time.Sleep(p.PollUpdateInterval)
+		if out, err := exec.Command(os.Args[0], "--print_label").Output(); err != nil {
+			log.Printf("error polling for updated binary: %v\n", err)
+		} else if s := string(out[:len(out)-1]); p.BuildLabel != s {
+			log.Printf("graceful shutdown from updated binary: %q --> %q\n", p.BuildLabel, s)
+			p.ul.Close()
+			break
+		}
+	}
+}
+
+func (p *Proxy) closeOnSignal() {
+	ch := make(chan os.Signal, 10)
+	signal.Notify(ch, os.Interrupt, os.Kill, os.Signal(syscall.SIGTERM), os.Signal(syscall.SIGHUP))
+	sig := <-ch
+	p.ul.Close()
+	switch sig {
+	case os.Signal(syscall.SIGHUP):
+		log.Printf("graceful shutdown from signal: %v\n", sig)
+	default:
+		log.Fatalf("exiting from signal: %v\n", sig)
+	}
+}
diff --git a/third_party/git/contrib/persistent-https/socket.go b/third_party/git/contrib/persistent-https/socket.go
new file mode 100644
index 000000000000..193b911dd13a
--- /dev/null
+++ b/third_party/git/contrib/persistent-https/socket.go
@@ -0,0 +1,97 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"fmt"
+	"log"
+	"net"
+	"os"
+	"path/filepath"
+	"syscall"
+)
+
+// A Socket is a wrapper around a Unix socket that verifies directory
+// permissions.
+type Socket struct {
+	Dir string
+}
+
+func defaultDir() string {
+	sockPath := ".git-credential-cache"
+	if home := os.Getenv("HOME"); home != "" {
+		return filepath.Join(home, sockPath)
+	}
+	log.Printf("socket: cannot find HOME path. using relative directory %q for socket", sockPath)
+	return sockPath
+}
+
+// DefaultSocket is a Socket in the $HOME/.git-credential-cache directory.
+var DefaultSocket = Socket{Dir: defaultDir()}
+
+// Listen announces the local network address of the unix socket. The
+// permissions on the socket directory are verified before attempting
+// the actual listen.
+func (s Socket) Listen() (net.Listener, error) {
+	network, addr := "unix", s.Path()
+	if err := s.mkdir(); err != nil {
+		return nil, &net.OpError{Op: "listen", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
+	}
+	return net.Listen(network, addr)
+}
+
+// Dial connects to the unix socket. The permissions on the socket directory
+// are verified before attempting the actual dial.
+func (s Socket) Dial() (net.Conn, error) {
+	network, addr := "unix", s.Path()
+	if err := s.checkPermissions(); err != nil {
+		return nil, &net.OpError{Op: "dial", Net: network, Addr: &net.UnixAddr{Name: addr, Net: network}, Err: err}
+	}
+	return net.Dial(network, addr)
+}
+
+// Path returns the fully specified file name of the unix socket.
+func (s Socket) Path() string {
+	return filepath.Join(s.Dir, "persistent-https-proxy-socket")
+}
+
+func (s Socket) mkdir() error {
+	if err := s.checkPermissions(); err == nil {
+		return nil
+	} else if !os.IsNotExist(err) {
+		return err
+	}
+	if err := os.MkdirAll(s.Dir, 0700); err != nil {
+		return err
+	}
+	return s.checkPermissions()
+}
+
+func (s Socket) checkPermissions() error {
+	fi, err := os.Stat(s.Dir)
+	if err != nil {
+		return err
+	}
+	if !fi.IsDir() {
+		return fmt.Errorf("socket: got file, want directory for %q", s.Dir)
+	}
+	if fi.Mode().Perm() != 0700 {
+		return fmt.Errorf("socket: got perm %o, want 700 for %q", fi.Mode().Perm(), s.Dir)
+	}
+	if st := fi.Sys().(*syscall.Stat_t); int(st.Uid) != os.Getuid() {
+		return fmt.Errorf("socket: got uid %d, want %d for %q", st.Uid, os.Getuid(), s.Dir)
+	}
+	return nil
+}
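(A hedged note on the checks above: Listen and Dial refuse to use a socket
directory unless it is a directory owned by the current user with mode 0700.
Assuming the default location from defaultDir, an existing directory can be
brought into line with:

  chmod 700 "$HOME/.git-credential-cache"

Any other Socket.Dir would need the same ownership and mode.)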
diff --git a/third_party/git/contrib/remote-helpers/README b/third_party/git/contrib/remote-helpers/README
new file mode 100644
index 000000000000..ac72332517a5
--- /dev/null
+++ b/third_party/git/contrib/remote-helpers/README
@@ -0,0 +1,15 @@
+The remote-helper bridges to access data stored in Mercurial and
+Bazaar are maintained outside the git.git tree in the repositories
+of their primary author:
+
+    https://github.com/felipec/git-remote-hg (for Mercurial)
+    https://github.com/felipec/git-remote-bzr (for Bazaar)
+
+You can pick a directory on your $PATH and download them from these
+repositories, e.g.:
+
+  $ wget -O $HOME/bin/git-remote-hg \
+    https://raw.github.com/felipec/git-remote-hg/master/git-remote-hg
+  $ wget -O $HOME/bin/git-remote-bzr \
+    https://raw.github.com/felipec/git-remote-bzr/master/git-remote-bzr
+  $ chmod +x $HOME/bin/git-remote-hg $HOME/bin/git-remote-bzr
diff --git a/third_party/git/contrib/remote-helpers/git-remote-bzr b/third_party/git/contrib/remote-helpers/git-remote-bzr
new file mode 100755
index 000000000000..1c3d87f8619e
--- /dev/null
+++ b/third_party/git/contrib/remote-helpers/git-remote-bzr
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+cat >&2 <<'EOT'
+WARNING: git-remote-bzr is now maintained independently.
+WARNING: For more information visit https://github.com/felipec/git-remote-bzr
+WARNING:
+WARNING: You can pick a directory on your $PATH and download it, e.g.:
+WARNING:   $ wget -O $HOME/bin/git-remote-bzr \
+WARNING:     https://raw.github.com/felipec/git-remote-bzr/master/git-remote-bzr
+WARNING:   $ chmod +x $HOME/bin/git-remote-bzr
+EOT
diff --git a/third_party/git/contrib/remote-helpers/git-remote-hg b/third_party/git/contrib/remote-helpers/git-remote-hg
new file mode 100755
index 000000000000..8e9188364c6f
--- /dev/null
+++ b/third_party/git/contrib/remote-helpers/git-remote-hg
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+cat >&2 <<'EOT'
+WARNING: git-remote-hg is now maintained independently.
+WARNING: For more information visit https://github.com/felipec/git-remote-hg
+WARNING:
+WARNING: You can pick a directory on your $PATH and download it, e.g.:
+WARNING:   $ wget -O $HOME/bin/git-remote-hg \
+WARNING:     https://raw.github.com/felipec/git-remote-hg/master/git-remote-hg
+WARNING:   $ chmod +x $HOME/bin/git-remote-hg
+EOT
diff --git a/third_party/git/contrib/remotes2config.sh b/third_party/git/contrib/remotes2config.sh
new file mode 100755
index 000000000000..1cda19f66af9
--- /dev/null
+++ b/third_party/git/contrib/remotes2config.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+
+# Use this tool to rewrite your .git/remotes/ files into the config.
+
+. git-sh-setup
+
+if [ -d "$GIT_DIR"/remotes ]; then
+	echo "Rewriting $GIT_DIR/remotes" >&2
+	error=0
+	# rewrite into config
+	{
+		cd "$GIT_DIR"/remotes
+		ls | while read f; do
+			name=$(printf "$f" | tr -c "A-Za-z0-9-" ".")
+			sed -n \
+			-e "s/^URL:[ 	]*\(.*\)$/remote.$name.url \1 ./p" \
+			-e "s/^Pull:[ 	]*\(.*\)$/remote.$name.fetch \1 ^$ /p" \
+			-e "s/^Push:[ 	]*\(.*\)$/remote.$name.push \1 ^$ /p" \
+			< "$f"
+		done
+		echo done
+	} | while read key value regex; do
+		case $key in
+		done)
+			if [ $error = 0 ]; then
+				mv "$GIT_DIR"/remotes "$GIT_DIR"/remotes.old
+			fi ;;
+		*)
+			echo "git config $key "$value" $regex"
+			git config $key "$value" $regex || error=1 ;;
+		esac
+	done
+fi
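(A hedged illustration of the rewrite performed above; the remote name and
refspecs are hypothetical. Given a file .git/remotes/origin containing:

  URL: git://example.com/repo.git
  Pull: refs/heads/master:refs/remotes/origin/master
  Push: refs/heads/master:refs/heads/master

the loop would echo and run roughly:

  git config remote.origin.url "git://example.com/repo.git" .
  git config remote.origin.fetch "refs/heads/master:refs/remotes/origin/master" ^$
  git config remote.origin.push "refs/heads/master:refs/heads/master" ^$

and, if every git config call succeeded, rename .git/remotes to
.git/remotes.old.)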
diff --git a/third_party/git/contrib/rerere-train.sh b/third_party/git/contrib/rerere-train.sh
new file mode 100755
index 000000000000..eeee45dd341b
--- /dev/null
+++ b/third_party/git/contrib/rerere-train.sh
@@ -0,0 +1,102 @@
+#!/bin/sh
+# Copyright (c) 2008, Nanako Shiraishi
+# Prime rerere database from existing merge commits
+
+me=rerere-train
+USAGE=$(cat <<-EOF
+usage: $me [--overwrite] <rev-list-args>
+
+    -h, --help            show the help
+    -o, --overwrite       overwrite any existing rerere cache
+EOF
+)
+
+SUBDIRECTORY_OK=Yes
+
+overwrite=0
+
+while test $# -gt 0
+do
+	opt="$1"
+	case "$opt" in
+	-h|--help)
+		echo "$USAGE"
+		exit 0
+		;;
+	-o|--overwrite)
+		overwrite=1
+		shift
+		break
+		;;
+	--)
+		shift
+		break
+		;;
+	*)
+		break
+		;;
+	esac
+done
+
+# The overwrite and help options are valid only as the first argument
+for opt in "$@"
+do
+	case "$opt" in
+	-h|--help)
+		echo "$USAGE"
+		exit 0
+		;;
+	-o|--overwrite)
+		echo "$USAGE"
+		exit 0
+		;;
+	esac
+done
+
+. "$(git --exec-path)/git-sh-setup"
+require_work_tree
+cd_to_toplevel
+
+# Remember original branch
+branch=$(git symbolic-ref -q HEAD) ||
+original_HEAD=$(git rev-parse --verify HEAD) || {
+	echo >&2 "Not on any branch and no commit yet?"
+	exit 1
+}
+
+mkdir -p "$GIT_DIR/rr-cache" || exit
+
+git rev-list --parents "$@" |
+while read commit parent1 other_parents
+do
+	if test -z "$other_parents"
+	then
+		# Skip non-merges
+		continue
+	fi
+	git checkout -q "$parent1^0"
+	if git merge $other_parents >/dev/null 2>&1
+	then
+		# Cleanly merges
+		continue
+	fi
+	if test $overwrite = 1
+	then
+		git rerere forget .
+	fi
+	if test -s "$GIT_DIR/MERGE_RR"
+	then
+		git show -s --pretty=format:"Learning from %h %s" "$commit"
+		git rerere
+		git checkout -q $commit -- .
+		git rerere
+	fi
+	git reset -q --hard
+done
+
+if test -z "$branch"
+then
+	git checkout "$original_HEAD"
+else
+	git checkout "${branch#refs/heads/}"
+fi
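(A hedged usage sketch; the revision range is hypothetical. The script takes
ordinary rev-list arguments, so

  sh contrib/rerere-train.sh --overwrite origin/master..HEAD

replays every merge commit in that range and records its conflict resolution
in the rerere cache, overwriting any resolution already cached.)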
diff --git a/third_party/git/contrib/stats/git-common-hash b/third_party/git/contrib/stats/git-common-hash
new file mode 100755
index 000000000000..e27fd088be1b
--- /dev/null
+++ b/third_party/git/contrib/stats/git-common-hash
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+# This script displays the distribution of longest common hash prefixes.
+# This can be used to determine the minimum prefix length to use
+# for object names to be unique.
+
+git rev-list --objects --all | sort | perl -lne '
+  substr($_, 40) = "";
+  # uncomment next line for a distribution of bits instead of hex chars
+  # $_ = unpack("B*",pack("H*",$_));
+  if (defined $p) {
+    ($p ^ $_) =~ /^(\0*)/;
+    $common = length $1;
+    if (defined $pcommon) {
+      $count[$pcommon > $common ? $pcommon : $common]++;
+    } else {
+      $count[$common]++; # first item
+    }
+  }
+  $p = $_;
+  $pcommon = $common;
+  END {
+    $count[$common]++; # last item
+    print "$_: $count[$_]" for 0..$#count;
+  }
+'
diff --git a/third_party/git/contrib/stats/mailmap.pl b/third_party/git/contrib/stats/mailmap.pl
new file mode 100755
index 000000000000..9513f5e35b44
--- /dev/null
+++ b/third_party/git/contrib/stats/mailmap.pl
@@ -0,0 +1,70 @@
+#!/usr/bin/perl
+
+use warnings 'all';
+use strict;
+use Getopt::Long;
+
+my $match_emails;
+my $match_names;
+my $order_by = 'count';
+Getopt::Long::Configure(qw(bundling));
+GetOptions(
+	'emails|e!' => \$match_emails,
+	'names|n!'  => \$match_names,
+	'count|c'   => sub { $order_by = 'count' },
+	'time|t'    => sub { $order_by = 'stamp' },
+) or exit 1;
+$match_emails = 1 unless $match_names;
+
+my $email = {};
+my $name = {};
+
+open(my $fh, '-|', "git log --format='%at <%aE> %aN'");
+while(<$fh>) {
+	my ($t, $e, $n) = /(\S+) <(\S+)> (.*)/;
+	mark($email, $e, $n, $t);
+	mark($name, $n, $e, $t);
+}
+close($fh);
+
+if ($match_emails) {
+	foreach my $e (dups($email)) {
+		foreach my $n (vals($email->{$e})) {
+			show($n, $e, $email->{$e}->{$n});
+		}
+		print "\n";
+	}
+}
+if ($match_names) {
+	foreach my $n (dups($name)) {
+		foreach my $e (vals($name->{$n})) {
+			show($n, $e, $name->{$n}->{$e});
+		}
+		print "\n";
+	}
+}
+exit 0;
+
+sub mark {
+	my ($h, $k, $v, $t) = @_;
+	my $e = $h->{$k}->{$v} ||= { count => 0, stamp => 0 };
+	$e->{count}++;
+	$e->{stamp} = $t unless $t < $e->{stamp};
+}
+
+sub dups {
+	my $h = shift;
+	return grep { keys(%{$h->{$_}}) > 1 } keys(%$h);
+}
+
+sub vals {
+	my $h = shift;
+	return sort {
+		$h->{$b}->{$order_by} <=> $h->{$a}->{$order_by}
+	} keys(%$h);
+}
+
+sub show {
+	my ($n, $e, $h) = @_;
+	print "$n <$e> ($h->{$order_by})\n";
+}
diff --git a/third_party/git/contrib/stats/packinfo.pl b/third_party/git/contrib/stats/packinfo.pl
new file mode 100755
index 000000000000..be188c0f11db
--- /dev/null
+++ b/third_party/git/contrib/stats/packinfo.pl
@@ -0,0 +1,212 @@
+#!/usr/bin/perl
+#
+# This tool will print vaguely pretty information about a pack.  It
+# expects the output of "git verify-pack -v" as input on stdin.
+#
+# $ git verify-pack -v | packinfo.pl
+#
+# This prints some full-pack statistics; currently "all sizes", "all
+# path sizes", "tree sizes", "tree path sizes", and "depths".
+#
+# * "all sizes" stats are across every object size in the file;
+#   full sizes for base objects, and delta size for deltas.
+# * "all path sizes" stats are across all object's "path sizes".
+#   A path size is the sum of the size of the delta chain, including the
+#   base object.  In other words, it's how many bytes need be read to
+#   reassemble the file from deltas.
+# * "tree sizes" are object sizes grouped into delta trees.
+# * "tree path sizes" are path sizes grouped into delta trees.
+# * "depths" should be obvious.
+#
+# When run as:
+#
+# $ git verify-pack -v | packinfo.pl -tree
+#
+# the trees of objects are output along with the stats.  This looks
+# like:
+#
+#   0 commit 031321c6...      803      803
+#
+#   0   blob 03156f21...     1767     1767
+#   1    blob f52a9d7f...       10     1777
+#   2     blob a8cc5739...       51     1828
+#   3      blob 660e90b1...       15     1843
+#   4       blob 0cb8e3bb...       33     1876
+#   2     blob e48607f0...      311     2088
+#      size: count 6 total 2187 min 10 max 1767 mean 364.50 median 51 std_dev 635.85
+# path size: count 6 total 11179 min 1767 max 2088 mean 1863.17 median 1843 std_dev 107.26
+#
+# The first number after the sha1 is the object size, the second
+# number is the path size.  The statistics are across all objects in
+# the previous delta tree.  Obviously they are omitted for trees of
+# one object.
+#
+# When run as:
+#
+# $ git verify-pack -v | packinfo.pl -tree -filenames
+#
+# it adds filenames to the tree.  Getting this information is slow:
+#
+#   0   blob 03156f21...     1767     1767 Documentation/git-lost-found.txt @ tags/v1.2.0~142
+#   1    blob f52a9d7f...       10     1777 Documentation/git-lost-found.txt @ tags/v1.5.0-rc1~74
+#   2     blob a8cc5739...       51     1828 Documentation/git-lost+found.txt @ tags/v0.99.9h^0
+#   3      blob 660e90b1...       15     1843 Documentation/git-lost+found.txt @ master~3222^2~2
+#   4       blob 0cb8e3bb...       33     1876 Documentation/git-lost+found.txt @ master~3222^2~3
+#   2     blob e48607f0...      311     2088 Documentation/git-lost-found.txt @ tags/v1.5.2-rc3~4
+#      size: count 6 total 2187 min 10 max 1767 mean 364.50 median 51 std_dev 635.85
+# path size: count 6 total 11179 min 1767 max 2088 mean 1863.17 median 1843 std_dev 107.26
+#
+# When run as:
+#
+# $ git verify-pack -v | packinfo.pl -dump
+#
+# it prints out "sha1 size pathsize depth" for each sha1 in lexical
+# order.
+#
+# 000079a2eaef17b7eae70e1f0f635557ea67b644 30 472 7
+# 00013cafe6980411aa6fdd940784917b5ff50f0a 44 1542 4
+# 000182eacf99cde27d5916aa415921924b82972c 499 499 0
+# ...
+#
+# This is handy for comparing two packs.  Adding "-filenames" will add
+# filenames, as per "-tree -filenames" above.
+
+use strict;
+use Getopt::Long;
+
+my $filenames = 0;
+my $tree = 0;
+my $dump = 0;
+GetOptions("tree" => \$tree,
+           "filenames" => \$filenames,
+           "dump" => \$dump);
+
+my %parents;
+my %children;
+my %sizes;
+my @roots;
+my %paths;
+my %types;
+my @commits;
+my %names;
+my %depths;
+my @depths;
+
+while (<STDIN>) {
+    my ($sha1, $type, $size, $space, $offset, $depth, $parent) = split(/\s+/, $_);
+    next unless ($sha1 =~ /^[0-9a-f]{40}$/);
+    $depths{$sha1} = $depth || 0;
+    push(@depths, $depth || 0);
+    push(@commits, $sha1) if ($type eq 'commit');
+    push(@roots, $sha1) unless $parent;
+    $parents{$sha1} = $parent;
+    $types{$sha1} = $type;
+    push(@{$children{$parent}}, $sha1);
+    $sizes{$sha1} = $size;
+}
+
+if ($filenames && ($tree || $dump)) {
+    open(NAMES, "git name-rev --all|");
+    while (<NAMES>) {
+        if (/^(\S+)\s+(.*)$/) {
+            my ($sha1, $name) = ($1, $2);
+            $names{$sha1} = $name;
+        }
+    }
+    close NAMES;
+
+    for my $commit (@commits) {
+        my $name = $names{$commit};
+        open(TREE, "git ls-tree -t -r $commit|");
+        print STDERR "Plumbing tree $name\n";
+        while (<TREE>) {
+            if (/^(\S+)\s+(\S+)\s+(\S+)\s+(.*)$/) {
+                my ($mode, $type, $sha1, $path) = ($1, $2, $3, $4);
+                $paths{$sha1} = "$path @ $name";
+            }
+        }
+        close TREE;
+    }
+}
+
+sub stats {
+    my @data = sort {$a <=> $b} @_;
+    my $min = $data[0];
+    my $max = $data[$#data];
+    my $total = 0;
+    my $count = scalar @data;
+    for my $datum (@data) {
+        $total += $datum;
+    }
+    my $mean = $total / $count;
+    my $median = $data[int(@data / 2)];
+    my $diff_sum = 0;
+    for my $datum (@data) {
+        $diff_sum += ($datum - $mean)**2;
+    }
+    my $std_dev = sqrt($diff_sum / $count);
+    return ($count, $total, $min, $max, $mean, $median, $std_dev);
+}
+
+sub print_stats {
+    my $name = shift;
+    my ($count, $total, $min, $max, $mean, $median, $std_dev) = stats(@_);
+    printf("%s: count %s total %s min %s max %s mean %.2f median %s std_dev %.2f\n",
+           $name, $count, $total, $min, $max, $mean, $median, $std_dev);
+}
+
+my @sizes;
+my @path_sizes;
+my @all_sizes;
+my @all_path_sizes;
+my %path_sizes;
+
+sub dig {
+    my ($sha1, $depth, $path_size) = @_;
+    $path_size += $sizes{$sha1};
+    push(@sizes, $sizes{$sha1});
+    push(@all_sizes, $sizes{$sha1});
+    push(@path_sizes, $path_size);
+    push(@all_path_sizes, $path_size);
+    $path_sizes{$sha1} = $path_size;
+    if ($tree) {
+        printf("%3d%s %6s %s %8d %8d %s\n",
+               $depth, (" " x $depth), $types{$sha1},
+               $sha1, $sizes{$sha1}, $path_size, $paths{$sha1});
+    }
+    for my $child (@{$children{$sha1}}) {
+        dig($child, $depth + 1, $path_size);
+    }
+}
+
+my @tree_sizes;
+my @tree_path_sizes;
+
+for my $root (@roots) {
+    undef @sizes;
+    undef @path_sizes;
+    dig($root, 0, 0);
+    my ($aa, $sz_total) = stats(@sizes);
+    my ($bb, $psz_total) = stats(@path_sizes);
+    push(@tree_sizes, $sz_total);
+    push(@tree_path_sizes, $psz_total);
+    if ($tree) {
+        if (@sizes > 1) {
+            print_stats("     size", @sizes);
+            print_stats("path size", @path_sizes);
+        }
+        print "\n";
+    }
+}
+
+if ($dump) {
+    for my $sha1 (sort keys %sizes) {
+        print "$sha1 $sizes{$sha1} $path_sizes{$sha1} $depths{$sha1} $paths{$sha1}\n";
+    }
+} else {
+    print_stats("      all sizes", @all_sizes);
+    print_stats(" all path sizes", @all_path_sizes);
+    print_stats("     tree sizes", @tree_sizes);
+    print_stats("tree path sizes", @tree_path_sizes);
+    print_stats("         depths", @depths);
+}
diff --git a/third_party/git/contrib/subtree/.gitignore b/third_party/git/contrib/subtree/.gitignore
new file mode 100644
index 000000000000..0b9381abcad3
--- /dev/null
+++ b/third_party/git/contrib/subtree/.gitignore
@@ -0,0 +1,7 @@
+*~
+git-subtree
+git-subtree.1
+git-subtree.html
+git-subtree.xml
+mainline
+subproj
diff --git a/third_party/git/contrib/subtree/COPYING b/third_party/git/contrib/subtree/COPYING
new file mode 100644
index 000000000000..d511905c1647
--- /dev/null
+++ b/third_party/git/contrib/subtree/COPYING
@@ -0,0 +1,339 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License along
+    with this program; if not, write to the Free Software Foundation, Inc.,
+    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/third_party/git/contrib/subtree/INSTALL b/third_party/git/contrib/subtree/INSTALL
new file mode 100644
index 000000000000..7ab0cf450967
--- /dev/null
+++ b/third_party/git/contrib/subtree/INSTALL
@@ -0,0 +1,28 @@
+HOW TO INSTALL git-subtree
+==========================
+
+First, build from the top source directory.
+
+Then, in contrib/subtree, run:
+
+  make
+  make install
+  make install-doc
+
+If you used configure to do the main build, the git-subtree build will
+pick up those settings.  If not, you will likely have to provide a
+value for prefix:
+
+  make prefix=<some dir>
+  make prefix=<some dir> install
+  make prefix=<some dir> install-doc
+
+To run tests, first copy git-subtree to the main build area so the
+newly-built git can find it:
+
+  cp git-subtree ../..
+
+Then:
+
+  make test
+
diff --git a/third_party/git/contrib/subtree/Makefile b/third_party/git/contrib/subtree/Makefile
new file mode 100644
index 000000000000..6906aae44147
--- /dev/null
+++ b/third_party/git/contrib/subtree/Makefile
@@ -0,0 +1,101 @@
+# The default target of this Makefile is...
+all::
+
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+prefix ?= /usr/local
+gitexecdir ?= $(prefix)/libexec/git-core
+mandir ?= $(prefix)/share/man
+man1dir ?= $(mandir)/man1
+htmldir ?= $(prefix)/share/doc/git-doc
+
+../../GIT-VERSION-FILE: FORCE
+	$(MAKE) -C ../../ GIT-VERSION-FILE
+
+-include ../../GIT-VERSION-FILE
+
+# this should be set to a 'standard' bsd-type install program
+INSTALL  ?= install
+RM       ?= rm -f
+
+ASCIIDOC         = asciidoc
+ASCIIDOC_CONF    = -f ../../Documentation/asciidoc.conf
+ASCIIDOC_HTML    = xhtml11
+ASCIIDOC_DOCBOOK = docbook
+ASCIIDOC_EXTRA   =
+XMLTO            = xmlto
+
+ifdef USE_ASCIIDOCTOR
+ASCIIDOC         = asciidoctor
+ASCIIDOC_CONF    =
+ASCIIDOC_HTML    = xhtml5
+ASCIIDOC_DOCBOOK = docbook45
+ASCIIDOC_EXTRA  += -I../../Documentation -rasciidoctor-extensions
+ASCIIDOC_EXTRA  += -alitdd='&\#x2d;&\#x2d;'
+endif
+
+ifndef SHELL_PATH
+	SHELL_PATH = /bin/sh
+endif
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+
+MANPAGE_XSL   = ../../Documentation/manpage-normal.xsl
+
+GIT_SUBTREE_SH := git-subtree.sh
+GIT_SUBTREE    := git-subtree
+
+GIT_SUBTREE_DOC := git-subtree.1
+GIT_SUBTREE_XML := git-subtree.xml
+GIT_SUBTREE_TXT := git-subtree.txt
+GIT_SUBTREE_HTML := git-subtree.html
+GIT_SUBTREE_TEST := ../../git-subtree
+
+all:: $(GIT_SUBTREE)
+
+$(GIT_SUBTREE): $(GIT_SUBTREE_SH)
+	sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' $< >$@
+	chmod +x $@
+
+doc: $(GIT_SUBTREE_DOC) $(GIT_SUBTREE_HTML)
+
+man: $(GIT_SUBTREE_DOC)
+
+html: $(GIT_SUBTREE_HTML)
+
+install: $(GIT_SUBTREE)
+	$(INSTALL) -d -m 755 $(DESTDIR)$(gitexecdir)
+	$(INSTALL) -m 755 $(GIT_SUBTREE) $(DESTDIR)$(gitexecdir)
+
+install-doc: install-man install-html
+
+install-man: $(GIT_SUBTREE_DOC)
+	$(INSTALL) -d -m 755 $(DESTDIR)$(man1dir)
+	$(INSTALL) -m 644 $^ $(DESTDIR)$(man1dir)
+
+install-html: $(GIT_SUBTREE_HTML)
+	$(INSTALL) -d -m 755 $(DESTDIR)$(htmldir)
+	$(INSTALL) -m 644 $^ $(DESTDIR)$(htmldir)
+
+$(GIT_SUBTREE_DOC): $(GIT_SUBTREE_XML)
+	$(XMLTO) -m $(MANPAGE_XSL) man $^
+
+$(GIT_SUBTREE_XML): $(GIT_SUBTREE_TXT)
+	$(ASCIIDOC) -b $(ASCIIDOC_DOCBOOK) -d manpage $(ASCIIDOC_CONF) \
+		-agit_version=$(GIT_VERSION) $(ASCIIDOC_EXTRA) $^
+
+$(GIT_SUBTREE_HTML): $(GIT_SUBTREE_TXT)
+	$(ASCIIDOC) -b $(ASCIIDOC_HTML) -d manpage $(ASCIIDOC_CONF) \
+		-agit_version=$(GIT_VERSION) $(ASCIIDOC_EXTRA) $^
+
+$(GIT_SUBTREE_TEST): $(GIT_SUBTREE)
+	cp $< $@
+
+test: $(GIT_SUBTREE_TEST)
+	$(MAKE) -C t/ test
+
+clean:
+	$(RM) $(GIT_SUBTREE)
+	$(RM) *.xml *.html *.1
+
+.PHONY: FORCE
diff --git a/third_party/git/contrib/subtree/README b/third_party/git/contrib/subtree/README
new file mode 100644
index 000000000000..c686b4a69b12
--- /dev/null
+++ b/third_party/git/contrib/subtree/README
@@ -0,0 +1,8 @@
+
+Please read git-subtree.txt for documentation.
+
+Please don't contact me using github mail; it's slow, ugly, and worst of
+all, redundant. Email me instead at apenwarr@gmail.com and I'll be happy to
+help.
+
+Avery
diff --git a/third_party/git/contrib/subtree/git-subtree.sh b/third_party/git/contrib/subtree/git-subtree.sh
new file mode 100755
index 000000000000..868e18b9a1ab
--- /dev/null
+++ b/third_party/git/contrib/subtree/git-subtree.sh
@@ -0,0 +1,901 @@
+#!/bin/sh
+#
+# git-subtree.sh: split/join git repositories in subdirectories of this one
+#
+# Copyright (C) 2009 Avery Pennarun <apenwarr@gmail.com>
+#
+if test $# -eq 0
+then
+	set -- -h
+fi
+OPTS_SPEC="\
+git subtree add   --prefix=<prefix> <commit>
+git subtree add   --prefix=<prefix> <repository> <ref>
+git subtree merge --prefix=<prefix> <commit>
+git subtree pull  --prefix=<prefix> <repository> <ref>
+git subtree push  --prefix=<prefix> <repository> <ref>
+git subtree split --prefix=<prefix> <commit>
+--
+h,help        show the help
+q             quiet
+d             show debug messages
+P,prefix=     the name of the subdir to split out
+m,message=    use the given message as the commit message for the merge commit
+ options for 'split'
+annotate=     add a prefix to commit message of new commits
+b,branch=     create a new branch from the split subtree
+ignore-joins  ignore prior --rejoin commits
+onto=         try connecting new tree to an existing one
+rejoin        merge the new branch back into HEAD
+ options for 'add', 'merge', and 'pull'
+squash        merge subtree changes as a single commit
+"
+eval "$(echo "$OPTS_SPEC" | git rev-parse --parseopt -- "$@" || echo exit $?)"
+
+PATH=$PATH:$(git --exec-path)
+. git-sh-setup
+
+require_work_tree
+
+quiet=
+branch=
+debug=
+command=
+onto=
+rejoin=
+ignore_joins=
+annotate=
+squash=
+message=
+prefix=
+
+debug () {
+	if test -n "$debug"
+	then
+		printf "%s\n" "$*" >&2
+	fi
+}
+
+say () {
+	if test -z "$quiet"
+	then
+		printf "%s\n" "$*" >&2
+	fi
+}
+
+progress () {
+	if test -z "$quiet"
+	then
+		printf "%s\r" "$*" >&2
+	fi
+}
+
+assert () {
+	if ! "$@"
+	then
+		die "assertion failed: " "$@"
+	fi
+}
+
+ensure_single_rev () {
+	if test $# -ne 1
+	then
+		die "You must provide exactly one revision.  Got: '$@'"
+	fi
+}
+
+while test $# -gt 0
+do
+	opt="$1"
+	shift
+
+	case "$opt" in
+	-q)
+		quiet=1
+		;;
+	-d)
+		debug=1
+		;;
+	--annotate)
+		annotate="$1"
+		shift
+		;;
+	--no-annotate)
+		annotate=
+		;;
+	-b)
+		branch="$1"
+		shift
+		;;
+	-P)
+		prefix="${1%/}"
+		shift
+		;;
+	-m)
+		message="$1"
+		shift
+		;;
+	--no-prefix)
+		prefix=
+		;;
+	--onto)
+		onto="$1"
+		shift
+		;;
+	--no-onto)
+		onto=
+		;;
+	--rejoin)
+		rejoin=1
+		;;
+	--no-rejoin)
+		rejoin=
+		;;
+	--ignore-joins)
+		ignore_joins=1
+		;;
+	--no-ignore-joins)
+		ignore_joins=
+		;;
+	--squash)
+		squash=1
+		;;
+	--no-squash)
+		squash=
+		;;
+	--)
+		break
+		;;
+	*)
+		die "Unexpected option: $opt"
+		;;
+	esac
+done
+
+command="$1"
+shift
+
+case "$command" in
+add|merge|pull)
+	default=
+	;;
+split|push)
+	default="--default HEAD"
+	;;
+*)
+	die "Unknown command '$command'"
+	;;
+esac
+
+if test -z "$prefix"
+then
+	die "You must provide the --prefix option."
+fi
+
+case "$command" in
+add)
+	test -e "$prefix" &&
+		die "prefix '$prefix' already exists."
+	;;
+*)
+	test -e "$prefix" ||
+		die "'$prefix' does not exist; use 'git subtree add'"
+	;;
+esac
+
+dir="$(dirname "$prefix/.")"
+
+if test "$command" != "pull" &&
+		test "$command" != "add" &&
+		test "$command" != "push"
+then
+	revs=$(git rev-parse $default --revs-only "$@") || exit $?
+	dirs=$(git rev-parse --no-revs --no-flags "$@") || exit $?
+	ensure_single_rev $revs
+	if test -n "$dirs"
+	then
+		die "Error: Use --prefix instead of bare filenames."
+	fi
+fi
+
+debug "command: {$command}"
+debug "quiet: {$quiet}"
+debug "revs: {$revs}"
+debug "dir: {$dir}"
+debug "opts: {$*}"
+debug
+
+cache_setup () {
+	cachedir="$GIT_DIR/subtree-cache/$$"
+	rm -rf "$cachedir" ||
+		die "Can't delete old cachedir: $cachedir"
+	mkdir -p "$cachedir" ||
+		die "Can't create new cachedir: $cachedir"
+	mkdir -p "$cachedir/notree" ||
+		die "Can't create new cachedir: $cachedir/notree"
+	debug "Using cachedir: $cachedir" >&2
+}
+
+cache_get () {
+	for oldrev in "$@"
+	do
+		if test -r "$cachedir/$oldrev"
+		then
+			read newrev <"$cachedir/$oldrev"
+			echo $newrev
+		fi
+	done
+}
+
+cache_miss () {
+	for oldrev in "$@"
+	do
+		if ! test -r "$cachedir/$oldrev"
+		then
+			echo $oldrev
+		fi
+	done
+}
+
+check_parents () {
+	missed=$(cache_miss "$1")
+	local indent=$(($2 + 1))
+	for miss in $missed
+	do
+		if ! test -r "$cachedir/notree/$miss"
+		then
+			debug "  incorrect order: $miss"
+			process_split_commit "$miss" "" "$indent"
+		fi
+	done
+}
+
+set_notree () {
+	echo "1" > "$cachedir/notree/$1"
+}
+
+cache_set () {
+	oldrev="$1"
+	newrev="$2"
+	if test "$oldrev" != "latest_old" &&
+		test "$oldrev" != "latest_new" &&
+		test -e "$cachedir/$oldrev"
+	then
+		die "cache for $oldrev already exists!"
+	fi
+	echo "$newrev" >"$cachedir/$oldrev"
+}
+
+rev_exists () {
+	if git rev-parse "$1" >/dev/null 2>&1
+	then
+		return 0
+	else
+		return 1
+	fi
+}
+
+rev_is_descendant_of_branch () {
+	newrev="$1"
+	branch="$2"
+	branch_hash=$(git rev-parse "$branch")
+	match=$(git rev-list -1 "$branch_hash" "^$newrev")
+
+	if test -z "$match"
+	then
+		return 0
+	else
+		return 1
+	fi
+}
+
+# if a commit doesn't have a parent, this might not work.  But we only want
+# to remove the parent from the rev-list, and since it doesn't exist, it won't
+# be there anyway, so do nothing in that case.
+try_remove_previous () {
+	if rev_exists "$1^"
+	then
+		echo "^$1^"
+	fi
+}
+
+find_latest_squash () {
+	debug "Looking for latest squash ($dir)..."
+	dir="$1"
+	sq=
+	main=
+	sub=
+	git log --grep="^git-subtree-dir: $dir/*\$" \
+		--no-show-signature --pretty=format:'START %H%n%s%n%n%b%nEND%n' HEAD |
+	while read a b junk
+	do
+		debug "$a $b $junk"
+		debug "{{$sq/$main/$sub}}"
+		case "$a" in
+		START)
+			sq="$b"
+			;;
+		git-subtree-mainline:)
+			main="$b"
+			;;
+		git-subtree-split:)
+			sub="$(git rev-parse "$b^0")" ||
+			die "could not rev-parse split hash $b from commit $sq"
+			;;
+		END)
+			if test -n "$sub"
+			then
+				if test -n "$main"
+				then
+					# a rejoin commit?
+					# Pretend its sub was a squash.
+					sq="$sub"
+				fi
+				debug "Squash found: $sq $sub"
+				echo "$sq" "$sub"
+				break
+			fi
+			sq=
+			main=
+			sub=
+			;;
+		esac
+	done
+}
+
+find_existing_splits () {
+	debug "Looking for prior splits..."
+	dir="$1"
+	revs="$2"
+	main=
+	sub=
+	local grep_format="^git-subtree-dir: $dir/*\$"
+	if test -n "$ignore_joins"
+	then
+		grep_format="^Add '$dir/' from commit '"
+	fi
+	git log --grep="$grep_format" \
+		--no-show-signature --pretty=format:'START %H%n%s%n%n%b%nEND%n' $revs |
+	while read a b junk
+	do
+		case "$a" in
+		START)
+			sq="$b"
+			;;
+		git-subtree-mainline:)
+			main="$b"
+			;;
+		git-subtree-split:)
+			sub="$(git rev-parse "$b^0")" ||
+			die "could not rev-parse split hash $b from commit $sq"
+			;;
+		END)
+			debug "  Main is: '$main'"
+			if test -z "$main" -a -n "$sub"
+			then
+				# squash commits refer to a subtree
+				debug "  Squash: $sq from $sub"
+				cache_set "$sq" "$sub"
+			fi
+			if test -n "$main" -a -n "$sub"
+			then
+				debug "  Prior: $main -> $sub"
+				cache_set $main $sub
+				cache_set $sub $sub
+				try_remove_previous "$main"
+				try_remove_previous "$sub"
+			fi
+			main=
+			sub=
+			;;
+		esac
+	done
+}
+
+copy_commit () {
+	# We're going to set some environment vars here, so
+	# do it in a subshell to get rid of them safely later
+	debug copy_commit "{$1}" "{$2}" "{$3}"
+	git log -1 --no-show-signature --pretty=format:'%an%n%ae%n%aD%n%cn%n%ce%n%cD%n%B' "$1" |
+	(
+		read GIT_AUTHOR_NAME
+		read GIT_AUTHOR_EMAIL
+		read GIT_AUTHOR_DATE
+		read GIT_COMMITTER_NAME
+		read GIT_COMMITTER_EMAIL
+		read GIT_COMMITTER_DATE
+		export  GIT_AUTHOR_NAME \
+			GIT_AUTHOR_EMAIL \
+			GIT_AUTHOR_DATE \
+			GIT_COMMITTER_NAME \
+			GIT_COMMITTER_EMAIL \
+			GIT_COMMITTER_DATE
+		(
+			printf "%s" "$annotate"
+			cat
+		) |
+		git commit-tree "$2" $3  # reads the rest of stdin
+	) || die "Can't copy commit $1"
+}
+
+add_msg () {
+	dir="$1"
+	latest_old="$2"
+	latest_new="$3"
+	if test -n "$message"
+	then
+		commit_message="$message"
+	else
+		commit_message="Add '$dir/' from commit '$latest_new'"
+	fi
+	cat <<-EOF
+		$commit_message
+
+		git-subtree-dir: $dir
+		git-subtree-mainline: $latest_old
+		git-subtree-split: $latest_new
+	EOF
+}
+
+add_squashed_msg () {
+	if test -n "$message"
+	then
+		echo "$message"
+	else
+		echo "Merge commit '$1' as '$2'"
+	fi
+}
+
+rejoin_msg () {
+	dir="$1"
+	latest_old="$2"
+	latest_new="$3"
+	if test -n "$message"
+	then
+		commit_message="$message"
+	else
+		commit_message="Split '$dir/' into commit '$latest_new'"
+	fi
+	cat <<-EOF
+		$commit_message
+
+		git-subtree-dir: $dir
+		git-subtree-mainline: $latest_old
+		git-subtree-split: $latest_new
+	EOF
+}
+
+squash_msg () {
+	dir="$1"
+	oldsub="$2"
+	newsub="$3"
+	newsub_short=$(git rev-parse --short "$newsub")
+
+	if test -n "$oldsub"
+	then
+		oldsub_short=$(git rev-parse --short "$oldsub")
+		echo "Squashed '$dir/' changes from $oldsub_short..$newsub_short"
+		echo
+		git log --no-show-signature --pretty=tformat:'%h %s' "$oldsub..$newsub"
+		git log --no-show-signature --pretty=tformat:'REVERT: %h %s' "$newsub..$oldsub"
+	else
+		echo "Squashed '$dir/' content from commit $newsub_short"
+	fi
+
+	echo
+	echo "git-subtree-dir: $dir"
+	echo "git-subtree-split: $newsub"
+}
+
+toptree_for_commit () {
+	commit="$1"
+	git rev-parse --verify "$commit^{tree}" || exit $?
+}
+
+subtree_for_commit () {
+	commit="$1"
+	dir="$2"
+	git ls-tree "$commit" -- "$dir" |
+	while read mode type tree name
+	do
+		assert test "$name" = "$dir"
+		assert test "$type" = "tree" -o "$type" = "commit"
+		test "$type" = "commit" && continue  # ignore submodules
+		echo $tree
+		break
+	done
+}
+
+tree_changed () {
+	tree=$1
+	shift
+	if test $# -ne 1
+	then
+		return 0   # weird parents, consider it changed
+	else
+		ptree=$(toptree_for_commit $1)
+		if test "$ptree" != "$tree"
+		then
+			return 0   # changed
+		else
+			return 1   # not changed
+		fi
+	fi
+}
+
+new_squash_commit () {
+	old="$1"
+	oldsub="$2"
+	newsub="$3"
+	tree=$(toptree_for_commit $newsub) || exit $?
+	if test -n "$old"
+	then
+		squash_msg "$dir" "$oldsub" "$newsub" |
+		git commit-tree "$tree" -p "$old" || exit $?
+	else
+		squash_msg "$dir" "" "$newsub" |
+		git commit-tree "$tree" || exit $?
+	fi
+}
+
+copy_or_skip () {
+	rev="$1"
+	tree="$2"
+	newparents="$3"
+	assert test -n "$tree"
+
+	identical=
+	nonidentical=
+	p=
+	gotparents=
+	copycommit=
+	for parent in $newparents
+	do
+		ptree=$(toptree_for_commit $parent) || exit $?
+		test -z "$ptree" && continue
+		if test "$ptree" = "$tree"
+		then
+			# an identical parent could be used in place of this rev.
+			if test -n "$identical"
+			then
+				# if a previous identical parent was found, check whether
+				# one is already an ancestor of the other
+				mergebase=$(git merge-base $identical $parent)
+				if test "$identical" = "$mergebase"
+				then
+					# current identical commit is an ancestor of parent
+					identical="$parent"
+				elif test "$parent" != "$mergebase"
+				then
+					# no common history; commit must be copied
+					copycommit=1
+				fi
+			else
+				# first identical parent detected
+				identical="$parent"
+			fi
+		else
+			nonidentical="$parent"
+		fi
+
+		# sometimes both old parents map to the same newparent;
+		# eliminate duplicates
+		is_new=1
+		for gp in $gotparents
+		do
+			if test "$gp" = "$parent"
+			then
+				is_new=
+				break
+			fi
+		done
+		if test -n "$is_new"
+		then
+			gotparents="$gotparents $parent"
+			p="$p -p $parent"
+		fi
+	done
+
+	if test -n "$identical" && test -n "$nonidentical"
+	then
+		extras=$(git rev-list --count $identical..$nonidentical)
+		if test "$extras" -ne 0
+		then
+			# we need to preserve history along the other branch
+			copycommit=1
+		fi
+	fi
+	if test -n "$identical" && test -z "$copycommit"
+	then
+		echo $identical
+	else
+		copy_commit "$rev" "$tree" "$p" || exit $?
+	fi
+}
+
+ensure_clean () {
+	if ! git diff-index HEAD --exit-code --quiet 2>&1
+	then
+		die "Working tree has modifications.  Cannot add."
+	fi
+	if ! git diff-index --cached HEAD --exit-code --quiet 2>&1
+	then
+		die "Index has modifications.  Cannot add."
+	fi
+}
+
+ensure_valid_ref_format () {
+	git check-ref-format "refs/heads/$1" ||
+		die "'$1' does not look like a ref"
+}
+
+process_split_commit () {
+	local rev="$1"
+	local parents="$2"
+	local indent=$3
+
+	if test $indent -eq 0
+	then
+		revcount=$(($revcount + 1))
+	else
+		# processing commit without normal parent information;
+		# fetch from repo
+		parents=$(git rev-parse "$rev^@")
+		extracount=$(($extracount + 1))
+	fi
+
+	progress "$revcount/$revmax ($createcount) [$extracount]"
+
+	debug "Processing commit: $rev"
+	exists=$(cache_get "$rev")
+	if test -n "$exists"
+	then
+		debug "  prior: $exists"
+		return
+	fi
+	createcount=$(($createcount + 1))
+	debug "  parents: $parents"
+	check_parents "$parents" "$indent"
+	newparents=$(cache_get $parents)
+	debug "  newparents: $newparents"
+
+	tree=$(subtree_for_commit "$rev" "$dir")
+	debug "  tree is: $tree"
+
+	# ugly.  is there no better way to tell if this is a subtree
+	# vs. a mainline commit?  Does it matter?
+	if test -z "$tree"
+	then
+		set_notree "$rev"
+		if test -n "$newparents"
+		then
+			cache_set "$rev" "$rev"
+		fi
+		return
+	fi
+
+	newrev=$(copy_or_skip "$rev" "$tree" "$newparents") || exit $?
+	debug "  newrev is: $newrev"
+	cache_set "$rev" "$newrev"
+	cache_set latest_new "$newrev"
+	cache_set latest_old "$rev"
+}
+
+cmd_add () {
+	if test -e "$dir"
+	then
+		die "'$dir' already exists.  Cannot add."
+	fi
+
+	ensure_clean
+
+	if test $# -eq 1
+	then
+		git rev-parse -q --verify "$1^{commit}" >/dev/null ||
+			die "'$1' does not refer to a commit"
+
+		cmd_add_commit "$@"
+
+	elif test $# -eq 2
+	then
+		# Technically we could accept a refspec here but we're
+		# just going to turn around and add FETCH_HEAD under the
+		# specified directory.  Allowing a refspec might be
+		# misleading because we won't do anything with any other
+		# branches fetched via the refspec.
+		ensure_valid_ref_format "$2"
+
+		cmd_add_repository "$@"
+	else
+		say "error: parameters were '$@'"
+		die "Provide either a commit or a repository and commit."
+	fi
+}
+
+cmd_add_repository () {
+	echo "git fetch" "$@"
+	repository=$1
+	refspec=$2
+	git fetch "$@" || exit $?
+	revs=FETCH_HEAD
+	set -- $revs
+	cmd_add_commit "$@"
+}
+
+cmd_add_commit () {
+	rev=$(git rev-parse $default --revs-only "$@") || exit $?
+	ensure_single_rev $rev
+
+	debug "Adding $dir as '$rev'..."
+	git read-tree --prefix="$dir" $rev || exit $?
+	git checkout -- "$dir" || exit $?
+	tree=$(git write-tree) || exit $?
+
+	headrev=$(git rev-parse HEAD) || exit $?
+	if test -n "$headrev" && test "$headrev" != "$rev"
+	then
+		headp="-p $headrev"
+	else
+		headp=
+	fi
+
+	if test -n "$squash"
+	then
+		rev=$(new_squash_commit "" "" "$rev") || exit $?
+		commit=$(add_squashed_msg "$rev" "$dir" |
+			git commit-tree "$tree" $headp -p "$rev") || exit $?
+	else
+		revp=$(peel_committish "$rev") &&
+		commit=$(add_msg "$dir" $headrev "$rev" |
+			git commit-tree "$tree" $headp -p "$revp") || exit $?
+	fi
+	git reset "$commit" || exit $?
+
+	say "Added dir '$dir'"
+}
+
+cmd_split () {
+	debug "Splitting $dir..."
+	cache_setup || exit $?
+
+	if test -n "$onto"
+	then
+		debug "Reading history for --onto=$onto..."
+		git rev-list $onto |
+		while read rev
+		do
+			# the 'onto' history is already just the subdir, so
+			# any parent we find there can be used verbatim
+			debug "  cache: $rev"
+			cache_set "$rev" "$rev"
+		done
+	fi
+
+	unrevs="$(find_existing_splits "$dir" "$revs")"
+
+	# We can't restrict rev-list to only $dir here, because some of our
+	# parents have the $dir contents at the root, and those won't match.
+	# (and rev-list --follow doesn't seem to solve this)
+	grl='git rev-list --topo-order --reverse --parents $revs $unrevs'
+	revmax=$(eval "$grl" | wc -l)
+	revcount=0
+	createcount=0
+	extracount=0
+	eval "$grl" |
+	while read rev parents
+	do
+		process_split_commit "$rev" "$parents" 0
+	done || exit $?
+
+	latest_new=$(cache_get latest_new)
+	if test -z "$latest_new"
+	then
+		die "No new revisions were found"
+	fi
+
+	if test -n "$rejoin"
+	then
+		debug "Merging split branch into HEAD..."
+		latest_old=$(cache_get latest_old)
+		git merge -s ours \
+			--allow-unrelated-histories \
+			-m "$(rejoin_msg "$dir" "$latest_old" "$latest_new")" \
+			"$latest_new" >&2 || exit $?
+	fi
+	if test -n "$branch"
+	then
+		if rev_exists "refs/heads/$branch"
+		then
+			if ! rev_is_descendant_of_branch "$latest_new" "$branch"
+			then
+				die "Branch '$branch' is not an ancestor of commit '$latest_new'."
+			fi
+			action='Updated'
+		else
+			action='Created'
+		fi
+		git update-ref -m 'subtree split' \
+			"refs/heads/$branch" "$latest_new" || exit $?
+		say "$action branch '$branch'"
+	fi
+	echo "$latest_new"
+	exit 0
+}
+
+cmd_merge () {
+	rev=$(git rev-parse $default --revs-only "$@") || exit $?
+	ensure_single_rev $rev
+	ensure_clean
+
+	if test -n "$squash"
+	then
+		first_split="$(find_latest_squash "$dir")"
+		if test -z "$first_split"
+		then
+			die "Can't squash-merge: '$dir' was never added."
+		fi
+		set $first_split
+		old=$1
+		sub=$2
+		if test "$sub" = "$rev"
+		then
+			say "Subtree is already at commit $rev."
+			exit 0
+		fi
+		new=$(new_squash_commit "$old" "$sub" "$rev") || exit $?
+		debug "New squash commit: $new"
+		rev="$new"
+	fi
+
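+	# git older than 1.7 does not understand the -Xsubtree=<prefix>
+	# strategy option, so fall back to the plain 'subtree' merge strategy.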
+	version=$(git version)
+	if test "$version" \< "git version 1.7"
+	then
+		if test -n "$message"
+		then
+			git merge -s subtree --message="$message" "$rev"
+		else
+			git merge -s subtree "$rev"
+		fi
+	else
+		if test -n "$message"
+		then
+			git merge -Xsubtree="$prefix" \
+				--message="$message" "$rev"
+		else
+			git merge -Xsubtree="$prefix" $rev
+		fi
+	fi
+}
+
+cmd_pull () {
+	if test $# -ne 2
+	then
+		die "You must provide <repository> <ref>"
+	fi
+	ensure_clean
+	ensure_valid_ref_format "$2"
+	git fetch "$@" || exit $?
+	revs=FETCH_HEAD
+	set -- $revs
+	cmd_merge "$@"
+}
+
+cmd_push () {
+	if test $# -ne 2
+	then
+		die "You must provide <repository> <ref>"
+	fi
+	ensure_valid_ref_format "$2"
+	if test -e "$dir"
+	then
+		repository=$1
+		refspec=$2
+		echo "git push using: " "$repository" "$refspec"
+		localrev=$(git subtree split --prefix="$prefix") || die
+		git push "$repository" "$localrev":"refs/heads/$refspec"
+	else
+		die "'$dir' must already exist. Try 'git subtree add'."
+	fi
+}
+
+"cmd_$command" "$@"
diff --git a/third_party/git/contrib/subtree/git-subtree.txt b/third_party/git/contrib/subtree/git-subtree.txt
new file mode 100644
index 000000000000..352deda69dcf
--- /dev/null
+++ b/third_party/git/contrib/subtree/git-subtree.txt
@@ -0,0 +1,351 @@
+git-subtree(1)
+==============
+
+NAME
+----
+git-subtree - Merge subtrees together and split repository into subtrees
+
+
+SYNOPSIS
+--------
+[verse]
+'git subtree' add   -P <prefix> <commit>
+'git subtree' add   -P <prefix> <repository> <ref>
+'git subtree' pull  -P <prefix> <repository> <ref>
+'git subtree' push  -P <prefix> <repository> <ref>
+'git subtree' merge -P <prefix> <commit>
+'git subtree' split -P <prefix> [OPTIONS] [<commit>]
+
+
+DESCRIPTION
+-----------
+Subtrees allow subprojects to be included within a subdirectory
+of the main project, optionally including the subproject's
+entire history.
+
+For example, you could include the source code for a library
+as a subdirectory of your application.
+
+Subtrees are not to be confused with submodules, which are meant for
+the same task. Unlike submodules, subtrees do not need any special
+constructions (like .gitmodules files or gitlinks) to be present in
+your repository, and do not force end-users of your
+repository to do anything special or to understand how subtrees
+work. A subtree is just a subdirectory that can be
+committed to, branched, and merged along with your project in
+any way you want.
+
+They are also not to be confused with using the subtree merge
+strategy. The main difference is that, besides merging
+the other project as a subdirectory, you can also extract the
+entire history of a subdirectory from your project and make it
+into a standalone project. Unlike the subtree merge strategy,
+you can alternate back and forth between these
+two operations. If the standalone library gets updated, you can
+automatically merge the changes into your project; if you
+update the library inside your project, you can "split" the
+changes back out again and merge them back into the library
+project.
+
+For example, if a library you made for one application ends up being
+useful elsewhere, you can extract its entire history and publish
+that as its own git repository, without accidentally
+intermingling the history of your application project.
+
+[TIP]
+In order to keep your commit messages clean, we recommend that
+people split their commits between the subtrees and the main
+project as much as possible.  That is, if you make a change that
+affects both the library and the main application, commit it in
+two pieces.  That way, when you split the library commits out
+later, their descriptions will still make sense.  But if this
+isn't important to you, it's not *necessary*.  git subtree will
+simply leave out the non-library-related parts of the commit
+when it splits it out into the subproject later.
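+
+For instance, if your library lives under a hypothetical lib/ directory
+(the paths here are only illustrative), a change that touches both the
+library and the main application could be recorded as two commits rather
+than one:
+
+	$ git add lib/parser.c
+	$ git commit -m "lib: handle empty input"
+	$ git add app/main.c
+	$ git commit -m "app: rely on the new empty-input behaviour"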
+
+
+COMMANDS
+--------
+add::
+	Create the <prefix> subtree by importing its contents
+	from the given <commit> or <repository> and remote <ref>.
+	A new commit is created automatically, joining the imported
+	project's history with your own.  With '--squash', imports
+	only a single commit from the subproject, rather than its
+	entire history.
+
+merge::
+	Merge recent changes up to <commit> into the <prefix>
+	subtree.  As with normal 'git merge', this doesn't
+	remove your own local changes; it just merges those
+	changes into the latest <commit>.  With '--squash',
+	creates only one commit that contains all the changes,
+	rather than merging in the entire history.
++
+If you use '--squash', the merge direction doesn't always have to be
+forward; you can use this command to go back in time from v2.5 to v2.4,
+for example.  If your merge introduces a conflict, you can resolve it in
+the usual ways.
+	
+pull::
+	Exactly like 'merge', but parallels 'git pull' in that
+	it fetches the given ref from the specified remote
+	repository.
+	
+push::
+	Does a 'split' (see below) using the <prefix> supplied
+	and then does a 'git push' to push the result to the 
+	repository and ref. This can be used to push your
+	subtree to different branches of the remote repository (see the
+	example below).
+
+split::
+	Extract a new, synthetic project history from the
+	history of the <prefix> subtree.  The new history
+	includes only the commits (including merges) that
+	affected <prefix>, and each of those commits now has the
+	contents of <prefix> at the root of the project instead
+	of in a subdirectory.  Thus, the newly created history
+	is suitable for export as a separate git repository.
++
+After splitting successfully, a single commit id is printed to stdout.
+This corresponds to the HEAD of the newly created tree, which you can
+manipulate however you want.
++
+Repeated splits of exactly the same history are guaranteed to be
+identical (i.e. to produce the same commit ids).  Because of this, if
+you add new commits and then re-split, the new commits will be attached
+as commits on top of the history you generated last time, so 'git merge'
+and friends will work as expected.
++
+Note that if you use '--squash' when you merge, you should usually not
+just '--rejoin' when you split.
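+
+As a quick sketch of the round trip (the 'lib' prefix and the repository
+URL are placeholders), pulling upstream changes into the subtree and
+pushing local subtree changes back out look like this:
+
+	$ git subtree pull --prefix=lib https://example.com/lib.git master
+	$ git subtree push --prefix=lib https://example.com/lib.git master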
+
+
+OPTIONS
+-------
+-q::
+--quiet::
+	Suppress unnecessary output messages on stderr.
+
+-d::
+--debug::
+	Produce even more unnecessary output messages on stderr.
+
+-P <prefix>::
+--prefix=<prefix>::
+	Specify the path in the repository to the subtree you
+	want to manipulate.  This option is mandatory
+	for all commands.
+
+-m <message>::
+--message=<message>::
+	This option is only valid for add, merge and pull (unsure).
+	Specify <message> as the commit message for the merge commit.
+
+
+OPTIONS FOR add, merge, push, pull
+----------------------------------
+--squash::
+	This option is only valid for add, merge, and pull
+	commands.
++
+Instead of merging the entire history from the subtree project, produce
+only a single commit that contains all the differences you want to
+merge, and then merge that new commit into your project.
++
+Using this option helps to reduce log clutter. People rarely want to see
+every change that happened between v1.0 and v1.1 of the library they're
+using, since none of the interim versions were ever included in their
+application.
++
+Using '--squash' also helps avoid problems when the same subproject is
+included multiple times in the same project, or is removed and then
+re-added.  In such a case, it doesn't make sense to combine the
+histories anyway, since it's unclear which part of the history belongs
+to which subtree.
++
+Furthermore, with '--squash', you can switch back and forth between
+different versions of a subtree, rather than strictly forward.  'git
+subtree merge --squash' always adjusts the subtree to match the exactly
+specified commit, even if getting to that commit would require undoing
+some changes that were added earlier.
++
+Whether or not you use '--squash', changes made in your local repository
+remain intact and can later be split and sent upstream to the
+subproject.
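+
+For example, a squashed import followed by a later squashed update, using a
+hypothetical vendor/lib prefix and a placeholder URL, looks like this:
+
+	$ git subtree add --prefix=vendor/lib --squash https://example.com/lib.git master
+	$ git fetch https://example.com/lib.git master
+	$ git subtree merge --prefix=vendor/lib --squash FETCH_HEAD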
+
+
+OPTIONS FOR split
+-----------------
+--annotate=<annotation>::
+	This option is only valid for the split command.
++
+When generating synthetic history, add <annotation> as a prefix to each
+commit message.  Since we're creating new commits with the same commit
+message as the original commits, but possibly different content, this
+can help to differentiate them and avoid confusion.
++
+Whenever you split, you need to use the same <annotation>, or else you
+don't have a guarantee that the new re-created history will be identical
+to the old one.  That will prevent merging from working correctly.  git
+subtree tries to make it work anyway, particularly if you use --rejoin,
+but it may not always be effective.
+
+-b <branch>::
+--branch=<branch>::
+	This option is only valid for the split command.
++
+After generating the synthetic history, create a new branch called
+<branch> that contains the new history.  This is suitable for immediate
+pushing upstream.  <branch> must not already exist.
+
+--ignore-joins::
+	This option is only valid for the split command.
++
+If you use '--rejoin', git subtree attempts to optimize its history
+reconstruction to generate only the new commits since the last
+'--rejoin'.  '--ignore-joins' disables this behaviour, forcing it to
+regenerate the entire history.  In a large project, this can take a long
+time.
+
+--onto=<onto>::
+	This option is only valid for the split command.
++
+If your subtree was originally imported using something other than git
+subtree, its history may not match what git subtree is expecting.  In
+that case, you can specify the commit id <onto> that corresponds to the
+first revision of the subproject's history that was imported into your
+project, and git subtree will attempt to build its history from there.
++
+If you used 'git subtree add', you should never need this option.
+
+--rejoin::
+	This option is only valid for the split command.
++
+After splitting, merge the newly created synthetic history back into
+your main project.  That way, future splits can search only the part of
+history that has been added since the most recent --rejoin.
++
+If your split commits end up merged into the upstream subproject, and
+then you want to get the latest upstream version, this will allow git's
+merge algorithm to more intelligently avoid conflicts (since it knows
+these synthetic commits are already part of the upstream repository).
++
+Unfortunately, using this option results in 'git log' showing an extra
+copy of every new commit that was created (the original, and the
+synthetic one).
++
+If you do all your merges with '--squash', don't use '--rejoin' when you
+split, because you don't want the subproject's history to be part of
+your project anyway.
+
+
+EXAMPLE 1. Add command
+----------------------
+Let's assume that you have a local repository that you would like
+to add an external vendor library to. In this case we will add the
+git-subtree repository as a subdirectory of your already existing
+git-extensions repository in ~/git-extensions/:
+
+	$ git subtree add --prefix=git-subtree --squash \
+		git://github.com/apenwarr/git-subtree.git master
+
+'master' needs to be a valid remote ref and can be a different branch
+name.
+
+You can omit the --squash flag, but doing so will increase the number
+of commits that are included in your local repository.
+
+We now have a ~/git-extensions/git-subtree directory containing code
+from the master branch of git://github.com/apenwarr/git-subtree.git
+in our git-extensions repository.
+
+EXAMPLE 2. Extract a subtree using commit, merge and pull
+---------------------------------------------------------
+Let's use the repository for the git source code as an example.
+First, get your own copy of the git.git repository:
+
+	$ git clone git://git.kernel.org/pub/scm/git/git.git test-git
+	$ cd test-git
+
+gitweb (commit 1130ef3) was merged into git as of commit
+0a8f4f0, after which it was no longer maintained separately. 
+But imagine it had been maintained separately, and we wanted to
+extract git's changes to gitweb since that time, to share with
+the upstream.  You could do this:
+
+	$ git subtree split --prefix=gitweb --annotate='(split) ' \
+		0a8f4f0^.. --onto=1130ef3 --rejoin \
+		--branch gitweb-latest
+	$ gitk gitweb-latest
+	$ git push git@github.com:whatever/gitweb.git gitweb-latest:master
+
+(We use '0a8f4f0^..' because that means "all the changes from
+0a8f4f0 to the current version, including 0a8f4f0 itself.")
+
+If gitweb had originally been merged using 'git subtree add' (or
+a previous split had already been done with --rejoin specified)
+then you can do all your splits without having to remember any
+weird commit ids:
+
+	$ git subtree split --prefix=gitweb --annotate='(split) ' --rejoin \
+		--branch gitweb-latest2
+
+And you can merge changes back in from the upstream project just
+as easily:
+
+	$ git subtree pull --prefix=gitweb \
+		git@github.com:whatever/gitweb.git master
+
+Or, using '--squash', you can actually rewind to an earlier
+version of gitweb:
+
+	$ git subtree merge --prefix=gitweb --squash gitweb-latest~10
+
+Then make some changes:
+
+	$ date >gitweb/myfile
+	$ git add gitweb/myfile
+	$ git commit -m 'created myfile'
+
+And fast forward again:
+
+	$ git subtree merge --prefix=gitweb --squash gitweb-latest
+
+And notice that your change is still intact:
+	
+	$ ls -l gitweb/myfile
+
+And you can split it out and look at your changes versus
+the standard gitweb:
+
+	$ git log gitweb-latest..$(git subtree split --prefix=gitweb)
+
+EXAMPLE 3. Extract a subtree using branch
+-----------------------------------------
+Suppose you have a source directory with many files and
+subdirectories, and you want to extract the lib directory to its own
+git project. Here's a short way to do it:
+
+First, make the new repository wherever you want:
+
+	$ <go to the new location>
+	$ git init --bare
+
+Back in your original directory:
+
+	$ git subtree split --prefix=lib --annotate="(split)" -b split
+
+Then push the new branch onto the new empty repository:
+
+	$ git push <new-repo> split:master
+
+
+AUTHOR
+------
+Written by Avery Pennarun <apenwarr@gmail.com>
+
+
+GIT
+---
+Part of the linkgit:git[1] suite
diff --git a/third_party/git/contrib/subtree/t/Makefile b/third_party/git/contrib/subtree/t/Makefile
new file mode 100644
index 000000000000..276898eb6bd7
--- /dev/null
+++ b/third_party/git/contrib/subtree/t/Makefile
@@ -0,0 +1,86 @@
+# Run tests
+#
+# Copyright (c) 2005 Junio C Hamano
+#
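+# Typical invocations (from this directory; a rough sketch, not an
+# exhaustive list):
+#
+#	make			run the whole subtree test suite
+#	make prove		run the tests under 'prove'
+#	make t7900-subtree.sh	run a single test script
+#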
+
+-include ../../../config.mak.autogen
+-include ../../../config.mak
+
+#GIT_TEST_OPTS=--verbose --debug
+SHELL_PATH ?= $(SHELL)
+PERL_PATH ?= /usr/bin/perl
+TAR ?= $(TAR)
+RM ?= rm -f
+PROVE ?= prove
+DEFAULT_TEST_TARGET ?= test
+TEST_LINT ?= test-lint
+
+ifdef TEST_OUTPUT_DIRECTORY
+TEST_RESULTS_DIRECTORY = $(TEST_OUTPUT_DIRECTORY)/test-results
+else
+TEST_RESULTS_DIRECTORY = ../../../t/test-results
+endif
+
+# Shell quote;
+SHELL_PATH_SQ = $(subst ','\'',$(SHELL_PATH))
+PERL_PATH_SQ = $(subst ','\'',$(PERL_PATH))
+TEST_RESULTS_DIRECTORY_SQ = $(subst ','\'',$(TEST_RESULTS_DIRECTORY))
+
+T = $(sort $(wildcard t[0-9][0-9][0-9][0-9]-*.sh))
+TSVN = $(sort $(wildcard t91[0-9][0-9]-*.sh))
+TGITWEB = $(sort $(wildcard t95[0-9][0-9]-*.sh))
+THELPERS = $(sort $(filter-out $(T),$(wildcard *.sh)))
+
+all: $(DEFAULT_TEST_TARGET)
+
+test: pre-clean $(TEST_LINT)
+	$(MAKE) aggregate-results-and-cleanup
+
+prove: pre-clean $(TEST_LINT)
+	@echo "*** prove ***"; GIT_CONFIG=.git/config $(PROVE) --exec '$(SHELL_PATH_SQ)' $(GIT_PROVE_OPTS) $(T) :: $(GIT_TEST_OPTS)
+	$(MAKE) clean-except-prove-cache
+
+$(T):
+	@echo "*** $@ ***"; GIT_CONFIG=.git/config '$(SHELL_PATH_SQ)' $@ $(GIT_TEST_OPTS)
+
+pre-clean:
+	$(RM) -r '$(TEST_RESULTS_DIRECTORY_SQ)'
+
+clean-except-prove-cache:
+	$(RM) -r 'trash directory'.* '$(TEST_RESULTS_DIRECTORY_SQ)'
+	$(RM) -r valgrind/bin
+
+clean: clean-except-prove-cache
+	$(RM) .prove
+
+test-lint: test-lint-duplicates test-lint-executable test-lint-shell-syntax
+
+test-lint-duplicates:
+	@dups=`echo $(T) | tr ' ' '\n' | sed 's/-.*//' | sort | uniq -d` && \
+		test -z "$$dups" || { \
+		echo >&2 "duplicate test numbers:" $$dups; exit 1; }
+
+test-lint-executable:
+	@bad=`for i in $(T); do test -x "$$i" || echo $$i; done` && \
+		test -z "$$bad" || { \
+		echo >&2 "non-executable tests:" $$bad; exit 1; }
+
+test-lint-shell-syntax:
+	@'$(PERL_PATH_SQ)' ../../../t/check-non-portable-shell.pl $(T) $(THELPERS)
+
+aggregate-results-and-cleanup: $(T)
+	$(MAKE) aggregate-results
+	$(MAKE) clean
+
+aggregate-results:
+	for f in '$(TEST_RESULTS_DIRECTORY_SQ)'/t*-*.counts; do \
+		echo "$$f"; \
+	done | '$(SHELL_PATH_SQ)' ../../../t/aggregate-results.sh
+
+valgrind:
+	$(MAKE) GIT_TEST_OPTS="$(GIT_TEST_OPTS) --valgrind"
+
+test-results:
+	mkdir -p test-results
+
+.PHONY: pre-clean $(T) aggregate-results clean valgrind
diff --git a/third_party/git/contrib/subtree/t/t7900-subtree.sh b/third_party/git/contrib/subtree/t/t7900-subtree.sh
new file mode 100755
index 000000000000..57ff4b25c17e
--- /dev/null
+++ b/third_party/git/contrib/subtree/t/t7900-subtree.sh
@@ -0,0 +1,1034 @@
+#!/bin/sh
+#
+# Copyright (c) 2012 Avery Pennarun
+# Copyright (c) 2015 Alexey Shumkin
+#
+test_description='Basic porcelain support for subtrees
+
+This test verifies the basic operation of the add, pull, merge
+and split subcommands of git subtree.
+'
+
+TEST_DIRECTORY=$(pwd)/../../../t
+export TEST_DIRECTORY
+
+. ../../../t/test-lib.sh
+
+subtree_test_create_repo()
+{
+	test_create_repo "$1" &&
+	(
+		cd "$1" &&
+		git config log.date relative
+	)
+}
+
+create()
+{
+	echo "$1" >"$1" &&
+	git add "$1"
+}
+
+check_equal()
+{
+	test_debug 'echo'
+	test_debug "echo \"check a:\" \"{$1}\""
+	test_debug "echo \"      b:\" \"{$2}\""
+	if [ "$1" = "$2" ]; then
+		return 0
+	else
+		return 1
+	fi
+}
+
+undo()
+{
+	git reset --hard HEAD~
+}
+
+# Make sure no patch changes more than one file.
+# The original set of commits changed only one file each.
+# A multi-file change would imply that we pruned commits
+# too aggressively.
+join_commits()
+{
+	commit=
+	all=
+	while read x y; do
+		if [ -z "$x" ]; then
+			continue
+		elif [ "$x" = "commit:" ]; then
+			if [ -n "$commit" ]; then
+				echo "$commit $all"
+				all=
+			fi
+			commit="$y"
+		else
+			all="$all $y"
+		fi
+	done
+	echo "$commit $all"
+}
+
+test_create_commit() (
+	repo=$1 &&
+	commit=$2 &&
+	cd "$repo" &&
+	mkdir -p "$(dirname "$commit")" \
+	|| error "Could not create directory for commit"
+	echo "$commit" >"$commit" &&
+	git add "$commit" || error "Could not add commit"
+	git commit -m "$commit" || error "Could not commit"
+)
+
+last_commit_message()
+{
+	git log --pretty=format:%s -1
+}
+
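+# Each test below works in its own repository named after the current value
+# of subtree_test_count; next_test bumps the counter before every
+# test_expect_success block.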
+subtree_test_count=0
+next_test() {
+	subtree_test_count=$(($subtree_test_count+1))
+}
+
+#
+# Tests for 'git subtree add'
+#
+
+next_test
+test_expect_success 'no merge from non-existent subtree' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		test_must_fail git subtree merge --prefix="sub dir" FETCH_HEAD
+	)
+'
+
+next_test
+test_expect_success 'no pull from non-existent subtree' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		test_must_fail git subtree pull --prefix="sub dir" ./"sub proj" master
+	)'
+
+next_test
+test_expect_success 'add subproj as subtree into sub dir/ with --prefix' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Add '\''sub dir/'\'' from commit '\''$(git rev-parse FETCH_HEAD)'\''"
+	)
+'
+
+next_test
+test_expect_success 'add subproj as subtree into sub dir/ with --prefix and --message' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" --message="Added subproject" FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Added subproject"
+	)
+'
+
+next_test
+test_expect_success 'add subproj as subtree into sub dir/ with --prefix as -P and --message as -m' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add -P "sub dir" -m "Added subproject" FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Added subproject"
+	)
+'
+
+next_test
+test_expect_success 'add subproj as subtree into sub dir/ with --squash and --prefix and --message' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" --message="Added subproject with squash" --squash FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Added subproject with squash"
+	)
+'
+
+#
+# Tests for 'git subtree merge'
+#
+
+next_test
+test_expect_success 'merge new subproj history into sub dir/ with --prefix' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Merge commit '\''$(git rev-parse FETCH_HEAD)'\''"
+	)
+'
+
+next_test
+test_expect_success 'merge new subproj history into sub dir/ with --prefix and --message' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" --message="Merged changes from subproject" FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Merged changes from subproject"
+	)
+'
+
+next_test
+test_expect_success 'merge new subproj history into sub dir/ with --squash and --prefix and --message' '
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	subtree_test_create_repo "$subtree_test_count" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" --message="Merged changes from subproject using squash" --squash FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Merged changes from subproject using squash"
+	)
+'
+
+next_test
+test_expect_success 'merge the added subproj again, should do nothing' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD &&
+		# this should not actually do anything, since FETCH_HEAD
+		# is already a parent
+		result=$(git merge -s ours -m "merge -s -ours" FETCH_HEAD) &&
+		check_equal "${result}" "Already up to date."
+	)
+'
+
+next_test
+test_expect_success 'merge new subproj history into subdir/ with a slash appended to the argument of --prefix' '
+	test_create_repo "$test_count" &&
+	test_create_repo "$test_count/subproj" &&
+	test_create_commit "$test_count" main1 &&
+	test_create_commit "$test_count/subproj" sub1 &&
+	(
+		cd "$test_count" &&
+		git fetch ./subproj master &&
+		git subtree add --prefix=subdir/ FETCH_HEAD
+	) &&
+	test_create_commit "$test_count/subproj" sub2 &&
+	(
+		cd "$test_count" &&
+		git fetch ./subproj master &&
+		git subtree merge --prefix=subdir/ FETCH_HEAD &&
+		check_equal "$(last_commit_message)" "Merge commit '\''$(git rev-parse FETCH_HEAD)'\''"
+	)
+'
+
+#
+# Tests for 'git subtree split'
+#
+
+next_test
+test_expect_success 'split requires option --prefix' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD &&
+		echo "You must provide the --prefix option." > expected &&
+		test_must_fail git subtree split > actual 2>&1 &&
+		test_debug "printf '"expected: "'" &&
+		test_debug "cat expected" &&
+		test_debug "printf '"actual: "'" &&
+		test_debug "cat actual" &&
+		test_cmp expected actual
+	)
+'
+
+next_test
+test_expect_success 'split requires path given by option --prefix must exist' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD &&
+		echo "'\''non-existent-directory'\'' does not exist; use '\''git subtree add'\''" > expected &&
+		test_must_fail git subtree split --prefix=non-existent-directory > actual 2>&1 &&
+		test_debug "printf '"expected: "'" &&
+		test_debug "cat expected" &&
+		test_debug "printf '"actual: "'" &&
+		test_debug "cat actual" &&
+		test_cmp expected actual
+	)
+'
+
+next_test
+test_expect_success 'split sub dir/ with --rejoin' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		split_hash=$(git subtree split --prefix="sub dir" --annotate="*") &&
+		git subtree split --prefix="sub dir" --annotate="*" --rejoin &&
+		check_equal "$(last_commit_message)" "Split '\''sub dir/'\'' into commit '\''$split_hash'\''"
+	)
+ '
+
+next_test
+test_expect_success 'split sub dir/ with --rejoin from scratch' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	(
+		cd "$subtree_test_count" &&
+		mkdir "sub dir" &&
+		echo file >"sub dir"/file &&
+		git add "sub dir/file" &&
+		git commit -m"sub dir file" &&
+		split_hash=$(git subtree split --prefix="sub dir" --rejoin) &&
+		git subtree split --prefix="sub dir" --rejoin &&
+		check_equal "$(last_commit_message)" "Split '\''sub dir/'\'' into commit '\''$split_hash'\''"
+	)
+ '
+
+next_test
+test_expect_success 'split sub dir/ with --rejoin and --message' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		git subtree split --prefix="sub dir" --message="Split & rejoin" --annotate="*" --rejoin &&
+		check_equal "$(last_commit_message)" "Split & rejoin"
+	)
+'
+
+next_test
+test_expect_success 'split "sub dir"/ with --branch' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		split_hash=$(git subtree split --prefix="sub dir" --annotate="*") &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br &&
+		check_equal "$(git rev-parse subproj-br)" "$split_hash"
+	)
+'
+
+next_test
+test_expect_success 'check hash of split' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		split_hash=$(git subtree split --prefix="sub dir" --annotate="*") &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br &&
+		check_equal "$(git rev-parse subproj-br)" "$split_hash" &&
+		# Check hash of split
+		new_hash=$(git rev-parse subproj-br^2) &&
+		(
+			cd ./"sub proj" &&
+			subdir_hash=$(git rev-parse HEAD) &&
+			check_equal ''"$new_hash"'' "$subdir_hash"
+		)
+	)
+'
+
+next_test
+test_expect_success 'split "sub dir"/ with --branch for an existing branch' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git branch subproj-br FETCH_HEAD &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		split_hash=$(git subtree split --prefix="sub dir" --annotate="*") &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br &&
+		check_equal "$(git rev-parse subproj-br)" "$split_hash"
+	)
+'
+
+next_test
+test_expect_success 'split "sub dir"/ with --branch for an incompatible branch' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git branch init HEAD &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		test_must_fail git subtree split --prefix="sub dir" --branch init
+	)
+'
+
+#
+# Validity checking
+#
+
+next_test
+test_expect_success 'make sure exactly the right set of files ends up in the subproj' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub3 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub3 &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD &&
+
+		test_write_lines main-sub1 main-sub2 main-sub3 main-sub4 \
+			sub1 sub2 sub3 sub4 >expect &&
+		git ls-files >actual &&
+		test_cmp expect actual
+	)
+'
+
+next_test
+test_expect_success 'make sure the subproj *only* contains commits that affect the "sub dir"' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub3 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub3 &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD &&
+
+		test_write_lines main-sub1 main-sub2 main-sub3 main-sub4 \
+			sub1 sub2 sub3 sub4 >expect &&
+		git log --name-only --pretty=format:"" >log &&
+		sort <log | sed "/^\$/ d" >actual &&
+		test_cmp expect actual
+	)
+'
+
+next_test
+test_expect_success 'make sure exactly the right set of files ends up in the mainline' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub3 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub3 &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree pull --prefix="sub dir" ./"sub proj" master &&
+
+		test_write_lines main1 main2 >chkm &&
+		test_write_lines main-sub1 main-sub2 main-sub3 main-sub4 >chkms &&
+		sed "s,^,sub dir/," chkms >chkms_sub &&
+		test_write_lines sub1 sub2 sub3 sub4 >chks &&
+		sed "s,^,sub dir/," chks >chks_sub &&
+
+		cat chkm chkms_sub chks_sub >expect &&
+		git ls-files >actual &&
+		test_cmp expect actual
+	)
+'
+
+next_test
+test_expect_success 'make sure each filename changed exactly once in the entire history' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git config log.date relative &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub3 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub3 &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree pull --prefix="sub dir" ./"sub proj" master &&
+
+		test_write_lines main1 main2 >chkm &&
+		test_write_lines sub1 sub2 sub3 sub4 >chks &&
+		test_write_lines main-sub1 main-sub2 main-sub3 main-sub4 >chkms &&
+		sed "s,^,sub dir/," chkms >chkms_sub &&
+
+		# main-sub?? and /"sub dir"/main-sub?? both change, because those are the
+		# changes that were split into their own history.  And "sub dir"/sub?? never
+		# change, since they were *only* changed in the subtree branch.
+		git log --name-only --pretty=format:"" >log &&
+		sort <log >sorted-log &&
+		sed "/^$/ d" sorted-log >actual &&
+
+		cat chkms chkm chks chkms_sub >expect-unsorted &&
+		sort expect-unsorted >expect &&
+		test_cmp expect actual
+	)
+'
+
+next_test
+test_expect_success 'make sure the --rejoin commits never make it into subproj' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub3 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub3 &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree pull --prefix="sub dir" ./"sub proj" master &&
+		check_equal "$(git log --pretty=format:"%s" HEAD^2 | grep -i split)" ""
+	)
+'
+
+next_test
+test_expect_success 'make sure no "git subtree" tagged commits make it into subproj' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub3 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub3 &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub4 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --annotate="*" --branch subproj-br --rejoin
+	) &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git fetch .. subproj-br &&
+		git merge FETCH_HEAD
+	) &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree pull --prefix="sub dir" ./"sub proj" master &&
+
+		# They are meaningless to subproj since one side of the merge refers to the mainline
+		check_equal "$(git log --pretty=format:"%s%n%b" HEAD^2 | grep "git-subtree.*:")" ""
+	)
+'
+
+#
+# A new set of tests
+#
+
+next_test
+test_expect_success 'make sure "git subtree split" find the correct parent' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git branch subproj-ref FETCH_HEAD &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --branch subproj-br &&
+
+		# at this point, the new commit parent should be subproj-ref, if it is
+		# not, something went wrong (the "newparent" of "master~" commit should
+		# have been sub2, but it was not, because its cache was not set to
+		# itself)
+		check_equal "$(git log --pretty=format:%P -1 subproj-br)" "$(git rev-parse subproj-ref)"
+	)
+'
+
+next_test
+test_expect_success 'split a new subtree without --onto option' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --branch subproj-br
+	) &&
+	mkdir "$subtree_test_count"/"sub dir2" &&
+	test_create_commit "$subtree_test_count" "sub dir2"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+
+		# also test that we can still split out an entirely new subtree;
+		# if the parent of the first commit in the tree is not empty,
+		# then the new subtree has accidentally been attached to something
+		git subtree split --prefix="sub dir2" --branch subproj2-br &&
+		check_equal "$(git log --pretty=format:%P -1 subproj2-br)" ""
+	)
+'
+
+next_test
+test_expect_success 'verify one file change per commit' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git branch sub1 FETCH_HEAD &&
+		git subtree add --prefix="sub dir" sub1
+	) &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir" --branch subproj-br
+	) &&
+	mkdir "$subtree_test_count"/"sub dir2" &&
+	test_create_commit "$subtree_test_count" "sub dir2"/main-sub2 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree split --prefix="sub dir2" --branch subproj2-br &&
+
+		x= &&
+		git log --pretty=format:"commit: %H" | join_commits |
+		(
+			while read commit a b; do
+				test_debug "echo Verifying commit $commit"
+				test_debug "echo a: $a"
+				test_debug "echo b: $b"
+				check_equal "$b" ""
+				x=1
+			done
+			check_equal "$x" 1
+		)
+	)
+'
+
+next_test
+test_expect_success 'push split to subproj' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	subtree_test_create_repo "$subtree_test_count/sub proj" &&
+	test_create_commit "$subtree_test_count" main1 &&
+	test_create_commit "$subtree_test_count/sub proj" sub1 &&
+	(
+		cd "$subtree_test_count" &&
+		git fetch ./"sub proj" master &&
+		git subtree add --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub1 &&
+	test_create_commit "$subtree_test_count" main2 &&
+	test_create_commit "$subtree_test_count/sub proj" sub2 &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub2 &&
+	(
+		cd "$subtree_test_count/sub proj" &&
+		git branch sub-branch-1 &&
+		cd .. &&
+		git fetch ./"sub proj" master &&
+		git subtree merge --prefix="sub dir" FETCH_HEAD
+	) &&
+	test_create_commit "$subtree_test_count" "sub dir"/main-sub3 &&
+	(
+		cd "$subtree_test_count" &&
+		git subtree push ./"sub proj" --prefix "sub dir" sub-branch-1 &&
+		cd ./"sub proj" &&
+		git checkout sub-branch-1 &&
+		check_equal "$(last_commit_message)" "sub dir/main-sub3"
+	)
+'
+
+#
+# This test covers 2 cases in subtree split copy_or_skip code
+# 1) Merges where one parent is a superset of the changes of the other
+#    parent regarding changes to the subtree, in this case the merge
+#    commit should be copied
+# 2) Merges where only one parent operates on the subtree, and the merge
+#    commit should be skipped
+#
+# (1) is checked by ensuring subtree_tip is a descendant of subtree_branch
+# (2) should have a check added (not_a_subtree_change shouldn't be present
+#     on the produced subtree)
+#
+# Other related cases which are not tested (or not currently handled correctly):
+# - Case (1) where there are more than 2 parents: it will sometimes correctly
+#   copy the merge, and sometimes not
+# - Merge commits where both parents have the same tree as the merge are
+#   currently always skipped, even if they reached that state via
+#   different sets of commits.
+#
+
+next_test
+test_expect_success 'subtree descendant check' '
+	subtree_test_create_repo "$subtree_test_count" &&
+	test_create_commit "$subtree_test_count" folder_subtree/a &&
+	(
+		cd "$subtree_test_count" &&
+		git branch branch
+	) &&
+	test_create_commit "$subtree_test_count" folder_subtree/0 &&
+	test_create_commit "$subtree_test_count" folder_subtree/b &&
+	cherry=$(cd "$subtree_test_count"; git rev-parse HEAD) &&
+	(
+		cd "$subtree_test_count" &&
+		git checkout branch
+	) &&
+	test_create_commit "$subtree_test_count" commit_on_branch &&
+	(
+		cd "$subtree_test_count" &&
+		git cherry-pick $cherry &&
+		git checkout master &&
+		git merge -m "merge should be kept on subtree" branch &&
+		git branch no_subtree_work_branch
+	) &&
+	test_create_commit "$subtree_test_count" folder_subtree/d &&
+	(
+		cd "$subtree_test_count" &&
+		git checkout no_subtree_work_branch
+	) &&
+	test_create_commit "$subtree_test_count" not_a_subtree_change &&
+	(
+		cd "$subtree_test_count" &&
+		git checkout master &&
+		git merge -m "merge should be skipped on subtree" no_subtree_work_branch &&
+
+		git subtree split --prefix folder_subtree/ --branch subtree_tip master &&
+		git subtree split --prefix folder_subtree/ --branch subtree_branch branch &&
+		check_equal $(git rev-list --count subtree_tip..subtree_branch) 0
+	)
+'
+
+test_done
diff --git a/third_party/git/contrib/subtree/todo b/third_party/git/contrib/subtree/todo
new file mode 100644
index 000000000000..0d0e77765175
--- /dev/null
+++ b/third_party/git/contrib/subtree/todo
@@ -0,0 +1,48 @@
+
+	delete tempdir
+
+	'git subtree rejoin' option to do the same as --rejoin, eg. after a
+	  rebase
+
+	--prefix doesn't force the subtree correctly in merge/pull:
+	"-s subtree" should be given an explicit subtree option?
+		There doesn't seem to be a way to do this.  We'd have to
+		patch git-merge-subtree.  Ugh.
+		(but we could avoid this problem by generating squashes with
+		exactly the right subtree structure, rather than using
+		subtree merge...)
+
+	add a 'log' subcommand to see what's new in a subtree?
+
+	add to-submodule and from-submodule commands
+
+	automated tests for --squash stuff
+
+	"add" command non-obviously requires a commitid; would be easier if
+		it had a "pull" sort of mode instead
+
+	"pull" and "merge" commands should fail if you've never merged
+		that --prefix before
+		
+	docs should provide an example of "add"
+	
+	note that the initial split doesn't *have* to have a commitid
+		specified... that's just an optimization
+
+	if you try to add (or maybe merge?) with an invalid commitid, you
+		get a misleading "prefix must end with /" message from
+		one of the other git tools that git-subtree calls.  Should
+		detect this situation and print the *real* problem.
+	
+	"pull --squash" should do fetch-synthesize-merge, but instead just
+		does "pull" directly, which doesn't work at all.
+
+	make a 'force-update' that does what 'add' does even if the subtree
+		already exists.  That way we can help people who imported
+		subtrees "incorrectly" (eg. by just copying in the files) in
+		the past.
+
+	guess --prefix automatically if possible based on pwd
+
+	make a 'git subtree grafts' that automatically expands --squash'd
+		commits so you can see the full history if you want it.
diff --git a/third_party/git/contrib/svn-fe/.gitignore b/third_party/git/contrib/svn-fe/.gitignore
new file mode 100644
index 000000000000..02a779158572
--- /dev/null
+++ b/third_party/git/contrib/svn-fe/.gitignore
@@ -0,0 +1,4 @@
+/*.xml
+/*.1
+/*.html
+/svn-fe
diff --git a/third_party/git/contrib/svn-fe/Makefile b/third_party/git/contrib/svn-fe/Makefile
new file mode 100644
index 000000000000..e8651aaf4b53
--- /dev/null
+++ b/third_party/git/contrib/svn-fe/Makefile
@@ -0,0 +1,105 @@
+all:: svn-fe$X
+
+CC = cc
+RM = rm -f
+MV = mv
+
+CFLAGS = -g -O2 -Wall
+LDFLAGS =
+EXTLIBS = -lz
+
+include ../../config.mak.uname
+-include ../../config.mak.autogen
+-include ../../config.mak
+
+ifeq ($(uname_S),Darwin)
+	ifndef NO_FINK
+		ifeq ($(shell test -d /sw/lib && echo y),y)
+			CFLAGS += -I/sw/include
+			LDFLAGS += -L/sw/lib
+		endif
+	endif
+	ifndef NO_DARWIN_PORTS
+		ifeq ($(shell test -d /opt/local/lib && echo y),y)
+			CFLAGS += -I/opt/local/include
+			LDFLAGS += -L/opt/local/lib
+		endif
+	endif
+endif
+
+ifndef NO_OPENSSL
+	EXTLIBS += -lssl
+	ifdef NEEDS_CRYPTO_WITH_SSL
+		EXTLIBS += -lcrypto
+	endif
+endif
+
+ifndef NO_PTHREADS
+	CFLAGS += $(PTHREADS_CFLAGS)
+	EXTLIBS += $(PTHREAD_LIBS)
+endif
+
+ifdef HAVE_CLOCK_GETTIME
+	CFLAGS += -DHAVE_CLOCK_GETTIME
+	EXTLIBS += -lrt
+endif
+
+ifdef NEEDS_LIBICONV
+	EXTLIBS += -liconv
+endif
+
+GIT_LIB = ../../libgit.a
+VCSSVN_LIB = ../../vcs-svn/lib.a
+XDIFF_LIB = ../../xdiff/lib.a
+
+LIBS = $(VCSSVN_LIB) $(GIT_LIB) $(XDIFF_LIB)
+
+QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
+QUIET_SUBDIR1 =
+
+ifneq ($(findstring $(MAKEFLAGS),w),w)
+PRINT_DIR = --no-print-directory
+else # "make -w"
+NO_SUBDIR = :
+endif
+
+ifneq ($(findstring $(MAKEFLAGS),s),s)
+ifndef V
+	QUIET_CC      = @echo '   ' CC $@;
+	QUIET_LINK    = @echo '   ' LINK $@;
+	QUIET_SUBDIR0 = +@subdir=
+	QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo '   ' SUBDIR $$subdir; \
+	                $(MAKE) $(PRINT_DIR) -C $$subdir
+endif
+endif
+
+svn-fe$X: svn-fe.o $(VCSSVN_LIB) $(XDIFF_LIB) $(GIT_LIB)
+	$(QUIET_LINK)$(CC) $(CFLAGS) $(LDFLAGS) $(EXTLIBS) -o $@ svn-fe.o $(LIBS)
+
+svn-fe.o: svn-fe.c ../../vcs-svn/svndump.h
+	$(QUIET_CC)$(CC) $(CFLAGS) -I../../vcs-svn -o $*.o -c $<
+
+svn-fe.html: svn-fe.txt
+	$(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
+		MAN_TXT=../contrib/svn-fe/svn-fe.txt \
+		../contrib/svn-fe/$@
+
+svn-fe.1: svn-fe.txt
+	$(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
+		MAN_TXT=../contrib/svn-fe/svn-fe.txt \
+		../contrib/svn-fe/$@
+	$(MV) ../../Documentation/svn-fe.1 .
+
+../../vcs-svn/lib.a: FORCE
+	$(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) vcs-svn/lib.a
+
+../../xdiff/lib.a: FORCE
+	$(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) xdiff/lib.a
+
+../../libgit.a: FORCE
+	$(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) libgit.a
+
+clean:
+	$(RM) svn-fe$X svn-fe.o svn-fe.html svn-fe.xml svn-fe.1
+
+.PHONY: all clean FORCE
diff --git a/third_party/git/contrib/svn-fe/svn-fe.c b/third_party/git/contrib/svn-fe/svn-fe.c
new file mode 100644
index 000000000000..f363505abb27
--- /dev/null
+++ b/third_party/git/contrib/svn-fe/svn-fe.c
@@ -0,0 +1,18 @@
+/*
+ * This file is in the public domain.
+ * You may freely use, modify, distribute, and relicense it.
+ */
+
+#include <stdlib.h>
+#include "svndump.h"
+
+int main(int argc, char **argv)
+{
+	if (svndump_init(NULL))
+		return 1;
+	svndump_read((argc > 1) ? argv[1] : NULL, "refs/heads/master",
+			"refs/notes/svn/revs");
+	svndump_deinit();
+	svndump_reset();
+	return 0;
+}
diff --git a/third_party/git/contrib/svn-fe/svn-fe.txt b/third_party/git/contrib/svn-fe/svn-fe.txt
new file mode 100644
index 000000000000..19333fc8dff3
--- /dev/null
+++ b/third_party/git/contrib/svn-fe/svn-fe.txt
@@ -0,0 +1,71 @@
+svn-fe(1)
+=========
+
+NAME
+----
+svn-fe - convert an SVN "dumpfile" to a fast-import stream
+
+SYNOPSIS
+--------
+[verse]
+mkfifo backchannel &&
+svnadmin dump --deltas REPO |
+	svn-fe [url] 3<backchannel |
+	git fast-import --cat-blob-fd=3 3>backchannel
+
+DESCRIPTION
+-----------
+
+Converts a Subversion dumpfile into input suitable for
+git-fast-import(1) and similar importers. REPO is a path to a
+Subversion repository mirrored on the local disk. Remote Subversion
+repositories can be mirrored on local disk using the `svnsync`
+command.
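+
+For example (a sketch only; the paths and URL below are placeholders, and
+the mirror's pre-revprop-change hook must be set up so that svnsync may
+copy revision properties), a remote repository might be mirrored like
+this:
+
+---------
+svnadmin create /tmp/mirror
+svnsync init file:///tmp/mirror http://svn.example.com/repo
+svnsync sync file:///tmp/mirror
+---------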
+
+Note: this tool is very young.  The details of its command-line
+interface may change in backward-incompatible ways.
+
+INPUT FORMAT
+------------
+Subversion's repository dump format is documented in full in
+`notes/dump-load-format.txt` from the Subversion source tree.
+Files in this format can be generated using the 'svnadmin dump' or
+'svk admin dump' command.
+
+OUTPUT FORMAT
+-------------
+The fast-import format is documented by the git-fast-import(1)
+manual page.
+
+NOTES
+-----
+Subversion dumps do not record a separate author and committer for
+each revision, nor do they record a separate display name and email
+address for each author.  Like git-svn(1), 'svn-fe' will use the name
+
+---------
+user <user@UUID>
+---------
+
+as committer, where 'user' is the value of the `svn:author` property
+and 'UUID' the repository's identifier.
+
+To support incremental imports, 'svn-fe' puts a `git-svn-id` line at
+the end of each commit log message if passed a URL on the command
+line.  This line has the form `git-svn-id: URL@REVNO UUID`.
+
+The resulting repository will generally require further processing
+to put each project in its own repository and to separate the history
+of each branch.  The 'git filter-repo --subdirectory-filter' command
+may be useful for this purpose.
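+
+For instance, to keep only the history under a hypothetical
+'project/trunk' directory and make it the new repository root:
+
+---------
+git filter-repo --subdirectory-filter project/trunk
+---------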
+
+BUGS
+----
+Empty directories and unknown properties are silently discarded.
+
+The exit status does not reflect whether an error was detected.
+
+SEE ALSO
+--------
+git-svn(1), svn2git(1), svk(1), git-filter-repo(1), git-fast-import(1),
+https://svn.apache.org/repos/asf/subversion/trunk/notes/dump-load-format.txt
diff --git a/third_party/git/contrib/svn-fe/svnrdump_sim.py b/third_party/git/contrib/svn-fe/svnrdump_sim.py
new file mode 100755
index 000000000000..8a3cee617524
--- /dev/null
+++ b/third_party/git/contrib/svn-fe/svnrdump_sim.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+"""
+Simulates svnrdump by replaying an existing dump from a file, taking care
+of the specified revision range.
+To simulate incremental imports the environment variable SVNRMAX can be set
+to the highest revision that should be available.
+"""
+import sys
+import os
+
+if sys.hexversion < 0x02040000:
+    # The limiter is the ValueError() calls. This may be too conservative
+    sys.stderr.write("svnrdump-sim.py: requires Python 2.4 or later.\n")
+    sys.exit(1)
+
+
+def getrevlimit():
+    var = 'SVNRMAX'
+    if var in os.environ:
+        return os.environ[var]
+    return None
+
+
+def writedump(url, lower, upper):
+    if url.startswith('sim://'):
+        filename = url[6:]
+        if filename[-1] == '/':
+            filename = filename[:-1]  # remove terminating slash
+    else:
+        raise ValueError('sim:// url required')
+    f = open(filename, 'r')
+    state = 'header'
+    wroterev = False
+    while True:
+        l = f.readline()
+        if l == '':
+            break
+        if state == 'header' and l.startswith('Revision-number: '):
+            state = 'prefix'
+        if state == 'prefix' and l == 'Revision-number: %s\n' % lower:
+            state = 'selection'
+        if not upper == 'HEAD' and state == 'selection' and \
+                l == 'Revision-number: %s\n' % upper:
+            break
+
+        if state == 'header' or state == 'selection':
+            if state == 'selection':
+                wroterev = True
+            sys.stdout.write(l)
+    return wroterev
+
+if __name__ == "__main__":
+    if len(sys.argv) not in (3, 4, 5):
+        print("usage: %s dump URL -rLOWER:UPPER" % sys.argv[0])
+        sys.exit(1)
+    if sys.argv[1] != 'dump':
+        raise NotImplementedError('only "dump" is supported.')
+    url = sys.argv[2]
+    # use a list so the upper bound can be overridden by SVNRMAX below
+    r = ['0', 'HEAD']
+    if len(sys.argv) == 4 and sys.argv[3][0:2] == '-r':
+        r = sys.argv[3][2:].lstrip().split(':')
+    if getrevlimit() is not None:
+        r[1] = getrevlimit()
+    if writedump(url, r[0], r[1]):
+        ret = 0
+    else:
+        ret = 1
+    sys.exit(ret)
diff --git a/third_party/git/contrib/thunderbird-patch-inline/README b/third_party/git/contrib/thunderbird-patch-inline/README
new file mode 100644
index 000000000000..000147bbe4a0
--- /dev/null
+++ b/third_party/git/contrib/thunderbird-patch-inline/README
@@ -0,0 +1,20 @@
+appp.sh is a script intended to be used together with External Editor
+for Mozilla Thunderbird. It lets you include patches inline in e-mails
+in an easy way.
+
+Usage:
+- Generate the patch with git format-patch (see the example below this list).
+- Start writing a new e-mail in Thunderbird.
+- Press the external editor button (or Ctrl-E) to run appp.sh.
+- Select the previously generated patch file.
+- Finish editing the e-mail.
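+
+For example, a patch for the most recent commit can be generated with:
+
+    git format-patch -1 HEAD
+
+which writes a 0001-*.patch file into the current directory.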
+
+Any text that is entered into the message editor before appp.sh is called
+will be moved to the section between the --- and the diffstat.
+
+All Signed-off-by: and Cc: lines in the patch will be added to the CC list.
+
+To set it up, just install External Editor and tell it to use appp.sh as the
+editor.
+
+Zenity is a required dependency.
diff --git a/third_party/git/contrib/thunderbird-patch-inline/appp.sh b/third_party/git/contrib/thunderbird-patch-inline/appp.sh
new file mode 100755
index 000000000000..1053872eea90
--- /dev/null
+++ b/third_party/git/contrib/thunderbird-patch-inline/appp.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+# Copyright 2008 Lukas Sandström <luksan@gmail.com>
+#
+# AppendPatch - A script to be used together with ExternalEditor
+# for Mozilla Thunderbird to properly include patches inline in e-mails.
+
+# ExternalEditor can be downloaded at http://globs.org/articles.php?lng=en&pg=2
+
+CONFFILE=~/.appprc
+
+SEP="-=-=-=-=-=-=-=-=-=# Don't remove this line #=-=-=-=-=-=-=-=-=-"
+if [ -e "$CONFFILE" ] ; then
+	LAST_DIR=$(grep -m 1 "^LAST_DIR=" "${CONFFILE}"|sed -e 's/^LAST_DIR=//')
+	cd "${LAST_DIR}"
+else
+	cd > /dev/null
+fi
+
+PATCH=$(zenity --file-selection)
+
+if [ "$?" != "0" ] ; then
+	#zenity --error --text "No patchfile given."
+	exit 1
+fi
+
+cd - > /dev/null
+
+SUBJECT=$(sed -n -e '/^Subject: /p' "${PATCH}")
+HEADERS=$(sed -e '/^'"${SEP}"'$/,$d' $1)
+BODY=$(sed -e "1,/${SEP}/d" $1)
+CMT_MSG=$(sed -e '1,/^$/d' -e '/^---$/,$d' "${PATCH}")
+DIFF=$(sed -e '1,/^---$/d' "${PATCH}")
+
+CCS=$(echo -e "$CMT_MSG\n$HEADERS" | sed -n -e 's/^Cc: \(.*\)$/\1,/gp' \
+	-e 's/^Signed-off-by: \(.*\)/\1,/gp')
+
+echo "$SUBJECT" > $1
+echo "Cc: $CCS" >> $1
+echo "$HEADERS" | sed -e '/^Subject: /d' -e '/^Cc: /d' >> $1
+echo "$SEP" >> $1
+
+echo "$CMT_MSG" >> $1
+echo "---" >> $1
+if [ "x${BODY}x" != "xx" ] ; then
+	echo >> $1
+	echo "$BODY" >> $1
+	echo >> $1
+fi
+echo "$DIFF" >> $1
+
+LAST_DIR=$(dirname "${PATCH}")
+
+grep -v "^LAST_DIR=" "${CONFFILE}" > "${CONFFILE}_"
+echo "LAST_DIR=${LAST_DIR}" >> "${CONFFILE}_"
+mv "${CONFFILE}_" "${CONFFILE}"
diff --git a/third_party/git/contrib/update-unicode/.gitignore b/third_party/git/contrib/update-unicode/.gitignore
new file mode 100644
index 000000000000..b0ebc6aad214
--- /dev/null
+++ b/third_party/git/contrib/update-unicode/.gitignore
@@ -0,0 +1,3 @@
+uniset/
+UnicodeData.txt
+EastAsianWidth.txt
diff --git a/third_party/git/contrib/update-unicode/README b/third_party/git/contrib/update-unicode/README
new file mode 100644
index 000000000000..151a1970419f
--- /dev/null
+++ b/third_party/git/contrib/update-unicode/README
@@ -0,0 +1,20 @@
+TL;DR: Run update_unicode.sh after the publication of a new Unicode
+standard and commit the resulting unicode-width.h file.
+
+The long version
+================
+
+The Git source code ships the file unicode-width.h, which contains
+tables of zero-width and double-width Unicode code points.
+These tables are generated using update_unicode.sh in this directory.
+update_unicode.sh itself uses a third-party tool, uniset, to query two
+Unicode data files for the interesting code points.
+
+On first run, update_unicode.sh clones uniset from GitHub and builds it.
+This requires a reasonably current version of autoconf (as of December
+2016, version 2.69 works).
+
+On each run, update_unicode.sh checks whether more recent Unicode data
+files are available from the Unicode Consortium, and rebuilds the header
+unicode-width.h with the new data. The new header can then be
+committed.
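+
+A typical update might look like this, run from the top of a normal
+git.git checkout (the commit message is only an example):
+
+    contrib/update-unicode/update_unicode.sh
+    git commit -m 'unicode: update the width tables' unicode-width.h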
diff --git a/third_party/git/contrib/update-unicode/update_unicode.sh b/third_party/git/contrib/update-unicode/update_unicode.sh
new file mode 100755
index 000000000000..aa90865befa4
--- /dev/null
+++ b/third_party/git/contrib/update-unicode/update_unicode.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+#See http://www.unicode.org/reports/tr44/
+#
+#Me Enclosing_Mark  an enclosing combining mark
+#Mn Nonspacing_Mark a nonspacing combining mark (zero advance width)
+#Cf Format          a format control character
+#
+cd "$(dirname "$0")"
+UNICODEWIDTH_H=$(git rev-parse --show-toplevel)/unicode-width.h
+
+wget -N http://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt \
+	http://www.unicode.org/Public/UCD/latest/ucd/EastAsianWidth.txt &&
+if ! test -d uniset; then
+	git clone https://github.com/depp/uniset.git &&
+	( cd uniset && git checkout 4b186196dd )
+fi &&
+(
+	cd uniset &&
+	if ! test -x uniset; then
+		autoreconf -i &&
+		./configure --enable-warnings=-Werror CFLAGS='-O0 -ggdb'
+	fi &&
+	make
+) &&
+UNICODE_DIR=. && export UNICODE_DIR &&
+cat >$UNICODEWIDTH_H <<-EOF
+static const struct interval zero_width[] = {
+	$(uniset/uniset --32 cat:Me,Mn,Cf + U+1160..U+11FF - U+00AD)
+};
+static const struct interval double_width[] = {
+	$(uniset/uniset --32 eaw:F,W)
+};
+EOF
diff --git a/third_party/git/contrib/vscode/.gitattributes b/third_party/git/contrib/vscode/.gitattributes
new file mode 100644
index 000000000000..e89f2236efea
--- /dev/null
+++ b/third_party/git/contrib/vscode/.gitattributes
@@ -0,0 +1 @@
+init.sh whitespace=-indent-with-non-tab
diff --git a/third_party/git/contrib/vscode/README.md b/third_party/git/contrib/vscode/README.md
new file mode 100644
index 000000000000..8202d62035f9
--- /dev/null
+++ b/third_party/git/contrib/vscode/README.md
@@ -0,0 +1,14 @@
+Configuration for VS Code
+=========================
+
+[VS Code](https://code.visualstudio.com/) is a lightweight but powerful source
+code editor which runs on your desktop and is available for
+[Windows](https://code.visualstudio.com/docs/setup/windows),
+[macOS](https://code.visualstudio.com/docs/setup/mac) and
+[Linux](https://code.visualstudio.com/docs/setup/linux). Among other languages,
+it has [support for C/C++ via an extension](https://github.com/Microsoft/vscode-cpptools).
+
+To start developing Git with VS Code, simply run the Unix shell script called
+`init.sh` in this directory, which creates the configuration files in
+`.vscode/` that VS Code consumes. `init.sh` needs access to `make` and `gcc`,
+so run the script in a Git SDK shell if you are using Windows.
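+
+For example (a sketch; it assumes VS Code's `code` command-line launcher
+is on your PATH):
+
+```sh
+cd /path/to/git        # your clone of the Git source code
+contrib/vscode/init.sh
+code .                 # open the configured working tree in VS Code
+```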
diff --git a/third_party/git/contrib/vscode/init.sh b/third_party/git/contrib/vscode/init.sh
new file mode 100755
index 000000000000..27de94994b5d
--- /dev/null
+++ b/third_party/git/contrib/vscode/init.sh
@@ -0,0 +1,375 @@
+#!/bin/sh
+
+die () {
+	echo "$*" >&2
+	exit 1
+}
+
+cd "$(dirname "$0")"/../.. ||
+die "Could not cd to top-level directory"
+
+mkdir -p .vscode ||
+die "Could not create .vscode/"
+
+# General settings
+
+cat >.vscode/settings.json.new <<\EOF ||
+{
+    "C_Cpp.intelliSenseEngine": "Default",
+    "C_Cpp.intelliSenseEngineFallback": "Disabled",
+    "[git-commit]": {
+        "editor.wordWrap": "wordWrapColumn",
+        "editor.wordWrapColumn": 72
+    },
+    "[c]": {
+        "editor.detectIndentation": false,
+        "editor.insertSpaces": false,
+        "editor.tabSize": 8,
+        "editor.wordWrap": "wordWrapColumn",
+        "editor.wordWrapColumn": 80,
+        "files.trimTrailingWhitespace": true
+    },
+    "files.associations": {
+        "*.h": "c",
+        "*.c": "c"
+    },
+    "cSpell.ignorePaths": [
+    ],
+    "cSpell.words": [
+        "DATAW",
+        "DBCACHED",
+        "DFCHECK",
+        "DTYPE",
+        "Hamano",
+        "HCAST",
+        "HEXSZ",
+        "HKEY",
+        "HKLM",
+        "IFGITLINK",
+        "IFINVALID",
+        "ISBROKEN",
+        "ISGITLINK",
+        "ISSYMREF",
+        "Junio",
+        "LPDWORD",
+        "LPPROC",
+        "LPWSTR",
+        "MSVCRT",
+        "NOARG",
+        "NOCOMPLETE",
+        "NOINHERIT",
+        "RENORMALIZE",
+        "STARTF",
+        "STARTUPINFOEXW",
+        "Schindelin",
+        "UCRT",
+        "YESNO",
+        "argcp",
+        "beginthreadex",
+        "committish",
+        "contentp",
+        "cpath",
+        "cpidx",
+        "ctim",
+        "dequote",
+        "envw",
+        "ewah",
+        "fdata",
+        "fherr",
+        "fhin",
+        "fhout",
+        "fragp",
+        "fsmonitor",
+        "hnsec",
+        "idents",
+        "includeif",
+        "interpr",
+        "iprog",
+        "isexe",
+        "iskeychar",
+        "kompare",
+        "mksnpath",
+        "mktag",
+        "mktree",
+        "mmblob",
+        "mmbuffer",
+        "mmfile",
+        "noenv",
+        "nparents",
+        "ntpath",
+        "ondisk",
+        "ooid",
+        "oplen",
+        "osdl",
+        "pnew",
+        "pold",
+        "ppinfo",
+        "pushf",
+        "pushv",
+        "rawsz",
+        "rebasing",
+        "reencode",
+        "repo",
+        "rerere",
+        "scld",
+        "sharedrepo",
+        "spawnv",
+        "spawnve",
+        "spawnvpe",
+        "strdup'ing",
+        "submodule",
+        "submodules",
+        "topath",
+        "topo",
+        "tpatch",
+        "unexecutable",
+        "unhide",
+        "unkc",
+        "unkv",
+        "unmark",
+        "unmatch",
+        "unsets",
+        "unshown",
+        "untracked",
+        "untrackedcache",
+        "unuse",
+        "upos",
+        "uval",
+        "vreportf",
+        "wargs",
+        "wargv",
+        "wbuffer",
+        "wcmd",
+        "wcsnicmp",
+        "wcstoutfdup",
+        "wdeltaenv",
+        "wdir",
+        "wenv",
+        "wenvblk",
+        "wenvcmp",
+        "wenviron",
+        "wenvpos",
+        "wenvsz",
+        "wfile",
+        "wfilename",
+        "wfopen",
+        "wfreopen",
+        "wfullpath",
+        "which'll",
+        "wlink",
+        "wmain",
+        "wmkdir",
+        "wmktemp",
+        "wnewpath",
+        "wotype",
+        "wpath",
+        "wpathname",
+        "wpgmptr",
+        "wpnew",
+        "wpointer",
+        "wpold",
+        "wpos",
+        "wputenv",
+        "wrmdir",
+        "wship",
+        "wtarget",
+        "wtemplate",
+        "wunlink",
+        "xcalloc",
+        "xgetcwd",
+        "xmallocz",
+        "xmemdupz",
+        "xmmap",
+        "xopts",
+        "xrealloc",
+        "xsnprintf",
+        "xutftowcs",
+        "xutftowcsn",
+        "xwcstoutf"
+    ],
+    "cSpell.ignoreRegExpList": [
+        "\\\"(DIRC|FSMN|REUC|UNTR)\\\"",
+        "\\\\u[0-9a-fA-Fx]{4}\\b",
+        "\\b(filfre|frotz|xyzzy)\\b",
+        "\\bCMIT_FMT_DEFAULT\\b",
+        "\\bde-munge\\b",
+        "\\bGET_OID_DISAMBIGUATORS\\b",
+        "\\bHASH_RENORMALIZE\\b",
+        "\\bTREESAMEness\\b",
+        "\\bUSE_STDEV\\b",
+        "\\Wchar *\\*\\W*utfs\\W",
+        "cURL's",
+        "nedmalloc'ed",
+        "ntifs\\.h",
+    ],
+}
+EOF
+die "Could not write settings.json"
+
+# Infer some setup-specific locations/names
+
+GCCPATH="$(which gcc)"
+GDBPATH="$(which gdb)"
+MAKECOMMAND="make -j5 DEVELOPER=1"
+OSNAME=
+X=
+case "$(uname -s)" in
+MINGW*)
+	GCCPATH="$(cygpath -am "$GCCPATH")"
+	GDBPATH="$(cygpath -am "$GDBPATH")"
+	MAKE_BASH="$(cygpath -am /git-cmd.exe) --command=usr\\\\bin\\\\bash.exe"
+	MAKECOMMAND="$MAKE_BASH -lc \\\"$MAKECOMMAND\\\""
+	OSNAME=Win32
+	X=.exe
+	;;
+Linux)
+	OSNAME=Linux
+	;;
+Darwin)
+	OSNAME=macOS
+	;;
+esac
+
+# Default build task
+
+cat >.vscode/tasks.json.new <<EOF ||
+{
+    // See https://go.microsoft.com/fwlink/?LinkId=733558
+    // for the documentation about the tasks.json format
+    "version": "2.0.0",
+    "tasks": [
+        {
+            "label": "make",
+            "type": "shell",
+            "command": "$MAKECOMMAND",
+            "group": {
+                "kind": "build",
+                "isDefault": true
+            }
+        }
+    ]
+}
+EOF
+die "Could not install default build task"
+
+# Debugger settings
+
+cat >.vscode/launch.json.new <<EOF ||
+{
+    // Use IntelliSense to learn about possible attributes.
+    // Hover to view descriptions of existing attributes.
+    // For more information, visit:
+    // https://go.microsoft.com/fwlink/?linkid=830387
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "(gdb) Launch",
+            "type": "cppdbg",
+            "request": "launch",
+            "program": "\${workspaceFolder}/git$X",
+            "args": [],
+            "stopAtEntry": false,
+            "cwd": "\${workspaceFolder}",
+            "environment": [],
+            "externalConsole": true,
+            "MIMode": "gdb",
+            "miDebuggerPath": "$GDBPATH",
+            "setupCommands": [
+                {
+                    "description": "Enable pretty-printing for gdb",
+                    "text": "-enable-pretty-printing",
+                    "ignoreFailures": true
+                }
+            ]
+        }
+    ]
+}
+EOF
+die "Could not write launch configuration"
+
+# C/C++ extension settings
+
+make -f - OSNAME=$OSNAME GCCPATH="$GCCPATH" vscode-init \
+	>.vscode/c_cpp_properties.json <<\EOF ||
+include Makefile
+
+vscode-init:
+	@mkdir -p .vscode && \
+	incs= && defs= && \
+	for e in $(ALL_CFLAGS) \
+			'-DGIT_EXEC_PATH="$(gitexecdir_SQ)"' \
+			'-DGIT_LOCALE_PATH="$(localedir_relative_SQ)"' \
+			'-DBINDIR="$(bindir_relative_SQ)"' \
+			'-DFALLBACK_RUNTIME_PREFIX="$(prefix_SQ)"' \
+			'-DDEFAULT_GIT_TEMPLATE_DIR="$(template_dir_SQ)"' \
+			'-DETC_GITCONFIG="$(ETC_GITCONFIG_SQ)"' \
+			'-DETC_GITATTRIBUTES="$(ETC_GITATTRIBUTES_SQ)"' \
+			'-DGIT_LOCALE_PATH="$(localedir_relative_SQ)"' \
+			'-DCURL_DISABLE_TYPECHECK' \
+			'-DGIT_HTML_PATH="$(htmldir_relative_SQ)"' \
+			'-DGIT_MAN_PATH="$(mandir_relative_SQ)"' \
+			'-DGIT_INFO_PATH="$(infodir_relative_SQ)"'; do \
+		case "$$e" in \
+		-I.) \
+			incs="$$(printf '% 16s"$${workspaceRoot}",\n%s' \
+				"" "$$incs")" \
+			;; \
+		-I/*) \
+			incs="$$(printf '% 16s"%s",\n%s' \
+				"" "$${e#-I}" "$$incs")" \
+			;; \
+		-I*) \
+			incs="$$(printf '% 16s"$${workspaceRoot}/%s",\n%s' \
+				"" "$${e#-I}" "$$incs")" \
+			;; \
+		-D*) \
+			defs="$$(printf '% 16s"%s",\n%s' \
+				"" "$$(echo "$${e#-D}" | sed 's/"/\\&/g')" \
+				"$$defs")" \
+			;; \
+		esac; \
+	done && \
+	echo '{' && \
+	echo '    "configurations": [' && \
+	echo '        {' && \
+	echo '            "name": "$(OSNAME)",' && \
+	echo '            "intelliSenseMode": "clang-x64",' && \
+	echo '            "includePath": [' && \
+	echo "$$incs" | sort | sed '$$s/,$$//' && \
+	echo '            ],' && \
+	echo '            "defines": [' && \
+	echo "$$defs" | sort | sed '$$s/,$$//' && \
+	echo '            ],' && \
+	echo '            "browse": {' && \
+	echo '                "limitSymbolsToIncludedHeaders": true,' && \
+	echo '                "databaseFilename": "",' && \
+	echo '                "path": [' && \
+	echo '                    "$${workspaceRoot}"' && \
+	echo '                ]' && \
+	echo '            },' && \
+	echo '            "cStandard": "c11",' && \
+	echo '            "cppStandard": "c++17",' && \
+	echo '            "compilerPath": "$(GCCPATH)"' && \
+	echo '        }' && \
+	echo '    ],' && \
+	echo '    "version": 4' && \
+	echo '}'
+EOF
+die "Could not write settings for the C/C++ extension"
+
+for file in .vscode/settings.json .vscode/tasks.json .vscode/launch.json
+do
+	if test -f $file
+	then
+		if git diff --no-index --quiet --exit-code $file $file.new
+		then
+			rm $file.new
+		else
+			printf "The file $file.new has these changes:\n\n"
+			git --no-pager diff --no-index $file $file.new
+			printf "\n\nMaybe \`mv $file.new $file\`?\n\n"
+		fi
+	else
+		mv $file.new $file
+	fi
+done
diff --git a/third_party/git/contrib/workdir/.gitattributes b/third_party/git/contrib/workdir/.gitattributes
new file mode 100644
index 000000000000..1f78c5d1bd30
--- /dev/null
+++ b/third_party/git/contrib/workdir/.gitattributes
@@ -0,0 +1 @@
+/git-new-workdir eol=lf
diff --git a/third_party/git/contrib/workdir/git-new-workdir b/third_party/git/contrib/workdir/git-new-workdir
new file mode 100755
index 000000000000..888c34a52152
--- /dev/null
+++ b/third_party/git/contrib/workdir/git-new-workdir
@@ -0,0 +1,105 @@
+#!/bin/sh
+
+usage () {
+	echo "usage:" $@
+	exit 127
+}
+
+die () {
+	echo $@
+	exit 128
+}
+
+failed () {
+	die "unable to create new workdir '$new_workdir'!"
+}
+
+if test $# -lt 2 || test $# -gt 3
+then
+	usage "$0 <repository> <new_workdir> [<branch>]"
+fi
+
+orig_git=$1
+new_workdir=$2
+branch=$3
+
+# want to make sure that what is pointed to has a .git directory ...
+git_dir=$(cd "$orig_git" 2>/dev/null &&
+  git rev-parse --git-dir 2>/dev/null) ||
+  die "Not a git repository: \"$orig_git\""
+
+case "$git_dir" in
+.git)
+	git_dir="$orig_git/.git"
+	;;
+.)
+	git_dir=$orig_git
+	;;
+esac
+
+# don't link to a configured bare repository
+isbare=$(git --git-dir="$git_dir" config --bool --get core.bare)
+if test ztrue = "z$isbare"
+then
+	die "\"$git_dir\" has core.bare set to true," \
+		" remove from \"$git_dir/config\" to use $0"
+fi
+
+# don't link to a workdir
+if test -h "$git_dir/config"
+then
+	die "\"$orig_git\" is a working directory only, please specify" \
+		"a complete repository."
+fi
+
+# make sure the links in the workdir have full paths to the original repo
+git_dir=$(cd "$git_dir" && pwd) || exit 1
+
+# don't recreate a workdir over an existing directory, unless it's empty
+if test -d "$new_workdir"
+then
+	if test $(ls -a1 "$new_workdir/." | wc -l) -ne 2
+	then
+		die "destination directory '$new_workdir' is not empty."
+	fi
+	cleandir="$new_workdir/.git"
+else
+	cleandir="$new_workdir"
+fi
+
+mkdir -p "$new_workdir/.git" || failed
+cleandir=$(cd "$cleandir" && pwd) || failed
+
+cleanup () {
+	rm -rf "$cleandir"
+}
+siglist="0 1 2 15"
+trap cleanup $siglist
+
+# create the links to the original repo.  explicitly exclude index, HEAD and
+# logs/HEAD from the list since they are purely related to the current working
+# directory, and should not be shared.
+for x in config refs logs/refs objects info hooks packed-refs remotes rr-cache svn
+do
+	# create a containing directory if needed
+	case $x in
+	*/*)
+		mkdir -p "$new_workdir/.git/${x%/*}"
+		;;
+	esac
+
+	ln -s "$git_dir/$x" "$new_workdir/.git/$x" || failed
+done
+
+# commands below this are run in the context of the new workdir
+cd "$new_workdir" || failed
+
+# copy the HEAD from the original repository as a default branch
+cp "$git_dir/HEAD" .git/HEAD || failed
+
+# the workdir is set up.  if the checkout fails, the user can fix it.
+trap - $siglist
+
+# checkout the branch (either the same as HEAD from the original repository,
+# or the one that was asked for)
+git checkout -f $branch