diff --git a/.gitattributes b/.gitattributes index acf853e029..f0658ef4f5 100644 --- a/.gitattributes +++ b/.gitattributes @@ -5,6 +5,7 @@ *.pl eol=lf diff=perl *.pm eol=lf diff=perl *.py eol=lf diff=python +*.bat eol=crlf /Documentation/**/*.txt eol=lf /command-list.txt eol=lf /GIT-VERSION-GEN eol=lf diff --git a/.gitignore b/.gitignore index 0d77ea5894..56a7fefa76 100644 --- a/.gitignore +++ b/.gitignore @@ -190,6 +190,42 @@ /gitweb/static/gitweb.js /gitweb/static/gitweb.min.* /command-list.h +/libgit +/test-chmtime +/test-ctype +/test-config +/test-date +/test-delta +/test-dump-cache-tree +/test-dump-split-index +/test-dump-untracked-cache +/test-fake-ssh +/test-scrap-cache-tree +/test-genrandom +/test-hashmap +/test-index-version +/test-line-buffer +/test-match-trees +/test-mergesort +/test-mktemp +/test-parse-options +/test-path-utils +/test-prio-queue +/test-read-cache +/test-regex +/test-revision-walking +/test-run-command +/test-sha1 +/test-sha1-array +/test-sigchain +/test-string-list +/test-submodule-config +/test-subprocess +/test-svn-fe +/test-urlmatch-normalization +/test-wildmatch +/vcs-svn_lib +/xdiff_lib *.tar.gz *.dsc *.deb @@ -227,5 +263,14 @@ *.user *.idb *.pdb -/Debug/ -/Release/ +*.ilk +*.iobj +*.ipdb +*.dll +.vs/ +*.manifest +Debug/ +Release/ +/UpgradeLog*.htm +/git.VC.VC.opendb +/git.VC.db diff --git a/Documentation/config.txt b/Documentation/config.txt index d87846faa6..e9b2d10e99 100644 --- a/Documentation/config.txt +++ b/Documentation/config.txt @@ -406,6 +406,8 @@ include::config/reset.txt[] include::config/sendemail.txt[] +include::config/sendpack.txt[] + include::config/sequencer.txt[] include::config/showbranch.txt[] diff --git a/Documentation/config/core.txt b/Documentation/config/core.txt index d0e6635fe0..e909b26001 100644 --- a/Documentation/config/core.txt +++ b/Documentation/config/core.txt @@ -548,6 +548,19 @@ relatively high IO latencies. When enabled, Git will do the index comparison to the filesystem data in parallel, allowing overlapping IO's. Defaults to true. +core.fscache:: + Enable additional caching of file system data for some operations. ++ +Git for Windows uses this to bulk-read and cache lstat data of entire +directories (instead of doing lstat file by file). + +core.longpaths:: + Enable long path (> 260) support for builtin commands in Git for + Windows. This is disabled by default, as long paths are not supported + by Windows Explorer, cmd.exe and the Git for Windows tool chain + (msys, bash, tcl, perl...). Only enable this if you know what you're + doing and are prepared to live with a few quirks. + core.unsetenvvars:: Windows-only: comma-separated list of environment variables' names that need to be unset before spawning any other process. diff --git a/Documentation/config/sendpack.txt b/Documentation/config/sendpack.txt new file mode 100644 index 0000000000..e306f657fb --- /dev/null +++ b/Documentation/config/sendpack.txt @@ -0,0 +1,5 @@ +sendpack.sideband:: + Allows disabling the side-band-64k capability for send-pack even + when it is advertised by the server. Makes it possible to work + around a limitation in the Git for Windows implementation together + with the dumb git protocol. Defaults to true. diff --git a/Documentation/git-reset.txt b/Documentation/git-reset.txt index 2dac95c71a..4c0f808ac9 100644 --- a/Documentation/git-reset.txt +++ b/Documentation/git-reset.txt @@ -10,6 +10,7 @@ SYNOPSIS [verse] 'git reset' [-q] [<tree-ish>] [--] <paths>... 'git reset' (--patch | -p) [<tree-ish>] [--] [<paths>...]
+EXPERIMENTAL: 'git reset' [-q] [--stdin [-z]] [<tree-ish>] 'git reset' [--soft | --mixed [-N] | --hard | --merge | --keep] [-q] [<commit>] DESCRIPTION @@ -100,6 +101,15 @@ OPTIONS `reset.quiet` config option. `--quiet` and `--no-quiet` will override the default behavior. +--stdin:: + EXPERIMENTAL: Instead of taking the list of paths from the + command line, read the list of paths from the standard input. + Paths are separated by LF (i.e. one path per line) by + default. + +-z:: + EXPERIMENTAL: Only meaningful with `--stdin`; paths are + separated with the NUL character instead of LF. EXAMPLES -------- diff --git a/Makefile b/Makefile index 1a44c811aa..3531ff367f 100644 --- a/Makefile +++ b/Makefile @@ -1203,7 +1203,7 @@ endif ifdef SANE_TOOL_PATH SANE_TOOL_PATH_SQ = $(subst ','\'',$(SANE_TOOL_PATH)) -BROKEN_PATH_FIX = 's|^\# @@BROKEN_PATH_FIX@@$$|git_broken_path_fix $(SANE_TOOL_PATH_SQ)|' +BROKEN_PATH_FIX = 's|^\# @@BROKEN_PATH_FIX@@$$|git_broken_path_fix "$(SANE_TOOL_PATH_SQ)"|' PATH := $(SANE_TOOL_PATH):${PATH} else BROKEN_PATH_FIX = '/^\# @@BROKEN_PATH_FIX@@$$/d' @@ -2650,7 +2650,7 @@ bin-wrappers/%: wrap-for-bin.sh @mkdir -p bin-wrappers $(QUIET_GEN)sed -e '1s|#!.*/sh|#!$(SHELL_PATH_SQ)|' \ -e 's|@@BUILD_DIR@@|$(shell pwd)|' \ - -e 's|@@PROG@@|$(patsubst test-%,t/helper/test-%,$(@F))|' < $< > $@ && \ + -e 's|@@PROG@@|$(patsubst test-%,t/helper/test-%$(X),$(@F))$(patsubst git%,$(X),$(filter $(@F),$(BINDIR_PROGRAMS_NEED_X)))|' < $< > $@ && \ chmod +x $@ # GNU make supports exporting all variables by "export" without parameters. @@ -2788,6 +2788,33 @@ install: all $(INSTALL) $(ALL_PROGRAMS) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' $(INSTALL) -m 644 $(SCRIPT_LIB) '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' $(INSTALL) $(install_bindir_programs) '$(DESTDIR_SQ)$(bindir_SQ)' +ifdef MSVC + # We DO NOT install the individual foo.o.pdb files because they + # have already been rolled up into the exe's pdb file. + # We DO NOT have pdb files for the builtin commands (like git-status.exe) + # because it is just a copy/hardlink of git.exe, rather than a unique binary.
+ $(INSTALL) git.pdb '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) git-shell.pdb '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) git-upload-pack.pdb '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) git-credential-store.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-daemon.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-fast-import.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-http-backend.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-http-fetch.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-http-push.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-imap-send.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-remote-http.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-remote-testsvn.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-sh-i18n--envsubst.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' + $(INSTALL) git-show-index.pdb '$(DESTDIR_SQ)$(gitexec_instdir_SQ)' +ifndef DEBUG + $(INSTALL) $(vcpkg_rel_bin)/*.dll '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) $(vcpkg_rel_bin)/*.pdb '$(DESTDIR_SQ)$(bindir_SQ)' +else + $(INSTALL) $(vcpkg_dbg_bin)/*.dll '$(DESTDIR_SQ)$(bindir_SQ)' + $(INSTALL) $(vcpkg_dbg_bin)/*.pdb '$(DESTDIR_SQ)$(bindir_SQ)' +endif +endif $(MAKE) -C templates DESTDIR='$(DESTDIR_SQ)' install $(INSTALL) -d -m 755 '$(DESTDIR_SQ)$(mergetools_instdir_SQ)' $(INSTALL) -m 644 mergetools/* '$(DESTDIR_SQ)$(mergetools_instdir_SQ)' @@ -2989,6 +3016,19 @@ endif $(RM) GIT-VERSION-FILE GIT-CFLAGS GIT-LDFLAGS GIT-BUILD-OPTIONS $(RM) GIT-USER-AGENT GIT-PREFIX $(RM) GIT-SCRIPT-DEFINES GIT-PERL-DEFINES GIT-PERL-HEADER GIT-PYTHON-VARS +ifdef MSVC + $(RM) $(patsubst %.o,%.o.pdb,$(OBJECTS)) + $(RM) $(patsubst %.exe,%.pdb,$(OTHER_PROGRAMS)) + $(RM) $(patsubst %.exe,%.iobj,$(OTHER_PROGRAMS)) + $(RM) $(patsubst %.exe,%.ipdb,$(OTHER_PROGRAMS)) + $(RM) $(patsubst %.exe,%.pdb,$(PROGRAMS)) + $(RM) $(patsubst %.exe,%.iobj,$(PROGRAMS)) + $(RM) $(patsubst %.exe,%.ipdb,$(PROGRAMS)) + $(RM) $(patsubst %.exe,%.pdb,$(TEST_PROGRAMS)) + $(RM) $(patsubst %.exe,%.iobj,$(TEST_PROGRAMS)) + $(RM) $(patsubst %.exe,%.ipdb,$(TEST_PROGRAMS)) + $(RM) compat/vcbuild/MSVC-DEFS-GEN +endif .PHONY: all install profile-clean cocciclean clean strip .PHONY: shell_compatibility_test please_set_SHELL_PATH_to_a_more_modern_shell diff --git a/README.md b/README.md index f920a42fad..bf4780c22d 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +[![Build Status](https://dev.azure.com/git/git/_apis/build/status/test-git.git)](https://dev.azure.com/git/git/_build/latest?definitionId=2) + Git - fast, scalable, distributed revision control system ========================================================= diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000000..4dc6fea129 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,325 @@ +resources: +- repo: self + fetchDepth: 1 + +phases: +- phase: linux_clang + displayName: linux-clang + condition: succeeded() + queue: + name: Hosted Ubuntu 1604 + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + sudo apt-get update && + sudo rm /var/lib/apt/lists/lock && + sudo apt-get -y install git gcc make libssl-dev libcurl4-openssl-dev libexpat-dev tcl tk gettext git-email zlib1g-dev apache2-bin && + + export CC=clang || exit 1 + + ci/install-dependencies.sh + ci/run-build-and-tests.sh || { + ci/print-test-failures.sh + exit 1 + } + + test "$GITFILESHAREPWD" =
'$(gitfileshare.pwd)' || sudo umount "$HOME/test-cache" || exit 1 + displayName: 'ci/run-build-and-tests.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + - task: PublishTestResults@2 + displayName: 'Publish Test Results **/TEST-*.xml' + inputs: + mergeTestResults: true + testRunTitle: 'linux-clang' + platform: Linux + publishRunAttachments: false + condition: succeededOrFailed() + +- phase: linux_gcc + displayName: linux-gcc + condition: succeeded() + queue: + name: Hosted Ubuntu 1604 + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + sudo apt-get update && + sudo rm /var/lib/apt/lists/lock && + sudo apt-get -y install git gcc make libssl-dev libcurl4-openssl-dev libexpat-dev tcl tk gettext git-email zlib1g-dev apache2-bin || exit 1 + + ci/install-dependencies.sh + ci/run-build-and-tests.sh || { + ci/print-test-failures.sh + exit 1 + } + + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || sudo umount "$HOME/test-cache" || exit 1 + displayName: 'ci/run-build-and-tests.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + - task: PublishTestResults@2 + displayName: 'Publish Test Results **/TEST-*.xml' + inputs: + mergeTestResults: true + testRunTitle: 'linux-gcc' + platform: Linux + publishRunAttachments: false + condition: succeededOrFailed() + +- phase: osx_clang + displayName: osx-clang + condition: succeeded() + queue: + name: Hosted macOS + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + export CC=clang + + ci/install-dependencies.sh + ci/run-build-and-tests.sh || { + ci/print-test-failures.sh + exit 1 + } + + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || umount "$HOME/test-cache" || exit 1 + displayName: 'ci/run-build-and-tests.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + - task: PublishTestResults@2 + displayName: 'Publish Test Results **/TEST-*.xml' + inputs: + mergeTestResults: true + testRunTitle: 'osx-clang' + platform: macOS + publishRunAttachments: false + condition: succeededOrFailed() + +- phase: osx_gcc + displayName: osx-gcc + condition: succeeded() + queue: + name: Hosted macOS + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + ci/install-dependencies.sh + ci/run-build-and-tests.sh || { + ci/print-test-failures.sh + exit 1 + } + + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || umount "$HOME/test-cache" || exit 1 + displayName: 'ci/run-build-and-tests.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + - task: PublishTestResults@2 + displayName: 'Publish Test Results **/TEST-*.xml' + inputs: + mergeTestResults: true + testRunTitle: 'osx-gcc' + platform: macOS + publishRunAttachments: false + condition: succeededOrFailed() + +- phase: gettext_poison + displayName: GETTEXT_POISON + condition: succeeded() + queue: + name: Hosted Ubuntu 1604 + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + sudo apt-get update && + sudo rm /var/lib/apt/lists/lock && + sudo apt-get -y install git gcc make libssl-dev libcurl4-openssl-dev libexpat-dev tcl tk 
gettext git-email zlib1g-dev && + + export jobname=GETTEXT_POISON || exit 1 + + ci/run-build-and-tests.sh || { + ci/print-test-failures.sh + exit 1 + } + + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || sudo umount "$HOME/test-cache" || exit 1 + displayName: 'ci/run-build-and-tests.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + - task: PublishTestResults@2 + displayName: 'Publish Test Results **/TEST-*.xml' + inputs: + mergeTestResults: true + testRunTitle: 'gettext-poison' + platform: Linux + publishRunAttachments: false + condition: succeededOrFailed() + +- phase: windows + displayName: Windows + condition: succeeded() + queue: + name: Hosted + timeoutInMinutes: 240 + steps: + - powershell: | + # Helper to check the error level of the latest command (exit with error when appropriate) + function c() { if (!$?) { exit(1) } } + + if ("$GITFILESHAREPWD" -ne "" -and "$GITFILESHAREPWD" -ne "`$`(gitfileshare.pwd)") { + net use s: \\gitfileshare.file.core.windows.net\test-cache "$GITFILESHAREPWD" /user:AZURE\gitfileshare /persistent:no; c + cmd /c mklink /d "$(Build.SourcesDirectory)\test-cache" S:\; c + } + + # Add build agent's MinGit to PATH + $env:PATH = $env:AGENT_HOMEDIRECTORY +"\externals\\git\cmd;" +$env:PATH + + # Helper to initialize (or update) a Git worktree + function init ($path, $url, $set_origin) { + if (Test-Path $path) { + cd $path; c + if (Test-Path .git) { + git init; c + } else { + git status + } + } else { + git init $path; c + cd $path; c + } + git config core.autocrlf false; c + git config core.untrackedCache true; c + if (($set_origin -ne 0) -and !(git config remote.origin.url)) { + git remote add origin $url; c + } + git fetch --depth=1 $url master; c + git reset --hard FETCH_HEAD; c + git clean -df; c + } + + # Initialize Git for Windows' SDK + $sdk_path = "$(Build.SourcesDirectory)\git-sdk-64" + init "$sdk_path" "https://dev.azure.com/git-for-windows/git-sdk-64/_git/git-sdk-64" 0 + init usr\src\build-extra https://github.com/git-for-windows/build-extra 1 + + cd "$(Build.SourcesDirectory)"; c + + $env:HOME = "$(Build.SourcesDirectory)" + $env:MSYSTEM = "MINGW64" + git-sdk-64\git-cmd --command=usr\\bin\\bash.exe -lc @" + . 
ci/lib.sh + + make -j10 DEVELOPER=1 NO_PERL=1 || exit 1 + NO_PERL=1 NO_SVN_TESTS=1 GIT_TEST_OPTS=\"--no-chain-lint --no-bin-wrappers --quiet --write-junit-xml\" time make -j15 -k DEVELOPER=1 test || { + NO_PERL=1 NO_SVN_TESTS=1 GIT_TEST_OPTS=\"-i -v -x\" make -k -C t failed; exit 1 + } + + save_good_tree + "@ + c + + if ("$GITFILESHAREPWD" -ne "" -and "$GITFILESHAREPWD" -ne "`$`(gitfileshare.pwd)") { + cmd /c rmdir "$(Build.SourcesDirectory)\test-cache" + } + displayName: 'build & test' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + - task: PublishTestResults@2 + displayName: 'Publish Test Results **/TEST-*.xml' + inputs: + mergeTestResults: true + testRunTitle: 'windows' + platform: Windows + publishRunAttachments: false + condition: succeededOrFailed() + +- phase: linux32 + displayName: Linux32 + condition: succeeded() + queue: + name: Hosted Ubuntu 1604 + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + sudo apt-get update && + sudo rm /var/lib/apt/lists/lock && + sudo apt-get -y install \ + apt-transport-https \ + ca-certificates \ + curl \ + software-properties-common && + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - && + sudo add-apt-repository \ + "deb [arch=amd64] https://download.docker.com/linux/ubuntu \ + $(lsb_release -cs) \ + stable" && + sudo apt-get update && + sudo apt-get -y install docker-ce && + + sudo AGENT_OS="$AGENT_OS" BUILD_BUILDNUMBER="$BUILD_BUILDNUMBER" BUILD_REPOSITORY_URI="$BUILD_REPOSITORY_URI" BUILD_SOURCEBRANCH="$BUILD_SOURCEBRANCH" BUILD_SOURCEVERSION="$BUILD_SOURCEVERSION" SYSTEM_PHASENAME="$SYSTEM_PHASENAME" SYSTEM_TASKDEFINITIONSURI="$SYSTEM_TASKDEFINITIONSURI" SYSTEM_TEAMPROJECT="$SYSTEM_TEAMPROJECT" CC=$CC MAKEFLAGS=-j3 bash -lxc ci/run-linux32-docker.sh || exit 1 + + sudo chmod a+r t/out/TEST-*.xml + + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || sudo umount "$HOME/test-cache" || exit 1 + displayName: 'ci/run-linux32-docker.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + - task: PublishTestResults@2 + displayName: 'Publish Test Results **/TEST-*.xml' + inputs: + mergeTestResults: true + testRunTitle: 'linux32' + platform: Linux + publishRunAttachments: false + condition: succeededOrFailed() + +- phase: static_analysis + displayName: StaticAnalysis + condition: succeeded() + queue: + name: Hosted Ubuntu 1604 + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + sudo apt-get update && + sudo rm /var/lib/apt/lists/lock && + sudo apt-get install -y coccinelle && + + export jobname=StaticAnalysis && + + ci/run-static-analysis.sh || exit 1 + + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || sudo umount "$HOME/test-cache" || exit 1 + displayName: 'ci/run-static-analysis.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) + +- phase: documentation + displayName: Documentation + condition: succeeded() + queue: + name: Hosted Ubuntu 1604 + steps: + - bash: | + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || ci/mount-fileshare.sh //gitfileshare.file.core.windows.net/test-cache gitfileshare "$GITFILESHAREPWD" "$HOME/test-cache" || exit 1 + + sudo apt-get update && + sudo rm /var/lib/apt/lists/lock && + sudo apt-get install -y asciidoc xmlto asciidoctor && + + export ALREADY_HAVE_ASCIIDOCTOR=yes. 
&& + export jobname=Documentation && + + ci/test-documentation.sh || exit 1 + + test "$GITFILESHAREPWD" = '$(gitfileshare.pwd)' || sudo umount "$HOME/test-cache" || exit 1 + displayName: 'ci/test-documentation.sh' + env: + GITFILESHAREPWD: $(gitfileshare.pwd) diff --git a/builtin/add.c b/builtin/add.c index f65c172299..53c18ea429 100644 --- a/builtin/add.c +++ b/builtin/add.c @@ -239,7 +239,7 @@ static int edit_patch(int argc, const char **argv, const char *prefix) rev.diffopt.output_format = DIFF_FORMAT_PATCH; rev.diffopt.use_color = 0; rev.diffopt.flags.ignore_dirty_submodules = 1; - out = open(file, O_CREAT | O_WRONLY, 0666); + out = open(file, O_CREAT | O_WRONLY | O_TRUNC, 0666); if (out < 0) die(_("Could not open '%s' for writing."), file); rev.diffopt.file = xfdopen(out, "w"); diff --git a/builtin/clone.c b/builtin/clone.c index 15b142d646..40b6c10392 100644 --- a/builtin/clone.c +++ b/builtin/clone.c @@ -1167,7 +1167,9 @@ int cmd_clone(int argc, const char **argv, const char *prefix) } if (!is_local && !complete_refs_before_fetch) - transport_fetch_refs(transport, mapped_refs); + if (transport_fetch_refs(transport, mapped_refs)) + die(_("could not fetch refs from %s"), + transport->url); remote_head = find_ref_by_name(refs, "HEAD"); remote_head_points_at = diff --git a/builtin/commit.c b/builtin/commit.c index c021b119bb..4e9e180ad6 100644 --- a/builtin/commit.c +++ b/builtin/commit.c @@ -1363,6 +1363,7 @@ int cmd_status(int argc, const char **argv, const char *prefix) PATHSPEC_PREFER_FULL, prefix, argv); + enable_fscache(1); if (status_format != STATUS_FORMAT_PORCELAIN && status_format != STATUS_FORMAT_PORCELAIN_V2) progress_flag = REFRESH_PROGRESS; diff --git a/builtin/fast-export.c b/builtin/fast-export.c index 5790f0d554..19d2d1de37 100644 --- a/builtin/fast-export.c +++ b/builtin/fast-export.c @@ -874,9 +874,20 @@ static void get_tags_and_duplicates(struct rev_cmdline_info *info) } } +static void handle_reset(const char *name, struct object *object) +{ + int mark = get_object_mark(object); + + if (mark) + printf("reset %s\nfrom :%d\n\n", name, + get_object_mark(object)); + else + printf("reset %s\nfrom %s\n\n", name, + oid_to_hex(&object->oid)); +} + static void handle_tags_and_duplicates(void) { - struct commit *commit; int i; for (i = extra_refs.nr - 1; i >= 0; i--) { @@ -890,9 +901,7 @@ static void handle_tags_and_duplicates(void) if (anonymize) name = anonymize_refname(name); /* create refs pointing to already seen commits */ - commit = (struct commit *)object; - printf("reset %s\nfrom :%d\n\n", name, - get_object_mark(&commit->object)); + handle_reset(name, object); show_progress(); break; } diff --git a/builtin/gc.c b/builtin/gc.c index 871a56f1c5..df90fd7f51 100644 --- a/builtin/gc.c +++ b/builtin/gc.c @@ -659,8 +659,10 @@ int cmd_gc(int argc, const char **argv, const char *prefix) report_garbage = report_pack_garbage; reprepare_packed_git(the_repository); - if (pack_garbage.nr > 0) + if (pack_garbage.nr > 0) { + close_all_packs(the_repository->objects); clean_pack_garbage(); + } if (gc_write_commit_graph) write_commit_graph_reachable(get_object_directory(), 0, diff --git a/builtin/init-db.c b/builtin/init-db.c index 41faffd28d..bcefe4a203 100644 --- a/builtin/init-db.c +++ b/builtin/init-db.c @@ -155,6 +155,9 @@ static int git_init_db_config(const char *k, const char *v, void *cb) if (!strcmp(k, "init.templatedir")) return git_config_pathname(&init_db_template_dir, k, v); + if (starts_with(k, "core.")) + return platform_core_config(k, v, cb); + return 0; } @@ -361,6 
+364,9 @@ int init_db(const char *git_dir, const char *real_git_dir, } startup_info->have_repository = 1; + /* Just look for `init.templatedir` and `core.hidedotfiles` */ + git_config(git_init_db_config, NULL); + safe_create_dir(git_dir, 0); init_is_bare_repository = is_bare_repository(); diff --git a/builtin/repack.c b/builtin/repack.c index 45583683ee..f9319defe4 100644 --- a/builtin/repack.c +++ b/builtin/repack.c @@ -419,6 +419,8 @@ int cmd_repack(int argc, const char **argv, const char *prefix) if (!names.nr && !po_args.quiet) printf("Nothing new to pack.\n"); + close_all_packs(the_repository->objects); + /* * Ok we have prepared all new packfiles. * First see if there are packs of the same name and if so diff --git a/builtin/reset.c b/builtin/reset.c index 58166964f8..4ba8badd0f 100644 --- a/builtin/reset.c +++ b/builtin/reset.c @@ -24,12 +24,15 @@ #include "cache-tree.h" #include "submodule.h" #include "submodule-config.h" +#include "strbuf.h" +#include "quote.h" #define REFRESH_INDEX_DELAY_WARNING_IN_MS (2 * 1000) static const char * const git_reset_usage[] = { N_("git reset [--mixed | --soft | --hard | --merge | --keep] [-q] []"), N_("git reset [-q] [] [--] ..."), + N_("EXPERIMENTAL: git reset [-q] [--stdin [-z]] []"), N_("git reset --patch [] [--] [...]"), NULL }; @@ -283,7 +286,9 @@ static int git_reset_config(const char *var, const char *value, void *cb) int cmd_reset(int argc, const char **argv, const char *prefix) { int reset_type = NONE, update_ref_status = 0, quiet = 0; - int patch_mode = 0, unborn; + int patch_mode = 0, nul_term_line = 0, read_from_stdin = 0, unborn; + char **stdin_paths = NULL; + int stdin_nr = 0, stdin_alloc = 0; const char *rev; struct object_id oid; struct pathspec pathspec; @@ -305,6 +310,10 @@ int cmd_reset(int argc, const char **argv, const char *prefix) OPT_BOOL('p', "patch", &patch_mode, N_("select hunks interactively")), OPT_BOOL('N', "intent-to-add", &intent_to_add, N_("record only the fact that removed paths will be added later")), + OPT_BOOL('z', NULL, &nul_term_line, + N_("EXPERIMENTAL: paths are separated with NUL character")), + OPT_BOOL(0, "stdin", &read_from_stdin, + N_("EXPERIMENTAL: read paths from ")), OPT_END() }; @@ -315,6 +324,42 @@ int cmd_reset(int argc, const char **argv, const char *prefix) PARSE_OPT_KEEP_DASHDASH); parse_args(&pathspec, argv, prefix, patch_mode, &rev); + if (read_from_stdin) { + strbuf_getline_fn getline_fn = nul_term_line ? 
+ strbuf_getline_nul : strbuf_getline_lf; + int flags = PATHSPEC_PREFER_FULL; + struct strbuf buf = STRBUF_INIT; + struct strbuf unquoted = STRBUF_INIT; + + if (patch_mode) + die(_("--stdin is incompatible with --patch")); + + if (pathspec.nr) + die(_("--stdin is incompatible with path arguments")); + + while (getline_fn(&buf, stdin) != EOF) { + if (!nul_term_line && buf.buf[0] == '"') { + strbuf_reset(&unquoted); + if (unquote_c_style(&unquoted, buf.buf, NULL)) + die(_("line is badly quoted")); + strbuf_swap(&buf, &unquoted); + } + ALLOC_GROW(stdin_paths, stdin_nr + 1, stdin_alloc); + stdin_paths[stdin_nr++] = xstrdup(buf.buf); + strbuf_reset(&buf); + } + strbuf_release(&unquoted); + strbuf_release(&buf); + + ALLOC_GROW(stdin_paths, stdin_nr + 1, stdin_alloc); + stdin_paths[stdin_nr++] = NULL; + flags |= PATHSPEC_LITERAL_PATH; + parse_pathspec(&pathspec, 0, flags, prefix, + (const char **)stdin_paths); + + } else if (nul_term_line) + die(_("-z requires --stdin")); + unborn = !strcmp(rev, "HEAD") && get_oid("HEAD", &oid); if (unborn) { /* reset on unborn branch: treat as reset to empty tree */ @@ -415,5 +460,11 @@ int cmd_reset(int argc, const char **argv, const char *prefix) if (!pathspec.nr) remove_branch_state(); + if (stdin_paths) { + while (stdin_nr) + free(stdin_paths[--stdin_nr]); + free(stdin_paths); + } + return update_ref_status; } diff --git a/cache-tree.c b/cache-tree.c index 9d454d24bc..7361a42ded 100644 --- a/cache-tree.c +++ b/cache-tree.c @@ -6,8 +6,8 @@ #include "object-store.h" #include "replace-object.h" -#ifndef DEBUG -#define DEBUG 0 +#ifndef DEBUG_CACHE_TREE +#define DEBUG_CACHE_TREE 0 #endif struct cache_tree *cache_tree(void) @@ -111,7 +111,7 @@ static int do_invalidate_path(struct cache_tree *it, const char *path) int namelen; struct cache_tree_sub *down; -#if DEBUG +#if DEBUG_CACHE_TREE fprintf(stderr, "cache-tree invalidate <%s>\n", path); #endif @@ -398,7 +398,7 @@ static int update_one(struct cache_tree *it, strbuf_addf(&buffer, "%o %.*s%c", mode, entlen, path + baselen, '\0'); strbuf_add(&buffer, oid->hash, the_hash_algo->rawsz); -#if DEBUG +#if DEBUG_CACHE_TREE fprintf(stderr, "cache-tree update-one %o %.*s\n", mode, entlen, path + baselen); #endif @@ -421,7 +421,7 @@ static int update_one(struct cache_tree *it, strbuf_release(&buffer); it->entry_count = to_invalidate ? -1 : i - *skip_count; -#if DEBUG +#if DEBUG_CACHE_TREE fprintf(stderr, "cache-tree update-one (%d ent, %d subtree) %s\n", it->entry_count, it->subtree_nr, oid_to_hex(&it->oid)); @@ -462,7 +462,7 @@ static void write_one(struct strbuf *buffer, struct cache_tree *it, strbuf_add(buffer, path, pathlen); strbuf_addf(buffer, "%c%d %d\n", 0, it->entry_count, it->subtree_nr); -#if DEBUG +#if DEBUG_CACHE_TREE if (0 <= it->entry_count) fprintf(stderr, "cache-tree <%.*s> (%d ent, %d subtree) %s\n", pathlen, path, it->entry_count, it->subtree_nr, @@ -536,7 +536,7 @@ static struct cache_tree *read_one(const char **buffer, unsigned long *size_p) size -= rawsz; } -#if DEBUG +#if DEBUG_CACHE_TREE if (0 <= it->entry_count) fprintf(stderr, "cache-tree <%s> (%d ent, %d subtree) %s\n", *buffer, it->entry_count, subtree_nr, diff --git a/ci/install-dependencies.sh b/ci/install-dependencies.sh index 06c3546e1e..bcdcc71592 100755 --- a/ci/install-dependencies.sh +++ b/ci/install-dependencies.sh @@ -3,7 +3,7 @@ # Install dependencies required to build and test Git on Linux and macOS # -. ${0%/*}/lib-travisci.sh +. 
${0%/*}/lib.sh P4WHENCE=http://filehost.perforce.com/perforce/r$LINUX_P4_VERSION LFSWHENCE=https://github.com/github/git-lfs/releases/download/v$LINUX_GIT_LFS_VERSION @@ -37,7 +37,8 @@ osx-clang|osx-gcc) brew update --quiet # Uncomment this if you want to run perf tests: # brew install gnu-time - brew install git-lfs gettext + test -z "$BREW_INSTALL_PACKAGES" || + brew install $BREW_INSTALL_PACKAGES brew link --force gettext brew install caskroom/cask/perforce ;; diff --git a/ci/lib-travisci.sh b/ci/lib.sh similarity index 61% rename from ci/lib-travisci.sh rename to ci/lib.sh index 69dff4d1ec..cb0b893442 100755 --- a/ci/lib-travisci.sh +++ b/ci/lib.sh @@ -1,5 +1,49 @@ # Library of functions shared by all CI scripts +if test true = "$TRAVIS" +then + # We are running within Travis CI + CI_BRANCH="${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH}" + CI_COMMIT="$TRAVIS_COMMIT" + CI_JOB_ID="$TRAVIS_JOB_ID" + CI_JOB_NUMBER="$TRAVIS_JOB_NUMBER" + CI_OS_NAME="$TRAVIS_OS_NAME" + CI_REPO_SLUG="$TRAVIS_REPO_SLUG" + + cache_dir="$HOME/travis-cache" + + url_for_job_id () { + echo "https://travis-ci.org/$CI_REPO_SLUG/jobs/$1" + } + + BREW_INSTALL_PACKAGES="git-lfs gettext" + export GIT_PROVE_OPTS="--timer --jobs 3 --state=failed,slow,save" + export GIT_TEST_OPTS="--verbose-log -x --immediate" +elif test -n "$SYSTEM_TASKDEFINITIONSURI" +then + # We are running in Azure Pipelines + CI_BRANCH="$BUILD_SOURCEBRANCH" + CI_COMMIT="$BUILD_SOURCEVERSION" + CI_JOB_ID="$BUILD_BUILDID" + CI_JOB_NUMBER="$BUILD_BUILDNUMBER" + CI_OS_NAME="$(echo "$AGENT_OS" | tr A-Z a-z)" + test darwin != "$CI_OS_NAME" || CI_OS_NAME=osx + CI_REPO_SLUG="$(expr "$BUILD_REPOSITORY_URI" : '.*/\([^/]*/[^/]*\)$')" + CC="${CC:-gcc}" + + # use a subdirectory of the cache dir (because the file share is shared + # among *all* phases) + cache_dir="$HOME/test-cache/$SYSTEM_PHASENAME" + + url_for_job_id () { + echo "$SYSTEM_TASKDEFINITIONSURI$SYSTEM_TEAMPROJECT/_build/results?buildId=$1" + } + + BREW_INSTALL_PACKAGES= + export GIT_PROVE_OPTS="--timer --jobs 10 --state=failed,slow,save" + export GIT_TEST_OPTS="--quiet --write-junit-xml" +fi + skip_branch_tip_with_tag () { # Sometimes, a branch is pushed at the same time the tag that points # at the same commit as the tip of the branch is pushed, and building @@ -13,10 +57,10 @@ skip_branch_tip_with_tag () { # we can skip the build because we won't be skipping a build # of a tag. - if TAG=$(git describe --exact-match "$TRAVIS_BRANCH" 2>/dev/null) && - test "$TAG" != "$TRAVIS_BRANCH" + if TAG=$(git describe --exact-match "$CI_BRANCH" 2>/dev/null) && + test "$TAG" != "$CI_BRANCH" then - echo "$(tput setaf 2)Tip of $TRAVIS_BRANCH is exactly at $TAG$(tput sgr0)" + echo "$(tput setaf 2)Tip of $CI_BRANCH is exactly at $TAG$(tput sgr0)" exit 0 fi } @@ -25,7 +69,7 @@ skip_branch_tip_with_tag () { # job if we encounter the same tree again and can provide a useful info # message. save_good_tree () { - echo "$(git rev-parse $TRAVIS_COMMIT^{tree}) $TRAVIS_COMMIT $TRAVIS_JOB_NUMBER $TRAVIS_JOB_ID" >>"$good_trees_file" + echo "$(git rev-parse $CI_COMMIT^{tree}) $CI_COMMIT $CI_JOB_NUMBER $CI_JOB_ID" >>"$good_trees_file" # limit the file size tail -1000 "$good_trees_file" >"$good_trees_file".tmp mv "$good_trees_file".tmp "$good_trees_file" @@ -35,7 +79,7 @@ save_good_tree () { # successfully before (e.g. because the branch got rebased, changing only # the commit messages). skip_good_tree () { - if ! good_tree_info="$(grep "^$(git rev-parse $TRAVIS_COMMIT^{tree}) " "$good_trees_file")" + if ! 
good_tree_info="$(grep "^$(git rev-parse $CI_COMMIT^{tree}) " "$good_trees_file")" then # Haven't seen this tree yet, or no cached good trees file yet. # Continue the build job. @@ -45,18 +89,18 @@ skip_good_tree () { echo "$good_tree_info" | { read tree prev_good_commit prev_good_job_number prev_good_job_id - if test "$TRAVIS_JOB_ID" = "$prev_good_job_id" + if test "$CI_JOB_ID" = "$prev_good_job_id" then cat <<-EOF - $(tput setaf 2)Skipping build job for commit $TRAVIS_COMMIT.$(tput sgr0) + $(tput setaf 2)Skipping build job for commit $CI_COMMIT.$(tput sgr0) This commit has already been built and tested successfully by this build job. To force a re-build delete the branch's cache and then hit 'Restart job'. EOF else cat <<-EOF - $(tput setaf 2)Skipping build job for commit $TRAVIS_COMMIT.$(tput sgr0) + $(tput setaf 2)Skipping build job for commit $CI_COMMIT.$(tput sgr0) This commit's tree has already been built and tested successfully in build job $prev_good_job_number for commit $prev_good_commit. - The log of that build job is available at https://travis-ci.org/$TRAVIS_REPO_SLUG/jobs/$prev_good_job_id + The log of that build job is available at $(url_for_job_id $prev_good_job_id) To force a re-build delete the branch's cache and then hit 'Restart job'. EOF fi @@ -81,7 +125,6 @@ check_unignored_build_artifacts () # and installing dependencies. set -ex -cache_dir="$HOME/travis-cache" good_trees_file="$cache_dir/good-trees" mkdir -p "$cache_dir" @@ -91,13 +134,11 @@ skip_good_tree if test -z "$jobname" then - jobname="$TRAVIS_OS_NAME-$CC" + jobname="$CI_OS_NAME-$CC" fi export DEVELOPER=1 export DEFAULT_TEST_TARGET=prove -export GIT_PROVE_OPTS="--timer --jobs 3 --state=failed,slow,save" -export GIT_TEST_OPTS="--verbose-log -x --immediate" export GIT_TEST_CLONE_2GB=YesPlease if [ "$jobname" = linux-gcc ]; then export CC=gcc-8 diff --git a/ci/mount-fileshare.sh b/ci/mount-fileshare.sh new file mode 100755 index 0000000000..5fb5f74b70 --- /dev/null +++ b/ci/mount-fileshare.sh @@ -0,0 +1,26 @@ +#!/bin/sh + +die () { + echo "$*" >&2 + exit 1 +} + +test $# = 4 || +die "Usage: $0 dwHighDateTime << 32) + ft->dwLowDateTime; - /* Windows to Unix Epoch conversion */ - return winTime - 116444736000000000LL; -} - -static inline void filetime_to_timespec(const FILETIME *ft, struct timespec *ts) -{ - long long hnsec = filetime_to_hnsec(ft); - ts->tv_sec = (time_t)(hnsec / 10000000); - ts->tv_nsec = (hnsec % 10000000) * 100; -} - /** * Verifies that safe_create_leading_directories() would succeed. 
*/ @@ -698,8 +701,8 @@ static int has_valid_directory_prefix(wchar_t *wfilename) static int do_lstat(int follow, const char *file_name, struct stat *buf) { WIN32_FILE_ATTRIBUTE_DATA fdata; - wchar_t wfilename[MAX_PATH]; - if (xutftowcs_path(wfilename, file_name) < 0) + wchar_t wfilename[MAX_LONG_PATH]; + if (xutftowcs_long_path(wfilename, file_name) < 0) return -1; if (GetFileAttributesExW(wfilename, GetFileExInfoStandard, &fdata)) { @@ -770,7 +773,7 @@ static int do_lstat(int follow, const char *file_name, struct stat *buf) static int do_stat_internal(int follow, const char *file_name, struct stat *buf) { int namelen; - char alt_name[PATH_MAX]; + char alt_name[MAX_LONG_PATH]; if (!do_lstat(follow, file_name, buf)) return 0; @@ -786,7 +789,7 @@ static int do_stat_internal(int follow, const char *file_name, struct stat *buf) return -1; while (namelen && file_name[namelen-1] == '/') --namelen; - if (!namelen || namelen >= PATH_MAX) + if (!namelen || namelen >= MAX_LONG_PATH) return -1; memcpy(alt_name, file_name, namelen); @@ -794,6 +797,8 @@ static int do_stat_internal(int follow, const char *file_name, struct stat *buf) return do_lstat(follow, alt_name, buf); } +int (*lstat)(const char *file_name, struct stat *buf) = mingw_lstat; + static int get_file_info_by_handle(HANDLE hnd, struct stat *buf) { BY_HANDLE_FILE_INFORMATION fdata; @@ -868,8 +873,8 @@ int mingw_utime (const char *file_name, const struct utimbuf *times) FILETIME mft, aft; int fh, rc; DWORD attrs; - wchar_t wfilename[MAX_PATH]; - if (xutftowcs_path(wfilename, file_name) < 0) + wchar_t wfilename[MAX_LONG_PATH]; + if (xutftowcs_long_path(wfilename, file_name) < 0) return -1; /* must have write permission */ @@ -928,11 +933,20 @@ unsigned int sleep (unsigned int seconds) char *mingw_mktemp(char *template) { wchar_t wtemplate[MAX_PATH]; + int offset = 0; + + /* we need to return the path, thus no long paths here! 
*/ if (xutftowcs_path(wtemplate, template) < 0) return NULL; + + if (is_dir_sep(template[0]) && !is_dir_sep(template[1]) && + iswalpha(wtemplate[0]) && wtemplate[1] == L':') { + /* We have an absolute path missing the drive prefix */ + offset = 2; + } if (!_wmktemp(wtemplate)) return NULL; - if (xwcstoutf(template, wtemplate, strlen(template) + 1) < 0) + if (xwcstoutf(template, wtemplate + offset, strlen(template) + 1) < 0) return NULL; return template; } @@ -1031,7 +1045,7 @@ char *mingw_getcwd(char *pointer, int len) * See "Parsing C++ Command-Line Arguments" at Microsoft's Docs: * https://docs.microsoft.com/en-us/cpp/cpp/parsing-cpp-command-line-arguments */ -static const char *quote_arg(const char *arg) +static const char *quote_arg_msvc(const char *arg) { /* count chars to quote */ int len = 0, n = 0; @@ -1086,6 +1100,37 @@ static const char *quote_arg(const char *arg) return q; } +#include "quote.h" + +static const char *quote_arg_msys2(const char *arg) +{ + struct strbuf buf = STRBUF_INIT; + const char *p2 = arg, *p; + + for (p = arg; *p; p++) { + int ws = isspace(*p); + if (!ws && *p != '\\' && *p != '"' && *p != '{') + continue; + if (!buf.len) + strbuf_addch(&buf, '"'); + if (p != p2) + strbuf_add(&buf, p2, p - p2); + if (!ws && *p != '{') + strbuf_addch(&buf, '\\'); + p2 = p; + } + + if (p == arg) + strbuf_addch(&buf, '"'); + else if (!buf.len) + return arg; + else + strbuf_add(&buf, p2, p - p2), + + strbuf_addch(&buf, '"'); + return strbuf_detach(&buf, 0); +} + static const char *parse_interpreter(const char *cmd) { static char buf[100]; @@ -1129,14 +1174,21 @@ static char *lookup_prog(const char *dir, int dirlen, const char *cmd, int isexe, int exe_only) { char path[MAX_PATH]; + wchar_t wpath[MAX_PATH]; snprintf(path, sizeof(path), "%.*s\\%s.exe", dirlen, dir, cmd); - if (!isexe && access(path, F_OK) == 0) + if (xutftowcs_path(wpath, path) < 0) + return NULL; + + if (!isexe && _waccess(wpath, F_OK) == 0) return xstrdup(path); - path[strlen(path)-4] = '\0'; - if ((!exe_only || isexe) && access(path, F_OK) == 0) - if (!(GetFileAttributes(path) & FILE_ATTRIBUTE_DIRECTORY)) + wpath[wcslen(wpath)-4] = '\0'; + if ((!exe_only || isexe) && _waccess(wpath, F_OK) == 0) { + if (!(GetFileAttributesW(wpath) & FILE_ATTRIBUTE_DIRECTORY)) { + path[strlen(path)-4] = '\0'; return xstrdup(path); + } + } return NULL; } @@ -1317,6 +1369,34 @@ struct pinfo_t { static struct pinfo_t *pinfo = NULL; CRITICAL_SECTION pinfo_cs; +static int is_msys2_sh(const char *cmd) +{ + if (cmd && !strcmp(cmd, "sh")) { + static int ret = -1; + char *p; + + if (ret >= 0) + return ret; + + p = path_lookup(cmd, 0); + if (!p) + ret = 0; + else { + size_t len = strlen(p); + ret = len > 15 && + is_dir_sep(p[len - 15]) && + !strncasecmp(p + len - 14, "usr", 3) && + is_dir_sep(p[len - 11]) && + !strncasecmp(p + len - 10, "bin", 3) && + is_dir_sep(p[len - 7]) && + !strcasecmp(p + len - 6, "sh.exe"); + free(p); + } + return ret; + } + return 0; +} + static pid_t mingw_spawnve_fd(const char *cmd, const char **argv, char **deltaenv, const char *dir, int prepend_cmd, int fhin, int fhout, int fherr) @@ -1328,6 +1408,8 @@ static pid_t mingw_spawnve_fd(const char *cmd, const char **argv, char **deltaen unsigned flags = CREATE_UNICODE_ENVIRONMENT; BOOL ret; HANDLE cons; + const char *(*quote_arg)(const char *arg) = + is_msys2_sh(*argv) ? 
quote_arg_msys2 : quote_arg_msvc; do_unset_environment_variables(); @@ -1362,7 +1444,10 @@ static pid_t mingw_spawnve_fd(const char *cmd, const char **argv, char **deltaen si.hStdOutput = winansi_get_osfhandle(fhout); si.hStdError = winansi_get_osfhandle(fherr); - if (xutftowcs_path(wcmd, cmd) < 0) + /* executables and the current directory don't support long paths */ + if (*argv && !strcmp(cmd, *argv)) + wcmd[0] = L'\0'; + else if (xutftowcs_path(wcmd, cmd) < 0) return -1; if (dir && xutftowcs_path(wdir, dir) < 0) return -1; @@ -1391,8 +1476,8 @@ static pid_t mingw_spawnve_fd(const char *cmd, const char **argv, char **deltaen wenvblk = make_environment_block(deltaenv); memset(&pi, 0, sizeof(pi)); - ret = CreateProcessW(wcmd, wargs, NULL, NULL, TRUE, flags, - wenvblk, dir ? wdir : NULL, &si, &pi); + ret = CreateProcessW(*wcmd ? wcmd : NULL, wargs, NULL, NULL, TRUE, + flags, wenvblk, dir ? wdir : NULL, &si, &pi); free(wenvblk); free(wargs); @@ -1477,7 +1562,10 @@ static int try_shell_exec(const char *cmd, char *const *argv) prog = path_lookup(interpr, 1); if (prog) { int argc = 0; - const char **argv2; +#ifndef _MSC_VER + const +#endif + char **argv2; while (argv[argc]) argc++; ALLOC_ARRAY(argv2, argc + 1); argv2[0] = (char *)cmd; /* full path to the script file */ @@ -1927,8 +2015,9 @@ int mingw_rename(const char *pold, const char *pnew) { DWORD attrs, gle; int tries = 0; - wchar_t wpold[MAX_PATH], wpnew[MAX_PATH]; - if (xutftowcs_path(wpold, pold) < 0 || xutftowcs_path(wpnew, pnew) < 0) + wchar_t wpold[MAX_LONG_PATH], wpnew[MAX_LONG_PATH]; + if (xutftowcs_long_path(wpold, pold) < 0 || + xutftowcs_long_path(wpnew, pnew) < 0) return -1; /* @@ -2204,16 +2293,42 @@ int mingw_raise(int sig) sigint_fn(SIGINT); return 0; +#if defined(_MSC_VER) + /* + * in the CRT defines 8 signals as being + * supported on the platform. Anything else causes + * an "Invalid signal or error" (which in DEBUG builds + * causes the Abort/Retry/Ignore dialog). We by-pass + * the CRT for things we already know will fail. + */ + /*case SIGINT:*/ + case SIGILL: + case SIGFPE: + case SIGSEGV: + case SIGTERM: + case SIGBREAK: + case SIGABRT: + case SIGABRT_COMPAT: + return raise(sig); + default: + errno = EINVAL; + return -1; + +#else + default: return raise(sig); + +#endif + } } int link(const char *oldpath, const char *newpath) { - wchar_t woldpath[MAX_PATH], wnewpath[MAX_PATH]; - if (xutftowcs_path(woldpath, oldpath) < 0 || - xutftowcs_path(wnewpath, newpath) < 0) + wchar_t woldpath[MAX_LONG_PATH], wnewpath[MAX_LONG_PATH]; + if (xutftowcs_long_path(woldpath, oldpath) < 0 || + xutftowcs_long_path(wnewpath, newpath) < 0) return -1; if (!CreateHardLinkW(wnewpath, woldpath, NULL)) { @@ -2416,18 +2531,75 @@ static void setup_windows_environment(void) setenv("TERM", "cygwin", 1); } +int handle_long_path(wchar_t *path, int len, int max_path, int expand) +{ + int result; + wchar_t buf[MAX_LONG_PATH]; + + /* + * we don't need special handling if path is relative to the current + * directory, and current directory + path don't exceed the desired + * max_path limit. This should cover > 99 % of cases with minimal + * performance impact (git almost always uses relative paths). 
+ */ + if ((len < 2 || (!is_dir_sep(path[0]) && path[1] != ':')) && + (current_directory_len + len < max_path)) + return len; + + /* + * handle everything else: + * - absolute paths: "C:\dir\file" + * - absolute UNC paths: "\\server\share\dir\file" + * - absolute paths on current drive: "\dir\file" + * - relative paths on other drive: "X:file" + * - prefixed paths: "\\?\...", "\\.\..." + */ + + /* convert to absolute path using GetFullPathNameW */ + result = GetFullPathNameW(path, MAX_LONG_PATH, buf, NULL); + if (!result) { + errno = err_win_to_posix(GetLastError()); + return -1; + } + + /* + * return absolute path if it fits within max_path (even if + * "cwd + path" doesn't due to '..' components) + */ + if (result < max_path) { + wcscpy(path, buf); + return result; + } + + /* error out if we shouldn't expand the path or buf is too small */ + if (!expand || result >= MAX_LONG_PATH - 6) { + errno = ENAMETOOLONG; + return -1; + } + + /* prefix full path with "\\?\" or "\\?\UNC\" */ + if (buf[0] == '\\') { + /* ...unless already prefixed */ + if (buf[1] == '\\' && (buf[2] == '?' || buf[2] == '.')) + return len; + + wcscpy(path, L"\\\\?\\UNC\\"); + wcscpy(path + 8, buf + 2); + return result + 6; + } else { + wcscpy(path, L"\\\\?\\"); + wcscpy(path + 4, buf); + return result + 4; + } +} + +#if !defined(_MSC_VER) /* * Disable MSVCRT command line wildcard expansion (__getmainargs called from * mingw startup code, see init.c in mingw runtime). */ int _CRT_glob = 0; - -typedef struct { - int newmode; -} _startupinfo; - -extern int __wgetmainargs(int *argc, wchar_t ***argv, wchar_t ***env, int glob, - _startupinfo *si); +#endif static NORETURN void die_startup(void) { @@ -2505,20 +2677,39 @@ static void maybe_redirect_std_handles(void) GENERIC_WRITE, FILE_FLAG_NO_BUFFERING); } -void mingw_startup(void) +#ifdef _MSC_VER +#ifdef _DEBUG +#include +#endif +#endif + +/* + * We implement wmain() and compile with -municode, which would + * normally ignore main(), but we call the latter from the former + * so that we can handle non-ASCII command-line parameters + * appropriately. + * + * To be more compatible with the core git code, we convert + * argv into UTF8 and pass them directly to main(). + */ +int wmain(int argc, const wchar_t **wargv) { - int i, maxlen, argc; - char *buffer; - wchar_t **wenv, **wargv; - _startupinfo si; + int i, maxlen, exit_status; + char *buffer, **save; + const char **argv; + +#ifdef _MSC_VER +#ifdef _DEBUG + _CrtSetReportMode(_CRT_ASSERT, _CRTDBG_MODE_DEBUG); +#endif + +#ifdef USE_MSVC_CRTDBG + _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF); +#endif +#endif maybe_redirect_std_handles(); - /* get wide char arguments and environment */ - si.newmode = 0; - if (__wgetmainargs(&argc, &wargv, &wenv, _CRT_glob, &si) < 0) - die_startup(); - /* determine size of argv and environ conversion buffer */ maxlen = wcslen(wargv[0]); for (i = 1; i < argc; i++) @@ -2528,9 +2719,16 @@ void mingw_startup(void) maxlen = 3 * maxlen + 1; buffer = malloc_startup(maxlen); - /* convert command line arguments and environment to UTF-8 */ + /* + * Create a UTF-8 version of w_argv. Also create a "save" copy + * to remember all the string pointers because parse_options() + * will remove claimed items from the argv that we pass down. 
+ */ + ALLOC_ARRAY(argv, argc + 1); + ALLOC_ARRAY(save, argc + 1); for (i = 0; i < argc; i++) - __argv[i] = wcstoutfdup_startup(buffer, wargv[i], maxlen); + argv[i] = save[i] = wcstoutfdup_startup(buffer, wargv[i], maxlen); + argv[i] = save[i] = NULL; free(buffer); /* fix Windows specific environment settings */ @@ -2549,6 +2747,19 @@ void mingw_startup(void) /* initialize Unicode console */ winansi_init(); + + /* init length of current directory for handle_long_path */ + current_directory_len = GetCurrentDirectoryW(0, NULL); + + /* invoke the real main() using our utf8 version of argv. */ + exit_status = main(argc, argv); + + for (i = 0; i < argc; i++) + free(save[i]); + free(save); + free(argv); + + return exit_status; } int uname(struct utsname *buf) diff --git a/compat/mingw.h b/compat/mingw.h index 8c24ddaa3e..4aa84ab783 100644 --- a/compat/mingw.h +++ b/compat/mingw.h @@ -11,6 +11,9 @@ typedef _sigset_t sigset_t; #undef _POSIX_THREAD_SAFE_FUNCTIONS #endif +extern int core_fscache; +extern int core_long_paths; + extern int mingw_core_config(const char *var, const char *value, void *cb); #define platform_core_config mingw_core_config @@ -353,6 +356,17 @@ static inline int getrlimit(int resource, struct rlimit *rlp) return 0; } +/* + * The unit of FILETIME is 100-nanoseconds since January 1, 1601, UTC. + * Returns the 100-nanoseconds ("hekto nanoseconds") since the epoch. + */ +static inline long long filetime_to_hnsec(const FILETIME *ft) +{ + long long winTime = ((long long)ft->dwHighDateTime << 32) + ft->dwLowDateTime; + /* Windows to Unix Epoch conversion */ + return winTime - 116444736000000000LL; +} + /* * Use mingw specific stat()/lstat()/fstat() implementations on Windows, * including our own struct stat with 64 bit st_size and nanosecond-precision @@ -361,11 +375,20 @@ static inline int getrlimit(int resource, struct rlimit *rlp) #ifndef __MINGW64_VERSION_MAJOR #define off_t off64_t #define lseek _lseeki64 +#ifndef _MSC_VER struct timespec { time_t tv_sec; long tv_nsec; }; #endif +#endif + +static inline void filetime_to_timespec(const FILETIME *ft, struct timespec *ts) +{ + long long hnsec = filetime_to_hnsec(ft); + ts->tv_sec = (time_t)(hnsec / 10000000); + ts->tv_nsec = (hnsec % 10000000) * 100; +} struct mingw_stat { _dev_t st_dev; @@ -399,7 +422,7 @@ int mingw_fstat(int fd, struct stat *buf); #ifdef lstat #undef lstat #endif -#define lstat mingw_lstat +extern int (*lstat)(const char *file_name, struct stat *buf); int mingw_utime(const char *file_name, const struct utimbuf *times); @@ -479,6 +502,42 @@ extern char *mingw_query_user_email(void); #include #endif +/** + * Max length of long paths (exceeding MAX_PATH). The actual maximum supported + * by NTFS is 32,767 (* sizeof(wchar_t)), but we choose an arbitrary smaller + * value to limit required stack memory. + */ +#define MAX_LONG_PATH 4096 + +/** + * Handles paths that would exceed the MAX_PATH limit of Windows Unicode APIs. + * + * With expand == false, the function checks for over-long paths and fails + * with ENAMETOOLONG. The path parameter is not modified, except if cwd + path + * exceeds max_path, but the resulting absolute path doesn't (e.g. due to + * eliminating '..' components). The path parameter must point to a buffer + * of max_path wide characters. + * + * With expand == true, an over-long path is automatically converted in place + * to an absolute path prefixed with '\\?\', and the new length is returned. + * The path parameter must point to a buffer of MAX_LONG_PATH wide characters. 
+ * + * Parameters: + * path: path to check and / or convert + * len: size of path on input (number of wide chars without \0) + * max_path: max short path length to check (usually MAX_PATH = 260, but just + * 248 for CreateDirectoryW) + * expand: false to only check the length, true to expand the path to a + * '\\?\'-prefixed absolute path + * + * Return: + * length of the resulting path, or -1 on failure + * + * Errors: + * ENAMETOOLONG if path is too long + */ +int handle_long_path(wchar_t *path, int len, int max_path, int expand); + /** * Converts UTF-8 encoded string to UTF-16LE. * @@ -536,17 +595,45 @@ static inline int xutftowcs(wchar_t *wcs, const char *utf, size_t wcslen) return xutftowcsn(wcs, utf, wcslen, -1); } +/** + * Simplified file system specific wrapper of xutftowcsn and handle_long_path. + * Converts ERANGE to ENAMETOOLONG. If expand is true, wcs must be at least + * MAX_LONG_PATH wide chars (see handle_long_path). + */ +static inline int xutftowcs_path_ex(wchar_t *wcs, const char *utf, + size_t wcslen, int utflen, int max_path, int expand) +{ + int result = xutftowcsn(wcs, utf, wcslen, utflen); + if (result < 0 && errno == ERANGE) + errno = ENAMETOOLONG; + if (result >= 0) + result = handle_long_path(wcs, result, max_path, expand); + return result; +} + /** * Simplified file system specific variant of xutftowcsn, assumes output * buffer size is MAX_PATH wide chars and input string is \0-terminated, - * fails with ENAMETOOLONG if input string is too long. + * fails with ENAMETOOLONG if input string is too long. Typically used for + * Windows APIs that don't support long paths, e.g. SetCurrentDirectory, + * LoadLibrary, CreateProcess... */ static inline int xutftowcs_path(wchar_t *wcs, const char *utf) { - int result = xutftowcsn(wcs, utf, MAX_PATH, -1); - if (result < 0 && errno == ERANGE) - errno = ENAMETOOLONG; - return result; + return xutftowcs_path_ex(wcs, utf, MAX_PATH, -1, MAX_PATH, 0); +} + +/** + * Simplified file system specific variant of xutftowcsn for Windows APIs + * that support long paths via '\\?\'-prefix, assumes output buffer size is + * MAX_LONG_PATH wide chars, fails with ENAMETOOLONG if input string is too + * long. The 'core.longpaths' git-config option controls whether the path + * is only checked or expanded to a long path. + */ +static inline int xutftowcs_long_path(wchar_t *wcs, const char *utf) +{ + return xutftowcs_path_ex(wcs, utf, MAX_LONG_PATH, -1, MAX_PATH, + core_long_paths); } /** @@ -591,18 +678,18 @@ int xwcstoutf(char *utf, const wchar_t *wcs, size_t utflen); extern CRITICAL_SECTION pinfo_cs; /* - * A replacement of main() that adds win32 specific initialization. + * Git, like most portable C applications, implements a main() function. On + * Windows, this main() function would receive parameters encoded in the + * current locale, but Git for Windows would prefer UTF-8 encoded parameters. + * + * To make that happen, we still declare main() here, and then declare and + * implement wmain() (which is the Unicode variant of main()) and compile with + * -municode. This wmain() function reencodes the parameters from UTF-16 to + * UTF-8 format, sets up a couple of other things as required on Windows, and + * then hands off to the main() function. 
*/ - -void mingw_startup(void); -#define main(c,v) dummy_decl_mingw_main(void); \ -static int mingw_main(c,v); \ -int main(int argc, const char **argv) \ -{ \ - mingw_startup(); \ - return mingw_main(__argc, (void *)__argv); \ -} \ -static int mingw_main(c,v) +int wmain(int argc, const wchar_t **w_argv); +int main(int argc, const char **argv); /* * Used by Pthread API implementation for Windows diff --git a/compat/msvc.h b/compat/msvc.h index 580bb55bf4..d081043f53 100644 --- a/compat/msvc.h +++ b/compat/msvc.h @@ -6,6 +6,10 @@ #include #include +#pragma warning(disable: 4018) /* signed/unsigned comparison */ +#pragma warning(disable: 4244) /* type conversion, possible loss of data */ +#pragma warning(disable: 4090) /* 'function' : different 'const' qualifiers (ALLOC_GROW etc.)*/ + /* porting function */ #define inline __inline #define __inline__ __inline @@ -24,6 +28,12 @@ static __inline int strcasecmp (const char *s1, const char *s2) #undef ERROR +#define ftello _ftelli64 + +typedef int sigset_t; +/* open for reading, writing, or both (not in fcntl.h) */ +#define O_ACCMODE (_O_RDONLY | _O_WRONLY | _O_RDWR) + #include "compat/mingw.h" #endif diff --git a/compat/obstack.h b/compat/obstack.h index 6bc24b7644..f0807eaa3b 100644 --- a/compat/obstack.h +++ b/compat/obstack.h @@ -492,7 +492,7 @@ __extension__ \ ( (h)->temp.tempint = (char *) (obj) - (char *) (h)->chunk, \ ((((h)->temp.tempint > 0 \ && (h)->temp.tempint < (h)->chunk_limit - (char *) (h)->chunk)) \ - ? (int) ((h)->next_free = (h)->object_base \ + ? (ptrdiff_t) ((h)->next_free = (h)->object_base \ = (h)->temp.tempint + (char *) (h)->chunk) \ : (((obstack_free) ((h), (h)->temp.tempint + (char *) (h)->chunk), 0), 0))) diff --git a/compat/poll/poll.c b/compat/poll/poll.c index 4459408c7d..8e6b8860c5 100644 --- a/compat/poll/poll.c +++ b/compat/poll/poll.c @@ -269,6 +269,20 @@ win32_compute_revents_socket (SOCKET h, int sought, long lNetworkEvents) return happened; } +#include +#include "compat/win32/lazyload.h" + +static ULONGLONG CompatGetTickCount64(void) +{ + DECLARE_PROC_ADDR(kernel32.dll, ULONGLONG, GetTickCount64, void); + + if (!INIT_PROC_ADDR(GetTickCount64)) + return (ULONGLONG)GetTickCount(); + + return GetTickCount64(); +} +#define GetTickCount64 CompatGetTickCount64 + #else /* !MinGW */ /* Convert select(2) returned fd_sets into poll(2) revents values. */ diff --git a/compat/vcbuild/.gitignore b/compat/vcbuild/.gitignore new file mode 100644 index 0000000000..8f8b794ef3 --- /dev/null +++ b/compat/vcbuild/.gitignore @@ -0,0 +1,3 @@ +/vcpkg/ +/MSVC-DEFS-GEN +/VCPKG-DEFS diff --git a/compat/vcbuild/README b/compat/vcbuild/README index 60fd873fe8..81da36a93b 100644 --- a/compat/vcbuild/README +++ b/compat/vcbuild/README @@ -1,3 +1,54 @@ +The Steps to Build Git with VS2015 or VS2017 from the command line. + +1. Install the "vcpkg" open source package manager and build essential + third-party libraries. The steps for this have been captured in a + set of convenience scripts. These can be run from a stock Command + Prompt or from an SDK bash window: + + $ cd + $ ./compat/vcbuild/vcpkg_install.bat + + The vcpkg tools and all of the third-party sources will be installed + in this folder: + /compat/vcbuild/vcpkg/ + + A file will be created with a set of Makefile macros pointing to a + unified "include", "lib", and "bin" directory (release and debug) for + all of the required packages. This file will be included by the main + Makefile: + /compat/vcbuild/MSVC-DEFS-GEN + +2. 
OPTIONALLY copy the third-party *.dll and *.pdb files into the repo + root to make it easier to run and debug git.exe without having to + manipulate your PATH. This is especially true for debug sessions in + Visual Studio. + + Use ONE of the following forms which should match how you want to + compile git.exe. + + $ ./compat/vcbuild/vcpkg_copy_packages.bat debug + $ ./compat/vcbuild/vcpkg_copy_packages.bat release + +3. Build git using MSVC from an SDK bash window using one of the + following commands: + + $ make MSVC=1 + $ make MSVC=1 DEBUG=1 + +================================================================ + +Alternatively, run `make MSVC=1 vcxproj` and then load the generated +git.sln in Visual Studio. The initial build will install the vcpkg +system and build the dependencies automatically. This will take a while. + +Note that this will automatically add and commit the generated +.sln and .vcxproj files to the repo. You may want to drop this +commit before submitting a Pull Request.... + +Or maybe we should put the .sln/.vcxproj files in the .gitignore file +and not do this. I'm not sure. + +================================================================ The Steps of Build Git with VS2008 1. You need the build environment, which contains the Git dependencies diff --git a/compat/vcbuild/find_vs_env.bat b/compat/vcbuild/find_vs_env.bat new file mode 100644 index 0000000000..1232f200f7 --- /dev/null +++ b/compat/vcbuild/find_vs_env.bat @@ -0,0 +1,169 @@ +@ECHO OFF +REM ================================================================ +REM You can use either GCC (the default) or MSVC to build git +REM using the GIT-SDK command line tools. +REM $ make +REM $ make MSVC=1 +REM +REM GIT-SDK BASH windows inherit environment variables with all of +REM the bin/lib/include paths for GCC. It DOES NOT inherit values +REM for the corresponding MSVC tools. +REM +REM During normal (non-git) Windows development, you launch one +REM of the provided "developer command prompts" to set environment +REM variables for the MSVC tools. +REM +REM Therefore, to allow MSVC command line builds of git from BASH +REM and MAKE, we must blend these two different worlds. This script +REM attempts to do that. +REM ================================================================ +REM This BAT file starts in a plain (non-developer) command prompt, +REM searches for the "best" commmand prompt setup script, installs +REM it into the current CMD process, and exports the various MSVC +REM environment variables for use by MAKE. +REM +REM The output of this script should be written to a make "include +REM file" and referenced by the top-level Makefile. +REM +REM See "config.mak.uname" (look for compat/vcbuild/MSVC-DEFS-GEN). +REM ================================================================ +REM The provided command prompts are custom to each VS release and +REM filled with lots of internal knowledge (such as Registry settings); +REM even their names vary by release, so it is not appropriate for us +REM to look inside them. Rather, just run them in a subordinate +REM process and extract the settings we need. +REM ================================================================ +REM +REM Current (VS2017 and beyond) +REM ------------------- +REM Visual Studio 2017 introduced a new installation layout and +REM support for side-by-side installation of multiple versions of +REM VS2017. Furthermore, these can all coexist with installations +REM of previous versions of VS (which have a completely different +REM layout on disk). 
+REM +REM VS2017 Update 2 introduced a "vswhere.exe" command: +REM https://github.com/Microsoft/vswhere +REM https://blogs.msdn.microsoft.com/heaths/2017/02/25/vswhere-available/ +REM https://blogs.msdn.microsoft.com/vcblog/2017/03/06/finding-the-visual-c-compiler-tools-in-visual-studio-2017/ +REM +REM VS2015 +REM ------ +REM Visual Studio 2015 uses the traditional VcVarsAll. +REM +REM Earlier Versions +REM ---------------- +REM TODO +REM +REM ================================================================ +REM Note: Throughout this script we use "dir && " rather +REM than "if exist " because of script problems with pathnames +REM containing spaces. +REM ================================================================ + +REM Sanitize PATH to prevent git-sdk paths from confusing "wmic.exe" +REM (called internally in some of the system BAT files). +SET PATH=%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem; + +REM ================================================================ + +:current + SET vs_where=C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe + dir "%vs_where%" >nul 2>nul && GOTO have_vs_where + GOTO not_2017 + +:have_vs_where + REM Try to use VsWhere to get the location of VsDevCmd. + + REM Keep VsDevCmd from cd'ing away. + SET VSCMD_START_DIR=. + + REM Get the root of the VS product installation. + FOR /F "usebackq tokens=*" %%i IN (`"%vs_where%" -latest -requires Microsoft.VisualStudio.Workload.NativeDesktop -property installationPath`) DO @SET vs_ip=%%i + + SET vs_devcmd=%vs_ip%\Common7\Tools\VsDevCmd.bat + dir "%vs_devcmd%" >nul 2>nul && GOTO have_vs_devcmd + GOTO not_2017 + +:have_vs_devcmd + REM Use VsDevCmd to setup the environment of this process. + REM Setup CL for building 64-bit apps using 64-bit tools. + @call "%vs_devcmd%" -no_logo -arch=x64 -host_arch=x64 + + SET tgt=%VSCMD_ARG_TGT_ARCH% + + SET mn=%VCToolsInstallDir% + SET msvc_includes=-I"%mn%INCLUDE" + SET msvc_libs=-L"%mn%lib\%tgt%" + SET msvc_bin_dir=%mn%bin\Host%VSCMD_ARG_HOST_ARCH%\%tgt% + + SET sdk_dir=%WindowsSdkDir% + SET sdk_ver=%WindowsSDKVersion% + SET si=%sdk_dir%Include\%sdk_ver% + SET sdk_includes=-I"%si%ucrt" -I"%si%um" -I"%si%shared" + SET sl=%sdk_dir%lib\%sdk_ver% + SET sdk_libs=-L"%sl%ucrt\%tgt%" -L"%sl%um\%tgt%" + + SET vs_ver=%VisualStudioVersion% + + GOTO print_vars + +REM ================================================================ + +:not_2017 + REM See if VS2015 is installed. + + SET vs_2015_bat=C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat + dir "%vs_2015_bat%" >nul 2>nul && GOTO have_vs_2015 + GOTO not_2015 + +:have_vs_2015 + REM Use VcVarsAll like the "x64 Native" command prompt. + REM Setup CL for building 64-bit apps using 64-bit tools. + @call "%vs_2015_bat%" amd64 + + REM Note that in VS2015 they use "x64" in some contexts and "amd64" in others. + SET mn=C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\ + SET msvc_includes=-I"%mn%INCLUDE" + SET msvc_libs=-L"%mn%lib\amd64" + SET msvc_bin_dir=%mn%bin\amd64 + + SET sdk_dir=%WindowsSdkDir% + SET sdk_ver=%WindowsSDKVersion% + SET si=%sdk_dir%Include\%sdk_ver% + SET sdk_includes=-I"%si%ucrt" -I"%si%um" -I"%si%shared" -I"%si%winrt" + SET sl=%sdk_dir%lib\%sdk_ver% + SET sdk_libs=-L"%sl%ucrt\x64" -L"%sl%um\x64" + + SET vs_ver=%VisualStudioVersion% + + GOTO print_vars + +REM ================================================================ + +:not_2015 + REM TODO.... + echo TODO support older versions of VS. 
>&2 + EXIT /B 1 + +REM ================================================================ + +:print_vars + REM Dump the essential vars to stdout to allow the main + REM Makefile to include it. See config.mak.uname. + REM Include DOS-style and BASH-style path for bin dir. + + echo msvc_bin_dir=%msvc_bin_dir% + SET X1=%msvc_bin_dir:C:=/C% + SET X2=%X1:\=/% + echo msvc_bin_dir_msys=%X2% + + echo msvc_includes=%msvc_includes% + echo msvc_libs=%msvc_libs% + + echo sdk_includes=%sdk_includes% + echo sdk_libs=%sdk_libs% + + echo vs_ver=%vs_ver% + + EXIT /B 0 diff --git a/compat/vcbuild/scripts/clink.pl b/compat/vcbuild/scripts/clink.pl index a87d0da512..3d6fa21c1e 100755 --- a/compat/vcbuild/scripts/clink.pl +++ b/compat/vcbuild/scripts/clink.pl @@ -12,32 +12,49 @@ use strict; my @args = (); my @cflags = (); +my @lflags = (); my $is_linking = 0; +my $is_debug = 0; while (@ARGV) { my $arg = shift @ARGV; - if ("$arg" =~ /^-[DIMGO]/) { + if ("$arg" eq "-DDEBUG") { + # Some vcpkg-based libraries have different names for release + # and debug versions. This hack assumes that -DDEBUG comes + # before any "-l*" flags. + $is_debug = 1; + } + if ("$arg" =~ /^-[DIMGOZ]/) { push(@cflags, $arg); } elsif ("$arg" eq "-o") { my $file_out = shift @ARGV; if ("$file_out" =~ /exe$/) { $is_linking = 1; + # Create foo.exe and foo.pdb push(@args, "-OUT:$file_out"); } else { + # Create foo.o and foo.o.pdb push(@args, "-Fo$file_out"); + push(@args, "-Fd$file_out.pdb"); } } elsif ("$arg" eq "-lz") { + if ($is_debug) { + push(@args, "zlibd.lib"); + } else{ push(@args, "zlib.lib"); + } } elsif ("$arg" eq "-liconv") { - push(@args, "iconv.lib"); + push(@args, "libiconv.lib"); } elsif ("$arg" eq "-lcrypto") { push(@args, "libeay32.lib"); } elsif ("$arg" eq "-lssl") { push(@args, "ssleay32.lib"); } elsif ("$arg" eq "-lcurl") { push(@args, "libcurl.lib"); + } elsif ("$arg" eq "-lexpat") { + push(@args, "expat.lib"); } elsif ("$arg" =~ /^-L/ && "$arg" ne "-LTCG") { $arg =~ s/^-L/-LIBPATH:/; - push(@args, $arg); + push(@lflags, $arg); } elsif ("$arg" =~ /^-R/) { # eat } else { @@ -45,10 +62,11 @@ while (@ARGV) { } } if ($is_linking) { + push(@args, @lflags); unshift(@args, "link.exe"); } else { unshift(@args, "cl.exe"); push(@args, @cflags); } -#printf("**** @args\n"); +printf(STDERR "**** @args\n\n\n") if (!defined($ENV{'QUIET_GEN'})); exit (system(@args) != 0); diff --git a/compat/vcbuild/vcpkg_copy_dlls.bat b/compat/vcbuild/vcpkg_copy_dlls.bat new file mode 100644 index 0000000000..13661c14f8 --- /dev/null +++ b/compat/vcbuild/vcpkg_copy_dlls.bat @@ -0,0 +1,39 @@ +@ECHO OFF +REM ================================================================ +REM This script is an optional step. It copies the *.dll and *.pdb +REM files (created by vcpkg_install.bat) into the top-level directory +REM of the repo so that you can type "./git.exe" and find them without +REM having to fixup your PATH. +REM +REM NOTE: Because the names of some DLL files change between DEBUG and +REM NOTE: RELEASE builds when built using "vcpkg.exe", you will need +REM NOTE: to copy up the corresponding version. +REM ================================================================ + + SETLOCAL EnableDelayedExpansion + + @FOR /F "delims=" %%D IN ("%~dp0") DO @SET cwd=%%~fD + cd %cwd% + + SET arch=x64-windows + SET inst=%cwd%vcpkg\installed\%arch% + + IF [%1]==[release] ( + echo Copying RELEASE mode DLLs to repo root... + ) ELSE IF [%1]==[debug] ( + SET inst=%inst%\debug + echo Copying DEBUG mode DLLs to repo root... + ) ELSE ( + echo ERROR: Invalid argument. 
+ echo Usage: %~0 release + echo Usage: %~0 debug + EXIT /B 1 + ) + + xcopy /e/s/v/y %inst%\bin\*.dll ..\..\ + xcopy /e/s/v/y %inst%\bin\*.pdb ..\..\ + + xcopy /e/s/v/y %inst%\bin\*.dll ..\..\t\helper\ + xcopy /e/s/v/y %inst%\bin\*.pdb ..\..\t\helper\ + + EXIT /B 0 diff --git a/compat/vcbuild/vcpkg_install.bat b/compat/vcbuild/vcpkg_install.bat new file mode 100644 index 0000000000..3d086c39c3 --- /dev/null +++ b/compat/vcbuild/vcpkg_install.bat @@ -0,0 +1,81 @@ +@ECHO OFF +REM ================================================================ +REM This script installs the "vcpkg" source package manager and uses +REM it to build the third-party libraries that git requires when it +REM is built using MSVC. +REM +REM [1] Install VCPKG. +REM [a] Create /compat/vcbuild/vcpkg/ +REM [b] Download "vcpkg". +REM [c] Compile using the currently installed version of VS. +REM [d] Create /compat/vcbuild/vcpkg/vcpkg.exe +REM +REM [2] Install third-party libraries. +REM [a] Download each (which may also install CMAKE). +REM [b] Compile in RELEASE mode and install in: +REM vcpkg/installed//{bin,lib} +REM [c] Compile in DEBUG mode and install in: +REM vcpkg/installed//debug/{bin,lib} +REM [d] Install headers in: +REM vcpkg/installed//include +REM +REM [3] Create a set of MAKE definitions for the top-level +REM Makefile to allow "make MSVC=1" to find the above +REM third-party libraries. +REM [a] Write vcpkg/VCPGK-DEFS +REM +REM https://blogs.msdn.microsoft.com/vcblog/2016/09/19/vcpkg-a-tool-to-acquire-and-build-c-open-source-libraries-on-windows/ +REM https://github.com/Microsoft/vcpkg +REM https://vcpkg.readthedocs.io/en/latest/ +REM ================================================================ + + SETLOCAL EnableDelayedExpansion + + @FOR /F "delims=" %%D IN ("%~dp0") DO @SET cwd=%%~fD + cd %cwd% + + dir vcpkg\vcpkg.exe >nul 2>nul && GOTO :install_libraries + + echo Fetching vcpkg in %cwd%vcpkg + git.exe clone https://github.com/Microsoft/vcpkg vcpkg + IF ERRORLEVEL 1 ( EXIT /B 1 ) + + cd vcpkg + echo Building vcpkg + powershell -exec bypass scripts\bootstrap.ps1 + IF ERRORLEVEL 1 ( EXIT /B 1 ) + + echo Successfully installed %cwd%vcpkg\vcpkg.exe + +:install_libraries + SET arch=x64-windows + + echo Installing third-party libraries... + FOR %%i IN (zlib expat libiconv openssl libssh2 curl) DO ( + cd %cwd%vcpkg + SET p="packages\%%i_%arch%" + IF NOT EXIST "%p%" CALL :sub__install_one %%i + IF ERRORLEVEL 1 ( EXIT /B 1 ) + ) + +:install_defines + cd %cwd% + SET inst=%cwd%vcpkg\installed\%arch% + + echo vcpkg_inc=-I"%inst%\include">VCPKG-DEFS + echo vcpkg_rel_lib=-L"%inst%\lib">>VCPKG-DEFS + echo vcpkg_rel_bin="%inst%\bin">>VCPKG-DEFS + echo vcpkg_dbg_lib=-L"%inst%\debug\lib">>VCPKG-DEFS + echo vcpkg_dbg_bin="%inst%\debug\bin">>VCPKG-DEFS + + EXIT /B 0 + + +:sub__install_one + echo Installing package %1... 
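+	REM The command below uses vcpkg's "<package>:<triplet>" syntax, e.g.
+	REM "zlib:x64-windows" (%arch% is set to x64-windows above). Release and
+	REM debug flavors of each library end up under vcpkg\installed\%arch%\
+	REM as outlined in the overview at the top of this script.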
+ + .\vcpkg.exe install %1:%arch% + IF ERRORLEVEL 1 ( EXIT /B 1 ) + + echo Finished %1 + goto :EOF diff --git a/compat/win32/dirent.c b/compat/win32/dirent.c index 52420ec7d4..b3bd8d7af7 100644 --- a/compat/win32/dirent.c +++ b/compat/win32/dirent.c @@ -1,15 +1,19 @@ #include "../../git-compat-util.h" -struct DIR { +typedef struct dirent_DIR { + struct DIR base_dir; /* extend base struct DIR */ struct dirent dd_dir; /* includes d_type */ HANDLE dd_handle; /* FindFirstFile handle */ int dd_stat; /* 0-based index */ -}; + char dd_name[MAX_PATH * 3]; /* file name (* 3 for UTF-8 conversion) */ +} dirent_DIR; + +DIR *(*opendir)(const char *dirname) = dirent_opendir; static inline void finddata2dirent(struct dirent *ent, WIN32_FIND_DATAW *fdata) { - /* convert UTF-16 name to UTF-8 */ - xwcstoutf(ent->d_name, fdata->cFileName, sizeof(ent->d_name)); + /* convert UTF-16 name to UTF-8 (d_name points to dirent_DIR.dd_name) */ + xwcstoutf(ent->d_name, fdata->cFileName, MAX_PATH * 3); /* Set file type, based on WIN32_FIND_DATA */ if (fdata->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) @@ -18,41 +22,7 @@ static inline void finddata2dirent(struct dirent *ent, WIN32_FIND_DATAW *fdata) ent->d_type = DT_REG; } -DIR *opendir(const char *name) -{ - wchar_t pattern[MAX_PATH + 2]; /* + 2 for '/' '*' */ - WIN32_FIND_DATAW fdata; - HANDLE h; - int len; - DIR *dir; - - /* convert name to UTF-16 and check length < MAX_PATH */ - if ((len = xutftowcs_path(pattern, name)) < 0) - return NULL; - - /* append optional '/' and wildcard '*' */ - if (len && !is_dir_sep(pattern[len - 1])) - pattern[len++] = '/'; - pattern[len++] = '*'; - pattern[len] = 0; - - /* open find handle */ - h = FindFirstFileW(pattern, &fdata); - if (h == INVALID_HANDLE_VALUE) { - DWORD err = GetLastError(); - errno = (err == ERROR_DIRECTORY) ? ENOTDIR : err_win_to_posix(err); - return NULL; - } - - /* initialize DIR structure and copy first dir entry */ - dir = xmalloc(sizeof(DIR)); - dir->dd_handle = h; - dir->dd_stat = 0; - finddata2dirent(&dir->dd_dir, &fdata); - return dir; -} - -struct dirent *readdir(DIR *dir) +static struct dirent *dirent_readdir(dirent_DIR *dir) { if (!dir) { errno = EBADF; /* No set_errno for mingw */ @@ -79,7 +49,7 @@ struct dirent *readdir(DIR *dir) return &dir->dd_dir; } -int closedir(DIR *dir) +static int dirent_closedir(dirent_DIR *dir) { if (!dir) { errno = EBADF; @@ -90,3 +60,44 @@ int closedir(DIR *dir) free(dir); return 0; } + +DIR *dirent_opendir(const char *name) +{ + wchar_t pattern[MAX_LONG_PATH + 2]; /* + 2 for "\*" */ + WIN32_FIND_DATAW fdata; + HANDLE h; + int len; + dirent_DIR *dir; + + /* convert name to UTF-16 and check length */ + if ((len = xutftowcs_path_ex(pattern, name, MAX_LONG_PATH, -1, + MAX_PATH - 2, core_long_paths)) < 0) + return NULL; + + /* + * append optional '\' and wildcard '*'. Note: we need to use '\' as + * Windows doesn't translate '/' to '\' for "\\?\"-prefixed paths. + */ + if (len && !is_dir_sep(pattern[len - 1])) + pattern[len++] = '\\'; + pattern[len++] = '*'; + pattern[len] = 0; + + /* open find handle */ + h = FindFirstFileW(pattern, &fdata); + if (h == INVALID_HANDLE_VALUE) { + DWORD err = GetLastError(); + errno = (err == ERROR_DIRECTORY) ? 
ENOTDIR : err_win_to_posix(err); + return NULL; + } + + /* initialize DIR structure and copy first dir entry */ + dir = xmalloc(sizeof(dirent_DIR)); + dir->base_dir.preaddir = (struct dirent *(*)(DIR *dir)) dirent_readdir; + dir->base_dir.pclosedir = (int (*)(DIR *dir)) dirent_closedir; + dir->dd_dir.d_name = dir->dd_name; + dir->dd_handle = h; + dir->dd_stat = 0; + finddata2dirent(&dir->dd_dir, &fdata); + return (DIR*) dir; +} diff --git a/compat/win32/dirent.h b/compat/win32/dirent.h index 058207e4bf..6b3ddee51b 100644 --- a/compat/win32/dirent.h +++ b/compat/win32/dirent.h @@ -1,20 +1,32 @@ #ifndef DIRENT_H #define DIRENT_H -typedef struct DIR DIR; - #define DT_UNKNOWN 0 #define DT_DIR 1 #define DT_REG 2 #define DT_LNK 3 struct dirent { - unsigned char d_type; /* file type to prevent lstat after readdir */ - char d_name[MAX_PATH * 3]; /* file name (* 3 for UTF-8 conversion) */ + unsigned char d_type; /* file type to prevent lstat after readdir */ + char *d_name; /* file name */ }; -DIR *opendir(const char *dirname); -struct dirent *readdir(DIR *dir); -int closedir(DIR *dir); +/* + * Base DIR structure, contains pointers to readdir/closedir implementations so + * that opendir may choose a concrete implementation on a call-by-call basis. + */ +typedef struct DIR { + struct dirent *(*preaddir)(struct DIR *dir); + int (*pclosedir)(struct DIR *dir); +} DIR; + +/* default dirent implementation */ +extern DIR *dirent_opendir(const char *dirname); + +/* current dirent implementation */ +extern DIR *(*opendir)(const char *dirname); + +#define readdir(dir) (dir->preaddir(dir)) +#define closedir(dir) (dir->pclosedir(dir)) #endif /* DIRENT_H */ diff --git a/compat/win32/fscache.c b/compat/win32/fscache.c new file mode 100644 index 0000000000..4ebd15e426 --- /dev/null +++ b/compat/win32/fscache.c @@ -0,0 +1,492 @@ +#include "../../cache.h" +#include "../../hashmap.h" +#include "../win32.h" +#include "fscache.h" + +static int initialized; +static volatile long enabled; +static struct hashmap map; +static CRITICAL_SECTION mutex; + +/* + * An entry in the file system cache. Used for both entire directory listings + * and file entries. + */ +struct fsentry { + struct hashmap_entry ent; + mode_t st_mode; + /* Length of name. */ + unsigned short len; + /* + * Name of the entry. For directory listings: relative path of the + * directory, without trailing '/' (empty for cwd()). For file entries: + * name of the file. Typically points to the end of the structure if + * the fsentry is allocated on the heap (see fsentry_alloc), or to a + * local variable if on the stack (see fsentry_init). + */ + const char *name; + /* Pointer to the directory listing, or NULL for the listing itself. */ + struct fsentry *list; + /* Pointer to the next file entry of the list. */ + struct fsentry *next; + + union { + /* Reference count of the directory listing. */ + volatile long refcnt; + /* Handle to wait on the loading thread. */ + HANDLE hwait; + struct { + /* More stat members (only used for file entries). */ + off64_t st_size; + struct timespec st_atim; + struct timespec st_mtim; + struct timespec st_ctim; + }; + }; +}; + +/* + * Compares the paths of two fsentry structures for equality. + */ +static int fsentry_cmp(void *unused_cmp_data, + const struct fsentry *fse1, const struct fsentry *fse2, + void *unused_keydata) +{ + int res; + if (fse1 == fse2) + return 0; + + /* compare the list parts first */ + if (fse1->list != fse2->list && + (res = fsentry_cmp(NULL, fse1->list ? fse1->list : fse1, + fse2->list ? 
fse2->list : fse2, NULL))) + return res; + + /* if list parts are equal, compare len and name */ + if (fse1->len != fse2->len) + return fse1->len - fse2->len; + return strnicmp(fse1->name, fse2->name, fse1->len); +} + +/* + * Calculates the hash code of an fsentry structure's path. + */ +static unsigned int fsentry_hash(const struct fsentry *fse) +{ + unsigned int hash = fse->list ? fse->list->ent.hash : 0; + return hash ^ memihash(fse->name, fse->len); +} + +/* + * Initialize an fsentry structure for use by fsentry_hash and fsentry_cmp. + */ +static void fsentry_init(struct fsentry *fse, struct fsentry *list, + const char *name, size_t len) +{ + fse->list = list; + fse->name = name; + fse->len = len; + hashmap_entry_init(fse, fsentry_hash(fse)); +} + +/* + * Allocate an fsentry structure on the heap. + */ +static struct fsentry *fsentry_alloc(struct fsentry *list, const char *name, + size_t len) +{ + /* overallocate fsentry and copy the name to the end */ + struct fsentry *fse = xmalloc(sizeof(struct fsentry) + len + 1); + char *nm = ((char*) fse) + sizeof(struct fsentry); + memcpy(nm, name, len); + nm[len] = 0; + /* init the rest of the structure */ + fsentry_init(fse, list, nm, len); + fse->next = NULL; + fse->refcnt = 1; + return fse; +} + +/* + * Add a reference to an fsentry. + */ +inline static void fsentry_addref(struct fsentry *fse) +{ + if (fse->list) + fse = fse->list; + + InterlockedIncrement(&(fse->refcnt)); +} + +/* + * Release the reference to an fsentry, frees the memory if its the last ref. + */ +static void fsentry_release(struct fsentry *fse) +{ + if (fse->list) + fse = fse->list; + + if (InterlockedDecrement(&(fse->refcnt))) + return; + + while (fse) { + struct fsentry *next = fse->next; + free(fse); + fse = next; + } +} + +/* + * Allocate and initialize an fsentry from a WIN32_FIND_DATA structure. + */ +static struct fsentry *fseentry_create_entry(struct fsentry *list, + const WIN32_FIND_DATAW *fdata) +{ + char buf[MAX_PATH * 3]; + int len; + struct fsentry *fse; + len = xwcstoutf(buf, fdata->cFileName, ARRAY_SIZE(buf)); + + fse = fsentry_alloc(list, buf, len); + + fse->st_mode = file_attr_to_st_mode(fdata->dwFileAttributes); + fse->st_size = (((off64_t) (fdata->nFileSizeHigh)) << 32) + | fdata->nFileSizeLow; + filetime_to_timespec(&(fdata->ftLastAccessTime), &(fse->st_atim)); + filetime_to_timespec(&(fdata->ftLastWriteTime), &(fse->st_mtim)); + filetime_to_timespec(&(fdata->ftCreationTime), &(fse->st_ctim)); + + return fse; +} + +/* + * Create an fsentry-based directory listing (similar to opendir / readdir). + * Dir should not contain trailing '/'. Use an empty string for the current + * directory (not "."!). + */ +static struct fsentry *fsentry_create_list(const struct fsentry *dir) +{ + wchar_t pattern[MAX_LONG_PATH + 2]; /* + 2 for "\*" */ + WIN32_FIND_DATAW fdata; + HANDLE h; + int wlen; + struct fsentry *list, **phead; + DWORD err; + + /* convert name to UTF-16 and check length */ + if ((wlen = xutftowcs_path_ex(pattern, dir->name, MAX_LONG_PATH, + dir->len, MAX_PATH - 2, core_long_paths)) < 0) + return NULL; + + /* + * append optional '\' and wildcard '*'. Note: we need to use '\' as + * Windows doesn't translate '/' to '\' for "\\?\"-prefixed paths. + */ + if (wlen) + pattern[wlen++] = '\\'; + pattern[wlen++] = '*'; + pattern[wlen] = 0; + + /* open find handle */ + h = FindFirstFileW(pattern, &fdata); + if (h == INVALID_HANDLE_VALUE) { + err = GetLastError(); + errno = (err == ERROR_DIRECTORY) ? 
ENOTDIR : err_win_to_posix(err); + return NULL; + } + + /* allocate object to hold directory listing */ + list = fsentry_alloc(NULL, dir->name, dir->len); + + /* walk directory and build linked list of fsentry structures */ + phead = &list->next; + do { + *phead = fseentry_create_entry(list, &fdata); + phead = &(*phead)->next; + } while (FindNextFileW(h, &fdata)); + + /* remember result of last FindNextFile, then close find handle */ + err = GetLastError(); + FindClose(h); + + /* return the list if we've got all the files */ + if (err == ERROR_NO_MORE_FILES) + return list; + + /* otherwise free the list and return error */ + fsentry_release(list); + errno = err_win_to_posix(err); + return NULL; +} + +/* + * Adds a directory listing to the cache. + */ +static void fscache_add(struct fsentry *fse) +{ + if (fse->list) + fse = fse->list; + + for (; fse; fse = fse->next) + hashmap_add(&map, fse); +} + +/* + * Clears the cache. + */ +static void fscache_clear(void) +{ + hashmap_free(&map, 1); + hashmap_init(&map, (hashmap_cmp_fn)fsentry_cmp, NULL, 0); +} + +/* + * Checks if the cache is enabled for the given path. + */ +static inline int fscache_enabled(const char *path) +{ + return enabled > 0 && !is_absolute_path(path); +} + +/* + * Looks up a cache entry, waits if its being loaded by another thread. + * The mutex must be owned by the calling thread. + */ +static struct fsentry *fscache_get_wait(struct fsentry *key) +{ + struct fsentry *fse = hashmap_get(&map, key, NULL); + + /* return if its a 'real' entry (future entries have refcnt == 0) */ + if (!fse || fse->list || fse->refcnt) + return fse; + + /* create an event and link our key to the future entry */ + key->hwait = CreateEvent(NULL, TRUE, FALSE, NULL); + key->next = fse->next; + fse->next = key; + + /* wait for the loading thread to signal us */ + LeaveCriticalSection(&mutex); + WaitForSingleObject(key->hwait, INFINITE); + CloseHandle(key->hwait); + EnterCriticalSection(&mutex); + + /* repeat cache lookup */ + return hashmap_get(&map, key, NULL); +} + +/* + * Looks up or creates a cache entry for the specified key. + */ +static struct fsentry *fscache_get(struct fsentry *key) +{ + struct fsentry *fse, *future, *waiter; + + EnterCriticalSection(&mutex); + /* check if entry is in cache */ + fse = fscache_get_wait(key); + if (fse) { + fsentry_addref(fse); + LeaveCriticalSection(&mutex); + return fse; + } + /* if looking for a file, check if directory listing is in cache */ + if (!fse && key->list) { + fse = fscache_get_wait(key->list); + if (fse) { + LeaveCriticalSection(&mutex); + /* dir entry without file entry -> file doesn't exist */ + errno = ENOENT; + return NULL; + } + } + + /* add future entry to indicate that we're loading it */ + future = key->list ? key->list : key; + future->next = NULL; + future->refcnt = 0; + hashmap_add(&map, future); + + /* create the directory listing (outside mutex!) 
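+	 * (the lock is dropped so other threads can keep using the cache while
+	 * FindFirstFile/FindNextFile run; threads that ask for the same
+	 * directory wait on the "future" entry added above and are signaled
+	 * once the listing is ready)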
*/ + LeaveCriticalSection(&mutex); + fse = fsentry_create_list(future); + EnterCriticalSection(&mutex); + + /* remove future entry and signal waiting threads */ + hashmap_remove(&map, future, NULL); + waiter = future->next; + while (waiter) { + HANDLE h = waiter->hwait; + waiter = waiter->next; + SetEvent(h); + } + + /* leave on error (errno set by fsentry_create_list) */ + if (!fse) { + LeaveCriticalSection(&mutex); + return NULL; + } + + /* add directory listing to the cache */ + fscache_add(fse); + + /* lookup file entry if requested (fse already points to directory) */ + if (key->list) + fse = hashmap_get(&map, key, NULL); + + /* return entry or ENOENT */ + if (fse) + fsentry_addref(fse); + else + errno = ENOENT; + + LeaveCriticalSection(&mutex); + return fse; +} + +/* + * Enables or disables the cache. Note that the cache is read-only, changes to + * the working directory are NOT reflected in the cache while enabled. + */ +int fscache_enable(int enable) +{ + int result; + + if (!initialized) { + /* allow the cache to be disabled entirely */ + if (!core_fscache) + return 0; + + InitializeCriticalSection(&mutex); + hashmap_init(&map, (hashmap_cmp_fn) fsentry_cmp, NULL, 0); + initialized = 1; + } + + result = enable ? InterlockedIncrement(&enabled) + : InterlockedDecrement(&enabled); + + if (enable && result == 1) { + /* redirect opendir and lstat to the fscache implementations */ + opendir = fscache_opendir; + lstat = fscache_lstat; + } else if (!enable && !result) { + /* reset opendir and lstat to the original implementations */ + opendir = dirent_opendir; + lstat = mingw_lstat; + EnterCriticalSection(&mutex); + fscache_clear(); + LeaveCriticalSection(&mutex); + } + return result; +} + +/* + * Lstat replacement, uses the cache if enabled, otherwise redirects to + * mingw_lstat. + */ +int fscache_lstat(const char *filename, struct stat *st) +{ + int dirlen, base, len; + struct fsentry key[2], *fse; + + if (!fscache_enabled(filename)) + return mingw_lstat(filename, st); + + /* split filename into path + name */ + len = strlen(filename); + if (len && is_dir_sep(filename[len - 1])) + len--; + base = len; + while (base && !is_dir_sep(filename[base - 1])) + base--; + dirlen = base ? base - 1 : 0; + + /* lookup entry for path + name in cache */ + fsentry_init(key, NULL, filename, dirlen); + fsentry_init(key + 1, key, filename + base, len - base); + fse = fscache_get(key + 1); + if (!fse) + return -1; + + /* copy stat data */ + st->st_ino = 0; + st->st_gid = 0; + st->st_uid = 0; + st->st_dev = 0; + st->st_rdev = 0; + st->st_nlink = 1; + st->st_mode = fse->st_mode; + st->st_size = fse->st_size; + st->st_atim = fse->st_atim; + st->st_mtim = fse->st_mtim; + st->st_ctim = fse->st_ctim; + + /* don't forget to release fsentry */ + fsentry_release(fse); + return 0; +} + +typedef struct fscache_DIR { + struct DIR base_dir; /* extend base struct DIR */ + struct fsentry *pfsentry; + struct dirent dirent; +} fscache_DIR; + +/* + * Readdir replacement. + */ +static struct dirent *fscache_readdir(DIR *base_dir) +{ + fscache_DIR *dir = (fscache_DIR*) base_dir; + struct fsentry *next = dir->pfsentry->next; + if (!next) + return NULL; + dir->pfsentry = next; + dir->dirent.d_type = S_ISDIR(next->st_mode) ? DT_DIR : DT_REG; + dir->dirent.d_name = (char*) next->name; + return &(dir->dirent); +} + +/* + * Closedir replacement. 
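+ * Unlike the plain dirent implementation, no OS handle needs to be closed
+ * here (the find handle was already closed when the listing was built);
+ * this only drops the reference on the cached directory listing and frees
+ * the fscache_DIR wrapper.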
+ */ +static int fscache_closedir(DIR *base_dir) +{ + fscache_DIR *dir = (fscache_DIR*) base_dir; + fsentry_release(dir->pfsentry); + free(dir); + return 0; +} + +/* + * Opendir replacement, uses a directory listing from the cache if enabled, + * otherwise calls original dirent implementation. + */ +DIR *fscache_opendir(const char *dirname) +{ + struct fsentry key, *list; + fscache_DIR *dir; + int len; + + if (!fscache_enabled(dirname)) + return dirent_opendir(dirname); + + /* prepare name (strip trailing '/', replace '.') */ + len = strlen(dirname); + if ((len == 1 && dirname[0] == '.') || + (len && is_dir_sep(dirname[len - 1]))) + len--; + + /* get directory listing from cache */ + fsentry_init(&key, NULL, dirname, len); + list = fscache_get(&key); + if (!list) + return NULL; + + /* alloc and return DIR structure */ + dir = (fscache_DIR*) xmalloc(sizeof(fscache_DIR)); + dir->base_dir.preaddir = fscache_readdir; + dir->base_dir.pclosedir = fscache_closedir; + dir->pfsentry = list; + return (DIR*) dir; +} diff --git a/compat/win32/fscache.h b/compat/win32/fscache.h new file mode 100644 index 0000000000..ed518b422d --- /dev/null +++ b/compat/win32/fscache.h @@ -0,0 +1,10 @@ +#ifndef FSCACHE_H +#define FSCACHE_H + +int fscache_enable(int enable); +#define enable_fscache(x) fscache_enable(x) + +DIR *fscache_opendir(const char *dir); +int fscache_lstat(const char *file_name, struct stat *buf); + +#endif diff --git a/compat/winansi.c b/compat/winansi.c index d81547fa0f..6c0a6c83d7 100644 --- a/compat/winansi.c +++ b/compat/winansi.c @@ -544,7 +544,20 @@ static HANDLE swap_osfhnd(int fd, HANDLE new_handle) #ifdef DETECT_MSYS_TTY #include + +#if defined(_MSC_VER) + +typedef struct _OBJECT_NAME_INFORMATION +{ + UNICODE_STRING Name; + WCHAR NameBuffer[0]; +} OBJECT_NAME_INFORMATION, *POBJECT_NAME_INFORMATION; + +#define ObjectNameInformation 1 + +#else #include +#endif static void detect_msys_tty(int fd) { diff --git a/config.mak.uname b/config.mak.uname index 3ee7da0e23..6d29f437de 100644 --- a/config.mak.uname +++ b/config.mak.uname @@ -1,5 +1,9 @@ # Platform specific Makefile tweaks based on uname detection +# Define NO_SAFESEH if you need MSVC/Visual Studio to ignore the lack of +# Microsoft's Safe Exception Handling in libraries (such as zlib). +# Typically required for VS2013+/32-bit compilation on Vista+ versions. + uname_S := $(shell sh -c 'uname -s 2>/dev/null || echo not') uname_M := $(shell sh -c 'uname -m 2>/dev/null || echo not') uname_O := $(shell sh -c 'uname -o 2>/dev/null || echo not') @@ -11,6 +15,21 @@ ifdef MSVC # avoid the MingW and Cygwin configuration sections uname_S := Windows uname_O := Windows + + # Generate and include makefile variables that point to the + # currently installed set of MSVC command line tools. +compat/vcbuild/MSVC-DEFS-GEN: compat/vcbuild/find_vs_env.bat + @"$<" | tr '\\' / >"$@" +include compat/vcbuild/MSVC-DEFS-GEN + + # See if vcpkg and the vcpkg-build versions of the third-party + # libraries that we use are installed. We include the result + # to get $(vcpkg_*) variables defined for the Makefile. +ifeq (,$(SKIP_VCPKG)) +compat/vcbuild/VCPKG-DEFS: compat/vcbuild/vcpkg_install.bat + @"$<" +include compat/vcbuild/VCPKG-DEFS +endif endif # We choose to avoid "if .. else if .. else .. 
endif endif" @@ -349,6 +368,19 @@ endif ifeq ($(uname_S),Windows) GIT_VERSION := $(GIT_VERSION).MSVC pathsep = ; + # Assume that this is built in Git for Windows' SDK + ifeq (MINGW32,$(MSYSTEM)) + prefix = /mingw32 + else + prefix = /mingw64 + endif + # Prepend MSVC 64-bit tool-chain to PATH. + # + # A regular Git Bash *does not* have cl.exe in its $PATH. As there is a + # link.exe next to, and required by, cl.exe, we have to prepend this + # onto the existing $PATH. + # + SANE_TOOL_PATH ?= $(msvc_bin_dir_msys) HAVE_ALLOCA_H = YesPlease NO_PREAD = YesPlease NEEDS_CRYPTO_WITH_SSL = YesPlease @@ -361,11 +393,14 @@ ifeq ($(uname_S),Windows) NO_STRCASESTR = YesPlease NO_STRLCPY = YesPlease NO_MEMMEM = YesPlease - # NEEDS_LIBICONV = YesPlease - NO_ICONV = YesPlease + NEEDS_LIBICONV = YesPlease NO_STRTOUMAX = YesPlease NO_MKDTEMP = YesPlease - SNPRINTF_RETURNS_BOGUS = YesPlease + NO_INTTYPES_H = YesPlease + # VS2015 with UCRT claims that snprintf and friends are C99 compliant, + # so we don't need this: + # + # SNPRINTF_RETURNS_BOGUS = YesPlease NO_SVN_TESTS = YesPlease RUNTIME_PREFIX = YesPlease HAVE_WPGMPTR = YesWeDo @@ -378,7 +413,6 @@ ifeq ($(uname_S),Windows) NO_REGEX = YesPlease NO_GETTEXT = YesPlease NO_PYTHON = YesPlease - BLK_SHA1 = YesPlease ETAGS_TARGET = ETAGS NO_POSIX_GOODIES = UnfortunatelyYes NATIVE_CRLF = YesPlease @@ -387,24 +421,48 @@ ifeq ($(uname_S),Windows) CC = compat/vcbuild/scripts/clink.pl AR = compat/vcbuild/scripts/lib.pl CFLAGS = - BASIC_CFLAGS = -nologo -I. -I../zlib -Icompat/vcbuild -Icompat/vcbuild/include -DWIN32 -D_CONSOLE -DHAVE_STRING_H -D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE + BASIC_CFLAGS = -nologo -I. -Icompat/vcbuild/include -DWIN32 -D_CONSOLE -DHAVE_STRING_H -D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE COMPAT_OBJS = compat/msvc.o compat/winansi.o \ compat/win32/pthread.o compat/win32/syslog.o \ - compat/win32/dirent.o - COMPAT_CFLAGS = -D__USE_MINGW_ACCESS -DNOGDI -DHAVE_STRING_H -Icompat -Icompat/regex -Icompat/win32 -DSTRIP_EXTENSION=\".exe\" + compat/win32/dirent.o compat/win32/fscache.o + COMPAT_CFLAGS = -D__USE_MINGW_ACCESS -DDETECT_MSYS_TTY -DNOGDI -DHAVE_STRING_H -Icompat -Icompat/regex -Icompat/win32 -DSTRIP_EXTENSION=\".exe\" BASIC_LDFLAGS = -IGNORE:4217 -IGNORE:4049 -NOLOGO -SUBSYSTEM:CONSOLE - EXTLIBS = user32.lib advapi32.lib shell32.lib wininet.lib ws2_32.lib invalidcontinue.obj + # invalidcontinue.obj allows Git's source code to close the same file + # handle twice, or to access the osfhandle of an already-closed stdout + # See https://msdn.microsoft.com/en-us/library/ms235330.aspx + EXTLIBS = user32.lib advapi32.lib shell32.lib wininet.lib ws2_32.lib invalidcontinue.obj kernel32.lib ntdll.lib PTHREAD_LIBS = lib = - BASIC_CFLAGS += -DPROTECT_NTFS_DEFAULT=1 + BASIC_CFLAGS += $(vcpkg_inc) $(sdk_includes) $(msvc_includes) ifndef DEBUG - BASIC_CFLAGS += -GL -Os -MD - BASIC_LDFLAGS += -LTCG + BASIC_CFLAGS += $(vcpkg_rel_lib) +else + BASIC_CFLAGS += $(vcpkg_dbg_lib) +endif + BASIC_CFLAGS += $(sdk_libs) $(msvc_libs) + + # Optionally enable memory leak reporting. + # BASIC_CLFAGS += -DUSE_MSVC_CRTDBG + BASIC_CFLAGS += -DPROTECT_NTFS_DEFAULT=1 + # Always give "-Zi" to the compiler and "-debug" to linker (even in + # release mode) to force a PDB to be generated (like RelWithDebInfo). 
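+	# (compat/vcbuild/scripts/clink.pl additionally passes "-Fd<object>.pdb"
+	# per compiled object, so every foo.o gets a matching foo.o.pdb.)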
+ BASIC_CFLAGS += -Zi + BASIC_LDFLAGS += -debug + +ifdef NO_SAFESEH + LDFLAGS += -SAFESEH:NO +endif + +ifndef DEBUG + BASIC_CFLAGS += -GL -Gy -O2 -Oy- -MD -DNDEBUG + BASIC_LDFLAGS += -release -LTCG /OPT:REF /OPT:ICF /INCREMENTAL:NO /DEBUGTYPE:CV,FIXUP AR += -LTCG else - BASIC_CFLAGS += -Zi -MDd + BASIC_CFLAGS += -MDd -DDEBUG -D_DEBUG endif X = .exe + +compat/msvc.o: compat/msvc.c compat/mingw.c GIT-CFLAGS endif ifeq ($(uname_S),Interix) NO_INITGROUPS = YesPlease @@ -524,11 +582,12 @@ ifneq (,$(findstring MINGW,$(uname_S))) ETAGS_TARGET = ETAGS NO_POSIX_GOODIES = UnfortunatelyYes DEFAULT_HELP_FORMAT = html + BASIC_LDFLAGS += -municode COMPAT_CFLAGS += -DNOGDI -Icompat -Icompat/win32 COMPAT_CFLAGS += -DSTRIP_EXTENSION=\".exe\" COMPAT_OBJS += compat/mingw.o compat/winansi.o \ compat/win32/pthread.o compat/win32/syslog.o \ - compat/win32/dirent.o + compat/win32/dirent.o compat/win32/fscache.o BASIC_CFLAGS += -DWIN32 -DPROTECT_NTFS_DEFAULT=1 EXTLIBS += -lws2_32 GITLIBS += git.res @@ -551,11 +610,19 @@ else ifeq ($(shell expr "$(uname_R)" : '2\.'),2) # MSys2 prefix = /usr/ + # Enable DEP + BASIC_LDFLAGS += -Wl,--nxcompat + # Enable ASLR (unless debugging) + ifneq (,$(findstring -O,$(CFLAGS))) + BASIC_LDFLAGS += -Wl,--dynamicbase + endif ifeq (MINGW32,$(MSYSTEM)) prefix = /mingw32 + BASIC_LDFLAGS += -Wl,--pic-executable,-e,_mainCRTStartup endif ifeq (MINGW64,$(MSYSTEM)) prefix = /mingw64 + BASIC_LDFLAGS += -Wl,--pic-executable,-e,mainCRTStartup else COMPAT_CFLAGS += -D_USE_32BIT_TIME_T BASIC_LDFLAGS += -Wl,--large-address-aware @@ -594,3 +661,77 @@ ifeq ($(uname_S),QNX) NO_STRCASESTR = YesPlease NO_STRLCPY = YesPlease endif + +vcxproj: + # Require clean work tree + git update-index -q --refresh && \ + git diff-files --quiet && \ + git diff-index --cached --quiet HEAD -- + + # Make .vcxproj files and add them + unset QUIET_GEN QUIET_BUILT_IN; \ + perl contrib/buildsystems/generate -g Vcxproj + git add -f git.sln {*,*/lib,t/helper/*}/*.vcxproj + + # Generate the LinkOrCopyBuiltins.targets file + (echo '' && \ + echo ' ' && \ + for name in $(BUILT_INS);\ + do \ + echo ' '; \ + done && \ + for name in $(REMOTE_CURL_ALIASES); \ + do \ + echo ' '; \ + done && \ + echo ' ' && \ + echo '') >git/LinkOrCopyBuiltins.targets + git add -f git/LinkOrCopyBuiltins.targets + + # Add command-list.h + $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 command-list.h + git add -f command-list.h + + # Add scripts + rm -f perl/perl.mak + $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 \ + $(SCRIPT_LIB) $(SCRIPT_SH_GEN) $(SCRIPT_PERL_GEN) + # Strip out the sane tool path, needed only for building + sed -i '/^git_broken_path_fix ".*/d' git-sh-setup + git add -f $(SCRIPT_LIB) $(SCRIPT_SH_GEN) $(SCRIPT_PERL_GEN) + + # Add Perl module + $(MAKE) $(LIB_PERL_GEN) + git add -f perl/build + + # Add bin-wrappers, for testing + rm -rf bin-wrappers/ + $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 $(test_bindir_programs) + # Ensure that the GIT_EXEC_PATH is a Unix-y one, and that the absolute + # path of the repository is not hard-coded (GIT_EXEC_PATH will be set + # by test-lib.sh according to the current setup) + sed -i -e 's/^\(GIT_EXEC_PATH\)=.*/test -n "$${\1##*:*}" ||\ + \1="$$(cygpath -u "$$\1")"/' \ + -e "s|'$$(pwd)|\"\$$GIT_EXEC_PATH\"'|g" bin-wrappers/* + # Ensure that test-* helpers find the .dll files copied to top-level + sed -i 's|^PATH=.*|&:"$$GIT_EXEC_PATH"|' bin-wrappers/test-* + # We do not want to force hard-linking builtins + sed -i 's|\(git\)-\([-a-z]*\)\.exe"|\1.exe" \2|g' \ + bin-wrappers/git-{receive-pack,upload-archive} 
+ git add -f $(test_bindir_programs) + # remote-ext is a builtin, but invoked as if it were external + sed 's|receive-pack|remote-ext|g' \ + bin-wrappers/git-remote-ext + git add -f bin-wrappers/git-remote-ext + + # Add templates + $(MAKE) -C templates + git add -f templates/boilerplates.made templates/blt/ + + # Add build options + $(MAKE) MSVC=1 SKIP_VCPKG=1 prefix=/mingw64 GIT-BUILD-OPTIONS + git add -f GIT-BUILD-OPTIONS + + # Commit the whole shebang + git commit -m "Generate Visual Studio solution" \ + -m "Auto-generated by \`$(MAKE)$(MAKEFLAGS) $@\`" diff --git a/connect.c b/connect.c index 24281b6082..d72772a36d 100644 --- a/connect.c +++ b/connect.c @@ -918,6 +918,10 @@ static enum protocol parse_connect_url(const char *url_orig, char **ret_host, if (protocol == PROTO_LOCAL) path = end; + else if (protocol == PROTO_FILE && *host != '/' && + !has_dos_drive_prefix(host) && + offset_1st_component(host - 2) > 1) + path = host - 2; /* include the leading "//" */ else if (protocol == PROTO_FILE && has_dos_drive_prefix(end)) path = end; /* "file://$(pwd)" may be "file://C:/projects/repo" */ else diff --git a/contrib/buildsystems/Generators.pm b/contrib/buildsystems/Generators.pm index 408ef714b8..aa4cbaa2ad 100644 --- a/contrib/buildsystems/Generators.pm +++ b/contrib/buildsystems/Generators.pm @@ -17,7 +17,7 @@ BEGIN { $me = dirname($me); if (opendir(D,"$me/Generators")) { foreach my $gen (readdir(D)) { - next if ($gen =~ /^\.\.?$/); + next unless ($gen =~ /\.pm$/); require "${me}/Generators/$gen"; $gen =~ s,\.pm,,; push(@AVAILABLE, $gen); diff --git a/contrib/buildsystems/Generators/Vcproj.pm b/contrib/buildsystems/Generators/Vcproj.pm index cfa74adcc2..737647e76a 100644 --- a/contrib/buildsystems/Generators/Vcproj.pm +++ b/contrib/buildsystems/Generators/Vcproj.pm @@ -3,6 +3,7 @@ require Exporter; use strict; use vars qw($VERSION); +use Digest::SHA qw(sha256_hex); our $VERSION = '1.00'; our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE); @@ -12,59 +13,12 @@ BEGIN { push @EXPORT_OK, qw(generate); } -my $guid_index = 0; -my @GUIDS = ( - "{E07B9989-2BF7-4F21-8918-BE22BA467AC3}", - "{278FFB51-0296-4A44-A81A-22B87B7C3592}", - "{7346A2C4-F0FD-444F-9EBE-1AF23B2B5650}", - "{67F421AC-EB34-4D49-820B-3196807B423F}", - "{385DCFE1-CC8C-4211-A451-80FCFC31CA51}", - "{97CC46C5-D2CC-4D26-B634-E75792B79916}", - "{C7CE21FE-6EF8-4012-A5C7-A22BCEDFBA11}", - "{51575134-3FDF-42D1-BABD-3FB12669C6C9}", - "{0AE195E4-9823-4B87-8E6F-20C5614AF2FF}", - "{4B918255-67CA-43BB-A46C-26704B666E6B}", - "{18CCFEEF-C8EE-4CC1-A265-26F95C9F4649}", - "{5D5D90FA-01B7-4973-AFE5-CA88C53AC197}", - "{1F054320-036D-49E1-B384-FB5DF0BC8AC0}", - "{7CED65EE-F2D9-4171-825B-C7D561FE5786}", - "{8D341679-0F07-4664-9A56-3BA0DE88B9BC}", - "{C189FEDC-2957-4BD7-9FA4-7622241EA145}", - "{66844203-1B9F-4C53-9274-164FFF95B847}", - "{E4FEA145-DECC-440D-AEEA-598CF381FD43}", - "{73300A8E-C8AC-41B0-B555-4F596B681BA7}", - "{873FDEB1-D01D-40BF-A1BF-8BBC58EC0F51}", - "{7922C8BE-76C5-4AC6-8BF7-885C0F93B782}", - "{E245D370-308B-4A49-BFC1-1E527827975F}", - "{F6FA957B-66FC-4ED7-B260-E59BBE4FE813}", - "{E6055070-0198-431A-BC49-8DB6CEE770AE}", - "{54159234-C3EB-43DA-906B-CE5DA5C74654}", - "{594CFC35-0B60-46F6-B8EF-9983ACC1187D}", - "{D93FCAB7-1F01-48D2-B832-F761B83231A5}", - "{DBA5E6AC-E7BE-42D3-8703-4E787141526E}", - "{6171953F-DD26-44C7-A3BE-CC45F86FC11F}", - "{9E19DDBE-F5E4-4A26-A2FE-0616E04879B8}", - "{AE81A615-99E3-4885-9CE0-D9CAA193E867}", - "{FBF4067E-1855-4F6C-8BCD-4D62E801A04D}", - "{17007948-6593-4AEB-8106-F7884B4F2C19}", - 
"{199D4C8D-8639-4DA6-82EF-08668C35DEE0}", - "{E085E50E-C140-4CF3-BE4B-094B14F0DDD6}", - "{00785268-A9CC-4E40-AC29-BAC0019159CE}", - "{4C06F56A-DCDB-46A6-B67C-02339935CF12}", - "{3A62D3FD-519E-4EC9-8171-D2C1BFEA022F}", - "{3A62D3FD-519E-4EC9-8171-D2C1BFEA022F}", - "{9392EB58-D7BA-410B-B1F0-B2FAA6BC89A7}", - "{2ACAB2D5-E0CE-4027-BCA0-D78B2D7A6C66}", - "{86E216C3-43CE-481A-BCB2-BE5E62850635}", - "{FB631291-7923-4B91-9A57-7B18FDBB7A42}", - "{0A176EC9-E934-45B8-B87F-16C7F4C80039}", - "{DF55CA80-46E8-4C53-B65B-4990A23DD444}", - "{3A0F9895-55D2-4710-BE5E-AD7498B5BF44}", - "{294BDC5A-F448-48B6-8110-DD0A81820F8C}", - "{4B9F66E9-FAC9-47AB-B1EF-C16756FBFD06}", - "{72EA49C6-2806-48BD-B81B-D4905102E19C}", - "{5728EB7E-8929-486C-8CD5-3238D060E768}" -); +sub generate_guid ($) { + my $hex = sha256_hex($_[0]); + $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/; + $hex =~ tr/a-z/A-Z/; + return $hex; +} sub generate { my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_; @@ -92,9 +46,8 @@ sub createLibProject { $target =~ s/\//_/g; $target =~ s/\.a//; - my $uuid = $GUIDS[$guid_index]; + my $uuid = generate_guid($libname); $$build_structure{"LIBS_${target}_GUID"} = $uuid; - $guid_index += 1; my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"LIBS_${libname}_SOURCES"}})); my @sources; @@ -106,6 +59,8 @@ sub createLibProject { my $includes= join(";", sort(map(""$rel_dir\\$_"", @{$$build_structure{"LIBS_${libname}_INCLUDES"}}))); my $cflags = join(" ", sort(@{$$build_structure{"LIBS_${libname}_CFLAGS"}})); $cflags =~ s/\"/"/g; + $cflags =~ s//>/g; my $cflags_debug = $cflags; $cflags_debug =~ s/-MT/-MTd/; @@ -127,6 +82,8 @@ sub createLibProject { $defines =~ s/-D//g; $defines =~ s/\"/\\"/g; + $defines =~ s//>/g; $defines =~ s/\'//g; $includes =~ s/-I//g; mkdir "$target" || die "Could not create the directory $target for lib project!\n"; @@ -162,9 +119,6 @@ sub createLibProject { - @@ -228,9 +182,6 @@ sub createLibProject { - @@ -311,9 +262,8 @@ sub createAppProject { $target =~ s/\//_/g; $target =~ s/\.exe//; - my $uuid = $GUIDS[$guid_index]; + my $uuid = generate_guid($appname); $$build_structure{"APPS_${target}_GUID"} = $uuid; - $guid_index += 1; my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"APPS_${appname}_SOURCES"}})); my @sources; @@ -325,6 +275,8 @@ sub createAppProject { my $includes= join(";", sort(map(""$rel_dir\\$_"", @{$$build_structure{"APPS_${appname}_INCLUDES"}}))); my $cflags = join(" ", sort(@{$$build_structure{"APPS_${appname}_CFLAGS"}})); $cflags =~ s/\"/"/g; + $cflags =~ s//>/g; my $cflags_debug = $cflags; $cflags_debug =~ s/-MT/-MTd/; @@ -351,6 +303,8 @@ sub createAppProject { $defines =~ s/-D//g; $defines =~ s/\"/\\"/g; + $defines =~ s//>/g; $defines =~ s/\'//g; $defines =~ s/\\\\/\\/g; $includes =~ s/-I//g; @@ -387,9 +341,6 @@ sub createAppProject { - @@ -458,9 +409,6 @@ sub createAppProject { - @@ -561,20 +509,18 @@ sub createGlueProject { foreach (@apps) { $_ =~ s/\//_/g; $_ =~ s/\.exe//; - push(@tmp, $_); + if ($_ eq "git" ) { + unshift(@tmp, $_); + } else { + push(@tmp, $_); + } } @apps = @tmp; open F, ">git.sln" || die "Could not open git.sln for writing!\n"; binmode F, ":crlf"; print F "$SLN_HEAD"; - foreach (@libs) { - my $libname = $_; - my $uuid = $build_structure{"LIBS_${libname}_GUID"}; - print F "$SLN_PRE"; - print F "\"${libname}\", \"${libname}\\${libname}.vcproj\", \"${uuid}\""; - print F "$SLN_POST"; - } + my $uuid_libgit = $build_structure{"LIBS_libgit_GUID"}; my $uuid_xdiff_lib = $build_structure{"LIBS_xdiff_lib_GUID"}; foreach (@apps) { 
@@ -588,6 +534,13 @@ sub createGlueProject { print F " EndProjectSection"; print F "$SLN_POST"; } + foreach (@libs) { + my $libname = $_; + my $uuid = $build_structure{"LIBS_${libname}_GUID"}; + print F "$SLN_PRE"; + print F "\"${libname}\", \"${libname}\\${libname}.vcproj\", \"${uuid}\""; + print F "$SLN_POST"; + } print F << "EOM"; Global @@ -599,17 +552,17 @@ EOM print F << "EOM"; GlobalSection(ProjectConfigurationPlatforms) = postSolution EOM - foreach (@libs) { - my $libname = $_; - my $uuid = $build_structure{"LIBS_${libname}_GUID"}; + foreach (@apps) { + my $appname = $_; + my $uuid = $build_structure{"APPS_${appname}_GUID"}; print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n"; print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n"; print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n"; print F "\t\t${uuid}.Release|Win32.Build.0 = Release|Win32\n"; } - foreach (@apps) { - my $appname = $_; - my $uuid = $build_structure{"APPS_${appname}_GUID"}; + foreach (@libs) { + my $libname = $_; + my $uuid = $build_structure{"LIBS_${libname}_GUID"}; print F "\t\t${uuid}.Debug|Win32.ActiveCfg = Debug|Win32\n"; print F "\t\t${uuid}.Debug|Win32.Build.0 = Debug|Win32\n"; print F "\t\t${uuid}.Release|Win32.ActiveCfg = Release|Win32\n"; diff --git a/contrib/buildsystems/Generators/Vcxproj.pm b/contrib/buildsystems/Generators/Vcxproj.pm new file mode 100644 index 0000000000..4bdb8008d1 --- /dev/null +++ b/contrib/buildsystems/Generators/Vcxproj.pm @@ -0,0 +1,383 @@ +package Generators::Vcxproj; +require Exporter; + +use strict; +use vars qw($VERSION); +use Digest::SHA qw(sha256_hex); + +our $VERSION = '1.00'; +our(@ISA, @EXPORT, @EXPORT_OK, @AVAILABLE); +@ISA = qw(Exporter); + +BEGIN { + push @EXPORT_OK, qw(generate); +} + +sub generate_guid ($) { + my $hex = sha256_hex($_[0]); + $hex =~ s/^(.{8})(.{4})(.{4})(.{4})(.{12}).*/{$1-$2-$3-$4-$5}/; + $hex =~ tr/a-z/A-Z/; + return $hex; +} + +sub generate { + my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_; + my @libs = @{$build_structure{"LIBS"}}; + foreach (@libs) { + createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 1); + } + + my @apps = @{$build_structure{"APPS"}}; + foreach (@apps) { + createProject($_, $git_dir, $out_dir, $rel_dir, \%build_structure, 0); + } + + createGlueProject($git_dir, $out_dir, $rel_dir, %build_structure); + return 0; +} + +sub createProject { + my ($name, $git_dir, $out_dir, $rel_dir, $build_structure, $static_library) = @_; + my $label = $static_library ? "lib" : "app"; + my $prefix = $static_library ? "LIBS_" : "APPS_"; + my $config_type = $static_library ? "StaticLibrary" : "Application"; + print "Generate $name vcxproj $label project\n"; + my $cdup = $name; + $cdup =~ s/[^\/]+/../g; + $cdup =~ s/\//\\/g; + $rel_dir = $rel_dir eq "." ? 
$cdup : "$cdup\\$rel_dir"; + $rel_dir =~ s/\//\\/g; + + my $target = $name; + if ($static_library) { + $target =~ s/\.a//; + } else { + $target =~ s/\.exe//; + } + + my $uuid = generate_guid($name); + $$build_structure{"$prefix${target}_GUID"} = $uuid; + my $vcxproj = $target; + $vcxproj =~ s/(.*\/)?(.*)/$&\/$2.vcxproj/; + $vcxproj =~ s/([^\/]*)(\/lib)\/(lib.vcxproj)/$1$2\/$1_$3/; + $$build_structure{"$prefix${target}_VCXPROJ"} = $vcxproj; + + my @srcs = sort(map("$rel_dir\\$_", @{$$build_structure{"$prefix${name}_SOURCES"}})); + my @sources; + foreach (@srcs) { + $_ =~ s/\//\\/g; + push(@sources, $_); + } + my $defines = join(";", sort(@{$$build_structure{"$prefix${name}_DEFINES"}})); + my $includes= join(";", sort(map { s/^-I//; s/\//\\/g; File::Spec->file_name_is_absolute($_) ? $_ : "$rel_dir\\$_" } @{$$build_structure{"$prefix${name}_INCLUDES"}})); + my $cflags = join(" ", sort(map { s/^-[GLMOWZ].*//; s/.* .*/"$&"/; $_; } @{$$build_structure{"$prefix${name}_CFLAGS"}})); + $cflags =~ s//>/g; + + my $libs_release = "\n "; + my $libs_debug = "\n "; + if (!$static_library) { + $libs_release = join(";", sort(grep /^(?!libgit\.lib|xdiff\/lib\.lib|vcs-svn\/lib\.lib)/, @{$$build_structure{"$prefix${name}_LIBS"}})); + $libs_debug = $libs_release; + $libs_debug =~ s/zlib\.lib/zlibd\.lib/; + } + + $defines =~ s/-D//g; + $defines =~ s//>/g; + $defines =~ s/\'//g; + + die "Could not create the directory $target for $label project!\n" unless (-d "$target" || mkdir "$target"); + + open F, ">$vcxproj" or die "Could not open $vcxproj for writing!\n"; + binmode F, ":crlf :utf8"; + print F chr(0xFEFF); + print F << "EOM"; + + + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + + $uuid + Win32Proj + x86-windows + x64-windows + $cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch) + \$(VCPKGArchDirectory)\\debug\\bin + \$(VCPKGArchDirectory)\\debug\\lib + \$(VCPKGArchDirectory)\\bin + \$(VCPKGArchDirectory)\\lib + \$(VCPKGArchDirectory)\\include + $libs_debug + $libs_release + + + + true + true + + + false + true + + + $config_type + v140 + + ..\\ + + + + + + + + + + + + + false + true + + + + $cflags %(AdditionalOptions) + $cdup;$cdup\\compat;$cdup\\compat\\regex;$cdup\\compat\\win32;$cdup\\compat\\poll;$cdup\\compat\\vcbuild\\include;\$(VCPKGIncludeDirectory);%(AdditionalIncludeDirectories) + + true + OnlyExplicitInline + + ProgramDatabase + + + true + + + \$(VCPKGLibDirectory);%(AdditionalLibraryDirectories) + \$(VCPKGLibs);\$(AdditionalDependencies) + invalidcontinue.obj %(AdditionalOptions) + $cdup\\compat\\win32\\git.manifest + Console + +EOM + if ($target eq 'libgit') { + print F << "EOM"; + + Initialize VCPKG + del "$cdup\\compat\\vcbuild\\vcpkg" + call "$cdup\\compat\\vcbuild\\vcpkg_install.bat" + +EOM + } + print F << "EOM"; + + + + MachineX86 + + + + + Disabled + WIN32;_DEBUG;$defines;%(PreprocessorDefinitions) + MultiThreadedDebugDLL + + + true + + + + + MaxSpeed + true + WIN32;NDEBUG;$defines;%(PreprocessorDefinitions) + MultiThreadedDLL + true + Speed + + + true + true + true + + + +EOM + foreach(@sources) { + print F << "EOM"; + +EOM + } + print F << "EOM"; + +EOM + if (!$static_library || $target =~ 'vcs-svn') { + my $uuid_libgit = $$build_structure{"LIBS_libgit_GUID"}; + my $uuid_xdiff_lib = $$build_structure{"LIBS_xdiff/lib_GUID"}; + + print F << "EOM"; + + + $uuid_libgit + false + + + $uuid_xdiff_lib + false + +EOM + if ($name =~ /(test-(line-buffer|svn-fe)|^git-remote-testsvn)\.exe$/) { + my $uuid_vcs_svn_lib = 
$$build_structure{"LIBS_vcs-svn/lib_GUID"}; + print F << "EOM"; + + $uuid_vcs_svn_lib + false + +EOM + } + print F << "EOM"; + +EOM + } + print F << "EOM"; + +EOM + if (!$static_library) { + print F << "EOM"; + + + + + + +EOM + } + if ($target eq 'git') { + print F " \n"; + } + print F << "EOM"; + +EOM + close F; +} + +sub createGlueProject { + my ($git_dir, $out_dir, $rel_dir, %build_structure) = @_; + print "Generate solutions file\n"; + $rel_dir = "..\\$rel_dir"; + $rel_dir =~ s/\//\\/g; + my $SLN_HEAD = "Microsoft Visual Studio Solution File, Format Version 11.00\n# Visual Studio 2010\n"; + my $SLN_PRE = "Project(\"{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}\") = "; + my $SLN_POST = "\nEndProject\n"; + + my @libs = @{$build_structure{"LIBS"}}; + my @tmp; + foreach (@libs) { + $_ =~ s/\.a//; + push(@tmp, $_); + } + @libs = @tmp; + + my @apps = @{$build_structure{"APPS"}}; + @tmp = (); + foreach (@apps) { + $_ =~ s/\.exe//; + if ($_ eq "git" ) { + unshift(@tmp, $_); + } else { + push(@tmp, $_); + } + } + @apps = @tmp; + + open F, ">git.sln" || die "Could not open git.sln for writing!\n"; + binmode F, ":crlf :utf8"; + print F chr(0xFEFF); + print F "$SLN_HEAD"; + + foreach (@apps) { + my $appname = $_; + my $uuid = $build_structure{"APPS_${appname}_GUID"}; + print F "$SLN_PRE"; + my $vcxproj = $build_structure{"APPS_${appname}_VCXPROJ"}; + $vcxproj =~ s/\//\\/g; + $appname =~ s/.*\///; + print F "\"${appname}\", \"${vcxproj}\", \"${uuid}\""; + print F "$SLN_POST"; + } + foreach (@libs) { + my $libname = $_; + my $uuid = $build_structure{"LIBS_${libname}_GUID"}; + print F "$SLN_PRE"; + my $vcxproj = $build_structure{"LIBS_${libname}_VCXPROJ"}; + $vcxproj =~ s/\//\\/g; + $libname =~ s/\//_/g; + print F "\"${libname}\", \"${vcxproj}\", \"${uuid}\""; + print F "$SLN_POST"; + } + + print F << "EOM"; +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection +EOM + print F << "EOM"; + GlobalSection(ProjectConfigurationPlatforms) = postSolution +EOM + foreach (@apps) { + my $appname = $_; + my $uuid = $build_structure{"APPS_${appname}_GUID"}; + print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n"; + print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n"; + print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n"; + print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n"; + print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n"; + print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n"; + print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n"; + print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n"; + } + foreach (@libs) { + my $libname = $_; + my $uuid = $build_structure{"LIBS_${libname}_GUID"}; + print F "\t\t${uuid}.Debug|x64.ActiveCfg = Debug|x64\n"; + print F "\t\t${uuid}.Debug|x64.Build.0 = Debug|x64\n"; + print F "\t\t${uuid}.Debug|x86.ActiveCfg = Debug|Win32\n"; + print F "\t\t${uuid}.Debug|x86.Build.0 = Debug|Win32\n"; + print F "\t\t${uuid}.Release|x64.ActiveCfg = Release|x64\n"; + print F "\t\t${uuid}.Release|x64.Build.0 = Release|x64\n"; + print F "\t\t${uuid}.Release|x86.ActiveCfg = Release|Win32\n"; + print F "\t\t${uuid}.Release|x86.Build.0 = Release|Win32\n"; + } + + print F << "EOM"; + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal +EOM + close F; +} + +1; diff --git a/contrib/buildsystems/engine.pl b/contrib/buildsystems/engine.pl index 
23da787dc5..fba8a3f056 100755 --- a/contrib/buildsystems/engine.pl +++ b/contrib/buildsystems/engine.pl @@ -12,6 +12,7 @@ use File::Basename; use File::Spec; use Cwd; use Generators; +use Text::ParseWords; my (%build_structure, %compile_options, @makedry); my $out_dir = getcwd(); @@ -31,6 +32,7 @@ generate usage: -g --gen Specify the buildsystem generator (default: $gen) Available: $genlist -o --out Specify output directory generation (default: .) + --make-out Write the output of GNU Make into a file -i --in Specify input file, instead of running GNU Make -h,-? --help This help EOM @@ -38,6 +40,7 @@ EOM } # Parse command-line options +my $make_out; while (@ARGV) { my $arg = shift @ARGV; if ("$arg" eq "-h" || "$arg" eq "--help" || "$arg" eq "-?") { @@ -45,6 +48,8 @@ while (@ARGV) { exit(0); } elsif("$arg" eq "--out" || "$arg" eq "-o") { $out_dir = shift @ARGV; + } elsif("$arg" eq "--make-out") { + $make_out = shift @ARGV; } elsif("$arg" eq "--gen" || "$arg" eq "-g") { $gen = shift @ARGV; } elsif("$arg" eq "--in" || "$arg" eq "-i") { @@ -52,6 +57,8 @@ while (@ARGV) { open(F, "<$infile") || die "Couldn't open file $infile"; @makedry = ; close(F); + } else { + die "Unknown option: " . $arg; } } @@ -72,7 +79,19 @@ Running GNU Make to figure out build structure... EOM # Pipe a make --dry-run into a variable, if not already loaded from file -@makedry = `cd $git_dir && make -n MSVC=1 V=1 2>/dev/null` if !@makedry; +# Capture the make dry stderr to file for review (will be empty for a release build). + +my $ErrsFile = "msvc-build-makedryerrors.txt"; +@makedry = `make -C $git_dir -n MSVC=1 SKIP_VCPKG=1 V=1 2>$ErrsFile` +if !@makedry; +# test for an empty Errors file and remove it +unlink $ErrsFile if -f -z $ErrsFile; + +if (defined $make_out) { + open OUT, ">" . $make_out; + print OUT @makedry; + close OUT; +} # Parse the make output into usable info parseMakeOutput(); @@ -140,6 +159,12 @@ sub parseMakeOutput next; } + if ($text =~ /^(mkdir|msgfmt) /) { + # options to the Portable Object translations + # the line "mkdir ... && msgfmt ..." 
contains no linker options + next; + } + if($text =~ / -c /) { # compilation handleCompileLine($text, $line); @@ -231,7 +256,7 @@ sub removeDuplicates sub handleCompileLine { my ($line, $lineno) = @_; - my @parts = split(' ', $line); + my @parts = shellwords($line); my $sourcefile; shift(@parts); # ignore cmd while (my $part = shift @parts) { @@ -265,7 +290,7 @@ sub handleLibLine my (@objfiles, @lflags, $libout, $part); # kill cmd and rm 'prefix' $line =~ s/^rm -f .* && .* rcs //; - my @parts = split(' ', $line); + my @parts = shellwords($line); while ($part = shift @parts) { if ($part =~ /^-/) { push(@lflags, $part); @@ -282,7 +307,7 @@ sub handleLibLine # exit(1); foreach (@objfiles) { my $sourcefile = $_; - $sourcefile =~ s/\.o/.c/; + $sourcefile =~ s/\.o$/.c/; push(@sources, $sourcefile); push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}}); push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}}); @@ -306,7 +331,7 @@ sub handleLinkLine { my ($line, $lineno) = @_; my (@objfiles, @lflags, @libs, $appout, $part); - my @parts = split(' ', $line); + my @parts = shellwords($line); shift(@parts); # ignore cmd while ($part = shift @parts) { if ($part =~ /^-IGNORE/) { @@ -317,26 +342,36 @@ sub handleLinkLine $appout = shift @parts; } elsif ("$part" eq "-lz") { push(@libs, "zlib.lib"); - } elsif ("$part" eq "-lcrypto") { + } elsif ("$part" eq "-lcrypto") { push(@libs, "libeay32.lib"); } elsif ("$part" eq "-lssl") { push(@libs, "ssleay32.lib"); - } elsif ($part =~ /^-/) { + } elsif ("$part" eq "-lcurl") { + push(@libs, "libcurl.lib"); + } elsif ("$part" eq "-lexpat") { + push(@libs, "expat.lib"); + } elsif ("$part" eq "-liconv") { + push(@libs, "libiconv.lib"); + } elsif ($part =~ /^[-\/]/) { push(@lflags, $part); } elsif ($part =~ /\.(a|lib)$/) { $part =~ s/\.a$/.lib/; push(@libs, $part); - } elsif ($part =~ /\.(o|obj)$/) { + } elsif ($part eq 'invalidcontinue.obj') { + # ignore - known to MSVC + } elsif ($part =~ /\.o$/) { push(@objfiles, $part); + } elsif ($part =~ /\.obj$/) { + # do nothing, 'make' should not be producing .obj, only .o files } else { - die "Unhandled lib option @ line $lineno: $part"; + die "Unhandled link option @ line $lineno: $part"; } } # print "AppOut: '$appout'\nLFlags: @lflags\nLibs : @libs\nOfiles: @objfiles\n"; # exit(1); foreach (@objfiles) { my $sourcefile = $_; - $sourcefile =~ s/\.o/.c/; + $sourcefile =~ s/\.o$/.c/; push(@sources, $sourcefile); push(@cflags, @{$compile_options{"${sourcefile}_CFLAGS"}}); push(@defines, @{$compile_options{"${sourcefile}_DEFINES"}}); diff --git a/credential.c b/credential.c index 62be651b03..e9108a9e8a 100644 --- a/credential.c +++ b/credential.c @@ -136,7 +136,9 @@ static void credential_getpass(struct credential *c) { if (!c->username) c->username = credential_ask_one("Username", c, - PROMPT_ASKPASS|PROMPT_ECHO); + (getenv("GIT_ASKPASS") ? 
+ PROMPT_ASKPASS : 0) | + PROMPT_ECHO); if (!c->password) c->password = credential_ask_one("Password", c, PROMPT_ASKPASS); diff --git a/diffcore-rename.c b/diffcore-rename.c index 07bd34b631..5bfc5f6c22 100644 --- a/diffcore-rename.c +++ b/diffcore-rename.c @@ -82,6 +82,18 @@ static struct diff_rename_src *register_rename_src(struct diff_filepair *p) first = 0; last = rename_src_nr; + + if (last > 0) { + struct diff_rename_src *src = &(rename_src[last-1]); + int cmp = strcmp(one->path, src->p->one->path); + if (!cmp) + return src; + if (cmp > 0) { + first = last; + goto append_it; + } + } + while (last > first) { int next = (last + first) >> 1; struct diff_rename_src *src = &(rename_src[next]); @@ -95,6 +107,7 @@ static struct diff_rename_src *register_rename_src(struct diff_filepair *p) first = next+1; } +append_it: /* insert to make it at "first" */ ALLOC_GROW(rename_src, rename_src_nr + 1, rename_src_alloc); rename_src_nr++; diff --git a/git-compat-util.h b/git-compat-util.h index 09b0102cae..3ea303cfca 100644 --- a/git-compat-util.h +++ b/git-compat-util.h @@ -1,6 +1,15 @@ #ifndef GIT_COMPAT_UTIL_H #define GIT_COMPAT_UTIL_H +#ifdef USE_MSVC_CRTDBG +/* + * For these to work they must appear very early in each + * file -- before most of the standard header files. + */ +#include +#include +#endif + #define _FILE_OFFSET_BITS 64 @@ -198,8 +207,10 @@ #if defined(__MINGW32__) /* pull in Windows compatibility stuff */ #include "compat/mingw.h" +#include "compat/win32/fscache.h" #elif defined(_MSC_VER) #include "compat/msvc.h" +#include "compat/win32/fscache.h" #else #include #include @@ -1235,6 +1246,21 @@ static inline int is_missing_file_error(int errno_) return (errno_ == ENOENT || errno_ == ENOTDIR); } +/* + * Enable/disable a read-only cache for file system data on platforms that + * support it. + * + * Implementing a live-cache is complicated and requires special platform + * support (inotify, ReadDirectoryChangesW...). enable_fscache shall be used + * to mark sections of git code that extensively read from the file system + * without modifying anything. Implementations can use this to cache e.g. stat + * data or even file content without the need to synchronize with the file + * system. + */ +#ifndef enable_fscache +#define enable_fscache(x) /* noop */ +#endif + extern int cmd_main(int, const char **); /* diff --git a/git-cvsexportcommit.perl b/git-cvsexportcommit.perl index d13f02da95..fc00d5946a 100755 --- a/git-cvsexportcommit.perl +++ b/git-cvsexportcommit.perl @@ -431,6 +431,7 @@ END sub safe_pipe_capture { my @output; if (my $pid = open my $child, '-|') { + binmode($child, ":crlf"); @output = (<$child>); close $child or die join(' ',@_).": $! $?"; } else { diff --git a/git.c b/git.c index 2f604a41ea..a4a6942e16 100644 --- a/git.c +++ b/git.c @@ -699,6 +699,31 @@ static int run_argv(int *argcp, const char ***argv) */ if (!done_alias) handle_builtin(*argcp, *argv); + else if (get_builtin(**argv)) { + struct argv_array args = ARGV_ARRAY_INIT; + int i; + + if (get_super_prefix()) + die("%s doesn't support --super-prefix", **argv); + + commit_pager_choice(); + + argv_array_push(&args, "git"); + for (i = 0; i < *argcp; i++) + argv_array_push(&args, (*argv)[i]); + + trace_argv_printf(args.argv, "trace: exec:"); + + /* + * if we fail because the command is not found, it is + * OK to return. Otherwise, we just pass along the status code. 
+ */ + i = run_command_v_opt(args.argv, RUN_SILENT_EXEC_FAILURE | + RUN_CLEAN_ON_EXIT); + if (i >= 0 || errno != ENOENT) + exit(i); + die("could not execute builtin %s", **argv); + } /* .. then try the external ones */ execv_dashed_external(*argv); diff --git a/path.c b/path.c index dc3294c71e..9b15a8483c 100644 --- a/path.c +++ b/path.c @@ -11,6 +11,7 @@ #include "path.h" #include "packfile.h" #include "object-store.h" +#include "exec-cmd.h" static int get_st_mode_bits(const char *path, int *mode) { @@ -711,6 +712,10 @@ char *expand_user_path(const char *path, int real_home) if (path == NULL) goto return_null; +#ifdef __MINGW32__ + if (path[0] == '/') + return system_path(path + 1); +#endif if (path[0] == '~') { const char *first_slash = strchrnul(path, '/'); const char *username = path + 1; diff --git a/preload-index.c b/preload-index.c index c7dc3f2b9f..4ec7feedd2 100644 --- a/preload-index.c +++ b/preload-index.c @@ -119,6 +119,7 @@ void preload_index(struct index_state *index, pthread_mutex_init(&pd.mutex, NULL); } + enable_fscache(1); for (i = 0; i < threads; i++) { struct thread_data *p = data+i; int err; @@ -144,6 +145,7 @@ void preload_index(struct index_state *index, stop_progress(&pd.progress); trace_performance_leave("preload index"); + enable_fscache(0); } int read_index_preload(struct index_state *index, diff --git a/send-pack.c b/send-pack.c index f692686770..9ff928e2f7 100644 --- a/send-pack.c +++ b/send-pack.c @@ -38,6 +38,16 @@ int option_parse_push_signed(const struct option *opt, die("bad %s argument: %s", opt->long_name, arg); } +static int config_use_sideband = 1; + +static int send_pack_config(const char *var, const char *value, void *unused) +{ + if (!strcmp("sendpack.sideband", var)) + config_use_sideband = git_config_bool(var, value); + + return 0; +} + static void feed_object(const struct object_id *oid, FILE *fh, int negative) { if (negative && !has_sha1_file(oid->hash)) @@ -391,6 +401,8 @@ int send_pack(struct send_pack_args *args, struct async demux; const char *push_cert_nonce = NULL; + git_config(send_pack_config, NULL); + /* Does the other end support the reporting? 
*/ if (server_supports("report-status")) status_report = 1; @@ -398,7 +410,7 @@ int send_pack(struct send_pack_args *args, allow_deleting_refs = 1; if (server_supports("ofs-delta")) args->use_ofs_delta = 1; - if (server_supports("side-band-64k")) + if (config_use_sideband && server_supports("side-band-64k")) use_sideband = 1; if (server_supports("quiet")) quiet_supported = 1; diff --git a/setup.c b/setup.c index 1be5037f12..7589360e52 100644 --- a/setup.c +++ b/setup.c @@ -39,7 +39,7 @@ static int abspath_part_inside_repo(char *path) off = offset_1st_component(path); /* check if work tree is already the prefix */ - if (wtlen <= len && !strncmp(path, work_tree, wtlen)) { + if (wtlen <= len && !fspathncmp(path, work_tree, wtlen)) { if (path[wtlen] == '/') { memmove(path, path + wtlen + 1, len - wtlen); return 0; } @@ -59,7 +59,7 @@ static int abspath_part_inside_repo(char *path) path++; if (*path == '/') { *path = '\0'; - if (strcmp(real_path(path0), work_tree) == 0) { + if (fspathcmp(real_path(path0), work_tree) == 0) { memmove(path0, path + 1, len - (path - path0)); return 0; } @@ -68,7 +68,7 @@ } /* check whole path */ - if (strcmp(real_path(path0), work_tree) == 0) { + if (fspathcmp(real_path(path0), work_tree) == 0) { *path0 = '\0'; return 0; } @@ -784,7 +784,7 @@ static const char *setup_discovered_git_dir(const char *gitdir, set_git_dir(gitdir); inside_git_dir = 0; inside_work_tree = 1; - if (offset == cwd->len) + if (offset >= cwd->len) return NULL; /* Make "offset" point past the '/' (already the case for root dirs) */ @@ -916,7 +916,7 @@ static enum discovery_result setup_git_directory_gently_1(struct strbuf *dir, const char *env_ceiling_dirs = getenv(CEILING_DIRECTORIES_ENVIRONMENT); struct string_list ceiling_dirs = STRING_LIST_INIT_DUP; const char *gitdirenv; - int ceil_offset = -1, min_offset = has_dos_drive_prefix(dir->buf) ? 3 : 1; + int ceil_offset = -1, min_offset = offset_1st_component(dir->buf); dev_t current_device = 0; int one_filesystem = 1; @@ -944,6 +944,12 @@ static enum discovery_result setup_git_directory_gently_1(struct strbuf *dir, if (ceil_offset < 0) ceil_offset = min_offset - 2; + if (min_offset && min_offset == dir->len && + !is_dir_sep(dir->buf[min_offset - 1])) { + strbuf_addch(dir, '/'); + min_offset++; + } + /* * Test in the following order (relative to the dir): * - .git (file containing "gitdir: <path>") diff --git a/t/.gitignore b/t/.gitignore index 348715f0e4..91cf5772fe 100644 --- a/t/.gitignore +++ b/t/.gitignore @@ -2,3 +2,4 @@ /test-results /.prove /chainlinttmp +/out/ diff --git a/t/README b/t/README index 28711cc508..3f645b083a 100644 --- a/t/README +++ b/t/README @@ -170,6 +170,15 @@ appropriately before running "make". implied by other options like --valgrind and GIT_TEST_INSTALLED. +--no-bin-wrappers:: + By default, the test suite uses the wrappers in + `../bin-wrappers/` to execute `git` and friends. With this option, + `../git` and friends are run directly. This is not recommended + in general, as the wrappers contain safeguards to ensure that no + files from an installed Git are used, but can speed up test runs + especially on platforms where running shell scripts is expensive + (most notably, Windows). + --root=<directory>:: Create "trash" directories used to store all temporary data during testing under <directory>, instead of the t/ directory.
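For illustration only (the test script name and the --root location below are arbitrary examples, not part of the patch), a run that combines the two options documented above might look like this:

	# run one script directly against ../git, skipping the bin-wrappers,
	# and keep its trash directories outside of t/
	cd t &&
	./t0001-init.sh --no-bin-wrappers --root=/tmp/git-trash -v -i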
diff --git a/t/helper/test-date.c b/t/helper/test-date.c index a0837371ab..792a805374 100644 --- a/t/helper/test-date.c +++ b/t/helper/test-date.c @@ -7,6 +7,7 @@ static const char *usage_msg = "\n" " test-tool date parse [date]...\n" " test-tool date approxidate [date]...\n" " test-tool date timestamp [date]...\n" +" test-tool date getnanos [start-nanos]\n" " test-tool date is64bit\n" " test-tool date time_t-is64bit\n"; @@ -82,6 +83,15 @@ static void parse_approx_timestamp(const char **argv, struct timeval *now) } } +static void getnanos(const char **argv, struct timeval *now) +{ + double seconds = getnanotime() / 1.0e9; + + if (*argv) + seconds -= strtod(*argv, NULL); + printf("%lf\n", seconds); +} + int cmd__date(int argc, const char **argv) { struct timeval now; @@ -108,6 +118,8 @@ int cmd__date(int argc, const char **argv) parse_approxidate(argv+1, &now); else if (!strcmp(*argv, "timestamp")) parse_approx_timestamp(argv+1, &now); + else if (!strcmp(*argv, "getnanos")) + getnanos(argv+1, &now); else if (!strcmp(*argv, "is64bit")) return sizeof(timestamp_t) == 8 ? 0 : 1; else if (!strcmp(*argv, "time_t-is64bit")) diff --git a/t/interop/i5500-git-daemon.sh b/t/interop/i5500-git-daemon.sh index 1daf69420b..4d22e42f84 100755 --- a/t/interop/i5500-git-daemon.sh +++ b/t/interop/i5500-git-daemon.sh @@ -37,5 +37,4 @@ test_expect_success "fetch with $VERSION_B" ' test_cmp expect actual ' -stop_git_daemon test_done diff --git a/t/lib-git-daemon.sh b/t/lib-git-daemon.sh index edbea2d986..a896af2284 100644 --- a/t/lib-git-daemon.sh +++ b/t/lib-git-daemon.sh @@ -13,7 +13,6 @@ # # test_expect_success ... # -# stop_git_daemon # test_done test_tristate GIT_TEST_GIT_DAEMON @@ -43,7 +42,7 @@ start_git_daemon() { mkdir -p "$GIT_DAEMON_DOCUMENT_ROOT_PATH" - trap 'code=$?; stop_git_daemon; (exit $code); die' EXIT + test_atexit 'stop_git_daemon' say >&3 "Starting git daemon ..." mkfifo git_daemon_output diff --git a/t/lib-git-p4.sh b/t/lib-git-p4.sh index c27599474c..f4f5d7d296 100644 --- a/t/lib-git-p4.sh +++ b/t/lib-git-p4.sh @@ -74,15 +74,6 @@ cli="$TRASH_DIRECTORY/cli" git="$TRASH_DIRECTORY/git" pidfile="$TRASH_DIRECTORY/p4d.pid" -# Sometimes "prove" seems to hang on exit because p4d is still running -cleanup () { - if test -f "$pidfile" - then - kill -9 $(cat "$pidfile") 2>/dev/null && exit 255 - fi -} -trap cleanup EXIT - # git p4 submit generates a temp file, which will # not get cleaned up if the submission fails. Don't # clutter up /tmp on the test machine. 
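As a rough sketch (the test bodies below are illustrative, not taken from this patch): now that lib-git-daemon.sh and lib-git-p4.sh register their shutdown commands via test_atexit themselves, a test script only starts its daemon and no longer needs an explicit stop/kill step at the end:

	# hypothetical git-p4 test relying on the new test_atexit cleanup
	test_description='example p4 test'
	. ./lib-git-p4.sh
	test_expect_success 'start p4d' '
		start_p4d
	'
	test_expect_success 'clone the depot' '
		git p4 clone --dest="$git" //depot
	'
	test_done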
@@ -141,6 +132,7 @@ start_p4d () { # p4d failed to start return 1 fi + test_atexit kill_p4d # build a p4 user so author@example.com has an entry p4_add_user author diff --git a/t/t0000-basic.sh b/t/t0000-basic.sh index b6566003dd..12cb2679a1 100755 --- a/t/t0000-basic.sh +++ b/t/t0000-basic.sh @@ -138,6 +138,7 @@ check_sub_test_lib_test_err () { ) } +cat >/dev/null <<\DDD test_expect_success 'pretend we have a fully passing test suite' " run_sub_test_lib_test full-pass '3 passing tests' <<-\\EOF && for i in 1 2 3 @@ -824,6 +825,25 @@ test_expect_success 'tests clean up even on failures' " > 1..2 EOF " +DDD + +test_expect_success 'test_atexit is run' " + test_must_fail run_sub_test_lib_test \ + atexit-cleanup 'Run atexit commands' -i <<-\\EOF && + test_expect_success 'tests clean up even after a failure' ' + > ../../clean-atexit && + test_atexit rm ../../clean-atexit && + > ../../also-clean-atexit && + test_atexit rm ../../also-clean-atexit && + > ../../dont-clean-atexit && + (exit 1) + ' + test_done + EOF + test_path_exists dont-clean-atexit && + test_path_is_missing clean-atexit && + test_path_is_missing also-clean-atexit +" test_expect_success 'test_oid setup' ' test_oid_init diff --git a/t/t0001-init.sh b/t/t0001-init.sh index 182da069f1..be69a10d73 100755 --- a/t/t0001-init.sh +++ b/t/t0001-init.sh @@ -453,6 +453,18 @@ test_expect_success 're-init from a linked worktree' ' ) ' +test_expect_success MINGW 'core.hidedotfiles = false' ' + git config --global core.hidedotfiles false && + rm -rf newdir && + ( + sane_unset GIT_DIR GIT_WORK_TREE GIT_CONFIG && + mkdir newdir && + cd newdir && + git init + ) && + ! is_hidden newdir/.git +' + test_expect_success MINGW 'redirect std handles' ' GIT_REDIRECT_STDOUT=output.txt git rev-parse --git-dir && test .git = "$(cat output.txt)" && @@ -462,7 +474,8 @@ test_expect_success MINGW 'redirect std handles' ' GIT_REDIRECT_STDERR="2>&1" \ git rev-parse --git-dir --verify refs/invalid && printf ".git\nfatal: Needed a single revision\n" >expect && - test_cmp expect output.txt + sort output.sorted && + test_cmp expect output.sorted ' test_done diff --git a/t/t0061-run-command.sh b/t/t0061-run-command.sh index 854b494ff8..35bd74d111 100755 --- a/t/t0061-run-command.sh +++ b/t/t0061-run-command.sh @@ -159,7 +159,8 @@ test_trace () { expect="$1" shift GIT_TRACE=1 test-tool run-command "$@" run-command true 2>&1 >/dev/null | \ - sed -e 's/.* run_command: //' -e '/trace: .*/d' >actual && + sed -e 's/.* run_command: //' -e '/trace: .*/d' \ + -e '/RUNTIME_PREFIX requested/d' >actual && echo "$expect true" >expect && test_cmp expect actual } @@ -192,4 +193,14 @@ test_expect_success 'GIT_TRACE with environment variables' ' ) ' +test_expect_success MINGW 'verify curlies are quoted properly' ' + : force the rev-parse through the MSYS2 Bash && + git -c alias.r="!git rev-parse" r -- a{b}c >actual && + cat >expect <<-\EOF && + -- + a{b}c + EOF + test_cmp expect actual +' + test_done diff --git a/t/t2031-checkout-long-paths.sh b/t/t2031-checkout-long-paths.sh new file mode 100755 index 0000000000..f30f8920ca --- /dev/null +++ b/t/t2031-checkout-long-paths.sh @@ -0,0 +1,102 @@ +#!/bin/sh + +test_description='checkout long paths on Windows + +Ensures that Git for Windows can deal with long paths (>260) enabled via core.longpaths' + +. 
./test-lib.sh + +if test_have_prereq !MINGW +then + skip_all='skipping MINGW specific long paths test' + test_done +fi + +test_expect_success setup ' + p=longpathxx && # -> 10 + p=$p$p$p$p$p && # -> 50 + p=$p$p$p$p$p && # -> 250 + + path=${p}/longtestfile && # -> 263 (MAX_PATH = 260) + + blob=$(echo foobar | git hash-object -w --stdin) && + + printf "100644 %s 0\t%s\n" "$blob" "$path" | + git update-index --add --index-info && + git commit -m initial -q +' + +test_expect_success 'checkout of long paths without core.longpaths fails' ' + git config core.longpaths false && + test_must_fail git checkout -f 2>error && + grep -q "Filename too long" error && + test ! -d longpa* +' + +test_expect_success 'checkout of long paths with core.longpaths works' ' + git config core.longpaths true && + git checkout -f && + test_path_is_file longpa*/longtestfile +' + +test_expect_success 'update of long paths' ' + echo frotz >>$(ls longpa*/longtestfile) && + echo $path > expect && + git ls-files -m > actual && + test_cmp expect actual && + git add $path && + git commit -m second && + git grep "frotz" HEAD -- $path +' + +test_expect_success cleanup ' + # bash cannot delete the trash dir if it contains a long path + # lets help cleaning up (unless in debug mode) + if test -z "$debug" + then + rm -rf longpa~1 + fi +' + +# check that the template used in the test won't be too long: +abspath="$(pwd)"/testdir +test ${#abspath} -gt 230 || +test_set_prereq SHORTABSPATH + +test_expect_success SHORTABSPATH 'clean up path close to MAX_PATH' ' + p=/123456789abcdef/123456789abcdef/123456789abcdef/123456789abc/ef && + p=y$p$p$p$p && + subdir="x$(echo "$p" | tail -c $((253 - ${#abspath})) - )" && + # Now, $abspath/$subdir has exactly 254 characters, and is inside CWD + p2="$abspath/$subdir" && + test 254 = ${#p2} && + + # Be careful to overcome path limitations of the MSys tools and split + # the $subdir into two parts. ($subdir2 has to contain 16 chars and a + # slash somewhere following; that is why we asked for abspath <= 230 and + # why we placed a slash near the end of the $subdir template.) + subdir2=${subdir#????????????????*/} && + subdir1=testdir/${subdir%/$subdir2} && + mkdir -p "$subdir1" && + i=0 && + # The most important case is when absolute path is 258 characters long, + # and that will be when i == 4. + while test $i -le 7 + do + mkdir -p $subdir2 && + touch $subdir2/one-file && + mv ${subdir2%%/*} "$subdir1/" && + subdir2=z${subdir2} && + i=$(($i+1)) || + exit 1 + done && + + # now check that git is able to clear the tree: + (cd testdir && + git init && + git config core.longpaths yes && + git clean -fdx) && + test ! -d "$subdir1" +' + +test_done diff --git a/t/t3700-add.sh b/t/t3700-add.sh index 37729ba258..8ee4fc70ad 100755 --- a/t/t3700-add.sh +++ b/t/t3700-add.sh @@ -402,4 +402,11 @@ test_expect_success 'all statuses changed in folder if . 
is given' ' test $(git ls-files --stage | grep ^100755 | wc -l) -eq 0 ' +test_expect_success MINGW 'path is case-insensitive' ' + path="$(pwd)/BLUB" && + touch "$path" && + downcased="$(echo "$path" | tr A-Z a-z)" && + git add "$downcased" +' + test_done diff --git a/t/t5500-fetch-pack.sh b/t/t5500-fetch-pack.sh index 086f2c40f6..f021db44b1 100755 --- a/t/t5500-fetch-pack.sh +++ b/t/t5500-fetch-pack.sh @@ -698,13 +698,22 @@ do # file with scheme for p in file do - test_expect_success "fetch-pack --diag-url $p://$h/$r" ' + test_expect_success !MINGW "fetch-pack --diag-url $p://$h/$r" ' check_prot_path $p://$h/$r $p "/$r" ' + test_expect_success MINGW "fetch-pack --diag-url $p://$h/$r" ' + check_prot_path $p://$h/$r $p "//$h/$r" + ' + test_expect_success MINGW "fetch-pack --diag-url $p:///$r" ' + check_prot_path $p:///$r $p "/$r" + ' # No "/~" -> "~" conversion for file - test_expect_success "fetch-pack --diag-url $p://$h/~$r" ' + test_expect_success !MINGW "fetch-pack --diag-url $p://$h/~$r" ' check_prot_path $p://$h/~$r $p "/~$r" ' + test_expect_success MINGW "fetch-pack --diag-url $p://$h/~$r" ' + check_prot_path $p://$h/~$r $p "//$h/~$r" + ' done # file without scheme for h in nohost nohost:12 [::1] [::1]:23 [ [:aa diff --git a/t/t5505-remote.sh b/t/t5505-remote.sh index d2a2cdd453..b26732a080 100755 --- a/t/t5505-remote.sh +++ b/t/t5505-remote.sh @@ -824,6 +824,7 @@ test_expect_success 'migrate a remote from named file in $GIT_DIR/branches' ' ( cd six && git remote rm origin && + mkdir -p .git/branches && echo "$origin_url" >.git/branches/origin && git remote rename origin origin && test_path_is_missing .git/branches/origin && @@ -838,6 +839,7 @@ test_expect_success 'migrate a remote from named file in $GIT_DIR/branches (2)' ( cd seven && git remote rm origin && + mkdir -p .git/branches && echo "quux#foom" > .git/branches/origin && git remote rename origin origin && test_path_is_missing .git/branches/origin && diff --git a/t/t5516-fetch-push.sh b/t/t5516-fetch-push.sh index 7316365a24..033098793d 100755 --- a/t/t5516-fetch-push.sh +++ b/t/t5516-fetch-push.sh @@ -866,6 +866,7 @@ test_expect_success 'fetch with branches' ' mk_empty testrepo && git branch second $the_first_commit && git checkout second && + mkdir -p testrepo/.git/branches && echo ".." 
> testrepo/.git/branches/branch1 && ( cd testrepo && @@ -879,6 +880,7 @@ test_expect_success 'fetch with branches' ' test_expect_success 'fetch with branches containing #' ' mk_empty testrepo && + mkdir -p testrepo/.git/branches && echo "..#second" > testrepo/.git/branches/branch2 && ( cd testrepo && @@ -893,6 +895,7 @@ test_expect_success 'fetch with branches containing #' ' test_expect_success 'push with branches' ' mk_empty testrepo && git checkout second && + mkdir -p .git/branches && echo "testrepo" > .git/branches/branch1 && git push branch1 && ( @@ -905,6 +908,7 @@ test_expect_success 'push with branches' ' test_expect_success 'push with branches containing #' ' mk_empty testrepo && + mkdir -p .git/branches && echo "testrepo#branch3" > .git/branches/branch2 && git push branch2 && ( diff --git a/t/t5570-git-daemon.sh b/t/t5570-git-daemon.sh index 7466aad111..08f95c80a2 100755 --- a/t/t5570-git-daemon.sh +++ b/t/t5570-git-daemon.sh @@ -211,5 +211,4 @@ test_expect_success FAKENC 'hostname interpolation works after LF-stripping' ' test_cmp expect actual ' -stop_git_daemon test_done diff --git a/t/t5580-clone-push-unc.sh b/t/t5580-unc-paths.sh similarity index 53% rename from t/t5580-clone-push-unc.sh rename to t/t5580-unc-paths.sh index ba548df4a9..f9f0ef6b2d 100755 --- a/t/t5580-clone-push-unc.sh +++ b/t/t5580-unc-paths.sh @@ -17,14 +17,11 @@ fi UNCPATH="$(winpwd)" case "$UNCPATH" in [A-Z]:*) + WITHOUTDRIVE="${UNCPATH#?:}" # Use administrative share e.g. \\localhost\C$\git-sdk-64\usr\src\git # (we use forward slashes here because MSYS2 and Git accept them, and # they are easier on the eyes) - UNCPATH="//localhost/${UNCPATH%%:*}\$/${UNCPATH#?:}" - test -d "$UNCPATH" || { - skip_all='could not access administrative share; skipping' - test_done - } + UNCPATH="//localhost/${UNCPATH%%:*}\$$WITHOUTDRIVE" ;; *) skip_all='skipping UNC path tests, cannot determine current path as UNC' @@ -32,6 +29,18 @@ case "$UNCPATH" in ;; esac +test_expect_success 'clone into absolute path lacking a drive prefix' ' + USINGBACKSLASHES="$(echo "$WITHOUTDRIVE"/without-drive-prefix | + tr / \\\\)" && + git clone . "$USINGBACKSLASHES" && + test -f without-drive-prefix/.git/HEAD +' + +test -d "$UNCPATH" || { + skip_all='could not access administrative share; skipping' + test_done +} + test_expect_success setup ' test_commit initial ' @@ -40,6 +49,23 @@ test_expect_success clone ' git clone "file://$UNCPATH" clone ' +test_expect_success 'clone without file://' ' + git clone "$UNCPATH" clone-without-file +' + +test_expect_success 'clone with backslashed path' ' + BACKSLASHED="$(echo "$UNCPATH" | tr / \\\\)" && + git clone "$BACKSLASHED" backslashed +' + +test_expect_success fetch ' + git init to-fetch && + ( + cd to-fetch && + git fetch "$UNCPATH" master + ) +' + test_expect_success push ' ( cd clone && @@ -57,4 +83,16 @@ test_expect_success MINGW 'remote nick cannot contain backslashes' ' test_i18ngrep ! 
"unable to access" err ' +test_expect_success 'unc alternates' ' + tree="$(git rev-parse HEAD:)" && + mkdir test-unc-alternate && + ( + cd test-unc-alternate && + git init && + test_must_fail git show $tree && + echo "$UNCPATH/.git/objects" >.git/objects/info/alternates && + git show $tree + ) +' + test_done diff --git a/t/t5801-remote-helpers.sh b/t/t5801-remote-helpers.sh index aaaa722cca..18cf138343 100755 --- a/t/t5801-remote-helpers.sh +++ b/t/t5801-remote-helpers.sh @@ -228,7 +228,7 @@ test_expect_success 'push update refs failure' ' echo "update fail" >>file && git commit -a -m "update fail" && git rev-parse --verify testgit/origin/heads/update >expect && - test_expect_code 1 env GIT_REMOTE_TESTGIT_FAILURE="non-fast forward" \ + test_must_fail env GIT_REMOTE_TESTGIT_FAILURE="non-fast forward" \ git push origin update && git rev-parse --verify testgit/origin/heads/update >actual && test_cmp expect actual diff --git a/t/t7108-reset-stdin.sh b/t/t7108-reset-stdin.sh new file mode 100755 index 0000000000..b7cbcbf869 --- /dev/null +++ b/t/t7108-reset-stdin.sh @@ -0,0 +1,32 @@ +#!/bin/sh + +test_description='reset --stdin' + +. ./test-lib.sh + +test_expect_success 'reset --stdin' ' + test_commit hello && + git rm hello.t && + test -z "$(git ls-files hello.t)" && + echo hello.t | git reset --stdin && + test hello.t = "$(git ls-files hello.t)" +' + +test_expect_success 'reset --stdin -z' ' + test_commit world && + git rm hello.t world.t && + test -z "$(git ls-files hello.t world.t)" && + printf world.tQworld.tQhello.tQ | q_to_nul | git reset --stdin -z && + printf "hello.t\nworld.t\n" >expect && + git ls-files >actual && + test_cmp expect actual +' + +test_expect_success '--stdin requires --mixed' ' + echo hello.t >list && + test_must_fail git reset --soft --stdin = 110) +longpath36=0123456789abcdefghijklmnopqrstuvwxyz +longpath180=$longpath36$longpath36$longpath36$longpath36$longpath36 + +# the git database must fit within PATH_MAX, which limits the submodule name +# to PATH_MAX - len(pwd) - ~90 (= len("/objects//") + 40-byte sha1 + some +# overhead from the test case) +pwd=$(pwd) +pwdlen=$(echo "$pwd" | wc -c) +longpath=$(echo $longpath180 | cut -c 1-$((170-$pwdlen))) + +test_expect_success 'submodule with a long path' ' + git init --bare remote && + test_create_repo bundle1 && + ( + cd bundle1 && + test_commit "shoot" && + git rev-parse --verify HEAD >../expect + ) && + mkdir home && + ( + cd home && + git clone ../remote test && + cd test && + git submodule add ../bundle1 $longpath && + test_commit "sogood" && + ( + cd $longpath && + git rev-parse --verify HEAD >actual && + test_cmp ../../../expect actual + ) && + git push origin master + ) && + mkdir home2 && + ( + cd home2 && + git clone ../remote test && + cd test && + git checkout master && + git submodule update --init && + ( + cd $longpath && + git rev-parse --verify HEAD >actual && + test_cmp ../../../expect actual + ) + ) +' + +test_expect_success 'recursive submodule with a long path' ' + git init --bare super && + test_create_repo child && + ( + cd child && + test_commit "shoot" && + git rev-parse --verify HEAD >../expect + ) && + test_create_repo parent && + ( + cd parent && + git submodule add ../child $longpath && + test_commit "aim" + ) && + mkdir home3 && + ( + cd home3 && + git clone ../super test && + cd test && + git submodule add ../parent foo && + git submodule update --init --recursive && + test_commit "sogood" && + ( + cd foo/$longpath && + git rev-parse --verify HEAD >actual && + test_cmp ../../../../expect actual 
+ ) && + git push origin master + ) && + mkdir home4 && + ( + cd home4 && + git clone ../super test --recursive && + ( + cd test/foo/$longpath && + git rev-parse --verify HEAD >actual && + test_cmp ../../../../expect actual + ) + ) +' + +test_done diff --git a/t/t9350-fast-export.sh b/t/t9350-fast-export.sh index 6a392e87bc..6d6d5050a2 100755 --- a/t/t9350-fast-export.sh +++ b/t/t9350-fast-export.sh @@ -556,4 +556,15 @@ test_expect_success 'merge commit gets exported with --import-marks' ' ) ' +cat > expected << EOF +reset refs/heads/master +from $(git rev-parse master) + +EOF + +test_expect_failure 'refs are updated even if no commits need to be exported' ' + git fast-export master..master > actual && + test_cmp expected actual +' + test_done diff --git a/t/t9800-git-p4-basic.sh b/t/t9800-git-p4-basic.sh index 729cd25770..5856563068 100755 --- a/t/t9800-git-p4-basic.sh +++ b/t/t9800-git-p4-basic.sh @@ -326,8 +326,4 @@ test_expect_success 'submit from worktree' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9801-git-p4-branch.sh b/t/t9801-git-p4-branch.sh index 6a86d6996b..50013132c8 100755 --- a/t/t9801-git-p4-branch.sh +++ b/t/t9801-git-p4-branch.sh @@ -610,8 +610,4 @@ test_expect_success 'Update a file in git side and submit to P4 using client vie ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9802-git-p4-filetype.sh b/t/t9802-git-p4-filetype.sh index 9978352d78..94edebe272 100755 --- a/t/t9802-git-p4-filetype.sh +++ b/t/t9802-git-p4-filetype.sh @@ -333,8 +333,4 @@ test_expect_success SYMLINKS 'empty symlink target' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9803-git-p4-shell-metachars.sh b/t/t9803-git-p4-shell-metachars.sh index d5c3675100..2913277013 100755 --- a/t/t9803-git-p4-shell-metachars.sh +++ b/t/t9803-git-p4-shell-metachars.sh @@ -105,8 +105,4 @@ test_expect_success 'branch with shell char' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9804-git-p4-label.sh b/t/t9804-git-p4-label.sh index e30f80e617..3236457106 100755 --- a/t/t9804-git-p4-label.sh +++ b/t/t9804-git-p4-label.sh @@ -108,8 +108,4 @@ test_expect_failure 'two labels on the same changelist' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9805-git-p4-skip-submit-edit.sh b/t/t9805-git-p4-skip-submit-edit.sh index 5fbf904dc8..90ef647db7 100755 --- a/t/t9805-git-p4-skip-submit-edit.sh +++ b/t/t9805-git-p4-skip-submit-edit.sh @@ -98,8 +98,4 @@ test_expect_success 'no config, edited' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9806-git-p4-options.sh b/t/t9806-git-p4-options.sh index 3f5291b857..4e794a01bf 100755 --- a/t/t9806-git-p4-options.sh +++ b/t/t9806-git-p4-options.sh @@ -300,9 +300,4 @@ test_expect_success 'use --git-dir option and GIT_DIR' ' test_path_is_file "$git"/cli_file2.t ' - -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9807-git-p4-submit.sh b/t/t9807-git-p4-submit.sh index 2325599ee6..488d916c10 100755 --- a/t/t9807-git-p4-submit.sh +++ b/t/t9807-git-p4-submit.sh @@ -542,8 +542,4 @@ test_expect_success 'submit --update-shelve' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9808-git-p4-chdir.sh b/t/t9808-git-p4-chdir.sh index 11d2b5102c..58a9b3b71e 100755 --- a/t/t9808-git-p4-chdir.sh +++ b/t/t9808-git-p4-chdir.sh @@ -83,8 +83,4 @@ test_expect_success SYMLINKS 'p4 client root symlink should stay symbolic' ' ) ' -test_expect_success 
'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9809-git-p4-client-view.sh b/t/t9809-git-p4-client-view.sh index 897b3c3034..3cff1fce1b 100755 --- a/t/t9809-git-p4-client-view.sh +++ b/t/t9809-git-p4-client-view.sh @@ -836,8 +836,4 @@ test_expect_success 'quotes on both sides' ' git_verify "cdir 1/file11" "cdir 1/file12" ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9810-git-p4-rcs.sh b/t/t9810-git-p4-rcs.sh index cc53debe19..57b533dc6f 100755 --- a/t/t9810-git-p4-rcs.sh +++ b/t/t9810-git-p4-rcs.sh @@ -360,8 +360,4 @@ test_expect_failure 'Add keywords in git which do not match the default p4 value ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9811-git-p4-label-import.sh b/t/t9811-git-p4-label-import.sh index 602b0a5d5c..b70e81c3cd 100755 --- a/t/t9811-git-p4-label-import.sh +++ b/t/t9811-git-p4-label-import.sh @@ -259,9 +259,4 @@ test_expect_success 'importing labels with missing revisions' ' ) ' - -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9812-git-p4-wildcards.sh b/t/t9812-git-p4-wildcards.sh index 0206771fbb..254a7c2446 100755 --- a/t/t9812-git-p4-wildcards.sh +++ b/t/t9812-git-p4-wildcards.sh @@ -211,8 +211,4 @@ test_expect_success 'wildcard files requiring keyword scrub' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9813-git-p4-preserve-users.sh b/t/t9813-git-p4-preserve-users.sh index 783c6ad165..fd018c87a8 100755 --- a/t/t9813-git-p4-preserve-users.sh +++ b/t/t9813-git-p4-preserve-users.sh @@ -138,8 +138,4 @@ test_expect_success 'not preserving user with mixed authorship' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9814-git-p4-rename.sh b/t/t9814-git-p4-rename.sh index 60baa06e27..468767cbf4 100755 --- a/t/t9814-git-p4-rename.sh +++ b/t/t9814-git-p4-rename.sh @@ -242,8 +242,4 @@ test_expect_success P4D_HAVE_CONFIGURABLE_RUN_MOVE_ALLOW \ ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9815-git-p4-submit-fail.sh b/t/t9815-git-p4-submit-fail.sh index eaf03a6563..9779dc0d11 100755 --- a/t/t9815-git-p4-submit-fail.sh +++ b/t/t9815-git-p4-submit-fail.sh @@ -422,8 +422,4 @@ test_expect_success 'cleanup chmod after submit cancel' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9816-git-p4-locked.sh b/t/t9816-git-p4-locked.sh index d048bd33fa..932841003c 100755 --- a/t/t9816-git-p4-locked.sh +++ b/t/t9816-git-p4-locked.sh @@ -138,8 +138,4 @@ test_expect_failure 'move with lock taken' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9817-git-p4-exclude.sh b/t/t9817-git-p4-exclude.sh index aac568eadf..96d25f0c02 100755 --- a/t/t9817-git-p4-exclude.sh +++ b/t/t9817-git-p4-exclude.sh @@ -64,8 +64,4 @@ test_expect_success 'clone, then sync with exclude' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9818-git-p4-block.sh b/t/t9818-git-p4-block.sh index ce7cb22ad3..0db7ab9918 100755 --- a/t/t9818-git-p4-block.sh +++ b/t/t9818-git-p4-block.sh @@ -146,8 +146,4 @@ test_expect_success 'Clone repo with self-sizing block size' ' test_line_count \> 10 log ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9819-git-p4-case-folding.sh b/t/t9819-git-p4-case-folding.sh index d808c008c1..600ce1e0b0 100755 --- a/t/t9819-git-p4-case-folding.sh +++ b/t/t9819-git-p4-case-folding.sh @@ -53,8 +53,4 @@ test_expect_failure 'Clone UC repo with lc name' ' test_must_fail git p4 clone 
//depot/uc/... ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9820-git-p4-editor-handling.sh b/t/t9820-git-p4-editor-handling.sh index 3c22f74bd4..fa1bba1dd9 100755 --- a/t/t9820-git-p4-editor-handling.sh +++ b/t/t9820-git-p4-editor-handling.sh @@ -31,8 +31,4 @@ test_expect_success 'EDITOR with options' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9821-git-p4-path-variations.sh b/t/t9821-git-p4-path-variations.sh index 81e46acfa8..ef80f1690b 100755 --- a/t/t9821-git-p4-path-variations.sh +++ b/t/t9821-git-p4-path-variations.sh @@ -193,8 +193,4 @@ test_expect_success 'Add a new file and clone path with new file (ignorecase)' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9822-git-p4-path-encoding.sh b/t/t9822-git-p4-path-encoding.sh index c78477c19b..1bf7635016 100755 --- a/t/t9822-git-p4-path-encoding.sh +++ b/t/t9822-git-p4-path-encoding.sh @@ -67,8 +67,4 @@ test_expect_success 'Delete iso8859-1 encoded paths and clone' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9823-git-p4-mock-lfs.sh b/t/t9823-git-p4-mock-lfs.sh index 1f2dc369bf..88b76dc4d6 100755 --- a/t/t9823-git-p4-mock-lfs.sh +++ b/t/t9823-git-p4-mock-lfs.sh @@ -185,8 +185,4 @@ test_expect_success 'Run git p4 submit in repo configured with large file system ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9824-git-p4-git-lfs.sh b/t/t9824-git-p4-git-lfs.sh index ed80ca858c..a28dbbdd56 100755 --- a/t/t9824-git-p4-git-lfs.sh +++ b/t/t9824-git-p4-git-lfs.sh @@ -287,8 +287,4 @@ test_expect_success 'Add big files to repo and store files in LFS based on compr ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9825-git-p4-handle-utf16-without-bom.sh b/t/t9825-git-p4-handle-utf16-without-bom.sh index 1551845dc1..f049ff8229 100755 --- a/t/t9825-git-p4-handle-utf16-without-bom.sh +++ b/t/t9825-git-p4-handle-utf16-without-bom.sh @@ -43,8 +43,4 @@ test_expect_failure 'clone depot with invalid UTF-16 file in non-verbose mode' ' git p4 clone --dest="$git" //depot ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9826-git-p4-keep-empty-commits.sh b/t/t9826-git-p4-keep-empty-commits.sh index fa8b9daf1f..fd64afe064 100755 --- a/t/t9826-git-p4-keep-empty-commits.sh +++ b/t/t9826-git-p4-keep-empty-commits.sh @@ -127,8 +127,4 @@ test_expect_success 'Clone repo subdir with all history' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9827-git-p4-change-filetype.sh b/t/t9827-git-p4-change-filetype.sh index 7433998f47..d3670bd7a2 100755 --- a/t/t9827-git-p4-change-filetype.sh +++ b/t/t9827-git-p4-change-filetype.sh @@ -59,8 +59,4 @@ test_expect_success SYMLINKS 'change symbolic link to file' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9828-git-p4-map-user.sh b/t/t9828-git-p4-map-user.sh index e20395c89f..ca6c2942bd 100755 --- a/t/t9828-git-p4-map-user.sh +++ b/t/t9828-git-p4-map-user.sh @@ -54,8 +54,4 @@ test_expect_success 'Clone repo root path with all history' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9829-git-p4-jobs.sh b/t/t9829-git-p4-jobs.sh index 971aeeea1f..88cfb1fcd3 100755 --- a/t/t9829-git-p4-jobs.sh +++ b/t/t9829-git-p4-jobs.sh @@ -92,8 +92,4 @@ test_expect_success 'check log message of changelist with more jobs' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git 
a/t/t9830-git-p4-symlink-dir.sh b/t/t9830-git-p4-symlink-dir.sh index 2ad1b0810d..3fb6960c18 100755 --- a/t/t9830-git-p4-symlink-dir.sh +++ b/t/t9830-git-p4-symlink-dir.sh @@ -36,8 +36,4 @@ test_expect_success 'symlinked directory' ' ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9831-git-p4-triggers.sh b/t/t9831-git-p4-triggers.sh index be44c9751a..d743ca33ee 100755 --- a/t/t9831-git-p4-triggers.sh +++ b/t/t9831-git-p4-triggers.sh @@ -96,8 +96,4 @@ test_expect_success 'submit description with extra info lines from verbose p4 ch ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - test_done diff --git a/t/t9832-unshelve.sh b/t/t9832-unshelve.sh index 41c09f11f4..1286a5b824 100755 --- a/t/t9832-unshelve.sh +++ b/t/t9832-unshelve.sh @@ -174,8 +174,5 @@ test_expect_success 'unshelve specifying the origin' ' test_path_is_file file_to_shelve ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' test_done diff --git a/t/t9833-errors.sh b/t/t9833-errors.sh index 277d347012..1f3d879122 100755 --- a/t/t9833-errors.sh +++ b/t/t9833-errors.sh @@ -72,9 +72,4 @@ test_expect_success 'git operation with expired ticket' ' ) ' -test_expect_success 'kill p4d' ' - kill_p4d -' - - test_done diff --git a/t/test-lib-functions.sh b/t/test-lib-functions.sh index b4e391526a..256a260f71 100644 --- a/t/test-lib-functions.sh +++ b/t/test-lib-functions.sh @@ -914,6 +914,35 @@ test_when_finished () { } && (exit \"\$eval_ret\"); eval_ret=\$?; $test_cleanup" } +# This function can be used to schedule some commands to be run +# unconditionally at the end of the test script, e.g. to stop a daemon: +# +# test_expect_success 'test git daemon' ' +# git daemon & +# daemon_pid=$! && +# test_atexit "kill $daemon_pid" && +# hello world +# ' + +test_atexit () { + # We cannot detect when we are in a subshell in general, but by + # doing so on Bash is better than nothing (the test will + # silently pass on other shells). + test "${BASH_SUBSHELL-0}" = 0 || + error "bug in test script: test_atexit does nothing in a subshell" + test_atexit_cleanup="{ $* + } && (exit \"\$eval_ret\"); eval_ret=\$?; $test_atexit_cleanup" +} + +test_atexit_handler () { + test : != "$test_atexit_cleanup" || return 0 + + setup_malloc_check + test_eval_ "$test_atexit_cleanup" + test_atexit_cleanup=: + teardown_malloc_check +} + # Most tests can use the created repository, but some may need to create more. # Usage: test_create_repo test_create_repo () { diff --git a/t/test-lib.sh b/t/test-lib.sh index 6c6c0af7a1..b3b00c21f9 100644 --- a/t/test-lib.sh +++ b/t/test-lib.sh @@ -95,6 +95,13 @@ done,*) test "$(cat "$BASE.exit")" = 0 exit ;; +*' --write-junit-xml '*) + # record how to call this script *with* --verbose-log, in case + # we encounter a breakage + junit_rerun_options_sq="$(printf '%s\n' "$0" --verbose-log -x "$@" | + sed -e "s/'/'\\\\''/g" -e "s/^/'/" -e "s/\$/'/" | + tr '\012' ' ')" + ;; esac # For repeatability, reset the environment to known value. @@ -294,6 +301,8 @@ do test -z "$HARNESS_ACTIVE" && quiet=t; shift ;; --with-dashes) with_dashes=t; shift ;; + --no-bin-wrappers) + no_bin_wrappers=t; shift ;; --no-color) color=; shift ;; --va|--val|--valg|--valgr|--valgri|--valgrin|--valgrind) @@ -339,6 +348,9 @@ do -V|--verbose-log) verbose_log=t shift ;; + --write-junit-xml) + write_junit_xml=t + shift ;; *) echo "error: unknown test option '$1'" >&2; exit 1 ;; esac @@ -461,6 +473,7 @@ test_external_has_tap=0 die () { code=$? + test_atexit_handler || code=$? 
if test -n "$GIT_EXIT_OK" then exit $code @@ -482,11 +495,48 @@ trap 'exit $?' INT # the test_expect_* functions instead. test_ok_ () { + if test -n "$write_junit_xml" + then + write_junit_xml_testcase "$*" + fi test_success=$(($test_success + 1)) say_color "" "ok $test_count - $@" } test_failure_ () { + if test -n "$write_junit_xml" + then + if test -z "$GIT_TEST_TEE_OUTPUT_FILE" + then + # clean up + test_atexit_handler + + # re-run with --verbose-log + echo "# Re-running: $junit_rerun_options_sq" >&2 + + cd "$TEST_DIRECTORY" && + eval "${TEST_SHELL_PATH}" "$junit_rerun_options_sq" \ + >/dev/null 2>&1 + status=$? + + say_color "" "$(test 0 = $status || + echo "not ")ok $test_count - (re-ran with trace)" + say "1..$test_count" + GIT_EXIT_OK=t + exit $status + fi + + junit_insert="" + junit_insert="$junit_insert $(xml_attr_encode \ + "$(cat "$GIT_TEST_TEE_OUTPUT_FILE")")" + >"$GIT_TEST_TEE_OUTPUT_FILE" + junit_insert="$junit_insert" + junit_insert="$junit_insert$(xml_attr_encode \ + "$(cat "$GIT_TEST_TEE_OUTPUT_FILE.err")")" + >"$GIT_TEST_TEE_OUTPUT_FILE.err" + write_junit_xml_testcase "$1" " $junit_insert" + fi test_failure=$(($test_failure + 1)) say_color error "not ok $test_count - $1" shift @@ -495,11 +545,19 @@ test_failure_ () { } test_known_broken_ok_ () { + if test -n "$write_junit_xml" + then + write_junit_xml_testcase "$* (breakage fixed)" + fi test_fixed=$(($test_fixed+1)) say_color error "ok $test_count - $@ # TODO known breakage vanished" } test_known_broken_failure_ () { + if test -n "$write_junit_xml" + then + write_junit_xml_testcase "$* (known breakage)" + fi test_broken=$(($test_broken+1)) say_color warn "not ok $test_count - $@ # TODO known breakage" } @@ -757,6 +815,17 @@ test_start_ () { test_count=$(($test_count+1)) maybe_setup_verbose maybe_setup_valgrind + if test -n "$write_junit_xml" + then + junit_start=$(test-tool date getnanos) + + # append to future ; truncate output + test -z "$GIT_TEST_TEE_OUTPUT_FILE" || { + cat "$GIT_TEST_TEE_OUTPUT_FILE" \ + >>"$GIT_TEST_TEE_OUTPUT_FILE.err" + >"$GIT_TEST_TEE_OUTPUT_FILE" + } + fi } test_finish_ () { @@ -794,6 +863,13 @@ test_skip () { case "$to_skip" in t) + if test -n "$write_junit_xml" + then + message="$(xml_attr_encode "$skipped_reason")" + write_junit_xml_testcase "$1" \ + " " + fi + say_color skip >&3 "skipping test: $@" say_color skip "ok $test_count # skip $1 ($skipped_reason)" : true @@ -809,9 +885,61 @@ test_at_end_hook_ () { : } +write_junit_xml () { + case "$1" in + --truncate) + >"$junit_xml_path" + junit_have_testcase= + shift + ;; + esac + printf '%s\n' "$@" >>"$junit_xml_path" +} + +xml_attr_encode () { + # We do not translate CR to because BSD sed does not handle + # \r in the regex. In practice, the output should not even have any + # carriage returns. 
+ printf '%s\n' "$@" | + sed -e 's/&/\&/g' -e "s/'/\'/g" -e 's/"/\"/g' \ + -e 's//\>/g' \ + -e 's/ /\ /g' -e 's/$/\ /' -e '$s/ $//' | + tr -d '\012\015' +} + +write_junit_xml_testcase () { + junit_attrs="name=\"$(xml_attr_encode "$this_test.$test_count $1")\"" + shift + junit_attrs="$junit_attrs classname=\"$this_test\"" + junit_attrs="$junit_attrs time=\"$(test-tool \ + date getnanos $junit_start)\"" + write_junit_xml "$(printf '%s\n' \ + " " "$@" " ")" + junit_have_testcase=t +} + +test_atexit_cleanup=: test_done () { GIT_EXIT_OK=t + test -n "$immediate" || test_atexit_handler + + if test -n "$write_junit_xml" && test -n "$junit_xml_path" + then + test -n "$junit_have_testcase" || { + junit_start=$(test-tool date getnanos) + write_junit_xml_testcase "all tests skipped" + } + + # adjust the overall time + junit_time=$(test-tool date getnanos $junit_suite_start) + sed "s/]*/& time=\"$junit_time\"/" \ + <"$junit_xml_path" >"$junit_xml_path.new" + mv "$junit_xml_path.new" "$junit_xml_path" + + write_junit_xml " " "" + fi + if test -z "$HARNESS_ACTIVE" then test_results_dir="$TEST_OUTPUT_DIRECTORY/test-results" @@ -980,20 +1108,25 @@ then PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$PATH GIT_EXEC_PATH=${GIT_TEST_EXEC_PATH:-$GIT_EXEC_PATH} else # normal case, use ../bin-wrappers only unless $with_dashes: - git_bin_dir="$GIT_BUILD_DIR/bin-wrappers" - if ! test -x "$git_bin_dir/git" + if test -n "$no_bin_wrappers" then - if test -z "$with_dashes" - then - say "$git_bin_dir/git is not executable; using GIT_EXEC_PATH" - fi with_dashes=t + else + git_bin_dir="$GIT_BUILD_DIR/bin-wrappers" + if ! test -x "$git_bin_dir/git" + then + if test -z "$with_dashes" + then + say "$git_bin_dir/git is not executable; using GIT_EXEC_PATH" + fi + with_dashes=t + fi + PATH="$git_bin_dir:$PATH" fi - PATH="$git_bin_dir:$PATH" GIT_EXEC_PATH=$GIT_BUILD_DIR if test -n "$with_dashes" then - PATH="$GIT_BUILD_DIR:$PATH" + PATH="$GIT_BUILD_DIR:$GIT_BUILD_DIR/t/helper:$PATH" fi fi GIT_TEMPLATE_DIR="$GIT_BUILD_DIR"/templates/blt @@ -1017,7 +1150,7 @@ test -d "$GIT_BUILD_DIR"/templates/blt || { error "You haven't built things yet, have you?" } -if ! test -x "$GIT_BUILD_DIR"/t/helper/test-tool +if ! test -x "$GIT_BUILD_DIR"/t/helper/test-tool$X then echo >&2 'You need to build test-tool:' echo >&2 'Run "make t/helper/test-tool" in the source (toplevel) directory' @@ -1047,6 +1180,7 @@ then else mkdir -p "$TRASH_DIRECTORY" fi + # Use -P to resolve symlinks in our working directory so that the cwd # in subprocesses like git equals our $PWD (for pathname comparisons). cd -P "$TRASH_DIRECTORY" || exit 1 @@ -1060,6 +1194,19 @@ then test_done fi +if test -n "$write_junit_xml" +then + junit_xml_dir="$TEST_OUTPUT_DIRECTORY/out" + mkdir -p "$junit_xml_dir" + junit_xml_base=${0##*/} + junit_xml_path="$junit_xml_dir/TEST-${junit_xml_base%.sh}.xml" + junit_attrs="name=\"${junit_xml_base%.sh}\"" + junit_attrs="$junit_attrs timestamp=\"$(TZ=UTC \ + date +%Y-%m-%dT%H:%M:%S)\"" + write_junit_xml --truncate "" " " + junit_suite_start=$(test-tool date getnanos) +fi + # Provide an implementation of the 'yes' utility yes () { if test $# = 0 diff --git a/transport-helper.c b/transport-helper.c index 7213fa0d32..c7c44ff041 100644 --- a/transport-helper.c +++ b/transport-helper.c @@ -16,6 +16,8 @@ #include "protocol.h" static int debug; +/* TODO: put somewhere sensible, e.g. git_transport_options? 
*/ +static int auto_gc = 1; struct helper_data { const char *name; @@ -460,10 +462,25 @@ static int get_exporter(struct transport *transport, for (i = 0; i < revlist_args->nr; i++) argv_array_push(&fastexport->args, revlist_args->items[i].string); + argv_array_push(&fastexport->args, "--"); + fastexport->git_cmd = 1; return start_command(fastexport); } +static void check_helper_status(struct helper_data *data) +{ + int pid, status; + + pid = waitpid(data->helper->pid, &status, WNOHANG); + if (pid < 0) + die("Could not retrieve status of remote helper '%s'", + data->name); + if (pid > 0 && WIFEXITED(status)) + die("Remote helper '%s' died with %d", + data->name, WEXITSTATUS(status)); +} + static int fetch_with_import(struct transport *transport, int nr_heads, struct ref **to_fetch) { @@ -500,6 +517,7 @@ static int fetch_with_import(struct transport *transport, if (finish_command(&fastimport)) die(_("error while running fast-import")); + check_helper_status(data); /* * The fast-import stream of a remote helper that advertises @@ -533,6 +551,12 @@ static int fetch_with_import(struct transport *transport, } } strbuf_release(&buf); + if (auto_gc) { + const char *argv_gc_auto[] = { + "gc", "--auto", "--quiet", NULL, + }; + run_command_v_opt(argv_gc_auto, RUN_GIT_CMD); + } return 0; } @@ -987,6 +1011,7 @@ static int push_refs_with_export(struct transport *transport, if (finish_command(&exporter)) die(_("error while running fast-export")); + check_helper_status(data); if (push_update_refs_status(data, remote_refs, flags)) return 1;
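To make the effect of the auto_gc flag above concrete (the remote name is a placeholder): after a fetch that goes through a fast-import-based remote helper, fetch_with_import() now also runs a quiet automatic garbage collection, roughly equivalent to:

	git fetch my-helper-remote
	git gc --auto --quiet	# now triggered automatically after the import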