mirror of
https://github.com/git/git.git
synced 2026-03-13 10:23:30 +01:00
Merge branch 'master' of git://repo.or.cz/alt-git
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -144,6 +144,7 @@ git-core-*/?*
|
||||
gitk-wish
|
||||
gitweb/gitweb.cgi
|
||||
test-chmtime
|
||||
test-ctype
|
||||
test-date
|
||||
test-delta
|
||||
test-dump-cache-tree
|
||||
|
||||
@@ -21,8 +21,13 @@ code. For git in general, three rough rules are:
|
||||
|
||||
As for more concrete guidelines, just imitate the existing code
|
||||
(this is a good guideline, no matter which project you are
|
||||
contributing to). But if you must have a list of rules,
|
||||
here they are.
|
||||
contributing to). It is always preferable to match the _local_
|
||||
convention. New code added to git suite is expected to match
|
||||
the overall style of existing code. Modifications to existing
|
||||
code is expected to match the style the surrounding code already
|
||||
uses (even if it doesn't match the overall style of existing code).
|
||||
|
||||
But if you must have a list of rules, here they are.
|
||||
|
||||
For shell scripts specifically (not exhaustive):
|
||||
|
||||
|
||||
@@ -4,9 +4,14 @@ GIT v1.6.1.1 Release Notes
|
||||
Fixes since v1.6.1
|
||||
------------------
|
||||
|
||||
* "git add frotz/nitfol" when "frotz" is a submodule should have errored
|
||||
out, but it didn't.
|
||||
|
||||
* "git apply" took file modes from the patch text and updated the mode
|
||||
bits of the target tree even when the patch was not about mode changes.
|
||||
|
||||
* "git bisect view" on Cygwin did not launch gitk
|
||||
|
||||
* "git checkout $tree" did not trigger an error.
|
||||
|
||||
* "git commit" tried to remove COMMIT_EDITMSG from the work tree by mistake.
|
||||
@@ -14,6 +19,12 @@ Fixes since v1.6.1
|
||||
* "git describe --all" complained when a commit is described with a tag,
|
||||
which was nonsense.
|
||||
|
||||
* "git diff --no-index --" did not trigger no-index (aka "use git-diff as
|
||||
a replacement of diff on untracked files") behaviour.
|
||||
|
||||
* "git format-patch -1 HEAD" on a root commit failed to produce patch
|
||||
text.
|
||||
|
||||
* "git fsck branch" did not work as advertised; instead it behaved the same
|
||||
way as "git fsck".
|
||||
|
||||
@@ -36,14 +47,13 @@ Fixes since v1.6.1
|
||||
|
||||
* "git mv -k" with more than one errorneous paths misbehaved.
|
||||
|
||||
* "git read-tree -m -u" hence branch switching incorrectly lost a
|
||||
subdirectory in rare cases.
|
||||
|
||||
* "git rebase -i" issued an unnecessary error message upon a user error of
|
||||
marking the first commit to be "squash"ed.
|
||||
|
||||
Other documentation updates.
|
||||
|
||||
---
|
||||
exec >/var/tmp/1
|
||||
O=v1.6.1-60-g78f111e
|
||||
echo O=$(git describe maint)
|
||||
git shortlog --no-merges $O..maint
|
||||
* "git shortlog" did not format a commit message with multi-line
|
||||
subject correctly.
|
||||
|
||||
Many documentation updates.
|
||||
|
||||
@@ -639,6 +639,12 @@ diff.suppressBlankEmpty::
|
||||
A boolean to inhibit the standard behavior of printing a space
|
||||
before each empty output line. Defaults to false.
|
||||
|
||||
diff.wordRegex::
|
||||
A POSIX Extended Regular Expression used to determine what is a "word"
|
||||
when performing word-by-word difference calculations. Character
|
||||
sequences that match the regular expression are "words", all other
|
||||
characters are *ignorable* whitespace.
|
||||
|
||||
fetch.unpackLimit::
|
||||
If the number of objects fetched over the git native
|
||||
transfer is below this
|
||||
|
||||
@@ -36,6 +36,9 @@ endif::git-format-patch[]
|
||||
--patch-with-raw::
|
||||
Synonym for "-p --raw".
|
||||
|
||||
--patience::
|
||||
Generate a diff using the "patience diff" algorithm.
|
||||
|
||||
--stat[=width[,name-width]]::
|
||||
Generate a diffstat. You can override the default
|
||||
output width for 80-column terminal by "--stat=width".
|
||||
@@ -91,8 +94,22 @@ endif::git-format-patch[]
|
||||
Turn off colored diff, even when the configuration file
|
||||
gives the default to color output.
|
||||
|
||||
--color-words::
|
||||
Show colored word diff, i.e. color words which have changed.
|
||||
--color-words[=<regex>]::
|
||||
Show colored word diff, i.e., color words which have changed.
|
||||
By default, words are separated by whitespace.
|
||||
+
|
||||
When a <regex> is specified, every non-overlapping match of the
|
||||
<regex> is considered a word. Anything between these matches is
|
||||
considered whitespace and ignored(!) for the purposes of finding
|
||||
differences. You may want to append `|[^[:space:]]` to your regular
|
||||
expression to make sure that it matches all non-whitespace characters.
|
||||
A match that contains a newline is silently truncated(!) at the
|
||||
newline.
|
||||
+
|
||||
The regex can also be set via a diff driver or configuration option, see
|
||||
linkgit:gitattributes[1] or linkgit:git-config[1]. Giving it explicitly
|
||||
overrides any diff driver or configuration setting. Diff drivers
|
||||
override configuration settings.
|
||||
|
||||
--no-renames::
|
||||
Turn off rename detection, even when the configuration
|
||||
|
||||
@@ -12,6 +12,7 @@ SYNOPSIS
|
||||
'git am' [--signoff] [--keep] [--utf8 | --no-utf8]
|
||||
[--3way] [--interactive]
|
||||
[--whitespace=<option>] [-C<n>] [-p<n>] [--directory=<dir>]
|
||||
[--reject]
|
||||
[<mbox> | <Maildir>...]
|
||||
'git am' (--skip | --resolved | --abort)
|
||||
|
||||
@@ -63,6 +64,7 @@ default. You could use `--no-utf8` to override this.
|
||||
-C<n>::
|
||||
-p<n>::
|
||||
--directory=<dir>::
|
||||
--reject::
|
||||
These flags are passed to the 'git-apply' (see linkgit:git-apply[1])
|
||||
program that applies
|
||||
the patch.
|
||||
|
||||
@@ -43,7 +43,7 @@ OPTIONS
|
||||
Automatically implies --tags.
|
||||
|
||||
--abbrev=<n>::
|
||||
Instead of using the default 8 hexadecimal digits as the
|
||||
Instead of using the default 7 hexadecimal digits as the
|
||||
abbreviated object name, use <n> digits.
|
||||
|
||||
--candidates=<n>::
|
||||
|
||||
@@ -103,6 +103,19 @@ repository to be able to interoperate with someone else's local Git
|
||||
repository, either don't use this option or you should both use it in
|
||||
the same local timezone.
|
||||
|
||||
--ignore-paths=<regex>;;
|
||||
This allows one to specify Perl regular expression that will
|
||||
cause skipping of all matching paths from checkout from SVN.
|
||||
Examples:
|
||||
|
||||
--ignore-paths="^doc" - skip "doc*" directory for every fetch.
|
||||
|
||||
--ignore-paths="^[^/]+/(?:branches|tags)" - skip "branches"
|
||||
and "tags" of first level directories.
|
||||
|
||||
Regular expression is not persistent, you should specify
|
||||
it every time when fetching.
|
||||
|
||||
'clone'::
|
||||
Runs 'init' and 'fetch'. It will automatically create a
|
||||
directory based on the basename of the URL passed to it;
|
||||
|
||||
@@ -43,9 +43,10 @@ unreleased) version of git, that is available from 'master'
|
||||
branch of the `git.git` repository.
|
||||
Documentation for older releases are available here:
|
||||
|
||||
* link:v1.6.1/git.html[documentation for release 1.6.1]
|
||||
* link:v1.6.1.1/git.html[documentation for release 1.6.1.1]
|
||||
|
||||
* release notes for
|
||||
link:RelNotes-1.6.1.1.txt[1.6.1.1],
|
||||
link:RelNotes-1.6.1.txt[1.6.1].
|
||||
|
||||
* link:v1.6.0.6/git.html[documentation for release 1.6.0.6]
|
||||
|
||||
@@ -317,6 +317,8 @@ patterns are available:
|
||||
|
||||
- `bibtex` suitable for files with BibTeX coded references.
|
||||
|
||||
- `cpp` suitable for source code in the C and C++ languages.
|
||||
|
||||
- `html` suitable for HTML/XHTML documents.
|
||||
|
||||
- `java` suitable for source code in the Java language.
|
||||
@@ -334,6 +336,25 @@ patterns are available:
|
||||
- `tex` suitable for source code for LaTeX documents.
|
||||
|
||||
|
||||
Customizing word diff
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can customize the rules that `git diff --color-words` uses to
|
||||
split words in a line, by specifying an appropriate regular expression
|
||||
in the "diff.*.wordRegex" configuration variable. For example, in TeX
|
||||
a backslash followed by a sequence of letters forms a command, but
|
||||
several such commands can be run together without intervening
|
||||
whitespace. To separate them, use a regular expression such as
|
||||
|
||||
------------------------
|
||||
[diff "tex"]
|
||||
wordRegex = "\\\\[a-zA-Z]+|[{}]|\\\\.|[^\\{}[:space:]]+"
|
||||
------------------------
|
||||
|
||||
A built-in pattern is provided for all languages listed in the
|
||||
previous section.
|
||||
|
||||
|
||||
Performing text diffs of binary files
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
|
||||
@@ -308,9 +308,7 @@ alice$ git pull /home/bob/myrepo master
|
||||
|
||||
This merges the changes from Bob's "master" branch into Alice's
|
||||
current branch. If Alice has made her own changes in the meantime,
|
||||
then she may need to manually fix any conflicts. (Note that the
|
||||
"master" argument in the above command is actually unnecessary, as it
|
||||
is the default.)
|
||||
then she may need to manually fix any conflicts.
|
||||
|
||||
The "pull" command thus performs two operations: it fetches changes
|
||||
from a remote branch, then merges them into the current branch.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh
|
||||
|
||||
GVF=GIT-VERSION-FILE
|
||||
DEF_VER=v1.6.0.2.GIT
|
||||
DEF_VER=v1.6.1.GIT
|
||||
|
||||
LF='
|
||||
'
|
||||
|
||||
3
Makefile
3
Makefile
@@ -819,6 +819,7 @@ ifeq ($(uname_S),Darwin)
|
||||
BASIC_LDFLAGS += -L/opt/local/lib
|
||||
endif
|
||||
endif
|
||||
PTHREAD_LIBS =
|
||||
endif
|
||||
|
||||
ifndef CC_LD_DYNPATH
|
||||
@@ -1298,7 +1299,7 @@ $(LIB_FILE): $(LIB_OBJS)
|
||||
$(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $(LIB_OBJS)
|
||||
|
||||
XDIFF_OBJS=xdiff/xdiffi.o xdiff/xprepare.o xdiff/xutils.o xdiff/xemit.o \
|
||||
xdiff/xmerge.o
|
||||
xdiff/xmerge.o xdiff/xpatience.o
|
||||
$(XDIFF_OBJS): xdiff/xinclude.h xdiff/xmacros.h xdiff/xdiff.h xdiff/xtypes.h \
|
||||
xdiff/xutils.h xdiff/xprepare.h xdiff/xdiffi.h xdiff/xemit.h
|
||||
|
||||
|
||||
@@ -230,7 +230,7 @@ static int checkout_paths(struct tree *source_tree, const char **pathspec,
|
||||
|
||||
for (pos = 0; pos < active_nr; pos++) {
|
||||
struct cache_entry *ce = active_cache[pos];
|
||||
pathspec_match(pathspec, ps_matched, ce->name, 0);
|
||||
match_pathspec(pathspec, ce->name, ce_namelen(ce), 0, ps_matched);
|
||||
}
|
||||
|
||||
if (report_path_error(ps_matched, pathspec, 0))
|
||||
@@ -239,7 +239,7 @@ static int checkout_paths(struct tree *source_tree, const char **pathspec,
|
||||
/* Any unmerged paths? */
|
||||
for (pos = 0; pos < active_nr; pos++) {
|
||||
struct cache_entry *ce = active_cache[pos];
|
||||
if (pathspec_match(pathspec, NULL, ce->name, 0)) {
|
||||
if (match_pathspec(pathspec, ce->name, ce_namelen(ce), 0, NULL)) {
|
||||
if (!ce_stage(ce))
|
||||
continue;
|
||||
if (opts->force) {
|
||||
@@ -264,7 +264,7 @@ static int checkout_paths(struct tree *source_tree, const char **pathspec,
|
||||
state.refresh_cache = 1;
|
||||
for (pos = 0; pos < active_nr; pos++) {
|
||||
struct cache_entry *ce = active_cache[pos];
|
||||
if (pathspec_match(pathspec, NULL, ce->name, 0)) {
|
||||
if (match_pathspec(pathspec, ce->name, ce_namelen(ce), 0, NULL)) {
|
||||
if (!ce_stage(ce)) {
|
||||
errs |= checkout_entry(ce, &state, NULL);
|
||||
continue;
|
||||
|
||||
@@ -522,14 +522,23 @@ int cmd_clone(int argc, const char **argv, const char *prefix)
|
||||
option_upload_pack);
|
||||
|
||||
refs = transport_get_remote_refs(transport);
|
||||
transport_fetch_refs(transport, refs);
|
||||
if(refs)
|
||||
transport_fetch_refs(transport, refs);
|
||||
}
|
||||
|
||||
clear_extra_refs();
|
||||
if (refs) {
|
||||
clear_extra_refs();
|
||||
|
||||
mapped_refs = write_remote_refs(refs, &refspec, reflog_msg.buf);
|
||||
mapped_refs = write_remote_refs(refs, &refspec, reflog_msg.buf);
|
||||
|
||||
head_points_at = locate_head(refs, mapped_refs, &remote_head);
|
||||
head_points_at = locate_head(refs, mapped_refs, &remote_head);
|
||||
}
|
||||
else {
|
||||
warning("You appear to have cloned an empty repository.");
|
||||
head_points_at = NULL;
|
||||
remote_head = NULL;
|
||||
option_no_checkout = 1;
|
||||
}
|
||||
|
||||
if (head_points_at) {
|
||||
/* Local default branch link */
|
||||
|
||||
@@ -166,7 +166,7 @@ static int list_paths(struct string_list *list, const char *with_tree,
|
||||
struct cache_entry *ce = active_cache[i];
|
||||
if (ce->ce_flags & CE_UPDATE)
|
||||
continue;
|
||||
if (!pathspec_match(pattern, m, ce->name, 0))
|
||||
if (!match_pathspec(pattern, ce->name, ce_namelen(ce), 0, m))
|
||||
continue;
|
||||
string_list_insert(ce->name, list);
|
||||
}
|
||||
|
||||
@@ -36,42 +36,6 @@ static const char *tag_other = "";
|
||||
static const char *tag_killed = "";
|
||||
static const char *tag_modified = "";
|
||||
|
||||
|
||||
/*
|
||||
* Match a pathspec against a filename. The first "skiplen" characters
|
||||
* are the common prefix
|
||||
*/
|
||||
int pathspec_match(const char **spec, char *ps_matched,
|
||||
const char *filename, int skiplen)
|
||||
{
|
||||
const char *m;
|
||||
|
||||
while ((m = *spec++) != NULL) {
|
||||
int matchlen = strlen(m + skiplen);
|
||||
|
||||
if (!matchlen)
|
||||
goto matched;
|
||||
if (!strncmp(m + skiplen, filename + skiplen, matchlen)) {
|
||||
if (m[skiplen + matchlen - 1] == '/')
|
||||
goto matched;
|
||||
switch (filename[skiplen + matchlen]) {
|
||||
case '/': case '\0':
|
||||
goto matched;
|
||||
}
|
||||
}
|
||||
if (!fnmatch(m + skiplen, filename + skiplen, 0))
|
||||
goto matched;
|
||||
if (ps_matched)
|
||||
ps_matched++;
|
||||
continue;
|
||||
matched:
|
||||
if (ps_matched)
|
||||
*ps_matched = 1;
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void show_dir_entry(const char *tag, struct dir_entry *ent)
|
||||
{
|
||||
int len = prefix_len;
|
||||
@@ -80,7 +44,7 @@ static void show_dir_entry(const char *tag, struct dir_entry *ent)
|
||||
if (len >= ent->len)
|
||||
die("git ls-files: internal error - directory entry not superset of prefix");
|
||||
|
||||
if (pathspec && !pathspec_match(pathspec, ps_matched, ent->name, len))
|
||||
if (!match_pathspec(pathspec, ent->name, ent->len, len, ps_matched))
|
||||
return;
|
||||
|
||||
fputs(tag, stdout);
|
||||
@@ -156,7 +120,7 @@ static void show_ce_entry(const char *tag, struct cache_entry *ce)
|
||||
if (len >= ce_namelen(ce))
|
||||
die("git ls-files: internal error - cache entry not superset of prefix");
|
||||
|
||||
if (pathspec && !pathspec_match(pathspec, ps_matched, ce->name, len))
|
||||
if (!match_pathspec(pathspec, ce->name, ce_namelen(ce), len, ps_matched))
|
||||
return;
|
||||
|
||||
if (tag && *tag && show_valid_bit &&
|
||||
|
||||
2
bundle.c
2
bundle.c
@@ -266,6 +266,8 @@ int create_bundle(struct bundle_header *header, const char *path,
|
||||
return error("unrecognized argument: %s'", argv[i]);
|
||||
}
|
||||
|
||||
object_array_remove_duplicates(&revs.pending);
|
||||
|
||||
for (i = 0; i < revs.pending.nr; i++) {
|
||||
struct object_array_entry *e = revs.pending.objects + i;
|
||||
unsigned char sha1[20];
|
||||
|
||||
5
cache.h
5
cache.h
@@ -721,6 +721,10 @@ struct checkout {
|
||||
|
||||
extern int checkout_entry(struct cache_entry *ce, const struct checkout *state, char *topath);
|
||||
extern int has_symlink_leading_path(int len, const char *name);
|
||||
extern int has_symlink_or_noent_leading_path(int len, const char *name);
|
||||
extern int has_dirs_only_path(int len, const char *name, int prefix_len);
|
||||
extern void invalidate_lstat_cache(int len, const char *name);
|
||||
extern void clear_lstat_cache(void);
|
||||
|
||||
extern struct alternate_object_database {
|
||||
struct alternate_object_database *next;
|
||||
@@ -937,7 +941,6 @@ extern int ws_fix_copy(char *, const char *, int, unsigned, int *);
|
||||
extern int ws_blank_line(const char *line, int len, unsigned ws_rule);
|
||||
|
||||
/* ls-files */
|
||||
int pathspec_match(const char **spec, char *matched, const char *filename, int skiplen);
|
||||
int report_path_error(const char *ps_matched, const char **pathspec, int prefix_offset);
|
||||
void overlay_tree_on_cache(const char *tree_name, const char *prefix);
|
||||
|
||||
|
||||
28
color.c
28
color.c
@@ -202,3 +202,31 @@ int color_fprintf_ln(FILE *fp, const char *color, const char *fmt, ...)
|
||||
va_end(args);
|
||||
return r;
|
||||
}
|
||||
|
||||
/*
|
||||
* This function splits the buffer by newlines and colors the lines individually.
|
||||
*
|
||||
* Returns 0 on success.
|
||||
*/
|
||||
int color_fwrite_lines(FILE *fp, const char *color,
|
||||
size_t count, const char *buf)
|
||||
{
|
||||
if (!*color)
|
||||
return fwrite(buf, count, 1, fp) != 1;
|
||||
while (count) {
|
||||
char *p = memchr(buf, '\n', count);
|
||||
if (p != buf && (fputs(color, fp) < 0 ||
|
||||
fwrite(buf, p ? p - buf : count, 1, fp) != 1 ||
|
||||
fputs(COLOR_RESET, fp) < 0))
|
||||
return -1;
|
||||
if (!p)
|
||||
return 0;
|
||||
if (fputc('\n', fp) < 0)
|
||||
return -1;
|
||||
count -= p + 1 - buf;
|
||||
buf = p + 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
1
color.h
1
color.h
@@ -20,5 +20,6 @@ void color_parse(const char *value, const char *var, char *dst);
|
||||
void color_parse_mem(const char *value, int len, const char *var, char *dst);
|
||||
int color_fprintf(FILE *fp, const char *color, const char *fmt, ...);
|
||||
int color_fprintf_ln(FILE *fp, const char *color, const char *fmt, ...);
|
||||
int color_fwrite_lines(FILE *fp, const char *color, size_t count, const char *buf);
|
||||
|
||||
#endif /* COLOR_H */
|
||||
|
||||
@@ -783,6 +783,7 @@ __git_diff_common_options="--stat --numstat --shortstat --summary
|
||||
--no-ext-diff
|
||||
--no-prefix --src-prefix= --dst-prefix=
|
||||
--inter-hunk-context=
|
||||
--patience
|
||||
--raw
|
||||
"
|
||||
|
||||
|
||||
241
diff.c
241
diff.c
@@ -23,6 +23,7 @@ static int diff_detect_rename_default;
|
||||
static int diff_rename_limit_default = 200;
|
||||
static int diff_suppress_blank_empty;
|
||||
int diff_use_color_default = -1;
|
||||
static const char *diff_word_regex_cfg;
|
||||
static const char *external_diff_cmd_cfg;
|
||||
int diff_auto_refresh_index = 1;
|
||||
static int diff_mnemonic_prefix;
|
||||
@@ -92,6 +93,8 @@ int git_diff_ui_config(const char *var, const char *value, void *cb)
|
||||
}
|
||||
if (!strcmp(var, "diff.external"))
|
||||
return git_config_string(&external_diff_cmd_cfg, var, value);
|
||||
if (!strcmp(var, "diff.wordregex"))
|
||||
return git_config_string(&diff_word_regex_cfg, var, value);
|
||||
|
||||
return git_diff_basic_config(var, value, cb);
|
||||
}
|
||||
@@ -321,82 +324,138 @@ static int fill_mmfile(mmfile_t *mf, struct diff_filespec *one)
|
||||
struct diff_words_buffer {
|
||||
mmfile_t text;
|
||||
long alloc;
|
||||
long current; /* output pointer */
|
||||
int suppressed_newline;
|
||||
struct diff_words_orig {
|
||||
const char *begin, *end;
|
||||
} *orig;
|
||||
int orig_nr, orig_alloc;
|
||||
};
|
||||
|
||||
static void diff_words_append(char *line, unsigned long len,
|
||||
struct diff_words_buffer *buffer)
|
||||
{
|
||||
if (buffer->text.size + len > buffer->alloc) {
|
||||
buffer->alloc = (buffer->text.size + len) * 3 / 2;
|
||||
buffer->text.ptr = xrealloc(buffer->text.ptr, buffer->alloc);
|
||||
}
|
||||
ALLOC_GROW(buffer->text.ptr, buffer->text.size + len, buffer->alloc);
|
||||
line++;
|
||||
len--;
|
||||
memcpy(buffer->text.ptr + buffer->text.size, line, len);
|
||||
buffer->text.size += len;
|
||||
buffer->text.ptr[buffer->text.size] = '\0';
|
||||
}
|
||||
|
||||
struct diff_words_data {
|
||||
struct diff_words_buffer minus, plus;
|
||||
const char *current_plus;
|
||||
FILE *file;
|
||||
regex_t *word_regex;
|
||||
};
|
||||
|
||||
static void print_word(FILE *file, struct diff_words_buffer *buffer, int len, int color,
|
||||
int suppress_newline)
|
||||
{
|
||||
const char *ptr;
|
||||
int eol = 0;
|
||||
|
||||
if (len == 0)
|
||||
return;
|
||||
|
||||
ptr = buffer->text.ptr + buffer->current;
|
||||
buffer->current += len;
|
||||
|
||||
if (ptr[len - 1] == '\n') {
|
||||
eol = 1;
|
||||
len--;
|
||||
}
|
||||
|
||||
fputs(diff_get_color(1, color), file);
|
||||
fwrite(ptr, len, 1, file);
|
||||
fputs(diff_get_color(1, DIFF_RESET), file);
|
||||
|
||||
if (eol) {
|
||||
if (suppress_newline)
|
||||
buffer->suppressed_newline = 1;
|
||||
else
|
||||
putc('\n', file);
|
||||
}
|
||||
}
|
||||
|
||||
static void fn_out_diff_words_aux(void *priv, char *line, unsigned long len)
|
||||
{
|
||||
struct diff_words_data *diff_words = priv;
|
||||
int minus_first, minus_len, plus_first, plus_len;
|
||||
const char *minus_begin, *minus_end, *plus_begin, *plus_end;
|
||||
|
||||
if (diff_words->minus.suppressed_newline) {
|
||||
if (line[0] != '+')
|
||||
putc('\n', diff_words->file);
|
||||
diff_words->minus.suppressed_newline = 0;
|
||||
if (line[0] != '@' || parse_hunk_header(line, len,
|
||||
&minus_first, &minus_len, &plus_first, &plus_len))
|
||||
return;
|
||||
|
||||
/* POSIX requires that first be decremented by one if len == 0... */
|
||||
if (minus_len) {
|
||||
minus_begin = diff_words->minus.orig[minus_first].begin;
|
||||
minus_end =
|
||||
diff_words->minus.orig[minus_first + minus_len - 1].end;
|
||||
} else
|
||||
minus_begin = minus_end =
|
||||
diff_words->minus.orig[minus_first].end;
|
||||
|
||||
if (plus_len) {
|
||||
plus_begin = diff_words->plus.orig[plus_first].begin;
|
||||
plus_end = diff_words->plus.orig[plus_first + plus_len - 1].end;
|
||||
} else
|
||||
plus_begin = plus_end = diff_words->plus.orig[plus_first].end;
|
||||
|
||||
if (diff_words->current_plus != plus_begin)
|
||||
fwrite(diff_words->current_plus,
|
||||
plus_begin - diff_words->current_plus, 1,
|
||||
diff_words->file);
|
||||
if (minus_begin != minus_end)
|
||||
color_fwrite_lines(diff_words->file,
|
||||
diff_get_color(1, DIFF_FILE_OLD),
|
||||
minus_end - minus_begin, minus_begin);
|
||||
if (plus_begin != plus_end)
|
||||
color_fwrite_lines(diff_words->file,
|
||||
diff_get_color(1, DIFF_FILE_NEW),
|
||||
plus_end - plus_begin, plus_begin);
|
||||
|
||||
diff_words->current_plus = plus_end;
|
||||
}
|
||||
|
||||
/* This function starts looking at *begin, and returns 0 iff a word was found. */
|
||||
static int find_word_boundaries(mmfile_t *buffer, regex_t *word_regex,
|
||||
int *begin, int *end)
|
||||
{
|
||||
if (word_regex && *begin < buffer->size) {
|
||||
regmatch_t match[1];
|
||||
if (!regexec(word_regex, buffer->ptr + *begin, 1, match, 0)) {
|
||||
char *p = memchr(buffer->ptr + *begin + match[0].rm_so,
|
||||
'\n', match[0].rm_eo - match[0].rm_so);
|
||||
*end = p ? p - buffer->ptr : match[0].rm_eo + *begin;
|
||||
*begin += match[0].rm_so;
|
||||
return *begin >= *end;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
len--;
|
||||
switch (line[0]) {
|
||||
case '-':
|
||||
print_word(diff_words->file,
|
||||
&diff_words->minus, len, DIFF_FILE_OLD, 1);
|
||||
break;
|
||||
case '+':
|
||||
print_word(diff_words->file,
|
||||
&diff_words->plus, len, DIFF_FILE_NEW, 0);
|
||||
break;
|
||||
case ' ':
|
||||
print_word(diff_words->file,
|
||||
&diff_words->plus, len, DIFF_PLAIN, 0);
|
||||
diff_words->minus.current += len;
|
||||
break;
|
||||
/* find the next word */
|
||||
while (*begin < buffer->size && isspace(buffer->ptr[*begin]))
|
||||
(*begin)++;
|
||||
if (*begin >= buffer->size)
|
||||
return -1;
|
||||
|
||||
/* find the end of the word */
|
||||
*end = *begin + 1;
|
||||
while (*end < buffer->size && !isspace(buffer->ptr[*end]))
|
||||
(*end)++;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/*
|
||||
* This function splits the words in buffer->text, stores the list with
|
||||
* newline separator into out, and saves the offsets of the original words
|
||||
* in buffer->orig.
|
||||
*/
|
||||
static void diff_words_fill(struct diff_words_buffer *buffer, mmfile_t *out,
|
||||
regex_t *word_regex)
|
||||
{
|
||||
int i, j;
|
||||
long alloc = 0;
|
||||
|
||||
out->size = 0;
|
||||
out->ptr = NULL;
|
||||
|
||||
/* fake an empty "0th" word */
|
||||
ALLOC_GROW(buffer->orig, 1, buffer->orig_alloc);
|
||||
buffer->orig[0].begin = buffer->orig[0].end = buffer->text.ptr;
|
||||
buffer->orig_nr = 1;
|
||||
|
||||
for (i = 0; i < buffer->text.size; i++) {
|
||||
if (find_word_boundaries(&buffer->text, word_regex, &i, &j))
|
||||
return;
|
||||
|
||||
/* store original boundaries */
|
||||
ALLOC_GROW(buffer->orig, buffer->orig_nr + 1,
|
||||
buffer->orig_alloc);
|
||||
buffer->orig[buffer->orig_nr].begin = buffer->text.ptr + i;
|
||||
buffer->orig[buffer->orig_nr].end = buffer->text.ptr + j;
|
||||
buffer->orig_nr++;
|
||||
|
||||
/* store one word */
|
||||
ALLOC_GROW(out->ptr, out->size + j - i + 1, alloc);
|
||||
memcpy(out->ptr + out->size, buffer->text.ptr + i, j - i);
|
||||
out->ptr[out->size + j - i] = '\n';
|
||||
out->size += j - i + 1;
|
||||
|
||||
i = j - 1;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -407,38 +466,36 @@ static void diff_words_show(struct diff_words_data *diff_words)
|
||||
xdemitconf_t xecfg;
|
||||
xdemitcb_t ecb;
|
||||
mmfile_t minus, plus;
|
||||
int i;
|
||||
|
||||
/* special case: only removal */
|
||||
if (!diff_words->plus.text.size) {
|
||||
color_fwrite_lines(diff_words->file,
|
||||
diff_get_color(1, DIFF_FILE_OLD),
|
||||
diff_words->minus.text.size, diff_words->minus.text.ptr);
|
||||
diff_words->minus.text.size = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
diff_words->current_plus = diff_words->plus.text.ptr;
|
||||
|
||||
memset(&xpp, 0, sizeof(xpp));
|
||||
memset(&xecfg, 0, sizeof(xecfg));
|
||||
minus.size = diff_words->minus.text.size;
|
||||
minus.ptr = xmalloc(minus.size);
|
||||
memcpy(minus.ptr, diff_words->minus.text.ptr, minus.size);
|
||||
for (i = 0; i < minus.size; i++)
|
||||
if (isspace(minus.ptr[i]))
|
||||
minus.ptr[i] = '\n';
|
||||
diff_words->minus.current = 0;
|
||||
|
||||
plus.size = diff_words->plus.text.size;
|
||||
plus.ptr = xmalloc(plus.size);
|
||||
memcpy(plus.ptr, diff_words->plus.text.ptr, plus.size);
|
||||
for (i = 0; i < plus.size; i++)
|
||||
if (isspace(plus.ptr[i]))
|
||||
plus.ptr[i] = '\n';
|
||||
diff_words->plus.current = 0;
|
||||
|
||||
diff_words_fill(&diff_words->minus, &minus, diff_words->word_regex);
|
||||
diff_words_fill(&diff_words->plus, &plus, diff_words->word_regex);
|
||||
xpp.flags = XDF_NEED_MINIMAL;
|
||||
xecfg.ctxlen = diff_words->minus.alloc + diff_words->plus.alloc;
|
||||
/* as only the hunk header will be parsed, we need a 0-context */
|
||||
xecfg.ctxlen = 0;
|
||||
xdi_diff_outf(&minus, &plus, fn_out_diff_words_aux, diff_words,
|
||||
&xpp, &xecfg, &ecb);
|
||||
free(minus.ptr);
|
||||
free(plus.ptr);
|
||||
if (diff_words->current_plus != diff_words->plus.text.ptr +
|
||||
diff_words->plus.text.size)
|
||||
fwrite(diff_words->current_plus,
|
||||
diff_words->plus.text.ptr + diff_words->plus.text.size
|
||||
- diff_words->current_plus, 1,
|
||||
diff_words->file);
|
||||
diff_words->minus.text.size = diff_words->plus.text.size = 0;
|
||||
|
||||
if (diff_words->minus.suppressed_newline) {
|
||||
putc('\n', diff_words->file);
|
||||
diff_words->minus.suppressed_newline = 0;
|
||||
}
|
||||
}
|
||||
|
||||
typedef unsigned long (*sane_truncate_fn)(char *line, unsigned long len);
|
||||
@@ -462,7 +519,10 @@ static void free_diff_words_data(struct emit_callback *ecbdata)
|
||||
diff_words_show(ecbdata->diff_words);
|
||||
|
||||
free (ecbdata->diff_words->minus.text.ptr);
|
||||
free (ecbdata->diff_words->minus.orig);
|
||||
free (ecbdata->diff_words->plus.text.ptr);
|
||||
free (ecbdata->diff_words->plus.orig);
|
||||
free(ecbdata->diff_words->word_regex);
|
||||
free(ecbdata->diff_words);
|
||||
ecbdata->diff_words = NULL;
|
||||
}
|
||||
@@ -1325,6 +1385,12 @@ static const struct userdiff_funcname *diff_funcname_pattern(struct diff_filespe
|
||||
return one->driver->funcname.pattern ? &one->driver->funcname : NULL;
|
||||
}
|
||||
|
||||
static const char *userdiff_word_regex(struct diff_filespec *one)
|
||||
{
|
||||
diff_filespec_load_driver(one);
|
||||
return one->driver->word_regex;
|
||||
}
|
||||
|
||||
void diff_set_mnemonic_prefix(struct diff_options *options, const char *a, const char *b)
|
||||
{
|
||||
if (!options->a_prefix)
|
||||
@@ -1485,6 +1551,21 @@ static void builtin_diff(const char *name_a,
|
||||
ecbdata.diff_words =
|
||||
xcalloc(1, sizeof(struct diff_words_data));
|
||||
ecbdata.diff_words->file = o->file;
|
||||
if (!o->word_regex)
|
||||
o->word_regex = userdiff_word_regex(one);
|
||||
if (!o->word_regex)
|
||||
o->word_regex = userdiff_word_regex(two);
|
||||
if (!o->word_regex)
|
||||
o->word_regex = diff_word_regex_cfg;
|
||||
if (o->word_regex) {
|
||||
ecbdata.diff_words->word_regex = (regex_t *)
|
||||
xmalloc(sizeof(regex_t));
|
||||
if (regcomp(ecbdata.diff_words->word_regex,
|
||||
o->word_regex,
|
||||
REG_EXTENDED | REG_NEWLINE))
|
||||
die ("Invalid regular expression: %s",
|
||||
o->word_regex);
|
||||
}
|
||||
}
|
||||
xdi_diff_outf(&mf1, &mf2, fn_out_consume, &ecbdata,
|
||||
&xpp, &xecfg, &ecb);
|
||||
@@ -2474,6 +2555,8 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac)
|
||||
options->xdl_opts |= XDF_IGNORE_WHITESPACE_CHANGE;
|
||||
else if (!strcmp(arg, "--ignore-space-at-eol"))
|
||||
options->xdl_opts |= XDF_IGNORE_WHITESPACE_AT_EOL;
|
||||
else if (!strcmp(arg, "--patience"))
|
||||
options->xdl_opts |= XDF_PATIENCE_DIFF;
|
||||
|
||||
/* flags options */
|
||||
else if (!strcmp(arg, "--binary")) {
|
||||
@@ -2496,6 +2579,10 @@ int diff_opt_parse(struct diff_options *options, const char **av, int ac)
|
||||
DIFF_OPT_CLR(options, COLOR_DIFF);
|
||||
else if (!strcmp(arg, "--color-words"))
|
||||
options->flags |= DIFF_OPT_COLOR_DIFF | DIFF_OPT_COLOR_DIFF_WORDS;
|
||||
else if (!prefixcmp(arg, "--color-words=")) {
|
||||
options->flags |= DIFF_OPT_COLOR_DIFF | DIFF_OPT_COLOR_DIFF_WORDS;
|
||||
options->word_regex = arg + 14;
|
||||
}
|
||||
else if (!strcmp(arg, "--exit-code"))
|
||||
DIFF_OPT_SET(options, EXIT_WITH_STATUS);
|
||||
else if (!strcmp(arg, "--quiet"))
|
||||
|
||||
1
diff.h
1
diff.h
@@ -98,6 +98,7 @@ struct diff_options {
|
||||
|
||||
int stat_width;
|
||||
int stat_name_width;
|
||||
const char *word_regex;
|
||||
|
||||
/* this is set by diffcore for DIFF_FORMAT_PATCH */
|
||||
int found_changes;
|
||||
|
||||
19
dir.c
19
dir.c
@@ -108,25 +108,28 @@ static int match_one(const char *match, const char *name, int namelen)
|
||||
* and a mark is left in seen[] array for pathspec element that
|
||||
* actually matched anything.
|
||||
*/
|
||||
int match_pathspec(const char **pathspec, const char *name, int namelen, int prefix, char *seen)
|
||||
int match_pathspec(const char **pathspec, const char *name, int namelen,
|
||||
int prefix, char *seen)
|
||||
{
|
||||
int retval;
|
||||
const char *match;
|
||||
int i, retval = 0;
|
||||
|
||||
if (!pathspec)
|
||||
return 1;
|
||||
|
||||
name += prefix;
|
||||
namelen -= prefix;
|
||||
|
||||
for (retval = 0; (match = *pathspec++) != NULL; seen++) {
|
||||
for (i = 0; pathspec[i] != NULL; i++) {
|
||||
int how;
|
||||
if (retval && *seen == MATCHED_EXACTLY)
|
||||
const char *match = pathspec[i] + prefix;
|
||||
if (seen && seen[i] == MATCHED_EXACTLY)
|
||||
continue;
|
||||
match += prefix;
|
||||
how = match_one(match, name, namelen);
|
||||
if (how) {
|
||||
if (retval < how)
|
||||
retval = how;
|
||||
if (*seen < how)
|
||||
*seen = how;
|
||||
if (seen && seen[i] < how)
|
||||
seen[i] = how;
|
||||
}
|
||||
}
|
||||
return retval;
|
||||
|
||||
34
entry.c
34
entry.c
@@ -9,35 +9,25 @@ static void create_directories(const char *path, const struct checkout *state)
|
||||
const char *slash = path;
|
||||
|
||||
while ((slash = strchr(slash+1, '/')) != NULL) {
|
||||
struct stat st;
|
||||
int stat_status;
|
||||
|
||||
len = slash - path;
|
||||
memcpy(buf, path, len);
|
||||
buf[len] = 0;
|
||||
|
||||
if (len <= state->base_dir_len)
|
||||
/*
|
||||
* checkout-index --prefix=<dir>; <dir> is
|
||||
* allowed to be a symlink to an existing
|
||||
* directory.
|
||||
*/
|
||||
stat_status = stat(buf, &st);
|
||||
else
|
||||
/*
|
||||
* if there currently is a symlink, we would
|
||||
* want to replace it with a real directory.
|
||||
*/
|
||||
stat_status = lstat(buf, &st);
|
||||
|
||||
if (!stat_status && S_ISDIR(st.st_mode))
|
||||
/*
|
||||
* For 'checkout-index --prefix=<dir>', <dir> is
|
||||
* allowed to be a symlink to an existing directory,
|
||||
* and we set 'state->base_dir_len' below, such that
|
||||
* we test the path components of the prefix with the
|
||||
* stat() function instead of the lstat() function.
|
||||
*/
|
||||
if (has_dirs_only_path(len, buf, state->base_dir_len))
|
||||
continue; /* ok, it is already a directory. */
|
||||
|
||||
/*
|
||||
* We know stat_status == 0 means something exists
|
||||
* there and this mkdir would fail, but that is an
|
||||
* error codepath; we do not care, as we unlink and
|
||||
* mkdir again in such a case.
|
||||
* If this mkdir() would fail, it could be that there
|
||||
* is already a symlink or something else exists
|
||||
* there, therefore we then try to unlink it and try
|
||||
* one more time to create the directory.
|
||||
*/
|
||||
if (mkdir(buf, 0777)) {
|
||||
if (errno == EEXIST && state->force &&
|
||||
|
||||
@@ -19,6 +19,7 @@ whitespace= pass it through git-apply
|
||||
directory= pass it through git-apply
|
||||
C= pass it through git-apply
|
||||
p= pass it through git-apply
|
||||
reject pass it through git-apply
|
||||
resolvemsg= override error message when patch failure occurs
|
||||
r,resolved to be used after a patch failure
|
||||
skip skip the current patch
|
||||
@@ -168,6 +169,8 @@ do
|
||||
git_apply_opt="$git_apply_opt $(sq "$1=$2")"; shift ;;
|
||||
-C|-p)
|
||||
git_apply_opt="$git_apply_opt $(sq "$1$2")"; shift ;;
|
||||
--reject)
|
||||
git_apply_opt="$git_apply_opt $1" ;;
|
||||
--)
|
||||
shift; break ;;
|
||||
*)
|
||||
|
||||
@@ -456,7 +456,7 @@ get_saved_options () {
|
||||
test -d "$REWRITTEN" && PRESERVE_MERGES=t
|
||||
test -f "$DOTEST"/strategy && STRATEGY="$(cat "$DOTEST"/strategy)"
|
||||
test -f "$DOTEST"/verbose && VERBOSE=t
|
||||
test ! -s "$DOTEST"/upstream && REBASE_ROOT=t
|
||||
test -f "$DOTEST"/rebase-root && REBASE_ROOT=t
|
||||
}
|
||||
|
||||
while test $# != 0
|
||||
@@ -571,7 +571,8 @@ first and then run 'git rebase --continue' again."
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
test ! -z "$REBASE_ROOT" -o $# -eq 1 -o $# -eq 2 || usage
|
||||
test -z "$REBASE_ROOT" -a $# -ge 1 -a $# -le 2 ||
|
||||
test ! -z "$REBASE_ROOT" -a $# -le 1 || usage
|
||||
test -d "$DOTEST" &&
|
||||
die "Interactive rebase already started"
|
||||
|
||||
@@ -585,6 +586,7 @@ first and then run 'git rebase --continue' again."
|
||||
test -z "$ONTO" && ONTO=$UPSTREAM
|
||||
shift
|
||||
else
|
||||
UPSTREAM=
|
||||
UPSTREAM_ARG=--root
|
||||
test -z "$ONTO" &&
|
||||
die "You must specify --onto when using --root"
|
||||
@@ -611,7 +613,12 @@ first and then run 'git rebase --continue' again."
|
||||
echo "detached HEAD" > "$DOTEST"/head-name
|
||||
|
||||
echo $HEAD > "$DOTEST"/head
|
||||
echo $UPSTREAM > "$DOTEST"/upstream
|
||||
case "$REBASE_ROOT" in
|
||||
'')
|
||||
rm -f "$DOTEST"/rebase-root ;;
|
||||
*)
|
||||
: >"$DOTEST"/rebase-root ;;
|
||||
esac
|
||||
echo $ONTO > "$DOTEST"/onto
|
||||
test -z "$STRATEGY" || echo "$STRATEGY" > "$DOTEST"/strategy
|
||||
test t = "$VERBOSE" && : > "$DOTEST"/verbose
|
||||
|
||||
36
git-svn.perl
36
git-svn.perl
@@ -70,7 +70,8 @@ my ($_stdin, $_help, $_edit,
|
||||
$Git::SVN::_follow_parent = 1;
|
||||
my %remote_opts = ( 'username=s' => \$Git::SVN::Prompt::_username,
|
||||
'config-dir=s' => \$Git::SVN::Ra::config_dir,
|
||||
'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache );
|
||||
'no-auth-cache' => \$Git::SVN::Prompt::_no_auth_cache,
|
||||
'ignore-paths=s' => \$SVN::Git::Fetcher::_ignore_regex );
|
||||
my %fc_opts = ( 'follow-parent|follow!' => \$Git::SVN::_follow_parent,
|
||||
'authors-file|A=s' => \$_authors,
|
||||
'repack:i' => \$Git::SVN::_repack,
|
||||
@@ -3245,6 +3246,7 @@ use warnings;
|
||||
use Carp qw/croak/;
|
||||
use File::Temp qw/tempfile/;
|
||||
use IO::File qw//;
|
||||
use vars qw/$_ignore_regex/;
|
||||
|
||||
# file baton members: path, mode_a, mode_b, pool, fh, blob, base
|
||||
sub new {
|
||||
@@ -3297,6 +3299,15 @@ sub in_dot_git {
|
||||
$_[0] =~ m{(?:^|/)\.git(?:/|$)};
|
||||
}
|
||||
|
||||
# return value: 0 -- don't ignore, 1 -- ignore
|
||||
sub is_path_ignored {
|
||||
my ($path) = @_;
|
||||
return 1 if in_dot_git($path);
|
||||
return 0 unless defined($_ignore_regex);
|
||||
return 1 if $path =~ m!$_ignore_regex!o;
|
||||
return 0;
|
||||
}
|
||||
|
||||
sub set_path_strip {
|
||||
my ($self, $path) = @_;
|
||||
$self->{path_strip} = qr/^\Q$path\E(\/|$)/ if length $path;
|
||||
@@ -3322,7 +3333,7 @@ sub git_path {
|
||||
|
||||
sub delete_entry {
|
||||
my ($self, $path, $rev, $pb) = @_;
|
||||
return undef if in_dot_git($path);
|
||||
return undef if is_path_ignored($path);
|
||||
|
||||
my $gpath = $self->git_path($path);
|
||||
return undef if ($gpath eq '');
|
||||
@@ -3352,7 +3363,7 @@ sub open_file {
|
||||
my ($self, $path, $pb, $rev) = @_;
|
||||
my ($mode, $blob);
|
||||
|
||||
goto out if in_dot_git($path);
|
||||
goto out if is_path_ignored($path);
|
||||
|
||||
my $gpath = $self->git_path($path);
|
||||
($mode, $blob) = (command('ls-tree', $self->{c}, '--', $gpath)
|
||||
@@ -3372,7 +3383,7 @@ sub add_file {
|
||||
my ($self, $path, $pb, $cp_path, $cp_rev) = @_;
|
||||
my $mode;
|
||||
|
||||
if (!in_dot_git($path)) {
|
||||
if (!is_path_ignored($path)) {
|
||||
my ($dir, $file) = ($path =~ m#^(.*?)/?([^/]+)$#);
|
||||
delete $self->{empty}->{$dir};
|
||||
$mode = '100644';
|
||||
@@ -3383,7 +3394,7 @@ sub add_file {
|
||||
|
||||
sub add_directory {
|
||||
my ($self, $path, $cp_path, $cp_rev) = @_;
|
||||
goto out if in_dot_git($path);
|
||||
goto out if is_path_ignored($path);
|
||||
my $gpath = $self->git_path($path);
|
||||
if ($gpath eq '') {
|
||||
my ($ls, $ctx) = command_output_pipe(qw/ls-tree
|
||||
@@ -3407,7 +3418,7 @@ out:
|
||||
|
||||
sub change_dir_prop {
|
||||
my ($self, $db, $prop, $value) = @_;
|
||||
return undef if in_dot_git($db->{path});
|
||||
return undef if is_path_ignored($db->{path});
|
||||
$self->{dir_prop}->{$db->{path}} ||= {};
|
||||
$self->{dir_prop}->{$db->{path}}->{$prop} = $value;
|
||||
undef;
|
||||
@@ -3415,7 +3426,7 @@ sub change_dir_prop {
|
||||
|
||||
sub absent_directory {
|
||||
my ($self, $path, $pb) = @_;
|
||||
return undef if in_dot_git($pb->{path});
|
||||
return undef if is_path_ignored($path);
|
||||
$self->{absent_dir}->{$pb->{path}} ||= [];
|
||||
push @{$self->{absent_dir}->{$pb->{path}}}, $path;
|
||||
undef;
|
||||
@@ -3423,7 +3434,7 @@ sub absent_directory {
|
||||
|
||||
sub absent_file {
|
||||
my ($self, $path, $pb) = @_;
|
||||
return undef if in_dot_git($pb->{path});
|
||||
return undef if is_path_ignored($path);
|
||||
$self->{absent_file}->{$pb->{path}} ||= [];
|
||||
push @{$self->{absent_file}->{$pb->{path}}}, $path;
|
||||
undef;
|
||||
@@ -3431,7 +3442,7 @@ sub absent_file {
|
||||
|
||||
sub change_file_prop {
|
||||
my ($self, $fb, $prop, $value) = @_;
|
||||
return undef if in_dot_git($fb->{path});
|
||||
return undef if is_path_ignored($fb->{path});
|
||||
if ($prop eq 'svn:executable') {
|
||||
if ($fb->{mode_b} != 120000) {
|
||||
$fb->{mode_b} = defined $value ? 100755 : 100644;
|
||||
@@ -3447,7 +3458,7 @@ sub change_file_prop {
|
||||
|
||||
sub apply_textdelta {
|
||||
my ($self, $fb, $exp) = @_;
|
||||
return undef if (in_dot_git($fb->{path}));
|
||||
return undef if is_path_ignored($fb->{path});
|
||||
my $fh = $::_repository->temp_acquire('svn_delta');
|
||||
# $fh gets auto-closed() by SVN::TxDelta::apply(),
|
||||
# (but $base does not,) so dup() it for reading in close_file
|
||||
@@ -3494,7 +3505,7 @@ sub apply_textdelta {
|
||||
|
||||
sub close_file {
|
||||
my ($self, $fb, $exp) = @_;
|
||||
return undef if (in_dot_git($fb->{path}));
|
||||
return undef if is_path_ignored($fb->{path});
|
||||
|
||||
my $hash;
|
||||
my $path = $self->git_path($fb->{path});
|
||||
@@ -4021,7 +4032,8 @@ my ($ra_invalid, $can_do_switch, %ignored_err, $RA);
|
||||
BEGIN {
|
||||
# enforce temporary pool usage for some simple functions
|
||||
no strict 'refs';
|
||||
for my $f (qw/rev_proplist get_latest_revnum get_uuid get_repos_root/) {
|
||||
for my $f (qw/rev_proplist get_latest_revnum get_uuid get_repos_root
|
||||
get_file/) {
|
||||
my $SUPER = "SUPER::$f";
|
||||
*$f = sub {
|
||||
my $self = shift;
|
||||
|
||||
69
http-push.c
69
http-push.c
@@ -177,6 +177,38 @@ struct remote_ls_ctx
|
||||
struct remote_ls_ctx *parent;
|
||||
};
|
||||
|
||||
/* get_dav_token_headers options */
|
||||
enum dav_header_flag {
|
||||
DAV_HEADER_IF = (1u << 0),
|
||||
DAV_HEADER_LOCK = (1u << 1),
|
||||
DAV_HEADER_TIMEOUT = (1u << 2)
|
||||
};
|
||||
|
||||
static struct curl_slist *get_dav_token_headers(struct remote_lock *lock, enum dav_header_flag options)
|
||||
{
|
||||
struct strbuf buf = STRBUF_INIT;
|
||||
struct curl_slist *dav_headers = NULL;
|
||||
|
||||
if (options & DAV_HEADER_IF) {
|
||||
strbuf_addf(&buf, "If: (<%s>)", lock->token);
|
||||
dav_headers = curl_slist_append(dav_headers, buf.buf);
|
||||
strbuf_reset(&buf);
|
||||
}
|
||||
if (options & DAV_HEADER_LOCK) {
|
||||
strbuf_addf(&buf, "Lock-Token: <%s>", lock->token);
|
||||
dav_headers = curl_slist_append(dav_headers, buf.buf);
|
||||
strbuf_reset(&buf);
|
||||
}
|
||||
if (options & DAV_HEADER_TIMEOUT) {
|
||||
strbuf_addf(&buf, "Timeout: Second-%ld", lock->timeout);
|
||||
dav_headers = curl_slist_append(dav_headers, buf.buf);
|
||||
strbuf_reset(&buf);
|
||||
}
|
||||
strbuf_release(&buf);
|
||||
|
||||
return dav_headers;
|
||||
}
|
||||
|
||||
static void finish_request(struct transfer_request *request);
|
||||
static void release_request(struct transfer_request *request);
|
||||
|
||||
@@ -588,18 +620,12 @@ static int refresh_lock(struct remote_lock *lock)
|
||||
{
|
||||
struct active_request_slot *slot;
|
||||
struct slot_results results;
|
||||
char *if_header;
|
||||
char timeout_header[25];
|
||||
struct curl_slist *dav_headers = NULL;
|
||||
struct curl_slist *dav_headers;
|
||||
int rc = 0;
|
||||
|
||||
lock->refreshing = 1;
|
||||
|
||||
if_header = xmalloc(strlen(lock->token) + 25);
|
||||
sprintf(if_header, "If: (<%s>)", lock->token);
|
||||
sprintf(timeout_header, "Timeout: Second-%ld", lock->timeout);
|
||||
dav_headers = curl_slist_append(dav_headers, if_header);
|
||||
dav_headers = curl_slist_append(dav_headers, timeout_header);
|
||||
dav_headers = get_dav_token_headers(lock, DAV_HEADER_IF | DAV_HEADER_TIMEOUT);
|
||||
|
||||
slot = get_active_slot();
|
||||
slot->results = &results;
|
||||
@@ -622,7 +648,6 @@ static int refresh_lock(struct remote_lock *lock)
|
||||
|
||||
lock->refreshing = 0;
|
||||
curl_slist_free_all(dav_headers);
|
||||
free(if_header);
|
||||
|
||||
return rc;
|
||||
}
|
||||
@@ -1303,14 +1328,10 @@ static int unlock_remote(struct remote_lock *lock)
|
||||
struct active_request_slot *slot;
|
||||
struct slot_results results;
|
||||
struct remote_lock *prev = remote->locks;
|
||||
char *lock_token_header;
|
||||
struct curl_slist *dav_headers = NULL;
|
||||
struct curl_slist *dav_headers;
|
||||
int rc = 0;
|
||||
|
||||
lock_token_header = xmalloc(strlen(lock->token) + 31);
|
||||
sprintf(lock_token_header, "Lock-Token: <%s>",
|
||||
lock->token);
|
||||
dav_headers = curl_slist_append(dav_headers, lock_token_header);
|
||||
dav_headers = get_dav_token_headers(lock, DAV_HEADER_LOCK);
|
||||
|
||||
slot = get_active_slot();
|
||||
slot->results = &results;
|
||||
@@ -1331,7 +1352,6 @@ static int unlock_remote(struct remote_lock *lock)
|
||||
}
|
||||
|
||||
curl_slist_free_all(dav_headers);
|
||||
free(lock_token_header);
|
||||
|
||||
if (remote->locks == lock) {
|
||||
remote->locks = lock->next;
|
||||
@@ -1731,13 +1751,10 @@ static int update_remote(unsigned char *sha1, struct remote_lock *lock)
|
||||
{
|
||||
struct active_request_slot *slot;
|
||||
struct slot_results results;
|
||||
char *if_header;
|
||||
struct buffer out_buffer = { STRBUF_INIT, 0 };
|
||||
struct curl_slist *dav_headers = NULL;
|
||||
struct curl_slist *dav_headers;
|
||||
|
||||
if_header = xmalloc(strlen(lock->token) + 25);
|
||||
sprintf(if_header, "If: (<%s>)", lock->token);
|
||||
dav_headers = curl_slist_append(dav_headers, if_header);
|
||||
dav_headers = get_dav_token_headers(lock, DAV_HEADER_IF);
|
||||
|
||||
strbuf_addf(&out_buffer.buf, "%s\n", sha1_to_hex(sha1));
|
||||
|
||||
@@ -1756,7 +1773,6 @@ static int update_remote(unsigned char *sha1, struct remote_lock *lock)
|
||||
if (start_active_slot(slot)) {
|
||||
run_active_slot(slot);
|
||||
strbuf_release(&out_buffer.buf);
|
||||
free(if_header);
|
||||
if (results.curl_result != CURLE_OK) {
|
||||
fprintf(stderr,
|
||||
"PUT error: curl result=%d, HTTP code=%ld\n",
|
||||
@@ -1766,7 +1782,6 @@ static int update_remote(unsigned char *sha1, struct remote_lock *lock)
|
||||
}
|
||||
} else {
|
||||
strbuf_release(&out_buffer.buf);
|
||||
free(if_header);
|
||||
fprintf(stderr, "Unable to start PUT request\n");
|
||||
return 0;
|
||||
}
|
||||
@@ -1948,15 +1963,12 @@ static void update_remote_info_refs(struct remote_lock *lock)
|
||||
struct buffer buffer = { STRBUF_INIT, 0 };
|
||||
struct active_request_slot *slot;
|
||||
struct slot_results results;
|
||||
char *if_header;
|
||||
struct curl_slist *dav_headers = NULL;
|
||||
struct curl_slist *dav_headers;
|
||||
|
||||
remote_ls("refs/", (PROCESS_FILES | RECURSIVE),
|
||||
add_remote_info_ref, &buffer.buf);
|
||||
if (!aborted) {
|
||||
if_header = xmalloc(strlen(lock->token) + 25);
|
||||
sprintf(if_header, "If: (<%s>)", lock->token);
|
||||
dav_headers = curl_slist_append(dav_headers, if_header);
|
||||
dav_headers = get_dav_token_headers(lock, DAV_HEADER_IF);
|
||||
|
||||
slot = get_active_slot();
|
||||
slot->results = &results;
|
||||
@@ -1978,7 +1990,6 @@ static void update_remote_info_refs(struct remote_lock *lock)
|
||||
results.curl_result, results.http_code);
|
||||
}
|
||||
}
|
||||
free(if_header);
|
||||
}
|
||||
strbuf_release(&buffer.buf);
|
||||
}
|
||||
|
||||
19
object.c
19
object.c
@@ -268,3 +268,22 @@ void add_object_array_with_mode(struct object *obj, const char *name, struct obj
|
||||
objects[nr].mode = mode;
|
||||
array->nr = ++nr;
|
||||
}
|
||||
|
||||
void object_array_remove_duplicates(struct object_array *array)
|
||||
{
|
||||
int ref, src, dst;
|
||||
struct object_array_entry *objects = array->objects;
|
||||
|
||||
for (ref = 0; ref < array->nr - 1; ref++) {
|
||||
for (src = ref + 1, dst = src;
|
||||
src < array->nr;
|
||||
src++) {
|
||||
if (!strcmp(objects[ref].name, objects[src].name))
|
||||
continue;
|
||||
if (src != dst)
|
||||
objects[dst] = objects[src];
|
||||
dst++;
|
||||
}
|
||||
array->nr = dst;
|
||||
}
|
||||
}
|
||||
|
||||
1
object.h
1
object.h
@@ -82,5 +82,6 @@ int object_list_contains(struct object_list *list, struct object *obj);
|
||||
/* Object array handling .. */
|
||||
void add_object_array(struct object *obj, const char *name, struct object_array *array);
|
||||
void add_object_array_with_mode(struct object *obj, const char *name, struct object_array *array, unsigned mode);
|
||||
void object_array_remove_duplicates(struct object_array *);
|
||||
|
||||
#endif /* OBJECT_H */
|
||||
|
||||
@@ -1263,6 +1263,7 @@ int setup_revisions(int argc, const char **argv, struct rev_info *revs, const ch
|
||||
|
||||
if (!strcmp(arg, "--all")) {
|
||||
handle_refs(revs, flags, for_each_ref);
|
||||
handle_refs(revs, flags, head_ref);
|
||||
continue;
|
||||
}
|
||||
if (!strcmp(arg, "--branches")) {
|
||||
|
||||
283
symlinks.c
283
symlinks.c
@@ -1,64 +1,241 @@
|
||||
#include "cache.h"
|
||||
|
||||
struct pathname {
|
||||
static struct cache_def {
|
||||
char path[PATH_MAX + 1];
|
||||
int len;
|
||||
char path[PATH_MAX];
|
||||
};
|
||||
int flags;
|
||||
int track_flags;
|
||||
int prefix_len_stat_func;
|
||||
} cache;
|
||||
|
||||
/* Return matching pathname prefix length, or zero if not matching */
|
||||
static inline int match_pathname(int len, const char *name, struct pathname *match)
|
||||
/*
|
||||
* Returns the length (on a path component basis) of the longest
|
||||
* common prefix match of 'name' and the cached path string.
|
||||
*/
|
||||
static inline int longest_match_lstat_cache(int len, const char *name,
|
||||
int *previous_slash)
|
||||
{
|
||||
int match_len = match->len;
|
||||
return (len > match_len &&
|
||||
name[match_len] == '/' &&
|
||||
!memcmp(name, match->path, match_len)) ? match_len : 0;
|
||||
}
|
||||
int max_len, match_len = 0, match_len_prev = 0, i = 0;
|
||||
|
||||
static inline void set_pathname(int len, const char *name, struct pathname *match)
|
||||
{
|
||||
if (len < PATH_MAX) {
|
||||
match->len = len;
|
||||
memcpy(match->path, name, len);
|
||||
match->path[len] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
int has_symlink_leading_path(int len, const char *name)
|
||||
{
|
||||
static struct pathname link, nonlink;
|
||||
char path[PATH_MAX];
|
||||
struct stat st;
|
||||
char *sp;
|
||||
int known_dir;
|
||||
|
||||
/*
|
||||
* See if the last known symlink cache matches.
|
||||
*/
|
||||
if (match_pathname(len, name, &link))
|
||||
return 1;
|
||||
|
||||
/*
|
||||
* Get rid of the last known directory part
|
||||
*/
|
||||
known_dir = match_pathname(len, name, &nonlink);
|
||||
|
||||
while ((sp = strchr(name + known_dir + 1, '/')) != NULL) {
|
||||
int thislen = sp - name ;
|
||||
memcpy(path, name, thislen);
|
||||
path[thislen] = 0;
|
||||
|
||||
if (lstat(path, &st))
|
||||
return 0;
|
||||
if (S_ISDIR(st.st_mode)) {
|
||||
set_pathname(thislen, path, &nonlink);
|
||||
known_dir = thislen;
|
||||
continue;
|
||||
max_len = len < cache.len ? len : cache.len;
|
||||
while (i < max_len && name[i] == cache.path[i]) {
|
||||
if (name[i] == '/') {
|
||||
match_len_prev = match_len;
|
||||
match_len = i;
|
||||
}
|
||||
if (S_ISLNK(st.st_mode)) {
|
||||
set_pathname(thislen, path, &link);
|
||||
return 1;
|
||||
i++;
|
||||
}
|
||||
/* Is the cached path string a substring of 'name'? */
|
||||
if (i == cache.len && cache.len < len && name[cache.len] == '/') {
|
||||
match_len_prev = match_len;
|
||||
match_len = cache.len;
|
||||
/* Is 'name' a substring of the cached path string? */
|
||||
} else if ((i == len && len < cache.len && cache.path[len] == '/') ||
|
||||
(i == len && len == cache.len)) {
|
||||
match_len_prev = match_len;
|
||||
match_len = len;
|
||||
}
|
||||
*previous_slash = match_len_prev;
|
||||
return match_len;
|
||||
}
|
||||
|
||||
static inline void reset_lstat_cache(int track_flags, int prefix_len_stat_func)
|
||||
{
|
||||
cache.path[0] = '\0';
|
||||
cache.len = 0;
|
||||
cache.flags = 0;
|
||||
cache.track_flags = track_flags;
|
||||
cache.prefix_len_stat_func = prefix_len_stat_func;
|
||||
}
|
||||
|
||||
#define FL_DIR (1 << 0)
|
||||
#define FL_NOENT (1 << 1)
|
||||
#define FL_SYMLINK (1 << 2)
|
||||
#define FL_LSTATERR (1 << 3)
|
||||
#define FL_ERR (1 << 4)
|
||||
#define FL_FULLPATH (1 << 5)
|
||||
|
||||
/*
|
||||
* Check if name 'name' of length 'len' has a symlink leading
|
||||
* component, or if the directory exists and is real, or not.
|
||||
*
|
||||
* To speed up the check, some information is allowed to be cached.
|
||||
* This can be indicated by the 'track_flags' argument, which also can
|
||||
* be used to indicate that we should check the full path.
|
||||
*
|
||||
* The 'prefix_len_stat_func' parameter can be used to set the length
|
||||
* of the prefix, where the cache should use the stat() function
|
||||
* instead of the lstat() function to test each path component.
|
||||
*/
|
||||
static int lstat_cache(int len, const char *name,
|
||||
int track_flags, int prefix_len_stat_func)
|
||||
{
|
||||
int match_len, last_slash, last_slash_dir, previous_slash;
|
||||
int match_flags, ret_flags, save_flags, max_len, ret;
|
||||
struct stat st;
|
||||
|
||||
if (cache.track_flags != track_flags ||
|
||||
cache.prefix_len_stat_func != prefix_len_stat_func) {
|
||||
/*
|
||||
* As a safeguard we clear the cache if the values of
|
||||
* track_flags and/or prefix_len_stat_func does not
|
||||
* match with the last supplied values.
|
||||
*/
|
||||
reset_lstat_cache(track_flags, prefix_len_stat_func);
|
||||
match_len = last_slash = 0;
|
||||
} else {
|
||||
/*
|
||||
* Check to see if we have a match from the cache for
|
||||
* the 2 "excluding" path types.
|
||||
*/
|
||||
match_len = last_slash =
|
||||
longest_match_lstat_cache(len, name, &previous_slash);
|
||||
match_flags = cache.flags & track_flags & (FL_NOENT|FL_SYMLINK);
|
||||
if (match_flags && match_len == cache.len)
|
||||
return match_flags;
|
||||
/*
|
||||
* If we now have match_len > 0, we would know that
|
||||
* the matched part will always be a directory.
|
||||
*
|
||||
* Also, if we are tracking directories and 'name' is
|
||||
* a substring of the cache on a path component basis,
|
||||
* we can return immediately.
|
||||
*/
|
||||
match_flags = track_flags & FL_DIR;
|
||||
if (match_flags && len == match_len)
|
||||
return match_flags;
|
||||
}
|
||||
|
||||
/*
|
||||
* Okay, no match from the cache so far, so now we have to
|
||||
* check the rest of the path components.
|
||||
*/
|
||||
ret_flags = FL_DIR;
|
||||
last_slash_dir = last_slash;
|
||||
max_len = len < PATH_MAX ? len : PATH_MAX;
|
||||
while (match_len < max_len) {
|
||||
do {
|
||||
cache.path[match_len] = name[match_len];
|
||||
match_len++;
|
||||
} while (match_len < max_len && name[match_len] != '/');
|
||||
if (match_len >= max_len && !(track_flags & FL_FULLPATH))
|
||||
break;
|
||||
last_slash = match_len;
|
||||
cache.path[last_slash] = '\0';
|
||||
|
||||
if (last_slash <= prefix_len_stat_func)
|
||||
ret = stat(cache.path, &st);
|
||||
else
|
||||
ret = lstat(cache.path, &st);
|
||||
|
||||
if (ret) {
|
||||
ret_flags = FL_LSTATERR;
|
||||
if (errno == ENOENT)
|
||||
ret_flags |= FL_NOENT;
|
||||
} else if (S_ISDIR(st.st_mode)) {
|
||||
last_slash_dir = last_slash;
|
||||
continue;
|
||||
} else if (S_ISLNK(st.st_mode)) {
|
||||
ret_flags = FL_SYMLINK;
|
||||
} else {
|
||||
ret_flags = FL_ERR;
|
||||
}
|
||||
break;
|
||||
}
|
||||
return 0;
|
||||
|
||||
/*
|
||||
* At the end update the cache. Note that max 3 different
|
||||
* path types, FL_NOENT, FL_SYMLINK and FL_DIR, can be cached
|
||||
* for the moment!
|
||||
*/
|
||||
save_flags = ret_flags & track_flags & (FL_NOENT|FL_SYMLINK);
|
||||
if (save_flags && last_slash > 0 && last_slash <= PATH_MAX) {
|
||||
cache.path[last_slash] = '\0';
|
||||
cache.len = last_slash;
|
||||
cache.flags = save_flags;
|
||||
} else if (track_flags & FL_DIR &&
|
||||
last_slash_dir > 0 && last_slash_dir <= PATH_MAX) {
|
||||
/*
|
||||
* We have a separate test for the directory case,
|
||||
* since it could be that we have found a symlink or a
|
||||
* non-existing directory and the track_flags says
|
||||
* that we cannot cache this fact, so the cache would
|
||||
* then have been left empty in this case.
|
||||
*
|
||||
* But if we are allowed to track real directories, we
|
||||
* can still cache the path components before the last
|
||||
* one (the found symlink or non-existing component).
|
||||
*/
|
||||
cache.path[last_slash_dir] = '\0';
|
||||
cache.len = last_slash_dir;
|
||||
cache.flags = FL_DIR;
|
||||
} else {
|
||||
reset_lstat_cache(track_flags, prefix_len_stat_func);
|
||||
}
|
||||
return ret_flags;
|
||||
}
|
||||
|
||||
/*
|
||||
* Invalidate the given 'name' from the cache, if 'name' matches
|
||||
* completely with the cache.
|
||||
*/
|
||||
void invalidate_lstat_cache(int len, const char *name)
|
||||
{
|
||||
int match_len, previous_slash;
|
||||
|
||||
match_len = longest_match_lstat_cache(len, name, &previous_slash);
|
||||
if (len == match_len) {
|
||||
if ((cache.track_flags & FL_DIR) && previous_slash > 0) {
|
||||
cache.path[previous_slash] = '\0';
|
||||
cache.len = previous_slash;
|
||||
cache.flags = FL_DIR;
|
||||
} else
|
||||
reset_lstat_cache(cache.track_flags,
|
||||
cache.prefix_len_stat_func);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Completely clear the contents of the cache
|
||||
*/
|
||||
void clear_lstat_cache(void)
|
||||
{
|
||||
reset_lstat_cache(0, 0);
|
||||
}
|
||||
|
||||
#define USE_ONLY_LSTAT 0
|
||||
|
||||
/*
|
||||
* Return non-zero if path 'name' has a leading symlink component
|
||||
*/
|
||||
int has_symlink_leading_path(int len, const char *name)
|
||||
{
|
||||
return lstat_cache(len, name,
|
||||
FL_SYMLINK|FL_DIR, USE_ONLY_LSTAT) &
|
||||
FL_SYMLINK;
|
||||
}
|
||||
|
||||
/*
|
||||
* Return non-zero if path 'name' has a leading symlink component or
|
||||
* if some leading path component does not exists.
|
||||
*/
|
||||
int has_symlink_or_noent_leading_path(int len, const char *name)
|
||||
{
|
||||
return lstat_cache(len, name,
|
||||
FL_SYMLINK|FL_NOENT|FL_DIR, USE_ONLY_LSTAT) &
|
||||
(FL_SYMLINK|FL_NOENT);
|
||||
}
|
||||
|
||||
/*
|
||||
* Return non-zero if all path components of 'name' exists as a
|
||||
* directory. If prefix_len > 0, we will test with the stat()
|
||||
* function instead of the lstat() function for a prefix length of
|
||||
* 'prefix_len', thus we then allow for symlinks in the prefix part as
|
||||
* long as those points to real existing directories.
|
||||
*/
|
||||
int has_dirs_only_path(int len, const char *name, int prefix_len)
|
||||
{
|
||||
return lstat_cache(len, name,
|
||||
FL_DIR|FL_FULLPATH, prefix_len) &
|
||||
FL_DIR;
|
||||
}
|
||||
|
||||
@@ -118,7 +118,11 @@ test_expect_success 'pre-rebase hook stops rebase (1)' '
|
||||
test_expect_success 'pre-rebase hook stops rebase (2)' '
|
||||
git checkout test &&
|
||||
git reset --hard side &&
|
||||
EDITOR=true test_must_fail git rebase -i master &&
|
||||
(
|
||||
EDITOR=:
|
||||
export EDITOR
|
||||
test_must_fail git rebase -i master
|
||||
) &&
|
||||
test "z$(git symbolic-ref HEAD)" = zrefs/heads/test &&
|
||||
test 0 = $(git rev-list HEAD...side | wc -l)
|
||||
'
|
||||
|
||||
@@ -172,16 +172,126 @@ EOF
|
||||
|
||||
test_expect_success 'pre-rebase hook stops rebase' '
|
||||
git checkout -b stops1 other &&
|
||||
GIT_EDITOR=: test_must_fail git rebase --root --onto master &&
|
||||
(
|
||||
GIT_EDITOR=:
|
||||
export GIT_EDITOR
|
||||
test_must_fail git rebase --root --onto master
|
||||
) &&
|
||||
test "z$(git symbolic-ref HEAD)" = zrefs/heads/stops1
|
||||
test 0 = $(git rev-list other...stops1 | wc -l)
|
||||
'
|
||||
|
||||
test_expect_success 'pre-rebase hook stops rebase -i' '
|
||||
git checkout -b stops2 other &&
|
||||
GIT_EDITOR=: test_must_fail git rebase --root --onto master &&
|
||||
(
|
||||
GIT_EDITOR=:
|
||||
export GIT_EDITOR
|
||||
test_must_fail git rebase --root --onto master
|
||||
) &&
|
||||
test "z$(git symbolic-ref HEAD)" = zrefs/heads/stops2
|
||||
test 0 = $(git rev-list other...stops2 | wc -l)
|
||||
'
|
||||
|
||||
test_expect_success 'remove pre-rebase hook' '
|
||||
rm -f .git/hooks/pre-rebase
|
||||
'
|
||||
|
||||
test_expect_success 'set up a conflict' '
|
||||
git checkout master &&
|
||||
echo conflict > B &&
|
||||
git add B &&
|
||||
git commit -m conflict
|
||||
'
|
||||
|
||||
test_expect_success 'rebase --root with conflict (first part)' '
|
||||
git checkout -b conflict1 other &&
|
||||
test_must_fail git rebase --root --onto master &&
|
||||
git ls-files -u | grep "B$"
|
||||
'
|
||||
|
||||
test_expect_success 'fix the conflict' '
|
||||
echo 3 > B &&
|
||||
git add B
|
||||
'
|
||||
|
||||
cat > expect-conflict <<EOF
|
||||
6
|
||||
5
|
||||
4
|
||||
3
|
||||
conflict
|
||||
2
|
||||
1
|
||||
EOF
|
||||
|
||||
test_expect_success 'rebase --root with conflict (second part)' '
|
||||
git rebase --continue &&
|
||||
git log --pretty=tformat:"%s" > conflict1 &&
|
||||
test_cmp expect-conflict conflict1
|
||||
'
|
||||
|
||||
test_expect_success 'rebase -i --root with conflict (first part)' '
|
||||
git checkout -b conflict2 other &&
|
||||
(
|
||||
GIT_EDITOR=:
|
||||
export GIT_EDITOR
|
||||
test_must_fail git rebase -i --root --onto master
|
||||
) &&
|
||||
git ls-files -u | grep "B$"
|
||||
'
|
||||
|
||||
test_expect_success 'fix the conflict' '
|
||||
echo 3 > B &&
|
||||
git add B
|
||||
'
|
||||
|
||||
test_expect_success 'rebase -i --root with conflict (second part)' '
|
||||
git rebase --continue &&
|
||||
git log --pretty=tformat:"%s" > conflict2 &&
|
||||
test_cmp expect-conflict conflict2
|
||||
'
|
||||
|
||||
cat >expect-conflict-p <<\EOF
|
||||
commit conflict3 conflict3~1 conflict3^2
|
||||
Merge branch 'third' into other
|
||||
commit conflict3^2 conflict3~4
|
||||
6
|
||||
commit conflict3~1 conflict3~2 conflict3~1^2
|
||||
Merge branch 'side' into other
|
||||
commit conflict3~1^2 conflict3~3
|
||||
5
|
||||
commit conflict3~2 conflict3~3
|
||||
4
|
||||
commit conflict3~3 conflict3~4
|
||||
3
|
||||
commit conflict3~4 conflict3~5
|
||||
conflict
|
||||
commit conflict3~5 conflict3~6
|
||||
2
|
||||
commit conflict3~6
|
||||
1
|
||||
EOF
|
||||
|
||||
test_expect_success 'rebase -i -p --root with conflict (first part)' '
|
||||
git checkout -b conflict3 other &&
|
||||
(
|
||||
GIT_EDITOR=:
|
||||
export GIT_EDITOR
|
||||
test_must_fail git rebase -i -p --root --onto master
|
||||
) &&
|
||||
git ls-files -u | grep "B$"
|
||||
'
|
||||
|
||||
test_expect_success 'fix the conflict' '
|
||||
echo 3 > B &&
|
||||
git add B
|
||||
'
|
||||
|
||||
test_expect_success 'rebase -i -p --root with conflict (second part)' '
|
||||
git rebase --continue &&
|
||||
git rev-list --topo-order --parents --pretty="tformat:%s" HEAD |
|
||||
git name-rev --stdin --name-only --refs=refs/heads/conflict3 >out &&
|
||||
test_cmp expect-conflict-p out
|
||||
'
|
||||
|
||||
test_done
|
||||
|
||||
168
t/t4033-diff-patience.sh
Executable file
168
t/t4033-diff-patience.sh
Executable file
@@ -0,0 +1,168 @@
|
||||
#!/bin/sh
|
||||
|
||||
test_description='patience diff algorithm'
|
||||
|
||||
. ./test-lib.sh
|
||||
|
||||
cat >file1 <<\EOF
|
||||
#include <stdio.h>
|
||||
|
||||
// Frobs foo heartily
|
||||
int frobnitz(int foo)
|
||||
{
|
||||
int i;
|
||||
for(i = 0; i < 10; i++)
|
||||
{
|
||||
printf("Your answer is: ");
|
||||
printf("%d\n", foo);
|
||||
}
|
||||
}
|
||||
|
||||
int fact(int n)
|
||||
{
|
||||
if(n > 1)
|
||||
{
|
||||
return fact(n-1) * n;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
frobnitz(fact(10));
|
||||
}
|
||||
EOF
|
||||
|
||||
cat >file2 <<\EOF
|
||||
#include <stdio.h>
|
||||
|
||||
int fib(int n)
|
||||
{
|
||||
if(n > 2)
|
||||
{
|
||||
return fib(n-1) + fib(n-2);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Frobs foo heartily
|
||||
int frobnitz(int foo)
|
||||
{
|
||||
int i;
|
||||
for(i = 0; i < 10; i++)
|
||||
{
|
||||
printf("%d\n", foo);
|
||||
}
|
||||
}
|
||||
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
frobnitz(fib(10));
|
||||
}
|
||||
EOF
|
||||
|
||||
cat >expect <<\EOF
|
||||
diff --git a/file1 b/file2
|
||||
index 6faa5a3..e3af329 100644
|
||||
--- a/file1
|
||||
+++ b/file2
|
||||
@@ -1,26 +1,25 @@
|
||||
#include <stdio.h>
|
||||
|
||||
+int fib(int n)
|
||||
+{
|
||||
+ if(n > 2)
|
||||
+ {
|
||||
+ return fib(n-1) + fib(n-2);
|
||||
+ }
|
||||
+ return 1;
|
||||
+}
|
||||
+
|
||||
// Frobs foo heartily
|
||||
int frobnitz(int foo)
|
||||
{
|
||||
int i;
|
||||
for(i = 0; i < 10; i++)
|
||||
{
|
||||
- printf("Your answer is: ");
|
||||
printf("%d\n", foo);
|
||||
}
|
||||
}
|
||||
|
||||
-int fact(int n)
|
||||
-{
|
||||
- if(n > 1)
|
||||
- {
|
||||
- return fact(n-1) * n;
|
||||
- }
|
||||
- return 1;
|
||||
-}
|
||||
-
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
- frobnitz(fact(10));
|
||||
+ frobnitz(fib(10));
|
||||
}
|
||||
EOF
|
||||
|
||||
test_expect_success 'patience diff' '
|
||||
|
||||
test_must_fail git diff --no-index --patience file1 file2 > output &&
|
||||
test_cmp expect output
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'patience diff output is valid' '
|
||||
|
||||
mv file2 expect &&
|
||||
git apply < output &&
|
||||
test_cmp expect file2
|
||||
|
||||
'
|
||||
|
||||
cat >uniq1 <<\EOF
|
||||
1
|
||||
2
|
||||
3
|
||||
4
|
||||
5
|
||||
6
|
||||
EOF
|
||||
|
||||
cat >uniq2 <<\EOF
|
||||
a
|
||||
b
|
||||
c
|
||||
d
|
||||
e
|
||||
f
|
||||
EOF
|
||||
|
||||
cat >expect <<\EOF
|
||||
diff --git a/uniq1 b/uniq2
|
||||
index b414108..0fdf397 100644
|
||||
--- a/uniq1
|
||||
+++ b/uniq2
|
||||
@@ -1,6 +1,6 @@
|
||||
-1
|
||||
-2
|
||||
-3
|
||||
-4
|
||||
-5
|
||||
-6
|
||||
+a
|
||||
+b
|
||||
+c
|
||||
+d
|
||||
+e
|
||||
+f
|
||||
EOF
|
||||
|
||||
test_expect_success 'completely different files' '
|
||||
|
||||
test_must_fail git diff --no-index --patience uniq1 uniq2 > output &&
|
||||
test_cmp expect output
|
||||
|
||||
'
|
||||
|
||||
test_done
|
||||
200
t/t4034-diff-words.sh
Executable file
200
t/t4034-diff-words.sh
Executable file
@@ -0,0 +1,200 @@
|
||||
#!/bin/sh
|
||||
|
||||
test_description='word diff colors'
|
||||
|
||||
. ./test-lib.sh
|
||||
|
||||
test_expect_success setup '
|
||||
|
||||
git config diff.color.old red
|
||||
git config diff.color.new green
|
||||
|
||||
'
|
||||
|
||||
decrypt_color () {
|
||||
sed \
|
||||
-e 's/.\[1m/<WHITE>/g' \
|
||||
-e 's/.\[31m/<RED>/g' \
|
||||
-e 's/.\[32m/<GREEN>/g' \
|
||||
-e 's/.\[36m/<BROWN>/g' \
|
||||
-e 's/.\[m/<RESET>/g'
|
||||
}
|
||||
|
||||
word_diff () {
|
||||
test_must_fail git diff --no-index "$@" pre post > output &&
|
||||
decrypt_color < output > output.decrypted &&
|
||||
test_cmp expect output.decrypted
|
||||
}
|
||||
|
||||
cat > pre <<\EOF
|
||||
h(4)
|
||||
|
||||
a = b + c
|
||||
EOF
|
||||
|
||||
cat > post <<\EOF
|
||||
h(4),hh[44]
|
||||
|
||||
a = b + c
|
||||
|
||||
aa = a
|
||||
|
||||
aeff = aeff * ( aaa )
|
||||
EOF
|
||||
|
||||
cat > expect <<\EOF
|
||||
<WHITE>diff --git a/pre b/post<RESET>
|
||||
<WHITE>index 330b04f..5ed8eff 100644<RESET>
|
||||
<WHITE>--- a/pre<RESET>
|
||||
<WHITE>+++ b/post<RESET>
|
||||
<BROWN>@@ -1,3 +1,7 @@<RESET>
|
||||
<RED>h(4)<RESET><GREEN>h(4),hh[44]<RESET>
|
||||
<RESET>
|
||||
a = b + c<RESET>
|
||||
|
||||
<GREEN>aa = a<RESET>
|
||||
|
||||
<GREEN>aeff = aeff * ( aaa )<RESET>
|
||||
EOF
|
||||
|
||||
test_expect_success 'word diff with runs of whitespace' '
|
||||
|
||||
word_diff --color-words
|
||||
|
||||
'
|
||||
|
||||
cat > expect <<\EOF
|
||||
<WHITE>diff --git a/pre b/post<RESET>
|
||||
<WHITE>index 330b04f..5ed8eff 100644<RESET>
|
||||
<WHITE>--- a/pre<RESET>
|
||||
<WHITE>+++ b/post<RESET>
|
||||
<BROWN>@@ -1,3 +1,7 @@<RESET>
|
||||
h(4),<GREEN>hh<RESET>[44]
|
||||
<RESET>
|
||||
a = b + c<RESET>
|
||||
|
||||
<GREEN>aa = a<RESET>
|
||||
|
||||
<GREEN>aeff = aeff * ( aaa<RESET> )
|
||||
EOF
|
||||
cp expect expect.letter-runs-are-words
|
||||
|
||||
test_expect_success 'word diff with a regular expression' '
|
||||
|
||||
word_diff --color-words="[a-z]+"
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'set a diff driver' '
|
||||
git config diff.testdriver.wordRegex "[^[:space:]]" &&
|
||||
cat <<EOF > .gitattributes
|
||||
pre diff=testdriver
|
||||
post diff=testdriver
|
||||
EOF
|
||||
'
|
||||
|
||||
test_expect_success 'option overrides .gitattributes' '
|
||||
|
||||
word_diff --color-words="[a-z]+"
|
||||
|
||||
'
|
||||
|
||||
cat > expect <<\EOF
|
||||
<WHITE>diff --git a/pre b/post<RESET>
|
||||
<WHITE>index 330b04f..5ed8eff 100644<RESET>
|
||||
<WHITE>--- a/pre<RESET>
|
||||
<WHITE>+++ b/post<RESET>
|
||||
<BROWN>@@ -1,3 +1,7 @@<RESET>
|
||||
h(4)<GREEN>,hh[44]<RESET>
|
||||
<RESET>
|
||||
a = b + c<RESET>
|
||||
|
||||
<GREEN>aa = a<RESET>
|
||||
|
||||
<GREEN>aeff = aeff * ( aaa )<RESET>
|
||||
EOF
|
||||
cp expect expect.non-whitespace-is-word
|
||||
|
||||
test_expect_success 'use regex supplied by driver' '
|
||||
|
||||
word_diff --color-words
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'set diff.wordRegex option' '
|
||||
git config diff.wordRegex "[[:alnum:]]+"
|
||||
'
|
||||
|
||||
cp expect.letter-runs-are-words expect
|
||||
|
||||
test_expect_success 'command-line overrides config' '
|
||||
word_diff --color-words="[a-z]+"
|
||||
'
|
||||
|
||||
cp expect.non-whitespace-is-word expect
|
||||
|
||||
test_expect_success '.gitattributes override config' '
|
||||
word_diff --color-words
|
||||
'
|
||||
|
||||
test_expect_success 'remove diff driver regex' '
|
||||
git config --unset diff.testdriver.wordRegex
|
||||
'
|
||||
|
||||
cat > expect <<\EOF
|
||||
<WHITE>diff --git a/pre b/post<RESET>
|
||||
<WHITE>index 330b04f..5ed8eff 100644<RESET>
|
||||
<WHITE>--- a/pre<RESET>
|
||||
<WHITE>+++ b/post<RESET>
|
||||
<BROWN>@@ -1,3 +1,7 @@<RESET>
|
||||
h(4),<GREEN>hh[44<RESET>]
|
||||
<RESET>
|
||||
a = b + c<RESET>
|
||||
|
||||
<GREEN>aa = a<RESET>
|
||||
|
||||
<GREEN>aeff = aeff * ( aaa<RESET> )
|
||||
EOF
|
||||
|
||||
test_expect_success 'use configured regex' '
|
||||
word_diff --color-words
|
||||
'
|
||||
|
||||
echo 'aaa (aaa)' > pre
|
||||
echo 'aaa (aaa) aaa' > post
|
||||
|
||||
cat > expect <<\EOF
|
||||
<WHITE>diff --git a/pre b/post<RESET>
|
||||
<WHITE>index c29453b..be22f37 100644<RESET>
|
||||
<WHITE>--- a/pre<RESET>
|
||||
<WHITE>+++ b/post<RESET>
|
||||
<BROWN>@@ -1 +1 @@<RESET>
|
||||
aaa (aaa) <GREEN>aaa<RESET>
|
||||
EOF
|
||||
|
||||
test_expect_success 'test parsing words for newline' '
|
||||
|
||||
word_diff --color-words="a+"
|
||||
|
||||
|
||||
'
|
||||
|
||||
echo '(:' > pre
|
||||
echo '(' > post
|
||||
|
||||
cat > expect <<\EOF
|
||||
<WHITE>diff --git a/pre b/post<RESET>
|
||||
<WHITE>index 289cb9d..2d06f37 100644<RESET>
|
||||
<WHITE>--- a/pre<RESET>
|
||||
<WHITE>+++ b/post<RESET>
|
||||
<BROWN>@@ -1 +1 @@<RESET>
|
||||
(<RED>:<RESET>
|
||||
EOF
|
||||
|
||||
test_expect_success 'test when words are only removed at the end' '
|
||||
|
||||
word_diff --color-words=.
|
||||
|
||||
'
|
||||
|
||||
test_done
|
||||
@@ -16,27 +16,31 @@ test_expect_success setup '
|
||||
test_tick &&
|
||||
git commit -m second &&
|
||||
|
||||
git mv one ichi &&
|
||||
test_tick &&
|
||||
git commit -m third &&
|
||||
|
||||
cp ichi ein &&
|
||||
git add ein &&
|
||||
test_tick &&
|
||||
git commit -m fourth &&
|
||||
|
||||
mkdir a &&
|
||||
echo ni >a/two &&
|
||||
git add a/two &&
|
||||
test_tick &&
|
||||
git commit -m third &&
|
||||
git commit -m fifth &&
|
||||
|
||||
echo san >a/three &&
|
||||
git add a/three &&
|
||||
git rm a/two &&
|
||||
test_tick &&
|
||||
git commit -m fourth &&
|
||||
|
||||
git rm a/three &&
|
||||
test_tick &&
|
||||
git commit -m fifth
|
||||
git commit -m sixth
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'diff-filter=A' '
|
||||
|
||||
actual=$(git log --pretty="format:%s" --diff-filter=A HEAD) &&
|
||||
expect=$(echo fourth ; echo third ; echo initial) &&
|
||||
expect=$(echo fifth ; echo fourth ; echo third ; echo initial) &&
|
||||
test "$actual" = "$expect" || {
|
||||
echo Oops
|
||||
echo "Actual: $actual"
|
||||
@@ -60,7 +64,43 @@ test_expect_success 'diff-filter=M' '
|
||||
test_expect_success 'diff-filter=D' '
|
||||
|
||||
actual=$(git log --pretty="format:%s" --diff-filter=D HEAD) &&
|
||||
expect=$(echo fifth) &&
|
||||
expect=$(echo sixth ; echo third) &&
|
||||
test "$actual" = "$expect" || {
|
||||
echo Oops
|
||||
echo "Actual: $actual"
|
||||
false
|
||||
}
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'diff-filter=R' '
|
||||
|
||||
actual=$(git log -M --pretty="format:%s" --diff-filter=R HEAD) &&
|
||||
expect=$(echo third) &&
|
||||
test "$actual" = "$expect" || {
|
||||
echo Oops
|
||||
echo "Actual: $actual"
|
||||
false
|
||||
}
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'diff-filter=C' '
|
||||
|
||||
actual=$(git log -C -C --pretty="format:%s" --diff-filter=C HEAD) &&
|
||||
expect=$(echo fourth) &&
|
||||
test "$actual" = "$expect" || {
|
||||
echo Oops
|
||||
echo "Actual: $actual"
|
||||
false
|
||||
}
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'git log --follow' '
|
||||
|
||||
actual=$(git log --follow --pretty="format:%s" ichi) &&
|
||||
expect=$(echo third ; echo second ; echo initial) &&
|
||||
test "$actual" = "$expect" || {
|
||||
echo Oops
|
||||
echo "Actual: $actual"
|
||||
@@ -72,6 +112,7 @@ test_expect_success 'diff-filter=D' '
|
||||
test_expect_success 'setup case sensitivity tests' '
|
||||
echo case >one &&
|
||||
test_tick &&
|
||||
git add one
|
||||
git commit -a -m Second
|
||||
'
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/sh
|
||||
|
||||
test_description='git am not losing options'
|
||||
test_description='git am with options and not losing them'
|
||||
. ./test-lib.sh
|
||||
|
||||
tm="$TEST_DIRECTORY/t4252"
|
||||
@@ -66,4 +66,13 @@ test_expect_success 'apply to a funny path' '
|
||||
test -f "$with_sq/file-5"
|
||||
'
|
||||
|
||||
test_expect_success 'am --reject' '
|
||||
rm -rf .git/rebase-apply &&
|
||||
git reset --hard initial &&
|
||||
test_must_fail git am --reject "$tm"/am-test-6-1 &&
|
||||
grep "@@ -1,3 +1,3 @@" file-2.rej &&
|
||||
test_must_fail git diff-files --exit-code --quiet file-2 &&
|
||||
grep "[-]-reject" .git/rebase-apply/apply-opt
|
||||
'
|
||||
|
||||
test_done
|
||||
|
||||
21
t/t4252/am-test-6-1
Normal file
21
t/t4252/am-test-6-1
Normal file
@@ -0,0 +1,21 @@
|
||||
From: A U Thor <au.thor@example.com>
|
||||
Date: Thu Dec 4 16:00:00 2008 -0800
|
||||
Subject: Huh
|
||||
|
||||
Should fail and leave rejects
|
||||
|
||||
diff --git i/file-2 w/file-2
|
||||
index 06e567b..b6f3a16 100644
|
||||
--- i/file-2
|
||||
+++ w/file-2
|
||||
@@ -1,3 +1,3 @@
|
||||
-0
|
||||
+One
|
||||
2
|
||||
3
|
||||
@@ -4,4 +4,4 @@
|
||||
4
|
||||
5
|
||||
-6
|
||||
+Six
|
||||
7
|
||||
@@ -11,8 +11,8 @@ test_expect_success 'preparing origin repository' '
|
||||
git clone --bare . x &&
|
||||
test "$(GIT_CONFIG=a.git/config git config --bool core.bare)" = true &&
|
||||
test "$(GIT_CONFIG=x/config git config --bool core.bare)" = true
|
||||
git bundle create b1.bundle --all HEAD &&
|
||||
git bundle create b2.bundle --all &&
|
||||
git bundle create b1.bundle --all &&
|
||||
git bundle create b2.bundle master &&
|
||||
mkdir dir &&
|
||||
cp b1.bundle dir/b3
|
||||
cp b1.bundle b4
|
||||
@@ -116,4 +116,20 @@ test_expect_success 'bundle clone with nonexistent HEAD' '
|
||||
test ! -e .git/refs/heads/master
|
||||
'
|
||||
|
||||
test_expect_success 'clone empty repository' '
|
||||
cd "$D" &&
|
||||
mkdir empty &&
|
||||
(cd empty && git init) &&
|
||||
git clone empty empty-clone &&
|
||||
test_tick &&
|
||||
(cd empty-clone
|
||||
echo "content" >> foo &&
|
||||
git add foo &&
|
||||
git commit -m "Initial commit" &&
|
||||
git push origin master &&
|
||||
expected=$(git rev-parse master) &&
|
||||
actual=$(git --git-dir=../empty/.git rev-parse master) &&
|
||||
test $actual = $expected)
|
||||
'
|
||||
|
||||
test_done
|
||||
|
||||
38
t/t6014-rev-list-all.sh
Executable file
38
t/t6014-rev-list-all.sh
Executable file
@@ -0,0 +1,38 @@
|
||||
#!/bin/sh
|
||||
|
||||
test_description='--all includes detached HEADs'
|
||||
|
||||
. ./test-lib.sh
|
||||
|
||||
|
||||
commit () {
|
||||
test_tick &&
|
||||
echo $1 > foo &&
|
||||
git add foo &&
|
||||
git commit -m "$1"
|
||||
}
|
||||
|
||||
test_expect_success 'setup' '
|
||||
|
||||
commit one &&
|
||||
commit two &&
|
||||
git checkout HEAD^ &&
|
||||
commit detached
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'rev-list --all lists detached HEAD' '
|
||||
|
||||
test 3 = $(git rev-list --all | wc -l)
|
||||
|
||||
'
|
||||
|
||||
test_expect_success 'repack does not lose detached HEAD' '
|
||||
|
||||
git gc &&
|
||||
git prune --expire=now &&
|
||||
git show HEAD
|
||||
|
||||
'
|
||||
|
||||
test_done
|
||||
98
t/t9134-git-svn-ignore-paths.sh
Executable file
98
t/t9134-git-svn-ignore-paths.sh
Executable file
@@ -0,0 +1,98 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright (c) 2009 Vitaly Shukela
|
||||
# Copyright (c) 2009 Eric Wong
|
||||
#
|
||||
|
||||
test_description='git svn property tests'
|
||||
. ./lib-git-svn.sh
|
||||
|
||||
test_expect_success 'setup test repository' '
|
||||
svn co "$svnrepo" s &&
|
||||
(
|
||||
cd s &&
|
||||
mkdir qqq www &&
|
||||
echo test_qqq > qqq/test_qqq.txt &&
|
||||
echo test_www > www/test_www.txt &&
|
||||
svn add qqq &&
|
||||
svn add www &&
|
||||
svn commit -m "create some files" &&
|
||||
svn up &&
|
||||
echo hi >> www/test_www.txt &&
|
||||
svn commit -m "modify www/test_www.txt" &&
|
||||
svn up
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'clone an SVN repository with ignored www directory' '
|
||||
git svn clone --ignore-paths="^www" "$svnrepo" g &&
|
||||
echo test_qqq > expect &&
|
||||
for i in g/*/*.txt; do cat $i >> expect2; done &&
|
||||
test_cmp expect expect2
|
||||
'
|
||||
|
||||
test_expect_success 'SVN-side change outside of www' '
|
||||
(
|
||||
cd s &&
|
||||
echo b >> qqq/test_qqq.txt &&
|
||||
svn commit -m "SVN-side change outside of www" &&
|
||||
svn up &&
|
||||
svn log -v | fgrep "SVN-side change outside of www"
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'update git svn-cloned repo' '
|
||||
(
|
||||
cd g &&
|
||||
git svn rebase --ignore-paths="^www" &&
|
||||
printf "test_qqq\nb\n" > expect &&
|
||||
for i in */*.txt; do cat $i >> expect2; done &&
|
||||
test_cmp expect2 expect &&
|
||||
rm expect expect2
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'SVN-side change inside of ignored www' '
|
||||
(
|
||||
cd s &&
|
||||
echo zaq >> www/test_www.txt
|
||||
svn commit -m "SVN-side change inside of www/test_www.txt" &&
|
||||
svn up &&
|
||||
svn log -v | fgrep "SVN-side change inside of www/test_www.txt"
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'update git svn-cloned repo' '
|
||||
(
|
||||
cd g &&
|
||||
git svn rebase --ignore-paths="^www" &&
|
||||
printf "test_qqq\nb\n" > expect &&
|
||||
for i in */*.txt; do cat $i >> expect2; done &&
|
||||
test_cmp expect2 expect &&
|
||||
rm expect expect2
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'SVN-side change in and out of ignored www' '
|
||||
(
|
||||
cd s &&
|
||||
echo cvf >> www/test_www.txt
|
||||
echo ygg >> qqq/test_qqq.txt
|
||||
svn commit -m "SVN-side change in and out of ignored www" &&
|
||||
svn up &&
|
||||
svn log -v | fgrep "SVN-side change in and out of ignored www"
|
||||
)
|
||||
'
|
||||
|
||||
test_expect_success 'update git svn-cloned repo again' '
|
||||
(
|
||||
cd g &&
|
||||
git svn rebase --ignore-paths="^www" &&
|
||||
printf "test_qqq\nb\nygg\n" > expect &&
|
||||
for i in */*.txt; do cat $i >> expect2; done &&
|
||||
test_cmp expect2 expect &&
|
||||
rm expect expect2
|
||||
)
|
||||
'
|
||||
|
||||
test_done
|
||||
@@ -61,7 +61,7 @@ static void unlink_entry(struct cache_entry *ce)
|
||||
char *cp, *prev;
|
||||
char *name = ce->name;
|
||||
|
||||
if (has_symlink_leading_path(ce_namelen(ce), ce->name))
|
||||
if (has_symlink_or_noent_leading_path(ce_namelen(ce), ce->name))
|
||||
return;
|
||||
if (unlink(name))
|
||||
return;
|
||||
@@ -580,7 +580,7 @@ static int verify_absent(struct cache_entry *ce, const char *action,
|
||||
if (o->index_only || o->reset || !o->update)
|
||||
return 0;
|
||||
|
||||
if (has_symlink_leading_path(ce_namelen(ce), ce->name))
|
||||
if (has_symlink_or_noent_leading_path(ce_namelen(ce), ce->name))
|
||||
return 0;
|
||||
|
||||
if (!lstat(ce->name, &st)) {
|
||||
|
||||
78
userdiff.c
78
userdiff.c
@@ -6,14 +6,20 @@ static struct userdiff_driver *drivers;
|
||||
static int ndrivers;
|
||||
static int drivers_alloc;
|
||||
|
||||
#define FUNCNAME(name, pattern) \
|
||||
{ name, NULL, -1, { pattern, REG_EXTENDED } }
|
||||
#define PATTERNS(name, pattern, word_regex) \
|
||||
{ name, NULL, -1, { pattern, REG_EXTENDED }, word_regex }
|
||||
static struct userdiff_driver builtin_drivers[] = {
|
||||
FUNCNAME("html", "^[ \t]*(<[Hh][1-6][ \t].*>.*)$"),
|
||||
FUNCNAME("java",
|
||||
PATTERNS("html", "^[ \t]*(<[Hh][1-6][ \t].*>.*)$",
|
||||
"[^<>= \t]+|[^[:space:]]|[\x80-\xff]+"),
|
||||
PATTERNS("java",
|
||||
"!^[ \t]*(catch|do|for|if|instanceof|new|return|switch|throw|while)\n"
|
||||
"^[ \t]*(([ \t]*[A-Za-z_][A-Za-z_0-9]*){2,}[ \t]*\\([^;]*)$"),
|
||||
FUNCNAME("objc",
|
||||
"^[ \t]*(([ \t]*[A-Za-z_][A-Za-z_0-9]*){2,}[ \t]*\\([^;]*)$",
|
||||
"[a-zA-Z_][a-zA-Z0-9_]*"
|
||||
"|[-+0-9.e]+[fFlL]?|0[xXbB]?[0-9a-fA-F]+[lL]?"
|
||||
"|[-+*/<>%&^|=!]="
|
||||
"|--|\\+\\+|<<=?|>>>?=?|&&|\\|\\|"
|
||||
"|[^[:space:]]|[\x80-\xff]+"),
|
||||
PATTERNS("objc",
|
||||
/* Negate C statements that can look like functions */
|
||||
"!^[ \t]*(do|for|if|else|return|switch|while)\n"
|
||||
/* Objective-C methods */
|
||||
@@ -21,20 +27,60 @@ FUNCNAME("objc",
|
||||
/* C functions */
|
||||
"^[ \t]*(([ \t]*[A-Za-z_][A-Za-z_0-9]*){2,}[ \t]*\\([^;]*)$\n"
|
||||
/* Objective-C class/protocol definitions */
|
||||
"^(@(implementation|interface|protocol)[ \t].*)$"),
|
||||
FUNCNAME("pascal",
|
||||
"^(@(implementation|interface|protocol)[ \t].*)$",
|
||||
/* -- */
|
||||
"[a-zA-Z_][a-zA-Z0-9_]*"
|
||||
"|[-+0-9.e]+[fFlL]?|0[xXbB]?[0-9a-fA-F]+[lL]?"
|
||||
"|[-+*/<>%&^|=!]=|--|\\+\\+|<<=?|>>=?|&&|\\|\\||::|->"
|
||||
"|[^[:space:]]|[\x80-\xff]+"),
|
||||
PATTERNS("pascal",
|
||||
"^((procedure|function|constructor|destructor|interface|"
|
||||
"implementation|initialization|finalization)[ \t]*.*)$"
|
||||
"\n"
|
||||
"^(.*=[ \t]*(class|record).*)$"),
|
||||
FUNCNAME("php", "^[\t ]*((function|class).*)"),
|
||||
FUNCNAME("python", "^[ \t]*((class|def)[ \t].*)$"),
|
||||
FUNCNAME("ruby", "^[ \t]*((class|module|def)[ \t].*)$"),
|
||||
FUNCNAME("bibtex", "(@[a-zA-Z]{1,}[ \t]*\\{{0,1}[ \t]*[^ \t\"@',\\#}{~%]*).*$"),
|
||||
FUNCNAME("tex", "^(\\\\((sub)*section|chapter|part)\\*{0,1}\\{.*)$"),
|
||||
"^(.*=[ \t]*(class|record).*)$",
|
||||
/* -- */
|
||||
"[a-zA-Z_][a-zA-Z0-9_]*"
|
||||
"|[-+0-9.e]+|0[xXbB]?[0-9a-fA-F]+"
|
||||
"|<>|<=|>=|:=|\\.\\."
|
||||
"|[^[:space:]]|[\x80-\xff]+"),
|
||||
PATTERNS("php", "^[\t ]*((function|class).*)",
|
||||
/* -- */
|
||||
"[a-zA-Z_][a-zA-Z0-9_]*"
|
||||
"|[-+0-9.e]+|0[xXbB]?[0-9a-fA-F]+"
|
||||
"|[-+*/<>%&^|=!.]=|--|\\+\\+|<<=?|>>=?|===|&&|\\|\\||::|->"
|
||||
"|[^[:space:]]|[\x80-\xff]+"),
|
||||
PATTERNS("python", "^[ \t]*((class|def)[ \t].*)$",
|
||||
/* -- */
|
||||
"[a-zA-Z_][a-zA-Z0-9_]*"
|
||||
"|[-+0-9.e]+[jJlL]?|0[xX]?[0-9a-fA-F]+[lL]?"
|
||||
"|[-+*/<>%&^|=!]=|//=?|<<=?|>>=?|\\*\\*=?"
|
||||
"|[^[:space:]|[\x80-\xff]+"),
|
||||
/* -- */
|
||||
PATTERNS("ruby", "^[ \t]*((class|module|def)[ \t].*)$",
|
||||
/* -- */
|
||||
"(@|@@|\\$)?[a-zA-Z_][a-zA-Z0-9_]*"
|
||||
"|[-+0-9.e]+|0[xXbB]?[0-9a-fA-F]+|\\?(\\\\C-)?(\\\\M-)?."
|
||||
"|//=?|[-+*/<>%&^|=!]=|<<=?|>>=?|===|\\.{1,3}|::|[!=]~"
|
||||
"|[^[:space:]|[\x80-\xff]+"),
|
||||
PATTERNS("bibtex", "(@[a-zA-Z]{1,}[ \t]*\\{{0,1}[ \t]*[^ \t\"@',\\#}{~%]*).*$",
|
||||
"[={}\"]|[^={}\" \t]+"),
|
||||
PATTERNS("tex", "^(\\\\((sub)*section|chapter|part)\\*{0,1}\\{.*)$",
|
||||
"\\\\[a-zA-Z@]+|\\\\.|[a-zA-Z0-9\x80-\xff]+|[^[:space:]]"),
|
||||
PATTERNS("cpp",
|
||||
/* Jump targets or access declarations */
|
||||
"!^[ \t]*[A-Za-z_][A-Za-z_0-9]*:.*$\n"
|
||||
/* C/++ functions/methods at top level */
|
||||
"^([A-Za-z_][A-Za-z_0-9]*([ \t]+[A-Za-z_][A-Za-z_0-9]*([ \t]*::[ \t]*[^[:space:]]+)?){1,}[ \t]*\\([^;]*)$\n"
|
||||
/* compound type at top level */
|
||||
"^((struct|class|enum)[^;]*)$",
|
||||
/* -- */
|
||||
"[a-zA-Z_][a-zA-Z0-9_]*"
|
||||
"|[-+0-9.e]+[fFlL]?|0[xXbB]?[0-9a-fA-F]+[lL]?"
|
||||
"|[-+*/<>%&^|=!]=|--|\\+\\+|<<=?|>>=?|&&|\\|\\||::|->"
|
||||
"|[^[:space:]]|[\x80-\xff]+"),
|
||||
{ "default", NULL, -1, { NULL, 0 } },
|
||||
};
|
||||
#undef FUNCNAME
|
||||
#undef PATTERNS
|
||||
|
||||
static struct userdiff_driver driver_true = {
|
||||
"diff=true",
|
||||
@@ -134,6 +180,8 @@ int userdiff_config(const char *k, const char *v)
|
||||
return parse_string(&drv->external, k, v);
|
||||
if ((drv = parse_driver(k, v, "textconv")))
|
||||
return parse_string(&drv->textconv, k, v);
|
||||
if ((drv = parse_driver(k, v, "wordregex")))
|
||||
return parse_string(&drv->word_regex, k, v);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ struct userdiff_driver {
|
||||
const char *external;
|
||||
int binary;
|
||||
struct userdiff_funcname funcname;
|
||||
const char *word_regex;
|
||||
const char *textconv;
|
||||
};
|
||||
|
||||
|
||||
@@ -32,6 +32,7 @@ extern "C" {
|
||||
#define XDF_IGNORE_WHITESPACE (1 << 2)
|
||||
#define XDF_IGNORE_WHITESPACE_CHANGE (1 << 3)
|
||||
#define XDF_IGNORE_WHITESPACE_AT_EOL (1 << 4)
|
||||
#define XDF_PATIENCE_DIFF (1 << 5)
|
||||
#define XDF_WHITESPACE_FLAGS (XDF_IGNORE_WHITESPACE | XDF_IGNORE_WHITESPACE_CHANGE | XDF_IGNORE_WHITESPACE_AT_EOL)
|
||||
|
||||
#define XDL_PATCH_NORMAL '-'
|
||||
|
||||
@@ -329,6 +329,9 @@ int xdl_do_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
|
||||
xdalgoenv_t xenv;
|
||||
diffdata_t dd1, dd2;
|
||||
|
||||
if (xpp->flags & XDF_PATIENCE_DIFF)
|
||||
return xdl_do_patience_diff(mf1, mf2, xpp, xe);
|
||||
|
||||
if (xdl_prepare_env(mf1, mf2, xpp, xe) < 0) {
|
||||
|
||||
return -1;
|
||||
|
||||
@@ -55,5 +55,7 @@ int xdl_build_script(xdfenv_t *xe, xdchange_t **xscr);
|
||||
void xdl_free_script(xdchange_t *xscr);
|
||||
int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
|
||||
xdemitconf_t const *xecfg);
|
||||
int xdl_do_patience_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
|
||||
xdfenv_t *env);
|
||||
|
||||
#endif /* #if !defined(XDIFFI_H) */
|
||||
|
||||
381
xdiff/xpatience.c
Normal file
381
xdiff/xpatience.c
Normal file
@@ -0,0 +1,381 @@
|
||||
/*
|
||||
* LibXDiff by Davide Libenzi ( File Differential Library )
|
||||
* Copyright (C) 2003-2009 Davide Libenzi, Johannes E. Schindelin
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Lesser General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2.1 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public
|
||||
* License along with this library; if not, write to the Free Software
|
||||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
*
|
||||
* Davide Libenzi <davidel@xmailserver.org>
|
||||
*
|
||||
*/
|
||||
#include "xinclude.h"
|
||||
#include "xtypes.h"
|
||||
#include "xdiff.h"
|
||||
|
||||
/*
|
||||
* The basic idea of patience diff is to find lines that are unique in
|
||||
* both files. These are intuitively the ones that we want to see as
|
||||
* common lines.
|
||||
*
|
||||
* The maximal ordered sequence of such line pairs (where ordered means
|
||||
* that the order in the sequence agrees with the order of the lines in
|
||||
* both files) naturally defines an initial set of common lines.
|
||||
*
|
||||
* Now, the algorithm tries to extend the set of common lines by growing
|
||||
* the line ranges where the files have identical lines.
|
||||
*
|
||||
* Between those common lines, the patience diff algorithm is applied
|
||||
* recursively, until no unique line pairs can be found; these line ranges
|
||||
* are handled by the well-known Myers algorithm.
|
||||
*/
|
||||
|
||||
#define NON_UNIQUE ULONG_MAX
|
||||
|
||||
/*
|
||||
* This is a hash mapping from line hash to line numbers in the first and
|
||||
* second file.
|
||||
*/
|
||||
struct hashmap {
|
||||
int nr, alloc;
|
||||
struct entry {
|
||||
unsigned long hash;
|
||||
/*
|
||||
* 0 = unused entry, 1 = first line, 2 = second, etc.
|
||||
* line2 is NON_UNIQUE if the line is not unique
|
||||
* in either the first or the second file.
|
||||
*/
|
||||
unsigned long line1, line2;
|
||||
/*
|
||||
* "next" & "previous" are used for the longest common
|
||||
* sequence;
|
||||
* initially, "next" reflects only the order in file1.
|
||||
*/
|
||||
struct entry *next, *previous;
|
||||
} *entries, *first, *last;
|
||||
/* were common records found? */
|
||||
unsigned long has_matches;
|
||||
mmfile_t *file1, *file2;
|
||||
xdfenv_t *env;
|
||||
xpparam_t const *xpp;
|
||||
};
|
||||
|
||||
/* The argument "pass" is 1 for the first file, 2 for the second. */
|
||||
static void insert_record(int line, struct hashmap *map, int pass)
|
||||
{
|
||||
xrecord_t **records = pass == 1 ?
|
||||
map->env->xdf1.recs : map->env->xdf2.recs;
|
||||
xrecord_t *record = records[line - 1], *other;
|
||||
/*
|
||||
* After xdl_prepare_env() (or more precisely, due to
|
||||
* xdl_classify_record()), the "ha" member of the records (AKA lines)
|
||||
* is _not_ the hash anymore, but a linearized version of it. In
|
||||
* other words, the "ha" member is guaranteed to start with 0 and
|
||||
* the second record's ha can only be 0 or 1, etc.
|
||||
*
|
||||
* So we multiply ha by 2 in the hope that the hashing was
|
||||
* "unique enough".
|
||||
*/
|
||||
int index = (int)((record->ha << 1) % map->alloc);
|
||||
|
||||
while (map->entries[index].line1) {
|
||||
other = map->env->xdf1.recs[map->entries[index].line1 - 1];
|
||||
if (map->entries[index].hash != record->ha ||
|
||||
!xdl_recmatch(record->ptr, record->size,
|
||||
other->ptr, other->size,
|
||||
map->xpp->flags)) {
|
||||
if (++index >= map->alloc)
|
||||
index = 0;
|
||||
continue;
|
||||
}
|
||||
if (pass == 2)
|
||||
map->has_matches = 1;
|
||||
if (pass == 1 || map->entries[index].line2)
|
||||
map->entries[index].line2 = NON_UNIQUE;
|
||||
else
|
||||
map->entries[index].line2 = line;
|
||||
return;
|
||||
}
|
||||
if (pass == 2)
|
||||
return;
|
||||
map->entries[index].line1 = line;
|
||||
map->entries[index].hash = record->ha;
|
||||
if (!map->first)
|
||||
map->first = map->entries + index;
|
||||
if (map->last) {
|
||||
map->last->next = map->entries + index;
|
||||
map->entries[index].previous = map->last;
|
||||
}
|
||||
map->last = map->entries + index;
|
||||
map->nr++;
|
||||
}
|
||||
|
||||
/*
|
||||
* This function has to be called for each recursion into the inter-hunk
|
||||
* parts, as previously non-unique lines can become unique when being
|
||||
* restricted to a smaller part of the files.
|
||||
*
|
||||
* It is assumed that env has been prepared using xdl_prepare().
|
||||
*/
|
||||
static int fill_hashmap(mmfile_t *file1, mmfile_t *file2,
|
||||
xpparam_t const *xpp, xdfenv_t *env,
|
||||
struct hashmap *result,
|
||||
int line1, int count1, int line2, int count2)
|
||||
{
|
||||
result->file1 = file1;
|
||||
result->file2 = file2;
|
||||
result->xpp = xpp;
|
||||
result->env = env;
|
||||
|
||||
/* We know exactly how large we want the hash map */
|
||||
result->alloc = count1 * 2;
|
||||
result->entries = (struct entry *)
|
||||
xdl_malloc(result->alloc * sizeof(struct entry));
|
||||
if (!result->entries)
|
||||
return -1;
|
||||
memset(result->entries, 0, result->alloc * sizeof(struct entry));
|
||||
|
||||
/* First, fill with entries from the first file */
|
||||
while (count1--)
|
||||
insert_record(line1++, result, 1);
|
||||
|
||||
/* Then search for matches in the second file */
|
||||
while (count2--)
|
||||
insert_record(line2++, result, 2);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/*
|
||||
* Find the longest sequence with a smaller last element (meaning a smaller
|
||||
* line2, as we construct the sequence with entries ordered by line1).
|
||||
*/
|
||||
static int binary_search(struct entry **sequence, int longest,
|
||||
struct entry *entry)
|
||||
{
|
||||
int left = -1, right = longest;
|
||||
|
||||
while (left + 1 < right) {
|
||||
int middle = (left + right) / 2;
|
||||
/* by construction, no two entries can be equal */
|
||||
if (sequence[middle]->line2 > entry->line2)
|
||||
right = middle;
|
||||
else
|
||||
left = middle;
|
||||
}
|
||||
/* return the index in "sequence", _not_ the sequence length */
|
||||
return left;
|
||||
}
|
||||
|
||||
/*
|
||||
* The idea is to start with the list of common unique lines sorted by
|
||||
* the order in file1. For each of these pairs, the longest (partial)
|
||||
* sequence whose last element's line2 is smaller is determined.
|
||||
*
|
||||
* For efficiency, the sequences are kept in a list containing exactly one
|
||||
* item per sequence length: the sequence with the smallest last
|
||||
* element (in terms of line2).
|
||||
*/
|
||||
static struct entry *find_longest_common_sequence(struct hashmap *map)
|
||||
{
|
||||
struct entry **sequence = xdl_malloc(map->nr * sizeof(struct entry *));
|
||||
int longest = 0, i;
|
||||
struct entry *entry;
|
||||
|
||||
for (entry = map->first; entry; entry = entry->next) {
|
||||
if (!entry->line2 || entry->line2 == NON_UNIQUE)
|
||||
continue;
|
||||
i = binary_search(sequence, longest, entry);
|
||||
entry->previous = i < 0 ? NULL : sequence[i];
|
||||
sequence[++i] = entry;
|
||||
if (i == longest)
|
||||
longest++;
|
||||
}
|
||||
|
||||
/* No common unique lines were found */
|
||||
if (!longest) {
|
||||
xdl_free(sequence);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* Iterate starting at the last element, adjusting the "next" members */
|
||||
entry = sequence[longest - 1];
|
||||
entry->next = NULL;
|
||||
while (entry->previous) {
|
||||
entry->previous->next = entry;
|
||||
entry = entry->previous;
|
||||
}
|
||||
xdl_free(sequence);
|
||||
return entry;
|
||||
}
|
||||
|
||||
static int match(struct hashmap *map, int line1, int line2)
|
||||
{
|
||||
xrecord_t *record1 = map->env->xdf1.recs[line1 - 1];
|
||||
xrecord_t *record2 = map->env->xdf2.recs[line2 - 1];
|
||||
return xdl_recmatch(record1->ptr, record1->size,
|
||||
record2->ptr, record2->size, map->xpp->flags);
|
||||
}
|
||||
|
||||
static int patience_diff(mmfile_t *file1, mmfile_t *file2,
|
||||
xpparam_t const *xpp, xdfenv_t *env,
|
||||
int line1, int count1, int line2, int count2);
|
||||
|
||||
static int walk_common_sequence(struct hashmap *map, struct entry *first,
|
||||
int line1, int count1, int line2, int count2)
|
||||
{
|
||||
int end1 = line1 + count1, end2 = line2 + count2;
|
||||
int next1, next2;
|
||||
|
||||
for (;;) {
|
||||
/* Try to grow the line ranges of common lines */
|
||||
if (first) {
|
||||
next1 = first->line1;
|
||||
next2 = first->line2;
|
||||
while (next1 > line1 && next2 > line2 &&
|
||||
match(map, next1 - 1, next2 - 1)) {
|
||||
next1--;
|
||||
next2--;
|
||||
}
|
||||
} else {
|
||||
next1 = end1;
|
||||
next2 = end2;
|
||||
}
|
||||
while (line1 < next1 && line2 < next2 &&
|
||||
match(map, line1, line2)) {
|
||||
line1++;
|
||||
line2++;
|
||||
}
|
||||
|
||||
/* Recurse */
|
||||
if (next1 > line1 || next2 > line2) {
|
||||
struct hashmap submap;
|
||||
|
||||
memset(&submap, 0, sizeof(submap));
|
||||
if (patience_diff(map->file1, map->file2,
|
||||
map->xpp, map->env,
|
||||
line1, next1 - line1,
|
||||
line2, next2 - line2))
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!first)
|
||||
return 0;
|
||||
|
||||
while (first->next &&
|
||||
first->next->line1 == first->line1 + 1 &&
|
||||
first->next->line2 == first->line2 + 1)
|
||||
first = first->next;
|
||||
|
||||
line1 = first->line1 + 1;
|
||||
line2 = first->line2 + 1;
|
||||
|
||||
first = first->next;
|
||||
}
|
||||
}
|
||||
|
||||
/*
 * Diff the line ranges [line1, line1+count1) of file 1 and
 * [line2, line2+count2) of file 2 with the classic xdiff algorithm,
 * used when the patience algorithm found no unique matching lines.
 *
 * Assumes count1 > 0 and count2 > 0 (the caller, patience_diff(),
 * handles empty ranges before getting here); otherwise the
 * recs[... - 2] indexing below would underflow.
 *
 * Returns 0 on success, -1 if xdl_do_diff() fails.
 */
static int fall_back_to_classic_diff(struct hashmap *map,
		int line1, int count1, int line2, int count2)
{
	/*
	 * This probably does not work outside Git, since
	 * we have a very simple mmfile structure.
	 *
	 * Note: ideally, we would reuse the prepared environment, but
	 * the libxdiff interface does not (yet) allow for diffing only
	 * ranges of lines instead of the whole files.
	 */
	mmfile_t subfile1, subfile2;
	xpparam_t xpp;
	xdfenv_t env;

	/*
	 * Build sub-mmfiles pointing into the original buffers: start at
	 * the first byte of the first line, end after the last byte of the
	 * last line of each range (records are contiguous in the buffer).
	 */
	subfile1.ptr = (char *)map->env->xdf1.recs[line1 - 1]->ptr;
	subfile1.size = map->env->xdf1.recs[line1 + count1 - 2]->ptr +
		map->env->xdf1.recs[line1 + count1 - 2]->size - subfile1.ptr;
	subfile2.ptr = (char *)map->env->xdf2.recs[line2 - 1]->ptr;
	subfile2.size = map->env->xdf2.recs[line2 + count2 - 2]->ptr +
		map->env->xdf2.recs[line2 + count2 - 2]->size - subfile2.ptr;
	/* clear XDF_PATIENCE_DIFF so the sub-diff does not recurse back here */
	xpp.flags = map->xpp->flags & ~XDF_PATIENCE_DIFF;
	if (xdl_do_diff(&subfile1, &subfile2, &xpp, &env) < 0)
		return -1;

	/* copy the per-line change flags back into the caller's environment */
	memcpy(map->env->xdf1.rchg + line1 - 1, env.xdf1.rchg, count1);
	memcpy(map->env->xdf2.rchg + line2 - 1, env.xdf2.rchg, count2);

	xdl_free_env(&env);

	return 0;
}
|
||||
|
||||
/*
|
||||
* Recursively find the longest common sequence of unique lines,
|
||||
* and if none was found, ask xdl_do_diff() to do the job.
|
||||
*
|
||||
* This function assumes that env was prepared with xdl_prepare_env().
|
||||
*/
|
||||
static int patience_diff(mmfile_t *file1, mmfile_t *file2,
|
||||
xpparam_t const *xpp, xdfenv_t *env,
|
||||
int line1, int count1, int line2, int count2)
|
||||
{
|
||||
struct hashmap map;
|
||||
struct entry *first;
|
||||
int result = 0;
|
||||
|
||||
/* trivial case: one side is empty */
|
||||
if (!count1) {
|
||||
while(count2--)
|
||||
env->xdf2.rchg[line2++ - 1] = 1;
|
||||
return 0;
|
||||
} else if (!count2) {
|
||||
while(count1--)
|
||||
env->xdf1.rchg[line1++ - 1] = 1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
memset(&map, 0, sizeof(map));
|
||||
if (fill_hashmap(file1, file2, xpp, env, &map,
|
||||
line1, count1, line2, count2))
|
||||
return -1;
|
||||
|
||||
/* are there any matching lines at all? */
|
||||
if (!map.has_matches) {
|
||||
while(count1--)
|
||||
env->xdf1.rchg[line1++ - 1] = 1;
|
||||
while(count2--)
|
||||
env->xdf2.rchg[line2++ - 1] = 1;
|
||||
xdl_free(map.entries);
|
||||
return 0;
|
||||
}
|
||||
|
||||
first = find_longest_common_sequence(&map);
|
||||
if (first)
|
||||
result = walk_common_sequence(&map, first,
|
||||
line1, count1, line2, count2);
|
||||
else
|
||||
result = fall_back_to_classic_diff(&map,
|
||||
line1, count1, line2, count2);
|
||||
|
||||
xdl_free(map.entries);
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
 * Entry point for the patience diff algorithm: prepare the xdiff
 * environment for both files, then diff their full line ranges
 * (lines are 1-based, hence the 1, nrec arguments).
 *
 * Returns 0 on success, -1 if preparation or the diff fails.
 */
int xdl_do_patience_diff(mmfile_t *file1, mmfile_t *file2,
		xpparam_t const *xpp, xdfenv_t *env)
{
	if (xdl_prepare_env(file1, file2, xpp, env) < 0)
		return -1;

	/* environment is cleaned up in xdl_diff() */
	return patience_diff(file1, file2, xpp, env,
			1, env->xdf1.nrec, 1, env->xdf2.nrec);
}
|
||||
@@ -290,7 +290,8 @@ int xdl_prepare_env(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
|
||||
|
||||
xdl_free_classifier(&cf);
|
||||
|
||||
if (xdl_optimize_ctxs(&xe->xdf1, &xe->xdf2) < 0) {
|
||||
if (!(xpp->flags & XDF_PATIENCE_DIFF) &&
|
||||
xdl_optimize_ctxs(&xe->xdf1, &xe->xdf2) < 0) {
|
||||
|
||||
xdl_free_ctx(&xe->xdf2);
|
||||
xdl_free_ctx(&xe->xdf1);
|
||||
|
||||
Reference in New Issue
Block a user