#!/bin/sh
#
# Copyright (c) 2006 Eric Wong
#

test_description='git-svn tests'

GIT_SVN_LC_ALL=$LC_ALL

. ./lib-git-svn.sh
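
# Assemble the tree that will be imported into SVN: a plain file, an
# optional symlink, a deeply nested directory, and an executable script.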
mkdir import
cd import

echo foo > foo
if test -z "$NO_SYMLINK"
then
	ln -s foo foo.link
fi
mkdir -p dir/a/b/c/d/e
echo 'deep dir' > dir/a/b/c/d/e/file
mkdir -p bar
echo 'zzz' > bar/zzz
echo '#!/bin/sh' > exec.sh
chmod +x exec.sh
svn import -m 'import for git-svn' . "$svnrepo" >/dev/null

cd ..
rm -rf import
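
# A fresh git-svn setup should be able to init against the repository
# and fetch the single imported revision.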
test_expect_success \
	'initialize git-svn' \
	"git-svn init $svnrepo"

test_expect_success \
	'import an SVN revision into git' \
	'git-svn fetch'
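
# Keep a parallel SVN working copy checked out, so the result of each
# git-svn commit can be verified from the SVN side with 'svn up'.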
test_expect_success "checkout from svn" "svn co $svnrepo $SVN_TREE"
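
# With --rmdir, directories left empty on the SVN side should be pruned;
# moving the only deep file up to dir/ must remove dir/a entirely.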
name='try a deep --rmdir with a commit'
git checkout -f -b mybranch remotes/git-svn
mv dir/a/b/c/d/e/file dir/file
cp dir/file file
git update-index --add --remove dir/a/b/c/d/e/file dir/file file
git commit -m "$name"

test_expect_success "$name" \
	"git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch &&
	svn up $SVN_TREE &&
	test -d $SVN_TREE/dir && test ! -d $SVN_TREE/dir/a"
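
# git-svn does not yet handle node-kind changes (a path turning from a
# file into a directory or back), so the next four tests are expected
# failures.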
name='detect node change from file to directory #1'
mkdir dir/new_file
mv dir/file dir/new_file/file
mv dir/new_file dir/file
git update-index --remove dir/file
git update-index --add dir/file/file
git commit -m "$name"

test_expect_failure "$name" \
	'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch' \
	|| true

name='detect node change from directory to file #1'
rm -rf dir $GIT_DIR/index
git checkout -f -b mybranch2 remotes/git-svn
mv bar/zzz zzz
rm -rf bar
mv zzz bar
git update-index --remove -- bar/zzz
git update-index --add -- bar
git commit -m "$name"

test_expect_failure "$name" \
	'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch2' \
	|| true

name='detect node change from file to directory #2'
rm -f $GIT_DIR/index
git checkout -f -b mybranch3 remotes/git-svn
rm bar/zzz
git update-index --remove bar/zzz
mkdir bar/zzz
echo yyy > bar/zzz/yyy
git update-index --add bar/zzz/yyy
git commit -m "$name"

test_expect_failure "$name" \
	'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch3' \
	|| true

name='detect node change from directory to file #2'
rm -f $GIT_DIR/index
git checkout -f -b mybranch4 remotes/git-svn
rm -rf dir
git update-index --remove -- dir/file
touch dir
echo asdf > dir
git update-index --add -- dir
git commit -m "$name"

test_expect_failure "$name" \
	'git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch4' \
	|| true
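
# Mode changes must round-trip: toggling the executable bit in git
# should appear as the svn:executable property after 'svn up'.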
name='remove executable bit from a file'
rm -f $GIT_DIR/index
git checkout -f -b mybranch5 remotes/git-svn
chmod -x exec.sh
git update-index exec.sh
git commit -m "$name"

test_expect_success "$name" \
	"git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
	svn up $SVN_TREE &&
	test ! -x $SVN_TREE/exec.sh"

name='add executable bit back to file'
chmod +x exec.sh
git update-index exec.sh
git commit -m "$name"

test_expect_success "$name" \
	"git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
	svn up $SVN_TREE &&
	test -x $SVN_TREE/exec.sh"
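
# Symlink handling; skipped entirely when NO_SYMLINK is set, for
# filesystems that cannot represent symbolic links.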
if test -z "$NO_SYMLINK"
then
	name='executable file becomes a symlink to bar/zzz (file)'
	rm exec.sh
	ln -s bar/zzz exec.sh
	git update-index exec.sh
	git commit -m "$name"

	test_expect_success "$name" \
		"git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
		svn up $SVN_TREE &&
		test -L $SVN_TREE/exec.sh"

	name='new symlink is added to a file that was also just made executable'
	chmod +x bar/zzz
	ln -s bar/zzz exec-2.sh
	git update-index --add bar/zzz exec-2.sh
	git commit -m "$name"

	test_expect_success "$name" \
		"git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
		svn up $SVN_TREE &&
		test -x $SVN_TREE/bar/zzz &&
		test -L $SVN_TREE/exec-2.sh"

	name='modify a symlink to become a file'
	git help > help || true
	rm exec-2.sh
	cp help exec-2.sh
	git update-index exec-2.sh
	git commit -m "$name"

	test_expect_success "$name" \
		"git-svn commit --find-copies-harder --rmdir remotes/git-svn..mybranch5 &&
		svn up $SVN_TREE &&
		test -f $SVN_TREE/exec-2.sh &&
		test ! -L $SVN_TREE/exec-2.sh &&
		diff -u help $SVN_TREE/exec-2.sh"
fi
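
# Commit-message encoding: if the environment supplies a UTF-8 locale,
# push one commit with a UTF-8 log message through git-svn under it.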
if test -n "$GIT_SVN_LC_ALL" && echo "$GIT_SVN_LC_ALL" | grep -q '\.UTF-8$'
then
	name="commit with UTF-8 message: locale: $GIT_SVN_LC_ALL"
	echo '# hello' >> exec-2.sh
	git update-index exec-2.sh
	git commit -m 'éï∏'
	export LC_ALL="$GIT_SVN_LC_ALL"
	test_expect_success "$name" "git-svn commit HEAD"
	unset LC_ALL
else
	echo "UTF-8 locale not set, test skipped ($GIT_SVN_LC_ALL)"
fi
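
# GIT_SVN_ID selects an alternate metadata/ref namespace; fetching the
# same repository again as 'alt' must yield trees identical to
# remotes/git-svn.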
name='test fetch functionality (svn => git) with alternate GIT_SVN_ID'
GIT_SVN_ID=alt
export GIT_SVN_ID
test_expect_success "$name" \
	"git-svn init $svnrepo && git-svn fetch &&
	git-rev-list --pretty=raw remotes/git-svn | grep ^tree | uniq > a &&
	git-rev-list --pretty=raw remotes/alt | grep ^tree | uniq > b &&
	diff -u a b"

if test -n "$NO_SYMLINK"
then
	test_done
	exit 0
fi
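
# Finally, compare the tree checksums recorded in 'a' above against
# known-good values; the first entry exists only when the UTF-8 commit
# test ran.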
name='check imported tree checksums against expected tree checksums'
rm -f expected
if test -n "$GIT_SVN_LC_ALL" && echo "$GIT_SVN_LC_ALL" | grep -q '\.UTF-8$'
then
	echo tree f735671b89a7eb30cab1d8597de35bd4271ab813 > expected
fi
cat >> expected <<\EOF
tree 4b9af72bb861eaed053854ec502cf7df72618f0f
tree 031b8d557afc6fea52894eaebb45bec52f1ba6d1
tree 0b094cbff17168f24c302e297f55bfac65eb8bd3
tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e
tree 56a30b966619b863674f5978696f4a3594f2fca9
tree d667270a1f7b109f5eb3aaea21ede14b56bfdd6e
tree 8f51f74cf0163afc9ad68a4b1537288c4558b5a4
EOF

test_expect_success "$name" "diff -u a expected"

test_done