
Merge branch 'js/test-file-size'

Test clean-up.

* js/test-file-size:
  tests: consolidate the `file_size` function into `test-lib-functions.sh`
Junio C Hamano 2020-11-11 13:18:39 -08:00
commit f2061f6982
5 changed files with 24 additions and 42 deletions
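For orientation, a minimal sketch (not part of this commit) of how a test script can use the consolidated helper once `test-lib-functions.sh` provides it; the file name, seed string, and size below are purely illustrative:

	#!/bin/sh

	test_description='illustrative use of test_file_size'
	. ./test-lib.sh

	# test-tool genrandom writes the requested number of pseudo-random bytes,
	# and test_file_size reports a file's size in bytes.
	test_expect_success 'test_file_size reports the size in bytes' '
		test-tool genrandom seed 4096 >blob.bin &&
		sz=$(test_file_size blob.bin) &&
		test "$sz" -eq 4096
	'

	test_done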

t/t0021-conversion.sh

@@ -23,10 +23,6 @@ generate_random_characters () {
perl -pe "s/./chr((ord($&) % 26) + ord('a'))/sge" >"$TEST_ROOT/$NAME"
}
file_size () {
test-tool path-utils file-size "$1"
}
filter_git () {
rm -f *.log &&
git "$@"
@@ -361,9 +357,9 @@ test_expect_success PERL 'required process filter should filter data' '
cp "$TEST_ROOT/test3 '\''sq'\'',\$x=.o" "testsubdir/test3 '\''sq'\'',\$x=.r" &&
>test4-empty.r &&
S=$(file_size test.r) &&
S2=$(file_size test2.r) &&
S3=$(file_size "testsubdir/test3 '\''sq'\'',\$x=.r") &&
S=$(test_file_size test.r) &&
S2=$(test_file_size test2.r) &&
S3=$(test_file_size "testsubdir/test3 '\''sq'\'',\$x=.r") &&
M=$(git hash-object test.r) &&
M2=$(git hash-object test2.r) &&
M3=$(git hash-object "testsubdir/test3 '\''sq'\'',\$x=.r") &&
@@ -432,9 +428,9 @@ test_expect_success PERL 'required process filter should filter data for various
(
cd repo &&
S=$(file_size test.r) &&
S2=$(file_size test2.r) &&
S3=$(file_size "testsubdir/test3 '\''sq'\'',\$x=.r") &&
S=$(test_file_size test.r) &&
S2=$(test_file_size test2.r) &&
S3=$(test_file_size "testsubdir/test3 '\''sq'\'',\$x=.r") &&
M=$(git hash-object test.r) &&
M2=$(git hash-object test2.r) &&
M3=$(git hash-object "testsubdir/test3 '\''sq'\'',\$x=.r") &&
@@ -549,7 +545,7 @@ test_expect_success PERL 'required process filter takes precedence' '
echo "*.r filter=protocol" >.gitattributes &&
cp "$TEST_ROOT/test.o" test.r &&
S=$(file_size test.r) &&
S=$(test_file_size test.r) &&
# Check that the process filter is invoked here
filter_git add . &&
@@ -573,7 +569,7 @@ test_expect_success PERL 'required process filter should be used only for "clean
echo "*.r filter=protocol" >.gitattributes &&
cp "$TEST_ROOT/test.o" test.r &&
S=$(file_size test.r) &&
S=$(test_file_size test.r) &&
filter_git add . &&
cat >expected.log <<-EOF &&
@@ -697,9 +693,9 @@ test_expect_success PERL 'process filter should restart after unexpected write f
echo "this is going to fail" >smudge-write-fail.o &&
cp smudge-write-fail.o smudge-write-fail.r &&
S=$(file_size test.r) &&
S2=$(file_size test2.r) &&
SF=$(file_size smudge-write-fail.r) &&
S=$(test_file_size test.r) &&
S2=$(test_file_size test2.r) &&
SF=$(test_file_size smudge-write-fail.r) &&
M=$(git hash-object test.r) &&
M2=$(git hash-object test2.r) &&
MF=$(git hash-object smudge-write-fail.r) &&
@@ -752,9 +748,9 @@ test_expect_success PERL 'process filter should not be restarted if it signals a
echo "this will cause an error" >error.o &&
cp error.o error.r &&
S=$(file_size test.r) &&
S2=$(file_size test2.r) &&
SE=$(file_size error.r) &&
S=$(test_file_size test.r) &&
S2=$(test_file_size test2.r) &&
SE=$(test_file_size error.r) &&
M=$(git hash-object test.r) &&
M2=$(git hash-object test2.r) &&
ME=$(git hash-object error.r) &&
@@ -797,7 +793,7 @@ test_expect_success PERL 'process filter abort stops processing of all further f
M="blob=$(git hash-object abort.r)" &&
rm -f debug.log &&
SA=$(file_size abort.r) &&
SA=$(test_file_size abort.r) &&
git add . &&
rm -f *.r &&
@@ -859,7 +855,7 @@ test_expect_success PERL 'delayed checkout in process filter' '
git commit -m "test commit"
) &&
S=$(file_size "$TEST_ROOT/test.o") &&
S=$(test_file_size "$TEST_ROOT/test.o") &&
PM="ref=refs/heads/master treeish=$(git -C repo rev-parse --verify master) " &&
M="${PM}blob=$(git -C repo rev-parse --verify master:test.a)" &&
cat >a.exp <<-EOF &&

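The sizes captured via test_file_size above are interpolated into the expected filter logs that these tests compare against the debug.log written by the test filter. A rough, illustrative sketch of that pattern (the exact log lines and comparison helper are defined in t0021 and may differ in detail):

	S=$(test_file_size test.r) &&
	cat >expected.log <<-EOF &&
		START
		init handshake complete
		IN: clean test.r $S [OK] -- OUT: $S . [OK]
		STOP
	EOF
	test_cmp expected.log debug.log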
t/t1050-large.sh

@@ -5,12 +5,6 @@ test_description='adding and checking out large blobs'
. ./test-lib.sh
# This should be moved to test-lib.sh together with the
# copy in t0021 after both topics have graduated to 'master'.
file_size () {
test-tool path-utils file-size "$1"
}
test_expect_success setup '
# clone does not allow us to pass core.bigfilethreshold to
# new repos, so set core.bigfilethreshold globally
@@ -29,7 +23,7 @@ do
test_expect_success "add with $config" '
test_when_finished "rm -f .git/objects/pack/pack-*.* .git/index" &&
git $config add large1 &&
sz=$(file_size .git/objects/pack/pack-*.pack) &&
sz=$(test_file_size .git/objects/pack/pack-*.pack) &&
case "$expect" in
small) test "$sz" -le 100000 ;;
large) test "$sz" -ge 100000 ;;

t/t5315-pack-objects-compression.sh

@@ -4,12 +4,6 @@ test_description='pack-object compression configuration'
. ./test-lib.sh
# This should be moved to test-lib.sh together with the
# copy in t0021 after both topics have graduated to 'master'.
file_size () {
test-tool path-utils file-size "$1"
}
test_expect_success setup '
printf "%2000000s" X |
git hash-object -w --stdin >object-name &&
@@ -24,7 +18,7 @@ do
test_expect_success "pack-objects with $config" '
test_when_finished "rm -f pack-*.*" &&
git $config pack-objects pack <object-name &&
sz=$(file_size pack-*.pack) &&
sz=$(test_file_size pack-*.pack) &&
case "$expect" in
small) test "$sz" -le 100000 ;;
large) test "$sz" -ge 100000 ;;

t/t9303-fast-import-compression.sh

@@ -3,12 +3,6 @@
test_description='compression setting of fast-import utility'
. ./test-lib.sh
# This should be moved to test-lib.sh together with the
# copy in t0021 after both topics have graduated to 'master'.
file_size () {
test-tool path-utils file-size "$1"
}
import_large () {
(
echo blob
@@ -24,7 +18,7 @@ do
test_when_finished "rm -f .git/objects/pack/pack-*.*" &&
test_when_finished "rm -rf .git/objects/??" &&
import_large -c fastimport.unpacklimit=0 $config &&
sz=$(file_size .git/objects/pack/pack-*.pack) &&
sz=$(test_file_size .git/objects/pack/pack-*.pack) &&
case "$expect" in
small) test "$sz" -le 100000 ;;
large) test "$sz" -ge 100000 ;;
@@ -47,7 +41,7 @@ do
test_when_finished "rm -f .git/objects/pack/pack-*.*" &&
test_when_finished "rm -rf .git/objects/??" &&
import_large -c fastimport.unpacklimit=9 $config &&
sz=$(file_size .git/objects/??/????*) &&
sz=$(test_file_size .git/objects/??/????*) &&
case "$expect" in
small) test "$sz" -le 100000 ;;
large) test "$sz" -ge 100000 ;;

t/test-lib-functions.sh

@@ -783,6 +783,10 @@ test_line_count () {
fi
}
test_file_size () {
test-tool path-utils file-size "$1"
}
# Returns success if a comma separated string of keywords ($1) contains a
# given keyword ($2).
# Examples: