Mirror of https://github.com/NixOS/nixpkgs.git, synced 2024-11-02 07:31:26 +00:00
Revert "backup tools bup, storeBackup: implement sanity check" on behalf of Marc.
This reverts commit c823bc37d6. The newly added test case needs more
work to succeed on Hydra. See
<http://permalink.gmane.org/gmane.linux.distributions.nixos/11382>
for further details.
parent 43f965f50e
commit fe273e8965
@@ -1,9 +1,6 @@
{ stdenv, fetchgit, python, pyxattr, pylibacl, setuptools, fuse, git, perl, pandoc, makeWrapper
, diffutils, writeTextFile, rsync
, par2cmdline, par2Support ? false }:

# keep in mind you cannot prune older revisions yet! (2013-06)

assert par2Support -> par2cmdline != null;

with stdenv.lib;
@@ -13,14 +10,12 @@ stdenv.mkDerivation {

src = fetchgit {
url = "https://github.com/bup/bup.git";
rev = "98a8e2ebb775386cb7e66b1953df46cdbd4b4bd3";
sha256 = "ab01c70f0caf993c0c05ec3a1008b5940b433bf2f7bd4e9b995d85e81958c1b7";
rev = "96c6fa2a70425fff1e73d2e0945f8e242411ab58";
sha256 = "0d9hgyh1g5qcpdvnqv3a5zy67x79yx9qx557rxrnxyzqckp9v75n";
};

buildInputs = [ python git ];
nativeBuildInputs = [ pandoc perl makeWrapper rsync ];

enableParallelBuilding = true;
nativeBuildInputs = [ pandoc perl makeWrapper ];

patchPhase = ''
substituteInPlace Makefile --replace "-Werror" ""
@@ -29,12 +24,6 @@ stdenv.mkDerivation {
substituteInPlace $f --replace "/usr/bin/env python" "${python}/bin/python"
done
substituteInPlace Makefile --replace "./format-subst.pl" "perl ./format-subst.pl"
for t in t/*.sh t/configure-sampledata t/compare-trees; do
substituteInPlace $t --replace "/usr/bin/env bash" "$(type -p bash)"
done
substituteInPlace wvtestrun --replace "/usr/bin/env perl" "${perl}/bin/perl"

substituteInPlace t/test.sh --replace "/bin/pwd" "$(type -P pwd)"
'' + optionalString par2Support ''
substituteInPlace cmd/fsck-cmd.py --replace "['par2'" "['${par2cmdline}/bin/par2'"
'';
@@ -51,33 +40,7 @@ stdenv.mkDerivation {
postInstall = optionalString (elem stdenv.system platforms.linux) ''
wrapProgram $out/bin/bup --prefix PYTHONPATH : \
${stdenv.lib.concatStringsSep ":"
(map (path: "$(toPythonPath ${path})") [ pyxattr pylibacl setuptools fuse python.modules.readline ])}

## test it
make test

# if make test passes the following probably passes, too
backup_init(){
export BUP_DIR=$TMP/bup
PATH=$out/bin:$PATH
bup init
}
backup_make(){
( cd "$1"; tar -cvf - .) | bup split -n backup
}
backup_restore_latest(){
bup join backup | ( cd "$1"; tar -xf - )
}
backup_verify_integrity_latest(){
bup fsck
}
backup_verify_latest(){
# maybe closest would be to mount or use the FTP like server ..
true
}

. ${import ../test-case.nix { inherit diffutils writeTextFile; }}
backup_test backup 100M
(map (path: "$(toPythonPath ${path})") [ pyxattr pylibacl setuptools fuse ])}
'';

meta = {
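
For context, the bup hunk removed above is just shell around a tar-to-bup round trip. A minimal sketch of that flow against an ordinary bup installation, with illustrative paths that are not taken from the commit:

    # throwaway repository location (illustrative)
    export BUP_DIR=/tmp/bup-test
    bup init

    # back up a directory by streaming a tar archive into "bup split"
    ( cd /some/source/dir && tar -cf - . ) | bup split -n backup

    # restore the "backup" branch by joining it back into a tar stream
    mkdir -p /tmp/restored
    bup join backup | ( cd /tmp/restored && tar -xf - )

    # integrity check of the repository
    bup fsck

bup split stores the stream under the named branch and bup join reassembles it, which is why the removed test could diff the restored tree against the source.
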
@@ -1,4 +1,4 @@
{stdenv, which, coreutils, perl, fetchurl, perlPackages, makeWrapper, diffutils , writeScriptBin, writeTextFile, bzip2}:
{stdenv, which, coreutils, perl, fetchurl, perlPackages, makeWrapper, diffutils , writeScriptBin, bzip2}:

# quick usage:
# storeBackup.pl --sourceDir /home/user --backupDir /tmp/my_backup_destination
@@ -48,31 +48,56 @@ stdenv.mkDerivation {
PATH=$PATH:${dummyMount}/bin


## test it
backup_init(){
{ # simple sanity test, test backup/restore of simple store paths

mkdir backup
}
latestBackup(){
echo backup/default/$(ls -1 backup/default | sort | tail -n 1)
}
backup_make(){
# $1=source
$out/bin/storeBackup.pl --sourceDir "$1" --backupDir "backup"
}
backup_restore_latest(){
$out/bin/storeBackupRecover.pl -b "$(latestBackup)" -t "$1" -r /

backupRestore(){
source="$2"
echo =========
echo RUNNING TEST "$1" source: "$source"
mkdir restored

$out/bin/storeBackup.pl --sourceDir "$source" --backupDir backup
latestBackup=backup/default/$(ls -1 backup/default | sort | tail -n 1)
$out/bin/storeBackupRecover.pl -b "$latestBackup" -t restored -r /
${diffutils}/bin/diff -r "$source" restored

# storeBackupCheckSource should return 0
$out/bin/storeBackupCheckSource.pl -s "$source" -b "$latestBackup"
# storeBackupCheckSource should return not 0 when using different source
! $out/bin/storeBackupCheckSource.pl -s $TMP -b "$latestBackup"

# storeBackupCheckBackup should return 0
$out/bin/storeBackupCheckBackup.pl -c "$latestBackup"

chmod -R +w restored
rm -fr restored
}

backup_verify_integrity_latest(){
$out/bin/storeBackupCheckBackup.pl -c "$(latestBackup)"
}
backup_verify_latest(){
$out/bin/storeBackupCheckSource.pl -s "$1" -b "$(latestBackup)"
}
testDir=$TMP/testDir

. ${import ../test-case.nix { inherit diffutils writeTextFile; }}
backup_test backup 100M
'';
mkdir $testDir
echo X > $testDir/X
ln -s ./X $testDir/Y

backupRestore 'test 1: backup, restore' $testDir

# test huge blocks, according to docs files bigger than 100MB get split
# into pieces
dd if=/dev/urandom bs=100M of=block-1 count=1
dd if=/dev/urandom bs=100M of=block-2 count=1
cat block-1 block-2 > $testDir/block
backupRestore 'test 1 with huge block' $testDir

cat block-2 block-1 > $testDir/block
backupRestore 'test 1 with huge block reversed' $testDir

backupRestore 'test 2: backup, restore' $out
backupRestore 'test 3: backup, restore' $out
backupRestore 'test 4: backup diffutils to same backup locations, restore' ${diffutils}
}
'';

meta = {
description = "Storebackup is a backup suite that stores files on other disks";
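
The storeBackup check backed out here follows the same backup/restore/verify cycle. A condensed sketch of that cycle using only the tools and flags named in the hunk above, run from a scratch directory and assuming the storeBackup scripts are on PATH (the source variable and directory names are illustrative):

    mkdir -p backup restored

    # create a backup of the source directory
    storeBackup.pl --sourceDir "$source" --backupDir backup

    # the newest backup set of the default series
    latest=backup/default/$(ls -1 backup/default | sort | tail -n 1)

    # restore it and compare against the original
    storeBackupRecover.pl -b "$latest" -t restored -r /
    diff -r "$source" restored

    # consistency checks: backup against source, and the backup itself
    storeBackupCheckSource.pl -s "$source" -b "$latest"
    storeBackupCheckBackup.pl -c "$latest"

storeBackupCheckSource.pl is expected to fail when pointed at a different source tree, which is what the negated CheckSource line in the removed hunk asserts.
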
@@ -1,105 +0,0 @@
# while this test suite is not perfect it will at least provide some guarantees
# that basic features should be fine ..

/*
In order to use the suite you have to define the following functions

backup_init
backup_make source
backup_restore_latest target
backup_verify_integrity_latest
backup_verify_latest source

use true if a backup system does not implement a feature

TODO: add test cases for all backup solutions shipping with nixpkgs

This does not replace the test suites shipping with the backup solutions!
*/

{diffutils, writeTextFile}:

writeTextFile {
name = "backup-test-case";
text = ''
backup_run_tests_on_source(){
local test="$1"
local source="$2"
local backup="$3"
echo =========
echo RUNNING TEST "$test" source: "$source"
mkdir restored

backup_make "$source" backup

{ # verify that restoring works
backup_restore_latest restored
${diffutils}/bin/diff -r "$source" restored
# diff does not make a difference for symlinks, so list them and compare
# lists
( cd "$source"; find /var/www/ -type l) | sort > 1
( cd "$restored"; find /var/www/ -type l) | sort > 2
diff 1 2
}

{ # verify that backup tool thinks so, too:
backup_verify_latest "$source" backup
# using different source verification must fail:
! backup_verify_latest "$TMP" backup
}

backup_verify_integrity_latest backup

chmod -R +w restored
rm -fr restored
}

backup_test(){
set -x
# allow getting run time to compare backup solutions
echo "START $(date)"

local block_size="$2"

backup_init

if [ -z "$SKIP_SYMLINK_TEST" ]; then
{ # create first test case directory contentents
testDir=$TMP/test-1a
mkdir $testDir
echo X > $testDir/X
ln -s ./X $testDir/Y
}

backup_run_tests_on_source 'test 1a: backup, restore' "$testDir" "$backup"
fi

if [ -z "$SKIP_EMPTY_DIR_TEST" ]; then
{ # create first test case directory contentents
testDir=$TMP/test-1b
mkdir -p $testDir/empty-directory
}

backup_run_tests_on_source 'test 1b: backup, restore' "$testDir" "$backup"
fi

testDir=$TMP/test-huge-blocks
mkdir $testDir
# test huge blocks, according to docs files bigger than 100MB get split
# into pieces
dd if=/dev/urandom bs=1M of=block-0 count=20
dd if=/dev/urandom bs="$block_size" of=block-1 count=1
dd if=/dev/urandom bs="$block_size" of=block-2 count=1
cat block-0 block-0 block-0 block-1 block-2 block-0 block-0 block-0 > $testDir/block
backup_run_tests_on_source 'test 1 with huge block' $testDir

cat block-2 block-0 block-0 block-1 > $testDir/block
backup_run_tests_on_source 'test 1 with huge block reversed' $testDir

backup_run_tests_on_source 'test 2: backup, restore' $out
backup_run_tests_on_source 'test 3: backup, restore' $out
backup_run_tests_on_source 'test 4: backup diffutils to same backup locations, restore' ${diffutils}
echo "STOP $(date)"
}
'';
}
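
The contract the removed test-case.nix defines is small: a consumer implements the five hook functions, sources the file produced by writeTextFile, and calls backup_test with a name and a block size. A shell-level sketch of a consumer wired to a deliberately dumb copy-based backend, just to show the shape (the dummy-store path and the sourced path are illustrative, not part of the commit):

    # the five hooks the suite documents; a real package points these at its tools
    backup_init(){ mkdir -p "$TMP/dummy-store"; }
    backup_make(){ rm -rf "$TMP/dummy-store/latest"; cp -a "$1" "$TMP/dummy-store/latest"; }   # $1 = source dir
    backup_restore_latest(){ cp -a "$TMP/dummy-store/latest/." "$1"; }                         # $1 = target dir
    backup_verify_integrity_latest(){ true; }   # "use true if a backup system does not implement a feature"
    backup_verify_latest(){ true; }             # $1 = source dir

    # source the generated driver; in a derivation this path is whatever
    # ${import ../test-case.nix { inherit diffutils writeTextFile; }} evaluates to
    . /path/to/backup-test-case
    backup_test backup 100M

This is the same wiring the bup and storeBackup hunks above performed with their real command-line tools.
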