From 12c0bdf9e743195de8e16b4af4cca2b7cc8a78e0 Mon Sep 17 00:00:00 2001
From: Jean Delvare
Date: Wed, 1 Feb 2012 18:08:49 +0100
Subject: backup-files: Try mass copy first on copy

When copying many files to a snapshot directory, try a mass copy
first, as it is much faster. It is however not portable and may thus
fail. If it fails, fall back to per-file processing, which always
works.

This change results in a huge performance boost on systems where the
cp command supports all the required options (which includes all
systems using GNU coreutils.)

Signed-off-by: Jean Delvare
Reviewed-by: Raphael Hertzog
---
 quilt/scripts/backup-files.in | 22 ++++++++++++++++++----
 1 file changed, 18 insertions(+), 4 deletions(-)

diff --git a/quilt/scripts/backup-files.in b/quilt/scripts/backup-files.in
index 32d3d50..487a155 100644
--- a/quilt/scripts/backup-files.in
+++ b/quilt/scripts/backup-files.in
@@ -261,10 +261,24 @@ copy_many()
 	done
 	exec 3>&-
 
-	while read -d $'\0' -r
-	do
-		copy "$REPLY"
-	done < "$NONEMPTY_FILES"
+	if [ -s "$NONEMPTY_FILES" ]; then
+		# Try a mass copy first, as it is much faster.
+		# It is however not portable and may thus fail. If it fails,
+		# fallback to per-file processing, which always works.
+
+		if xargs -0 cp -p --parents --target-directory="$OPT_PREFIX" \
+			< "$NONEMPTY_FILES" 2> /dev/null; then
+			while read -d $'\0' -r
+			do
+				$ECHO "Copying $REPLY"
+			done < "$NONEMPTY_FILES"
+		else
+			while read -d $'\0' -r
+			do
+				copy "$REPLY"
+			done < "$NONEMPTY_FILES"
+		fi
+	fi
 }
 
 # Test if some backed up files have a link count greater than 1
-- 
cgit