#!/bin/bash
# Run multiple dd processes in parallel to copy a single file/block device.
# Each dd process operates on a dedicated part of the file at a time (1GB
# chunks for big files).

set -e

# Source: https://stackoverflow.com/a/25268449
min() {
    printf "%s\n" "${@:2}" | sort "$1" | head -n1
}

max() {
    # Uses sort's -r (reverse) option; tail instead of head would also work.
    min "${1}r" "${@:2}"
}

inputfile=$1
outputfile=$2

# Block device nodes report a size of 0 via stat, so query them with blockdev.
if [[ -b "$inputfile" ]]; then
    inputsize=$(blockdev --getsize64 "$inputfile")
else
    inputsize=$(stat --format %s "$inputfile")
fi

stepsize=1000  # blocks copied per dd invocation
parallel=6     # number of concurrent dd processes

# 1MB dd block size (i.e. 1GB chunks of $stepsize blocks), or smaller blocks
# if we cannot achieve the required parallelization with 1GB chunks. The max
# guards against a block size of 0 (and a division by zero below) for inputs
# smaller than stepsize * parallel bytes.
blocksize=$(max -g 1 "$(min -g 1000000 "$((inputsize / stepsize / parallel))")")

# conv=notrunc is essential here: without it, each dd truncates the output
# file to its own seek offset on open, destroying data already written by the
# other workers. conv=sparse skips writing NUL blocks, keeping the output
# sparse.
seq 0 "$stepsize" "$((inputsize / blocksize))" \
    | xargs -P "$parallel" -I{} \
        dd if="$inputfile" of="$outputfile" bs="$blocksize" \
            skip={} seek={} count="$stepsize" conv=sparse,notrunc status=none
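
# Example usage (hypothetical script name and paths; adjust to your setup):
#
#   ./parallel_dd.sh /dev/sdb /mnt/backup/sdb.img
#
# This copies the block device /dev/sdb into a sparse image file using six
# concurrent dd workers. The copy can be verified afterwards with cmp(1):
#
#   cmp /dev/sdb /mnt/backup/sdb.img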